diff --git a/.coveragerc b/.coveragerc index de92c1c23e8..ef8ea722106 100644 --- a/.coveragerc +++ b/.coveragerc @@ -46,7 +46,6 @@ omit = homeassistant/components/android_ip_webcam/* homeassistant/components/anel_pwrctrl/switch.py homeassistant/components/anthemav/media_player.py - homeassistant/components/apache_kafka/* homeassistant/components/apcupsd/* homeassistant/components/apple_tv/* homeassistant/components/aqualogic/* @@ -68,8 +67,8 @@ omit = homeassistant/components/aurora_abb_powerone/sensor.py homeassistant/components/avea/light.py homeassistant/components/avion/light.py + homeassistant/components/avri/const.py homeassistant/components/avri/sensor.py - homeassistant/components/azure_event_hub/* homeassistant/components/azure_service_bus/* homeassistant/components/baidu/tts.py homeassistant/components/beewi_smartclim/sensor.py @@ -79,7 +78,12 @@ omit = homeassistant/components/bh1750/sensor.py homeassistant/components/bitcoin/sensor.py homeassistant/components/bizkaibus/sensor.py - homeassistant/components/blink/* + homeassistant/components/blink/__init__.py + homeassistant/components/blink/alarm_control_panel.py + homeassistant/components/blink/binary_sensor.py + homeassistant/components/blink/camera.py + homeassistant/components/blink/const.py + homeassistant/components/blink/sensor.py homeassistant/components/blinksticklight/light.py homeassistant/components/blinkt/light.py homeassistant/components/blockchain/sensor.py @@ -154,9 +158,14 @@ omit = homeassistant/components/deluge/switch.py homeassistant/components/denon/media_player.py homeassistant/components/denonavr/media_player.py + homeassistant/components/denonavr/receiver.py homeassistant/components/deutsche_bahn/sensor.py homeassistant/components/devolo_home_control/__init__.py + homeassistant/components/devolo_home_control/binary_sensor.py homeassistant/components/devolo_home_control/const.py + homeassistant/components/devolo_home_control/devolo_device.py + homeassistant/components/devolo_home_control/sensor.py + homeassistant/components/devolo_home_control/subscriber.py homeassistant/components/devolo_home_control/switch.py homeassistant/components/dht/sensor.py homeassistant/components/digital_ocean/* @@ -255,7 +264,6 @@ omit = homeassistant/components/folder_watcher/* homeassistant/components/foobot/sensor.py homeassistant/components/fortios/device_tracker.py - homeassistant/components/fortigate/* homeassistant/components/foscam/camera.py homeassistant/components/foscam/const.py homeassistant/components/foursquare/* @@ -284,6 +292,7 @@ omit = homeassistant/components/gitlab_ci/sensor.py homeassistant/components/gitter/sensor.py homeassistant/components/glances/__init__.py + homeassistant/components/glances/const.py homeassistant/components/glances/sensor.py homeassistant/components/gntp/notify.py homeassistant/components/goalfeed/* @@ -339,6 +348,8 @@ omit = homeassistant/components/hunterdouglas_powerview/sensor.py homeassistant/components/hunterdouglas_powerview/cover.py homeassistant/components/hunterdouglas_powerview/entity.py + homeassistant/components/hvv_departures/sensor.py + homeassistant/components/hvv_departures/__init__.py homeassistant/components/hydrawise/* homeassistant/components/hyperion/light.py homeassistant/components/ialarm/alarm_control_panel.py @@ -431,7 +442,6 @@ omit = homeassistant/components/linux_battery/sensor.py homeassistant/components/lirc/* homeassistant/components/llamalab_automate/notify.py - homeassistant/components/lockitron/lock.py 
homeassistant/components/logi_circle/__init__.py homeassistant/components/logi_circle/camera.py homeassistant/components/logi_circle/const.py @@ -538,6 +548,7 @@ omit = homeassistant/components/notion/sensor.py homeassistant/components/noaa_tides/sensor.py homeassistant/components/norway_air/air_quality.py + homeassistant/components/notify_events/notify.py homeassistant/components/nsw_fuel_station/sensor.py homeassistant/components/nuimo_controller/* homeassistant/components/nuki/lock.py @@ -714,7 +725,11 @@ omit = homeassistant/components/sinch/* homeassistant/components/slide/* homeassistant/components/sma/sensor.py - homeassistant/components/smappee/* + homeassistant/components/smappee/__init__.py + homeassistant/components/smappee/api.py + homeassistant/components/smappee/binary_sensor.py + homeassistant/components/smappee/sensor.py + homeassistant/components/smappee/switch.py homeassistant/components/smarty/* homeassistant/components/smarthab/* homeassistant/components/sms/* @@ -740,7 +755,8 @@ omit = homeassistant/components/spotcrime/sensor.py homeassistant/components/spotify/__init__.py homeassistant/components/spotify/media_player.py - homeassistant/components/squeezebox/* + homeassistant/components/squeezebox/__init__.py + homeassistant/components/squeezebox/media_player.py homeassistant/components/starline/* homeassistant/components/starlingbank/sensor.py homeassistant/components/steam_online/sensor.py @@ -797,6 +813,7 @@ omit = homeassistant/components/thomson/device_tracker.py homeassistant/components/tibber/* homeassistant/components/tikteck/light.py + homeassistant/components/tile/__init__.py homeassistant/components/tile/device_tracker.py homeassistant/components/time_date/sensor.py homeassistant/components/tmb/sensor.py @@ -804,7 +821,16 @@ omit = homeassistant/components/todoist/const.py homeassistant/components/tof/sensor.py homeassistant/components/tomato/device_tracker.py - homeassistant/components/toon/* + homeassistant/components/toon/__init__.py + homeassistant/components/toon/binary_sensor.py + homeassistant/components/toon/climate.py + homeassistant/components/toon/const.py + homeassistant/components/toon/coordinator.py + homeassistant/components/toon/helpers.py + homeassistant/components/toon/models.py + homeassistant/components/toon/oauth2.py + homeassistant/components/toon/sensor.py + homeassistant/components/toon/switch.py homeassistant/components/torque/sensor.py homeassistant/components/totalconnect/* homeassistant/components/touchline/climate.py @@ -891,7 +917,14 @@ omit = homeassistant/components/xeoma/camera.py homeassistant/components/xfinity/device_tracker.py homeassistant/components/xiaomi/camera.py - homeassistant/components/xiaomi_aqara/* + homeassistant/components/xiaomi_aqara/__init__.py + homeassistant/components/xiaomi_aqara/binary_sensor.py + homeassistant/components/xiaomi_aqara/const.py + homeassistant/components/xiaomi_aqara/cover.py + homeassistant/components/xiaomi_aqara/light.py + homeassistant/components/xiaomi_aqara/lock.py + homeassistant/components/xiaomi_aqara/sensor.py + homeassistant/components/xiaomi_aqara/switch.py homeassistant/components/xiaomi_miio/__init__.py homeassistant/components/xiaomi_miio/air_quality.py homeassistant/components/xiaomi_miio/alarm_control_panel.py diff --git a/.gitattributes b/.gitattributes index caff2fc5c1f..e70ab0a2c70 100644 --- a/.gitattributes +++ b/.gitattributes @@ -8,3 +8,5 @@ *.png binary *.zip binary *.mp3 binary + +Dockerfile.dev linguist-language=Dockerfile diff --git 
a/.github/ISSUE_TEMPLATE.md b/.github/ISSUE_TEMPLATE.md index 713c7dc2872..f873f250da8 100644 --- a/.github/ISSUE_TEMPLATE.md +++ b/.github/ISSUE_TEMPLATE.md @@ -21,7 +21,7 @@ - Home Assistant Core release with the issue: - Last working Home Assistant Core release (if known): -- Operating environment (Home Assistant/Supervised/Docker/venv): +- Operating environment (OS/Container/Supervised/Core): - Integration causing this issue: - Link to integration documentation on our website: diff --git a/.github/ISSUE_TEMPLATE/BUG_REPORT.md b/.github/ISSUE_TEMPLATE/BUG_REPORT.md index 9bfecda724f..e60aa00a448 100644 --- a/.github/ISSUE_TEMPLATE/BUG_REPORT.md +++ b/.github/ISSUE_TEMPLATE/BUG_REPORT.md @@ -25,7 +25,7 @@ about: Report an issue with Home Assistant Core - Home Assistant Core release with the issue: - Last working Home Assistant Core release (if known): -- Operating environment (Home Assistant/Supervised/Docker/venv): +- Operating environment (OS/Container/Supervised/Core): - Integration causing this issue: - Link to integration documentation on our website: diff --git a/CODEOWNERS b/CODEOWNERS index 82e3e388026..d860a0f57ed 100644 --- a/CODEOWNERS +++ b/CODEOWNERS @@ -46,7 +46,7 @@ homeassistant/components/auth/* @home-assistant/core homeassistant/components/automation/* @home-assistant/core homeassistant/components/avea/* @pattyland homeassistant/components/avri/* @timvancann -homeassistant/components/awair/* @danielsjf +homeassistant/components/awair/* @ahayworth @danielsjf homeassistant/components/aws/* @awarecan @robbiet480 homeassistant/components/axis/* @Kane610 homeassistant/components/azure_event_hub/* @eavanvalkenburg @@ -57,7 +57,7 @@ homeassistant/components/bizkaibus/* @UgaitzEtxebarria homeassistant/components/blebox/* @gadgetmobile homeassistant/components/blink/* @fronzbot homeassistant/components/bmp280/* @belidzs -homeassistant/components/bmw_connected_drive/* @gerard33 +homeassistant/components/bmw_connected_drive/* @gerard33 @rikroe homeassistant/components/bom/* @maddenp homeassistant/components/braviatv/* @robbiet480 @bieniu homeassistant/components/broadlink/* @danielhiversen @felipediel @@ -86,6 +86,7 @@ homeassistant/components/cpuspeed/* @fabaff homeassistant/components/cups/* @fabaff homeassistant/components/daikin/* @fredrike homeassistant/components/darksky/* @fabaff +homeassistant/components/debugpy/* @frenck homeassistant/components/deconz/* @Kane610 homeassistant/components/delijn/* @bollewolle @Emilv2 homeassistant/components/demo/* @home-assistant/core @@ -133,7 +134,6 @@ homeassistant/components/flock/* @fabaff homeassistant/components/flume/* @ChrisMandich @bdraco homeassistant/components/flunearyou/* @bachya homeassistant/components/forked_daapd/* @uvjustin -homeassistant/components/fortigate/* @kifeo homeassistant/components/fortios/* @kimfrellsen homeassistant/components/foscam/* @skgsergio homeassistant/components/foursquare/* @robbiet480 @@ -184,7 +184,10 @@ homeassistant/components/http/* @home-assistant/core homeassistant/components/huawei_lte/* @scop @fphammerle homeassistant/components/huawei_router/* @abmantis homeassistant/components/hue/* @balloob +homeassistant/components/humidifier/* @home-assistant/core @Shulyaka homeassistant/components/hunterdouglas_powerview/* @bdraco +homeassistant/components/hvv_departures/* @vigonotion +homeassistant/components/hydrawise/* @ptcryan homeassistant/components/iammeter/* @lewei50 homeassistant/components/iaqualink/* @flz homeassistant/components/icloud/* @Quentame @@ -243,6 +246,7 @@ 
homeassistant/components/melissa/* @kennedyshead homeassistant/components/met/* @danielhiversen homeassistant/components/meteo_france/* @victorcerutti @oncleben31 @Quentame homeassistant/components/meteoalarm/* @rolfberkenbosch +homeassistant/components/metoffice/* @MrHarcombe homeassistant/components/miflora/* @danielhiversen @ChristianKuehnel homeassistant/components/mikrotik/* @engrbm87 homeassistant/components/mill/* @danielhiversen @@ -274,6 +278,7 @@ homeassistant/components/nissan_leaf/* @filcole homeassistant/components/nmbs/* @thibmaek homeassistant/components/no_ip/* @fabaff homeassistant/components/notify/* @home-assistant/core +homeassistant/components/notify_events/* @matrozov @papajojo homeassistant/components/notion/* @bachya homeassistant/components/nsw_fuel_station/* @nickw444 homeassistant/components/nsw_rural_fire_service_feed/* @exxamalte @@ -311,9 +316,10 @@ homeassistant/components/plaato/* @JohNan homeassistant/components/plant/* @ChristianKuehnel homeassistant/components/plex/* @jjlawren homeassistant/components/plugwise/* @CoMPaTech @bouwew -homeassistant/components/plum_lightpad/* @ColinHarrington +homeassistant/components/plum_lightpad/* @ColinHarrington @prystupa homeassistant/components/point/* @fredrike homeassistant/components/powerwall/* @bdraco @jrester +homeassistant/components/prometheus/* @knyar homeassistant/components/proxmoxve/* @k4ds3 @jhollowe homeassistant/components/ps4/* @ktnrg45 homeassistant/components/ptvsd/* @swamp-ig @@ -362,6 +368,7 @@ homeassistant/components/sinch/* @bendikrb homeassistant/components/sisyphus/* @jkeljo homeassistant/components/slide/* @ualex73 homeassistant/components/sma/* @kellerza +homeassistant/components/smappee/* @bsmappee homeassistant/components/smarthab/* @outadoc homeassistant/components/smartthings/* @andrewsayre homeassistant/components/smarty/* @z0mbieprocess @@ -375,7 +382,7 @@ homeassistant/components/somfy/* @tetienne homeassistant/components/sonarr/* @ctalkington homeassistant/components/songpal/* @rytilahti @shenxn homeassistant/components/spaceapi/* @fabaff -homeassistant/components/speedtestdotnet/* @rohankapoorcom +homeassistant/components/speedtestdotnet/* @rohankapoorcom @engrbm87 homeassistant/components/spider/* @peternijssen homeassistant/components/spotify/* @frenck homeassistant/components/sql/* @dgomes @@ -453,7 +460,6 @@ homeassistant/components/watson_tts/* @rutkai homeassistant/components/weather/* @fabaff homeassistant/components/webostv/* @bendavid homeassistant/components/websocket_api/* @home-assistant/core -homeassistant/components/wemo/* @sqldiablo homeassistant/components/wiffi/* @mampfes homeassistant/components/withings/* @vangorra homeassistant/components/wled/* @frenck diff --git a/homeassistant/auth/mfa_modules/totp.py b/homeassistant/auth/mfa_modules/totp.py index d35f237f424..2fc8c379861 100644 --- a/homeassistant/auth/mfa_modules/totp.py +++ b/homeassistant/auth/mfa_modules/totp.py @@ -117,7 +117,8 @@ class TotpAuthModule(MultiFactorAuthModule): Mfa module should extend SetupFlow """ - user = await self.hass.auth.async_get_user(user_id) # type: ignore + user = await self.hass.auth.async_get_user(user_id) + assert user is not None return TotpSetupFlow(self, self.input_schema, user) async def async_setup_user(self, user_id: str, setup_data: Any) -> str: diff --git a/homeassistant/auth/providers/__init__.py b/homeassistant/auth/providers/__init__.py index 1fa70e42b3f..35208bd847c 100644 --- a/homeassistant/auth/providers/__init__.py +++ b/homeassistant/auth/providers/__init__.py 
@@ -175,7 +175,7 @@ class LoginFlow(data_entry_flow.FlowHandler): """Initialize the login flow.""" self._auth_provider = auth_provider self._auth_module_id: Optional[str] = None - self._auth_manager = auth_provider.hass.auth # type: ignore + self._auth_manager = auth_provider.hass.auth self.available_mfa_modules: Dict[str, str] = {} self.created_at = dt_util.utcnow() self.invalid_mfa_times = 0 @@ -224,6 +224,7 @@ class LoginFlow(data_entry_flow.FlowHandler): errors = {} + assert self._auth_module_id is not None auth_module = self._auth_manager.get_auth_mfa_module(self._auth_module_id) if auth_module is None: # Given an invalid input to async_step_select_mfa_module @@ -234,7 +235,9 @@ class LoginFlow(data_entry_flow.FlowHandler): auth_module, "async_initialize_login_mfa_step" ): try: - await auth_module.async_initialize_login_mfa_step(self.user.id) + await auth_module.async_initialize_login_mfa_step( # type: ignore + self.user.id + ) except HomeAssistantError: _LOGGER.exception("Error initializing MFA step") return self.async_abort(reason="unknown_error") diff --git a/homeassistant/bootstrap.py b/homeassistant/bootstrap.py index 086416b7d35..94ec33f4e1a 100644 --- a/homeassistant/bootstrap.py +++ b/homeassistant/bootstrap.py @@ -1,6 +1,7 @@ """Provide methods to bootstrap a Home Assistant instance.""" import asyncio import contextlib +from datetime import datetime import logging import logging.handlers import os @@ -20,7 +21,12 @@ from homeassistant.const import ( ) from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.typing import ConfigType -from homeassistant.setup import DATA_SETUP, DATA_SETUP_STARTED, async_setup_component +from homeassistant.setup import ( + DATA_SETUP, + DATA_SETUP_STARTED, + async_set_domains_to_be_loaded, + async_setup_component, +) from homeassistant.util.logging import async_activate_log_queue_handler from homeassistant.util.package import async_get_user_site, is_virtual_env from homeassistant.util.yaml import clear_secret_cache @@ -34,12 +40,18 @@ DATA_LOGGING = "logging" LOG_SLOW_STARTUP_INTERVAL = 60 -DEBUGGER_INTEGRATIONS = {"ptvsd"} +DEBUGGER_INTEGRATIONS = {"debugpy", "ptvsd"} CORE_INTEGRATIONS = ("homeassistant", "persistent_notification") -LOGGING_INTEGRATIONS = {"logger", "system_log", "sentry"} -STAGE_1_INTEGRATIONS = { +LOGGING_INTEGRATIONS = { + # Set log levels + "logger", + # Error logging + "system_log", + "sentry", # To record data "recorder", +} +STAGE_1_INTEGRATIONS = { # To make sure we forward data to other instances "mqtt_eventstream", # To provide account link implementations @@ -50,7 +62,6 @@ STAGE_1_INTEGRATIONS = { # as possible so problem integrations can # be removed "frontend", - "config", } @@ -125,8 +136,12 @@ async def async_setup_hass( await hass.async_block_till_done() safe_mode = True + old_config = hass.config hass = core.HomeAssistant() - hass.config.config_dir = config_dir + hass.config.skip_pip = old_config.skip_pip + hass.config.internal_url = old_config.internal_url + hass.config.external_url = old_config.external_url + hass.config.config_dir = old_config.config_dir if safe_mode: _LOGGER.info("Starting in safe mode") @@ -327,76 +342,130 @@ def _get_domains(hass: core.HomeAssistant, config: Dict[str, Any]) -> Set[str]: return domains +async def _async_log_pending_setups( + domains: Set[str], setup_started: Dict[str, datetime] +) -> None: + """Periodic log of setups that are pending for longer than LOG_SLOW_STARTUP_INTERVAL.""" + while True: + await asyncio.sleep(LOG_SLOW_STARTUP_INTERVAL) + 
remaining = [domain for domain in domains if domain in setup_started] + + if remaining: + _LOGGER.info( + "Waiting on integrations to complete setup: %s", ", ".join(remaining), + ) + + +async def async_setup_multi_components( + hass: core.HomeAssistant, + domains: Set[str], + config: Dict[str, Any], + setup_started: Dict[str, datetime], +) -> None: + """Set up multiple domains. Log on failure.""" + futures = { + domain: hass.async_create_task(async_setup_component(hass, domain, config)) + for domain in domains + } + log_task = asyncio.create_task(_async_log_pending_setups(domains, setup_started)) + await asyncio.wait(futures.values()) + log_task.cancel() + errors = [domain for domain in domains if futures[domain].exception()] + for domain in errors: + exception = futures[domain].exception() + assert exception is not None + _LOGGER.error( + "Error setting up integration %s - received exception", + domain, + exc_info=(type(exception), exception, exception.__traceback__), + ) + + async def _async_set_up_integrations( hass: core.HomeAssistant, config: Dict[str, Any] ) -> None: """Set up all the integrations.""" - setup_started = hass.data[DATA_SETUP_STARTED] = {} + domains_to_setup = _get_domains(hass, config) - async def async_setup_multi_components(domains: Set[str]) -> None: - """Set up multiple domains. Log on failure.""" + # Resolve all dependencies so we know all integrations + # that will have to be loaded and start rightaway + integration_cache: Dict[str, loader.Integration] = {} + to_resolve = domains_to_setup + while to_resolve: + old_to_resolve = to_resolve + to_resolve = set() - async def _async_log_pending_setups() -> None: - """Periodic log of setups that are pending for longer than LOG_SLOW_STARTUP_INTERVAL.""" - while True: - await asyncio.sleep(LOG_SLOW_STARTUP_INTERVAL) - remaining = [domain for domain in domains if domain in setup_started] - - if remaining: - _LOGGER.info( - "Waiting on integrations to complete setup: %s", - ", ".join(remaining), - ) - - futures = { - domain: hass.async_create_task(async_setup_component(hass, domain, config)) - for domain in domains - } - log_task = asyncio.create_task(_async_log_pending_setups()) - await asyncio.wait(futures.values()) - log_task.cancel() - errors = [domain for domain in domains if futures[domain].exception()] - for domain in errors: - exception = futures[domain].exception() - _LOGGER.error( - "Error setting up integration %s - received exception", - domain, - exc_info=(type(exception), exception, exception.__traceback__), + integrations_to_process = [ + int_or_exc + for int_or_exc in await asyncio.gather( + *( + loader.async_get_integration(hass, domain) + for domain in old_to_resolve + ), + return_exceptions=True, ) + if isinstance(int_or_exc, loader.Integration) + ] + resolve_dependencies_tasks = [ + itg.resolve_dependencies() + for itg in integrations_to_process + if not itg.all_dependencies_resolved + ] - domains = _get_domains(hass, config) + if resolve_dependencies_tasks: + await asyncio.gather(*resolve_dependencies_tasks) + + for itg in integrations_to_process: + integration_cache[itg.domain] = itg + + for dep in itg.all_dependencies: + if dep in domains_to_setup: + continue + + domains_to_setup.add(dep) + to_resolve.add(dep) + + _LOGGER.info("Domains to be set up: %s", domains_to_setup) + + logging_domains = domains_to_setup & LOGGING_INTEGRATIONS + + # Load logging as soon as possible + if logging_domains: + _LOGGER.info("Setting up logging: %s", logging_domains) + await async_setup_multi_components(hass, 
logging_domains, config, setup_started) # Start up debuggers. Start these first in case they want to wait. - debuggers = domains & DEBUGGER_INTEGRATIONS + debuggers = domains_to_setup & DEBUGGER_INTEGRATIONS + if debuggers: - _LOGGER.debug("Starting up debuggers %s", debuggers) - await async_setup_multi_components(debuggers) - domains -= DEBUGGER_INTEGRATIONS + _LOGGER.debug("Setting up debuggers: %s", debuggers) + await async_setup_multi_components(hass, debuggers, config, setup_started) - # Resolve all dependencies of all components so we can find the logging - # and integrations that need faster initialization. - resolved_domains_task = asyncio.gather( - *(loader.async_component_dependencies(hass, domain) for domain in domains), - return_exceptions=True, - ) + # calculate what components to setup in what stage + stage_1_domains = set() - # Finish resolving domains - for dep_domains in await resolved_domains_task: - # Result is either a set or an exception. We ignore exceptions - # It will be properly handled during setup of the domain. - if isinstance(dep_domains, set): - domains.update(dep_domains) + # Find all dependencies of any dependency of any stage 1 integration that + # we plan on loading and promote them to stage 1 + deps_promotion = STAGE_1_INTEGRATIONS + while deps_promotion: + old_deps_promotion = deps_promotion + deps_promotion = set() - # setup components - logging_domains = domains & LOGGING_INTEGRATIONS - stage_1_domains = domains & STAGE_1_INTEGRATIONS - stage_2_domains = domains - logging_domains - stage_1_domains + for domain in old_deps_promotion: + if domain not in domains_to_setup or domain in stage_1_domains: + continue - if logging_domains: - _LOGGER.info("Setting up %s", logging_domains) + stage_1_domains.add(domain) - await async_setup_multi_components(logging_domains) + dep_itg = integration_cache.get(domain) + + if dep_itg is None: + continue + + deps_promotion.update(dep_itg.all_dependencies) + + stage_2_domains = domains_to_setup - logging_domains - debuggers - stage_1_domains # Kick off loading the registries. They don't need to be awaited. asyncio.gather( @@ -405,49 +474,17 @@ async def _async_set_up_integrations( hass.helpers.area_registry.async_get_registry(), ) + # Start setup if stage_1_domains: - _LOGGER.info("Setting up %s", stage_1_domains) + _LOGGER.info("Setting up stage 1: %s", stage_1_domains) + await async_setup_multi_components(hass, stage_1_domains, config, setup_started) - await async_setup_multi_components(stage_1_domains) + # Enables after dependencies + async_set_domains_to_be_loaded(hass, stage_1_domains | stage_2_domains) - # Load all integrations - after_dependencies: Dict[str, Set[str]] = {} - - for int_or_exc in await asyncio.gather( - *(loader.async_get_integration(hass, domain) for domain in stage_2_domains), - return_exceptions=True, - ): - # Exceptions are handled in async_setup_component. 
- if isinstance(int_or_exc, loader.Integration) and int_or_exc.after_dependencies: - after_dependencies[int_or_exc.domain] = set(int_or_exc.after_dependencies) - - last_load = None - while stage_2_domains: - domains_to_load = set() - - for domain in stage_2_domains: - after_deps = after_dependencies.get(domain) - # Load if integration has no after_dependencies or they are - # all loaded - if not after_deps or not after_deps - hass.config.components: - domains_to_load.add(domain) - - if not domains_to_load or domains_to_load == last_load: - break - - _LOGGER.debug("Setting up %s", domains_to_load) - - await async_setup_multi_components(domains_to_load) - - last_load = domains_to_load - stage_2_domains -= domains_to_load - - # These are stage 2 domains that never have their after_dependencies - # satisfied. if stage_2_domains: - _LOGGER.debug("Final set up: %s", stage_2_domains) - - await async_setup_multi_components(stage_2_domains) + _LOGGER.info("Setting up stage 2: %s", stage_2_domains) + await async_setup_multi_components(hass, stage_2_domains, config, setup_started) # Wrap up startup _LOGGER.debug("Waiting for startup to wrap up") diff --git a/homeassistant/components/abode/manifest.json b/homeassistant/components/abode/manifest.json index c8dace4e87b..d59ddd6217f 100644 --- a/homeassistant/components/abode/manifest.json +++ b/homeassistant/components/abode/manifest.json @@ -4,5 +4,8 @@ "config_flow": true, "documentation": "https://www.home-assistant.io/integrations/abode", "requirements": ["abodepy==0.19.0"], - "codeowners": ["@shred86"] + "codeowners": ["@shred86"], + "homekit": { + "models": ["Abode", "Iota"] + } } diff --git a/homeassistant/components/abode/translations/nn.json b/homeassistant/components/abode/translations/nn.json deleted file mode 100644 index f7a32b0983e..00000000000 --- a/homeassistant/components/abode/translations/nn.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "title": "Abode" -} \ No newline at end of file diff --git a/homeassistant/components/adguard/__init__.py b/homeassistant/components/adguard/__init__.py index f968f524f3d..95dbd0c3532 100644 --- a/homeassistant/components/adguard/__init__.py +++ b/homeassistant/components/adguard/__init__.py @@ -71,7 +71,7 @@ async def async_setup_entry(hass: HomeAssistantType, entry: ConfigEntry) -> bool except AdGuardHomeConnectionError as exception: raise ConfigEntryNotReady from exception - if LooseVersion(MIN_ADGUARD_HOME_VERSION) > LooseVersion(version): + if version and LooseVersion(MIN_ADGUARD_HOME_VERSION) > LooseVersion(version): _LOGGER.error( "This integration requires AdGuard Home v0.99.0 or higher to work correctly" ) diff --git a/homeassistant/components/adguard/config_flow.py b/homeassistant/components/adguard/config_flow.py index e2a226eb4ce..ede01706c5d 100644 --- a/homeassistant/components/adguard/config_flow.py +++ b/homeassistant/components/adguard/config_flow.py @@ -84,7 +84,7 @@ class AdGuardHomeFlowHandler(ConfigFlow): errors["base"] = "connection_error" return await self._show_setup_form(errors) - if LooseVersion(MIN_ADGUARD_HOME_VERSION) > LooseVersion(version): + if version and LooseVersion(MIN_ADGUARD_HOME_VERSION) > LooseVersion(version): return self.async_abort( reason="adguard_home_outdated", description_placeholders={ @@ -105,7 +105,7 @@ class AdGuardHomeFlowHandler(ConfigFlow): }, ) - async def async_step_hassio(self, user_input=None): + async def async_step_hassio(self, discovery_info): """Prepare configuration for a Hass.io AdGuard Home add-on. 
This flow is triggered by the discovery component. @@ -113,14 +113,14 @@ class AdGuardHomeFlowHandler(ConfigFlow): entries = self._async_current_entries() if not entries: - self._hassio_discovery = user_input + self._hassio_discovery = discovery_info return await self.async_step_hassio_confirm() cur_entry = entries[0] if ( - cur_entry.data[CONF_HOST] == user_input[CONF_HOST] - and cur_entry.data[CONF_PORT] == user_input[CONF_PORT] + cur_entry.data[CONF_HOST] == discovery_info[CONF_HOST] + and cur_entry.data[CONF_PORT] == discovery_info[CONF_PORT] ): return self.async_abort(reason="single_instance_allowed") @@ -133,8 +133,8 @@ class AdGuardHomeFlowHandler(ConfigFlow): cur_entry, data={ **cur_entry.data, - CONF_HOST: user_input[CONF_HOST], - CONF_PORT: user_input[CONF_PORT], + CONF_HOST: discovery_info[CONF_HOST], + CONF_PORT: discovery_info[CONF_PORT], }, ) diff --git a/homeassistant/components/agent_dvr/config_flow.py b/homeassistant/components/agent_dvr/config_flow.py index a5c98ade1cb..cc1d6355f3f 100644 --- a/homeassistant/components/agent_dvr/config_flow.py +++ b/homeassistant/components/agent_dvr/config_flow.py @@ -23,13 +23,13 @@ class AgentFlowHandler(config_entries.ConfigFlow, domain=DOMAIN): """Initialize the Agent config flow.""" self.device_config = {} - async def async_step_user(self, info=None): + async def async_step_user(self, user_input=None): """Handle an Agent config flow.""" errors = {} - if info is not None: - host = info[CONF_HOST] - port = info[CONF_PORT] + if user_input is not None: + host = user_input[CONF_HOST] + port = user_input[CONF_PORT] server_origin = generate_url(host, port) agent_client = Agent(server_origin, async_get_clientsession(self.hass)) @@ -48,8 +48,8 @@ class AgentFlowHandler(config_entries.ConfigFlow, domain=DOMAIN): self._abort_if_unique_id_configured( updates={ - CONF_HOST: info[CONF_HOST], - CONF_PORT: info[CONF_PORT], + CONF_HOST: user_input[CONF_HOST], + CONF_PORT: user_input[CONF_PORT], SERVER_URL: server_origin, } ) diff --git a/homeassistant/components/agent_dvr/translations/es.json b/homeassistant/components/agent_dvr/translations/es.json index 7d071f621ff..b738f71dd75 100644 --- a/homeassistant/components/agent_dvr/translations/es.json +++ b/homeassistant/components/agent_dvr/translations/es.json @@ -4,7 +4,7 @@ "already_configured": "El dispositivo ya est\u00e1 configurado" }, "error": { - "already_in_progress": "La configuraci\u00f3n del flujo para el dispositivo ya est\u00e1 en marcha.", + "already_in_progress": "El flujo de configuraci\u00f3n para el dispositivo ya est\u00e1 en marcha.", "device_unavailable": "El dispositivo no est\u00e1 disponible" }, "step": { diff --git a/homeassistant/components/airly/translations/no.json b/homeassistant/components/airly/translations/no.json index 5d4f6d31785..09e77a311eb 100644 --- a/homeassistant/components/airly/translations/no.json +++ b/homeassistant/components/airly/translations/no.json @@ -10,7 +10,7 @@ "step": { "user": { "data": { - "api_key": "Airly API-n\u00f8kkel", + "api_key": "API-n\u00f8kkel", "latitude": "Breddegrad", "longitude": "Lengdegrad", "name": "Navn p\u00e5 integrasjonen" diff --git a/homeassistant/components/airvisual/translations/no.json b/homeassistant/components/airvisual/translations/no.json index 28cf8c9a5bb..8fcf00a6714 100644 --- a/homeassistant/components/airvisual/translations/no.json +++ b/homeassistant/components/airvisual/translations/no.json @@ -21,7 +21,7 @@ "node_pro": { "data": { "ip_address": "Enhetens IP-adresse / vertsnavn", - "password": "Passord for 
enhet" + "password": "Passord" }, "description": "Overv\u00e5ke en personlig AirVisual-enhet. Passordet kan hentes fra enhetens brukergrensesnitt.", "title": "Konfigurer en AirVisual Node / Pro" diff --git a/homeassistant/components/airvisual/translations/sk.json b/homeassistant/components/airvisual/translations/sk.json deleted file mode 100644 index e6945904d90..00000000000 --- a/homeassistant/components/airvisual/translations/sk.json +++ /dev/null @@ -1,12 +0,0 @@ -{ - "config": { - "step": { - "user": { - "data": { - "latitude": "Zemepisn\u00e1 \u0161\u00edrka", - "longitude": "Zemepisn\u00e1 d\u013a\u017eka" - } - } - } - } -} \ No newline at end of file diff --git a/homeassistant/components/alarmdecoder/__init__.py b/homeassistant/components/alarmdecoder/__init__.py index c70bcdcc45c..178f31ee87a 100644 --- a/homeassistant/components/alarmdecoder/__init__.py +++ b/homeassistant/components/alarmdecoder/__init__.py @@ -2,7 +2,7 @@ from datetime import timedelta import logging -from alarmdecoder import AlarmDecoder +from adext import AdExt from alarmdecoder.devices import SerialDevice, SocketDevice, USBDevice from alarmdecoder.util import NoDeviceError import voluptuous as vol @@ -189,13 +189,13 @@ def setup(hass, config): if device_type == "socket": host = device[CONF_HOST] port = device[CONF_DEVICE_PORT] - controller = AlarmDecoder(SocketDevice(interface=(host, port))) + controller = AdExt(SocketDevice(interface=(host, port))) elif device_type == "serial": path = device[CONF_DEVICE_PATH] baud = device[CONF_DEVICE_BAUD] - controller = AlarmDecoder(SerialDevice(interface=path)) + controller = AdExt(SerialDevice(interface=path)) elif device_type == "usb": - AlarmDecoder(USBDevice.find()) + AdExt(USBDevice.find()) return False controller.on_message += handle_message diff --git a/homeassistant/components/alarmdecoder/alarm_control_panel.py b/homeassistant/components/alarmdecoder/alarm_control_panel.py index ac90ea1796f..38b9c5999be 100644 --- a/homeassistant/components/alarmdecoder/alarm_control_panel.py +++ b/homeassistant/components/alarmdecoder/alarm_control_panel.py @@ -16,6 +16,7 @@ from homeassistant.const import ( ATTR_CODE, STATE_ALARM_ARMED_AWAY, STATE_ALARM_ARMED_HOME, + STATE_ALARM_ARMED_NIGHT, STATE_ALARM_DISARMED, STATE_ALARM_TRIGGERED, ) @@ -108,6 +109,8 @@ class AlarmDecoderAlarmPanel(AlarmControlPanelEntity): self._state = STATE_ALARM_TRIGGERED elif message.armed_away: self._state = STATE_ALARM_ARMED_AWAY + elif message.armed_home and (message.entry_delay_off or message.perimeter_only): + self._state = STATE_ALARM_ARMED_NIGHT elif message.armed_home: self._state = STATE_ALARM_ARMED_HOME else: @@ -178,28 +181,27 @@ class AlarmDecoderAlarmPanel(AlarmControlPanelEntity): def alarm_arm_away(self, code=None): """Send arm away command.""" - if code: - if self._auto_bypass: - self.hass.data[DATA_AD].send(f"{code!s}6#") - self.hass.data[DATA_AD].send(f"{code!s}2") - elif not self._code_arm_required: - self.hass.data[DATA_AD].send("#2") + self.hass.data[DATA_AD].arm_away( + code=code, + code_arm_required=self._code_arm_required, + auto_bypass=self._auto_bypass, + ) def alarm_arm_home(self, code=None): """Send arm home command.""" - if code: - if self._auto_bypass: - self.hass.data[DATA_AD].send(f"{code!s}6#") - self.hass.data[DATA_AD].send(f"{code!s}3") - elif not self._code_arm_required: - self.hass.data[DATA_AD].send("#3") + self.hass.data[DATA_AD].arm_home( + code=code, + code_arm_required=self._code_arm_required, + auto_bypass=self._auto_bypass, + ) def alarm_arm_night(self, 
code=None): """Send arm night command.""" - if code: - self.hass.data[DATA_AD].send(f"{code!s}7") - elif not self._code_arm_required: - self.hass.data[DATA_AD].send("#7") + self.hass.data[DATA_AD].arm_night( + code=code, + code_arm_required=self._code_arm_required, + auto_bypass=self._auto_bypass, + ) def alarm_toggle_chime(self, code=None): """Send toggle chime command.""" diff --git a/homeassistant/components/alarmdecoder/manifest.json b/homeassistant/components/alarmdecoder/manifest.json index 48c5cb824ad..ea2c3fb01c8 100644 --- a/homeassistant/components/alarmdecoder/manifest.json +++ b/homeassistant/components/alarmdecoder/manifest.json @@ -2,6 +2,6 @@ "domain": "alarmdecoder", "name": "AlarmDecoder", "documentation": "https://www.home-assistant.io/integrations/alarmdecoder", - "requirements": ["alarmdecoder==1.13.2"], + "requirements": ["adext==0.3"], "codeowners": ["@ajschmidt8"] } diff --git a/homeassistant/components/alert/__init__.py b/homeassistant/components/alert/__init__.py index f3b15a7af57..d85c13731b2 100644 --- a/homeassistant/components/alert/__init__.py +++ b/homeassistant/components/alert/__init__.py @@ -222,11 +222,6 @@ class Alert(ToggleEntity): return STATE_ON return STATE_IDLE - @property - def hidden(self): - """Hide the alert when it is not firing.""" - return not self._can_ack or not self._firing - async def watched_entity_change(self, entity, from_state, to_state): """Determine if the alert should start or stop.""" _LOGGER.debug("Watched entity (%s) has changed", entity) @@ -310,7 +305,9 @@ class Alert(ToggleEntity): _LOGGER.debug(msg_payload) for target in self._notifiers: - await self.hass.services.async_call(DOMAIN_NOTIFY, target, msg_payload) + await self.hass.services.async_call( + DOMAIN_NOTIFY, target, msg_payload, context=self._context + ) async def async_turn_on(self, **kwargs): """Async Unacknowledge alert.""" diff --git a/homeassistant/components/alexa/__init__.py b/homeassistant/components/alexa/__init__.py index 81b0f670058..7522b7e2d58 100644 --- a/homeassistant/components/alexa/__init__.py +++ b/homeassistant/components/alexa/__init__.py @@ -4,7 +4,6 @@ import logging import voluptuous as vol from homeassistant.const import CONF_CLIENT_ID, CONF_CLIENT_SECRET, CONF_NAME -from homeassistant.core import callback from homeassistant.helpers import config_validation as cv, entityfilter from . import flash_briefings, intent, smart_home_http @@ -17,12 +16,12 @@ from .const import ( CONF_ENTITY_CONFIG, CONF_FILTER, CONF_LOCALE, + CONF_PASSWORD, CONF_SUPPORTED_LOCALES, CONF_TEXT, CONF_TITLE, CONF_UID, DOMAIN, - EVENT_ALEXA_SMART_HOME, ) _LOGGER = logging.getLogger(__name__) @@ -56,6 +55,7 @@ CONFIG_SCHEMA = vol.Schema( { DOMAIN: { CONF_FLASH_BRIEFINGS: { + vol.Required(CONF_PASSWORD): cv.string, cv.string: vol.All( cv.ensure_list, [ @@ -67,7 +67,7 @@ CONFIG_SCHEMA = vol.Schema( vol.Optional(CONF_DISPLAY_URL): cv.template, } ], - ) + ), }, # vol.Optional here would mean we couldn't distinguish between an empty # smart_home: and none at all. 
@@ -80,28 +80,6 @@ CONFIG_SCHEMA = vol.Schema( async def async_setup(hass, config): """Activate the Alexa component.""" - - @callback - def async_describe_logbook_event(event): - """Describe a logbook event.""" - data = event.data - entity_id = data["request"].get("entity_id") - - if entity_id: - state = hass.states.get(entity_id) - name = state.name if state else entity_id - message = f"send command {data['request']['namespace']}/{data['request']['name']} for {name}" - else: - message = ( - f"send command {data['request']['namespace']}/{data['request']['name']}" - ) - - return {"name": "Amazon Alexa", "message": message, "entity_id": entity_id} - - hass.components.logbook.async_describe_event( - DOMAIN, EVENT_ALEXA_SMART_HOME, async_describe_logbook_event - ) - if DOMAIN not in config: return True diff --git a/homeassistant/components/alexa/const.py b/homeassistant/components/alexa/const.py index 50e3edb475c..a5a1cde2e15 100644 --- a/homeassistant/components/alexa/const.py +++ b/homeassistant/components/alexa/const.py @@ -19,6 +19,7 @@ CONF_FILTER = "filter" CONF_ENTITY_CONFIG = "entity_config" CONF_ENDPOINT = "endpoint" CONF_LOCALE = "locale" +CONF_PASSWORD = "password" ATTR_UID = "uid" ATTR_UPDATE_DATE = "updateDate" @@ -39,6 +40,7 @@ API_HEADER = "header" API_PAYLOAD = "payload" API_SCOPE = "scope" API_CHANGE = "change" +API_PASSWORD = "password" CONF_DESCRIPTION = "description" CONF_DISPLAY_CATEGORIES = "display_categories" diff --git a/homeassistant/components/alexa/flash_briefings.py b/homeassistant/components/alexa/flash_briefings.py index 1205fd58091..ed3da1d10be 100644 --- a/homeassistant/components/alexa/flash_briefings.py +++ b/homeassistant/components/alexa/flash_briefings.py @@ -1,15 +1,17 @@ """Support for Alexa skill service end point.""" import copy +import hmac import logging import uuid from homeassistant.components import http -from homeassistant.const import HTTP_NOT_FOUND +from homeassistant.const import HTTP_NOT_FOUND, HTTP_UNAUTHORIZED from homeassistant.core import callback from homeassistant.helpers import template import homeassistant.util.dt as dt_util from .const import ( + API_PASSWORD, ATTR_MAIN_TEXT, ATTR_REDIRECTION_URL, ATTR_STREAM_URL, @@ -18,6 +20,7 @@ from .const import ( ATTR_UPDATE_DATE, CONF_AUDIO, CONF_DISPLAY_URL, + CONF_PASSWORD, CONF_TEXT, CONF_TITLE, CONF_UID, @@ -39,6 +42,7 @@ class AlexaFlashBriefingView(http.HomeAssistantView): """Handle Alexa Flash Briefing skill requests.""" url = FLASH_BRIEFINGS_API_ENDPOINT + requires_auth = False name = "api:alexa:flash_briefings" def __init__(self, hass, flash_briefings): @@ -52,7 +56,20 @@ class AlexaFlashBriefingView(http.HomeAssistantView): """Handle Alexa Flash Briefing request.""" _LOGGER.debug("Received Alexa flash briefing request for: %s", briefing_id) - if self.flash_briefings.get(briefing_id) is None: + if request.query.get(API_PASSWORD) is None: + err = "No password provided for Alexa flash briefing: %s" + _LOGGER.error(err, briefing_id) + return b"", HTTP_UNAUTHORIZED + + if not hmac.compare_digest( + request.query[API_PASSWORD].encode("utf-8"), + self.flash_briefings[CONF_PASSWORD].encode("utf-8"), + ): + err = "Wrong password for Alexa flash briefing: %s" + _LOGGER.error(err, briefing_id) + return b"", HTTP_UNAUTHORIZED + + if not isinstance(self.flash_briefings.get(briefing_id), list): err = "No configured Alexa flash briefing was found for: %s" _LOGGER.error(err, briefing_id) return b"", HTTP_NOT_FOUND diff --git a/homeassistant/components/alexa/logbook.py 
b/homeassistant/components/alexa/logbook.py new file mode 100644 index 00000000000..efc188a7f8b --- /dev/null +++ b/homeassistant/components/alexa/logbook.py @@ -0,0 +1,28 @@ +"""Describe logbook events.""" +from homeassistant.core import callback + +from .const import DOMAIN, EVENT_ALEXA_SMART_HOME + + +@callback +def async_describe_events(hass, async_describe_event): + """Describe logbook events.""" + + @callback + def async_describe_logbook_event(event): + """Describe a logbook event.""" + data = event.data + entity_id = data["request"].get("entity_id") + + if entity_id: + state = hass.states.get(entity_id) + name = state.name if state else entity_id + message = f"send command {data['request']['namespace']}/{data['request']['name']} for {name}" + else: + message = ( + f"send command {data['request']['namespace']}/{data['request']['name']}" + ) + + return {"name": "Amazon Alexa", "message": message, "entity_id": entity_id} + + async_describe_event(DOMAIN, EVENT_ALEXA_SMART_HOME, async_describe_logbook_event) diff --git a/homeassistant/components/alexa/manifest.json b/homeassistant/components/alexa/manifest.json index 6144ccc6870..1ed91866cdc 100644 --- a/homeassistant/components/alexa/manifest.json +++ b/homeassistant/components/alexa/manifest.json @@ -2,7 +2,14 @@ "domain": "alexa", "name": "Amazon Alexa", "documentation": "https://www.home-assistant.io/integrations/alexa", - "dependencies": ["http"], - "after_dependencies": ["logbook", "camera"], - "codeowners": ["@home-assistant/cloud", "@ochlocracy"] + "dependencies": [ + "http" + ], + "after_dependencies": [ + "camera" + ], + "codeowners": [ + "@home-assistant/cloud", + "@ochlocracy" + ] } diff --git a/homeassistant/components/almond/config_flow.py b/homeassistant/components/almond/config_flow.py index b1eb506270b..73dc85c5fd0 100644 --- a/homeassistant/components/almond/config_flow.py +++ b/homeassistant/components/almond/config_flow.py @@ -94,12 +94,12 @@ class AlmondFlowHandler(config_entry_oauth2_flow.AbstractOAuth2FlowHandler): data={"type": TYPE_LOCAL, "host": user_input["host"]}, ) - async def async_step_hassio(self, user_input=None): + async def async_step_hassio(self, discovery_info): """Receive a Hass.io discovery.""" if self._async_current_entries(): return self.async_abort(reason="already_setup") - self.hassio_discovery = user_input + self.hassio_discovery = discovery_info return await self.async_step_hassio_confirm() diff --git a/homeassistant/components/almond/translations/nn.json b/homeassistant/components/almond/translations/nn.json deleted file mode 100644 index adee9514928..00000000000 --- a/homeassistant/components/almond/translations/nn.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "title": "Almond" -} \ No newline at end of file diff --git a/homeassistant/components/ambiclimate/translations/nn.json b/homeassistant/components/ambiclimate/translations/nn.json deleted file mode 100644 index 31e478697d7..00000000000 --- a/homeassistant/components/ambiclimate/translations/nn.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "title": "Ambiclimate" -} \ No newline at end of file diff --git a/homeassistant/components/ambient_station/__init__.py b/homeassistant/components/ambient_station/__init__.py index 8ee37f4503e..120b83d7923 100644 --- a/homeassistant/components/ambient_station/__init__.py +++ b/homeassistant/components/ambient_station/__init__.py @@ -10,6 +10,7 @@ from homeassistant.config_entries import SOURCE_IMPORT from homeassistant.const import ( ATTR_LOCATION, ATTR_NAME, + CONCENTRATION_MICROGRAMS_PER_CUBIC_METER, 
CONCENTRATION_PARTS_PER_MILLION, CONF_API_KEY, DEGREE, @@ -126,6 +127,8 @@ TYPE_TEMPF = "tempf" TYPE_TEMPINF = "tempinf" TYPE_TOTALRAININ = "totalrainin" TYPE_UV = "uv" +TYPE_PM25 = "pm25" +TYPE_PM25_24H = "pm25_24h" TYPE_WEEKLYRAININ = "weeklyrainin" TYPE_WINDDIR = "winddir" TYPE_WINDDIR_AVG10M = "winddir_avg10m" @@ -218,6 +221,13 @@ SENSOR_TYPES = { TYPE_TEMPINF: ("Inside Temp", TEMP_FAHRENHEIT, TYPE_SENSOR, "temperature"), TYPE_TOTALRAININ: ("Lifetime Rain", "in", TYPE_SENSOR, None), TYPE_UV: ("uv", "Index", TYPE_SENSOR, None), + TYPE_PM25: ("PM25", CONCENTRATION_MICROGRAMS_PER_CUBIC_METER, TYPE_SENSOR, None), + TYPE_PM25_24H: ( + "PM25 24h Avg", + CONCENTRATION_MICROGRAMS_PER_CUBIC_METER, + TYPE_SENSOR, + None, + ), TYPE_WEEKLYRAININ: ("Weekly Rain", "in", TYPE_SENSOR, None), TYPE_WINDDIR: ("Wind Dir", DEGREE, TYPE_SENSOR, None), TYPE_WINDDIR_AVG10M: ("Wind Dir Avg 10m", DEGREE, TYPE_SENSOR, None), diff --git a/homeassistant/components/ambient_station/translations/nn.json b/homeassistant/components/ambient_station/translations/nn.json deleted file mode 100644 index 1774198088a..00000000000 --- a/homeassistant/components/ambient_station/translations/nn.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "title": "Ambient PWS" -} \ No newline at end of file diff --git a/homeassistant/components/amcrest/camera.py b/homeassistant/components/amcrest/camera.py index 4b3640c1543..5ac6acb2071 100644 --- a/homeassistant/components/amcrest/camera.py +++ b/homeassistant/components/amcrest/camera.py @@ -130,6 +130,10 @@ class CannotSnapshot(Exception): """Conditions are not valid for taking a snapshot.""" +class AmcrestCommandFailed(Exception): + """Amcrest camera command did not work.""" + + class AmcrestCam(Camera): """An implementation of an Amcrest IP camera.""" @@ -367,12 +371,12 @@ class AmcrestCam(Camera): self._model = resp.split("=")[-1] else: self._model = "unknown" - self.is_streaming = self._api.video_enabled - self._is_recording = self._api.record_mode == "Manual" - self._motion_detection_enabled = self._api.is_motion_detector_on() - self._audio_enabled = self._api.audio_enabled - self._motion_recording_enabled = self._api.is_record_on_motion_detection() - self._color_bw = _CBW[self._api.day_night_color] + self.is_streaming = self._get_video() + self._is_recording = self._get_recording() + self._motion_detection_enabled = self._get_motion_detection() + self._audio_enabled = self._get_audio() + self._motion_recording_enabled = self._get_motion_recording() + self._color_bw = self._get_color_mode() self._rtsp_url = self._api.rtsp_url(typeno=self._resolution) except AmcrestError as error: log_update_error(_LOGGER, "get", self.name, "camera attributes", error) @@ -384,11 +388,11 @@ class AmcrestCam(Camera): def turn_off(self): """Turn off camera.""" - self._enable_video_stream(False) + self._enable_video(False) def turn_on(self): """Turn on camera.""" - self._enable_video_stream(True) + self._enable_video(True) def enable_motion_detection(self): """Enable motion detection in the camera.""" @@ -465,28 +469,53 @@ class AmcrestCam(Camera): # Methods to send commands to Amcrest camera and handle errors - def _enable_video_stream(self, enable): + def _change_setting(self, value, attr, description, action="set"): + func = description.replace(" ", "_") + description = f"camera {description} to {value}" + tries = 3 + while True: + try: + getattr(self, f"_set_{func}")(value) + new_value = getattr(self, f"_get_{func}")() + if new_value != value: + raise AmcrestCommandFailed + except (AmcrestError, 
AmcrestCommandFailed) as error: + if tries == 1: + log_update_error(_LOGGER, action, self.name, description, error) + return + log_update_error( + _LOGGER, action, self.name, description, error, logging.DEBUG + ) + else: + if attr: + setattr(self, attr, new_value) + self.schedule_update_ha_state() + return + tries -= 1 + + def _get_video(self): + return self._api.video_enabled + + def _set_video(self, enable): + self._api.video_enabled = enable + + def _enable_video(self, enable): """Enable or disable camera video stream.""" # Given the way the camera's state is determined by # is_streaming and is_recording, we can't leave # recording on if video stream is being turned off. if self.is_recording and not enable: self._enable_recording(False) - try: - self._api.video_enabled = enable - except AmcrestError as error: - log_update_error( - _LOGGER, - "enable" if enable else "disable", - self.name, - "camera video stream", - error, - ) - else: - self.is_streaming = enable - self.schedule_update_ha_state() + self._change_setting(enable, "is_streaming", "video") if self._control_light: - self._enable_light(self._audio_enabled or self.is_streaming) + self._change_light() + + def _get_recording(self): + return self._api.record_mode == "Manual" + + def _set_recording(self, enable): + rec_mode = {"Automatic": 0, "Manual": 1} + self._api.record_mode = rec_mode["Manual" if enable else "Automatic"] def _enable_recording(self, enable): """Turn recording on or off.""" @@ -494,86 +523,56 @@ class AmcrestCam(Camera): # is_streaming and is_recording, we can't leave # video stream off if recording is being turned on. if not self.is_streaming and enable: - self._enable_video_stream(True) - rec_mode = {"Automatic": 0, "Manual": 1} - try: - self._api.record_mode = rec_mode["Manual" if enable else "Automatic"] - except AmcrestError as error: - log_update_error( - _LOGGER, - "enable" if enable else "disable", - self.name, - "camera recording", - error, - ) - else: - self._is_recording = enable - self.schedule_update_ha_state() + self._enable_video(True) + self._change_setting(enable, "_is_recording", "recording") + + def _get_motion_detection(self): + return self._api.is_motion_detector_on() + + def _set_motion_detection(self, enable): + self._api.motion_detection = str(enable).lower() def _enable_motion_detection(self, enable): """Enable or disable motion detection.""" - try: - self._api.motion_detection = str(enable).lower() - except AmcrestError as error: - log_update_error( - _LOGGER, - "enable" if enable else "disable", - self.name, - "camera motion detection", - error, - ) - else: - self._motion_detection_enabled = enable - self.schedule_update_ha_state() + self._change_setting(enable, "_motion_detection_enabled", "motion detection") + + def _get_audio(self): + return self._api.audio_enabled + + def _set_audio(self, enable): + self._api.audio_enabled = enable def _enable_audio(self, enable): """Enable or disable audio stream.""" - try: - self._api.audio_enabled = enable - except AmcrestError as error: - log_update_error( - _LOGGER, - "enable" if enable else "disable", - self.name, - "camera audio stream", - error, - ) - else: - self._audio_enabled = enable - self.schedule_update_ha_state() + self._change_setting(enable, "_audio_enabled", "audio") if self._control_light: - self._enable_light(self._audio_enabled or self.is_streaming) + self._change_light() - def _enable_light(self, enable): + def _get_indicator_light(self): + return "true" in self._api.command( + 
"configManager.cgi?action=getConfig&name=LightGlobal" + ).content.decode("utf-8") + + def _set_indicator_light(self, enable): + self._api.command( + f"configManager.cgi?action=setConfig&LightGlobal[0].Enable={str(enable).lower()}" + ) + + def _change_light(self): """Enable or disable indicator light.""" - try: - self._api.command( - f"configManager.cgi?action=setConfig&LightGlobal[0].Enable={str(enable).lower()}" - ) - except AmcrestError as error: - log_update_error( - _LOGGER, - "enable" if enable else "disable", - self.name, - "indicator light", - error, - ) + self._change_setting( + self._audio_enabled or self.is_streaming, None, "indicator light" + ) + + def _get_motion_recording(self): + return self._api.is_record_on_motion_detection() + + def _set_motion_recording(self, enable): + self._api.motion_recording = str(enable).lower() def _enable_motion_recording(self, enable): """Enable or disable motion recording.""" - try: - self._api.motion_recording = str(enable).lower() - except AmcrestError as error: - log_update_error( - _LOGGER, - "enable" if enable else "disable", - self.name, - "camera motion recording", - error, - ) - else: - self._motion_recording_enabled = enable - self.schedule_update_ha_state() + self._change_setting(enable, "_motion_recording_enabled", "motion recording") def _goto_preset(self, preset): """Move camera position and zoom to preset.""" @@ -584,17 +583,15 @@ class AmcrestCam(Camera): _LOGGER, "move", self.name, f"camera to preset {preset}", error ) + def _get_color_mode(self): + return _CBW[self._api.day_night_color] + + def _set_color_mode(self, cbw): + self._api.day_night_color = _CBW.index(cbw) + def _set_color_bw(self, cbw): """Set camera color mode.""" - try: - self._api.day_night_color = _CBW.index(cbw) - except AmcrestError as error: - log_update_error( - _LOGGER, "set", self.name, f"camera color mode to {cbw}", error - ) - else: - self._color_bw = cbw - self.schedule_update_ha_state() + self._change_setting(cbw, "_color_bw", "color mode") def _start_tour(self, start): """Start camera tour.""" diff --git a/homeassistant/components/amcrest/helpers.py b/homeassistant/components/amcrest/helpers.py index 884d39abd70..ef0ae2db15b 100644 --- a/homeassistant/components/amcrest/helpers.py +++ b/homeassistant/components/amcrest/helpers.py @@ -1,4 +1,6 @@ """Helpers for amcrest component.""" +import logging + from .const import DOMAIN @@ -7,9 +9,10 @@ def service_signal(service, *args): return "_".join([DOMAIN, service, *args]) -def log_update_error(logger, action, name, entity_type, error): +def log_update_error(logger, action, name, entity_type, error, level=logging.ERROR): """Log an update error.""" - logger.error( + logger.log( + level, "Could not %s %s %s due to error: %s", action, name, diff --git a/homeassistant/components/androidtv/manifest.json b/homeassistant/components/androidtv/manifest.json index fb74ab9ab2e..dc682b780fb 100644 --- a/homeassistant/components/androidtv/manifest.json +++ b/homeassistant/components/androidtv/manifest.json @@ -4,7 +4,7 @@ "documentation": "https://www.home-assistant.io/integrations/androidtv", "requirements": [ "adb-shell==0.1.3", - "androidtv==0.0.41", + "androidtv==0.0.43", "pure-python-adb==0.2.2.dev0" ], "codeowners": ["@JeffLIrion"] diff --git a/homeassistant/components/arcam_fmj/__init__.py b/homeassistant/components/arcam_fmj/__init__.py index 008266e5a45..0875e094352 100644 --- a/homeassistant/components/arcam_fmj/__init__.py +++ b/homeassistant/components/arcam_fmj/__init__.py @@ -5,27 +5,15 @@ import logging 
from arcam.fmj import ConnectionFailed from arcam.fmj.client import Client import async_timeout -import voluptuous as vol from homeassistant import config_entries -from homeassistant.const import ( - CONF_HOST, - CONF_NAME, - CONF_PORT, - CONF_SCAN_INTERVAL, - CONF_ZONE, - EVENT_HOMEASSISTANT_STOP, - SERVICE_TURN_ON, -) +from homeassistant.const import CONF_HOST, CONF_PORT, EVENT_HOMEASSISTANT_STOP import homeassistant.helpers.config_validation as cv from homeassistant.helpers.typing import ConfigType, HomeAssistantType from .const import ( - DEFAULT_NAME, - DEFAULT_PORT, DEFAULT_SCAN_INTERVAL, DOMAIN, - DOMAIN_DATA_CONFIG, DOMAIN_DATA_ENTRIES, DOMAIN_DATA_TASKS, SIGNAL_CLIENT_DATA, @@ -35,44 +23,7 @@ from .const import ( _LOGGER = logging.getLogger(__name__) - -def _optional_zone(value): - if value: - return ZONE_SCHEMA(value) - return ZONE_SCHEMA({}) - - -def _zone_name_validator(config): - for zone, zone_config in config[CONF_ZONE].items(): - if CONF_NAME not in zone_config: - zone_config[ - CONF_NAME - ] = f"{DEFAULT_NAME} ({config[CONF_HOST]}:{config[CONF_PORT]}) - {zone}" - return config - - -ZONE_SCHEMA = vol.Schema( - { - vol.Optional(CONF_NAME): cv.string, - vol.Optional(SERVICE_TURN_ON): cv.SERVICE_SCHEMA, - } -) - -DEVICE_SCHEMA = vol.Schema( - vol.All( - { - vol.Required(CONF_HOST): cv.string, - vol.Required(CONF_PORT, default=DEFAULT_PORT): cv.positive_int, - vol.Optional(CONF_ZONE, default={1: _optional_zone(None)}): { - vol.In([1, 2]): _optional_zone - }, - vol.Optional( - CONF_SCAN_INTERVAL, default=DEFAULT_SCAN_INTERVAL - ): cv.positive_int, - }, - _zone_name_validator, - ) -) +CONFIG_SCHEMA = cv.deprecated(DOMAIN, invalidation_version="0.115") async def _await_cancel(task): @@ -83,27 +34,10 @@ async def _await_cancel(task): pass -CONFIG_SCHEMA = vol.Schema( - {DOMAIN: vol.All(cv.ensure_list, [DEVICE_SCHEMA])}, extra=vol.ALLOW_EXTRA -) - - async def async_setup(hass: HomeAssistantType, config: ConfigType): """Set up the component.""" hass.data[DOMAIN_DATA_ENTRIES] = {} hass.data[DOMAIN_DATA_TASKS] = {} - hass.data[DOMAIN_DATA_CONFIG] = {} - - for device in config[DOMAIN]: - hass.data[DOMAIN_DATA_CONFIG][(device[CONF_HOST], device[CONF_PORT])] = device - - hass.async_create_task( - hass.config_entries.flow.async_init( - DOMAIN, - context={"source": config_entries.SOURCE_IMPORT}, - data={CONF_HOST: device[CONF_HOST], CONF_PORT: device[CONF_PORT]}, - ) - ) async def _stop(_): asyncio.gather( @@ -116,21 +50,12 @@ async def async_setup(hass: HomeAssistantType, config: ConfigType): async def async_setup_entry(hass: HomeAssistantType, entry: config_entries.ConfigEntry): - """Set up an access point from a config entry.""" + """Set up config entry.""" + entries = hass.data[DOMAIN_DATA_ENTRIES] + tasks = hass.data[DOMAIN_DATA_TASKS] + client = Client(entry.data[CONF_HOST], entry.data[CONF_PORT]) - - config = hass.data[DOMAIN_DATA_CONFIG].get( - (entry.data[CONF_HOST], entry.data[CONF_PORT]), - DEVICE_SCHEMA( - {CONF_HOST: entry.data[CONF_HOST], CONF_PORT: entry.data[CONF_PORT]} - ), - ) - tasks = hass.data.setdefault(DOMAIN_DATA_TASKS, {}) - - hass.data[DOMAIN_DATA_ENTRIES][entry.entry_id] = { - "client": client, - "config": config, - } + entries[entry.entry_id] = client task = asyncio.create_task(_run_client(hass, client, DEFAULT_SCAN_INTERVAL)) tasks[entry.entry_id] = task diff --git a/homeassistant/components/arcam_fmj/config_flow.py b/homeassistant/components/arcam_fmj/config_flow.py index a92a2ec52a6..d6cf1c02d3b 100644 --- a/homeassistant/components/arcam_fmj/config_flow.py 
+++ b/homeassistant/components/arcam_fmj/config_flow.py @@ -1,27 +1,102 @@ """Config flow to configure the Arcam FMJ component.""" -from operator import itemgetter +import logging +from urllib.parse import urlparse + +from arcam.fmj.client import Client, ConnectionFailed +from arcam.fmj.utils import get_uniqueid_from_host, get_uniqueid_from_udn +import voluptuous as vol from homeassistant import config_entries +from homeassistant.components.ssdp import ATTR_SSDP_LOCATION, ATTR_UPNP_UDN from homeassistant.const import CONF_HOST, CONF_PORT +from homeassistant.helpers.aiohttp_client import async_get_clientsession -from .const import DOMAIN +from .const import DEFAULT_NAME, DEFAULT_PORT, DOMAIN, DOMAIN_DATA_ENTRIES -_GETKEY = itemgetter(CONF_HOST, CONF_PORT) +_LOGGER = logging.getLogger(__name__) + + +def get_entry_client(hass, entry): + """Retrieve client associated with a config entry.""" + return hass.data[DOMAIN_DATA_ENTRIES][entry.entry_id] @config_entries.HANDLERS.register(DOMAIN) class ArcamFmjFlowHandler(config_entries.ConfigFlow): - """Handle a SimpliSafe config flow.""" + """Handle config flow.""" VERSION = 1 CONNECTION_CLASS = config_entries.CONN_CLASS_LOCAL_POLL - async def async_step_import(self, import_config): - """Import a config entry from configuration.yaml.""" - entries = self.hass.config_entries.async_entries(DOMAIN) - import_key = _GETKEY(import_config) - for entry in entries: - if _GETKEY(entry.data) == import_key: - return self.async_abort(reason="already_setup") + async def _async_set_unique_id_and_update(self, host, port, uuid): + await self.async_set_unique_id(uuid) + self._abort_if_unique_id_configured({CONF_HOST: host, CONF_PORT: port}) - return self.async_create_entry(title="Arcam FMJ", data=import_config) + async def _async_check_and_create(self, host, port): + client = Client(host, port) + try: + await client.start() + except ConnectionFailed: + return self.async_abort(reason="unable_to_connect") + finally: + await client.stop() + + return self.async_create_entry( + title=f"{DEFAULT_NAME} ({host})", data={CONF_HOST: host, CONF_PORT: port}, + ) + + async def async_step_user(self, user_input=None): + """Handle a discovered device.""" + errors = {} + + if user_input is not None: + uuid = await get_uniqueid_from_host( + async_get_clientsession(self.hass), user_input[CONF_HOST] + ) + if uuid: + await self._async_set_unique_id_and_update( + user_input[CONF_HOST], user_input[CONF_PORT], uuid + ) + + return await self._async_check_and_create( + user_input[CONF_HOST], user_input[CONF_PORT] + ) + + fields = { + vol.Required(CONF_HOST): str, + vol.Required(CONF_PORT, default=DEFAULT_PORT): int, + } + + return self.async_show_form( + step_id="user", data_schema=vol.Schema(fields), errors=errors + ) + + async def async_step_confirm(self, user_input=None): + """Handle user-confirmation of discovered node.""" + context = self.context # pylint: disable=no-member + placeholders = { + "host": context[CONF_HOST], + } + context["title_placeholders"] = placeholders + + if user_input is not None: + return await self._async_check_and_create( + context[CONF_HOST], context[CONF_PORT] + ) + + return self.async_show_form( + step_id="confirm", description_placeholders=placeholders + ) + + async def async_step_ssdp(self, discovery_info): + """Handle a discovered device.""" + host = urlparse(discovery_info[ATTR_SSDP_LOCATION]).hostname + port = DEFAULT_PORT + uuid = get_uniqueid_from_udn(discovery_info[ATTR_UPNP_UDN]) + + await self._async_set_unique_id_and_update(host, port, uuid) + + 
context = self.context # pylint: disable=no-member + context[CONF_HOST] = host + context[CONF_PORT] = DEFAULT_PORT + return await self.async_step_confirm() diff --git a/homeassistant/components/arcam_fmj/const.py b/homeassistant/components/arcam_fmj/const.py index 180abf2c960..9f837c94bcd 100644 --- a/homeassistant/components/arcam_fmj/const.py +++ b/homeassistant/components/arcam_fmj/const.py @@ -13,4 +13,3 @@ DEFAULT_SCAN_INTERVAL = 5 DOMAIN_DATA_ENTRIES = f"{DOMAIN}.entries" DOMAIN_DATA_TASKS = f"{DOMAIN}.tasks" -DOMAIN_DATA_CONFIG = f"{DOMAIN}.config" diff --git a/homeassistant/components/arcam_fmj/manifest.json b/homeassistant/components/arcam_fmj/manifest.json index ff89641667a..053c0372d25 100644 --- a/homeassistant/components/arcam_fmj/manifest.json +++ b/homeassistant/components/arcam_fmj/manifest.json @@ -1,8 +1,14 @@ { "domain": "arcam_fmj", "name": "Arcam FMJ Receivers", - "config_flow": false, + "config_flow": true, "documentation": "https://www.home-assistant.io/integrations/arcam_fmj", - "requirements": ["arcam-fmj==0.4.6"], + "requirements": ["arcam-fmj==0.5.1"], + "ssdp": [ + { + "deviceType": "urn:schemas-upnp-org:device:MediaRenderer:1", + "manufacturer": "ARCAM" + } + ], "codeowners": ["@elupus"] } diff --git a/homeassistant/components/arcam_fmj/media_player.py b/homeassistant/components/arcam_fmj/media_player.py index 27e1497a32d..0ead1f16b94 100644 --- a/homeassistant/components/arcam_fmj/media_player.py +++ b/homeassistant/components/arcam_fmj/media_player.py @@ -1,6 +1,5 @@ """Arcam media player.""" import logging -from typing import Optional from arcam.fmj import DecodeMode2CH, DecodeModeMCH, IncomingAudioFormat, SourceCodes from arcam.fmj.state import State @@ -17,21 +16,13 @@ from homeassistant.components.media_player.const import ( SUPPORT_VOLUME_SET, SUPPORT_VOLUME_STEP, ) -from homeassistant.const import ( - ATTR_ENTITY_ID, - CONF_NAME, - CONF_ZONE, - SERVICE_TURN_ON, - STATE_OFF, - STATE_ON, -) +from homeassistant.const import ATTR_ENTITY_ID, STATE_OFF, STATE_ON from homeassistant.core import callback -from homeassistant.helpers.service import async_call_from_config -from homeassistant.helpers.typing import ConfigType, HomeAssistantType +from homeassistant.helpers.typing import HomeAssistantType +from .config_flow import get_entry_client from .const import ( DOMAIN, - DOMAIN_DATA_ENTRIES, EVENT_TURN_ON, SIGNAL_CLIENT_DATA, SIGNAL_CLIENT_STARTED, @@ -47,19 +38,17 @@ async def async_setup_entry( async_add_entities, ): """Set up the configuration entry.""" - data = hass.data[DOMAIN_DATA_ENTRIES][config_entry.entry_id] - client = data["client"] - config = data["config"] + + client = get_entry_client(hass, config_entry) async_add_entities( [ ArcamFmj( + config_entry.title, State(client, zone), config_entry.unique_id or config_entry.entry_id, - zone_config[CONF_NAME], - zone_config.get(SERVICE_TURN_ON), ) - for zone, zone_config in config[CONF_ZONE].items() + for zone in [1, 2] ], True, ) @@ -71,13 +60,13 @@ class ArcamFmj(MediaPlayerEntity): """Representation of a media device.""" def __init__( - self, state: State, uuid: str, name: str, turn_on: Optional[ConfigType] + self, device_name, state: State, uuid: str, ): """Initialize device.""" self._state = state + self._device_name = device_name + self._name = f"{device_name} - Zone: {state.zn}" self._uuid = uuid - self._name = name - self._turn_on = turn_on self._support = ( SUPPORT_SELECT_SOURCE | SUPPORT_VOLUME_SET @@ -102,6 +91,11 @@ class ArcamFmj(MediaPlayerEntity): ) ) + @property + def 
entity_registry_enabled_default(self) -> bool:
+        """Return if the entity should be enabled when first added to the entity registry."""
+        return self._state.zn == 1
+
     @property
     def unique_id(self):
         """Return unique identifier if known."""
@@ -111,8 +105,12 @@ class ArcamFmj(MediaPlayerEntity):
     def device_info(self):
         """Return a device description for device registry."""
         return {
-            "identifiers": {(DOMAIN, self._state.client.host, self._state.client.port)},
-            "model": "FMJ",
+            "name": self._device_name,
+            "identifiers": {
+                (DOMAIN, self._uuid),
+                (DOMAIN, self._state.client.host, self._state.client.port),
+            },
+            "model": "Arcam FMJ AVR",
             "manufacturer": "Arcam",
         }
 
@@ -229,15 +227,6 @@ class ArcamFmj(MediaPlayerEntity):
         if self._state.get_power() is not None:
             _LOGGER.debug("Turning on device using connection")
             await self._state.set_power(True)
-        elif self._turn_on:
-            _LOGGER.debug("Turning on device using service call")
-            await async_call_from_config(
-                self.hass,
-                self._turn_on,
-                variables=None,
-                blocking=True,
-                validate_config=False,
-            )
         else:
             _LOGGER.debug("Firing event to turn on device")
             self.hass.bus.async_fire(EVENT_TURN_ON, {ATTR_ENTITY_ID: self.entity_id})
diff --git a/homeassistant/components/arcam_fmj/strings.json b/homeassistant/components/arcam_fmj/strings.json
index 6f60c9e2471..67aaf7a11cb 100644
--- a/homeassistant/components/arcam_fmj/strings.json
+++ b/homeassistant/components/arcam_fmj/strings.json
@@ -1,7 +1,28 @@
 {
+  "config": {
+    "abort": {
+      "already_configured": "Device was already set up.",
+      "already_in_progress": "Config flow for device is already in progress.",
+      "unable_to_connect": "Unable to connect to device."
+    },
+    "error": {},
+    "flow_title": "Arcam FMJ on {host}",
+    "step": {
+      "confirm": {
+        "description": "Do you want to add Arcam FMJ on `{host}` to Home Assistant?"
+      },
+      "user": {
+        "data": {
+          "host": "[%key:common::config_flow::data::host%]",
+          "port": "[%key:common::config_flow::data::port%]"
+        },
+        "description": "Please enter the host name or IP address of the device."
+ } + } + }, "device_automation": { "trigger_type": { "turn_on": "{entity_name} was requested to turn on" } } -} \ No newline at end of file +} diff --git a/homeassistant/components/arcam_fmj/translations/bg.json b/homeassistant/components/arcam_fmj/translations/bg.json deleted file mode 100644 index b78b8cbaa7b..00000000000 --- a/homeassistant/components/arcam_fmj/translations/bg.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "title": "Arcam FMJ" -} \ No newline at end of file diff --git a/homeassistant/components/arcam_fmj/translations/da.json b/homeassistant/components/arcam_fmj/translations/da.json deleted file mode 100644 index b78b8cbaa7b..00000000000 --- a/homeassistant/components/arcam_fmj/translations/da.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "title": "Arcam FMJ" -} \ No newline at end of file diff --git a/homeassistant/components/arcam_fmj/translations/es-419.json b/homeassistant/components/arcam_fmj/translations/es-419.json deleted file mode 100644 index b78b8cbaa7b..00000000000 --- a/homeassistant/components/arcam_fmj/translations/es-419.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "title": "Arcam FMJ" -} \ No newline at end of file diff --git a/homeassistant/components/arcam_fmj/translations/es.json b/homeassistant/components/arcam_fmj/translations/es.json index 2766e90cbb5..3965fba98d9 100644 --- a/homeassistant/components/arcam_fmj/translations/es.json +++ b/homeassistant/components/arcam_fmj/translations/es.json @@ -2,7 +2,7 @@ "config": { "abort": { "already_configured": "El dispositivo ya est\u00e1 configurado.", - "already_in_progress": "La configuraci\u00f3n del flujo para el dispositivo ya est\u00e1 en marcha.", + "already_in_progress": "El flujo de configuraci\u00f3n para el dispositivo ya est\u00e1 en marcha.", "unable_to_connect": "No se puede conectar con el dispositivo." }, "flow_title": "Arcam FMJ en {host}", diff --git a/homeassistant/components/arcam_fmj/translations/fr.json b/homeassistant/components/arcam_fmj/translations/fr.json index 99c938108e1..763d76a8d7d 100644 --- a/homeassistant/components/arcam_fmj/translations/fr.json +++ b/homeassistant/components/arcam_fmj/translations/fr.json @@ -1,4 +1,24 @@ { + "config": { + "abort": { + "already_configured": "L'appareil \u00e9tait d\u00e9j\u00e0 configur\u00e9.", + "already_in_progress": "Le flux de configuration de l'appareil est d\u00e9j\u00e0 en cours.", + "unable_to_connect": "Impossible de se connecter au p\u00e9riph\u00e9rique." + }, + "error": { + "one": "Vide", + "other": "Vide" + }, + "step": { + "user": { + "data": { + "host": "H\u00f4te", + "port": "Port" + }, + "description": "Veuillez saisir le nom d\u2019h\u00f4te ou l\u2019adresse IP du p\u00e9riph\u00e9rique." 
+ } + } + }, "device_automation": { "trigger_type": { "turn_on": "Il a \u00e9t\u00e9 demand\u00e9 \u00e0 {nom_de_l'entit\u00e9} de s'allumer" diff --git a/homeassistant/components/arcam_fmj/translations/nn.json b/homeassistant/components/arcam_fmj/translations/nn.json deleted file mode 100644 index b78b8cbaa7b..00000000000 --- a/homeassistant/components/arcam_fmj/translations/nn.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "title": "Arcam FMJ" -} \ No newline at end of file diff --git a/homeassistant/components/arcam_fmj/translations/pl.json b/homeassistant/components/arcam_fmj/translations/pl.json index cad2b4adb25..6a2c18cbd44 100644 --- a/homeassistant/components/arcam_fmj/translations/pl.json +++ b/homeassistant/components/arcam_fmj/translations/pl.json @@ -1,4 +1,14 @@ { + "config": { + "step": { + "user": { + "data": { + "host": "Nazwa hosta lub adres IP", + "port": "Port" + } + } + } + }, "device_automation": { "trigger_type": { "turn_on": "{entity_name} zostanie poproszony o w\u0142\u0105czenie" diff --git a/homeassistant/components/arcam_fmj/translations/sl.json b/homeassistant/components/arcam_fmj/translations/sl.json deleted file mode 100644 index b78b8cbaa7b..00000000000 --- a/homeassistant/components/arcam_fmj/translations/sl.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "title": "Arcam FMJ" -} \ No newline at end of file diff --git a/homeassistant/components/arcam_fmj/translations/sv.json b/homeassistant/components/arcam_fmj/translations/sv.json deleted file mode 100644 index b78b8cbaa7b..00000000000 --- a/homeassistant/components/arcam_fmj/translations/sv.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "title": "Arcam FMJ" -} \ No newline at end of file diff --git a/homeassistant/components/asuswrt/device_tracker.py b/homeassistant/components/asuswrt/device_tracker.py index 5e3297da8ff..bb11436b2c5 100644 --- a/homeassistant/components/asuswrt/device_tracker.py +++ b/homeassistant/components/asuswrt/device_tracker.py @@ -24,6 +24,7 @@ class AsusWrtDeviceScanner(DeviceScanner): self.last_results = {} self.success_init = False self.connection = api + self._connect_error = False async def async_connect(self): """Initialize connection to the router.""" @@ -49,4 +50,15 @@ class AsusWrtDeviceScanner(DeviceScanner): """ _LOGGER.debug("Checking Devices") - self.last_results = await self.connection.async_get_connected_devices() + try: + self.last_results = await self.connection.async_get_connected_devices() + if self._connect_error: + self._connect_error = False + _LOGGER.error("Reconnected to ASUS router for device update") + + except OSError as err: + if not self._connect_error: + self._connect_error = True + _LOGGER.error( + "Error connecting to ASUS router for device update: %s", err + ) diff --git a/homeassistant/components/asuswrt/sensor.py b/homeassistant/components/asuswrt/sensor.py index cbe32a1ec43..77555deaba4 100644 --- a/homeassistant/components/asuswrt/sensor.py +++ b/homeassistant/components/asuswrt/sensor.py @@ -49,6 +49,7 @@ class AsuswrtSensor(Entity): self._devices = None self._rates = None self._speed = None + self._connect_error = False @property def name(self): @@ -62,9 +63,23 @@ class AsuswrtSensor(Entity): async def async_update(self): """Fetch status from asuswrt.""" - self._devices = await self._api.async_get_connected_devices() - self._rates = await self._api.async_get_bytes_total() - self._speed = await self._api.async_get_current_transfer_rates() + try: + self._devices = await self._api.async_get_connected_devices() + self._rates = await self._api.async_get_bytes_total() + 
self._speed = await self._api.async_get_current_transfer_rates() + if self._connect_error: + self._connect_error = False + _LOGGER.error( + "Reconnected to ASUS router for %s update", self.entity_id + ) + except OSError as err: + if not self._connect_error: + self._connect_error = True + _LOGGER.error( + "Error connecting to ASUS router for %s update: %s", + self.entity_id, + err, + ) class AsuswrtDevicesSensor(AsuswrtSensor): diff --git a/homeassistant/components/atag/translations/de.json b/homeassistant/components/atag/translations/de.json index 1b2b1324864..e87be7f4e0a 100644 --- a/homeassistant/components/atag/translations/de.json +++ b/homeassistant/components/atag/translations/de.json @@ -1,7 +1,7 @@ { "config": { "abort": { - "already_configured": "Nur ein Atag-Ger\u00e4t kann mit Home Assistant verbunden werden." + "already_configured": "Dieses Ger\u00e4t wurde bereits zu HomeAssistant hinzugef\u00fcgt" }, "error": { "connection_error": "Verbindung fehlgeschlagen, versuchen Sie es erneut" diff --git a/homeassistant/components/auth/manifest.json b/homeassistant/components/auth/manifest.json index b8c711c1dda..2674bdfb032 100644 --- a/homeassistant/components/auth/manifest.json +++ b/homeassistant/components/auth/manifest.json @@ -3,7 +3,6 @@ "name": "Auth", "documentation": "https://www.home-assistant.io/integrations/auth", "dependencies": ["http"], - "after_dependencies": ["onboarding"], "codeowners": ["@home-assistant/core"], "quality_scale": "internal" } diff --git a/homeassistant/components/automation/__init__.py b/homeassistant/components/automation/__init__.py index 8b2c036034b..e5f2f611cdb 100644 --- a/homeassistant/components/automation/__init__.py +++ b/homeassistant/components/automation/__init__.py @@ -222,19 +222,6 @@ async def async_setup(hass, config): hass, DOMAIN, SERVICE_RELOAD, reload_service_handler, schema=vol.Schema({}) ) - @callback - def async_describe_logbook_event(event): - """Describe a logbook event.""" - return { - "name": event.data.get(ATTR_NAME), - "message": "has been triggered", - "entity_id": event.data.get(ATTR_ENTITY_ID), - } - - hass.components.logbook.async_describe_event( - DOMAIN, EVENT_AUTOMATION_TRIGGERED, async_describe_logbook_event - ) - return True diff --git a/homeassistant/components/automation/logbook.py b/homeassistant/components/automation/logbook.py new file mode 100644 index 00000000000..2e3ad2475fc --- /dev/null +++ b/homeassistant/components/automation/logbook.py @@ -0,0 +1,23 @@ +"""Describe logbook events.""" +from homeassistant.const import ATTR_ENTITY_ID, ATTR_NAME +from homeassistant.core import callback + +from . 
import DOMAIN, EVENT_AUTOMATION_TRIGGERED + + +@callback +def async_describe_events(hass, async_describe_event): # type: ignore + """Describe logbook events.""" + + @callback + def async_describe_logbook_event(event): # type: ignore + """Describe a logbook event.""" + return { + "name": event.data.get(ATTR_NAME), + "message": "has been triggered", + "entity_id": event.data.get(ATTR_ENTITY_ID), + } + + async_describe_event( + DOMAIN, EVENT_AUTOMATION_TRIGGERED, async_describe_logbook_event + ) diff --git a/homeassistant/components/automation/manifest.json b/homeassistant/components/automation/manifest.json index a93baa0528a..a8dc43844e0 100644 --- a/homeassistant/components/automation/manifest.json +++ b/homeassistant/components/automation/manifest.json @@ -2,7 +2,12 @@ "domain": "automation", "name": "Automation", "documentation": "https://www.home-assistant.io/integrations/automation", - "after_dependencies": ["device_automation", "logbook", "webhook"], - "codeowners": ["@home-assistant/core"], + "after_dependencies": [ + "device_automation", + "webhook" + ], + "codeowners": [ + "@home-assistant/core" + ], "quality_scale": "internal" } diff --git a/homeassistant/components/automation/mqtt.py b/homeassistant/components/automation/mqtt.py index 046cbba2873..8bb8ad46041 100644 --- a/homeassistant/components/automation/mqtt.py +++ b/homeassistant/components/automation/mqtt.py @@ -19,7 +19,7 @@ DEFAULT_QOS = 0 TRIGGER_SCHEMA = vol.Schema( { vol.Required(CONF_PLATFORM): mqtt.DOMAIN, - vol.Required(CONF_TOPIC): mqtt.valid_subscribe_topic, + vol.Required(CONF_TOPIC): mqtt.util.valid_subscribe_topic, vol.Optional(CONF_PAYLOAD): cv.string, vol.Optional(CONF_ENCODING, default=DEFAULT_ENCODING): cv.string, vol.Optional(CONF_QOS, default=DEFAULT_QOS): vol.All( diff --git a/homeassistant/components/avri/.translations/en.json b/homeassistant/components/avri/.translations/en.json new file mode 100644 index 00000000000..83cd4232d42 --- /dev/null +++ b/homeassistant/components/avri/.translations/en.json @@ -0,0 +1,24 @@ +{ + "config": { + "abort": { + "already_configured": "This address is already configured." + }, + "error": { + "invalid_country_code": "Unknown 2 letter country code.", + "invalid_house_number": "Invalid house number." + }, + "step": { + "user": { + "data": { + "country_code": "2 Letter country code", + "house_number": "House number", + "house_number_extension": "House number extension", + "zip_code": "Zip code" + }, + "description": "Enter your address", + "title": "Avri" + } + } + }, + "title": "Avri" +} \ No newline at end of file diff --git a/homeassistant/components/avri/.translations/nl.json b/homeassistant/components/avri/.translations/nl.json new file mode 100644 index 00000000000..22798b09689 --- /dev/null +++ b/homeassistant/components/avri/.translations/nl.json @@ -0,0 +1,24 @@ +{ + "config": { + "abort": { + "already_configured": "Dit adres is reeds geconfigureerd." + }, + "error": { + "invalid_country_code": "Onbekende landcode", + "invalid_house_number": "Ongeldig huisnummer." 
+ }, + "step": { + "user": { + "data": { + "country_code": "2 Letter landcode", + "house_number": "Huisnummer", + "house_number_extension": "Huisnummer toevoeging", + "zip_code": "Postcode" + }, + "description": "Vul je adres in.", + "title": "Avri" + } + } + }, + "title": "Avri" +} \ No newline at end of file diff --git a/homeassistant/components/avri/__init__.py b/homeassistant/components/avri/__init__.py index 4d99b2ed0e4..3165b6ee87a 100644 --- a/homeassistant/components/avri/__init__.py +++ b/homeassistant/components/avri/__init__.py @@ -1 +1,63 @@ """The avri component.""" +import asyncio +from datetime import timedelta +import logging + +from avri.api import Avri + +from homeassistant.config_entries import ConfigEntry +from homeassistant.core import HomeAssistant + +from .const import ( + CONF_COUNTRY_CODE, + CONF_HOUSE_NUMBER, + CONF_HOUSE_NUMBER_EXTENSION, + CONF_ZIP_CODE, + DOMAIN, +) + +_LOGGER = logging.getLogger(__name__) + +PLATFORMS = ["sensor"] +SCAN_INTERVAL = timedelta(hours=4) + + +async def async_setup(hass: HomeAssistant, config: dict): + """Set up the Avri component.""" + hass.data[DOMAIN] = {} + return True + + +async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry): + """Set up Avri from a config entry.""" + client = Avri( + postal_code=entry.data[CONF_ZIP_CODE], + house_nr=entry.data[CONF_HOUSE_NUMBER], + house_nr_extension=entry.data.get(CONF_HOUSE_NUMBER_EXTENSION), + country_code=entry.data[CONF_COUNTRY_CODE], + ) + + hass.data[DOMAIN][entry.entry_id] = client + + for component in PLATFORMS: + hass.async_create_task( + hass.config_entries.async_forward_entry_setup(entry, component) + ) + + return True + + +async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry): + """Unload a config entry.""" + unload_ok = all( + await asyncio.gather( + *[ + hass.config_entries.async_forward_entry_unload(entry, component) + for component in PLATFORMS + ] + ) + ) + if unload_ok: + hass.data[DOMAIN].pop(entry.entry_id) + + return unload_ok diff --git a/homeassistant/components/avri/config_flow.py b/homeassistant/components/avri/config_flow.py new file mode 100644 index 00000000000..d6f9dbf7b62 --- /dev/null +++ b/homeassistant/components/avri/config_flow.py @@ -0,0 +1,74 @@ +"""Config flow for Avri component.""" +import pycountry +import voluptuous as vol + +from homeassistant import config_entries +from homeassistant.const import CONF_ID + +from .const import ( + CONF_COUNTRY_CODE, + CONF_HOUSE_NUMBER, + CONF_HOUSE_NUMBER_EXTENSION, + CONF_ZIP_CODE, + DEFAULT_COUNTRY_CODE, +) +from .const import DOMAIN # pylint:disable=unused-import + +DATA_SCHEMA = vol.Schema( + { + vol.Required(CONF_ZIP_CODE): str, + vol.Required(CONF_HOUSE_NUMBER): int, + vol.Optional(CONF_HOUSE_NUMBER_EXTENSION): str, + vol.Optional(CONF_COUNTRY_CODE, default=DEFAULT_COUNTRY_CODE): str, + } +) + + +class AvriConfigFlow(config_entries.ConfigFlow, domain=DOMAIN): + """Avri config flow.""" + + VERSION = 1 + + async def _show_setup_form(self, errors=None): + """Show the setup form to the user.""" + return self.async_show_form( + step_id="user", data_schema=DATA_SCHEMA, errors=errors or {}, + ) + + async def async_step_user(self, user_input=None): + """Handle the initial step.""" + if user_input is None: + return await self._show_setup_form() + + zip_code = user_input[CONF_ZIP_CODE].replace(" ", "").upper() + + errors = {} + if user_input[CONF_HOUSE_NUMBER] <= 0: + errors[CONF_HOUSE_NUMBER] = "invalid_house_number" + return await self._show_setup_form(errors) + if not 
pycountry.countries.get(alpha_2=user_input[CONF_COUNTRY_CODE]): + errors[CONF_COUNTRY_CODE] = "invalid_country_code" + return await self._show_setup_form(errors) + + unique_id = ( + f"{zip_code}" + f" " + f"{user_input[CONF_HOUSE_NUMBER]}" + f'{user_input.get(CONF_HOUSE_NUMBER_EXTENSION, "")}' + ) + + await self.async_set_unique_id(unique_id) + self._abort_if_unique_id_configured() + + return self.async_create_entry( + title=unique_id, + data={ + CONF_ID: unique_id, + CONF_ZIP_CODE: zip_code, + CONF_HOUSE_NUMBER: user_input[CONF_HOUSE_NUMBER], + CONF_HOUSE_NUMBER_EXTENSION: user_input.get( + CONF_HOUSE_NUMBER_EXTENSION, "" + ), + CONF_COUNTRY_CODE: user_input[CONF_COUNTRY_CODE], + }, + ) diff --git a/homeassistant/components/avri/const.py b/homeassistant/components/avri/const.py new file mode 100644 index 00000000000..dab3491b356 --- /dev/null +++ b/homeassistant/components/avri/const.py @@ -0,0 +1,8 @@ +"""Constants for the Avri integration.""" +CONF_COUNTRY_CODE = "country_code" +CONF_ZIP_CODE = "zip_code" +CONF_HOUSE_NUMBER = "house_number" +CONF_HOUSE_NUMBER_EXTENSION = "house_number_extension" +DOMAIN = "avri" +ICON = "mdi:trash-can-outline" +DEFAULT_COUNTRY_CODE = "NL" diff --git a/homeassistant/components/avri/manifest.json b/homeassistant/components/avri/manifest.json index 41be3251b10..8a418bfb7bd 100644 --- a/homeassistant/components/avri/manifest.json +++ b/homeassistant/components/avri/manifest.json @@ -2,6 +2,12 @@ "domain": "avri", "name": "Avri", "documentation": "https://www.home-assistant.io/integrations/avri", - "requirements": ["avri-api==0.1.7"], - "codeowners": ["@timvancann"] -} + "requirements": [ + "avri-api==0.1.7", + "pycountry==19.8.18" + ], + "codeowners": [ + "@timvancann" + ], + "config_flow": true +} \ No newline at end of file diff --git a/homeassistant/components/avri/sensor.py b/homeassistant/components/avri/sensor.py index a221147f065..a4931004a92 100644 --- a/homeassistant/components/avri/sensor.py +++ b/homeassistant/components/avri/sensor.py @@ -1,45 +1,25 @@ """Support for Avri waste curbside collection pickup.""" -from datetime import timedelta import logging from avri.api import Avri, AvriException -import voluptuous as vol -from homeassistant.components.sensor import PLATFORM_SCHEMA -from homeassistant.const import CONF_NAME +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import CONF_ID, DEVICE_CLASS_TIMESTAMP from homeassistant.exceptions import PlatformNotReady -import homeassistant.helpers.config_validation as cv from homeassistant.helpers.entity import Entity +from homeassistant.helpers.typing import HomeAssistantType + +from .const import DOMAIN, ICON _LOGGER = logging.getLogger(__name__) -CONF_COUNTRY_CODE = "country_code" -CONF_ZIP_CODE = "zip_code" -CONF_HOUSE_NUMBER = "house_number" -CONF_HOUSE_NUMBER_EXTENSION = "house_number_extension" -DEFAULT_NAME = "avri" -ICON = "mdi:trash-can-outline" -SCAN_INTERVAL = timedelta(hours=4) -DEFAULT_COUNTRY_CODE = "NL" - -PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( - { - vol.Required(CONF_ZIP_CODE): cv.string, - vol.Required(CONF_HOUSE_NUMBER): cv.positive_int, - vol.Optional(CONF_HOUSE_NUMBER_EXTENSION): cv.string, - vol.Optional(CONF_COUNTRY_CODE, default=DEFAULT_COUNTRY_CODE): cv.string, - vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string, - } -) -def setup_platform(hass, config, add_entities, discovery_info=None): +async def async_setup_entry( + hass: HomeAssistantType, entry: ConfigEntry, async_add_entities +) -> None: """Set up the Avri Waste platform.""" - 
client = Avri( - postal_code=config[CONF_ZIP_CODE], - house_nr=config[CONF_HOUSE_NUMBER], - house_nr_extension=config.get(CONF_HOUSE_NUMBER_EXTENSION), - country_code=config[CONF_COUNTRY_CODE], - ) + client = hass.data[DOMAIN][entry.entry_id] + integration_id = entry.data[CONF_ID] try: each_upcoming = client.upcoming_of_each() @@ -47,22 +27,23 @@ def setup_platform(hass, config, add_entities, discovery_info=None): raise PlatformNotReady from ex else: entities = [ - AvriWasteUpcoming(config[CONF_NAME], client, upcoming.name) + AvriWasteUpcoming(client, upcoming.name, integration_id) for upcoming in each_upcoming ] - add_entities(entities, True) + async_add_entities(entities, True) class AvriWasteUpcoming(Entity): """Avri Waste Sensor.""" - def __init__(self, name: str, client: Avri, waste_type: str): + def __init__(self, client: Avri, waste_type: str, integration_id: str): """Initialize the sensor.""" self._waste_type = waste_type - self._name = f"{name}_{self._waste_type}" + self._name = f"{self._waste_type}".title() self._state = None self._client = client self._state_available = False + self._integration_id = integration_id @property def name(self): @@ -72,13 +53,7 @@ class AvriWasteUpcoming(Entity): @property def unique_id(self) -> str: """Return a unique ID.""" - return ( - f"{self._waste_type}" - f"-{self._client.country_code}" - f"-{self._client.postal_code}" - f"-{self._client.house_nr}" - f"-{self._client.house_nr_extension}" - ) + return (f"{self._integration_id}" f"-{self._waste_type}").replace(" ", "") @property def state(self): @@ -90,13 +65,21 @@ class AvriWasteUpcoming(Entity): """Return True if entity is available.""" return self._state_available + @property + def device_class(self): + """Return the device class of the sensor.""" + return DEVICE_CLASS_TIMESTAMP + @property def icon(self): """Icon to use in the frontend.""" return ICON - def update(self): - """Update device state.""" + async def async_update(self): + """Update the data.""" + if not self.enabled: + return + try: pickup_events = self._client.upcoming_of_each() except AvriException as ex: diff --git a/homeassistant/components/avri/strings.json b/homeassistant/components/avri/strings.json new file mode 100644 index 00000000000..9c7af6e47f2 --- /dev/null +++ b/homeassistant/components/avri/strings.json @@ -0,0 +1,24 @@ +{ + "title": "Avri", + "config": { + "abort": { + "already_configured": "This address is already configured." + }, + "error": { + "invalid_house_number": "Invalid house number.", + "invalid_country_code": "Unknown 2 letter country code." + }, + "step": { + "user": { + "data": { + "zip_code": "Zip code", + "house_number": "House number", + "house_number_extension": "House number extension", + "country_code": "2 Letter country code" + }, + "description": "Enter your address", + "title": "Avri" + } + } + } +} diff --git a/homeassistant/components/avri/translations/ar.json b/homeassistant/components/avri/translations/ar.json new file mode 100644 index 00000000000..b23bf7e8970 --- /dev/null +++ b/homeassistant/components/avri/translations/ar.json @@ -0,0 +1,7 @@ +{ + "config": { + "abort": { + "already_configured": "\u062a\u0645 \u062a\u0643\u0648\u064a\u0646 \u0647\u0630\u0627 \u0627\u0644\u0639\u0646\u0648\u0627\u0646 \u0628\u0627\u0644\u0641\u0639\u0644." 
+ } + } +} \ No newline at end of file diff --git a/homeassistant/components/avri/translations/fr.json b/homeassistant/components/avri/translations/fr.json index 1c4a5d48b06..6d272aaa51e 100644 --- a/homeassistant/components/avri/translations/fr.json +++ b/homeassistant/components/avri/translations/fr.json @@ -1,6 +1,10 @@ { "config": { + "abort": { + "already_configured": "Cette adresse est d\u00e9j\u00e0 configur\u00e9e." + }, "error": { + "invalid_country_code": "Code pays \u00e0 2 lettres inconnu.", "invalid_house_number": "Num\u00e9ro de maison invalide." }, "step": { diff --git a/homeassistant/components/awair/__init__.py b/homeassistant/components/awair/__init__.py index c9a08cb40b5..c002693d6e9 100644 --- a/homeassistant/components/awair/__init__.py +++ b/homeassistant/components/awair/__init__.py @@ -1 +1,112 @@ """The awair component.""" + +from asyncio import gather +from typing import Any, Optional + +from async_timeout import timeout +from python_awair import Awair +from python_awair.exceptions import AuthError + +from homeassistant.const import CONF_ACCESS_TOKEN +from homeassistant.core import Config, HomeAssistant +from homeassistant.exceptions import ConfigEntryNotReady +from homeassistant.helpers.aiohttp_client import async_get_clientsession +from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed + +from .const import API_TIMEOUT, DOMAIN, LOGGER, UPDATE_INTERVAL, AwairResult + +PLATFORMS = ["sensor"] + + +async def async_setup(hass: HomeAssistant, config: Config) -> bool: + """Set up Awair integration.""" + return True + + +async def async_setup_entry(hass, config_entry) -> bool: + """Set up Awair integration from a config entry.""" + session = async_get_clientsession(hass) + coordinator = AwairDataUpdateCoordinator(hass, config_entry, session) + + await coordinator.async_refresh() + + if not coordinator.last_update_success: + raise ConfigEntryNotReady + + hass.data.setdefault(DOMAIN, {}) + hass.data[DOMAIN][config_entry.entry_id] = coordinator + + for platform in PLATFORMS: + hass.async_create_task( + hass.config_entries.async_forward_entry_setup(config_entry, platform) + ) + + return True + + +async def async_unload_entry(hass, config_entry) -> bool: + """Unload Awair configuration.""" + tasks = [] + for platform in PLATFORMS: + tasks.append( + hass.config_entries.async_forward_entry_unload(config_entry, platform) + ) + + unload_ok = all(await gather(*tasks)) + if unload_ok: + hass.data[DOMAIN].pop(config_entry.entry_id) + + return unload_ok + + +class AwairDataUpdateCoordinator(DataUpdateCoordinator): + """Define a wrapper class to update Awair data.""" + + def __init__(self, hass, config_entry, session) -> None: + """Set up the AwairDataUpdateCoordinator class.""" + access_token = config_entry.data[CONF_ACCESS_TOKEN] + self._awair = Awair(access_token=access_token, session=session) + self._config_entry = config_entry + + super().__init__(hass, LOGGER, name=DOMAIN, update_interval=UPDATE_INTERVAL) + + async def _async_update_data(self) -> Optional[Any]: + """Update data via Awair client library.""" + with timeout(API_TIMEOUT): + try: + LOGGER.debug("Fetching users and devices") + user = await self._awair.user() + devices = await user.devices() + results = await gather( + *[self._fetch_air_data(device) for device in devices] + ) + return {result.device.uuid: result for result in results} + except AuthError as err: + flow_context = { + "source": "reauth", + "unique_id": self._config_entry.unique_id, + } + + matching_flows = [ + flow + for 
flow in self.hass.config_entries.flow.async_progress() + if flow["context"] == flow_context + ] + + if not matching_flows: + self.hass.async_create_task( + self.hass.config_entries.flow.async_init( + DOMAIN, context=flow_context, data=self._config_entry.data, + ) + ) + + raise UpdateFailed(err) + except Exception as err: + raise UpdateFailed(err) + + async def _fetch_air_data(self, device): + """Fetch latest air quality data.""" + LOGGER.debug("Fetching data for %s", device.uuid) + air_data = await device.air_data_latest() + LOGGER.debug(air_data) + return AwairResult(device=device, air_data=air_data) diff --git a/homeassistant/components/awair/config_flow.py b/homeassistant/components/awair/config_flow.py new file mode 100644 index 00000000000..886a51342c5 --- /dev/null +++ b/homeassistant/components/awair/config_flow.py @@ -0,0 +1,109 @@ +"""Config flow for Awair.""" + +from typing import Optional + +from python_awair import Awair +from python_awair.exceptions import AuthError, AwairError +import voluptuous as vol + +from homeassistant.config_entries import CONN_CLASS_CLOUD_POLL, ConfigFlow +from homeassistant.const import CONF_ACCESS_TOKEN +from homeassistant.helpers.aiohttp_client import async_get_clientsession + +from .const import DOMAIN, LOGGER # pylint: disable=unused-import + + +class AwairFlowHandler(ConfigFlow, domain=DOMAIN): + """Config flow for Awair.""" + + VERSION = 1 + CONNECTION_CLASS = CONN_CLASS_CLOUD_POLL + + async def async_step_import(self, conf: dict): + """Import a configuration from config.yaml.""" + if self.hass.config_entries.async_entries(DOMAIN): + return self.async_abort(reason="already_setup") + + user, error = await self._check_connection(conf[CONF_ACCESS_TOKEN]) + if error is not None: + return self.async_abort(reason=error) + + await self.async_set_unique_id(user.email) + self._abort_if_unique_id_configured() + + return self.async_create_entry( + title=f"{user.email} ({user.user_id})", + data={CONF_ACCESS_TOKEN: conf[CONF_ACCESS_TOKEN]}, + ) + + async def async_step_user(self, user_input: Optional[dict] = None): + """Handle a flow initialized by the user.""" + errors = {} + + if user_input is not None: + user, error = await self._check_connection(user_input[CONF_ACCESS_TOKEN]) + + if user is not None: + await self.async_set_unique_id(user.email) + self._abort_if_unique_id_configured() + + title = f"{user.email} ({user.user_id})" + return self.async_create_entry(title=title, data=user_input) + + if error != "auth": + return self.async_abort(reason=error) + + errors = {CONF_ACCESS_TOKEN: "auth"} + + return self.async_show_form( + step_id="user", + data_schema=vol.Schema({vol.Required(CONF_ACCESS_TOKEN): str}), + errors=errors, + ) + + async def async_step_reauth(self, user_input: Optional[dict] = None): + """Handle re-auth if token invalid.""" + errors = {} + + if user_input is not None: + access_token = user_input[CONF_ACCESS_TOKEN] + _, error = await self._check_connection(access_token) + + if error is None: + for entry in self._async_current_entries(): + if entry.unique_id == self.unique_id: + self.hass.config_entries.async_update_entry( + entry, data=user_input + ) + + return self.async_abort(reason="reauth_successful") + + if error != "auth": + return self.async_abort(reason=error) + + errors = {CONF_ACCESS_TOKEN: error} + + return self.async_show_form( + step_id="reauth", + data_schema=vol.Schema({vol.Required(CONF_ACCESS_TOKEN): str}), + errors=errors, + ) + + async def _check_connection(self, access_token: str): + """Check the access token is 
valid.""" + session = async_get_clientsession(self.hass) + awair = Awair(access_token=access_token, session=session) + + try: + user = await awair.user() + devices = await user.devices() + if not devices: + return (None, "no_devices") + + return (user, None) + + except AuthError: + return (None, "auth") + except AwairError as err: + LOGGER.error("Unexpected API error: %s", err) + return (None, "unknown") diff --git a/homeassistant/components/awair/const.py b/homeassistant/components/awair/const.py new file mode 100644 index 00000000000..5735078eee5 --- /dev/null +++ b/homeassistant/components/awair/const.py @@ -0,0 +1,120 @@ +"""Constants for the Awair component.""" + +from dataclasses import dataclass +from datetime import timedelta +import logging + +from python_awair.devices import AwairDevice + +from homeassistant.const import ( + ATTR_DEVICE_CLASS, + CONCENTRATION_MICROGRAMS_PER_CUBIC_METER, + CONCENTRATION_PARTS_PER_BILLION, + CONCENTRATION_PARTS_PER_MILLION, + DEVICE_CLASS_HUMIDITY, + DEVICE_CLASS_ILLUMINANCE, + DEVICE_CLASS_TEMPERATURE, + TEMP_CELSIUS, + UNIT_PERCENTAGE, +) + +API_CO2 = "carbon_dioxide" +API_DUST = "dust" +API_HUMID = "humidity" +API_LUX = "illuminance" +API_PM10 = "particulate_matter_10" +API_PM25 = "particulate_matter_2_5" +API_SCORE = "score" +API_SPL_A = "sound_pressure_level" +API_TEMP = "temperature" +API_TIMEOUT = 20 +API_VOC = "volatile_organic_compounds" + +ATTRIBUTION = "Awair air quality sensor" + +ATTR_ICON = "icon" +ATTR_LABEL = "label" +ATTR_UNIT = "unit" +ATTR_UNIQUE_ID = "unique_id" + +DOMAIN = "awair" + +DUST_ALIASES = [API_PM25, API_PM10] + +LOGGER = logging.getLogger(__package__) + +UPDATE_INTERVAL = timedelta(minutes=5) + +SENSOR_TYPES = { + API_SCORE: { + ATTR_DEVICE_CLASS: None, + ATTR_ICON: "mdi:blur", + ATTR_UNIT: UNIT_PERCENTAGE, + ATTR_LABEL: "Awair score", + ATTR_UNIQUE_ID: "score", # matches legacy format + }, + API_HUMID: { + ATTR_DEVICE_CLASS: DEVICE_CLASS_HUMIDITY, + ATTR_ICON: None, + ATTR_UNIT: UNIT_PERCENTAGE, + ATTR_LABEL: "Humidity", + ATTR_UNIQUE_ID: "HUMID", # matches legacy format + }, + API_LUX: { + ATTR_DEVICE_CLASS: DEVICE_CLASS_ILLUMINANCE, + ATTR_ICON: None, + ATTR_UNIT: "lx", + ATTR_LABEL: "Illuminance", + ATTR_UNIQUE_ID: "illuminance", + }, + API_SPL_A: { + ATTR_DEVICE_CLASS: None, + ATTR_ICON: "mdi:ear-hearing", + ATTR_UNIT: "dBa", + ATTR_LABEL: "Sound level", + ATTR_UNIQUE_ID: "sound_level", + }, + API_VOC: { + ATTR_DEVICE_CLASS: None, + ATTR_ICON: "mdi:cloud", + ATTR_UNIT: CONCENTRATION_PARTS_PER_BILLION, + ATTR_LABEL: "Volatile organic compounds", + ATTR_UNIQUE_ID: "VOC", # matches legacy format + }, + API_TEMP: { + ATTR_DEVICE_CLASS: DEVICE_CLASS_TEMPERATURE, + ATTR_ICON: None, + ATTR_UNIT: TEMP_CELSIUS, + ATTR_LABEL: "Temperature", + ATTR_UNIQUE_ID: "TEMP", # matches legacy format + }, + API_PM25: { + ATTR_DEVICE_CLASS: None, + ATTR_ICON: "mdi:blur", + ATTR_UNIT: CONCENTRATION_MICROGRAMS_PER_CUBIC_METER, + ATTR_LABEL: "PM2.5", + ATTR_UNIQUE_ID: "PM25", # matches legacy format + }, + API_PM10: { + ATTR_DEVICE_CLASS: None, + ATTR_ICON: "mdi:blur", + ATTR_UNIT: CONCENTRATION_MICROGRAMS_PER_CUBIC_METER, + ATTR_LABEL: "PM10", + ATTR_UNIQUE_ID: "PM10", # matches legacy format + }, + API_CO2: { + ATTR_DEVICE_CLASS: None, + ATTR_ICON: "mdi:cloud", + ATTR_UNIT: CONCENTRATION_PARTS_PER_MILLION, + ATTR_LABEL: "Carbon dioxide", + ATTR_UNIQUE_ID: "CO2", # matches legacy format + }, +} + + +@dataclass +class AwairResult: + """Wrapper class to hold an awair device and set of air data.""" + + device: AwairDevice + air_data: dict 
diff --git a/homeassistant/components/awair/manifest.json b/homeassistant/components/awair/manifest.json index 2ead58c0fe8..8ae89951442 100644 --- a/homeassistant/components/awair/manifest.json +++ b/homeassistant/components/awair/manifest.json @@ -2,6 +2,7 @@ "domain": "awair", "name": "Awair", "documentation": "https://www.home-assistant.io/integrations/awair", - "requirements": ["python_awair==0.0.4"], - "codeowners": ["@danielsjf"] + "requirements": ["python_awair==0.1.1"], + "codeowners": ["@ahayworth", "@danielsjf"], + "config_flow": true } diff --git a/homeassistant/components/awair/sensor.py b/homeassistant/components/awair/sensor.py index 301055c7e61..e4e2f3fbbd6 100644 --- a/homeassistant/components/awair/sensor.py +++ b/homeassistant/components/awair/sensor.py @@ -1,248 +1,245 @@ -"""Support for the Awair indoor air quality monitor.""" +"""Support for Awair sensors.""" -from datetime import timedelta -import logging -import math +from typing import Callable, List, Optional -from python_awair import AwairClient +from python_awair.devices import AwairDevice import voluptuous as vol -from homeassistant.const import ( - CONCENTRATION_MICROGRAMS_PER_CUBIC_METER, - CONCENTRATION_PARTS_PER_BILLION, - CONCENTRATION_PARTS_PER_MILLION, - CONF_ACCESS_TOKEN, - CONF_DEVICES, - DEVICE_CLASS_HUMIDITY, - DEVICE_CLASS_TEMPERATURE, - TEMP_CELSIUS, - UNIT_PERCENTAGE, -) -from homeassistant.exceptions import PlatformNotReady -from homeassistant.helpers.aiohttp_client import async_get_clientsession +from homeassistant.components.awair import AwairDataUpdateCoordinator, AwairResult +from homeassistant.components.sensor import PLATFORM_SCHEMA +from homeassistant.config_entries import SOURCE_IMPORT +from homeassistant.const import ATTR_ATTRIBUTION, ATTR_DEVICE_CLASS, CONF_ACCESS_TOKEN +from homeassistant.helpers import device_registry as dr import homeassistant.helpers.config_validation as cv from homeassistant.helpers.entity import Entity -from homeassistant.util import Throttle, dt +from homeassistant.helpers.typing import ConfigType, HomeAssistantType -_LOGGER = logging.getLogger(__name__) +from .const import ( + API_DUST, + API_PM25, + API_SCORE, + API_TEMP, + API_VOC, + ATTR_ICON, + ATTR_LABEL, + ATTR_UNIQUE_ID, + ATTR_UNIT, + ATTRIBUTION, + DOMAIN, + DUST_ALIASES, + LOGGER, + SENSOR_TYPES, +) -ATTR_SCORE = "score" -ATTR_TIMESTAMP = "timestamp" -ATTR_LAST_API_UPDATE = "last_api_update" -ATTR_COMPONENT = "component" -ATTR_VALUE = "value" -ATTR_SENSORS = "sensors" - -CONF_UUID = "uuid" - -DEVICE_CLASS_PM2_5 = "PM2.5" -DEVICE_CLASS_PM10 = "PM10" -DEVICE_CLASS_CARBON_DIOXIDE = "CO2" -DEVICE_CLASS_VOLATILE_ORGANIC_COMPOUNDS = "VOC" -DEVICE_CLASS_SCORE = "score" - -SENSOR_TYPES = { - "TEMP": { - "device_class": DEVICE_CLASS_TEMPERATURE, - "unit_of_measurement": TEMP_CELSIUS, - "icon": "mdi:thermometer", - }, - "HUMID": { - "device_class": DEVICE_CLASS_HUMIDITY, - "unit_of_measurement": UNIT_PERCENTAGE, - "icon": "mdi:water-percent", - }, - "CO2": { - "device_class": DEVICE_CLASS_CARBON_DIOXIDE, - "unit_of_measurement": CONCENTRATION_PARTS_PER_MILLION, - "icon": "mdi:periodic-table-co2", - }, - "VOC": { - "device_class": DEVICE_CLASS_VOLATILE_ORGANIC_COMPOUNDS, - "unit_of_measurement": CONCENTRATION_PARTS_PER_BILLION, - "icon": "mdi:cloud", - }, - # Awair docs don't actually specify the size they measure for 'dust', - # but 2.5 allows the sensor to show up in HomeKit - "DUST": { - "device_class": DEVICE_CLASS_PM2_5, - "unit_of_measurement": CONCENTRATION_MICROGRAMS_PER_CUBIC_METER, - "icon": "mdi:cloud", 
- }, - "PM25": { - "device_class": DEVICE_CLASS_PM2_5, - "unit_of_measurement": CONCENTRATION_MICROGRAMS_PER_CUBIC_METER, - "icon": "mdi:cloud", - }, - "PM10": { - "device_class": DEVICE_CLASS_PM10, - "unit_of_measurement": CONCENTRATION_MICROGRAMS_PER_CUBIC_METER, - "icon": "mdi:cloud", - }, - "score": { - "device_class": DEVICE_CLASS_SCORE, - "unit_of_measurement": UNIT_PERCENTAGE, - "icon": "mdi:percent", - }, -} - -AWAIR_QUOTA = 300 - -# This is the minimum time between throttled update calls. -# Don't bother asking us for state more often than that. -SCAN_INTERVAL = timedelta(minutes=5) - -AWAIR_DEVICE_SCHEMA = vol.Schema({vol.Required(CONF_UUID): cv.string}) - -PLATFORM_SCHEMA = cv.PLATFORM_SCHEMA.extend( - { - vol.Required(CONF_ACCESS_TOKEN): cv.string, - vol.Optional(CONF_DEVICES): vol.All(cv.ensure_list, [AWAIR_DEVICE_SCHEMA]), - } +PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( + {vol.Required(CONF_ACCESS_TOKEN): cv.string}, extra=vol.ALLOW_EXTRA, ) -# Awair *heavily* throttles calls that get user information, -# and calls that get the list of user-owned devices - they -# allow 30 per DAY. So, we permit a user to provide a static -# list of devices, and they may provide the same set of information -# that the devices() call would return. However, the only thing -# used at this time is the `uuid` value. async def async_setup_platform(hass, config, async_add_entities, discovery_info=None): - """Connect to the Awair API and find devices.""" + """Import Awair configuration from YAML.""" + LOGGER.warning( + "Loading Awair via platform setup is deprecated. Please remove it from your configuration." + ) + hass.async_create_task( + hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_IMPORT}, data=config, + ) + ) - token = config[CONF_ACCESS_TOKEN] - client = AwairClient(token, session=async_get_clientsession(hass)) - try: - all_devices = [] - devices = config.get(CONF_DEVICES, await client.devices()) +async def async_setup_entry( + hass: HomeAssistantType, + config_entry: ConfigType, + async_add_entities: Callable[[List[Entity], bool], None], +): + """Set up Awair sensor entity based on a config entry.""" + coordinator = hass.data[DOMAIN][config_entry.entry_id] + sensors = [] - # Try to throttle dynamically based on quota and number of devices. - throttle_minutes = math.ceil(60 / ((AWAIR_QUOTA / len(devices)) / 24)) - throttle = timedelta(minutes=throttle_minutes) + data: List[AwairResult] = coordinator.data.values() + for result in data: + if result.air_data: + sensors.append(AwairSensor(API_SCORE, result.device, coordinator)) + device_sensors = result.air_data.sensors.keys() + for sensor in device_sensors: + if sensor in SENSOR_TYPES: + sensors.append(AwairSensor(sensor, result.device, coordinator)) - for device in devices: - _LOGGER.debug("Found awair device: %s", device) - awair_data = AwairData(client, device[CONF_UUID], throttle) - await awair_data.async_update() - for sensor in SENSOR_TYPES: - if sensor in awair_data.data: - awair_sensor = AwairSensor(awair_data, device, sensor, throttle) - all_devices.append(awair_sensor) + # The "DUST" sensor for Awair is a combo pm2.5/pm10 sensor only + # present on first-gen devices in lieu of separate pm2.5/pm10 sensors. + # We handle that by creating fake pm2.5/pm10 sensors that will always + # report identical values, and we let users decide how they want to use + # that data - because we can't really tell what kind of particles the + # "DUST" sensor actually detected. However, it's still useful data. 
+            if API_DUST in device_sensors:
+                for alias_kind in DUST_ALIASES:
+                    sensors.append(AwairSensor(alias_kind, result.device, coordinator))
 
-        async_add_entities(all_devices, True)
-        return
-    except AwairClient.AuthError:
-        _LOGGER.error("Awair API access_token invalid")
-    except AwairClient.RatelimitError:
-        _LOGGER.error("Awair API ratelimit exceeded.")
-    except (
-        AwairClient.QueryError,
-        AwairClient.NotFoundError,
-        AwairClient.GenericError,
-    ) as error:
-        _LOGGER.error("Unexpected Awair API error: %s", error)
-
-    raise PlatformNotReady
+    async_add_entities(sensors)
 
 
 class AwairSensor(Entity):
-    """Implementation of an Awair device."""
+    """Defines an Awair sensor entity."""
 
-    def __init__(self, data, device, sensor_type, throttle):
-        """Initialize the sensor."""
-        self._uuid = device[CONF_UUID]
-        self._device_class = SENSOR_TYPES[sensor_type]["device_class"]
-        self._name = f"Awair {self._device_class}"
-        unit = SENSOR_TYPES[sensor_type]["unit_of_measurement"]
-        self._unit_of_measurement = unit
-        self._data = data
-        self._type = sensor_type
-        self._throttle = throttle
+    def __init__(
+        self, kind: str, device: AwairDevice, coordinator: AwairDataUpdateCoordinator,
+    ) -> None:
+        """Set up an individual AwairSensor."""
+        self._kind = kind
+        self._device = device
+        self._coordinator = coordinator
 
     @property
-    def name(self):
+    def should_poll(self) -> bool:
+        """Return the polling requirement of the entity."""
+        return False
+
+    @property
+    def name(self) -> str:
         """Return the name of the sensor."""
-        return self._name
+        name = SENSOR_TYPES[self._kind][ATTR_LABEL]
+        if self._device.name:
+            name = f"{self._device.name} {name}"
+
+        return name
 
     @property
-    def device_class(self):
-        """Return the device class."""
-        return self._device_class
+    def unique_id(self) -> str:
+        """Return the uuid as the unique_id."""
+        unique_id_tag = SENSOR_TYPES[self._kind][ATTR_UNIQUE_ID]
+
+        # This integration used to create a sensor that was labelled as a "PM2.5"
+        # sensor for first-gen Awair devices, but its unique_id reflected the truth:
+        # under the hood, it was a "DUST" sensor. So we preserve that specific unique_id
+        # for users with first-gen devices that are upgrading.
+        if self._kind == API_PM25 and API_DUST in self._air_data.sensors:
+            unique_id_tag = "DUST"
+
+        return f"{self._device.uuid}_{unique_id_tag}"
 
     @property
-    def icon(self):
-        """Icon to use in the frontend."""
-        return SENSOR_TYPES[self._type]["icon"]
+    def available(self) -> bool:
+        """Determine if the sensor is available based on API results."""
+        # If the last update was successful...
+        if self._coordinator.last_update_success and self._air_data:
+            # and the results included our sensor type...
+            if self._kind in self._air_data.sensors:
+                # then we are available.
+                return True
+
+            # or, we're a dust alias
+            if self._kind in DUST_ALIASES and API_DUST in self._air_data.sensors:
+                return True
+
+            # or we are API_SCORE
+            if self._kind == API_SCORE:
+                # then we are available.
+                return True
+
+        # Otherwise, we are not.
+        return False
 
     @property
-    def state(self):
-        """Return the state of the device."""
-        return self._data.data[self._type]
+    def state(self) -> float:
+        """Return the state, rounding off to reasonable values."""
+        state: float
+
+        # Special-case for "SCORE", which we treat as the AQI
+        if self._kind == API_SCORE:
+            state = self._air_data.score
+        elif self._kind in DUST_ALIASES and API_DUST in self._air_data.sensors:
+            state = self._air_data.sensors.dust
+        else:
+            state = self._air_data.sensors[self._kind]
+
+        if self._kind == API_VOC or self._kind == API_SCORE:
+            return round(state)
+
+        if self._kind == API_TEMP:
+            return round(state, 1)
+
+        return round(state, 2)
 
     @property
-    def device_state_attributes(self):
-        """Return additional attributes."""
-        return self._data.attrs
-
-    # The Awair device should be reporting metrics in quite regularly.
-    # Based on the raw data from the API, it looks like every ~10 seconds
-    # is normal. Here we assert that the device is not available if the
-    # last known API timestamp is more than (3 * throttle) minutes in the
-    # past. It implies that either hass is somehow unable to query the API
-    # for new data or that the device is not checking in. Either condition
-    # fits the definition for 'not available'. We pick (3 * throttle) minutes
-    # to allow for transient errors to correct themselves.
-    @property
-    def available(self):
-        """Device availability based on the last update timestamp."""
-        if ATTR_LAST_API_UPDATE not in self.device_state_attributes:
-            return False
-
-        last_api_data = self.device_state_attributes[ATTR_LAST_API_UPDATE]
-        return (dt.utcnow() - last_api_data) < (3 * self._throttle)
+    def icon(self) -> str:
+        """Return the icon."""
+        return SENSOR_TYPES[self._kind][ATTR_ICON]
 
     @property
-    def unique_id(self):
-        """Return the unique id of this entity."""
-        return f"{self._uuid}_{self._type}"
+    def device_class(self) -> str:
+        """Return the device_class."""
+        return SENSOR_TYPES[self._kind][ATTR_DEVICE_CLASS]
 
     @property
-    def unit_of_measurement(self):
-        """Return the unit of measurement of this entity."""
-        return self._unit_of_measurement
+    def unit_of_measurement(self) -> str:
+        """Return the unit the value is expressed in."""
+        return SENSOR_TYPES[self._kind][ATTR_UNIT]
 
-    async def async_update(self):
-        """Get the latest data."""
-        await self._data.async_update()
+    @property
+    def device_state_attributes(self) -> dict:
+        """Return the Awair Index alongside state attributes.
+        The Awair Index is a subjective score ranging from 0-4 (inclusive) that
+        is used by the Awair app when displaying the relative "safety" of a
+        given measurement. Each value is mapped to a color indicating the safety:
 
-class AwairData:
-    """Get data from Awair API."""
+        0: green
+        1: yellow
+        2: light-orange
+        3: orange
+        4: red
 
-    def __init__(self, client, uuid, throttle):
-        """Initialize the data object."""
-        self._client = client
-        self._uuid = uuid
-        self.data = {}
-        self.attrs = {}
-        self.async_update = Throttle(throttle)(self._async_update)
+        The API indicates that both positive and negative values may be returned,
+        but the negative values are mapped to identical colors as the positive values.
+        Knowing that, we just return the absolute value of a given index so that
+        users don't have to handle positive/negative values that ultimately "mean"
+        the same thing.
- async def _async_update(self): - """Get the data from Awair API.""" - resp = await self._client.air_data_latest(self._uuid) + https://docs.developer.getawair.com/?version=latest#awair-score-and-index + """ + attrs = {ATTR_ATTRIBUTION: ATTRIBUTION} + if self._kind in self._air_data.indices: + attrs["awair_index"] = abs(self._air_data.indices[self._kind]) + elif self._kind in DUST_ALIASES and API_DUST in self._air_data.indices: + attrs["awair_index"] = abs(self._air_data.indices.dust) - if not resp: - return + return attrs - timestamp = dt.parse_datetime(resp[0][ATTR_TIMESTAMP]) - self.attrs[ATTR_LAST_API_UPDATE] = timestamp - self.data[ATTR_SCORE] = resp[0][ATTR_SCORE] + @property + def device_info(self) -> dict: + """Device information.""" + info = { + "identifiers": {(DOMAIN, self._device.uuid)}, + "manufacturer": "Awair", + "model": self._device.model, + } - # The air_data_latest call only returns one item, so this should - # be safe to only process one entry. - for sensor in resp[0][ATTR_SENSORS]: - self.data[sensor[ATTR_COMPONENT]] = round(sensor[ATTR_VALUE], 1) + if self._device.name: + info["name"] = self._device.name - _LOGGER.debug("Got Awair Data for %s: %s", self._uuid, self.data) + if self._device.mac_address: + info["connections"] = { + (dr.CONNECTION_NETWORK_MAC, self._device.mac_address) + } + + return info + + async def async_added_to_hass(self) -> None: + """Connect to dispatcher listening for entity data notifications.""" + self.async_on_remove( + self._coordinator.async_add_listener(self.async_write_ha_state) + ) + + async def async_update(self) -> None: + """Update Awair entity.""" + await self._coordinator.async_request_refresh() + + @property + def _air_data(self) -> Optional[AwairResult]: + """Return the latest data for our device, or None.""" + result: Optional[AwairResult] = self._coordinator.data.get(self._device.uuid) + if result: + return result.air_data + + return None diff --git a/homeassistant/components/awair/strings.json b/homeassistant/components/awair/strings.json new file mode 100644 index 00000000000..1351cbd2db0 --- /dev/null +++ b/homeassistant/components/awair/strings.json @@ -0,0 +1,29 @@ +{ + "config": { + "step": { + "user": { + "description": "You must register for an Awair developer access token at: https://developer.getawair.com/onboard/login", + "data": { + "access_token": "[%key:common::config_flow::data::access_token%]", + "email": "[%key:common::config_flow::data::email%]" + } + }, + "reauth": { + "description": "Please re-enter your Awair developer access token.", + "data": { + "access_token": "[%key:common::config_flow::data::access_token%]", + "email": "[%key:common::config_flow::data::email%]" + } + } + }, + "error": { + "auth": "[%key:common::config_flow::error::invalid_access_token%]", + "unknown": "Unknown Awair API error." 
+ }, + "abort": { + "already_configured": "[%key:common::config_flow::abort::already_configured_account%]", + "no_devices": "[%key:common::config_flow::abort::no_devices_found%]", + "reauth_successful": "[%key:common::config_flow::data::access_token%] updated successfully" + } + } +} diff --git a/homeassistant/components/awair/translations/ca.json b/homeassistant/components/awair/translations/ca.json new file mode 100644 index 00000000000..682fe89aa3b --- /dev/null +++ b/homeassistant/components/awair/translations/ca.json @@ -0,0 +1,28 @@ +{ + "config": { + "abort": { + "already_configured": "El compte ja ha estat configurat", + "no_devices": "No s'han trobat dispositius a la xarxa", + "reauth_successful": "Token d'acc\u00e9s actualitzat correctament" + }, + "error": { + "auth": "Token d'acc\u00e9s no v\u00e0lid", + "unknown": "Error desconegut de l'API Awair." + }, + "step": { + "reauth": { + "data": { + "access_token": "Token d'acc\u00e9s", + "email": "Correu electr\u00f2nic" + }, + "description": "Torna a introduir el token d'acc\u00e9s de desenvolupador d'Awair." + }, + "user": { + "data": { + "access_token": "Token d'acc\u00e9s", + "email": "Correu electr\u00f2nic" + } + } + } + } +} \ No newline at end of file diff --git a/homeassistant/components/awair/translations/en.json b/homeassistant/components/awair/translations/en.json new file mode 100644 index 00000000000..1f3beb390a8 --- /dev/null +++ b/homeassistant/components/awair/translations/en.json @@ -0,0 +1,29 @@ +{ + "config": { + "abort": { + "already_configured": "Account is already configured", + "no_devices": "No devices found on the network", + "reauth_successful": "Access Token updated successfully" + }, + "error": { + "auth": "Invalid access token", + "unknown": "Unknown Awair API error." + }, + "step": { + "reauth": { + "data": { + "access_token": "Access Token", + "email": "Email" + }, + "description": "Please re-enter your Awair developer access token." + }, + "user": { + "data": { + "access_token": "Access Token", + "email": "Email" + }, + "description": "You must register for an Awair developer access token at: https://developer.getawair.com/onboard/login" + } + } + } +} \ No newline at end of file diff --git a/homeassistant/components/awair/translations/es.json b/homeassistant/components/awair/translations/es.json new file mode 100644 index 00000000000..90215ba9c30 --- /dev/null +++ b/homeassistant/components/awair/translations/es.json @@ -0,0 +1,29 @@ +{ + "config": { + "abort": { + "already_configured": "La cuenta ya ha sido configurada", + "no_devices": "No se encontraron dispositivos en la red", + "reauth_successful": "Token de acceso actualizado correctamente " + }, + "error": { + "auth": "Token de acceso no v\u00e1lido", + "unknown": "Error desconocido en API Awair" + }, + "step": { + "reauth": { + "data": { + "access_token": "Token de acceso", + "email": "Correo electr\u00f3nico" + }, + "description": "Por favor, vuelve a introducir tu token de acceso de desarrollador Awair." 
+ }, + "user": { + "data": { + "access_token": "Token de acceso", + "email": "Correo electr\u00f3nico" + }, + "description": "Debes registrarte para obtener un token de acceso de desarrollador Awair en: https://developer.getawair.com/onboard/login" + } + } + } +} \ No newline at end of file diff --git a/homeassistant/components/awair/translations/no.json b/homeassistant/components/awair/translations/no.json new file mode 100644 index 00000000000..afce9147d0b --- /dev/null +++ b/homeassistant/components/awair/translations/no.json @@ -0,0 +1,21 @@ +{ + "config": { + "error": { + "unknown": "Ukjent Awair API-feil." + }, + "step": { + "reauth": { + "data": { + "email": "Epost" + }, + "description": "Skriv inn tilgangstokenet for Awair-utviklere p\u00e5 nytt." + }, + "user": { + "data": { + "email": "Epost " + }, + "description": "Du m\u00e5 registrere deg for et Awair-utviklertilgangstoken p\u00e5: https://developer.getawair.com/onboard/login" + } + } + } +} \ No newline at end of file diff --git a/homeassistant/components/awair/translations/ru.json b/homeassistant/components/awair/translations/ru.json new file mode 100644 index 00000000000..7a8f9e1b5c4 --- /dev/null +++ b/homeassistant/components/awair/translations/ru.json @@ -0,0 +1,29 @@ +{ + "config": { + "abort": { + "already_configured": "\u0423\u0447\u0451\u0442\u043d\u0430\u044f \u0437\u0430\u043f\u0438\u0441\u044c \u0443\u0436\u0435 \u0434\u043e\u0431\u0430\u0432\u043b\u0435\u043d\u0430.", + "no_devices": "\u0423\u0441\u0442\u0440\u043e\u0439\u0441\u0442\u0432\u0430 \u043d\u0435 \u043d\u0430\u0439\u0434\u0435\u043d\u044b \u0432 \u0441\u0435\u0442\u0438.", + "reauth_successful": "\u0422\u043e\u043a\u0435\u043d \u0434\u043e\u0441\u0442\u0443\u043f\u0430 \u0443\u0441\u043f\u0435\u0448\u043d\u043e \u043e\u0431\u043d\u043e\u0432\u043b\u0435\u043d." + }, + "error": { + "auth": "\u041d\u0435\u0432\u0435\u0440\u043d\u044b\u0439 \u0442\u043e\u043a\u0435\u043d \u0434\u043e\u0441\u0442\u0443\u043f\u0430.", + "unknown": "\u041d\u0435\u0438\u0437\u0432\u0435\u0441\u0442\u043d\u0430\u044f \u043e\u0448\u0438\u0431\u043a\u0430." + }, + "step": { + "reauth": { + "data": { + "access_token": "\u0422\u043e\u043a\u0435\u043d \u0434\u043e\u0441\u0442\u0443\u043f\u0430", + "email": "\u0410\u0434\u0440\u0435\u0441 \u044d\u043b\u0435\u043a\u0442\u0440\u043e\u043d\u043d\u043e\u0439 \u043f\u043e\u0447\u0442\u044b" + }, + "description": "\u041f\u043e\u0436\u0430\u043b\u0443\u0439\u0441\u0442\u0430, \u0432\u0432\u0435\u0434\u0438\u0442\u0435 \u043f\u043e\u0432\u0442\u043e\u0440\u043d\u043e \u0412\u0430\u0448 \u0442\u043e\u043a\u0435\u043d \u0434\u043e\u0441\u0442\u0443\u043f\u0430." 
+ }, + "user": { + "data": { + "access_token": "\u0422\u043e\u043a\u0435\u043d \u0434\u043e\u0441\u0442\u0443\u043f\u0430", + "email": "\u0410\u0434\u0440\u0435\u0441 \u044d\u043b\u0435\u043a\u0442\u0440\u043e\u043d\u043d\u043e\u0439 \u043f\u043e\u0447\u0442\u044b" + }, + "description": "\u0414\u043b\u044f \u043f\u043e\u043b\u0443\u0447\u0435\u043d\u0438\u044f \u0442\u043e\u043a\u0435\u043d\u0430 \u0434\u043e\u0441\u0442\u0443\u043f\u0430 \u043a Awair \u0412\u044b \u0434\u043e\u043b\u0436\u043d\u044b \u0437\u0430\u0440\u0435\u0433\u0438\u0441\u0442\u0440\u0438\u0440\u043e\u0432\u0430\u0442\u044c\u0441\u044f \u043f\u043e \u0430\u0434\u0440\u0435\u0441\u0443: https://developer.getawair.com/onboard/login" + } + } + } +} \ No newline at end of file diff --git a/homeassistant/components/awair/translations/zh-Hant.json b/homeassistant/components/awair/translations/zh-Hant.json new file mode 100644 index 00000000000..0a3e877d749 --- /dev/null +++ b/homeassistant/components/awair/translations/zh-Hant.json @@ -0,0 +1,29 @@ +{ + "config": { + "abort": { + "already_configured": "\u5e33\u865f\u5df2\u7d93\u8a2d\u5b9a\u5b8c\u6210", + "no_devices": "\u7db2\u8def\u4e0a\u627e\u4e0d\u5230\u8a2d\u5099", + "reauth_successful": "\u5b58\u53d6\u5bc6\u9470 \u5df2\u6210\u529f\u66f4\u65b0" + }, + "error": { + "auth": "\u5b58\u53d6\u5bc6\u9470\u7121\u6548", + "unknown": "\u672a\u77e5 Awair API \u932f\u8aa4\u3002" + }, + "step": { + "reauth": { + "data": { + "access_token": "\u5b58\u53d6\u5bc6\u9470", + "email": "\u96fb\u5b50\u90f5\u4ef6" + }, + "description": "\u8acb\u91cd\u65b0\u8f38\u5165 Awair \u958b\u767c\u8005\u5b58\u53d6\u5bc6\u9470\u3002" + }, + "user": { + "data": { + "access_token": "\u5b58\u53d6\u5bc6\u9470", + "email": "\u96fb\u5b50\u90f5\u4ef6" + }, + "description": "\u5fc5\u9808\u5148\u8a3b\u518a Awair \u958b\u767c\u8005\u5b58\u53d6\u5bc6\u9470\uff1ahttps://developer.getawair.com/onboard/login" + } + } + } +} \ No newline at end of file diff --git a/homeassistant/components/axis/binary_sensor.py b/homeassistant/components/axis/binary_sensor.py index c9e8436fdeb..feae2c8fc99 100644 --- a/homeassistant/components/axis/binary_sensor.py +++ b/homeassistant/components/axis/binary_sensor.py @@ -42,7 +42,9 @@ async def async_setup_entry(hass, config_entry, async_add_entities): """Add binary sensor from Axis device.""" event = device.api.event[event_id] - if event.CLASS != CLASS_OUTPUT: + if event.CLASS != CLASS_OUTPUT and not ( + event.CLASS == CLASS_LIGHT and event.TYPE == "Light" + ): async_add_entities([AxisBinarySensor(event, device)], True) device.listeners.append( diff --git a/homeassistant/components/axis/camera.py b/homeassistant/components/axis/camera.py index 8e7e4592cb6..69047268b07 100644 --- a/homeassistant/components/axis/camera.py +++ b/homeassistant/components/axis/camera.py @@ -27,7 +27,7 @@ async def async_setup_entry(hass, config_entry, async_add_entities): device = hass.data[AXIS_DOMAIN][config_entry.unique_id] - if not device.option_camera: + if not device.api.vapix.params.image_format: return async_add_entities([AxisCamera(device)]) diff --git a/homeassistant/components/axis/const.py b/homeassistant/components/axis/const.py index 203bbdf94c7..12a10391e4c 100644 --- a/homeassistant/components/axis/const.py +++ b/homeassistant/components/axis/const.py @@ -3,6 +3,7 @@ import logging from homeassistant.components.binary_sensor import DOMAIN as BINARY_SENSOR_DOMAIN from homeassistant.components.camera import DOMAIN as CAMERA_DOMAIN +from homeassistant.components.light import DOMAIN as 
LIGHT_DOMAIN from homeassistant.components.switch import DOMAIN as SWITCH_DOMAIN LOGGER = logging.getLogger(__package__) @@ -11,7 +12,6 @@ DOMAIN = "axis" ATTR_MANUFACTURER = "Axis Communications AB" -CONF_CAMERA = "camera" CONF_EVENTS = "events" CONF_MODEL = "model" CONF_STREAM_PROFILE = "stream_profile" @@ -20,4 +20,4 @@ DEFAULT_EVENTS = True DEFAULT_STREAM_PROFILE = "No stream profile" DEFAULT_TRIGGER_TIME = 0 -PLATFORMS = [BINARY_SENSOR_DOMAIN, CAMERA_DOMAIN, SWITCH_DOMAIN] +PLATFORMS = [BINARY_SENSOR_DOMAIN, CAMERA_DOMAIN, LIGHT_DOMAIN, SWITCH_DOMAIN] diff --git a/homeassistant/components/axis/device.py b/homeassistant/components/axis/device.py index bf1639559f2..18845ce12a3 100644 --- a/homeassistant/components/axis/device.py +++ b/homeassistant/components/axis/device.py @@ -29,7 +29,6 @@ from homeassistant.setup import async_when_setup from .const import ( ATTR_MANUFACTURER, - CONF_CAMERA, CONF_EVENTS, CONF_MODEL, CONF_STREAM_PROFILE, @@ -78,12 +77,6 @@ class AxisNetworkDevice: """Return the serial number of this device.""" return self.config_entry.unique_id - @property - def option_camera(self): - """Config entry option defining if camera should be used.""" - supported_formats = self.api.vapix.params.image_format - return self.config_entry.options.get(CONF_CAMERA, bool(supported_formats)) - @property def option_events(self): """Config entry option defining if platforms based on events should be created.""" diff --git a/homeassistant/components/axis/light.py b/homeassistant/components/axis/light.py new file mode 100644 index 00000000000..75b2d59e5f5 --- /dev/null +++ b/homeassistant/components/axis/light.py @@ -0,0 +1,116 @@ +"""Support for Axis lights.""" + +from axis.event_stream import CLASS_LIGHT + +from homeassistant.components.light import ( + ATTR_BRIGHTNESS, + SUPPORT_BRIGHTNESS, + LightEntity, +) +from homeassistant.core import callback +from homeassistant.helpers.dispatcher import async_dispatcher_connect + +from .axis_base import AxisEventBase +from .const import DOMAIN as AXIS_DOMAIN + + +async def async_setup_entry(hass, config_entry, async_add_entities): + """Set up an Axis light.""" + device = hass.data[AXIS_DOMAIN][config_entry.unique_id] + + if not device.api.vapix.light_control: + return + + @callback + def async_add_sensor(event_id): + """Add light from Axis device.""" + event = device.api.event[event_id] + + if event.CLASS == CLASS_LIGHT and event.TYPE == "Light": + async_add_entities([AxisLight(event, device)], True) + + device.listeners.append( + async_dispatcher_connect(hass, device.signal_new_event, async_add_sensor) + ) + + +class AxisLight(AxisEventBase, LightEntity): + """Representation of a light Axis event.""" + + def __init__(self, event, device): + """Initialize the Axis light.""" + super().__init__(event, device) + + self.light_id = f"led{self.event.id}" + + self.current_intensity = 0 + self.max_intensity = 0 + + self._features = SUPPORT_BRIGHTNESS + + async def async_added_to_hass(self) -> None: + """Subscribe lights events.""" + await super().async_added_to_hass() + + def get_light_capabilities(): + """Get light capabilities.""" + current_intensity = self.device.api.vapix.light_control.get_current_intensity( + self.light_id + ) + self.current_intensity = current_intensity["data"]["intensity"] + + max_intensity = self.device.api.vapix.light_control.get_valid_intensity( + self.light_id + ) + self.max_intensity = max_intensity["data"]["ranges"][0]["high"] + + await self.hass.async_add_executor_job(get_light_capabilities) + + @property + def
supported_features(self): + """Flag supported features.""" + return self._features + + @property + def name(self): + """Return the name of the light.""" + light_type = self.device.api.vapix.light_control[self.light_id].light_type + return f"{self.device.name} {light_type} {self.event.TYPE} {self.event.id}" + + @property + def is_on(self): + """Return true if light is on.""" + return self.event.is_tripped + + @property + def brightness(self): + """Return the brightness of this light between 0..255.""" + return int((self.current_intensity / self.max_intensity) * 255) + + def turn_on(self, **kwargs): + """Turn on light.""" + if not self.is_on: + self.device.api.vapix.light_control.activate_light(self.light_id) + + if ATTR_BRIGHTNESS in kwargs: + intensity = int((kwargs[ATTR_BRIGHTNESS] / 255) * self.max_intensity) + self.device.api.vapix.light_control.set_manual_intensity( + self.light_id, intensity + ) + + def turn_off(self, **kwargs): + """Turn off light.""" + if self.is_on: + self.device.api.vapix.light_control.deactivate_light(self.light_id) + + def update(self): + """Update brightness.""" + current_intensity = self.device.api.vapix.light_control.get_current_intensity( + self.light_id + ) + self.current_intensity = current_intensity["data"]["intensity"] + + @property + def should_poll(self): + """Brightness needs polling.""" + return True diff --git a/homeassistant/components/axis/manifest.json b/homeassistant/components/axis/manifest.json index ea5b024e8fb..95175fce51a 100644 --- a/homeassistant/components/axis/manifest.json +++ b/homeassistant/components/axis/manifest.json @@ -3,7 +3,7 @@ "name": "Axis", "config_flow": true, "documentation": "https://www.home-assistant.io/integrations/axis", - "requirements": ["axis==31"], + "requirements": ["axis==33"], "zeroconf": ["_axis-video._tcp.local."], "after_dependencies": ["mqtt"], "codeowners": ["@Kane610"] diff --git a/homeassistant/components/axis/translations/es.json b/homeassistant/components/axis/translations/es.json index 38a33309145..045f939ab3c 100644 --- a/homeassistant/components/axis/translations/es.json +++ b/homeassistant/components/axis/translations/es.json @@ -8,7 +8,7 @@ }, "error": { "already_configured": "El dispositivo ya est\u00e1 configurado", - "already_in_progress": "El flujo de configuraci\u00f3n del dispositivo ya est\u00e1 en curso.", + "already_in_progress": "El flujo de configuraci\u00f3n del dispositivo ya est\u00e1 en marcha.", "device_unavailable": "El dispositivo no est\u00e1 disponible", "faulty_credentials": "Credenciales de usuario incorrectas" }, diff --git a/homeassistant/components/baidu/tts.py b/homeassistant/components/baidu/tts.py index 2d0857de135..5a14fc78020 100644 --- a/homeassistant/components/baidu/tts.py +++ b/homeassistant/components/baidu/tts.py @@ -12,6 +12,7 @@ _LOGGER = logging.getLogger(__name__) SUPPORTED_LANGUAGES = ["zh"] DEFAULT_LANG = "zh" +SUPPORTED_PERSON = [0, 1, 3, 4, 5, 103, 106, 110, 111] CONF_APP_ID = "app_id" CONF_SECRET_KEY = "secret_key" @@ -35,9 +36,7 @@ PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( vol.Optional(CONF_VOLUME, default=5): vol.All( vol.Coerce(int), vol.Range(min=0, max=15) ), - vol.Optional(CONF_PERSON, default=0): vol.All( - vol.Coerce(int), vol.Range(min=0, max=4) - ), + vol.Optional(CONF_PERSON, default=0): vol.In(SUPPORTED_PERSON), } ) diff --git a/homeassistant/components/blebox/translations/fr.json b/homeassistant/components/blebox/translations/fr.json index d2f43a2c328..75d506a8212 100644 --- a/homeassistant/components/blebox/translations/fr.json +++ 
b/homeassistant/components/blebox/translations/fr.json @@ -12,6 +12,7 @@ "step": { "user": { "data": { + "host": "Adresse IP", "port": "Port" }, "description": "Configurez votre BleBox pour l'int\u00e9grer \u00e0 Home Assistant.", diff --git a/homeassistant/components/blebox/translations/no.json b/homeassistant/components/blebox/translations/no.json index ff6073410ac..03f054687ae 100644 --- a/homeassistant/components/blebox/translations/no.json +++ b/homeassistant/components/blebox/translations/no.json @@ -13,6 +13,7 @@ "step": { "user": { "data": { + "host": "IP adresse", "port": "Port" }, "description": "Konfigurer BleBox-en til \u00e5 integreres med Home Assistant.", diff --git a/homeassistant/components/blink/__init__.py b/homeassistant/components/blink/__init__.py index 04f9652bcb5..2344ce7b432 100644 --- a/homeassistant/components/blink/__init__.py +++ b/homeassistant/components/blink/__init__.py @@ -14,6 +14,7 @@ from homeassistant.const import ( CONF_SCAN_INTERVAL, CONF_USERNAME, ) +from homeassistant.core import callback from homeassistant.helpers import config_validation as cv from .const import ( @@ -58,7 +59,7 @@ def _blink_startup_wrapper(entry): no_prompt=True, device_id=DEVICE_ID, ) - blink.refresh_rate = entry.data[CONF_SCAN_INTERVAL] + blink.refresh_rate = entry.options.get(CONF_SCAN_INTERVAL, DEFAULT_SCAN_INTERVAL) try: blink.login_response = entry.data["login_response"] @@ -91,6 +92,8 @@ async def async_setup(hass, config): async def async_setup_entry(hass, entry): """Set up Blink via config entry.""" + _async_import_options_from_data_if_missing(hass, entry) + hass.data[DOMAIN][entry.entry_id] = await hass.async_add_executor_job( _blink_startup_wrapper, entry ) @@ -130,6 +133,16 @@ async def async_setup_entry(hass, entry): return True +@callback +def _async_import_options_from_data_if_missing(hass, entry): + options = dict(entry.options) + if CONF_SCAN_INTERVAL not in entry.options: + options[CONF_SCAN_INTERVAL] = entry.data.get( + CONF_SCAN_INTERVAL, DEFAULT_SCAN_INTERVAL + ) + hass.config_entries.async_update_entry(entry, options=options) + + async def async_unload_entry(hass, entry): """Unload Blink entry.""" unload_ok = all( diff --git a/homeassistant/components/blink/config_flow.py b/homeassistant/components/blink/config_flow.py index 281dee17cb1..4cd89175ab6 100644 --- a/homeassistant/components/blink/config_flow.py +++ b/homeassistant/components/blink/config_flow.py @@ -11,6 +11,7 @@ from homeassistant.const import ( CONF_SCAN_INTERVAL, CONF_USERNAME, ) +from homeassistant.core import callback from .const import DEFAULT_OFFSET, DEFAULT_SCAN_INTERVAL, DEVICE_ID, DOMAIN @@ -40,10 +41,15 @@ class BlinkConfigFlow(config_entries.ConfigFlow, domain=DOMAIN): self.data = { CONF_USERNAME: "", CONF_PASSWORD: "", - CONF_SCAN_INTERVAL: DEFAULT_SCAN_INTERVAL, "login_response": None, } + @staticmethod + @callback + def async_get_options_flow(config_entry): + """Get options flow for this handler.""" + return BlinkOptionsFlowHandler(config_entry) + async def async_step_user(self, user_input=None): """Handle a flow initiated by the user.""" errors = {} @@ -54,7 +60,7 @@ class BlinkConfigFlow(config_entries.ConfigFlow, domain=DOMAIN): await self.async_set_unique_id(self.data[CONF_USERNAME]) if CONF_SCAN_INTERVAL in user_input: - self.data[CONF_SCAN_INTERVAL] = user_input["scan_interval"] + self.data[CONF_SCAN_INTERVAL] = user_input[CONF_SCAN_INTERVAL] self.blink = Blink( username=self.data[CONF_USERNAME], @@ -107,6 +113,40 @@ class BlinkConfigFlow(config_entries.ConfigFlow, 
domain=DOMAIN): return await self.async_step_user(import_data) +class BlinkOptionsFlowHandler(config_entries.OptionsFlow): + """Handle Blink options.""" + + def __init__(self, config_entry): + """Initialize Blink options flow.""" + self.config_entry = config_entry + self.options = dict(config_entry.options) + self.blink = None + + async def async_step_init(self, user_input=None): + """Manage the Blink options.""" + self.blink = self.hass.data[DOMAIN][self.config_entry.entry_id] + self.options[CONF_SCAN_INTERVAL] = self.blink.refresh_rate + + return await self.async_step_simple_options() + + async def async_step_simple_options(self, user_input=None): + """For simple options.""" + if user_input is not None: + self.options.update(user_input) + self.blink.refresh_rate = user_input[CONF_SCAN_INTERVAL] + return self.async_create_entry(title="", data=self.options) + + options = self.config_entry.options + scan_interval = options.get(CONF_SCAN_INTERVAL, DEFAULT_SCAN_INTERVAL) + + return self.async_show_form( + step_id="simple_options", + data_schema=vol.Schema( + {vol.Optional(CONF_SCAN_INTERVAL, default=scan_interval,): int} + ), + ) + + class Require2FA(exceptions.HomeAssistantError): """Error to indicate we require 2FA.""" diff --git a/homeassistant/components/blink/strings.json b/homeassistant/components/blink/strings.json index dcd4a488c5c..e3bbe4006f3 100644 --- a/homeassistant/components/blink/strings.json +++ b/homeassistant/components/blink/strings.json @@ -21,5 +21,16 @@ "abort": { "already_configured": "[%key:common::config_flow::abort::already_configured_device%]" } - } + }, + "options": { + "step": { + "simple_options": { + "data": { + "scan_interval": "Scan Interval (seconds)" + }, + "title": "Blink options", + "description": "Configure Blink integration" + } + } + } } diff --git a/homeassistant/components/blink/translations/ca.json b/homeassistant/components/blink/translations/ca.json index 01dac3598a3..1984840b3ea 100644 --- a/homeassistant/components/blink/translations/ca.json +++ b/homeassistant/components/blink/translations/ca.json @@ -29,7 +29,9 @@ "simple_options": { "data": { "scan_interval": "Interval d'escaneig (segons)" - } + }, + "description": "Configura la integraci\u00f3 Blink", + "title": "Opcions de Blink" } } } diff --git a/homeassistant/components/blink/translations/fr.json b/homeassistant/components/blink/translations/fr.json index 9be94a60a3e..5fc163849aa 100644 --- a/homeassistant/components/blink/translations/fr.json +++ b/homeassistant/components/blink/translations/fr.json @@ -1,5 +1,8 @@ { "config": { + "abort": { + "already_configured": "P\u00e9riph\u00e9rique d\u00e9j\u00e0 configur\u00e9" + }, "error": { "invalid_auth": "Authentification invalide", "unknown": "Erreur inattendue" @@ -20,5 +23,15 @@ "title": "Connectez-vous avec un compte Blink" } } + }, + "options": { + "step": { + "simple_options": { + "data": { + "scan_interval": "Intervalle de balayage (secondes)" + }, + "title": "Options de clignotement" + } + } } } \ No newline at end of file diff --git a/homeassistant/components/blink/translations/lb.json b/homeassistant/components/blink/translations/lb.json index 27ab3e6fd87..830f5364896 100644 --- a/homeassistant/components/blink/translations/lb.json +++ b/homeassistant/components/blink/translations/lb.json @@ -23,5 +23,16 @@ "title": "Mam Blink Kont verbannen" } } + }, + "options": { + "step": { + "simple_options": { + "data": { + "scan_interval": "Scan Intervall (sekonnen)" + }, + "description": "Blink Integratioun ariichten", + "title": "Blink 
Optiounen" + } + } } } \ No newline at end of file diff --git a/homeassistant/components/bmw_connected_drive/__init__.py b/homeassistant/components/bmw_connected_drive/__init__.py index b8f60dafdbb..b99ae97aa61 100644 --- a/homeassistant/components/bmw_connected_drive/__init__.py +++ b/homeassistant/components/bmw_connected_drive/__init__.py @@ -41,6 +41,7 @@ _SERVICE_MAP = { "light_flash": "trigger_remote_light_flash", "sound_horn": "trigger_remote_horn", "activate_air_conditioning": "trigger_remote_air_conditioning", + "find_vehicle": "trigger_remote_vehicle_finder", } diff --git a/homeassistant/components/bmw_connected_drive/manifest.json b/homeassistant/components/bmw_connected_drive/manifest.json index 4521af8d36e..c7cacfb6f63 100644 --- a/homeassistant/components/bmw_connected_drive/manifest.json +++ b/homeassistant/components/bmw_connected_drive/manifest.json @@ -2,7 +2,7 @@ "domain": "bmw_connected_drive", "name": "BMW Connected Drive", "documentation": "https://www.home-assistant.io/integrations/bmw_connected_drive", - "requirements": ["bimmer_connected==0.7.5"], + "requirements": ["bimmer_connected==0.7.7"], "dependencies": [], - "codeowners": ["@gerard33"] + "codeowners": ["@gerard33", "@rikroe"] } diff --git a/homeassistant/components/bmw_connected_drive/services.yaml b/homeassistant/components/bmw_connected_drive/services.yaml index b9605429a8e..170289edaea 100644 --- a/homeassistant/components/bmw_connected_drive/services.yaml +++ b/homeassistant/components/bmw_connected_drive/services.yaml @@ -35,6 +35,16 @@ activate_air_conditioning: The vehicle identification number (VIN) of the vehicle, 17 characters example: WBANXXXXXX1234567 +find_vehicle: + description: > + Request vehicle to update the gps location. The vehicle is identified via the vin + (see below). + fields: + vin: + description: > + The vehicle identification number (VIN) of the vehicle, 17 characters + example: WBANXXXXXX1234567 + update_state: description: > Fetch the last state of the vehicles of all your accounts from the BMW diff --git a/homeassistant/components/braviatv/translations/no.json b/homeassistant/components/braviatv/translations/no.json index ff86974f763..cd687d8f2d0 100644 --- a/homeassistant/components/braviatv/translations/no.json +++ b/homeassistant/components/braviatv/translations/no.json @@ -19,7 +19,7 @@ }, "user": { "data": { - "host": "TV-vertsnavn eller IP-adresse" + "host": "Vert" }, "description": "Sett opp Sony Bravia TV-integrasjon. 
Hvis du har problemer med konfigurasjonen, g\u00e5 til: [https://www.home-assistant.io/integrations/braviatv](https://www.home-assistant.io/integrations/braviatv)\n\n Forsikre deg om at TV-en er sl\u00e5tt p\u00e5.", "title": "" diff --git a/homeassistant/components/broadlink/__init__.py b/homeassistant/components/broadlink/__init__.py index d8b7f60b5b4..fe0c79a0b19 100644 --- a/homeassistant/components/broadlink/__init__.py +++ b/homeassistant/components/broadlink/__init__.py @@ -2,7 +2,6 @@ import asyncio from base64 import b64decode, b64encode from binascii import unhexlify -from datetime import timedelta import logging import re @@ -13,7 +12,7 @@ from homeassistant.const import CONF_HOST import homeassistant.helpers.config_validation as cv from homeassistant.util.dt import utcnow -from .const import CONF_PACKET, DOMAIN, SERVICE_LEARN, SERVICE_SEND +from .const import CONF_PACKET, DOMAIN, LEARNING_TIMEOUT, SERVICE_LEARN, SERVICE_SEND _LOGGER = logging.getLogger(__name__) @@ -84,7 +83,7 @@ async def async_setup_service(hass, host, device): _LOGGER.info("Press the key you want Home Assistant to learn") start_time = utcnow() - while (utcnow() - start_time) < timedelta(seconds=20): + while (utcnow() - start_time) < LEARNING_TIMEOUT: await asyncio.sleep(1) try: packet = await device.async_request(device.api.check_data) diff --git a/homeassistant/components/broadlink/const.py b/homeassistant/components/broadlink/const.py index 3264ec225ca..a8a448a9aff 100644 --- a/homeassistant/components/broadlink/const.py +++ b/homeassistant/components/broadlink/const.py @@ -1,7 +1,8 @@ """Constants for broadlink platform.""" +from datetime import timedelta + CONF_PACKET = "packet" -DEFAULT_LEARNING_TIMEOUT = 20 DEFAULT_NAME = "Broadlink" DEFAULT_PORT = 80 DEFAULT_RETRY = 3 @@ -9,6 +10,8 @@ DEFAULT_TIMEOUT = 5 DOMAIN = "broadlink" +LEARNING_TIMEOUT = timedelta(seconds=30) + SERVICE_LEARN = "learn" SERVICE_SEND = "send" diff --git a/homeassistant/components/broadlink/remote.py b/homeassistant/components/broadlink/remote.py index 03ecb9b7634..d7b4c051bcc 100644 --- a/homeassistant/components/broadlink/remote.py +++ b/homeassistant/components/broadlink/remote.py @@ -24,7 +24,6 @@ from homeassistant.components.remote import ( ATTR_DELAY_SECS, ATTR_DEVICE, ATTR_NUM_REPEATS, - ATTR_TIMEOUT, DEFAULT_DELAY_SECS, DOMAIN as COMPONENT, PLATFORM_SCHEMA, @@ -40,10 +39,10 @@ from homeassistant.util.dt import utcnow from . 
import DOMAIN, data_packet, hostname, mac_address from .const import ( - DEFAULT_LEARNING_TIMEOUT, DEFAULT_NAME, DEFAULT_PORT, DEFAULT_TIMEOUT, + LEARNING_TIMEOUT, RM4_TYPES, RM_TYPES, ) @@ -74,10 +73,7 @@ SERVICE_SEND_SCHEMA = MINIMUM_SERVICE_SCHEMA.extend( ) SERVICE_LEARN_SCHEMA = MINIMUM_SERVICE_SCHEMA.extend( - { - vol.Optional(ATTR_ALTERNATIVE, default=False): cv.boolean, - vol.Optional(ATTR_TIMEOUT, default=DEFAULT_LEARNING_TIMEOUT): cv.positive_int, - } + {vol.Optional(ATTR_ALTERNATIVE, default=False): cv.boolean} ) PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( @@ -267,7 +263,6 @@ class BroadlinkRemote(RemoteEntity): commands = kwargs[ATTR_COMMAND] device = kwargs[ATTR_DEVICE] toggle = kwargs[ATTR_ALTERNATIVE] - timeout = kwargs[ATTR_TIMEOUT] if not self._state: return @@ -275,66 +270,47 @@ class BroadlinkRemote(RemoteEntity): should_store = False for command in commands: try: - should_store |= await self._async_learn_code( - command, device, toggle, timeout - ) - except (AuthorizationError, DeviceOfflineError): + code = await self._async_learn_command(command) + if toggle: + code = [code, await self._async_learn_command(command)] + except (AuthorizationError, DeviceOfflineError) as err_msg: + _LOGGER.error("Failed to learn '%s': %s", command, err_msg) break - except BroadlinkException: - pass + except (BroadlinkException, TimeoutError) as err_msg: + _LOGGER.error("Failed to learn '%s': %s", command, err_msg) + continue + else: + self._codes.setdefault(device, {}).update({command: code}) + should_store = True if should_store: await self._code_storage.async_save(self._codes) - async def _async_learn_code(self, command, device, toggle, timeout): - """Learn a code from a remote. - - Capture an additional code for toggle commands. - """ + async def _async_learn_command(self, command): + """Learn a command from a remote.""" try: - if not toggle: - code = await self._async_capture_code(command, timeout) - else: - code = [ - await self._async_capture_code(command, timeout), - await self._async_capture_code(command, timeout), - ] - except TimeoutError: - _LOGGER.error("Failed to learn '%s/%s': No code received", command, device) - return False + await self.device.async_request(self.device.api.enter_learning) except BroadlinkException as err_msg: - _LOGGER.error("Failed to learn '%s/%s': %s", command, device, err_msg) + _LOGGER.debug("Failed to enter learning mode: %s", err_msg) raise - self._codes.setdefault(device, {}).update({command: code}) - return True - - async def _async_capture_code(self, command, timeout): - """Enter learning mode and capture a code from a remote.""" - await self.device.async_request(self.device.api.enter_learning) - self.hass.components.persistent_notification.async_create( f"Press the '{command}' button.", title="Learn command", notification_id="learn_command", ) - code = None - start_time = utcnow() - while (utcnow() - start_time) < timedelta(seconds=timeout): - await asyncio.sleep(1) - try: - code = await self.device.async_request(self.device.api.check_data) - except (ReadError, StorageError): - continue - else: - break - - self.hass.components.persistent_notification.async_dismiss( - notification_id="learn_command" - ) - - if code is None: - raise TimeoutError - - return b64encode(code).decode("utf8") + try: + start_time = utcnow() + while (utcnow() - start_time) < LEARNING_TIMEOUT: + await asyncio.sleep(1) + try: + code = await self.device.async_request(self.device.api.check_data) + except (ReadError, StorageError): + continue + return 
b64encode(code).decode("utf8") + raise TimeoutError("No code received") + finally: + self.hass.components.persistent_notification.async_dismiss( + notification_id="learn_command" + ) diff --git a/homeassistant/components/brother/config_flow.py b/homeassistant/components/brother/config_flow.py index e50105e0b27..8b3a9539cc3 100644 --- a/homeassistant/components/brother/config_flow.py +++ b/homeassistant/components/brother/config_flow.py @@ -69,16 +69,18 @@ class BrotherConfigFlow(config_entries.ConfigFlow, domain=DOMAIN): step_id="user", data_schema=DATA_SCHEMA, errors=errors ) - async def async_step_zeroconf(self, user_input=None): + async def async_step_zeroconf(self, discovery_info): """Handle zeroconf discovery.""" - if user_input is None: + if discovery_info is None: return self.async_abort(reason="connection_error") - if not user_input.get("name") or not user_input["name"].startswith("Brother"): + if not discovery_info.get("name") or not discovery_info["name"].startswith( + "Brother" + ): return self.async_abort(reason="not_brother_printer") # Hostname is format: brother.local. - self.host = user_input["hostname"].rstrip(".") + self.host = discovery_info["hostname"].rstrip(".") self.brother = Brother(self.host) try: diff --git a/homeassistant/components/brother/translations/no.json b/homeassistant/components/brother/translations/no.json index 51716f66e3d..bfc5d811f42 100644 --- a/homeassistant/components/brother/translations/no.json +++ b/homeassistant/components/brother/translations/no.json @@ -13,7 +13,7 @@ "step": { "user": { "data": { - "host": "Vertsnavn eller IP-adresse til skriveren", + "host": "Vert", "type": "Skriver type" }, "description": "Sett opp Brother skriver integrasjonen. Hvis du har problemer med konfigurasjonen, bes\u00f8k dokumentasjonen her: [https://www.home-assistant.io/integrations/brother](https://www.home-assistant.io/integrations/brother)" diff --git a/homeassistant/components/cast/translations/et.json b/homeassistant/components/cast/translations/et.json deleted file mode 100644 index 0e652624ef6..00000000000 --- a/homeassistant/components/cast/translations/et.json +++ /dev/null @@ -1,9 +0,0 @@ -{ - "config": { - "step": { - "confirm": { - "title": "" - } - } - } -} \ No newline at end of file diff --git a/homeassistant/components/cast/translations/hr.json b/homeassistant/components/cast/translations/hr.json deleted file mode 100644 index e3f09f8b09c..00000000000 --- a/homeassistant/components/cast/translations/hr.json +++ /dev/null @@ -1,9 +0,0 @@ -{ - "config": { - "step": { - "confirm": { - "title": "Google Cast" - } - } - } -} \ No newline at end of file diff --git a/homeassistant/components/cert_expiry/__init__.py b/homeassistant/components/cert_expiry/__init__.py index 28a79a3e505..38c73f8df2b 100644 --- a/homeassistant/components/cert_expiry/__init__.py +++ b/homeassistant/components/cert_expiry/__init__.py @@ -1,6 +1,20 @@ """The cert_expiry component.""" +from datetime import timedelta +import logging + from homeassistant.config_entries import ConfigEntry +from homeassistant.const import CONF_HOST, CONF_PORT +from homeassistant.exceptions import ConfigEntryNotReady from homeassistant.helpers.typing import HomeAssistantType +from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed + +from .const import DEFAULT_PORT, DOMAIN +from .errors import TemporaryFailure, ValidationFailure +from .helper import get_cert_expiry_timestamp + +_LOGGER = logging.getLogger(__name__) + +SCAN_INTERVAL = timedelta(hours=12) async def 
async_setup(hass, config): @@ -10,6 +24,20 @@ async def async_setup(hass, config): async def async_setup_entry(hass: HomeAssistantType, entry: ConfigEntry): """Load the saved entities.""" + host = entry.data[CONF_HOST] + port = entry.data[CONF_PORT] + + coordinator = CertExpiryDataUpdateCoordinator(hass, host, port) + await coordinator.async_refresh() + + if not coordinator.last_update_success: + raise ConfigEntryNotReady + + hass.data.setdefault(DOMAIN, {}) + hass.data[DOMAIN][entry.entry_id] = coordinator + + if entry.unique_id is None: + hass.config_entries.async_update_entry(entry, unique_id=f"{host}:{port}") hass.async_create_task( hass.config_entries.async_forward_entry_setup(entry, "sensor") @@ -20,3 +48,37 @@ async def async_setup_entry(hass: HomeAssistantType, entry: ConfigEntry): async def async_unload_entry(hass, entry): """Unload a config entry.""" return await hass.config_entries.async_forward_entry_unload(entry, "sensor") + + +class CertExpiryDataUpdateCoordinator(DataUpdateCoordinator): + """Class to manage fetching Cert Expiry data from single endpoint.""" + + def __init__(self, hass, host, port): + """Initialize global Cert Expiry data updater.""" + self.host = host + self.port = port + self.cert_error = None + self.is_cert_valid = False + + display_port = f":{port}" if port != DEFAULT_PORT else "" + name = f"{self.host}{display_port}" + + super().__init__( + hass, _LOGGER, name=name, update_interval=SCAN_INTERVAL, + ) + + async def _async_update_data(self): + """Fetch certificate.""" + try: + timestamp = await get_cert_expiry_timestamp(self.hass, self.host, self.port) + except TemporaryFailure as err: + raise UpdateFailed(err.args[0]) + except ValidationFailure as err: + self.cert_error = err + self.is_cert_valid = False + _LOGGER.error("Certificate validation error: %s [%s]", self.host, err) + return None + + self.cert_error = None + self.is_cert_valid = True + return timestamp diff --git a/homeassistant/components/cert_expiry/config_flow.py b/homeassistant/components/cert_expiry/config_flow.py index 3f77701906f..e23d832bb20 100644 --- a/homeassistant/components/cert_expiry/config_flow.py +++ b/homeassistant/components/cert_expiry/config_flow.py @@ -13,7 +13,7 @@ from .errors import ( ResolveFailed, ValidationFailure, ) -from .helper import get_cert_time_to_expiry +from .helper import get_cert_expiry_timestamp _LOGGER = logging.getLogger(__name__) @@ -31,7 +31,7 @@ class CertexpiryConfigFlow(config_entries.ConfigFlow, domain=DOMAIN): async def _test_connection(self, user_input=None): """Test connection to the server and try to get the certificate.""" try: - await get_cert_time_to_expiry( + await get_cert_expiry_timestamp( self.hass, user_input[CONF_HOST], user_input.get(CONF_PORT, DEFAULT_PORT), diff --git a/homeassistant/components/cert_expiry/helper.py b/homeassistant/components/cert_expiry/helper.py index bb9f2762f3a..f4caee8abf2 100644 --- a/homeassistant/components/cert_expiry/helper.py +++ b/homeassistant/components/cert_expiry/helper.py @@ -1,8 +1,9 @@ """Helper functions for the Cert Expiry platform.""" -from datetime import datetime import socket import ssl +from homeassistant.util import dt + from .const import TIMEOUT from .errors import ( ConnectionRefused, @@ -23,8 +24,8 @@ def get_cert(host, port): return cert -async def get_cert_time_to_expiry(hass, hostname, port): - """Return the certificate's time to expiry in days.""" +async def get_cert_expiry_timestamp(hass, hostname, port): + """Return the certificate's expiration timestamp.""" try: cert = await 
hass.async_add_executor_job(get_cert, hostname, port) except socket.gaierror: @@ -39,6 +40,4 @@ async def get_cert_time_to_expiry(hass, hostname, port): raise ValidationFailure(err.args[0]) ts_seconds = ssl.cert_time_to_seconds(cert["notAfter"]) - timestamp = datetime.fromtimestamp(ts_seconds) - expiry = timestamp - datetime.today() - return expiry.days + return dt.utc_from_timestamp(ts_seconds) diff --git a/homeassistant/components/cert_expiry/sensor.py b/homeassistant/components/cert_expiry/sensor.py index ec1e9110317..55b72bdefcd 100644 --- a/homeassistant/components/cert_expiry/sensor.py +++ b/homeassistant/components/cert_expiry/sensor.py @@ -9,18 +9,17 @@ from homeassistant.config_entries import SOURCE_IMPORT from homeassistant.const import ( CONF_HOST, CONF_PORT, + DEVICE_CLASS_TIMESTAMP, EVENT_HOMEASSISTANT_START, TIME_DAYS, ) from homeassistant.core import callback -from homeassistant.exceptions import PlatformNotReady import homeassistant.helpers.config_validation as cv from homeassistant.helpers.entity import Entity from homeassistant.helpers.event import async_call_later +from homeassistant.util import dt from .const import DEFAULT_PORT, DOMAIN -from .errors import TemporaryFailure, ValidationFailure -from .helper import get_cert_time_to_expiry _LOGGER = logging.getLogger(__name__) @@ -56,63 +55,37 @@ async def async_setup_platform(hass, config, async_add_entities, discovery_info= async def async_setup_entry(hass, entry, async_add_entities): """Add cert-expiry entry.""" - days = 0 - error = None - hostname = entry.data[CONF_HOST] - port = entry.data[CONF_PORT] + coordinator = hass.data[DOMAIN][entry.entry_id] - if entry.unique_id is None: - hass.config_entries.async_update_entry(entry, unique_id=f"{hostname}:{port}") + sensors = [ + SSLCertificateDays(coordinator), + SSLCertificateTimestamp(coordinator), + ] - try: - days = await get_cert_time_to_expiry(hass, hostname, port) - except TemporaryFailure as err: - _LOGGER.error(err) - raise PlatformNotReady - except ValidationFailure as err: - error = err - - async_add_entities( - [SSLCertificate(hostname, port, days, error)], False, - ) - return True + async_add_entities(sensors, True) -class SSLCertificate(Entity): - """Implementation of the certificate expiry sensor.""" +class CertExpiryEntity(Entity): + """Defines a base Cert Expiry entity.""" - def __init__(self, server_name, server_port, days, error): - """Initialize the sensor.""" - self.server_name = server_name - self.server_port = server_port - display_port = f":{server_port}" if server_port != DEFAULT_PORT else "" - self._name = f"Cert Expiry ({self.server_name}{display_port})" - self._available = True - self._error = error - self._state = days - self._valid = False - if error is None: - self._valid = True + def __init__(self, coordinator): + """Initialize the Cert Expiry entity.""" + self.coordinator = coordinator + + async def async_added_to_hass(self): + """Connect to dispatcher listening for entity data notifications.""" + self.async_on_remove( + self.coordinator.async_add_listener(self.async_write_ha_state) + ) + + async def async_update(self): + """Update Cert Expiry entity.""" + await self.coordinator.async_request_refresh() @property - def name(self): - """Return the name of the sensor.""" - return self._name - - @property - def unique_id(self): - """Return a unique id for the sensor.""" - return f"{self.server_name}:{self.server_port}" - - @property - def unit_of_measurement(self): - """Return the unit this state is expressed in.""" - return TIME_DAYS - - 
@property - def state(self): - """Return the state of the sensor.""" - return self._state + def available(self): + """Return True if entity is available.""" + return self.coordinator.last_update_success @property def icon(self): @@ -120,42 +93,68 @@ class SSLCertificate(Entity): return "mdi:certificate" @property - def available(self): - """Return the availability of the sensor.""" - return self._available - - async def async_update(self): - """Fetch the certificate information.""" - try: - days_to_expiry = await get_cert_time_to_expiry( - self.hass, self.server_name, self.server_port - ) - except TemporaryFailure as err: - _LOGGER.error(err.args[0]) - self._available = False - return - except ValidationFailure as err: - _LOGGER.error( - "Certificate validation error: %s [%s]", self.server_name, err - ) - self._available = True - self._error = err - self._state = 0 - self._valid = False - return - except Exception: # pylint: disable=broad-except - _LOGGER.exception( - "Unknown error checking %s:%s", self.server_name, self.server_port - ) - self._available = False - return - - self._available = True - self._error = None - self._state = days_to_expiry - self._valid = True + def should_poll(self): + """Return the polling requirement of the entity.""" + return False @property def device_state_attributes(self): """Return additional sensor state attributes.""" - return {"is_valid": self._valid, "error": str(self._error)} + return { + "is_valid": self.coordinator.is_cert_valid, + "error": str(self.coordinator.cert_error), + } + + +class SSLCertificateDays(CertExpiryEntity): + """Implementation of the Cert Expiry days sensor.""" + + @property + def name(self): + """Return the name of the sensor.""" + return f"Cert Expiry ({self.coordinator.name})" + + @property + def state(self): + """Return the state of the sensor.""" + if not self.coordinator.is_cert_valid: + return 0 + + expiry = self.coordinator.data - dt.utcnow() + return expiry.days + + @property + def unique_id(self): + """Return a unique id for the sensor.""" + return f"{self.coordinator.host}:{self.coordinator.port}" + + @property + def unit_of_measurement(self): + """Return the unit this state is expressed in.""" + return TIME_DAYS + + +class SSLCertificateTimestamp(CertExpiryEntity): + """Implementation of the Cert Expiry timestamp sensor.""" + + @property + def device_class(self): + """Return the device class of the sensor.""" + return DEVICE_CLASS_TIMESTAMP + + @property + def name(self): + """Return the name of the sensor.""" + return f"Cert Expiry Timestamp ({self.coordinator.name})" + + @property + def state(self): + """Return the state of the sensor.""" + if self.coordinator.data: + return self.coordinator.data.isoformat() + return None + + @property + def unique_id(self): + """Return a unique id for the sensor.""" + return f"{self.coordinator.host}:{self.coordinator.port}-timestamp" diff --git a/homeassistant/components/cert_expiry/translations/cs.json b/homeassistant/components/cert_expiry/translations/cs.json deleted file mode 100644 index 58a5a281ea2..00000000000 --- a/homeassistant/components/cert_expiry/translations/cs.json +++ /dev/null @@ -1,8 +0,0 @@ -{ - "config": { - "error": { - "certificate_error": "Certifik\u00e1t nelze ov\u011b\u0159it", - "wrong_host": "Certifik\u00e1t neodpov\u00edd\u00e1 n\u00e1zvu hostitele" - } - } -} \ No newline at end of file diff --git a/homeassistant/components/cert_expiry/translations/pl.json b/homeassistant/components/cert_expiry/translations/pl.json index 7f92253507c..1561f30fcff 100644 
--- a/homeassistant/components/cert_expiry/translations/pl.json +++ b/homeassistant/components/cert_expiry/translations/pl.json @@ -16,7 +16,7 @@ "name": "Nazwa certyfikatu", "port": "Port" }, - "title": "Zdefiniuj certyfikat do przetestowania" + "title": "Zdefiniuj certyfikat do sprawdzenia" } } }, diff --git a/homeassistant/components/cloud/http_api.py b/homeassistant/components/cloud/http_api.py index c3809f76b8c..6710d8682e2 100644 --- a/homeassistant/components/cloud/http_api.py +++ b/homeassistant/components/cloud/http_api.py @@ -71,6 +71,8 @@ _CLOUD_ERRORS = { HTTP_INTERNAL_SERVER_ERROR, "Remote UI not compatible with 127.0.0.1/::1 as trusted proxies.", ), + asyncio.TimeoutError: (502, "Unable to reach the Home Assistant cloud."), + aiohttp.ClientError: (HTTP_INTERNAL_SERVER_ERROR, "Error making internal request",), } @@ -120,11 +122,6 @@ async def async_setup(hass): HTTP_BAD_REQUEST, "Password change required.", ), - asyncio.TimeoutError: (502, "Unable to reach the Home Assistant cloud."), - aiohttp.ClientError: ( - HTTP_INTERNAL_SERVER_ERROR, - "Error making internal request", - ), } ) @@ -166,10 +163,17 @@ def _ws_handle_cloud_errors(handler): def _process_cloud_exception(exc, where): """Process a cloud exception.""" - err_info = _CLOUD_ERRORS.get(exc.__class__) + err_info = None + + for err, value_info in _CLOUD_ERRORS.items(): + if isinstance(exc, err): + err_info = value_info + break + if err_info is None: _LOGGER.exception("Unexpected error processing request for %s", where) err_info = (502, f"Unexpected error: {exc}") + return err_info diff --git a/homeassistant/components/cloud/manifest.json b/homeassistant/components/cloud/manifest.json index b72aec18c34..8d58e98c0e5 100644 --- a/homeassistant/components/cloud/manifest.json +++ b/homeassistant/components/cloud/manifest.json @@ -2,7 +2,7 @@ "domain": "cloud", "name": "Home Assistant Cloud", "documentation": "https://www.home-assistant.io/integrations/cloud", - "requirements": ["hass-nabucasa==0.34.6"], + "requirements": ["hass-nabucasa==0.34.7"], "dependencies": ["http", "webhook", "alexa"], "after_dependencies": ["google_assistant"], "codeowners": ["@home-assistant/cloud"] diff --git a/homeassistant/components/config/config_entries.py b/homeassistant/components/config/config_entries.py index 584255764a3..32934d4e970 100644 --- a/homeassistant/components/config/config_entries.py +++ b/homeassistant/components/config/config_entries.py @@ -4,7 +4,7 @@ import voluptuous as vol import voluptuous_serialize from homeassistant import config_entries, data_entry_flow -from homeassistant.auth.permissions.const import CAT_CONFIG_ENTRIES +from homeassistant.auth.permissions.const import CAT_CONFIG_ENTRIES, POLICY_EDIT from homeassistant.components import websocket_api from homeassistant.components.http import HomeAssistantView from homeassistant.const import HTTP_NOT_FOUND @@ -180,7 +180,7 @@ class OptionManagerFlowIndexView(FlowManagerIndexView): handler in request is entry_id. 
""" if not request["hass_user"].is_admin: - raise Unauthorized(perm_category=CAT_CONFIG_ENTRIES, permission="edit") + raise Unauthorized(perm_category=CAT_CONFIG_ENTRIES, permission=POLICY_EDIT) # pylint: disable=no-value-for-parameter return await super().post(request) @@ -195,7 +195,7 @@ class OptionManagerFlowResourceView(FlowManagerResourceView): async def get(self, request, flow_id): """Get the current state of a data_entry_flow.""" if not request["hass_user"].is_admin: - raise Unauthorized(perm_category=CAT_CONFIG_ENTRIES, permission="edit") + raise Unauthorized(perm_category=CAT_CONFIG_ENTRIES, permission=POLICY_EDIT) return await super().get(request, flow_id) @@ -203,7 +203,7 @@ class OptionManagerFlowResourceView(FlowManagerResourceView): async def post(self, request, flow_id): """Handle a POST request.""" if not request["hass_user"].is_admin: - raise Unauthorized(perm_category=CAT_CONFIG_ENTRIES, permission="edit") + raise Unauthorized(perm_category=CAT_CONFIG_ENTRIES, permission=POLICY_EDIT) # pylint: disable=no-value-for-parameter return await super().post(request, flow_id) @@ -274,7 +274,7 @@ async def system_options_update(hass, connection, msg): {"type": "config_entries/update", "entry_id": str, vol.Optional("title"): str} ) async def config_entry_update(hass, connection, msg): - """Update config entry system options.""" + """Update config entry.""" changes = dict(msg) changes.pop("id") changes.pop("type") diff --git a/homeassistant/components/coronavirus/sensor.py b/homeassistant/components/coronavirus/sensor.py index 2887427ec6b..d24c33a7752 100644 --- a/homeassistant/components/coronavirus/sensor.py +++ b/homeassistant/components/coronavirus/sensor.py @@ -51,9 +51,14 @@ class CoronavirusSensor(Entity): def state(self): """State of the sensor.""" if self.country == OPTION_WORLDWIDE: - return sum( - getattr(case, self.info_type) for case in self.coordinator.data.values() - ) + sum_cases = 0 + for case in self.coordinator.data.values(): + value = getattr(case, self.info_type) + if value is None: + continue + sum_cases += value + + return sum_cases return getattr(self.coordinator.data[self.country], self.info_type) diff --git a/homeassistant/components/daikin/config_flow.py b/homeassistant/components/daikin/config_flow.py index 70553df2d91..467d91328b5 100644 --- a/homeassistant/components/daikin/config_flow.py +++ b/homeassistant/components/daikin/config_flow.py @@ -129,8 +129,8 @@ class FlowHandler(config_entries.ConfigFlow): async def async_step_zeroconf(self, discovery_info): """Prepare configuration for a discovered Daikin device.""" _LOGGER.debug("Zeroconf user_input: %s", discovery_info) - devices = Discovery().poll(discovery_info[CONF_HOST]) - await self.async_set_unique_id(next(iter(devices.values()))[KEY_MAC]) + devices = Discovery().poll(ip=discovery_info[CONF_HOST]) + await self.async_set_unique_id(next(iter(devices))[KEY_MAC]) self._abort_if_unique_id_configured() self.host = discovery_info[CONF_HOST] return await self.async_step_user() diff --git a/homeassistant/components/daikin/const.py b/homeassistant/components/daikin/const.py index 30d34b898d3..a28221dbcbf 100644 --- a/homeassistant/components/daikin/const.py +++ b/homeassistant/components/daikin/const.py @@ -5,11 +5,13 @@ from homeassistant.const import ( CONF_NAME, CONF_TYPE, CONF_UNIT_OF_MEASUREMENT, + DEVICE_CLASS_HUMIDITY, DEVICE_CLASS_POWER, DEVICE_CLASS_TEMPERATURE, ENERGY_KILO_WATT_HOUR, POWER_KILO_WATT, TEMP_CELSIUS, + UNIT_PERCENTAGE, ) ATTR_TARGET_TEMPERATURE = "target_temperature" @@ -18,11 
+20,14 @@ ATTR_OUTSIDE_TEMPERATURE = "outside_temperature" ATTR_TOTAL_POWER = "total_power" ATTR_COOL_ENERGY = "cool_energy" ATTR_HEAT_ENERGY = "heat_energy" +ATTR_HUMIDITY = "humidity" +ATTR_TARGET_HUMIDITY = "target_humidity" ATTR_STATE_ON = "on" ATTR_STATE_OFF = "off" SENSOR_TYPE_TEMPERATURE = "temperature" +SENSOR_TYPE_HUMIDITY = "humidity" SENSOR_TYPE_POWER = "power" SENSOR_TYPE_ENERGY = "energy" @@ -39,6 +44,18 @@ SENSOR_TYPES = { CONF_DEVICE_CLASS: DEVICE_CLASS_TEMPERATURE, CONF_UNIT_OF_MEASUREMENT: TEMP_CELSIUS, }, + ATTR_HUMIDITY: { + CONF_NAME: "Humidity", + CONF_TYPE: SENSOR_TYPE_HUMIDITY, + CONF_DEVICE_CLASS: DEVICE_CLASS_HUMIDITY, + CONF_UNIT_OF_MEASUREMENT: UNIT_PERCENTAGE, + }, + ATTR_TARGET_HUMIDITY: { + CONF_NAME: "Target Humidity", + CONF_TYPE: SENSOR_TYPE_HUMIDITY, + CONF_DEVICE_CLASS: DEVICE_CLASS_HUMIDITY, + CONF_UNIT_OF_MEASUREMENT: UNIT_PERCENTAGE, + }, ATTR_TOTAL_POWER: { CONF_NAME: "Total Power Consumption", CONF_TYPE: SENSOR_TYPE_POWER, diff --git a/homeassistant/components/daikin/manifest.json b/homeassistant/components/daikin/manifest.json index 1a180aa5ab6..dc9bc9653f8 100644 --- a/homeassistant/components/daikin/manifest.json +++ b/homeassistant/components/daikin/manifest.json @@ -3,7 +3,7 @@ "name": "Daikin AC", "config_flow": true, "documentation": "https://www.home-assistant.io/integrations/daikin", - "requirements": ["pydaikin==2.1.2"], + "requirements": ["pydaikin==2.2.0"], "codeowners": ["@fredrike"], "zeroconf": ["_dkapi._tcp.local."], "quality_scale": "platinum" diff --git a/homeassistant/components/daikin/sensor.py b/homeassistant/components/daikin/sensor.py index 7ff79338a79..73cadba9e45 100644 --- a/homeassistant/components/daikin/sensor.py +++ b/homeassistant/components/daikin/sensor.py @@ -14,10 +14,13 @@ from . 
import DOMAIN as DAIKIN_DOMAIN, DaikinApi from .const import ( ATTR_COOL_ENERGY, ATTR_HEAT_ENERGY, + ATTR_HUMIDITY, ATTR_INSIDE_TEMPERATURE, ATTR_OUTSIDE_TEMPERATURE, + ATTR_TARGET_HUMIDITY, ATTR_TOTAL_POWER, SENSOR_TYPE_ENERGY, + SENSOR_TYPE_HUMIDITY, SENSOR_TYPE_POWER, SENSOR_TYPE_TEMPERATURE, SENSOR_TYPES, @@ -44,6 +47,9 @@ async def async_setup_entry(hass, entry, async_add_entities): sensors.append(ATTR_TOTAL_POWER) sensors.append(ATTR_COOL_ENERGY) sensors.append(ATTR_HEAT_ENERGY) + if daikin_api.device.support_humidity: + sensors.append(ATTR_HUMIDITY) + sensors.append(ATTR_TARGET_HUMIDITY) async_add_entities([DaikinSensor.factory(daikin_api, sensor) for sensor in sensors]) @@ -55,6 +61,7 @@ class DaikinSensor(Entity): """Initialize any DaikinSensor.""" cls = { SENSOR_TYPE_TEMPERATURE: DaikinClimateSensor, + SENSOR_TYPE_HUMIDITY: DaikinClimateSensor, SENSOR_TYPE_POWER: DaikinPowerSensor, SENSOR_TYPE_ENERGY: DaikinPowerSensor, }[SENSOR_TYPES[monitored_state][CONF_TYPE]] @@ -117,6 +124,11 @@ class DaikinClimateSensor(DaikinSensor): return self._api.device.inside_temperature if self._device_attribute == ATTR_OUTSIDE_TEMPERATURE: return self._api.device.outside_temperature + + if self._device_attribute == ATTR_HUMIDITY: + return self._api.device.humidity + if self._device_attribute == ATTR_TARGET_HUMIDITY: + return self._api.device.target_humidity return None diff --git a/homeassistant/components/daikin/translations/fr.json b/homeassistant/components/daikin/translations/fr.json index f3351631fa8..b9c7d920c13 100644 --- a/homeassistant/components/daikin/translations/fr.json +++ b/homeassistant/components/daikin/translations/fr.json @@ -3,6 +3,11 @@ "abort": { "already_configured": "L'appareil est d\u00e9j\u00e0 configur\u00e9" }, + "error": { + "device_fail": "Erreur inattendue", + "device_timeout": "Echec de la connexion", + "forbidden": "Authentification invalide" + }, "step": { "user": { "data": { diff --git a/homeassistant/components/daikin/translations/nn.json b/homeassistant/components/daikin/translations/nn.json deleted file mode 100644 index fb8f82824c2..00000000000 --- a/homeassistant/components/daikin/translations/nn.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "title": "Daikin AC" -} \ No newline at end of file diff --git a/homeassistant/components/datadog/__init__.py b/homeassistant/components/datadog/__init__.py index 36b4037f70a..b5b7664f8b0 100644 --- a/homeassistant/components/datadog/__init__.py +++ b/homeassistant/components/datadog/__init__.py @@ -75,9 +75,6 @@ def setup(hass, config): if state is None or state.state == STATE_UNKNOWN: return - if state.attributes.get("hidden") is True: - return - states = dict(state.attributes) metric = f"{prefix}.{state.domain}" tags = [f"entity:{state.entity_id}"] diff --git a/homeassistant/components/ddwrt/device_tracker.py b/homeassistant/components/ddwrt/device_tracker.py index 27f6895fc43..9b6fd1bdb64 100644 --- a/homeassistant/components/ddwrt/device_tracker.py +++ b/homeassistant/components/ddwrt/device_tracker.py @@ -113,7 +113,7 @@ class DdWrtDeviceScanner(DeviceScanner): Return boolean if scanning successful. 
""" - _LOGGER.info("Checking ARP") + _LOGGER.debug("Checking ARP") endpoint = "Wireless" if self.wireless_only else "Lan" url = f"{self.protocol}://{self.host}/Status_{endpoint}.live.asp" diff --git a/homeassistant/components/debugpy/__init__.py b/homeassistant/components/debugpy/__init__.py new file mode 100644 index 00000000000..caa691b2369 --- /dev/null +++ b/homeassistant/components/debugpy/__init__.py @@ -0,0 +1,79 @@ +"""The Remote Python Debugger integration.""" +from asyncio import Event +import logging +from threading import Thread +from typing import Optional + +import debugpy +import voluptuous as vol + +from homeassistant.const import CONF_HOST, CONF_PORT +from homeassistant.core import HomeAssistant, ServiceCall +import homeassistant.helpers.config_validation as cv +from homeassistant.helpers.service import async_register_admin_service +from homeassistant.helpers.typing import ConfigType + +DOMAIN = "debugpy" +CONF_WAIT = "wait" +CONF_START = "start" +SERVICE_START = "start" + +CONFIG_SCHEMA = vol.Schema( + { + DOMAIN: vol.Schema( + { + vol.Optional(CONF_HOST, default="0.0.0.0"): cv.string, + vol.Optional(CONF_PORT, default=5678): cv.port, + vol.Optional(CONF_START, default=True): cv.boolean, + vol.Optional(CONF_WAIT, default=False): cv.boolean, + } + ) + }, + extra=vol.ALLOW_EXTRA, +) + +_LOGGER = logging.getLogger(__name__) + + +async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: + """Set up the Remote Python Debugger component.""" + conf = config[DOMAIN] + + async def debug_start( + call: Optional[ServiceCall] = None, *, wait: bool = True + ) -> None: + """Start the debugger.""" + debugpy.listen((conf[CONF_HOST], conf[CONF_PORT])) + + wait = conf[CONF_WAIT] + if wait: + _LOGGER.warning( + "Waiting for remote debug connection on %s:%s", + conf[CONF_HOST], + conf[CONF_PORT], + ) + ready = Event() + + def waitfor(): + debugpy.wait_for_client() + hass.loop.call_soon_threadsafe(ready.set) + + Thread(target=waitfor).start() + + await ready.wait() + else: + _LOGGER.warning( + "Listening for remote debug connection on %s:%s", + conf[CONF_HOST], + conf[CONF_PORT], + ) + + async_register_admin_service( + hass, DOMAIN, SERVICE_START, debug_start, schema=vol.Schema({}) + ) + + # If set to start the debugger on startup, do so + if conf[CONF_START]: + await debug_start(wait=conf[CONF_WAIT]) + + return True diff --git a/homeassistant/components/debugpy/manifest.json b/homeassistant/components/debugpy/manifest.json new file mode 100644 index 00000000000..c27e7411de2 --- /dev/null +++ b/homeassistant/components/debugpy/manifest.json @@ -0,0 +1,8 @@ +{ + "domain": "debugpy", + "name": "Remote Python Debugger", + "documentation": "https://www.home-assistant.io/integrations/debugpy", + "requirements": ["debugpy==1.0.0b11"], + "codeowners": ["@frenck"], + "quality_scale": "internal" +} diff --git a/homeassistant/components/debugpy/services.yaml b/homeassistant/components/debugpy/services.yaml new file mode 100644 index 00000000000..4e3c19dd0d7 --- /dev/null +++ b/homeassistant/components/debugpy/services.yaml @@ -0,0 +1,3 @@ +# Describes the format for available Remote Python Debugger services +start: + description: Start the Remote Python Debugger. 
diff --git a/homeassistant/components/deconz/config_flow.py b/homeassistant/components/deconz/config_flow.py index f52a18bbd07..f3ae5682131 100644 --- a/homeassistant/components/deconz/config_flow.py +++ b/homeassistant/components/deconz/config_flow.py @@ -205,25 +205,25 @@ class DeconzFlowHandler(config_entries.ConfigFlow, domain=DOMAIN): return await self.async_step_link() - async def async_step_hassio(self, user_input=None): + async def async_step_hassio(self, discovery_info): """Prepare configuration for a Hass.io deCONZ bridge. This flow is triggered by the discovery component. """ - LOGGER.debug("deCONZ HASSIO discovery %s", pformat(user_input)) + LOGGER.debug("deCONZ HASSIO discovery %s", pformat(discovery_info)) - self.bridge_id = normalize_bridge_id(user_input[CONF_SERIAL]) + self.bridge_id = normalize_bridge_id(discovery_info[CONF_SERIAL]) await self.async_set_unique_id(self.bridge_id) self._abort_if_unique_id_configured( updates={ - CONF_HOST: user_input[CONF_HOST], - CONF_PORT: user_input[CONF_PORT], - CONF_API_KEY: user_input[CONF_API_KEY], + CONF_HOST: discovery_info[CONF_HOST], + CONF_PORT: discovery_info[CONF_PORT], + CONF_API_KEY: discovery_info[CONF_API_KEY], } ) - self._hassio_discovery = user_input + self._hassio_discovery = discovery_info return await self.async_step_hassio_confirm() diff --git a/homeassistant/components/deconz/translations/es.json b/homeassistant/components/deconz/translations/es.json index 5a4a3f29258..877623188bb 100644 --- a/homeassistant/components/deconz/translations/es.json +++ b/homeassistant/components/deconz/translations/es.json @@ -2,7 +2,7 @@ "config": { "abort": { "already_configured": "La pasarela ya est\u00e1 configurada", - "already_in_progress": "La configuraci\u00f3n del flujo para la pasarela ya est\u00e1 en curso.", + "already_in_progress": "El flujo de configuraci\u00f3n para la pasarela ya est\u00e1 en marcha.", "no_bridges": "No se han descubierto pasarelas deCONZ", "not_deconz_bridge": "No es una pasarela deCONZ", "one_instance_only": "El componente solo admite una instancia de deCONZ", diff --git a/homeassistant/components/deconz/translations/et.json b/homeassistant/components/deconz/translations/et.json deleted file mode 100644 index 45bb3967060..00000000000 --- a/homeassistant/components/deconz/translations/et.json +++ /dev/null @@ -1,12 +0,0 @@ -{ - "config": { - "step": { - "manual_confirm": { - "data": { - "host": "", - "port": "" - } - } - } - } -} \ No newline at end of file diff --git a/homeassistant/components/deconz/translations/hr.json b/homeassistant/components/deconz/translations/hr.json deleted file mode 100644 index 50fec879cb6..00000000000 --- a/homeassistant/components/deconz/translations/hr.json +++ /dev/null @@ -1,12 +0,0 @@ -{ - "config": { - "step": { - "manual_confirm": { - "data": { - "host": "Host", - "port": "Port" - } - } - } - } -} \ No newline at end of file diff --git a/homeassistant/components/deconz/translations/th.json b/homeassistant/components/deconz/translations/th.json deleted file mode 100644 index db5e0efae10..00000000000 --- a/homeassistant/components/deconz/translations/th.json +++ /dev/null @@ -1,11 +0,0 @@ -{ - "config": { - "step": { - "manual_confirm": { - "data": { - "port": "Port" - } - } - } - } -} \ No newline at end of file diff --git a/homeassistant/components/default_config/manifest.json b/homeassistant/components/default_config/manifest.json index 0b80e172904..338aeb2e285 100644 --- a/homeassistant/components/default_config/manifest.json +++ 
b/homeassistant/components/default_config/manifest.json @@ -5,7 +5,6 @@ "dependencies": [ "automation", "cloud", - "config", "frontend", "history", "logbook", diff --git a/homeassistant/components/demo/__init__.py b/homeassistant/components/demo/__init__.py index 344ffbd9fd3..8121d493315 100644 --- a/homeassistant/components/demo/__init__.py +++ b/homeassistant/components/demo/__init__.py @@ -18,6 +18,7 @@ COMPONENTS_WITH_CONFIG_ENTRY_DEMO_PLATFORM = [ "climate", "cover", "fan", + "humidifier", "light", "lock", "media_player", diff --git a/homeassistant/components/demo/humidifier.py b/homeassistant/components/demo/humidifier.py new file mode 100644 index 00000000000..35eb6e18537 --- /dev/null +++ b/homeassistant/components/demo/humidifier.py @@ -0,0 +1,124 @@ +"""Demo platform that offers a fake humidifier device.""" +from homeassistant.components.humidifier import HumidifierEntity +from homeassistant.components.humidifier.const import ( + DEVICE_CLASS_DEHUMIDIFIER, + DEVICE_CLASS_HUMIDIFIER, + SUPPORT_MODES, +) + +SUPPORT_FLAGS = 0 + + +async def async_setup_platform(hass, config, async_add_entities, discovery_info=None): + """Set up the Demo humidifier devices.""" + async_add_entities( + [ + DemoHumidifier( + name="Humidifier", + mode=None, + target_humidity=68, + device_class=DEVICE_CLASS_HUMIDIFIER, + ), + DemoHumidifier( + name="Dehumidifier", + mode=None, + target_humidity=54, + device_class=DEVICE_CLASS_DEHUMIDIFIER, + ), + DemoHumidifier( + name="Hygrostat", + mode="home", + available_modes=["home", "eco"], + target_humidity=50, + ), + ] + ) + + +async def async_setup_entry(hass, config_entry, async_add_entities): + """Set up the Demo humidifier devices config entry.""" + await async_setup_platform(hass, {}, async_add_entities) + + +class DemoHumidifier(HumidifierEntity): + """Representation of a demo humidifier device.""" + + def __init__( + self, + name, + mode, + target_humidity, + available_modes=None, + is_on=True, + device_class=None, + ): + """Initialize the humidifier device.""" + self._name = name + self._state = is_on + self._support_flags = SUPPORT_FLAGS + if mode is not None: + self._support_flags = self._support_flags | SUPPORT_MODES + self._target_humidity = target_humidity + self._mode = mode + self._available_modes = available_modes + self._device_class = device_class + + @property + def supported_features(self): + """Return the list of supported features.""" + return self._support_flags + + @property + def should_poll(self): + """Return the polling state.""" + return False + + @property + def name(self): + """Return the name of the humidity device.""" + return self._name + + @property + def target_humidity(self): + """Return the humidity we try to reach.""" + return self._target_humidity + + @property + def mode(self): + """Return current mode.""" + return self._mode + + @property + def available_modes(self): + """Return available modes.""" + return self._available_modes + + @property + def is_on(self): + """Return true if the humidifier is on.""" + return self._state + + @property + def device_class(self): + """Return the device class of the humidifier.""" + return self._device_class + + async def async_turn_on(self, **kwargs): + """Turn the device on.""" + self._state = True + self.async_write_ha_state() + + async def async_turn_off(self, **kwargs): + """Turn the device off.""" + self._state = False + self.async_write_ha_state() + + async def async_set_humidity(self, humidity): + """Set new humidity level.""" + self._target_humidity = humidity + 
self.async_write_ha_state() + + async def async_set_mode(self, mode): + """Update mode.""" + self._mode = mode + self.async_write_ha_state() diff --git a/homeassistant/components/demo/translations/fr.json b/homeassistant/components/demo/translations/fr.json index 941f04f5c9e..a3bd8f470f8 100644 --- a/homeassistant/components/demo/translations/fr.json +++ b/homeassistant/components/demo/translations/fr.json @@ -1,6 +1,12 @@ { "options": { "step": { + "init": { + "data": { + "one": "Vide", + "other": "Vide" + } + }, "options_1": { "data": { "bool": "Bool\u00e9en facultatif", diff --git a/homeassistant/components/demo/weather.py b/homeassistant/components/demo/weather.py index b17c88fa828..3c87cd1c27c 100644 --- a/homeassistant/components/demo/weather.py +++ b/homeassistant/components/demo/weather.py @@ -4,6 +4,7 @@ from datetime import timedelta from homeassistant.components.weather import ( ATTR_FORECAST_CONDITION, ATTR_FORECAST_PRECIPITATION, + ATTR_FORECAST_PRECIPITATION_PROBABILITY, ATTR_FORECAST_TEMP, ATTR_FORECAST_TEMP_LOW, ATTR_FORECAST_TIME, @@ -48,13 +49,13 @@ def setup_platform(hass, config, add_entities, discovery_info=None): 0.5, TEMP_CELSIUS, [ - ["rainy", 1, 22, 15], - ["rainy", 5, 19, 8], - ["cloudy", 0, 15, 9], - ["sunny", 0, 12, 6], - ["partlycloudy", 2, 14, 7], - ["rainy", 15, 18, 7], - ["fog", 0.2, 21, 12], + ["rainy", 1, 22, 15, 60], + ["rainy", 5, 19, 8, 30], + ["cloudy", 0, 15, 9, 10], + ["sunny", 0, 12, 6, 0], + ["partlycloudy", 2, 14, 7, 20], + ["rainy", 15, 18, 7, 0], + ["fog", 0.2, 21, 12, 100], ], ), DemoWeather( @@ -66,13 +67,13 @@ def setup_platform(hass, config, add_entities, discovery_info=None): 4.8, TEMP_FAHRENHEIT, [ - ["snowy", 2, -10, -15], - ["partlycloudy", 1, -13, -14], - ["sunny", 0, -18, -22], - ["sunny", 0.1, -23, -23], - ["snowy", 4, -19, -20], - ["sunny", 0.3, -14, -19], - ["sunny", 0, -9, -12], + ["snowy", 2, -10, -15, 60], + ["partlycloudy", 1, -13, -14, 25], + ["sunny", 0, -18, -22, 70], + ["sunny", 0.1, -23, -23, 90], + ["snowy", 4, -19, -20, 40], + ["sunny", 0.3, -14, -19, 0], + ["sunny", 0, -9, -12, 0], ], ), ] @@ -163,6 +164,7 @@ class DemoWeather(WeatherEntity): ATTR_FORECAST_PRECIPITATION: entry[1], ATTR_FORECAST_TEMP: entry[2], ATTR_FORECAST_TEMP_LOW: entry[3], + ATTR_FORECAST_PRECIPITATION_PROBABILITY: entry[4], } reftime = reftime + timedelta(hours=4) forecast_data.append(data_dict) diff --git a/homeassistant/components/denonavr/__init__.py b/homeassistant/components/denonavr/__init__.py index 8877a7dfb3b..89c6413d146 100644 --- a/homeassistant/components/denonavr/__init__.py +++ b/homeassistant/components/denonavr/__init__.py @@ -1,15 +1,33 @@ """The denonavr component.""" +import logging + import voluptuous as vol -from homeassistant.const import ATTR_ENTITY_ID -import homeassistant.helpers.config_validation as cv +from homeassistant import config_entries, core +from homeassistant.const import ATTR_ENTITY_ID, CONF_HOST +from homeassistant.exceptions import ConfigEntryNotReady +from homeassistant.helpers import config_validation as cv, entity_registry as er from homeassistant.helpers.dispatcher import dispatcher_send -DOMAIN = "denonavr" +from .config_flow import ( + CONF_SHOW_ALL_SOURCES, + CONF_ZONE2, + CONF_ZONE3, + DEFAULT_SHOW_SOURCES, + DEFAULT_TIMEOUT, + DEFAULT_ZONE2, + DEFAULT_ZONE3, + DOMAIN, +) +from .receiver import ConnectDenonAVR +CONF_RECEIVER = "receiver" +UNDO_UPDATE_LISTENER = "undo_update_listener" SERVICE_GET_COMMAND = "get_command" ATTR_COMMAND = "command" +_LOGGER = logging.getLogger(__name__) + CALL_SCHEMA = 
vol.Schema({vol.Required(ATTR_ENTITY_ID): cv.comp_entity_ids}) GET_COMMAND_SCHEMA = CALL_SCHEMA.extend({vol.Required(ATTR_COMMAND): cv.string}) @@ -19,7 +37,7 @@ SERVICE_TO_METHOD = { } -def setup(hass, config): +def setup(hass: core.HomeAssistant, config: dict): """Set up the denonavr platform.""" def service_handler(service): @@ -33,3 +51,72 @@ def setup(hass, config): hass.services.register(DOMAIN, service, service_handler, schema=schema) return True + + +async def async_setup_entry( + hass: core.HomeAssistant, entry: config_entries.ConfigEntry +): + """Set up the denonavr components from a config entry.""" + hass.data.setdefault(DOMAIN, {}) + + # Connect to receiver + connect_denonavr = ConnectDenonAVR( + hass, + entry.data[CONF_HOST], + DEFAULT_TIMEOUT, + entry.options.get(CONF_SHOW_ALL_SOURCES, DEFAULT_SHOW_SOURCES), + entry.options.get(CONF_ZONE2, DEFAULT_ZONE2), + entry.options.get(CONF_ZONE3, DEFAULT_ZONE3), + ) + if not await connect_denonavr.async_connect_receiver(): + raise ConfigEntryNotReady + receiver = connect_denonavr.receiver + + undo_listener = entry.add_update_listener(update_listener) + + hass.data[DOMAIN][entry.entry_id] = { + CONF_RECEIVER: receiver, + UNDO_UPDATE_LISTENER: undo_listener, + } + + hass.async_create_task( + hass.config_entries.async_forward_entry_setup(entry, "media_player") + ) + + return True + + +async def async_unload_entry( + hass: core.HomeAssistant, config_entry: config_entries.ConfigEntry +): + """Unload a config entry.""" + unload_ok = await hass.config_entries.async_forward_entry_unload( + config_entry, "media_player" + ) + + hass.data[DOMAIN][config_entry.entry_id][UNDO_UPDATE_LISTENER]() + + # Remove zone2 and zone3 entities if needed + entity_registry = await er.async_get_registry(hass) + entries = er.async_entries_for_config_entry(entity_registry, config_entry.entry_id) + zone2_id = f"{config_entry.unique_id}-Zone2" + zone3_id = f"{config_entry.unique_id}-Zone3" + for entry in entries: + if entry.unique_id == zone2_id and not config_entry.options.get(CONF_ZONE2): + entity_registry.async_remove(entry.entity_id) + _LOGGER.debug("Removing zone2 from DenonAvr") + if entry.unique_id == zone3_id and not config_entry.options.get(CONF_ZONE3): + entity_registry.async_remove(entry.entity_id) + _LOGGER.debug("Removing zone3 from DenonAvr") + + if unload_ok: + hass.data[DOMAIN].pop(config_entry.entry_id) + + return unload_ok + + +async def update_listener( + hass: core.HomeAssistant, config_entry: config_entries.ConfigEntry +): + """Handle options update.""" + await hass.config_entries.async_reload(config_entry.entry_id) diff --git a/homeassistant/components/denonavr/config_flow.py b/homeassistant/components/denonavr/config_flow.py new file mode 100644 index 00000000000..595f958ce01 --- /dev/null +++ b/homeassistant/components/denonavr/config_flow.py @@ -0,0 +1,256 @@ +"""Config flow to configure Denon AVR receivers using their HTTP interface.""" +from functools import partial +import logging +from urllib.parse import urlparse + +import denonavr +from getmac import get_mac_address +import voluptuous as vol + +from homeassistant import config_entries +from homeassistant.components import ssdp +from homeassistant.const import CONF_HOST, CONF_MAC +from homeassistant.core import callback +from homeassistant.helpers.device_registry import format_mac + +from .receiver import ConnectDenonAVR + +_LOGGER = logging.getLogger(__name__) + +DOMAIN = "denonavr" + +SUPPORTED_MANUFACTURERS = ["Denon", "DENON", "Marantz"] + +CONF_SHOW_ALL_SOURCES = "show_all_sources" 
+CONF_ZONE2 = "zone2" +CONF_ZONE3 = "zone3" +CONF_TYPE = "type" +CONF_MODEL = "model" +CONF_MANUFACTURER = "manufacturer" +CONF_SERIAL_NUMBER = "serial_number" + +DEFAULT_SHOW_SOURCES = False +DEFAULT_TIMEOUT = 5 +DEFAULT_ZONE2 = False +DEFAULT_ZONE3 = False + +CONFIG_SCHEMA = vol.Schema({vol.Optional(CONF_HOST): str}) + + +class OptionsFlowHandler(config_entries.OptionsFlow): + """Options for the component.""" + + def __init__(self, config_entry: config_entries.ConfigEntry): + """Init object.""" + self.config_entry = config_entry + + async def async_step_init(self, user_input=None): + """Manage the options.""" + if user_input is not None: + return self.async_create_entry(title="", data=user_input) + + settings_schema = vol.Schema( + { + vol.Optional( + CONF_SHOW_ALL_SOURCES, + default=self.config_entry.options.get( + CONF_SHOW_ALL_SOURCES, DEFAULT_SHOW_SOURCES + ), + ): bool, + vol.Optional( + CONF_ZONE2, + default=self.config_entry.options.get(CONF_ZONE2, DEFAULT_ZONE2), + ): bool, + vol.Optional( + CONF_ZONE3, + default=self.config_entry.options.get(CONF_ZONE3, DEFAULT_ZONE3), + ): bool, + } + ) + + return self.async_show_form(step_id="init", data_schema=settings_schema) + + +class DenonAvrFlowHandler(config_entries.ConfigFlow, domain=DOMAIN): + """Handle a Denon AVR config flow.""" + + VERSION = 1 + CONNECTION_CLASS = config_entries.CONN_CLASS_LOCAL_POLL + + def __init__(self): + """Initialize the Denon AVR flow.""" + self.host = None + self.serial_number = None + self.model_name = None + self.timeout = DEFAULT_TIMEOUT + self.show_all_sources = DEFAULT_SHOW_SOURCES + self.zone2 = DEFAULT_ZONE2 + self.zone3 = DEFAULT_ZONE3 + self.d_receivers = [] + + @staticmethod + @callback + def async_get_options_flow(config_entry) -> OptionsFlowHandler: + """Get the options flow.""" + return OptionsFlowHandler(config_entry) + + async def async_step_user(self, user_input=None): + """Handle a flow initialized by the user.""" + errors = {} + if user_input is not None: + # check if IP address is set manually + host = user_input.get(CONF_HOST) + if host: + self.host = host + return await self.async_step_connect() + + # discovery using denonavr library + self.d_receivers = await self.hass.async_add_executor_job(denonavr.discover) + # More than one receiver could be discovered by that method + if len(self.d_receivers) == 1: + self.host = self.d_receivers[0]["host"] + return await self.async_step_connect() + if len(self.d_receivers) > 1: + # show selection form + return await self.async_step_select() + + errors["base"] = "discovery_error" + + return self.async_show_form( + step_id="user", data_schema=CONFIG_SCHEMA, errors=errors + ) + + async def async_step_select(self, user_input=None): + """Handle multiple receivers found.""" + errors = {} + if user_input is not None: + self.host = user_input["select_host"] + return await self.async_step_connect() + + select_scheme = vol.Schema( + { + vol.Required("select_host"): vol.In( + [d_receiver["host"] for d_receiver in self.d_receivers] + ) + } + ) + + return self.async_show_form( + step_id="select", data_schema=select_scheme, errors=errors + ) + + async def async_step_confirm(self, user_input=None): + """Allow the user to confirm adding the device.""" + if user_input is not None: + return await self.async_step_connect() + + return self.async_show_form(step_id="confirm") + + async def async_step_connect(self, user_input=None): + """Connect to the receiver.""" + connect_denonavr = ConnectDenonAVR( + self.hass, + self.host, + self.timeout, + self.show_all_sources, + 
self.zone2, + self.zone3, + ) + if not await connect_denonavr.async_connect_receiver(): + return self.async_abort(reason="connection_error") + receiver = connect_denonavr.receiver + + mac_address = await self.async_get_mac(self.host) + + if not self.serial_number: + self.serial_number = receiver.serial_number + if not self.model_name: + self.model_name = (receiver.model_name).replace("*", "") + + if self.serial_number is not None: + unique_id = self.construct_unique_id(self.model_name, self.serial_number) + await self.async_set_unique_id(unique_id) + self._abort_if_unique_id_configured() + else: + _LOGGER.error( + "Could not get serial number of host %s, " + "unique_id's will not be available", + self.host, + ) + for entry in self._async_current_entries(): + if entry.data[CONF_HOST] == self.host: + return self.async_abort(reason="already_configured") + + return self.async_create_entry( + title=receiver.name, + data={ + CONF_HOST: self.host, + CONF_MAC: mac_address, + CONF_TYPE: receiver.receiver_type, + CONF_MODEL: self.model_name, + CONF_MANUFACTURER: receiver.manufacturer, + CONF_SERIAL_NUMBER: self.serial_number, + }, + ) + + async def async_step_ssdp(self, discovery_info): + """Handle a discovered Denon AVR. + + This flow is triggered by the SSDP component. It will check if the + host is already configured and delegate to the import step if not. + """ + # Filter out non-Denon AVRs#1 + if ( + discovery_info.get(ssdp.ATTR_UPNP_MANUFACTURER) + not in SUPPORTED_MANUFACTURERS + ): + return self.async_abort(reason="not_denonavr_manufacturer") + + # Check if required information is present to set the unique_id + if ( + ssdp.ATTR_UPNP_MODEL_NAME not in discovery_info + or ssdp.ATTR_UPNP_SERIAL not in discovery_info + ): + return self.async_abort(reason="not_denonavr_missing") + + self.model_name = discovery_info[ssdp.ATTR_UPNP_MODEL_NAME].replace("*", "") + self.serial_number = discovery_info[ssdp.ATTR_UPNP_SERIAL] + self.host = urlparse(discovery_info[ssdp.ATTR_SSDP_LOCATION]).hostname + + unique_id = self.construct_unique_id(self.model_name, self.serial_number) + await self.async_set_unique_id(unique_id) + self._abort_if_unique_id_configured({CONF_HOST: self.host}) + + # pylint: disable=no-member # https://github.com/PyCQA/pylint/issues/3167 + self.context.update( + { + "title_placeholders": { + "name": discovery_info.get(ssdp.ATTR_UPNP_FRIENDLY_NAME, self.host) + } + } + ) + + return await self.async_step_confirm() + + @staticmethod + def construct_unique_id(model_name, serial_number): + """Construct the unique id from the ssdp discovery or user_step.""" + return f"{model_name}-{serial_number}" + + async def async_get_mac(self, host): + """Get the mac address of the DenonAVR receiver.""" + try: + mac_address = await self.hass.async_add_executor_job( + partial(get_mac_address, **{"ip": host}) + ) + if not mac_address: + mac_address = await self.hass.async_add_executor_job( + partial(get_mac_address, **{"hostname": host}) + ) + except Exception as err: # pylint: disable=broad-except + _LOGGER.error("Unable to get mac address: %s", err) + mac_address = None + + if mac_address is not None: + mac_address = format_mac(mac_address) + return mac_address diff --git a/homeassistant/components/denonavr/manifest.json b/homeassistant/components/denonavr/manifest.json index a26bbdd58ab..4ea844ef060 100644 --- a/homeassistant/components/denonavr/manifest.json +++ b/homeassistant/components/denonavr/manifest.json @@ -1,7 +1,46 @@ { "domain": "denonavr", "name": "Denon AVR Network Receivers", + 
"config_flow": true, "documentation": "https://www.home-assistant.io/integrations/denonavr", - "requirements": ["denonavr==0.8.1"], - "codeowners": ["@scarface-4711", "@starkillerOG"] + "requirements": ["denonavr==0.9.3", "getmac==0.8.2"], + "codeowners": ["@scarface-4711", "@starkillerOG"], + "ssdp": [ + { + "manufacturer": "Denon", + "deviceType": "urn:schemas-upnp-org:device:MediaRenderer:1" + }, + { + "manufacturer": "DENON", + "deviceType": "urn:schemas-upnp-org:device:MediaRenderer:1" + }, + { + "manufacturer": "Marantz", + "deviceType": "urn:schemas-upnp-org:device:MediaRenderer:1" + }, + { + "manufacturer": "Denon", + "deviceType": "urn:schemas-upnp-org:device:MediaServer:1" + }, + { + "manufacturer": "DENON", + "deviceType": "urn:schemas-upnp-org:device:MediaServer:1" + }, + { + "manufacturer": "Marantz", + "deviceType": "urn:schemas-upnp-org:device:MediaServer:1" + }, + { + "manufacturer": "Denon", + "deviceType": "urn:schemas-denon-com:device:AiosDevice:1" + }, + { + "manufacturer": "DENON", + "deviceType": "urn:schemas-denon-com:device:AiosDevice:1" + }, + { + "manufacturer": "Marantz", + "deviceType": "urn:schemas-denon-com:device:AiosDevice:1" + } + ] } diff --git a/homeassistant/components/denonavr/media_player.py b/homeassistant/components/denonavr/media_player.py index 524e728588b..c28b1a4cab5 100644 --- a/homeassistant/components/denonavr/media_player.py +++ b/homeassistant/components/denonavr/media_player.py @@ -1,12 +1,8 @@ """Support for Denon AVR receivers using their HTTP interface.""" -from collections import namedtuple import logging -import denonavr -import voluptuous as vol - -from homeassistant.components.media_player import PLATFORM_SCHEMA, MediaPlayerEntity +from homeassistant.components.media_player import MediaPlayerEntity from homeassistant.components.media_player.const import ( MEDIA_TYPE_CHANNEL, MEDIA_TYPE_MUSIC, @@ -25,10 +21,7 @@ from homeassistant.components.media_player.const import ( ) from homeassistant.const import ( ATTR_ENTITY_ID, - CONF_HOST, - CONF_NAME, - CONF_TIMEOUT, - CONF_ZONE, + CONF_MAC, ENTITY_MATCH_ALL, ENTITY_MATCH_NONE, STATE_OFF, @@ -36,25 +29,22 @@ from homeassistant.const import ( STATE_PAUSED, STATE_PLAYING, ) -import homeassistant.helpers.config_validation as cv +from homeassistant.helpers import device_registry as dr from homeassistant.helpers.dispatcher import async_dispatcher_connect -from . import DOMAIN +from . 
import CONF_RECEIVER +from .config_flow import ( + CONF_MANUFACTURER, + CONF_MODEL, + CONF_SERIAL_NUMBER, + CONF_TYPE, + DOMAIN, +) _LOGGER = logging.getLogger(__name__) ATTR_SOUND_MODE_RAW = "sound_mode_raw" -CONF_INVALID_ZONES_ERR = "Invalid Zone (expected Zone2 or Zone3)" -CONF_SHOW_ALL_SOURCES = "show_all_sources" -CONF_VALID_ZONES = ["Zone2", "Zone3"] -CONF_ZONES = "zones" - -DEFAULT_SHOW_SOURCES = False -DEFAULT_TIMEOUT = 2 - -KEY_DENON_CACHE = "denonavr_hosts" - SUPPORT_DENON = ( SUPPORT_VOLUME_STEP | SUPPORT_VOLUME_MUTE @@ -73,99 +63,32 @@ SUPPORT_MEDIA_MODES = ( | SUPPORT_PLAY ) -DENON_ZONE_SCHEMA = vol.Schema( - { - vol.Required(CONF_ZONE): vol.In(CONF_VALID_ZONES, CONF_INVALID_ZONES_ERR), - vol.Optional(CONF_NAME): cv.string, - } -) -PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( - { - vol.Optional(CONF_HOST): cv.string, - vol.Optional(CONF_NAME): cv.string, - vol.Optional(CONF_SHOW_ALL_SOURCES, default=DEFAULT_SHOW_SOURCES): cv.boolean, - vol.Optional(CONF_ZONES): vol.All(cv.ensure_list, [DENON_ZONE_SCHEMA]), - vol.Optional(CONF_TIMEOUT, default=DEFAULT_TIMEOUT): cv.positive_int, - } -) - -NewHost = namedtuple("NewHost", ["host", "name"]) - - -def setup_platform(hass, config, add_entities, discovery_info=None): - """Set up the Denon platform.""" - # Initialize list with receivers to be started - receivers = [] - - cache = hass.data.get(KEY_DENON_CACHE) - if cache is None: - cache = hass.data[KEY_DENON_CACHE] = set() - - # Get config option for show_all_sources and timeout - show_all_sources = config[CONF_SHOW_ALL_SOURCES] - timeout = config[CONF_TIMEOUT] - - # Get config option for additional zones - zones = config.get(CONF_ZONES) - if zones is not None: - add_zones = {} - for entry in zones: - add_zones[entry[CONF_ZONE]] = entry.get(CONF_NAME) - else: - add_zones = None - - # Start assignment of host and name - new_hosts = [] - # 1. option: manual setting - if config.get(CONF_HOST) is not None: - host = config.get(CONF_HOST) - name = config.get(CONF_NAME) - new_hosts.append(NewHost(host=host, name=name)) - - # 2. option: discovery using netdisco - if discovery_info is not None: - host = discovery_info.get("host") - name = discovery_info.get("name") - new_hosts.append(NewHost(host=host, name=name)) - - # 3. 
option: discovery using denonavr library - if config.get(CONF_HOST) is None and discovery_info is None: - d_receivers = denonavr.discover() - # More than one receiver could be discovered by that method - for d_receiver in d_receivers: - host = d_receiver["host"] - name = d_receiver["friendlyName"] - new_hosts.append(NewHost(host=host, name=name)) - - for entry in new_hosts: - # Check if host not in cache, append it and save for later - # starting - if entry.host not in cache: - new_device = denonavr.DenonAVR( - host=entry.host, - name=entry.name, - show_all_inputs=show_all_sources, - timeout=timeout, - add_zones=add_zones, - ) - for new_zone in new_device.zones.values(): - receivers.append(DenonDevice(new_zone)) - cache.add(host) - _LOGGER.info("Denon receiver at host %s initialized", host) - - # Add all freshly discovered receivers - if receivers: - add_entities(receivers) +async def async_setup_entry(hass, config_entry, async_add_entities): + """Set up the DenonAVR receiver from a config entry.""" + entities = [] + receiver = hass.data[DOMAIN][config_entry.entry_id][CONF_RECEIVER] + for receiver_zone in receiver.zones.values(): + if config_entry.data[CONF_SERIAL_NUMBER] is not None: + unique_id = f"{config_entry.unique_id}-{receiver_zone.zone}" + else: + unique_id = None + entities.append(DenonDevice(receiver_zone, unique_id, config_entry)) + _LOGGER.debug( + "%s receiver at host %s initialized", receiver.manufacturer, receiver.host + ) + async_add_entities(entities) class DenonDevice(MediaPlayerEntity): """Representation of a Denon Media Player Device.""" - def __init__(self, receiver): + def __init__(self, receiver, unique_id, config_entry): """Initialize the device.""" self._receiver = receiver self._name = self._receiver.name + self._unique_id = unique_id + self._config_entry = config_entry self._muted = self._receiver.muted self._volume = self._receiver.volume self._current_source = self._receiver.input_func @@ -237,6 +160,30 @@ class DenonDevice(MediaPlayerEntity): self._sound_mode = self._receiver.sound_mode self._sound_mode_raw = self._receiver.sound_mode_raw + @property + def unique_id(self): + """Return the unique id of the zone.""" + return self._unique_id + + @property + def device_info(self): + """Return the device info of the receiver.""" + if self._config_entry.data[CONF_SERIAL_NUMBER] is None: + return None + + device_info = { + "identifiers": {(DOMAIN, self._config_entry.unique_id)}, + "manufacturer": self._config_entry.data[CONF_MANUFACTURER], + "name": self._config_entry.title, + "model": f"{self._config_entry.data[CONF_MODEL]}-{self._config_entry.data[CONF_TYPE]}", + } + if self._config_entry.data[CONF_MAC] is not None: + device_info["connections"] = { + (dr.CONNECTION_NETWORK_MAC, self._config_entry.data[CONF_MAC]) + } + + return device_info + @property def name(self): """Return the name of the device.""" diff --git a/homeassistant/components/denonavr/receiver.py b/homeassistant/components/denonavr/receiver.py new file mode 100644 index 00000000000..557427c8c41 --- /dev/null +++ b/homeassistant/components/denonavr/receiver.py @@ -0,0 +1,71 @@ +"""Code to handle a DenonAVR receiver.""" +import logging + +import denonavr + +_LOGGER = logging.getLogger(__name__) + + +class ConnectDenonAVR: + """Class to async connect to a DenonAVR receiver.""" + + def __init__(self, hass, host, timeout, show_all_inputs, zone2, zone3): + """Initialize the class.""" + self._hass = hass + self._receiver = None + self._host = host + self._show_all_inputs = show_all_inputs + self._timeout = 
timeout + + self._zones = {} + if zone2: + self._zones["Zone2"] = None + if zone3: + self._zones["Zone3"] = None + + @property + def receiver(self): + """Return the class containing all connections to the receiver.""" + return self._receiver + + async def async_connect_receiver(self): + """Connect to the DenonAVR receiver.""" + if not await self._hass.async_add_executor_job(self.init_receiver_class): + return False + + if ( + self._receiver.manufacturer is None + or self._receiver.name is None + or self._receiver.model_name is None + or self._receiver.receiver_type is None + ): + return False + + _LOGGER.debug( + "%s receiver %s at host %s connected, model %s, serial %s, type %s", + self._receiver.manufacturer, + self._receiver.name, + self._receiver.host, + self._receiver.model_name, + self._receiver.serial_number, + self._receiver.receiver_type, + ) + + return True + + def init_receiver_class(self): + """Initialize the DenonAVR class in a way that can called by async_add_executor_job.""" + try: + self._receiver = denonavr.DenonAVR( + host=self._host, + show_all_inputs=self._show_all_inputs, + timeout=self._timeout, + add_zones=self._zones, + ) + except ConnectionError: + _LOGGER.error( + "ConnectionError during setup of denonavr with host %s", self._host + ) + return False + + return True diff --git a/homeassistant/components/denonavr/strings.json b/homeassistant/components/denonavr/strings.json new file mode 100644 index 00000000000..b01782adf32 --- /dev/null +++ b/homeassistant/components/denonavr/strings.json @@ -0,0 +1,48 @@ +{ + "config": { + "flow_title": "Denon AVR Network Receiver: {name}", + "step": { + "user": { + "title": "Denon AVR Network Receivers", + "description": "Connect to your receiver, if the IP address is not set, auto-discovery is used", + "data": { + "host": "IP address" + } + }, + "confirm": { + "title": "Denon AVR Network Receivers", + "description": "Please confirm adding the receiver" + }, + "select": { + "title": "Select the receiver that you wish to connect", + "description": "Run the setup again if you want to connect additional receivers", + "data": { + "select_host": "Receiver IP" + } + } + }, + "error": { + "discovery_error": "Failed to discover a Denon AVR Network Receiver" + }, + "abort": { + "already_configured": "[%key:common::config_flow::abort::already_configured_device%]", + "already_in_progress": "Config flow for this Denon AVR is already in progress", + "connection_error": "Failed to connect, please try again", + "not_denonavr_manufacturer": "Not a Denon AVR Network Receiver, discovered manafucturer did not match", + "not_denonavr_missing": "Not a Denon AVR Network Receiver, discovery information not complete" + } + }, + "options": { + "step": { + "init": { + "title": "Denon AVR Network Receivers", + "description": "Specify optional settings", + "data": { + "show_all_sources": "Show all sources", + "zone2": "Set up Zone 2", + "zone3": "Set up Zone 3" + } + } + } + } +} diff --git a/homeassistant/components/denonavr/translations/ca.json b/homeassistant/components/denonavr/translations/ca.json new file mode 100644 index 00000000000..4f7c34a158e --- /dev/null +++ b/homeassistant/components/denonavr/translations/ca.json @@ -0,0 +1,48 @@ +{ + "config": { + "abort": { + "already_configured": "El dispositiu ja est\u00e0 configurat", + "already_in_progress": "El flux de dades de configuraci\u00f3 per aquest Denon AVR ja est\u00e0 en curs", + "connection_error": "No s'ha pogut connectar, torna-ho a provar", + "not_denonavr_manufacturer": "No \u00e9s un 
receptor de xarxa Denon AVR, no coincideix el fabricant descobert", + "not_denonavr_missing": "No \u00e9s un receptor de xarxa Denon AVR, informaci\u00f3 de descobriment no completa" + }, + "error": { + "discovery_error": "No s'ha pogut descobrir un receptor de xarxa AVR de Denon" + }, + "flow_title": "Receptor de xarxa AVR de Denon: {name}", + "step": { + "confirm": { + "description": "Confirma l'addici\u00f3 del receptor", + "title": "Receptors de xarxa AVR de Denon" + }, + "select": { + "data": { + "select_host": "IP del receptor" + }, + "description": "Torna a executar la configuraci\u00f3 si vols connectar receptors addicionals", + "title": "Selecciona el receptor al qual connectar-te" + }, + "user": { + "data": { + "host": "Adre\u00e7a IP" + }, + "description": "Connecta el teu receptor, si no es configura l'adre\u00e7a IP, s'utilitza el descobriment autom\u00e0tic", + "title": "Receptors de xarxa AVR de Denon" + } + } + }, + "options": { + "step": { + "init": { + "data": { + "show_all_sources": "Mostra totes les fonts", + "zone2": "Configura la Zona 2", + "zone3": "Configura la Zona 3" + }, + "description": "Especifica par\u00e0metres opcionals", + "title": "Receptors de xarxa AVR de Denon" + } + } + } +} \ No newline at end of file diff --git a/homeassistant/components/denonavr/translations/en.json b/homeassistant/components/denonavr/translations/en.json new file mode 100644 index 00000000000..7afc68d8fc1 --- /dev/null +++ b/homeassistant/components/denonavr/translations/en.json @@ -0,0 +1,48 @@ +{ + "config": { + "abort": { + "already_configured": "Device is already configured", + "already_in_progress": "Config flow for this Denon AVR is already in progress", + "connection_error": "Failed to connect, please try again", + "not_denonavr_manufacturer": "Not a Denon AVR Network Receiver, discovered manafucturer did not match", + "not_denonavr_missing": "Not a Denon AVR Network Receiver, discovery information not complete" + }, + "error": { + "discovery_error": "Failed to discover a Denon AVR Network Receiver" + }, + "flow_title": "Denon AVR Network Receiver: {name}", + "step": { + "confirm": { + "description": "Please confirm adding the receiver", + "title": "Denon AVR Network Receivers" + }, + "select": { + "data": { + "select_host": "Receiver IP" + }, + "description": "Run the setup again if you want to connect additional receivers", + "title": "Select the receiver that you wish to connect" + }, + "user": { + "data": { + "host": "IP address" + }, + "description": "Connect to your receiver, if the IP address is not set, auto-discovery is used", + "title": "Denon AVR Network Receivers" + } + } + }, + "options": { + "step": { + "init": { + "data": { + "show_all_sources": "Show all sources", + "zone2": "Set up Zone 2", + "zone3": "Set up Zone 3" + }, + "description": "Specify optional settings", + "title": "Denon AVR Network Receivers" + } + } + } +} \ No newline at end of file diff --git a/homeassistant/components/denonavr/translations/es.json b/homeassistant/components/denonavr/translations/es.json new file mode 100644 index 00000000000..69568002c35 --- /dev/null +++ b/homeassistant/components/denonavr/translations/es.json @@ -0,0 +1,48 @@ +{ + "config": { + "abort": { + "already_configured": "El dispositivo ya est\u00e1 configurado", + "already_in_progress": "El flujo de configuraci\u00f3n para este AVR Denon ya est\u00e1 en marcha.", + "connection_error": "No se ha podido conectar, por favor, int\u00e9ntelo de nuevo.", + "not_denonavr_manufacturer": "No es un Receptor AVR Denon AVR 
en Red, el fabricante detectado no concuerda", + "not_denonavr_missing": "No es un Receptor AVR Denon AVR en Red, la informaci\u00f3n detectada no est\u00e1 completa" + }, + "error": { + "discovery_error": "Error detectando un Receptor AVR Denon en Red" + }, + "flow_title": "Receptor AVR Denon en Red: {name}", + "step": { + "confirm": { + "description": "Por favor confirma la adici\u00f3n del receptor", + "title": "Receptores AVR Denon en Red" + }, + "select": { + "data": { + "select_host": "IP del Receptor" + }, + "description": "Ejecuta la configuraci\u00f3n de nuevo si deseas conectar receptores adicionales", + "title": "Selecciona el receptor con el que quieres conectar." + }, + "user": { + "data": { + "host": "Direcci\u00f3n IP" + }, + "description": "Con\u00e9ctar con tu receptor, si la direcci\u00f3n IP no est\u00e1 configurada, se utilizar\u00e1 la detecci\u00f3n autom\u00e1tica", + "title": "Receptores AVR Denon en Red" + } + } + }, + "options": { + "step": { + "init": { + "data": { + "show_all_sources": "Mostrar todas las fuentes", + "zone2": "Configurar la Zona 2", + "zone3": "Configurar la Zona 3" + }, + "description": "Especificar configuraciones opcionales", + "title": "Receptores AVR Denon en Red" + } + } + } +} \ No newline at end of file diff --git a/homeassistant/components/denonavr/translations/it.json b/homeassistant/components/denonavr/translations/it.json new file mode 100644 index 00000000000..fd2bb06b498 --- /dev/null +++ b/homeassistant/components/denonavr/translations/it.json @@ -0,0 +1,48 @@ +{ + "config": { + "abort": { + "already_configured": "Il dispositivo \u00e8 gi\u00e0 configurato", + "already_in_progress": "Il flusso di configurazione per questo Denon AVR \u00e8 gi\u00e0 in corso", + "connection_error": "Impossibile connettersi, si prega di riprovare", + "not_denonavr_manufacturer": "Non \u00e8 un ricevitore di rete Denon AVR, il produttore rilevato non corrisponde", + "not_denonavr_missing": "Non \u00e8 un ricevitore di rete Denon AVR, le informazioni di rilevamento non sono complete" + }, + "error": { + "discovery_error": "Impossibile rilevare un ricevitore di rete Denon AVR" + }, + "flow_title": "Ricevitore di rete Denon AVR: {name}", + "step": { + "confirm": { + "description": "Si prega di confermare l'aggiunta del ricevitore", + "title": "Ricevitori di rete Denon AVR" + }, + "select": { + "data": { + "select_host": "IP del ricevitore" + }, + "description": "Eseguire nuovamente il setup se si desidera collegare altri ricevitori", + "title": "Selezionare il ricevitore che si desidera collegare" + }, + "user": { + "data": { + "host": "Indirizzo IP" + }, + "description": "Collegare il ricevitore, se l'indirizzo IP non \u00e8 impostato, sar\u00e0 utilizzato il rilevamento automatico", + "title": "Ricevitori di rete Denon AVR" + } + } + }, + "options": { + "step": { + "init": { + "data": { + "show_all_sources": "Mostra tutte le fonti", + "zone2": "Imposta la Zona 2", + "zone3": "Imposta la Zona 3" + }, + "description": "Specificare le impostazioni opzionali", + "title": "Ricevitori di rete Denon AVR" + } + } + } +} \ No newline at end of file diff --git a/homeassistant/components/denonavr/translations/ko.json b/homeassistant/components/denonavr/translations/ko.json new file mode 100644 index 00000000000..f7e43da9ba0 --- /dev/null +++ b/homeassistant/components/denonavr/translations/ko.json @@ -0,0 +1,48 @@ +{ + "config": { + "abort": { + "already_configured": "\uae30\uae30\uac00 \uc774\ubbf8 \uad6c\uc131\ub418\uc5c8\uc2b5\ub2c8\ub2e4", + 
"already_in_progress": "Denon AVR \uad6c\uc131\uc774 \uc774\ubbf8 \uc9c4\ud589 \uc911\uc785\ub2c8\ub2e4.", + "connection_error": "\uc5f0\uacb0\ud558\uc9c0 \ubabb\ud588\uc2b5\ub2c8\ub2e4. \ub2e4\uc2dc \uc2dc\ub3c4\ud574\uc8fc\uc138\uc694", + "not_denonavr_manufacturer": "Denon AVR \ub124\ud2b8\uc6cc\ud06c \ub9ac\uc2dc\ubc84\uac00 \uc544\ub2d9\ub2c8\ub2e4. \ubc1c\uacac\ub41c \uc81c\uc870\uc0ac\uac00 \uc77c\uce58\ud558\uc9c0 \uc54a\uc2b5\ub2c8\ub2e4", + "not_denonavr_missing": "Denon AVR \ub124\ud2b8\uc6cc\ud06c \ub9ac\uc2dc\ubc84\uac00 \uc544\ub2d9\ub2c8\ub2e4. \uac80\uc0c9 \uc815\ubcf4\uac00 \uc644\uc804\ud558\uc9c0 \uc54a\uc2b5\ub2c8\ub2e4" + }, + "error": { + "discovery_error": "Denon AVR \ub124\ud2b8\uc6cc\ud06c \ub9ac\uc2dc\ubc84\ub97c \ucc3e\uc9c0 \ubabb\ud588\uc2b5\ub2c8\ub2e4" + }, + "flow_title": "Denon AVR \ub124\ud2b8\uc6cc\ud06c \ub9ac\uc2dc\ubc84: {name}", + "step": { + "confirm": { + "description": "\ub9ac\uc2dc\ubc84 \ucd94\uac00\ub97c \ud655\uc778\ud574\uc8fc\uc138\uc694", + "title": "Denon AVR \ub124\ud2b8\uc6cc\ud06c \ub9ac\uc2dc\ubc84" + }, + "select": { + "data": { + "select_host": "\ub9ac\uc2dc\ubc84 IP" + }, + "description": "\ub9ac\uc2dc\ubc84 \uc5f0\uacb0\uc744 \ucd94\uac00\ud558\ub824\uba74 \uc124\uc815\uc744 \ub2e4\uc2dc \uc2e4\ud589\ud574\uc8fc\uc138\uc694", + "title": "\uc5f0\uacb0\ud560 \ub9ac\uc2dc\ubc84\ub97c \uc120\ud0dd\ud574\uc8fc\uc138\uc694." + }, + "user": { + "data": { + "host": "IP \uc8fc\uc18c" + }, + "description": "\ub9ac\uc2dc\ubc84\uc5d0 \uc5f0\uacb0\ud569\ub2c8\ub2e4. IP \uc8fc\uc18c\uac00 \uc124\uc815\ub418\uc9c0 \uc54a\uc740 \uacbd\uc6b0 \uc790\ub3d9 \uac80\uc0c9\uc774 \uc0ac\uc6a9\ub429\ub2c8\ub2e4", + "title": "Denon AVR \ub124\ud2b8\uc6cc\ud06c \ub9ac\uc2dc\ubc84" + } + } + }, + "options": { + "step": { + "init": { + "data": { + "show_all_sources": "\ubaa8\ub4e0 \uc785\ub825\uc18c\uc2a4 \ud45c\uc2dc", + "zone2": "Zone 2 \uc124\uc815", + "zone3": "Zone 3 \uc124\uc815" + }, + "description": "\uc635\uc158 \uc124\uc815 \uc9c0\uc815", + "title": "Denon AVR \ub124\ud2b8\uc6cc\ud06c \ub9ac\uc2dc\ubc84" + } + } + } +} \ No newline at end of file diff --git a/homeassistant/components/denonavr/translations/lb.json b/homeassistant/components/denonavr/translations/lb.json new file mode 100644 index 00000000000..89a7b09c06e --- /dev/null +++ b/homeassistant/components/denonavr/translations/lb.json @@ -0,0 +1,46 @@ +{ + "config": { + "abort": { + "already_in_progress": "Konfiguratioun fir d\u00ebsen Denon AVR ass schonn am gaang.", + "connection_error": "Feeler beim verbannen, prob\u00e9ier w.e.g. 
nach emol.", + "not_denonavr_manufacturer": "Kee Denon AVR Netzwierk Empf\u00e4nger, entdeckte Hiersteller passt net", + "not_denonavr_missing": "Kee Denon AVR Netzwierk Empf\u00e4nger, Discovery Informatioun net vollst\u00e4nneg" + }, + "error": { + "discovery_error": "Feeler beim entdecken vun engem Denon AVR Netzwierk Empf\u00e4nger" + }, + "flow_title": "Denon AVR Netzwierk Empf\u00e4nger: {name}", + "step": { + "confirm": { + "title": "Denon AVR Netzwierk Empf\u00e4nger" + }, + "select": { + "data": { + "select_host": "IP vum Receiver" + }, + "description": "Start den Setup nach eemol falls nach zous\u00e4tzlech Receiver solle verbonne ginn", + "title": "Wielt de Receiver aus dee soll verbonne ginn" + }, + "user": { + "data": { + "host": "IP Adress" + }, + "description": "Mam Receiver verbannen, falls keng IP Adress uginn ass g\u00ebtt auto-discovery benotzt", + "title": "Denon AVR Netzwierk Empf\u00e4nger" + } + } + }, + "options": { + "step": { + "init": { + "data": { + "show_all_sources": "All Quelle uweisen", + "zone2": "Zone 2 ariichten", + "zone3": "Zone 3 ariichten" + }, + "description": "Optionell Astellungen uginn", + "title": "Denon AVR Netzwierk Empf\u00e4nger" + } + } + } +} \ No newline at end of file diff --git a/homeassistant/components/denonavr/translations/no.json b/homeassistant/components/denonavr/translations/no.json new file mode 100644 index 00000000000..acdab9f4d3f --- /dev/null +++ b/homeassistant/components/denonavr/translations/no.json @@ -0,0 +1,47 @@ +{ + "config": { + "abort": { + "already_in_progress": "Konfigurasjonsflyt for denne Denon AVR p\u00e5g\u00e5r allerede", + "connection_error": "Klarte ikke \u00e5 koble til, vennligst pr\u00f8v igjen", + "not_denonavr_manufacturer": "Ikke en Denon AVR Network Receiver, oppdaget manafucturer stemte ikke overens", + "not_denonavr_missing": "Ikke en Denon AVR Network Receiver, oppdagelsesinformasjon ikke fullf\u00f8rt" + }, + "error": { + "discovery_error": "Kunne ikke oppdage en Denon AVR Network Receiver" + }, + "flow_title": "Denon AVR nettverksmottaker: {name}", + "step": { + "confirm": { + "description": "Bekreft at du legger til mottakeren", + "title": "Denon AVR nettverksmottakere" + }, + "select": { + "data": { + "select_host": "IP-mottaker" + }, + "description": "Kj\u00f8r oppsettet igjen hvis du vil koble til flere mottakere", + "title": "Velg mottakeren du vil koble til" + }, + "user": { + "data": { + "host": "IP adresse" + }, + "description": "Koble til mottakeren, hvis IP-adressen ikke er angitt, brukes automatisk oppdagelse", + "title": "Denon AVR Network Receivers" + } + } + }, + "options": { + "step": { + "init": { + "data": { + "show_all_sources": "Vis alle kilder", + "zone2": "Sett opp sone 2", + "zone3": "Sett opp sone 3" + }, + "description": "Spesifiser valgfrie innstillinger", + "title": "Denon AVR Network Receivers" + } + } + } +} \ No newline at end of file diff --git a/homeassistant/components/denonavr/translations/pl.json b/homeassistant/components/denonavr/translations/pl.json new file mode 100644 index 00000000000..d8fd08f01f7 --- /dev/null +++ b/homeassistant/components/denonavr/translations/pl.json @@ -0,0 +1,48 @@ +{ + "config": { + "abort": { + "already_configured": "Urz\u0105dzenie jest ju\u017c skonfigurowane.", + "already_in_progress": "Konfiguracja urz\u0105dzenia jest ju\u017c w toku.", + "connection_error": "Nie mo\u017cna nawi\u0105za\u0107 po\u0142\u0105czenia.", + "not_denonavr_manufacturer": "Nie jest to urz\u0105dzenie AVR firmy Denon, producent wykrytego urz\u0105dzenia 
nie pasuje.", + "not_denonavr_missing": "Nie jest to urz\u0105dzenie AVR firmy Denon, dane z automatycznego wykrywania nie s\u0105 kompletne." + }, + "error": { + "discovery_error": "Nie uda\u0142o si\u0119 wykry\u0107 urz\u0105dzenia AVR firmy Denon" + }, + "flow_title": "Denon AVR: {name}", + "step": { + "confirm": { + "description": "Prosz\u0119 potwierdzi\u0107 dodanie urz\u0105dzenia", + "title": "Denon AVR" + }, + "select": { + "data": { + "select_host": "Adres IP" + }, + "description": "Uruchom konfiguracj\u0119 integracji ponownie, je\u015bli chcesz pod\u0142\u0105czy\u0107 dodatkowe urz\u0105dzenia.", + "title": "Wybierz urz\u0105dzenie, z kt\u00f3rym chcesz si\u0119 po\u0142\u0105czy\u0107." + }, + "user": { + "data": { + "host": "Adres IP" + }, + "description": "\u0141\u0105czenie z urz\u0105dzeniem, je\u015bli adres IP nie jest zdefiniowany, u\u017cywane jest automatyczne wykrywanie.", + "title": "Denon AVR" + } + } + }, + "options": { + "step": { + "init": { + "data": { + "show_all_sources": "Poka\u017c wszystkie \u017ar\u00f3d\u0142a", + "zone2": "Konfiguracja Strefy 2", + "zone3": "Konfiguracja Strefy 3" + }, + "description": "Ustawienia opcjonalne", + "title": "Denon AVR" + } + } + } +} \ No newline at end of file diff --git a/homeassistant/components/denonavr/translations/ru.json b/homeassistant/components/denonavr/translations/ru.json new file mode 100644 index 00000000000..de9ca15c730 --- /dev/null +++ b/homeassistant/components/denonavr/translations/ru.json @@ -0,0 +1,48 @@ +{ + "config": { + "abort": { + "already_configured": "\u041d\u0430\u0441\u0442\u0440\u043e\u0439\u043a\u0430 \u044d\u0442\u043e\u0433\u043e \u0443\u0441\u0442\u0440\u043e\u0439\u0441\u0442\u0432\u0430 \u0443\u0436\u0435 \u0432\u044b\u043f\u043e\u043b\u043d\u0435\u043d\u0430.", + "already_in_progress": "\u041d\u0430\u0441\u0442\u0440\u043e\u0439\u043a\u0430 \u044d\u0442\u043e\u0433\u043e \u0443\u0441\u0442\u0440\u043e\u0439\u0441\u0442\u0432\u0430 \u0443\u0436\u0435 \u0432\u044b\u043f\u043e\u043b\u043d\u044f\u0435\u0442\u0441\u044f.", + "connection_error": "\u041d\u0435 \u0443\u0434\u0430\u043b\u043e\u0441\u044c \u043f\u043e\u0434\u043a\u043b\u044e\u0447\u0438\u0442\u044c\u0441\u044f, \u043f\u043e\u043f\u0440\u043e\u0431\u0443\u0439\u0442\u0435 \u0435\u0449\u0435 \u0440\u0430\u0437.", + "not_denonavr_manufacturer": "\u042d\u0442\u043e \u043d\u0435 \u0440\u0435\u0441\u0438\u0432\u0435\u0440 Denon. \u041f\u0440\u043e\u0438\u0437\u0432\u043e\u0434\u0438\u0442\u0435\u043b\u044c \u043d\u0435 \u0441\u043e\u043e\u0442\u0432\u0435\u0442\u0441\u0442\u0432\u0443\u0435\u0442.", + "not_denonavr_missing": "\u041d\u0435\u043f\u043e\u043b\u043d\u0430\u044f \u0438\u043d\u0444\u043e\u0440\u043c\u0430\u0446\u0438\u044f \u0434\u043b\u044f \u043e\u0431\u043d\u0430\u0440\u0443\u0436\u0435\u043d\u0438\u044f \u0443\u0441\u0442\u0440\u043e\u0439\u0441\u0442\u0432\u0430." + }, + "error": { + "discovery_error": "\u041d\u0435 \u0443\u0434\u0430\u043b\u043e\u0441\u044c \u043e\u0431\u043d\u0430\u0440\u0443\u0436\u0438\u0442\u044c \u0440\u0435\u0441\u0438\u0432\u0435\u0440 Denon." 
+ }, + "flow_title": "\u0420\u0435\u0441\u0438\u0432\u0435\u0440 Denon: {name}", + "step": { + "confirm": { + "description": "\u041f\u043e\u0434\u0442\u0432\u0435\u0440\u0434\u0438\u0442\u0435 \u0434\u043e\u0431\u0430\u0432\u043b\u0435\u043d\u0438\u0435 \u0440\u0435\u0441\u0438\u0432\u0435\u0440\u0430", + "title": "\u0420\u0435\u0441\u0438\u0432\u0435\u0440\u044b Denon" + }, + "select": { + "data": { + "select_host": "IP-\u0430\u0434\u0440\u0435\u0441" + }, + "description": "\u0417\u0430\u043f\u0443\u0441\u0442\u0438\u0442\u0435 \u043d\u0430\u0441\u0442\u0440\u043e\u0439\u043a\u0443 \u0435\u0449\u0451 \u0440\u0430\u0437, \u0435\u0441\u043b\u0438 \u0412\u044b \u0445\u043e\u0442\u0438\u0442\u0435 \u0434\u043e\u0431\u0430\u0432\u0438\u0442\u044c \u0435\u0449\u0451 \u043e\u0434\u0438\u043d \u0440\u0435\u0441\u0438\u0432\u0435\u0440", + "title": "\u0412\u044b\u0431\u0435\u0440\u0438\u0442\u0435 \u0440\u0435\u0441\u0438\u0432\u0435\u0440, \u043a\u043e\u0442\u043e\u0440\u044b\u0439 \u0412\u044b \u0445\u043e\u0442\u0438\u0442\u0435 \u043f\u043e\u0434\u043a\u043b\u044e\u0447\u0438\u0442\u044c" + }, + "user": { + "data": { + "host": "IP-\u0430\u0434\u0440\u0435\u0441" + }, + "description": "\u0415\u0441\u043b\u0438 IP-\u0430\u0434\u0440\u0435\u0441 \u043d\u0435 \u0443\u043a\u0430\u0437\u0430\u043d, \u0431\u0443\u0434\u0435\u0442 \u0438\u0441\u043f\u043e\u043b\u044c\u0437\u043e\u0432\u0430\u0442\u044c\u0441\u044f \u0430\u0432\u0442\u043e\u043c\u0430\u0442\u0438\u0447\u0435\u0441\u043a\u043e\u0435 \u043e\u0431\u043d\u0430\u0440\u0443\u0436\u0435\u043d\u0438\u0435", + "title": "\u0420\u0435\u0441\u0438\u0432\u0435\u0440\u044b Denon" + } + } + }, + "options": { + "step": { + "init": { + "data": { + "show_all_sources": "\u041f\u043e\u043a\u0430\u0437\u0430\u0442\u044c \u0432\u0441\u0435 \u0438\u0441\u0442\u043e\u0447\u043d\u0438\u043a\u0438", + "zone2": "\u041d\u0430\u0441\u0442\u0440\u043e\u0439\u043a\u0430 \u0437\u043e\u043d\u044b 2", + "zone3": "\u041d\u0430\u0441\u0442\u0440\u043e\u0439\u043a\u0430 \u0437\u043e\u043d\u044b 3" + }, + "description": "\u0414\u043e\u043f\u043e\u043b\u043d\u0438\u0442\u0435\u043b\u044c\u043d\u044b\u0435 \u043d\u0430\u0441\u0442\u0440\u043e\u0439\u043a\u0438", + "title": "\u0420\u0435\u0441\u0438\u0432\u0435\u0440\u044b Denon" + } + } + } +} \ No newline at end of file diff --git a/homeassistant/components/denonavr/translations/zh-Hant.json b/homeassistant/components/denonavr/translations/zh-Hant.json new file mode 100644 index 00000000000..8dc32220d7c --- /dev/null +++ b/homeassistant/components/denonavr/translations/zh-Hant.json @@ -0,0 +1,48 @@ +{ + "config": { + "abort": { + "already_configured": "\u8a2d\u5099\u5df2\u7d93\u8a2d\u5b9a\u5b8c\u6210", + "already_in_progress": "Denon AVR \u8a2d\u5099\u8a2d\u5b9a\u5df2\u7d93\u9032\u884c\u4e2d\u3002", + "connection_error": "\u9023\u7dda\u5931\u6557\uff0c\u8acb\u518d\u8a66\u4e00\u6b21", + "not_denonavr_manufacturer": "\u4e26\u975e Denon AVR \u7db2\u8def\u63a5\u6536\u5668\uff0c\u6240\u63a2\u7d22\u4e4b\u88fd\u9020\u5ee0\u5546\u4e0d\u7b26\u5408", + "not_denonavr_missing": "\u4e26\u975e Denon AVR \u7db2\u8def\u63a5\u6536\u5668\uff0c\u63a2\u7d22\u8cc7\u8a0a\u4e0d\u5b8c\u6574" + }, + "error": { + "discovery_error": "\u7121\u6cd5\u627e\u5230 Denon AVR \u7db2\u8def\u63a5\u6536\u5668" + }, + "flow_title": "Denon AVR \u7db2\u8def\u63a5\u6536\u5668\uff1a{name}", + "step": { + "confirm": { + "description": "\u8acb\u78ba\u8a8d\u65b0\u589e\u63a5\u6536\u5668", + "title": "Denon AVR \u7db2\u8def\u63a5\u6536\u5668" + }, + "select": { + "data": 
{ + "select_host": "\u63a5\u6536\u5668 IP" + }, + "description": "\u8acb\u518d\u6b21\u57f7\u884c\u8a2d\u5b9a\uff0c\u5047\u5982\u9084\u8981\u65b0\u589e\u5176\u4ed6\u63a5\u6536\u5668", + "title": "\u9078\u64c7\u6240\u8981\u9023\u7dda\u7684\u63a5\u6536\u5668\u3002" + }, + "user": { + "data": { + "host": "IP \u4f4d\u5740" + }, + "description": "\u9023\u7dda\u81f3\u63a5\u6536\u5668\u3002\u5047\u5982\u672a\u8a2d\u5b9a IP \u4f4d\u5740\uff0c\u5c07\u4f7f\u7528\u81ea\u52d5\u63a2\u7d22\u3002", + "title": "Denon AVR \u7db2\u8def\u63a5\u6536\u5668" + } + } + }, + "options": { + "step": { + "init": { + "data": { + "show_all_sources": "\u986f\u793a\u6240\u6709\u4f86\u6e90", + "zone2": "\u8a2d\u5b9a\u5340\u57df 2", + "zone3": "\u8a2d\u5b9a\u5340\u57df 3" + }, + "description": "\u6307\u5b9a\u9078\u9805\u8a2d\u5b9a", + "title": "Denon AVR \u7db2\u8def\u63a5\u6536\u5668" + } + } + } +} \ No newline at end of file diff --git a/homeassistant/components/device_automation/manifest.json b/homeassistant/components/device_automation/manifest.json index 2eadd214bc1..033a54312be 100644 --- a/homeassistant/components/device_automation/manifest.json +++ b/homeassistant/components/device_automation/manifest.json @@ -2,7 +2,6 @@ "domain": "device_automation", "name": "Device Automation", "documentation": "https://www.home-assistant.io/integrations/device_automation", - "dependencies": ["webhook"], "codeowners": ["@home-assistant/core"], "quality_scale": "internal" } diff --git a/homeassistant/components/devolo_home_control/binary_sensor.py b/homeassistant/components/devolo_home_control/binary_sensor.py new file mode 100644 index 00000000000..87af86f02af --- /dev/null +++ b/homeassistant/components/devolo_home_control/binary_sensor.py @@ -0,0 +1,72 @@ +"""Platform for binary sensor integration.""" +import logging + +from homeassistant.components.binary_sensor import BinarySensorEntity +from homeassistant.config_entries import ConfigEntry +from homeassistant.helpers.typing import HomeAssistantType + +from .const import DOMAIN +from .devolo_device import DevoloDeviceEntity + +_LOGGER = logging.getLogger(__name__) + + +async def async_setup_entry( + hass: HomeAssistantType, entry: ConfigEntry, async_add_entities +) -> None: + """Get all binary sensor and multi level sensor devices and setup them via config entry.""" + entities = [] + + for device in hass.data[DOMAIN]["homecontrol"].binary_sensor_devices: + for binary_sensor in device.binary_sensor_property: + entities.append( + DevoloBinaryDeviceEntity( + homecontrol=hass.data[DOMAIN]["homecontrol"], + device_instance=device, + element_uid=binary_sensor, + ) + ) + + async_add_entities(entities, False) + + +class DevoloBinaryDeviceEntity(DevoloDeviceEntity, BinarySensorEntity): + """Representation of a binary sensor within devolo Home Control.""" + + def __init__(self, homecontrol, device_instance, element_uid): + """Initialize a devolo binary sensor.""" + if device_instance.binary_sensor_property.get(element_uid).sub_type != "": + name = f"{device_instance.itemName} {device_instance.binary_sensor_property.get(element_uid).sub_type}" + else: + name = f"{device_instance.itemName} {device_instance.binary_sensor_property.get(element_uid).sensor_type}" + + super().__init__( + homecontrol=homecontrol, + device_instance=device_instance, + element_uid=element_uid, + name=name, + sync=self._sync, + ) + + self._binary_sensor_property = self._device_instance.binary_sensor_property.get( + self._unique_id + ) + + self._state = self._binary_sensor_property.state + + self._subscriber = 
None + + @property + def is_on(self): + """Return the state.""" + return self._state + + def _sync(self, message=None): + """Update the binary sensor state.""" + if message[0].startswith("devolo.BinarySensor"): + self._state = self._device_instance.binary_sensor_property[message[0]].state + elif message[0].startswith("hdm"): + self._available = self._device_instance.is_online() + else: + _LOGGER.debug("No valid message received: %s", message) + self.schedule_update_ha_state() diff --git a/homeassistant/components/devolo_home_control/const.py b/homeassistant/components/devolo_home_control/const.py index 0d5bb9a3356..599e44fe8f0 100644 --- a/homeassistant/components/devolo_home_control/const.py +++ b/homeassistant/components/devolo_home_control/const.py @@ -3,6 +3,6 @@ DOMAIN = "devolo_home_control" DEFAULT_MYDEVOLO = "https://www.mydevolo.com" DEFAULT_MPRM = "https://homecontrol.mydevolo.com" -PLATFORMS = ["switch"] +PLATFORMS = ["binary_sensor", "sensor", "switch"] CONF_MYDEVOLO = "mydevolo_url" CONF_HOMECONTROL = "home_control_url" diff --git a/homeassistant/components/devolo_home_control/devolo_device.py b/homeassistant/components/devolo_home_control/devolo_device.py new file mode 100644 index 00000000000..1694aeb3f47 --- /dev/null +++ b/homeassistant/components/devolo_home_control/devolo_device.py @@ -0,0 +1,75 @@ +"""Base class for a device entity integrated in devolo Home Control.""" +import logging + +from homeassistant.helpers.entity import Entity + +from .const import DOMAIN +from .subscriber import Subscriber + +_LOGGER = logging.getLogger(__name__) + + +class DevoloDeviceEntity(Entity): + """Representation of a sensor within devolo Home Control.""" + + def __init__(self, homecontrol, device_instance, element_uid, name, sync): + """Initialize a devolo device entity.""" + self._device_instance = device_instance + self._name = name + self._unique_id = element_uid + self._homecontrol = homecontrol + + # This is not doing I/O. 
It fetches an internal state of the API + self._available = device_instance.is_online() + + # Get the brand and model information + self._brand = device_instance.brand + self._model = device_instance.name + + self.subscriber = None + self.sync_callback = sync + + async def async_added_to_hass(self) -> None: + """Call when entity is added to hass.""" + self.subscriber = Subscriber( + self._device_instance.itemName, callback=self.sync_callback + ) + self._homecontrol.publisher.register( + self._device_instance.uid, self.subscriber, self.sync_callback + ) + + async def async_will_remove_from_hass(self) -> None: + """Call when entity is removed or disabled.""" + self._homecontrol.publisher.unregister( + self._device_instance.uid, self.subscriber + ) + + @property + def unique_id(self): + """Return the unique ID of the entity.""" + return self._unique_id + + @property + def device_info(self): + """Return the device info.""" + return { + "identifiers": {(DOMAIN, self._device_instance.uid)}, + "name": self._device_instance.itemName, + "manufacturer": self._brand, + "model": self._model, + } + + @property + def should_poll(self): + """Return the polling state.""" + return False + + @property + def name(self): + """Return the display name of this entity.""" + return self._name + + @property + def available(self) -> bool: + """Return the online state.""" + return self._available diff --git a/homeassistant/components/devolo_home_control/manifest.json b/homeassistant/components/devolo_home_control/manifest.json index 867848f89d1..1ee54f23fde 100644 --- a/homeassistant/components/devolo_home_control/manifest.json +++ b/homeassistant/components/devolo_home_control/manifest.json @@ -1,11 +1,9 @@ { "domain": "devolo_home_control", - "name": "devolo_home_control", + "name": "devolo Home Control", "documentation": "https://www.home-assistant.io/integrations/devolo_home_control", "requirements": ["devolo-home-control-api==0.11.0"], "config_flow": true, - "codeowners": [ - "@2Fake", - "@Shutgun"], + "codeowners": ["@2Fake", "@Shutgun"], "quality_scale": "silver" -} \ No newline at end of file +} diff --git a/homeassistant/components/devolo_home_control/sensor.py b/homeassistant/components/devolo_home_control/sensor.py new file mode 100644 index 00000000000..d0d3388ef17 --- /dev/null +++ b/homeassistant/components/devolo_home_control/sensor.py @@ -0,0 +1,91 @@ +"""Platform for sensor integration.""" +import logging + +from homeassistant.components.sensor import ( + DEVICE_CLASS_HUMIDITY, + DEVICE_CLASS_ILLUMINANCE, + DEVICE_CLASS_TEMPERATURE, +) +from homeassistant.config_entries import ConfigEntry +from homeassistant.helpers.typing import HomeAssistantType + +from .const import DOMAIN +from .devolo_device import DevoloDeviceEntity + +_LOGGER = logging.getLogger(__name__) + +DEVICE_CLASS_MAPPING = { + "temperature": DEVICE_CLASS_TEMPERATURE, + "light": DEVICE_CLASS_ILLUMINANCE, + "humidity": DEVICE_CLASS_HUMIDITY, +} + + +async def async_setup_entry( + hass: HomeAssistantType, entry: ConfigEntry, async_add_entities +) -> None: + """Get all sensor devices and setup them via config entry.""" + entities = [] + + for device in hass.data[DOMAIN]["homecontrol"].multi_level_sensor_devices: + for multi_level_sensor in device.multi_level_sensor_property: + entities.append( + DevoloMultiLevelDeviceEntity( + homecontrol=hass.data[DOMAIN]["homecontrol"], + device_instance=device, + element_uid=multi_level_sensor, + ) + ) + async_add_entities(entities, False) + + +class DevoloMultiLevelDeviceEntity(DevoloDeviceEntity): + 
"""Representation o a multi level sensor within devolo Home Control.""" + + def __init__(self, homecontrol, device_instance, element_uid): + """Initialize a devolo multi level sensor.""" + self._multi_level_sensor_property = device_instance.multi_level_sensor_property[ + element_uid + ] + + self._state = self._multi_level_sensor_property.value + + self._device_class = DEVICE_CLASS_MAPPING.get( + self._multi_level_sensor_property.sensor_type + ) + self._unit = self._multi_level_sensor_property.unit + + super().__init__( + homecontrol=homecontrol, + device_instance=device_instance, + element_uid=element_uid, + name=f"{device_instance.itemName} {self._multi_level_sensor_property.sensor_type}", + sync=self._sync, + ) + + @property + def device_class(self) -> str: + """Return device class.""" + return self._device_class + + @property + def state(self): + """Return the state of the sensor.""" + return self._state + + @property + def unit_of_measurement(self): + """Return the unit of measurement of this entity.""" + return self._unit + + def _sync(self, message=None): + """Update the multi level sensor state.""" + if message[0].startswith("devolo.MultiLevelSensor"): + self._state = self._device_instance.multi_level_sensor_property[ + message[0] + ].value + elif message[0].startswith("hdm"): + self._available = self._device_instance.is_online() + else: + _LOGGER.debug("No valid message received: %s", message) + self.schedule_update_ha_state() diff --git a/homeassistant/components/devolo_home_control/subscriber.py b/homeassistant/components/devolo_home_control/subscriber.py new file mode 100644 index 00000000000..d291e4b174f --- /dev/null +++ b/homeassistant/components/devolo_home_control/subscriber.py @@ -0,0 +1,19 @@ +"""Subscriber for devolo home control API publisher.""" + +import logging + +_LOGGER = logging.getLogger(__name__) + + +class Subscriber: + """Subscriber class for the publisher in mprm websocket class.""" + + def __init__(self, name, callback): + """Initiate the subscriber.""" + self.name = name + self.callback = callback + + def update(self, message): + """Trigger hass to update the device.""" + _LOGGER.debug('%s got message "%s"', self.name, message) + self.callback(message) diff --git a/homeassistant/components/dialogflow/translations/nn.json b/homeassistant/components/dialogflow/translations/nn.json deleted file mode 100644 index 81b7a05690d..00000000000 --- a/homeassistant/components/dialogflow/translations/nn.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "title": "Dialogflow" -} \ No newline at end of file diff --git a/homeassistant/components/discovery/__init__.py b/homeassistant/components/discovery/__init__.py index afcf8cc341d..1c2f816ad40 100644 --- a/homeassistant/components/discovery/__init__.py +++ b/homeassistant/components/discovery/__init__.py @@ -47,6 +47,7 @@ SERVICE_XIAOMI_GW = "xiaomi_gw" CONFIG_ENTRY_HANDLERS = { SERVICE_DAIKIN: "daikin", SERVICE_TELLDUSLIVE: "tellduslive", + "logitech_mediaserver": "squeezebox", } SERVICE_HANDLERS = { @@ -57,7 +58,6 @@ SERVICE_HANDLERS = { SERVICE_APPLE_TV: ("apple_tv", None), SERVICE_ENIGMA2: ("media_player", "enigma2"), SERVICE_WINK: ("wink", None), - SERVICE_XIAOMI_GW: ("xiaomi_aqara", None), SERVICE_SABNZBD: ("sabnzbd", None), SERVICE_SAMSUNG_PRINTER: ("sensor", "syncthru"), SERVICE_KONNECTED: ("konnected", None), @@ -65,8 +65,6 @@ SERVICE_HANDLERS = { SERVICE_FREEBOX: ("freebox", None), SERVICE_YEELIGHT: ("yeelight", None), "yamaha": ("media_player", "yamaha"), - "logitech_mediaserver": ("media_player", "squeezebox"), - 
"denonavr": ("media_player", "denonavr"), "frontier_silicon": ("media_player", "frontier_silicon"), "openhome": ("media_player", "openhome"), "bose_soundtouch": ("media_player", "soundtouch"), @@ -82,6 +80,7 @@ OPTIONAL_SERVICE_HANDLERS = {SERVICE_DLNA_DMR: ("media_player", "dlna_dmr")} MIGRATED_SERVICE_HANDLERS = [ "axis", "deconz", + "denonavr", "esphome", "google_cast", SERVICE_HEOS, @@ -92,6 +91,7 @@ MIGRATED_SERVICE_HANDLERS = [ "sonos", "songpal", SERVICE_WEMO, + SERVICE_XIAOMI_GW, ] DEFAULT_ENABLED = ( diff --git a/homeassistant/components/discovery/manifest.json b/homeassistant/components/discovery/manifest.json index 89d800a1c36..4b716b604f1 100644 --- a/homeassistant/components/discovery/manifest.json +++ b/homeassistant/components/discovery/manifest.json @@ -2,7 +2,7 @@ "domain": "discovery", "name": "Discovery", "documentation": "https://www.home-assistant.io/integrations/discovery", - "requirements": ["netdisco==2.7.0"], + "requirements": ["netdisco==2.7.1"], "after_dependencies": ["zeroconf"], "codeowners": [], "quality_scale": "internal" diff --git a/homeassistant/components/doorbird/translations/ca.json b/homeassistant/components/doorbird/translations/ca.json index e01e31e0f0a..1639b471d4d 100644 --- a/homeassistant/components/doorbird/translations/ca.json +++ b/homeassistant/components/doorbird/translations/ca.json @@ -29,7 +29,7 @@ "data": { "events": "Llista d'esdeveniments separats per comes." }, - "description": "Afegeix el/s noms del/s esdeveniment/s que vulguis seguir separats per comes. Despr\u00e9s d'introduir-los, utilitzeu l'aplicaci\u00f3 de DoorBird per assignar-los a un esdeveniment espec\u00edfic. Consulta la documentaci\u00f3 a https://www.home-assistant.io/integrations/doorbird/#events.\nExemple: algu_ha_premut_el_boto, moviment_detectat" + "description": "Afegeix el/s noms del/s esdeveniment/s que vulguis seguir separats per comes. Despr\u00e9s d'introduir-los, utilitza l'aplicaci\u00f3 de DoorBird per assignar-los a un esdeveniment espec\u00edfic. 
Consulta la documentaci\u00f3 a https://www.home-assistant.io/integrations/doorbird/#events.\nExemple: algu_ha_premut_el_boto, moviment_detectat" } } } diff --git a/homeassistant/components/doorbird/translations/no.json b/homeassistant/components/doorbird/translations/no.json index f7d126b1bc7..8f3a580e43e 100644 --- a/homeassistant/components/doorbird/translations/no.json +++ b/homeassistant/components/doorbird/translations/no.json @@ -13,7 +13,7 @@ "step": { "user": { "data": { - "host": "Vert (IP-adresse)", + "host": "Vert", "name": "Enhetsnavn", "password": "Passord", "username": "Brukernavn" diff --git a/homeassistant/components/dynalite/const.py b/homeassistant/components/dynalite/const.py index 82d66dba7ba..e5a4e90d1bd 100644 --- a/homeassistant/components/dynalite/const.py +++ b/homeassistant/components/dynalite/const.py @@ -1,7 +1,6 @@ """Constants for the Dynalite component.""" import logging -from homeassistant.components.cover import DEVICE_CLASS_SHUTTER from homeassistant.const import CONF_ROOM LOGGER = logging.getLogger(__package__) @@ -36,7 +35,6 @@ CONF_TILT_TIME = "tilt" CONF_TIME_COVER = "time_cover" DEFAULT_CHANNEL_TYPE = "light" -DEFAULT_COVER_CLASS = DEVICE_CLASS_SHUTTER DEFAULT_NAME = "dynalite" DEFAULT_PORT = 12345 DEFAULT_TEMPLATES = { diff --git a/homeassistant/components/dynalite/cover.py b/homeassistant/components/dynalite/cover.py index e44fd150f38..0673e07fc0d 100644 --- a/homeassistant/components/dynalite/cover.py +++ b/homeassistant/components/dynalite/cover.py @@ -1,12 +1,18 @@ """Support for the Dynalite channels as covers.""" from typing import Callable -from homeassistant.components.cover import DEVICE_CLASSES, CoverEntity +from homeassistant.components.cover import ( + DEVICE_CLASS_SHUTTER, + DEVICE_CLASSES, + CoverEntity, +) from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant, callback from .dynalitebase import DynaliteBase, async_setup_entry_base +DEFAULT_COVER_CLASS = DEVICE_CLASS_SHUTTER + async def async_setup_entry( hass: HomeAssistant, config_entry: ConfigEntry, async_add_entities: Callable @@ -31,8 +37,10 @@ class DynaliteCover(DynaliteBase, CoverEntity): def device_class(self) -> str: """Return the class of the device.""" dev_cls = self._device.device_class - assert dev_cls in DEVICE_CLASSES - return dev_cls + ret_val = DEFAULT_COVER_CLASS + if dev_cls in DEVICE_CLASSES: + ret_val = dev_cls + return ret_val @property def current_cover_position(self) -> int: diff --git a/homeassistant/components/dynalite/manifest.json b/homeassistant/components/dynalite/manifest.json index 581110ba583..e09410e7ef5 100644 --- a/homeassistant/components/dynalite/manifest.json +++ b/homeassistant/components/dynalite/manifest.json @@ -4,5 +4,5 @@ "config_flow": true, "documentation": "https://www.home-assistant.io/integrations/dynalite", "codeowners": ["@ziv1234"], - "requirements": ["dynalite_devices==0.1.40"] + "requirements": ["dynalite_devices==0.1.41"] } diff --git a/homeassistant/components/dyson/climate.py b/homeassistant/components/dyson/climate.py index 6b2d7cbe74c..b17b510e93f 100644 --- a/homeassistant/components/dyson/climate.py +++ b/homeassistant/components/dyson/climate.py @@ -1,19 +1,36 @@ """Support for Dyson Pure Hot+Cool link fan.""" import logging -from libpurecool.const import FocusMode, HeatMode, HeatState, HeatTarget +from libpurecool.const import ( + FanPower, + FanSpeed, + FanState, + FocusMode, + HeatMode, + HeatState, + HeatTarget, +) +from libpurecool.dyson_pure_hotcool import DysonPureHotCool 
from libpurecool.dyson_pure_hotcool_link import DysonPureHotCoolLink from libpurecool.dyson_pure_state import DysonPureHotCoolState +from libpurecool.dyson_pure_state_v2 import DysonPureHotCoolV2State from homeassistant.components.climate import ClimateEntity from homeassistant.components.climate.const import ( CURRENT_HVAC_COOL, CURRENT_HVAC_HEAT, CURRENT_HVAC_IDLE, + CURRENT_HVAC_OFF, + FAN_AUTO, FAN_DIFFUSE, FAN_FOCUS, + FAN_HIGH, + FAN_LOW, + FAN_MEDIUM, + FAN_OFF, HVAC_MODE_COOL, HVAC_MODE_HEAT, + HVAC_MODE_OFF, SUPPORT_FAN_MODE, SUPPORT_TARGET_TEMPERATURE, ) @@ -24,26 +41,53 @@ from . import DYSON_DEVICES _LOGGER = logging.getLogger(__name__) SUPPORT_FAN = [FAN_FOCUS, FAN_DIFFUSE] +SUPPORT_FAN_PCOOL = [FAN_OFF, FAN_AUTO, FAN_LOW, FAN_MEDIUM, FAN_HIGH] SUPPORT_HVAG = [HVAC_MODE_COOL, HVAC_MODE_HEAT] +SUPPORT_HVAC_PCOOL = [HVAC_MODE_COOL, HVAC_MODE_HEAT, HVAC_MODE_OFF] SUPPORT_FLAGS = SUPPORT_TARGET_TEMPERATURE | SUPPORT_FAN_MODE +DYSON_KNOWN_CLIMATE_DEVICES = "dyson_known_climate_devices" -def setup_platform(hass, config, add_devices, discovery_info=None): +SPEED_MAP = { + FanSpeed.FAN_SPEED_1.value: FAN_LOW, + FanSpeed.FAN_SPEED_2.value: FAN_LOW, + FanSpeed.FAN_SPEED_3.value: FAN_LOW, + FanSpeed.FAN_SPEED_4.value: FAN_LOW, + FanSpeed.FAN_SPEED_AUTO.value: FAN_AUTO, + FanSpeed.FAN_SPEED_5.value: FAN_MEDIUM, + FanSpeed.FAN_SPEED_6.value: FAN_MEDIUM, + FanSpeed.FAN_SPEED_7.value: FAN_MEDIUM, + FanSpeed.FAN_SPEED_8.value: FAN_HIGH, + FanSpeed.FAN_SPEED_9.value: FAN_HIGH, + FanSpeed.FAN_SPEED_10.value: FAN_HIGH, +} + + +def setup_platform(hass, config, add_entities, discovery_info=None): """Set up the Dyson fan components.""" if discovery_info is None: return - # Get Dyson Devices from parent component. - add_devices( - [ - DysonPureHotCoolLinkDevice(device) - for device in hass.data[DYSON_DEVICES] - if isinstance(device, DysonPureHotCoolLink) - ] - ) + known_devices = hass.data.setdefault(DYSON_KNOWN_CLIMATE_DEVICES, set()) + + # Get Dyson Devices from parent component + new_entities = [] + + for device in hass.data[DYSON_DEVICES]: + if device.serial not in known_devices: + if isinstance(device, DysonPureHotCool): + dyson_entity = DysonPureHotCoolEntity(device) + new_entities.append(dyson_entity) + known_devices.add(device.serial) + elif isinstance(device, DysonPureHotCoolLink): + dyson_entity = DysonPureHotCoolLinkEntity(device) + new_entities.append(dyson_entity) + known_devices.add(device.serial) + + add_entities(new_entities) -class DysonPureHotCoolLinkDevice(ClimateEntity): +class DysonPureHotCoolLinkEntity(ClimateEntity): """Representation of a Dyson climate fan.""" def __init__(self, device): @@ -57,11 +101,11 @@ class DysonPureHotCoolLinkDevice(ClimateEntity): def on_message(self, message): """Call when new messages received from the climate.""" - if not isinstance(message, DysonPureHotCoolState): - return - - _LOGGER.debug("Message received for climate device %s : %s", self.name, message) - self.schedule_update_ha_state() + if isinstance(message, DysonPureHotCoolState): + _LOGGER.debug( + "Message received for climate device %s : %s", self.name, message + ) + self.schedule_update_ha_state() @property def should_poll(self): @@ -188,3 +232,164 @@ class DysonPureHotCoolLinkDevice(ClimateEntity): def max_temp(self): """Return the maximum temperature.""" return 37 + + +class DysonPureHotCoolEntity(ClimateEntity): + """Representation of a Dyson climate hot+cool fan.""" + + def __init__(self, device): + """Initialize the fan.""" + self._device = device + + async def 
async_added_to_hass(self): + """Call when entity is added to hass.""" + self.hass.async_add_executor_job( + self._device.add_message_listener, self.on_message + ) + + def on_message(self, message): + """Call when new messages received from the climate device.""" + if isinstance(message, DysonPureHotCoolV2State): + _LOGGER.debug( + "Message received for climate device %s : %s", self.name, message + ) + self.schedule_update_ha_state() + + @property + def should_poll(self): + """No polling needed.""" + return False + + @property + def supported_features(self): + """Return the list of supported features.""" + return SUPPORT_FLAGS + + @property + def name(self): + """Return the display name of this climate.""" + return self._device.name + + @property + def temperature_unit(self): + """Return the unit of measurement.""" + return TEMP_CELSIUS + + @property + def current_temperature(self): + """Return the current temperature.""" + if self._device.environmental_state.temperature is not None: + temperature_kelvin = self._device.environmental_state.temperature + if temperature_kelvin != 0: + return float("{:.1f}".format(temperature_kelvin - 273)) + return None + + @property + def target_temperature(self): + """Return the target temperature.""" + heat_target = int(self._device.state.heat_target) / 10 + return int(heat_target - 273) + + @property + def current_humidity(self): + """Return the current humidity.""" + if self._device.environmental_state.humidity is not None: + if self._device.environmental_state.humidity != 0: + return self._device.environmental_state.humidity + return None + + @property + def hvac_mode(self): + """Return hvac operation ie. heat, cool mode. + + Need to be one of HVAC_MODE_*. + """ + if self._device.state.fan_power == FanPower.POWER_OFF.value: + return HVAC_MODE_OFF + if self._device.state.heat_mode == HeatMode.HEAT_ON.value: + return HVAC_MODE_HEAT + return HVAC_MODE_COOL + + @property + def hvac_modes(self): + """Return the list of available hvac operation modes. + + Need to be a subset of HVAC_MODES. + """ + return SUPPORT_HVAC_PCOOL + + @property + def hvac_action(self): + """Return the current running hvac operation if supported. + + Need to be one of CURRENT_HVAC_*. + """ + if self._device.state.fan_power == FanPower.POWER_OFF.value: + return CURRENT_HVAC_OFF + if self._device.state.heat_mode == HeatMode.HEAT_ON.value: + if self._device.state.heat_state == HeatState.HEAT_STATE_ON.value: + return CURRENT_HVAC_HEAT + return CURRENT_HVAC_IDLE + return CURRENT_HVAC_COOL + + @property + def fan_mode(self): + """Return the fan setting.""" + if self._device.state.fan_state == FanState.FAN_OFF.value: + return FAN_OFF + + return SPEED_MAP[self._device.state.speed] + + @property + def fan_modes(self): + """Return the list of available fan modes.""" + return SUPPORT_FAN_PCOOL + + def set_temperature(self, **kwargs): + """Set new target temperature.""" + target_temp = kwargs.get(ATTR_TEMPERATURE) + if target_temp is None: + _LOGGER.error("Missing target temperature %s", kwargs) + return + target_temp = int(target_temp) + _LOGGER.debug("Set %s temperature %s", self.name, target_temp) + # Limit the target temperature into acceptable range. 
+ target_temp = min(self.max_temp, target_temp) + target_temp = max(self.min_temp, target_temp) + self._device.set_heat_target(HeatTarget.celsius(target_temp)) + + def set_fan_mode(self, fan_mode): + """Set new fan mode.""" + _LOGGER.debug("Set %s focus mode %s", self.name, fan_mode) + if fan_mode == FAN_OFF: + self._device.turn_off() + elif fan_mode == FAN_LOW: + self._device.set_fan_speed(FanSpeed.FAN_SPEED_4) + elif fan_mode == FAN_MEDIUM: + self._device.set_fan_speed(FanSpeed.FAN_SPEED_7) + elif fan_mode == FAN_HIGH: + self._device.set_fan_speed(FanSpeed.FAN_SPEED_10) + elif fan_mode == FAN_AUTO: + self._device.set_fan_speed(FanSpeed.FAN_SPEED_AUTO) + + def set_hvac_mode(self, hvac_mode): + """Set new target hvac mode.""" + _LOGGER.debug("Set %s heat mode %s", self.name, hvac_mode) + if hvac_mode == HVAC_MODE_OFF: + self._device.turn_off() + elif self._device.state.fan_power == FanPower.POWER_OFF.value: + self._device.turn_on() + if hvac_mode == HVAC_MODE_HEAT: + self._device.enable_heat_mode() + elif hvac_mode == HVAC_MODE_COOL: + self._device.disable_heat_mode() + + @property + def min_temp(self): + """Return the minimum temperature.""" + return 1 + + @property + def max_temp(self): + """Return the maximum temperature.""" + return 37 diff --git a/homeassistant/components/ebusd/const.py b/homeassistant/components/ebusd/const.py index 10ed0b68e87..6e3e25bc756 100644 --- a/homeassistant/components/ebusd/const.py +++ b/homeassistant/components/ebusd/const.py @@ -40,6 +40,7 @@ SENSOR_TYPES = { ], "HolidayTemperature": ["HolidayTemp", TEMP_CELSIUS, "mdi:thermometer", 0], "HWTemperatureDesired": ["HwcTempDesired", TEMP_CELSIUS, "mdi:thermometer", 0], + "HWActualTemperature": ["HwcStorageTemp", TEMP_CELSIUS, "mdi:thermometer", 0], "HWTimerMonday": ["hwcTimer.Monday", None, "mdi:timer", 1], "HWTimerTuesday": ["hwcTimer.Tuesday", None, "mdi:timer", 1], "HWTimerWednesday": ["hwcTimer.Wednesday", None, "mdi:timer", 1], @@ -47,6 +48,7 @@ SENSOR_TYPES = { "HWTimerFriday": ["hwcTimer.Friday", None, "mdi:timer", 1], "HWTimerSaturday": ["hwcTimer.Saturday", None, "mdi:timer", 1], "HWTimerSunday": ["hwcTimer.Sunday", None, "mdi:timer", 1], + "HWOperativeMode": ["HwcOpMode", None, "mdi:math-compass", 3], "WaterPressure": ["WaterPressure", PRESSURE_BAR, "mdi:water-pump", 0], "Zone1RoomZoneMapping": ["z1RoomZoneMapping", None, "mdi:label", 0], "Zone1NightTemperature": ["z1NightTemp", TEMP_CELSIUS, "mdi:weather-night", 0], diff --git a/homeassistant/components/ebusd/translations/ebusd.en.json b/homeassistant/components/ebusd/translations/ebusd.en.json deleted file mode 100644 index 16ab79fc582..00000000000 --- a/homeassistant/components/ebusd/translations/ebusd.en.json +++ /dev/null @@ -1,7 +0,0 @@ -{ - "state": { - "day": "Day", - "night": "Night", - "auto": "Automatic" - } -} \ No newline at end of file diff --git a/homeassistant/components/ebusd/translations/ebusd.it.json b/homeassistant/components/ebusd/translations/ebusd.it.json deleted file mode 100644 index d0b95daaafa..00000000000 --- a/homeassistant/components/ebusd/translations/ebusd.it.json +++ /dev/null @@ -1,7 +0,0 @@ -{ - "state": { - "day": "Giorno", - "night": "Notte", - "auto": "Automatico" - } -} \ No newline at end of file diff --git a/homeassistant/components/ecobee/manifest.json b/homeassistant/components/ecobee/manifest.json index f25bdca2fe6..38d6b4577b6 100644 --- a/homeassistant/components/ecobee/manifest.json +++ b/homeassistant/components/ecobee/manifest.json @@ -3,6 +3,6 @@ "name": "ecobee", "config_flow": true, 
"documentation": "https://www.home-assistant.io/integrations/ecobee", - "requirements": ["python-ecobee-api==0.2.5"], + "requirements": ["python-ecobee-api==0.2.7"], "codeowners": ["@marthoc"] } diff --git a/homeassistant/components/ecobee/translations/nn.json b/homeassistant/components/ecobee/translations/nn.json deleted file mode 100644 index b23da4e97d1..00000000000 --- a/homeassistant/components/ecobee/translations/nn.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "title": "ecobee" -} \ No newline at end of file diff --git a/homeassistant/components/elgato/translations/no.json b/homeassistant/components/elgato/translations/no.json index ebd10d6aa0e..54b84966cdc 100644 --- a/homeassistant/components/elgato/translations/no.json +++ b/homeassistant/components/elgato/translations/no.json @@ -11,8 +11,8 @@ "step": { "user": { "data": { - "host": "Vert eller IP-adresse", - "port": "Portnummer" + "host": "Vert", + "port": "Port" }, "description": "Sett opp Elgato Key Light for \u00e5 integrere med Home Assistant." }, diff --git a/homeassistant/components/elkm1/translations/no.json b/homeassistant/components/elkm1/translations/no.json index 6870ca4926d..d00c4ab2eef 100644 --- a/homeassistant/components/elkm1/translations/no.json +++ b/homeassistant/components/elkm1/translations/no.json @@ -13,11 +13,11 @@ "user": { "data": { "address": "IP-adressen eller domenet eller seriell port hvis du kobler til via seriell.", - "password": "Passord (bare sikkert).", + "password": "Passord", "prefix": "Et unikt prefiks (la v\u00e6re tomt hvis du bare har en ElkM1).", "protocol": "protokoll", "temperature_unit": "Temperaturenheten ElkM1 bruker.", - "username": "Brukernavn (bare sikkert)." + "username": "Brukernavn" }, "description": "Adressestrengen m\u00e5 v\u00e6re i formen 'adresse [: port]' for 'sikker' og 'ikke-sikker'. Eksempel: '192.168.1.1'. Porten er valgfri og er standard til 2101 for 'ikke-sikker' og 2601 for 'sikker'. For den serielle protokollen m\u00e5 adressen v\u00e6re i formen 'tty [: baud]'. Eksempel: '/ dev / ttyS1'. 
Baud er valgfri og er standard til 115200.", "title": "Koble til Elk-M1-kontroll" diff --git a/homeassistant/components/emulated_hue/hue_api.py b/homeassistant/components/emulated_hue/hue_api.py index 069a4b60d0c..15d3f323092 100644 --- a/homeassistant/components/emulated_hue/hue_api.py +++ b/homeassistant/components/emulated_hue/hue_api.py @@ -1,4 +1,5 @@ """Support for a Hue API to control Home Assistant.""" +import asyncio import hashlib import logging @@ -223,14 +224,7 @@ class HueFullStateView(HomeAssistantView): json_response = { "lights": create_list_of_entities(self.config, request), - "config": { - "mac": "00:00:00:00:00:00", - "swversion": "01003542", - "apiversion": "1.17.0", - "whitelist": {HUE_API_USERNAME: {"name": "HASS BRIDGE"}}, - "ipaddress": f"{self.config.advertise_ip}:{self.config.advertise_port}", - "linkbutton": True, - }, + "config": create_config_model(self.config, request), } return self.json(json_response) @@ -255,14 +249,7 @@ class HueConfigView(HomeAssistantView): if username != HUE_API_USERNAME: return self.json(UNAUTHORIZED_USER) - json_response = { - "mac": "00:00:00:00:00:00", - "swversion": "01003542", - "apiversion": "1.17.0", - "whitelist": {HUE_API_USERNAME: {"name": "HASS BRIDGE"}}, - "ipaddress": f"{self.config.advertise_ip}:{self.config.advertise_port}", - "linkbutton": True, - } + json_response = create_config_model(self.config, request) return self.json(json_response) @@ -555,6 +542,10 @@ class HueOneLightChangeView(HomeAssistantView): create_hue_success_response(entity_number, val, parsed[key]) ) + # Echo fetches the state immediately after the PUT method returns. + # Waiting for a short time allows the changes to propagate. + await asyncio.sleep(0.25) + return self.json(json_response) @@ -751,6 +742,18 @@ def create_hue_success_response(entity_number, attr, value): return {"success": {success_key: value}} +def create_config_model(config, request): + """Create a config resource.""" + return { + "mac": "00:00:00:00:00:00", + "swversion": "01003542", + "apiversion": "1.17.0", + "whitelist": {HUE_API_USERNAME: {"name": "HASS BRIDGE"}}, + "ipaddress": f"{config.advertise_ip}:{config.advertise_port}", + "linkbutton": True, + } + + def create_list_of_entities(config, request): """Create a list of all entities.""" hass = request.app["hass"] diff --git a/homeassistant/components/environment_canada/manifest.json b/homeassistant/components/environment_canada/manifest.json index 1fd4d19e370..463db053beb 100644 --- a/homeassistant/components/environment_canada/manifest.json +++ b/homeassistant/components/environment_canada/manifest.json @@ -2,6 +2,6 @@ "domain": "environment_canada", "name": "Environment Canada", "documentation": "https://www.home-assistant.io/integrations/environment_canada", - "requirements": ["env_canada==0.0.38"], + "requirements": ["env_canada==0.0.39"], "codeowners": ["@michaeldavie"] } diff --git a/homeassistant/components/environment_canada/weather.py b/homeassistant/components/environment_canada/weather.py index 10666b4a34e..7bc614bd09e 100644 --- a/homeassistant/components/environment_canada/weather.py +++ b/homeassistant/components/environment_canada/weather.py @@ -8,6 +8,7 @@ import voluptuous as vol from homeassistant.components.weather import ( ATTR_FORECAST_CONDITION, + ATTR_FORECAST_PRECIPITATION_PROBABILITY, ATTR_FORECAST_TEMP, ATTR_FORECAST_TEMP_LOW, ATTR_FORECAST_TIME, @@ -183,6 +184,9 @@ def get_forecast(ec_data, forecast_type): ATTR_FORECAST_CONDITION: icon_code_to_condition( int(half_days[0]["icon_code"]) ), + 
ATTR_FORECAST_PRECIPITATION_PROBABILITY: int( + half_days[0]["precip_probability"] + ), } ) half_days = half_days[2:] @@ -200,6 +204,9 @@ def get_forecast(ec_data, forecast_type): ATTR_FORECAST_CONDITION: icon_code_to_condition( int(half_days[high]["icon_code"]) ), + ATTR_FORECAST_PRECIPITATION_PROBABILITY: int( + half_days[high]["precip_probability"] + ), } ) @@ -215,6 +222,9 @@ def get_forecast(ec_data, forecast_type): ATTR_FORECAST_CONDITION: icon_code_to_condition( int(hours[hour]["icon_code"]) ), + ATTR_FORECAST_PRECIPITATION_PROBABILITY: int( + hours[hour]["precip_probability"] + ), } ) diff --git a/homeassistant/components/esphome/config_flow.py b/homeassistant/components/esphome/config_flow.py index cb9b7958efa..5c35909088d 100644 --- a/homeassistant/components/esphome/config_flow.py +++ b/homeassistant/components/esphome/config_flow.py @@ -29,7 +29,7 @@ class EsphomeFlowHandler(ConfigFlow, domain=DOMAIN): async def async_step_user( self, user_input: Optional[ConfigType] = None, error: Optional[str] = None - ): + ): # pylint: disable=arguments-differ """Handle a flow initialized by the user.""" if user_input is not None: return await self._async_authenticate_or_add(user_input) @@ -100,10 +100,10 @@ class EsphomeFlowHandler(ConfigFlow, domain=DOMAIN): for entry in self._async_current_entries(): already_configured = False - if ( - entry.data[CONF_HOST] == address - or entry.data[CONF_HOST] == discovery_info[CONF_HOST] - ): + if CONF_HOST in entry.data and entry.data[CONF_HOST] in [ + address, + discovery_info[CONF_HOST], + ]: # Is this address or IP address already configured? already_configured = True elif entry.entry_id in self.hass.data.get(DATA_KEY, {}): diff --git a/homeassistant/components/esphome/translations/af.json b/homeassistant/components/esphome/translations/af.json deleted file mode 100644 index 9d6d417a053..00000000000 --- a/homeassistant/components/esphome/translations/af.json +++ /dev/null @@ -1,9 +0,0 @@ -{ - "config": { - "step": { - "user": { - "title": "[%key:component::esphome::title%]" - } - } - } -} \ No newline at end of file diff --git a/homeassistant/components/esphome/translations/ar.json b/homeassistant/components/esphome/translations/ar.json deleted file mode 100644 index 9d6d417a053..00000000000 --- a/homeassistant/components/esphome/translations/ar.json +++ /dev/null @@ -1,9 +0,0 @@ -{ - "config": { - "step": { - "user": { - "title": "[%key:component::esphome::title%]" - } - } - } -} \ No newline at end of file diff --git a/homeassistant/components/esphome/translations/bs.json b/homeassistant/components/esphome/translations/bs.json deleted file mode 100644 index 9d6d417a053..00000000000 --- a/homeassistant/components/esphome/translations/bs.json +++ /dev/null @@ -1,9 +0,0 @@ -{ - "config": { - "step": { - "user": { - "title": "[%key:component::esphome::title%]" - } - } - } -} \ No newline at end of file diff --git a/homeassistant/components/esphome/translations/cy.json b/homeassistant/components/esphome/translations/cy.json deleted file mode 100644 index 9d6d417a053..00000000000 --- a/homeassistant/components/esphome/translations/cy.json +++ /dev/null @@ -1,9 +0,0 @@ -{ - "config": { - "step": { - "user": { - "title": "[%key:component::esphome::title%]" - } - } - } -} \ No newline at end of file diff --git a/homeassistant/components/esphome/translations/el.json b/homeassistant/components/esphome/translations/el.json deleted file mode 100644 index 9d6d417a053..00000000000 --- a/homeassistant/components/esphome/translations/el.json +++ /dev/null @@ -1,9 
+0,0 @@ -{ - "config": { - "step": { - "user": { - "title": "[%key:component::esphome::title%]" - } - } - } -} \ No newline at end of file diff --git a/homeassistant/components/esphome/translations/eo.json b/homeassistant/components/esphome/translations/eo.json deleted file mode 100644 index 9d6d417a053..00000000000 --- a/homeassistant/components/esphome/translations/eo.json +++ /dev/null @@ -1,9 +0,0 @@ -{ - "config": { - "step": { - "user": { - "title": "[%key:component::esphome::title%]" - } - } - } -} \ No newline at end of file diff --git a/homeassistant/components/esphome/translations/et.json b/homeassistant/components/esphome/translations/et.json deleted file mode 100644 index 9d6d417a053..00000000000 --- a/homeassistant/components/esphome/translations/et.json +++ /dev/null @@ -1,9 +0,0 @@ -{ - "config": { - "step": { - "user": { - "title": "[%key:component::esphome::title%]" - } - } - } -} \ No newline at end of file diff --git a/homeassistant/components/esphome/translations/eu.json b/homeassistant/components/esphome/translations/eu.json deleted file mode 100644 index 9d6d417a053..00000000000 --- a/homeassistant/components/esphome/translations/eu.json +++ /dev/null @@ -1,9 +0,0 @@ -{ - "config": { - "step": { - "user": { - "title": "[%key:component::esphome::title%]" - } - } - } -} \ No newline at end of file diff --git a/homeassistant/components/esphome/translations/fa.json b/homeassistant/components/esphome/translations/fa.json deleted file mode 100644 index 9d6d417a053..00000000000 --- a/homeassistant/components/esphome/translations/fa.json +++ /dev/null @@ -1,9 +0,0 @@ -{ - "config": { - "step": { - "user": { - "title": "[%key:component::esphome::title%]" - } - } - } -} \ No newline at end of file diff --git a/homeassistant/components/esphome/translations/gsw.json b/homeassistant/components/esphome/translations/gsw.json deleted file mode 100644 index 9d6d417a053..00000000000 --- a/homeassistant/components/esphome/translations/gsw.json +++ /dev/null @@ -1,9 +0,0 @@ -{ - "config": { - "step": { - "user": { - "title": "[%key:component::esphome::title%]" - } - } - } -} \ No newline at end of file diff --git a/homeassistant/components/esphome/translations/he.json b/homeassistant/components/esphome/translations/he.json deleted file mode 100644 index 9d6d417a053..00000000000 --- a/homeassistant/components/esphome/translations/he.json +++ /dev/null @@ -1,9 +0,0 @@ -{ - "config": { - "step": { - "user": { - "title": "[%key:component::esphome::title%]" - } - } - } -} \ No newline at end of file diff --git a/homeassistant/components/esphome/translations/hi.json b/homeassistant/components/esphome/translations/hi.json deleted file mode 100644 index 9d6d417a053..00000000000 --- a/homeassistant/components/esphome/translations/hi.json +++ /dev/null @@ -1,9 +0,0 @@ -{ - "config": { - "step": { - "user": { - "title": "[%key:component::esphome::title%]" - } - } - } -} \ No newline at end of file diff --git a/homeassistant/components/esphome/translations/hr.json b/homeassistant/components/esphome/translations/hr.json deleted file mode 100644 index 9d6d417a053..00000000000 --- a/homeassistant/components/esphome/translations/hr.json +++ /dev/null @@ -1,9 +0,0 @@ -{ - "config": { - "step": { - "user": { - "title": "[%key:component::esphome::title%]" - } - } - } -} \ No newline at end of file diff --git a/homeassistant/components/esphome/translations/iba.json b/homeassistant/components/esphome/translations/iba.json deleted file mode 100644 index 9d6d417a053..00000000000 --- 
a/homeassistant/components/esphome/translations/iba.json +++ /dev/null @@ -1,9 +0,0 @@ -{ - "config": { - "step": { - "user": { - "title": "[%key:component::esphome::title%]" - } - } - } -} \ No newline at end of file diff --git a/homeassistant/components/esphome/translations/is.json b/homeassistant/components/esphome/translations/is.json deleted file mode 100644 index 9d6d417a053..00000000000 --- a/homeassistant/components/esphome/translations/is.json +++ /dev/null @@ -1,9 +0,0 @@ -{ - "config": { - "step": { - "user": { - "title": "[%key:component::esphome::title%]" - } - } - } -} \ No newline at end of file diff --git a/homeassistant/components/esphome/translations/ja.json b/homeassistant/components/esphome/translations/ja.json deleted file mode 100644 index 9d6d417a053..00000000000 --- a/homeassistant/components/esphome/translations/ja.json +++ /dev/null @@ -1,9 +0,0 @@ -{ - "config": { - "step": { - "user": { - "title": "[%key:component::esphome::title%]" - } - } - } -} \ No newline at end of file diff --git a/homeassistant/components/esphome/translations/lt.json b/homeassistant/components/esphome/translations/lt.json deleted file mode 100644 index 9d6d417a053..00000000000 --- a/homeassistant/components/esphome/translations/lt.json +++ /dev/null @@ -1,9 +0,0 @@ -{ - "config": { - "step": { - "user": { - "title": "[%key:component::esphome::title%]" - } - } - } -} \ No newline at end of file diff --git a/homeassistant/components/esphome/translations/lv.json b/homeassistant/components/esphome/translations/lv.json deleted file mode 100644 index 9d6d417a053..00000000000 --- a/homeassistant/components/esphome/translations/lv.json +++ /dev/null @@ -1,9 +0,0 @@ -{ - "config": { - "step": { - "user": { - "title": "[%key:component::esphome::title%]" - } - } - } -} \ No newline at end of file diff --git a/homeassistant/components/esphome/translations/ro.json b/homeassistant/components/esphome/translations/ro.json deleted file mode 100644 index 9d6d417a053..00000000000 --- a/homeassistant/components/esphome/translations/ro.json +++ /dev/null @@ -1,9 +0,0 @@ -{ - "config": { - "step": { - "user": { - "title": "[%key:component::esphome::title%]" - } - } - } -} \ No newline at end of file diff --git a/homeassistant/components/esphome/translations/sk.json b/homeassistant/components/esphome/translations/sk.json deleted file mode 100644 index 9d6d417a053..00000000000 --- a/homeassistant/components/esphome/translations/sk.json +++ /dev/null @@ -1,9 +0,0 @@ -{ - "config": { - "step": { - "user": { - "title": "[%key:component::esphome::title%]" - } - } - } -} \ No newline at end of file diff --git a/homeassistant/components/esphome/translations/sr-Latn.json b/homeassistant/components/esphome/translations/sr-Latn.json deleted file mode 100644 index 9d6d417a053..00000000000 --- a/homeassistant/components/esphome/translations/sr-Latn.json +++ /dev/null @@ -1,9 +0,0 @@ -{ - "config": { - "step": { - "user": { - "title": "[%key:component::esphome::title%]" - } - } - } -} \ No newline at end of file diff --git a/homeassistant/components/esphome/translations/sr.json b/homeassistant/components/esphome/translations/sr.json deleted file mode 100644 index 9d6d417a053..00000000000 --- a/homeassistant/components/esphome/translations/sr.json +++ /dev/null @@ -1,9 +0,0 @@ -{ - "config": { - "step": { - "user": { - "title": "[%key:component::esphome::title%]" - } - } - } -} \ No newline at end of file diff --git a/homeassistant/components/esphome/translations/ta.json 
b/homeassistant/components/esphome/translations/ta.json deleted file mode 100644 index 9d6d417a053..00000000000 --- a/homeassistant/components/esphome/translations/ta.json +++ /dev/null @@ -1,9 +0,0 @@ -{ - "config": { - "step": { - "user": { - "title": "[%key:component::esphome::title%]" - } - } - } -} \ No newline at end of file diff --git a/homeassistant/components/esphome/translations/te.json b/homeassistant/components/esphome/translations/te.json deleted file mode 100644 index 9d6d417a053..00000000000 --- a/homeassistant/components/esphome/translations/te.json +++ /dev/null @@ -1,9 +0,0 @@ -{ - "config": { - "step": { - "user": { - "title": "[%key:component::esphome::title%]" - } - } - } -} \ No newline at end of file diff --git a/homeassistant/components/esphome/translations/tr.json b/homeassistant/components/esphome/translations/tr.json deleted file mode 100644 index 9d6d417a053..00000000000 --- a/homeassistant/components/esphome/translations/tr.json +++ /dev/null @@ -1,9 +0,0 @@ -{ - "config": { - "step": { - "user": { - "title": "[%key:component::esphome::title%]" - } - } - } -} \ No newline at end of file diff --git a/homeassistant/components/esphome/translations/ur.json b/homeassistant/components/esphome/translations/ur.json deleted file mode 100644 index 9d6d417a053..00000000000 --- a/homeassistant/components/esphome/translations/ur.json +++ /dev/null @@ -1,9 +0,0 @@ -{ - "config": { - "step": { - "user": { - "title": "[%key:component::esphome::title%]" - } - } - } -} \ No newline at end of file diff --git a/homeassistant/components/esphome/translations/vi.json b/homeassistant/components/esphome/translations/vi.json deleted file mode 100644 index 9d6d417a053..00000000000 --- a/homeassistant/components/esphome/translations/vi.json +++ /dev/null @@ -1,9 +0,0 @@ -{ - "config": { - "step": { - "user": { - "title": "[%key:component::esphome::title%]" - } - } - } -} \ No newline at end of file diff --git a/homeassistant/components/fitbit/sensor.py b/homeassistant/components/fitbit/sensor.py index 7e713505a0e..d3f33832369 100644 --- a/homeassistant/components/fitbit/sensor.py +++ b/homeassistant/components/fitbit/sensor.py @@ -16,6 +16,7 @@ from homeassistant.const import ( CONF_CLIENT_ID, CONF_CLIENT_SECRET, CONF_UNIT_SYSTEM, + LENGTH_FEET, MASS_KILOGRAMS, MASS_MILLIGRAMS, TIME_MILLISECONDS, @@ -117,7 +118,7 @@ FITBIT_MEASUREMENTS = { "en_US": { "duration": TIME_MILLISECONDS, "distance": "mi", - "elevation": "ft", + "elevation": LENGTH_FEET, "height": "in", "weight": "lbs", "body": "in", diff --git a/homeassistant/components/flick_electric/config_flow.py b/homeassistant/components/flick_electric/config_flow.py index 2106a6f8d62..8e6020ebd8a 100644 --- a/homeassistant/components/flick_electric/config_flow.py +++ b/homeassistant/components/flick_electric/config_flow.py @@ -55,7 +55,7 @@ class FlickConfigFlow(config_entries.ConfigFlow, domain=DOMAIN): else: return token is not None - async def async_step_user(self, user_input): + async def async_step_user(self, user_input=None): """Handle gathering login info.""" errors = {} if user_input is not None: diff --git a/homeassistant/components/forked_daapd/config_flow.py b/homeassistant/components/forked_daapd/config_flow.py index 07eaaf4c3fe..d27c40af316 100644 --- a/homeassistant/components/forked_daapd/config_flow.py +++ b/homeassistant/components/forked_daapd/config_flow.py @@ -156,14 +156,18 @@ class ForkedDaapdFlowHandler(config_entries.ConfigFlow, domain=DOMAIN): async def async_step_zeroconf(self, discovery_info): """Prepare 
configuration for a discovered forked-daapd device.""" - if not ( - discovery_info.get("properties") - and int(discovery_info["properties"].get("mtd-version", "0").split(".")[0]) - >= 27 - and discovery_info["properties"].get("Machine Name") + version_num = 0 + if discovery_info.get("properties") and discovery_info["properties"].get( + "Machine Name" ): + try: + version_num = int( + discovery_info["properties"].get("mtd-version", "0").split(".")[0] + ) + except ValueError: + pass + if version_num < 27: return self.async_abort(reason="not_forked_daapd") - await self.async_set_unique_id(discovery_info["properties"]["Machine Name"]) self._abort_if_unique_id_configured() diff --git a/homeassistant/components/forked_daapd/translations/de.json b/homeassistant/components/forked_daapd/translations/de.json index 4a82bf666cd..a3cdc53c52a 100644 --- a/homeassistant/components/forked_daapd/translations/de.json +++ b/homeassistant/components/forked_daapd/translations/de.json @@ -6,7 +6,8 @@ "error": { "unknown_error": "Unbekannter Fehler", "wrong_host_or_port": "Verbindung konnte nicht hergestellt werden. Bitte \u00fcberpr\u00fcfen Sie Host und Port.", - "wrong_password": "Ung\u00fcltiges Passwort" + "wrong_password": "Ung\u00fcltiges Passwort", + "wrong_server_type": "F\u00fcr die forked-daapd Integration ist ein forked-daapd Server mit der Version > = 27.0 erforderlich." }, "step": { "user": { diff --git a/homeassistant/components/fortigate/__init__.py b/homeassistant/components/fortigate/__init__.py deleted file mode 100644 index 2dbd7ef45c0..00000000000 --- a/homeassistant/components/fortigate/__init__.py +++ /dev/null @@ -1,79 +0,0 @@ -"""Fortigate integration.""" -import logging - -from pyFGT.fortigate import FGTConnectionError, FortiGate -import voluptuous as vol - -from homeassistant.const import ( - CONF_API_KEY, - CONF_DEVICES, - CONF_HOST, - CONF_USERNAME, - EVENT_HOMEASSISTANT_STOP, -) -from homeassistant.helpers import config_validation as cv -from homeassistant.helpers.discovery import async_load_platform - -_LOGGER = logging.getLogger(__name__) - -DOMAIN = "fortigate" - -DATA_FGT = DOMAIN - -CONFIG_SCHEMA = vol.Schema( - vol.All( - cv.deprecated(DOMAIN, invalidation_version="0.112.0"), - { - DOMAIN: vol.Schema( - { - vol.Required(CONF_HOST): cv.string, - vol.Required(CONF_USERNAME): cv.string, - vol.Required(CONF_API_KEY): cv.string, - vol.Optional(CONF_DEVICES, default=[]): vol.All( - cv.ensure_list, [cv.string] - ), - } - ) - }, - ), - extra=vol.ALLOW_EXTRA, -) - - -async def async_setup(hass, config): - """Start the Fortigate component.""" - conf = config[DOMAIN] - - host = conf[CONF_HOST] - user = conf[CONF_USERNAME] - api_key = conf[CONF_API_KEY] - devices = conf[CONF_DEVICES] - - is_success = await async_setup_fortigate(hass, config, host, user, api_key, devices) - - return is_success - - -async def async_setup_fortigate(hass, config, host, user, api_key, devices): - """Start up the Fortigate component platforms.""" - fgt = FortiGate(host, user, apikey=api_key, disable_request_warnings=True) - - try: - fgt.login() - except FGTConnectionError: - _LOGGER.error("Failed to connect to Fortigate") - return False - - hass.data[DATA_FGT] = {"fgt": fgt, "devices": devices} - - hass.async_create_task( - async_load_platform(hass, "device_tracker", DOMAIN, {}, config) - ) - - async def close_fgt(event): - """Close Fortigate connection on HA Stop.""" - fgt.logout() - - hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, close_fgt) - - return True diff --git 
a/homeassistant/components/fortigate/device_tracker.py b/homeassistant/components/fortigate/device_tracker.py deleted file mode 100644 index 23df0ee266e..00000000000 --- a/homeassistant/components/fortigate/device_tracker.py +++ /dev/null @@ -1,89 +0,0 @@ -"""Device tracker for Fortigate firewalls.""" -from collections import namedtuple -import logging - -from homeassistant.components.device_tracker import DeviceScanner - -from . import DATA_FGT - -_LOGGER = logging.getLogger(__name__) - -DETECTED_DEVICES = "/monitor/user/detected-device" - - -async def async_get_scanner(hass, config): - """Validate the configuration and return a Fortigate scanner.""" - scanner = FortigateDeviceScanner(hass.data[DATA_FGT]) - await scanner.async_connect() - return scanner if scanner.success_init else None - - -Device = namedtuple("Device", ["hostname", "mac"]) - - -def _build_device(device_dict): - """Return a Device from data.""" - return Device(device_dict["hostname"], device_dict["mac"]) - - -class FortigateDeviceScanner(DeviceScanner): - """Query the Fortigate firewall.""" - - def __init__(self, hass_data): - """Initialize the scanner.""" - self.last_results = {} - self.success_init = False - self.connection = hass_data["fgt"] - self.devices = hass_data["devices"] - - def get_results(self): - """Get the results from the Fortigate.""" - results = self.connection.get(DETECTED_DEVICES, "vdom=root")[1]["results"] - - ret = [] - for result in results: - if "hostname" not in result: - continue - - ret.append(result) - - return ret - - async def async_connect(self): - """Initialize connection to the router.""" - # Test if the firewall is accessible - data = self.get_results() - self.success_init = data is not None - - async def async_scan_devices(self): - """Scan for new devices and return a list with found device MACs.""" - await self.async_update_info() - return [device.mac for device in self.last_results] - - def get_device_name(self, device): - """Return the name of the given device or None if we don't know.""" - name = next( - (result.hostname for result in self.last_results if result.mac == device), - None, - ) - return name - - async def async_update_info(self): - """Ensure the information from the Fortigate firewall is up to date.""" - _LOGGER.debug("Checking devices") - - hosts = self.get_results() - - all_results = [_build_device(device) for device in hosts if device["is_online"]] - - # If the 'devices' configuration field is filled - if self.devices is not None: - last_results = [ - device for device in all_results if device.hostname in self.devices - ] - _LOGGER.debug(last_results) - # If the 'devices' configuration field is not filled - else: - last_results = all_results - - self.last_results = last_results diff --git a/homeassistant/components/fortigate/manifest.json b/homeassistant/components/fortigate/manifest.json deleted file mode 100644 index 395f8e05890..00000000000 --- a/homeassistant/components/fortigate/manifest.json +++ /dev/null @@ -1,7 +0,0 @@ -{ - "domain": "fortigate", - "name": "FortiGate", - "documentation": "https://www.home-assistant.io/integrations/fortigate", - "codeowners": ["@kifeo"], - "requirements": ["pyfgt==0.5.1"] -} diff --git a/homeassistant/components/freebox/config_flow.py b/homeassistant/components/freebox/config_flow.py index b2d1a0ab771..9cef6aa0c38 100644 --- a/homeassistant/components/freebox/config_flow.py +++ b/homeassistant/components/freebox/config_flow.py @@ -105,6 +105,6 @@ class FreeboxFlowHandler(config_entries.ConfigFlow, domain=DOMAIN): """Import a 
config entry.""" return await self.async_step_user(user_input) - async def async_step_discovery(self, user_input=None): + async def async_step_discovery(self, discovery_info): """Initialize step from discovery.""" - return await self.async_step_user(user_input) + return await self.async_step_user(discovery_info) diff --git a/homeassistant/components/fritzbox/config_flow.py b/homeassistant/components/fritzbox/config_flow.py index 25a81333bd6..6f4befab8ff 100644 --- a/homeassistant/components/fritzbox/config_flow.py +++ b/homeassistant/components/fritzbox/config_flow.py @@ -110,12 +110,12 @@ class FritzboxConfigFlow(config_entries.ConfigFlow, domain=DOMAIN): step_id="user", data_schema=DATA_SCHEMA_USER, errors=errors ) - async def async_step_ssdp(self, user_input): + async def async_step_ssdp(self, discovery_info): """Handle a flow initialized by discovery.""" - host = urlparse(user_input[ATTR_SSDP_LOCATION]).hostname + host = urlparse(discovery_info[ATTR_SSDP_LOCATION]).hostname self.context[CONF_HOST] = host - uuid = user_input.get(ATTR_UPNP_UDN) + uuid = discovery_info.get(ATTR_UPNP_UDN) if uuid: if uuid.startswith("uuid:"): uuid = uuid[5:] @@ -134,7 +134,7 @@ class FritzboxConfigFlow(config_entries.ConfigFlow, domain=DOMAIN): return self.async_abort(reason="already_configured") self._host = host - self._name = user_input.get(ATTR_UPNP_FRIENDLY_NAME) or host + self._name = discovery_info.get(ATTR_UPNP_FRIENDLY_NAME) or host self.context["title_placeholders"] = {"name": self._name} return await self.async_step_confirm() diff --git a/homeassistant/components/fritzbox/translations/es.json b/homeassistant/components/fritzbox/translations/es.json index 123b98ee9dc..d677acde160 100644 --- a/homeassistant/components/fritzbox/translations/es.json +++ b/homeassistant/components/fritzbox/translations/es.json @@ -2,7 +2,7 @@ "config": { "abort": { "already_configured": "Este AVM FRITZ!Box ya est\u00e1 configurado.", - "already_in_progress": "La configuraci\u00f3n del AVM FRITZ!Box ya est\u00e1 en progreso.", + "already_in_progress": "La configuraci\u00f3n del AVM FRITZ!Box ya est\u00e1 en marcha.", "not_found": "No se encontr\u00f3 ning\u00fan AVM FRITZ!Box compatible en la red.", "not_supported": "Conectado a AVM FRITZ!Box pero no es capaz de controlar dispositivos Smart Home." }, diff --git a/homeassistant/components/fritzbox/translations/no.json b/homeassistant/components/fritzbox/translations/no.json index 55518d0288a..44d8c28418b 100644 --- a/homeassistant/components/fritzbox/translations/no.json +++ b/homeassistant/components/fritzbox/translations/no.json @@ -20,7 +20,7 @@ }, "user": { "data": { - "host": "Vert eller IP-adresse", + "host": "Vert ", "password": "Passord", "username": "Brukernavn" }, diff --git a/homeassistant/components/frontend/__init__.py b/homeassistant/components/frontend/__init__.py index e5b93399c43..aec574d8d40 100644 --- a/homeassistant/components/frontend/__init__.py +++ b/homeassistant/components/frontend/__init__.py @@ -281,8 +281,13 @@ async def async_setup(hass, config): # To smooth transition to new urls, add redirects to new urls of dev tools # Added June 27, 2019. Can be removed in 2021. - for panel in ("event", "info", "service", "state", "template", "mqtt"): + for panel in ("event", "service", "state", "template"): hass.http.register_redirect(f"/dev-{panel}", f"/developer-tools/{panel}") + for panel in ("logs", "info", "mqtt"): + # Can be removed in 2021. + hass.http.register_redirect(f"/dev-{panel}", f"/config/{panel}") + # Added June 20 2020. 
Can be removed in 2022. + hass.http.register_redirect(f"/developer-tools/{panel}", f"/config/{panel}") async_register_built_in_panel( hass, diff --git a/homeassistant/components/frontend/manifest.json b/homeassistant/components/frontend/manifest.json index 5a0078b8c42..8be62aa42f6 100644 --- a/homeassistant/components/frontend/manifest.json +++ b/homeassistant/components/frontend/manifest.json @@ -2,10 +2,11 @@ "domain": "frontend", "name": "Home Assistant Frontend", "documentation": "https://www.home-assistant.io/integrations/frontend", - "requirements": ["home-assistant-frontend==20200603.3"], + "requirements": ["home-assistant-frontend==20200701.0"], "dependencies": [ "api", "auth", + "config", "device_automation", "http", "lovelace", diff --git a/homeassistant/components/generic/camera.py b/homeassistant/components/generic/camera.py index 768ef108969..91f5322ae81 100644 --- a/homeassistant/components/generic/camera.py +++ b/homeassistant/components/generic/camera.py @@ -40,7 +40,7 @@ DEFAULT_NAME = "Generic Camera" PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( { vol.Required(CONF_STILL_IMAGE_URL): cv.template, - vol.Optional(CONF_STREAM_SOURCE, default=None): vol.Any(None, cv.string), + vol.Optional(CONF_STREAM_SOURCE): cv.template, vol.Optional(CONF_AUTHENTICATION, default=HTTP_BASIC_AUTHENTICATION): vol.In( [HTTP_BASIC_AUTHENTICATION, HTTP_DIGEST_AUTHENTICATION] ), @@ -72,8 +72,10 @@ class GenericCamera(Camera): self._authentication = device_info.get(CONF_AUTHENTICATION) self._name = device_info.get(CONF_NAME) self._still_image_url = device_info[CONF_STILL_IMAGE_URL] - self._stream_source = device_info[CONF_STREAM_SOURCE] + self._stream_source = device_info.get(CONF_STREAM_SOURCE) self._still_image_url.hass = hass + if self._stream_source is not None: + self._stream_source.hass = hass self._limit_refetch = device_info[CONF_LIMIT_REFETCH_TO_URL_CHANGE] self._frame_interval = 1 / device_info[CONF_FRAMERATE] self._supported_features = SUPPORT_STREAM if self._stream_source else 0 @@ -166,4 +168,11 @@ class GenericCamera(Camera): async def stream_source(self): """Return the source of the stream.""" - return self._stream_source + if self._stream_source is None: + return None + + try: + return self._stream_source.async_render() + except TemplateError as err: + _LOGGER.error("Error parsing template %s: %s", self._stream_source, err) + return None diff --git a/homeassistant/components/generic_thermostat/climate.py b/homeassistant/components/generic_thermostat/climate.py index d7889513402..407923dc161 100644 --- a/homeassistant/components/generic_thermostat/climate.py +++ b/homeassistant/components/generic_thermostat/climate.py @@ -449,12 +449,16 @@ class GenericThermostat(ClimateEntity, RestoreEntity): async def _async_heater_turn_on(self): """Turn heater toggleable device on.""" data = {ATTR_ENTITY_ID: self.heater_entity_id} - await self.hass.services.async_call(HA_DOMAIN, SERVICE_TURN_ON, data) + await self.hass.services.async_call( + HA_DOMAIN, SERVICE_TURN_ON, data, context=self._context + ) async def _async_heater_turn_off(self): """Turn heater toggleable device off.""" data = {ATTR_ENTITY_ID: self.heater_entity_id} - await self.hass.services.async_call(HA_DOMAIN, SERVICE_TURN_OFF, data) + await self.hass.services.async_call( + HA_DOMAIN, SERVICE_TURN_OFF, data, context=self._context + ) async def async_set_preset_mode(self, preset_mode: str): """Set new preset mode.""" diff --git a/homeassistant/components/geniushub/__init__.py b/homeassistant/components/geniushub/__init__.py index 
0b99224bf7f..16967fb265a 100644 --- a/homeassistant/components/geniushub/__init__.py +++ b/homeassistant/components/geniushub/__init__.py @@ -167,6 +167,7 @@ class GeniusBroker: self.hass = hass self.client = client self._hub_uid = hub_uid + self._connect_error = False @property def hub_uid(self) -> int: @@ -178,8 +179,19 @@ class GeniusBroker: """Update the geniushub client's data.""" try: await self.client.update() - except aiohttp.ClientResponseError as err: - _LOGGER.warning("Update failed, message is: %s", err) + if self._connect_error: + self._connect_error = False + _LOGGER.warning("Connection to geniushub re-established") + except ( + aiohttp.ClientResponseError, + aiohttp.client_exceptions.ClientConnectorError, + ) as err: + if not self._connect_error: + self._connect_error = True + _LOGGER.warning( + "Connection to geniushub failed (unable to update), message is: %s", + err, + ) return self.make_debug_log_entries() @@ -240,7 +252,6 @@ class GeniusDevice(GeniusEntity): @property def device_state_attributes(self) -> Dict[str, Any]: """Return the device state attributes.""" - attrs = {} attrs["assigned_zone"] = self._device.data["assignedZones"][0]["name"] if self._last_comms: diff --git a/homeassistant/components/glances/const.py b/homeassistant/components/glances/const.py index 53dc6352049..d30dd87baf3 100644 --- a/homeassistant/components/glances/const.py +++ b/homeassistant/components/glances/const.py @@ -1,4 +1,6 @@ """Constants for Glances component.""" +import sys + from homeassistant.const import ( DATA_GIBIBYTES, DATA_MEBIBYTES, @@ -18,6 +20,11 @@ DEFAULT_SCAN_INTERVAL = 60 DATA_UPDATED = "glances_data_updated" SUPPORTED_VERSIONS = [2, 3] +if sys.maxsize > 2 ** 32: + CPU_ICON = "mdi:cpu-64-bit" +else: + CPU_ICON = "mdi:cpu-32-bit" + SENSOR_TYPES = { "disk_use_percent": ["fs", "used percent", UNIT_PERCENTAGE, "mdi:harddisk"], "disk_use": ["fs", "used", DATA_GIBIBYTES, "mdi:harddisk"], @@ -28,12 +35,12 @@ SENSOR_TYPES = { "swap_use_percent": ["memswap", "Swap used percent", UNIT_PERCENTAGE, "mdi:memory"], "swap_use": ["memswap", "Swap used", DATA_GIBIBYTES, "mdi:memory"], "swap_free": ["memswap", "Swap free", DATA_GIBIBYTES, "mdi:memory"], - "processor_load": ["load", "CPU load", "15 min", "mdi:memory"], - "process_running": ["processcount", "Running", "Count", "mdi:memory"], - "process_total": ["processcount", "Total", "Count", "mdi:memory"], - "process_thread": ["processcount", "Thread", "Count", "mdi:memory"], - "process_sleeping": ["processcount", "Sleeping", "Count", "mdi:memory"], - "cpu_use_percent": ["cpu", "CPU used", UNIT_PERCENTAGE, "mdi:memory"], + "processor_load": ["load", "CPU load", "15 min", CPU_ICON], + "process_running": ["processcount", "Running", "Count", CPU_ICON], + "process_total": ["processcount", "Total", "Count", CPU_ICON], + "process_thread": ["processcount", "Thread", "Count", CPU_ICON], + "process_sleeping": ["processcount", "Sleeping", "Count", CPU_ICON], + "cpu_use_percent": ["cpu", "CPU used", UNIT_PERCENTAGE, CPU_ICON], "sensor_temp": ["sensors", "Temp", TEMP_CELSIUS, "mdi:thermometer"], "docker_active": ["docker", "Containers active", "", "mdi:docker"], "docker_cpu_use": ["docker", "Containers CPU used", UNIT_PERCENTAGE, "mdi:docker"], diff --git a/homeassistant/components/glances/translations/nn.json b/homeassistant/components/glances/translations/nn.json deleted file mode 100644 index c392b228e89..00000000000 --- a/homeassistant/components/glances/translations/nn.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "title": "Glances" -} \ No newline at 
end of file diff --git a/homeassistant/components/gogogate2/config_flow.py b/homeassistant/components/gogogate2/config_flow.py index bca340fa62b..8c33af6af10 100644 --- a/homeassistant/components/gogogate2/config_flow.py +++ b/homeassistant/components/gogogate2/config_flow.py @@ -63,7 +63,7 @@ class Gogogate2FlowHandler(ConfigFlow, domain=DOMAIN): CONF_IP_ADDRESS, default=user_input.get(CONF_IP_ADDRESS, "") ): str, vol.Required( - CONF_USERNAME, default=user_input.get(CONF_USERNAME, "") + CONF_USERNAME, default=user_input.get(CONF_USERNAME, "admin") ): str, vol.Required( CONF_PASSWORD, default=user_input.get(CONF_PASSWORD, "") diff --git a/homeassistant/components/gogogate2/strings.json b/homeassistant/components/gogogate2/strings.json index bbd4e8d80d1..d519d9b9ea0 100644 --- a/homeassistant/components/gogogate2/strings.json +++ b/homeassistant/components/gogogate2/strings.json @@ -10,7 +10,7 @@ "step": { "user": { "title": "Setup GogoGate2", - "description": "Provide requisite information below.", + "description": "Provide requisite information below. Note: only the 'admin' user is known to work.", "data": { "ip_address": "IP Address", "username": "[%key:common::config_flow::data::username%]", diff --git a/homeassistant/components/gogogate2/translations/ca.json b/homeassistant/components/gogogate2/translations/ca.json index 43525e1870d..31c130e00c7 100644 --- a/homeassistant/components/gogogate2/translations/ca.json +++ b/homeassistant/components/gogogate2/translations/ca.json @@ -14,7 +14,7 @@ "password": "Contrasenya", "username": "Nom d'usuari" }, - "description": "Proporciona, a continuaci\u00f3, la informaci\u00f3 necess\u00e0ria.", + "description": "Proporciona, a continuaci\u00f3, la informaci\u00f3 necess\u00e0ria. Nota: sembla que nom\u00e9s funciona l'usuari administrador.", "title": "Configuraci\u00f3 de GogoGate2" } } diff --git a/homeassistant/components/gogogate2/translations/en.json b/homeassistant/components/gogogate2/translations/en.json index d5a93091d91..e587cf6c001 100644 --- a/homeassistant/components/gogogate2/translations/en.json +++ b/homeassistant/components/gogogate2/translations/en.json @@ -14,7 +14,7 @@ "password": "Password", "username": "Username" }, - "description": "Provide requisite information below.", + "description": "Provide requisite information below. Note: only the 'admin' user is known to work.", "title": "Setup GogoGate2" } } diff --git a/homeassistant/components/gogogate2/translations/ru.json b/homeassistant/components/gogogate2/translations/ru.json index 9f428658820..3f78e4aacdf 100644 --- a/homeassistant/components/gogogate2/translations/ru.json +++ b/homeassistant/components/gogogate2/translations/ru.json @@ -14,7 +14,7 @@ "password": "\u041f\u0430\u0440\u043e\u043b\u044c", "username": "\u041b\u043e\u0433\u0438\u043d" }, - "description": "\u041d\u0430\u0441\u0442\u0440\u043e\u0439\u0442\u0435 Home Assistant \u0434\u043b\u044f \u0438\u043d\u0442\u0435\u0433\u0440\u0430\u0446\u0438\u0438 \u0441 GogoGate2.", + "description": "\u041d\u0430\u0441\u0442\u0440\u043e\u0439\u0442\u0435 Home Assistant \u0434\u043b\u044f \u0438\u043d\u0442\u0435\u0433\u0440\u0430\u0446\u0438\u0438 \u0441 GogoGate2. 
\u041f\u0440\u0438\u043c\u0435\u0447\u0430\u043d\u0438\u0435: \u0438\u043d\u0442\u0435\u0433\u0440\u0430\u0446\u0438\u044f \u0431\u0443\u0434\u0435\u0442 \u0440\u0430\u0431\u043e\u0442\u0430\u0442\u044c \u0442\u043e\u043b\u044c\u043a\u043e \u0441 \u043f\u043e\u043b\u044c\u0437\u043e\u0432\u0430\u0442\u0435\u043b\u0435\u043c 'admin'.", "title": "GogoGate2" } } diff --git a/homeassistant/components/gogogate2/translations/zh-Hant.json b/homeassistant/components/gogogate2/translations/zh-Hant.json index 7ba01116084..35ae424327c 100644 --- a/homeassistant/components/gogogate2/translations/zh-Hant.json +++ b/homeassistant/components/gogogate2/translations/zh-Hant.json @@ -14,7 +14,7 @@ "password": "\u5bc6\u78bc", "username": "\u4f7f\u7528\u8005\u540d\u7a31" }, - "description": "\u8acb\u65bc\u4e0b\u65b9\u63d0\u4f9b\u6240\u9700\u8cc7\u8a0a\u3002", + "description": "\u8acb\u65bc\u4e0b\u65b9\u63d0\u4f9b\u6240\u9700\u8cc7\u8a0a\u3002\u6ce8\u610f\uff1a\u50c5\u6709 'admin' \u4f7f\u7528\u8005\u53ef\u4ee5\u6b63\u5e38\u4f7f\u7528\u3002", "title": "\u8a2d\u5b9a GogoGate2" } } diff --git a/homeassistant/components/google_assistant/trait.py b/homeassistant/components/google_assistant/trait.py index 41f980fbbdf..3ed31f35e48 100644 --- a/homeassistant/components/google_assistant/trait.py +++ b/homeassistant/components/google_assistant/trait.py @@ -988,6 +988,14 @@ class ArmDisArmTrait(_Trait): STATE_ALARM_TRIGGERED: SERVICE_ALARM_TRIGGER, } + state_to_support = { + STATE_ALARM_ARMED_HOME: alarm_control_panel.const.SUPPORT_ALARM_ARM_HOME, + STATE_ALARM_ARMED_AWAY: alarm_control_panel.const.SUPPORT_ALARM_ARM_AWAY, + STATE_ALARM_ARMED_NIGHT: alarm_control_panel.const.SUPPORT_ALARM_ARM_NIGHT, + STATE_ALARM_ARMED_CUSTOM_BYPASS: alarm_control_panel.const.SUPPORT_ALARM_ARM_CUSTOM_BYPASS, + STATE_ALARM_TRIGGERED: alarm_control_panel.const.SUPPORT_ALARM_TRIGGER, + } + @staticmethod def supported(domain, features, device_class): """Test if state is supported.""" @@ -998,11 +1006,20 @@ class ArmDisArmTrait(_Trait): """Return if the trait might ask for 2FA.""" return True + def _supported_states(self): + """Return supported states.""" + features = self.state.attributes.get(ATTR_SUPPORTED_FEATURES, 0) + return [ + state + for state, required_feature in self.state_to_support.items() + if features & required_feature != 0 + ] + def sync_attributes(self): """Return ArmDisarm attributes for a sync request.""" response = {} levels = [] - for state in self.state_to_service: + for state in self._supported_states(): # level synonyms are generated from state names # 'armed_away' becomes 'armed away' or 'away' level_synonym = [state.replace("_", " ")] @@ -1014,6 +1031,7 @@ class ArmDisArmTrait(_Trait): "level_values": [{"level_synonym": level_synonym, "lang": "en"}], } levels.append(level) + response["availableArmLevels"] = {"levels": levels, "ordered": False} return response @@ -1031,11 +1049,26 @@ class ArmDisArmTrait(_Trait): async def execute(self, command, data, params, challenge): """Execute an ArmDisarm command.""" if params["arm"] and not params.get("cancel"): - if self.state.state == params["armLevel"]: + arm_level = params.get("armLevel") + + # If no arm level given, we can only arm it if there is + # only one supported arm type. We never default to triggered. 
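# A minimal stand-alone sketch of the feature-bitmask filtering behind
# _supported_states(); the SUPPORT_* values and the sample features value are
# assumptions for illustration, not the real alarm_control_panel constants.
SUPPORT_ALARM_ARM_HOME = 1
SUPPORT_ALARM_ARM_AWAY = 2
SUPPORT_ALARM_ARM_NIGHT = 4

STATE_TO_SUPPORT = {
    "armed_home": SUPPORT_ALARM_ARM_HOME,
    "armed_away": SUPPORT_ALARM_ARM_AWAY,
    "armed_night": SUPPORT_ALARM_ARM_NIGHT,
}


def supported_arm_states(supported_features: int) -> list:
    """Return only the arm states whose feature bit is set on the entity."""
    return [
        state
        for state, required_feature in STATE_TO_SUPPORT.items()
        if supported_features & required_feature != 0
    ]


# A panel exposing only home and away support (bits 1 | 2) yields two levels,
# so a request without an explicit armLevel would still be ambiguous here.
assert supported_arm_states(1 | 2) == ["armed_home", "armed_away"]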
+ if not arm_level: + states = self._supported_states() + + if STATE_ALARM_TRIGGERED in states: + states.remove(STATE_ALARM_TRIGGERED) + + if len(states) != 1: + raise SmartHomeError(ERR_NOT_SUPPORTED, "ArmLevel missing") + + arm_level = states[0] + + if self.state.state == arm_level: raise SmartHomeError(ERR_ALREADY_ARMED, "System is already armed") if self.state.attributes["code_arm_required"]: _verify_pin_challenge(data, self.state, challenge) - service = self.state_to_service[params["armLevel"]] + service = self.state_to_service[arm_level] # disarm the system without asking for code when # 'cancel' arming action is received while current status is pending elif ( diff --git a/homeassistant/components/google_cloud/tts.py b/homeassistant/components/google_cloud/tts.py index 96bd9e93919..1658fcec1f5 100644 --- a/homeassistant/components/google_cloud/tts.py +++ b/homeassistant/components/google_cloud/tts.py @@ -25,6 +25,7 @@ SUPPORTED_LANGUAGES = [ "ar-XA", "bn-IN", "cmn-CN", + "cmn-TW", "cs-CZ", "da-DK", "de-DE", diff --git a/homeassistant/components/griddy/translations/da.json b/homeassistant/components/griddy/translations/da.json deleted file mode 100644 index 639633dc14c..00000000000 --- a/homeassistant/components/griddy/translations/da.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "title": "Griddy" -} \ No newline at end of file diff --git a/homeassistant/components/group/cover.py b/homeassistant/components/group/cover.py index 0832d466f8c..427530dadb5 100644 --- a/homeassistant/components/group/cover.py +++ b/homeassistant/components/group/cover.py @@ -200,25 +200,30 @@ class CoverGroup(CoverEntity): """Return current tilt position for all covers.""" return self._tilt_position + @property + def device_state_attributes(self): + """Return the state attributes for the cover group.""" + return {ATTR_ENTITY_ID: self._entities} + async def async_open_cover(self, **kwargs): """Move the covers up.""" data = {ATTR_ENTITY_ID: self._covers[KEY_OPEN_CLOSE]} await self.hass.services.async_call( - DOMAIN, SERVICE_OPEN_COVER, data, blocking=True + DOMAIN, SERVICE_OPEN_COVER, data, blocking=True, context=self._context ) async def async_close_cover(self, **kwargs): """Move the covers down.""" data = {ATTR_ENTITY_ID: self._covers[KEY_OPEN_CLOSE]} await self.hass.services.async_call( - DOMAIN, SERVICE_CLOSE_COVER, data, blocking=True + DOMAIN, SERVICE_CLOSE_COVER, data, blocking=True, context=self._context ) async def async_stop_cover(self, **kwargs): """Fire the stop action.""" data = {ATTR_ENTITY_ID: self._covers[KEY_STOP]} await self.hass.services.async_call( - DOMAIN, SERVICE_STOP_COVER, data, blocking=True + DOMAIN, SERVICE_STOP_COVER, data, blocking=True, context=self._context ) async def async_set_cover_position(self, **kwargs): @@ -228,28 +233,32 @@ class CoverGroup(CoverEntity): ATTR_POSITION: kwargs[ATTR_POSITION], } await self.hass.services.async_call( - DOMAIN, SERVICE_SET_COVER_POSITION, data, blocking=True + DOMAIN, + SERVICE_SET_COVER_POSITION, + data, + blocking=True, + context=self._context, ) async def async_open_cover_tilt(self, **kwargs): """Tilt covers open.""" data = {ATTR_ENTITY_ID: self._tilts[KEY_OPEN_CLOSE]} await self.hass.services.async_call( - DOMAIN, SERVICE_OPEN_COVER_TILT, data, blocking=True + DOMAIN, SERVICE_OPEN_COVER_TILT, data, blocking=True, context=self._context ) async def async_close_cover_tilt(self, **kwargs): """Tilt covers closed.""" data = {ATTR_ENTITY_ID: self._tilts[KEY_OPEN_CLOSE]} await self.hass.services.async_call( - DOMAIN, SERVICE_CLOSE_COVER_TILT, data, 
blocking=True + DOMAIN, SERVICE_CLOSE_COVER_TILT, data, blocking=True, context=self._context ) async def async_stop_cover_tilt(self, **kwargs): """Stop cover tilt.""" data = {ATTR_ENTITY_ID: self._tilts[KEY_STOP]} await self.hass.services.async_call( - DOMAIN, SERVICE_STOP_COVER_TILT, data, blocking=True + DOMAIN, SERVICE_STOP_COVER_TILT, data, blocking=True, context=self._context ) async def async_set_cover_tilt_position(self, **kwargs): @@ -259,7 +268,11 @@ class CoverGroup(CoverEntity): ATTR_TILT_POSITION: kwargs[ATTR_TILT_POSITION], } await self.hass.services.async_call( - DOMAIN, SERVICE_SET_COVER_TILT_POSITION, data, blocking=True + DOMAIN, + SERVICE_SET_COVER_TILT_POSITION, + data, + blocking=True, + context=self._context, ) async def async_update(self): diff --git a/homeassistant/components/group/light.py b/homeassistant/components/group/light.py index 56408f410b8..69329b96122 100644 --- a/homeassistant/components/group/light.py +++ b/homeassistant/components/group/light.py @@ -183,6 +183,11 @@ class LightGroup(light.LightEntity): """No polling needed for a light group.""" return False + @property + def device_state_attributes(self): + """Return the state attributes for the light group.""" + return {ATTR_ENTITY_ID: self._entity_ids} + async def async_turn_on(self, **kwargs): """Forward the turn_on command to all lights in the light group.""" data = {ATTR_ENTITY_ID: self._entity_ids} @@ -228,7 +233,11 @@ class LightGroup(light.LightEntity): if not emulate_color_temp_entity_ids: await self.hass.services.async_call( - light.DOMAIN, light.SERVICE_TURN_ON, data, blocking=True + light.DOMAIN, + light.SERVICE_TURN_ON, + data, + blocking=True, + context=self._context, ) return @@ -244,13 +253,18 @@ class LightGroup(light.LightEntity): await asyncio.gather( self.hass.services.async_call( - light.DOMAIN, light.SERVICE_TURN_ON, data, blocking=True + light.DOMAIN, + light.SERVICE_TURN_ON, + data, + blocking=True, + context=self._context, ), self.hass.services.async_call( light.DOMAIN, light.SERVICE_TURN_ON, emulate_color_temp_data, blocking=True, + context=self._context, ), ) @@ -262,7 +276,11 @@ class LightGroup(light.LightEntity): data[ATTR_TRANSITION] = kwargs[ATTR_TRANSITION] await self.hass.services.async_call( - light.DOMAIN, light.SERVICE_TURN_OFF, data, blocking=True + light.DOMAIN, + light.SERVICE_TURN_OFF, + data, + blocking=True, + context=self._context, ) async def async_update(self): diff --git a/homeassistant/components/growatt_server/manifest.json b/homeassistant/components/growatt_server/manifest.json index 7d8a8a3852f..775ada9076d 100644 --- a/homeassistant/components/growatt_server/manifest.json +++ b/homeassistant/components/growatt_server/manifest.json @@ -2,6 +2,6 @@ "domain": "growatt_server", "name": "Growatt", "documentation": "https://www.home-assistant.io/integrations/growatt_server/", - "requirements": ["growattServer==0.0.1"], + "requirements": ["growattServer==0.0.4"], "codeowners": ["@indykoning"] } diff --git a/homeassistant/components/growatt_server/sensor.py b/homeassistant/components/growatt_server/sensor.py index c228bcbe4ab..95d6ffd0ff7 100644 --- a/homeassistant/components/growatt_server/sensor.py +++ b/homeassistant/components/growatt_server/sensor.py @@ -29,81 +29,253 @@ DEFAULT_PLANT_ID = "0" DEFAULT_NAME = "Growatt" SCAN_INTERVAL = datetime.timedelta(minutes=5) +# Sensor type order is: Sensor name, Unit of measurement, api data name, additional options + TOTAL_SENSOR_TYPES = { - "total_money_today": ("Total money today", "€", "plantMoneyText", None), - 
"total_money_total": ("Money lifetime", "€", "totalMoneyText", None), - "total_energy_today": ( - "Energy Today", - ENERGY_KILO_WATT_HOUR, - "todayEnergy", - "power", + "total_money_today": ("Total money today", "€", "plantMoneyText", {}), + "total_money_total": ("Money lifetime", "€", "totalMoneyText", {}), + "total_energy_today": ("Energy Today", ENERGY_KILO_WATT_HOUR, "todayEnergy", {},), + "total_output_power": ( + "Output Power", + POWER_WATT, + "invTodayPpv", + {"device_class": "power"}, ), - "total_output_power": ("Output Power", POWER_WATT, "invTodayPpv", "power"), "total_energy_output": ( "Lifetime energy output", ENERGY_KILO_WATT_HOUR, "totalEnergy", - "power", + {}, + ), + "total_maximum_output": ( + "Maximum power", + POWER_WATT, + "nominalPower", + {"device_class": "power"}, ), - "total_maximum_output": ("Maximum power", POWER_WATT, "nominalPower", "power"), } INVERTER_SENSOR_TYPES = { - "inverter_energy_today": ( - "Energy today", - ENERGY_KILO_WATT_HOUR, - "e_today", - "power", - ), + "inverter_energy_today": ("Energy today", ENERGY_KILO_WATT_HOUR, "e_today", {},), "inverter_energy_total": ( "Lifetime energy output", ENERGY_KILO_WATT_HOUR, "e_total", - "power", + {}, ), - "inverter_voltage_input_1": ("Input 1 voltage", VOLT, "vpv1", None), + "inverter_voltage_input_1": ("Input 1 voltage", VOLT, "vpv1", {}), "inverter_amperage_input_1": ( "Input 1 Amperage", ELECTRICAL_CURRENT_AMPERE, "ipv1", - None, + {}, ), - "inverter_wattage_input_1": ("Input 1 Wattage", POWER_WATT, "ppv1", "power"), - "inverter_voltage_input_2": ("Input 2 voltage", VOLT, "vpv2", None), + "inverter_wattage_input_1": ( + "Input 1 Wattage", + POWER_WATT, + "ppv1", + {"device_class": "power"}, + ), + "inverter_voltage_input_2": ("Input 2 voltage", VOLT, "vpv2", {}), "inverter_amperage_input_2": ( "Input 2 Amperage", ELECTRICAL_CURRENT_AMPERE, "ipv2", - None, + {}, ), - "inverter_wattage_input_2": ("Input 2 Wattage", POWER_WATT, "ppv2", "power"), - "inverter_voltage_input_3": ("Input 3 voltage", VOLT, "vpv3", None), + "inverter_wattage_input_2": ( + "Input 2 Wattage", + POWER_WATT, + "ppv2", + {"device_class": "power"}, + ), + "inverter_voltage_input_3": ("Input 3 voltage", VOLT, "vpv3", {}), "inverter_amperage_input_3": ( "Input 3 Amperage", ELECTRICAL_CURRENT_AMPERE, "ipv3", - None, + {}, ), - "inverter_wattage_input_3": ("Input 3 Wattage", POWER_WATT, "ppv3", "power"), - "inverter_internal_wattage": ("Internal wattage", POWER_WATT, "ppv", "power"), - "inverter_reactive_voltage": ("Reactive voltage", VOLT, "vacr", None), + "inverter_wattage_input_3": ( + "Input 3 Wattage", + POWER_WATT, + "ppv3", + {"device_class": "power"}, + ), + "inverter_internal_wattage": ( + "Internal wattage", + POWER_WATT, + "ppv", + {"device_class": "power"}, + ), + "inverter_reactive_voltage": ("Reactive voltage", VOLT, "vacr", {}), "inverter_inverter_reactive_amperage": ( "Reactive amperage", ELECTRICAL_CURRENT_AMPERE, "iacr", - None, + {}, + ), + "inverter_frequency": ("AC frequency", FREQUENCY_HERTZ, "fac", {}), + "inverter_current_wattage": ( + "Output power", + POWER_WATT, + "pac", + {"device_class": "power"}, ), - "inverter_frequency": ("AC frequency", FREQUENCY_HERTZ, "fac", None), - "inverter_current_wattage": ("Output power", POWER_WATT, "pac", "power"), "inverter_current_reactive_wattage": ( "Reactive wattage", POWER_WATT, "pacr", - "power", + {"device_class": "power"}, ), } -SENSOR_TYPES = {**TOTAL_SENSOR_TYPES, **INVERTER_SENSOR_TYPES} +STORAGE_SENSOR_TYPES = { + "storage_storage_production_today": ( + "Storage 
production today", + ENERGY_KILO_WATT_HOUR, + "eBatDisChargeToday", + {}, + ), + "storage_storage_production_lifetime": ( + "Lifetime Storage production", + ENERGY_KILO_WATT_HOUR, + "eBatDisChargeTotal", + {}, + ), + "storage_grid_discharge_today": ( + "Grid discharged today", + ENERGY_KILO_WATT_HOUR, + "eacDisChargeToday", + {}, + ), + "storage_load_consumption_today": ( + "Load consumption today", + ENERGY_KILO_WATT_HOUR, + "eopDischrToday", + {}, + ), + "storage_load_consumption_lifetime": ( + "Lifetime load consumption", + ENERGY_KILO_WATT_HOUR, + "eopDischrTotal", + {}, + ), + "storage_grid_charged_today": ( + "Grid charged today", + ENERGY_KILO_WATT_HOUR, + "eacChargeToday", + {}, + ), + "storage_charge_storage_lifetime": ( + "Lifetime storaged charged", + ENERGY_KILO_WATT_HOUR, + "eChargeTotal", + {}, + ), + "storage_solar_production": ( + "Solar power production", + POWER_WATT, + "ppv", + {"device_class": "power"}, + ), + "storage_battery_percentage": ( + "Battery percentage", + "%", + "capacity", + {"device_class": "battery"}, + ), + "storage_power_flow": ( + "Storage charging/ discharging(-ve)", + POWER_WATT, + "pCharge", + {"device_class": "power"}, + ), + "storage_load_consumption_solar_storage": ( + "Load consumption(Solar + Storage)", + "VA", + "rateVA", + {}, + ), + "storage_charge_today": ( + "Charge today", + ENERGY_KILO_WATT_HOUR, + "eChargeToday", + {}, + ), + "storage_import_from_grid": ( + "Import from grid", + POWER_WATT, + "pAcInPut", + {"device_class": "power"}, + ), + "storage_import_from_grid_today": ( + "Import from grid today", + ENERGY_KILO_WATT_HOUR, + "eToUserToday", + {}, + ), + "storage_import_from_grid_total": ( + "Import from grid total", + ENERGY_KILO_WATT_HOUR, + "eToUserTotal", + {}, + ), + "storage_load_consumption": ( + "Load consumption", + POWER_WATT, + "outPutPower", + {"device_class": "power"}, + ), + "storage_grid_voltage": ("AC input voltage", VOLT, "vGrid", {"round": 2}), + "storage_pv_charging_voltage": ("PV charging voltage", VOLT, "vpv", {"round": 2}), + "storage_ac_input_frequency_out": ( + "AC input frequency", + FREQUENCY_HERTZ, + "freqOutPut", + {"round": 2}, + ), + "storage_output_voltage": ("Output voltage", VOLT, "outPutVolt", {"round": 2}), + "storage_ac_output_frequency": ( + "Ac output frequency", + FREQUENCY_HERTZ, + "freqGrid", + {"round": 2}, + ), + "storage_current_PV": ( + "Solar charge current", + ELECTRICAL_CURRENT_AMPERE, + "iAcCharge", + {"round": 2}, + ), + "storage_current_1": ( + "Solar current to storage", + ELECTRICAL_CURRENT_AMPERE, + "iChargePV1", + {"round": 2}, + ), + "storage_grid_amperage_input": ( + "Grid charge current", + ELECTRICAL_CURRENT_AMPERE, + "chgCurr", + {"round": 2}, + ), + "storage_grid_out_current": ( + "Grid out current", + ELECTRICAL_CURRENT_AMPERE, + "outPutCurrent", + {"round": 2}, + ), + "storage_battery_voltage": ("Battery voltage", VOLT, "vBat", {"round": 2}), + "storage_load_percentage": ( + "Load percentage", + "%", + "loadPercent", + {"device_class": "battery", "round": 2}, + ), +} + +SENSOR_TYPES = {**TOTAL_SENSOR_TYPES, **INVERTER_SENSOR_TYPES, **STORAGE_SENSOR_TYPES} PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( { @@ -134,25 +306,34 @@ def setup_platform(hass, config, add_entities, discovery_info=None): plant_info = api.plant_list(user_id) plant_id = plant_info["data"][0]["plantId"] - # Get a list of inverters for specified plant to add sensors for. - inverters = api.inverter_list(plant_id) + # Get a list of devices for specified plant to add sensors for. 
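# A rough sketch of how the new (name, unit, api_key, options) sensor tuples
# are meant to be consumed; the sample entry and raw reading are invented, and
# only the "round"/"device_class" option keys mirror the definitions above.
SAMPLE_SENSOR = ("Output voltage", "V", "outPutVolt", {"round": 2})


def render_state(raw_value: float, sensor=SAMPLE_SENSOR):
    """Apply optional rounding and report the optional device class."""
    options = sensor[3]
    round_to = options.get("round")
    if round_to is not None:
        raw_value = round(raw_value, round_to)
    return raw_value, options.get("device_class")


assert render_state(229.98765) == (229.99, None)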
+ devices = api.device_list(plant_id) entities = [] - probe = GrowattData(api, username, password, plant_id, True) + probe = GrowattData(api, username, password, plant_id, "total") for sensor in TOTAL_SENSOR_TYPES: entities.append( GrowattInverter(probe, f"{name} Total", sensor, f"{plant_id}-{sensor}") ) - # Add sensors for each inverter in the specified plant. - for inverter in inverters: - probe = GrowattData(api, username, password, inverter["deviceSn"], False) - for sensor in INVERTER_SENSOR_TYPES: + # Add sensors for each device in the specified plant. + for device in devices: + probe = GrowattData( + api, username, password, device["deviceSn"], device["deviceType"] + ) + sensors = [] + if device["deviceType"] == "inverter": + sensors = INVERTER_SENSOR_TYPES + elif device["deviceType"] == "storage": + probe.plant_id = plant_id + sensors = STORAGE_SENSOR_TYPES + + for sensor in sensors: entities.append( GrowattInverter( probe, - f"{inverter['deviceAilas']}", + f"{device['deviceAilas']}", sensor, - f"{inverter['deviceSn']}-{sensor}", + f"{device['deviceSn']}-{sensor}", ) ) @@ -188,12 +369,16 @@ class GrowattInverter(Entity): @property def state(self): """Return the state of the sensor.""" - return self.probe.get_data(SENSOR_TYPES[self.sensor][2]) + result = self.probe.get_data(SENSOR_TYPES[self.sensor][2]) + round_to = SENSOR_TYPES[self.sensor][3].get("round") + if round_to is not None: + result = round(result, round_to) + return result @property def device_class(self): """Return the device class of the sensor.""" - return SENSOR_TYPES[self.sensor][3] + return SENSOR_TYPES[self.sensor][3].get("device_class") @property def unit_of_measurement(self): @@ -208,12 +393,13 @@ class GrowattInverter(Entity): class GrowattData: """The class for handling data retrieval.""" - def __init__(self, api, username, password, inverter_id, is_total=False): + def __init__(self, api, username, password, device_id, growatt_type): """Initialize the probe.""" - self.is_total = is_total + self.growatt_type = growatt_type self.api = api - self.inverter_id = inverter_id + self.device_id = device_id + self.plant_id = None self.data = {} self.username = username self.password = password @@ -222,19 +408,27 @@ class GrowattData: def update(self): """Update probe data.""" self.api.login(self.username, self.password) - _LOGGER.debug("Updating data for %s", self.inverter_id) + _LOGGER.debug("Updating data for %s", self.device_id) try: - if self.is_total: - total_info = self.api.plant_info(self.inverter_id) + if self.growatt_type == "total": + total_info = self.api.plant_info(self.device_id) del total_info["deviceList"] # PlantMoneyText comes in as "3.1/€" remove anything that isn't part of the number total_info["plantMoneyText"] = re.sub( r"[^\d.,]", "", total_info["plantMoneyText"] ) self.data = total_info - else: - inverter_info = self.api.inverter_detail(self.inverter_id) + elif self.growatt_type == "inverter": + inverter_info = self.api.inverter_detail(self.device_id) self.data = inverter_info["data"] + elif self.growatt_type == "storage": + storage_info_detail = self.api.storage_params(self.device_id)[ + "storageDetailBean" + ] + storage_energy_overview = self.api.storage_energy_overview( + self.plant_id, self.device_id + ) + self.data = {**storage_info_detail, **storage_energy_overview} except json.decoder.JSONDecodeError: _LOGGER.error("Unable to fetch data from Growatt server") diff --git a/homeassistant/components/guardian/config_flow.py b/homeassistant/components/guardian/config_flow.py index 
dae8fafb1e0..769344e3b01 100644 --- a/homeassistant/components/guardian/config_flow.py +++ b/homeassistant/components/guardian/config_flow.py @@ -80,7 +80,7 @@ class ConfigFlow(config_entries.ConfigFlow, domain=DOMAIN): title=info[CONF_UID], data={CONF_UID: info["uid"], **user_input} ) - async def async_step_zeroconf(self, discovery_info=None): + async def async_step_zeroconf(self, discovery_info): """Handle the configuration via zeroconf.""" if discovery_info is None: return self.async_abort(reason="connection_error") diff --git a/homeassistant/components/harmony/__init__.py b/homeassistant/components/harmony/__init__.py index 540e39f8f44..f08d4dcd151 100644 --- a/homeassistant/components/harmony/__init__.py +++ b/homeassistant/components/harmony/__init__.py @@ -13,7 +13,7 @@ from homeassistant.core import HomeAssistant, callback from homeassistant.exceptions import ConfigEntryNotReady from homeassistant.helpers.dispatcher import async_dispatcher_send -from .const import DOMAIN, HARMONY_OPTIONS_UPDATE, PLATFORMS +from .const import ATTR_ACTIVITY_NOTIFY, DOMAIN, HARMONY_OPTIONS_UPDATE, PLATFORMS from .remote import HarmonyRemote _LOGGER = logging.getLogger(__name__) @@ -38,11 +38,18 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry): name = entry.data[CONF_NAME] activity = entry.options.get(ATTR_ACTIVITY) delay_secs = entry.options.get(ATTR_DELAY_SECS, DEFAULT_DELAY_SECS) + activity_notify = entry.options.get(ATTR_ACTIVITY_NOTIFY, False) harmony_conf_file = hass.config.path(f"harmony_{entry.unique_id}.conf") try: device = HarmonyRemote( - name, entry.unique_id, address, activity, harmony_conf_file, delay_secs + name, + entry.unique_id, + address, + activity, + harmony_conf_file, + delay_secs, + activity_notify, ) connected_ok = await device.connect() except (asyncio.TimeoutError, ValueError, AttributeError): diff --git a/homeassistant/components/harmony/config_flow.py b/homeassistant/components/harmony/config_flow.py index 8d43b2d69ca..8487509407c 100644 --- a/homeassistant/components/harmony/config_flow.py +++ b/homeassistant/components/harmony/config_flow.py @@ -14,7 +14,7 @@ from homeassistant.components.remote import ( from homeassistant.const import CONF_HOST, CONF_NAME from homeassistant.core import callback -from .const import DOMAIN, UNIQUE_ID +from .const import ATTR_ACTIVITY_NOTIFY, DOMAIN, UNIQUE_ID from .util import ( find_best_name_for_remote, find_unique_id_for_remote, @@ -162,6 +162,8 @@ def _options_from_user_input(user_input): options[ATTR_ACTIVITY] = user_input[ATTR_ACTIVITY] if ATTR_DELAY_SECS in user_input: options[ATTR_DELAY_SECS] = user_input[ATTR_DELAY_SECS] + if ATTR_ACTIVITY_NOTIFY in user_input: + options[ATTR_ACTIVITY_NOTIFY] = user_input[ATTR_ACTIVITY_NOTIFY] return options @@ -190,6 +192,10 @@ class OptionsFlowHandler(config_entries.OptionsFlow): vol.Optional( ATTR_ACTIVITY, default=self.config_entry.options.get(ATTR_ACTIVITY), ): vol.In(remote.activity_names), + vol.Optional( + ATTR_ACTIVITY_NOTIFY, + default=self.config_entry.options.get(ATTR_ACTIVITY_NOTIFY, False), + ): vol.Coerce(bool), } ) return self.async_show_form(step_id="init", data_schema=data_schema) diff --git a/homeassistant/components/harmony/const.py b/homeassistant/components/harmony/const.py index 4cd5dce0af5..dcb4f74912f 100644 --- a/homeassistant/components/harmony/const.py +++ b/homeassistant/components/harmony/const.py @@ -6,3 +6,4 @@ PLATFORMS = ["remote"] UNIQUE_ID = "unique_id" ACTIVITY_POWER_OFF = "PowerOff" HARMONY_OPTIONS_UPDATE = "harmony_options_update" 
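# A minimal sketch of what the new activity_notify option changes: it only
# decides which callback slot receives the activity handler (the flag itself is
# read with entry.options.get(ATTR_ACTIVITY_NOTIFY, False) above). The dict
# keys mirror the callback names used in the remote platform; the handler here
# is just a placeholder.
def build_activity_callbacks(activity_notify: bool, handler):
    """Route the handler to the 'starting' slot when early notification is on."""
    callbacks = {"new_activity_starting": None, "new_activity": None}
    if activity_notify:
        callbacks["new_activity_starting"] = handler
    else:
        callbacks["new_activity"] = handler
    return callbacks


assert build_activity_callbacks(True, print)["new_activity_starting"] is print
assert build_activity_callbacks(False, print)["new_activity"] is print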
+ATTR_ACTIVITY_NOTIFY = "activity_notify" diff --git a/homeassistant/components/harmony/manifest.json b/homeassistant/components/harmony/manifest.json index 154fd211aa8..40f88ad19ef 100644 --- a/homeassistant/components/harmony/manifest.json +++ b/homeassistant/components/harmony/manifest.json @@ -2,7 +2,7 @@ "domain": "harmony", "name": "Logitech Harmony Hub", "documentation": "https://www.home-assistant.io/integrations/harmony", - "requirements": ["aioharmony==0.1.13"], + "requirements": ["aioharmony==0.2.5"], "codeowners": ["@ehendrix23", "@bramkragten", "@bdraco"], "ssdp": [ { diff --git a/homeassistant/components/harmony/remote.py b/homeassistant/components/harmony/remote.py index 25b68b42e72..d5d8eb5773f 100644 --- a/homeassistant/components/harmony/remote.py +++ b/homeassistant/components/harmony/remote.py @@ -3,12 +3,9 @@ import asyncio import json import logging +from aioharmony.const import ClientCallbackType import aioharmony.exceptions as aioexc -from aioharmony.harmonyapi import ( - ClientCallbackType, - HarmonyAPI as HarmonyClient, - SendCommandDevice, -) +from aioharmony.harmonyapi import HarmonyAPI as HarmonyClient, SendCommandDevice import voluptuous as vol from homeassistant.components import remote @@ -31,6 +28,7 @@ from homeassistant.helpers.dispatcher import async_dispatcher_connect from .const import ( ACTIVITY_POWER_OFF, + ATTR_ACTIVITY_NOTIFY, DOMAIN, HARMONY_OPTIONS_UPDATE, SERVICE_CHANGE_CHANNEL, @@ -128,7 +126,9 @@ async def async_setup_entry( class HarmonyRemote(remote.RemoteEntity): """Remote representation used to control a Harmony device.""" - def __init__(self, name, unique_id, host, activity, out_path, delay_secs): + def __init__( + self, name, unique_id, host, activity, out_path, delay_secs, activity_notify + ): """Initialize HarmonyRemote class.""" self._name = name self.host = host @@ -140,6 +140,7 @@ class HarmonyRemote(remote.RemoteEntity): self.delay_secs = delay_secs self._available = False self._unique_id = unique_id + self._activity_notify = activity_notify @property def activity_names(self): @@ -162,16 +163,29 @@ class HarmonyRemote(remote.RemoteEntity): if ATTR_ACTIVITY in data: self.default_activity = data[ATTR_ACTIVITY] + if ATTR_ACTIVITY_NOTIFY in data: + self._activity_notify = data[ATTR_ACTIVITY_NOTIFY] + self._update_callbacks() + + def _update_callbacks(self): + callbacks = { + "config_updated": self.new_config, + "connect": self.got_connected, + "disconnect": self.got_disconnected, + "new_activity_starting": None, + "new_activity": None, + } + if self._activity_notify: + callbacks["new_activity_starting"] = self.new_activity + else: + callbacks["new_activity"] = self.new_activity + self._client.callbacks = ClientCallbackType(**callbacks) + async def async_added_to_hass(self): """Complete the initialization.""" _LOGGER.debug("%s: Harmony Hub added", self._name) # Register the callbacks - self._client.callbacks = ClientCallbackType( - new_activity=self.new_activity, - config_updated=self.new_config, - connect=self.got_connected, - disconnect=self.got_disconnected, - ) + self._update_callbacks() self.async_on_remove( async_dispatcher_connect( diff --git a/homeassistant/components/harmony/strings.json b/homeassistant/components/harmony/strings.json index 86de34672be..053d5cea8bd 100644 --- a/homeassistant/components/harmony/strings.json +++ b/homeassistant/components/harmony/strings.json @@ -28,7 +28,8 @@ "description": "Adjust Harmony Hub Options", "data": { "activity": "The default activity to execute when none is specified.", - 
"delay_secs": "The delay between sending commands." + "delay_secs": "The delay between sending commands.", + "activity_notify": "Update current activity on start of activity switch." } } } diff --git a/homeassistant/components/harmony/translations/ca.json b/homeassistant/components/harmony/translations/ca.json index 5bb279c0482..0406160a7a6 100644 --- a/homeassistant/components/harmony/translations/ca.json +++ b/homeassistant/components/harmony/translations/ca.json @@ -27,6 +27,7 @@ "init": { "data": { "activity": "Activitat predeterminada a executar quan no se n'especifica cap.", + "activity_notify": "Actualitza l'activitat actual al canviar l'activitat.", "delay_secs": "Retard entre l'enviament d'ordres." }, "description": "Ajusta les opcions de Harmony Hub" diff --git a/homeassistant/components/harmony/translations/en.json b/homeassistant/components/harmony/translations/en.json index ce13e79e279..d180ff4ba7d 100644 --- a/homeassistant/components/harmony/translations/en.json +++ b/homeassistant/components/harmony/translations/en.json @@ -27,6 +27,7 @@ "init": { "data": { "activity": "The default activity to execute when none is specified.", + "activity_notify": "Update current activity on start of activity switch.", "delay_secs": "The delay between sending commands." }, "description": "Adjust Harmony Hub Options" diff --git a/homeassistant/components/harmony/translations/es.json b/homeassistant/components/harmony/translations/es.json index a5d96ec7ef3..97656e5441d 100644 --- a/homeassistant/components/harmony/translations/es.json +++ b/homeassistant/components/harmony/translations/es.json @@ -27,6 +27,7 @@ "init": { "data": { "activity": "La actividad por defecto a ejecutar cuando no se especifica ninguna.", + "activity_notify": "Actualice la actividad actual al inicio del cambio de actividad.", "delay_secs": "El retraso entre el env\u00edo de comandos." }, "description": "Ajustar las opciones de Harmony Hub" diff --git a/homeassistant/components/harmony/translations/it.json b/homeassistant/components/harmony/translations/it.json index c658e69e0c0..bd3bece0a4f 100644 --- a/homeassistant/components/harmony/translations/it.json +++ b/homeassistant/components/harmony/translations/it.json @@ -27,6 +27,7 @@ "init": { "data": { "activity": "L'attivit\u00e0 predefinita da eseguire quando nessuna \u00e8 specificata.", + "activity_notify": "Aggiorna l'attivit\u00e0 corrente all'avvio del cambio attivit\u00e0.", "delay_secs": "Il ritardo tra l'invio dei comandi." }, "description": "Regolare le opzioni di Harmony Hub" diff --git a/homeassistant/components/harmony/translations/no.json b/homeassistant/components/harmony/translations/no.json index 83e7a2f6cb7..14df560d104 100644 --- a/homeassistant/components/harmony/translations/no.json +++ b/homeassistant/components/harmony/translations/no.json @@ -15,7 +15,7 @@ }, "user": { "data": { - "host": "Vertsnavn eller IP-adresse", + "host": "Vert", "name": "Navn p\u00e5 hub" }, "title": "Sett opp Logitech Harmony Hub" @@ -27,6 +27,7 @@ "init": { "data": { "activity": "Standardaktiviteten som skal utf\u00f8res n\u00e5r ingen er angitt.", + "activity_notify": "Oppdater gjeldende aktivitet ved starten av aktivitetsbryteren.", "delay_secs": "Forsinkelsen mellom sending av kommandoer." 
}, "description": "Juster alternativene for harmonihub" diff --git a/homeassistant/components/harmony/translations/pl.json b/homeassistant/components/harmony/translations/pl.json index 12bbcfaca18..d8a3c22f3c3 100644 --- a/homeassistant/components/harmony/translations/pl.json +++ b/homeassistant/components/harmony/translations/pl.json @@ -27,6 +27,7 @@ "init": { "data": { "activity": "Domy\u015blna aktywno\u015b\u0107 do wykonania, gdy \u017cadnej nie okre\u015blono.", + "activity_notify": "Aktualizowanie bie\u017c\u0105cej aktywno\u015bci przy rozpoczynaniu prze\u0142\u0105czania aktywno\u015bci.", "delay_secs": "Op\u00f3\u017anienie mi\u0119dzy wysy\u0142aniem polece\u0144." }, "description": "Dostosuj opcje huba Harmony" diff --git a/homeassistant/components/harmony/translations/ru.json b/homeassistant/components/harmony/translations/ru.json index 4e995a26c48..b2bed0c71f2 100644 --- a/homeassistant/components/harmony/translations/ru.json +++ b/homeassistant/components/harmony/translations/ru.json @@ -27,6 +27,7 @@ "init": { "data": { "activity": "\u0410\u043a\u0442\u0438\u0432\u043d\u043e\u0441\u0442\u044c \u043f\u043e \u0443\u043c\u043e\u043b\u0447\u0430\u043d\u0438\u044e, \u043a\u043e\u0433\u0434\u0430 \u043d\u0438 \u043e\u0434\u043d\u0430 \u0438\u0437 \u043d\u0438\u0445 \u043d\u0435 \u0443\u043a\u0430\u0437\u0430\u043d\u0430.", + "activity_notify": "\u041e\u0431\u043d\u043e\u0432\u0438\u0442\u044c \u0442\u0435\u043a\u0443\u0449\u0443\u044e \u0430\u043a\u0442\u0438\u0432\u043d\u043e\u0441\u0442\u044c \u043f\u0440\u0438 \u0437\u0430\u043f\u0443\u0441\u043a\u0435 \u0430\u043a\u0442\u0438\u0432\u043d\u043e\u0441\u0442\u0438.", "delay_secs": "\u0417\u0430\u0434\u0435\u0440\u0436\u043a\u0430 \u043c\u0435\u0436\u0434\u0443 \u043e\u0442\u043f\u0440\u0430\u0432\u043a\u043e\u0439 \u043a\u043e\u043c\u0430\u043d\u0434." 
}, "description": "\u041d\u0430\u0441\u0442\u0440\u043e\u0439\u043a\u0430 \u043f\u0430\u0440\u0430\u043c\u0435\u0442\u0440\u043e\u0432 Harmony Hub" diff --git a/homeassistant/components/harmony/translations/zh-Hant.json b/homeassistant/components/harmony/translations/zh-Hant.json index dfd1249d629..d7ae177efee 100644 --- a/homeassistant/components/harmony/translations/zh-Hant.json +++ b/homeassistant/components/harmony/translations/zh-Hant.json @@ -27,6 +27,7 @@ "init": { "data": { "activity": "\u7576\u672a\u6307\u5b9a\u6642\u9810\u8a2d\u57f7\u884c\u6d3b\u52d5\u3002", + "activity_notify": "\u65bc\u958b\u59cb\u6d3b\u52d5\u958b\u95dc\u6642\u66f4\u65b0\u76ee\u524d\u6d3b\u52d5\u3002", "delay_secs": "\u50b3\u9001\u547d\u4ee4\u9593\u9694\u79d2\u6578\u3002" }, "description": "\u8abf\u6574 Harmony Hub \u9078\u9805" diff --git a/homeassistant/components/harmony/util.py b/homeassistant/components/harmony/util.py index 412aa2c6940..daee1845c2d 100644 --- a/homeassistant/components/harmony/util.py +++ b/homeassistant/components/harmony/util.py @@ -9,11 +9,10 @@ from .const import DOMAIN def find_unique_id_for_remote(harmony: HarmonyAPI): """Find the unique id for both websocket and xmpp clients.""" - websocket_unique_id = harmony.hub_config.info.get("activeRemoteId") - if websocket_unique_id is not None: - return str(websocket_unique_id) + if harmony.hub_id is not None: + return str(harmony.hub_id) - # fallback to the xmpp unique id if websocket is not available + # fallback timeStampHash if Hub ID is not available return harmony.config["global"]["timeStampHash"].split(";")[-1] diff --git a/homeassistant/components/heos/media_player.py b/homeassistant/components/heos/media_player.py index 7e827c96f55..15d3c4573db 100644 --- a/homeassistant/components/heos/media_player.py +++ b/homeassistant/components/heos/media_player.py @@ -161,7 +161,7 @@ class HeosMediaPlayer(MediaPlayerEntity): @log_command_error("play media") async def async_play_media(self, media_type, media_id, **kwargs): """Play a piece of media.""" - if media_type == MEDIA_TYPE_URL: + if media_type in (MEDIA_TYPE_URL, MEDIA_TYPE_MUSIC): await self._player.play_url(media_id) return diff --git a/homeassistant/components/heos/translations/cs.json b/homeassistant/components/heos/translations/cs.json deleted file mode 100644 index f77038cb8b6..00000000000 --- a/homeassistant/components/heos/translations/cs.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "title": "HEOS" -} \ No newline at end of file diff --git a/homeassistant/components/heos/translations/nn.json b/homeassistant/components/heos/translations/nn.json deleted file mode 100644 index 8703148b3f8..00000000000 --- a/homeassistant/components/heos/translations/nn.json +++ /dev/null @@ -1,11 +0,0 @@ -{ - "config": { - "step": { - "user": { - "data": { - "access_token": "Vert" - } - } - } - } -} \ No newline at end of file diff --git a/homeassistant/components/hisense_aehw4a1/manifest.json b/homeassistant/components/hisense_aehw4a1/manifest.json index 02535142d1b..e702e285277 100644 --- a/homeassistant/components/hisense_aehw4a1/manifest.json +++ b/homeassistant/components/hisense_aehw4a1/manifest.json @@ -3,6 +3,6 @@ "name": "Hisense AEH-W4A1", "config_flow": true, "documentation": "https://www.home-assistant.io/integrations/hisense_aehw4a1", - "requirements": ["pyaehw4a1==0.3.4"], + "requirements": ["pyaehw4a1==0.3.5"], "codeowners": ["@bannhead"] } diff --git a/homeassistant/components/history/__init__.py b/homeassistant/components/history/__init__.py index 06b26cce362..f943c126d3e 100644 --- 
a/homeassistant/components/history/__init__.py +++ b/homeassistant/components/history/__init__.py @@ -2,6 +2,7 @@ from collections import defaultdict from datetime import timedelta from itertools import groupby +import json import logging import time from typing import Optional, cast @@ -12,17 +13,20 @@ import voluptuous as vol from homeassistant.components import recorder from homeassistant.components.http import HomeAssistantView -from homeassistant.components.recorder.models import States, process_timestamp +from homeassistant.components.recorder.models import ( + States, + process_timestamp, + process_timestamp_to_utc_isoformat, +) from homeassistant.components.recorder.util import execute, session_scope from homeassistant.const import ( - ATTR_HIDDEN, CONF_DOMAINS, CONF_ENTITIES, CONF_EXCLUDE, CONF_INCLUDE, HTTP_BAD_REQUEST, ) -from homeassistant.core import split_entity_id +from homeassistant.core import Context, State, split_entity_id import homeassistant.helpers.config_validation as cv import homeassistant.util.dt as dt_util @@ -36,21 +40,55 @@ CONF_ORDER = "use_include_order" STATE_KEY = "state" LAST_CHANGED_KEY = "last_changed" -CONFIG_SCHEMA = vol.Schema( +# Not reusing from entityfilter because history does not support glob filtering +_FILTER_SCHEMA_INNER = vol.Schema( { - DOMAIN: recorder.FILTER_SCHEMA.extend( - {vol.Optional(CONF_ORDER, default=False): cv.boolean} - ) - }, - extra=vol.ALLOW_EXTRA, + vol.Optional(CONF_DOMAINS, default=[]): vol.All(cv.ensure_list, [cv.string]), + vol.Optional(CONF_ENTITIES, default=[]): cv.entity_ids, + } +) +_FILTER_SCHEMA = vol.Schema( + { + vol.Optional( + CONF_INCLUDE, default=_FILTER_SCHEMA_INNER({}) + ): _FILTER_SCHEMA_INNER, + vol.Optional( + CONF_EXCLUDE, default=_FILTER_SCHEMA_INNER({}) + ): _FILTER_SCHEMA_INNER, + vol.Optional(CONF_ORDER, default=False): cv.boolean, + } ) -SIGNIFICANT_DOMAINS = ("climate", "device_tracker", "thermostat", "water_heater") +CONFIG_SCHEMA = vol.Schema({DOMAIN: _FILTER_SCHEMA}, extra=vol.ALLOW_EXTRA) + +SIGNIFICANT_DOMAINS = ( + "climate", + "device_tracker", + "humidifier", + "thermostat", + "water_heater", +) IGNORE_DOMAINS = ("zone", "scene") -NEED_ATTRIBUTE_DOMAINS = {"climate", "water_heater", "thermostat", "script"} +NEED_ATTRIBUTE_DOMAINS = { + "climate", + "humidifier", + "script", + "thermostat", + "water_heater", +} SCRIPT_DOMAIN = "script" ATTR_CAN_CANCEL = "can_cancel" +QUERY_STATES = [ + States.domain, + States.entity_id, + States.state, + States.attributes, + States.last_changed, + States.last_updated, + States.created, +] + def get_significant_states(hass, *args, **kwargs): """Wrap _get_significant_states with a sql session.""" @@ -79,7 +117,7 @@ def _get_significant_states( timer_start = time.perf_counter() if significant_changes_only: - query = session.query(States).filter( + query = session.query(*QUERY_STATES).filter( ( States.domain.in_(SIGNIFICANT_DOMAINS) | (States.last_changed == States.last_updated) @@ -87,7 +125,7 @@ def _get_significant_states( & (States.last_updated > start_time) ) else: - query = session.query(States).filter(States.last_updated > start_time) + query = session.query(*QUERY_STATES).filter(States.last_updated > start_time) if filters: query = filters.apply(query, entity_ids) @@ -97,7 +135,7 @@ def _get_significant_states( query = query.order_by(States.entity_id, States.last_updated) - states = execute(query, to_native=False) + states = execute(query) if _LOGGER.isEnabledFor(logging.DEBUG): elapsed = time.perf_counter() - timer_start @@ -117,9 +155,8 @@ def 
_get_significant_states( def state_changes_during_period(hass, start_time, end_time=None, entity_id=None): """Return states changes during UTC period start_time - end_time.""" - with session_scope(hass=hass) as session: - query = session.query(States).filter( + query = session.query(*QUERY_STATES).filter( (States.last_changed == States.last_updated) & (States.last_updated > start_time) ) @@ -132,20 +169,19 @@ def state_changes_during_period(hass, start_time, end_time=None, entity_id=None) entity_ids = [entity_id] if entity_id is not None else None - states = execute( - query.order_by(States.entity_id, States.last_updated), to_native=False - ) + states = execute(query.order_by(States.entity_id, States.last_updated)) return _sorted_states_to_json(hass, session, states, start_time, entity_ids) def get_last_state_changes(hass, number_of_states, entity_id): """Return the last number_of_states.""" - start_time = dt_util.utcnow() with session_scope(hass=hass) as session: - query = session.query(States).filter(States.last_changed == States.last_updated) + query = session.query(*QUERY_STATES).filter( + States.last_changed == States.last_updated + ) if entity_id is not None: query = query.filter_by(entity_id=entity_id.lower()) @@ -155,8 +191,7 @@ def get_last_state_changes(hass, number_of_states, entity_id): states = execute( query.order_by(States.entity_id, States.last_updated.desc()).limit( number_of_states - ), - to_native=False, + ) ) return _sorted_states_to_json( @@ -171,7 +206,6 @@ def get_last_state_changes(hass, number_of_states, entity_id): def get_states(hass, utc_point_in_time, entity_ids=None, run=None, filters=None): """Return the states at a specific point in time.""" - if run is None: run = recorder.run_information_from_instance(hass, utc_point_in_time) @@ -189,6 +223,21 @@ def _get_states_with_session( session, utc_point_in_time, entity_ids=None, run=None, filters=None ): """Return the states at a specific point in time.""" + query = session.query(*QUERY_STATES) + + if entity_ids and len(entity_ids) == 1: + # Use an entirely different (and extremely fast) query if we only + # have a single entity id + query = ( + query.filter( + States.last_updated < utc_point_in_time, + States.entity_id.in_(entity_ids), + ) + .order_by(States.last_updated.desc()) + .limit(1) + ) + return [LazyState(row) for row in execute(query)] + if run is None: run = recorder.run_information_with_session(session, utc_point_in_time) @@ -196,70 +245,46 @@ def _get_states_with_session( if run is None: return [] - query = session.query(States) + # We have more than one entity to look at (most commonly we want + # all entities,) so we need to do a search on all states since the + # last recorder run started. - if entity_ids and len(entity_ids) == 1: - # Use an entirely different (and extremely fast) query if we only - # have a single entity id - query = ( - query.filter( - States.last_updated >= run.start, - States.last_updated < utc_point_in_time, - States.entity_id.in_(entity_ids), - ) - .order_by(States.last_updated.desc()) - .limit(1) - ) + most_recent_states_by_date = session.query( + States.entity_id.label("max_entity_id"), + func.max(States.last_updated).label("max_last_updated"), + ).filter( + (States.last_updated >= run.start) & (States.last_updated < utc_point_in_time) + ) - else: - # We have more than one entity to look at (most commonly we want - # all entities,) so we need to do a search on all states since the - # last recorder run started. 
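# Conceptually, the GROUP BY / max(last_updated) subquery plus the join back on
# state_id reduces to "newest row per entity_id". A toy in-memory equivalent,
# with invented rows standing in for the recorder's States table:
rows = [
    ("light.kitchen", 1, "off"),
    ("light.kitchen", 5, "on"),
    ("sensor.outdoor_temp", 3, "21.5"),
]


def most_recent_per_entity(rows):
    """Keep only the row with the greatest last_updated for each entity_id."""
    latest = {}
    for entity_id, last_updated, state in rows:
        current = latest.get(entity_id)
        if current is None or last_updated > current[0]:
            latest[entity_id] = (last_updated, state)
    return latest


assert most_recent_per_entity(rows)["light.kitchen"] == (5, "on")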
+ if entity_ids: + most_recent_states_by_date.filter(States.entity_id.in_(entity_ids)) - most_recent_states_by_date = session.query( - States.entity_id.label("max_entity_id"), - func.max(States.last_updated).label("max_last_updated"), - ).filter( - (States.last_updated >= run.start) - & (States.last_updated < utc_point_in_time) - ) + most_recent_states_by_date = most_recent_states_by_date.group_by(States.entity_id) - if entity_ids: - most_recent_states_by_date.filter(States.entity_id.in_(entity_ids)) + most_recent_states_by_date = most_recent_states_by_date.subquery() - most_recent_states_by_date = most_recent_states_by_date.group_by( - States.entity_id - ) + most_recent_state_ids = session.query( + func.max(States.state_id).label("max_state_id") + ).join( + most_recent_states_by_date, + and_( + States.entity_id == most_recent_states_by_date.c.max_entity_id, + States.last_updated == most_recent_states_by_date.c.max_last_updated, + ), + ) - most_recent_states_by_date = most_recent_states_by_date.subquery() + most_recent_state_ids = most_recent_state_ids.group_by(States.entity_id) - most_recent_state_ids = session.query( - func.max(States.state_id).label("max_state_id") - ).join( - most_recent_states_by_date, - and_( - States.entity_id == most_recent_states_by_date.c.max_entity_id, - States.last_updated == most_recent_states_by_date.c.max_last_updated, - ), - ) + most_recent_state_ids = most_recent_state_ids.subquery() - most_recent_state_ids = most_recent_state_ids.group_by(States.entity_id) + query = query.join( + most_recent_state_ids, States.state_id == most_recent_state_ids.c.max_state_id, + ).filter(~States.domain.in_(IGNORE_DOMAINS)) - most_recent_state_ids = most_recent_state_ids.subquery() + if filters: + query = filters.apply(query, entity_ids) - query = query.join( - most_recent_state_ids, - States.state_id == most_recent_state_ids.c.max_state_id, - ).filter(~States.domain.in_(IGNORE_DOMAINS)) - - if filters: - query = filters.apply(query, entity_ids) - - return [ - state - for state in execute(query) - if not state.attributes.get(ATTR_HIDDEN, False) - ] + return [LazyState(row) for row in execute(query)] def _sorted_states_to_json( @@ -306,7 +331,7 @@ def _sorted_states_to_json( # Called in a tight loop so cache the function # here - _process_timestamp = process_timestamp + _process_timestamp_to_utc_isoformat = process_timestamp_to_utc_isoformat # Append all changes to it for ent_id, group in groupby(states, lambda state: state.entity_id): @@ -316,12 +341,11 @@ def _sorted_states_to_json( ent_results.extend( [ native_state - for native_state in (db_state.to_native() for db_state in group) + for native_state in (LazyState(db_state) for db_state in group) if ( domain != SCRIPT_DOMAIN or native_state.attributes.get(ATTR_CAN_CANCEL) ) - and not native_state.attributes.get(ATTR_HIDDEN, False) ] ) continue @@ -331,18 +355,12 @@ def _sorted_states_to_json( # in-between only provide the "state" and the # "last_changed". 
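# A toy sketch of the minimal-response shape produced below: consecutive
# duplicate states are dropped and the in-between entries carry only "state"
# and "last_changed". The sample history is invented.
history = [
    ("on", "2020-07-01T10:00:00+00:00"),
    ("on", "2020-07-01T10:05:00+00:00"),  # same state, filtered out
    ("off", "2020-07-01T10:10:00+00:00"),
]


def minimal_response(history):
    """Collapse consecutive duplicates into state/last_changed dicts."""
    results = []
    prev_state = None
    for state, last_changed in history:
        if state == prev_state:
            continue
        results.append({"state": state, "last_changed": last_changed})
        prev_state = state
    return results


assert [entry["state"] for entry in minimal_response(history)] == ["on", "off"]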
if not ent_results: - ent_results.append(next(group).to_native()) + ent_results.append(LazyState(next(group))) - initial_state = ent_results[-1] prev_state = ent_results[-1] initial_state_count = len(ent_results) for db_state in group: - if ATTR_HIDDEN in db_state.attributes and db_state.to_native().attributes.get( - ATTR_HIDDEN, False - ): - continue - # With minimal response we do not care about attribute # changes so we can filter out duplicate states if db_state.state == prev_state.state: @@ -351,22 +369,18 @@ def _sorted_states_to_json( ent_results.append( { STATE_KEY: db_state.state, - LAST_CHANGED_KEY: _process_timestamp( + LAST_CHANGED_KEY: _process_timestamp_to_utc_isoformat( db_state.last_changed - ).isoformat(), + ), } ) prev_state = db_state - if ( - prev_state - and prev_state != initial_state - and len(ent_results) != initial_state_count - ): + if prev_state and len(ent_results) != initial_state_count: # There was at least one state change # replace the last minimal state with # a full state - ent_results[-1] = prev_state.to_native() + ent_results[-1] = LazyState(prev_state) # Filter out the empty lists if some states had 0 results. return {key: val for key, val in result.items() if val} @@ -416,7 +430,7 @@ class HistoryPeriodView(HomeAssistantView): self, request: web.Request, datetime: Optional[str] = None ) -> web.Response: """Return history over a period of time.""" - + datetime_ = None if datetime: datetime_ = dt_util.parse_datetime(datetime) @@ -537,7 +551,6 @@ class Filters: * if include and exclude is defined - select the entities specified in the include and filter out the ones from the exclude list. """ - # specific entities requested - do not in/exclude anything if entity_ids is not None: return query.filter(States.entity_id.in_(entity_ids)) @@ -578,3 +591,89 @@ class Filters: if self.excluded_entities: query = query.filter(~States.entity_id.in_(self.excluded_entities)) return query + + +class LazyState(State): + """A lazy version of core State.""" + + __slots__ = [ + "_row", + "entity_id", + "state", + "_attributes", + "_last_changed", + "_last_updated", + "_context", + ] + + def __init__(self, row): # pylint: disable=super-init-not-called + """Init the lazy state.""" + self._row = row + self.entity_id = self._row.entity_id + self.state = self._row.state + self._attributes = None + self._last_changed = None + self._last_updated = None + self._context = None + + @property # type: ignore + def attributes(self): + """State attributes.""" + if not self._attributes: + try: + self._attributes = json.loads(self._row.attributes) + except ValueError: + # When json.loads fails + _LOGGER.exception("Error converting row to state: %s", self) + self._attributes = {} + return self._attributes + + @attributes.setter + def attributes(self, value): + """Set attributes.""" + self._attributes = value + + @property # type: ignore + def context(self): + """State context.""" + if not self._context: + self._context = Context(id=None) + return self._context + + @context.setter + def context(self, value): + """Set context.""" + self._context = value + + @property # type: ignore + def last_changed(self): + """Last changed datetime.""" + if not self._last_changed: + self._last_changed = process_timestamp(self._row.last_changed) + return self._last_changed + + @last_changed.setter + def last_changed(self, value): + """Set last changed datetime.""" + self._last_changed = value + + @property # type: ignore + def last_updated(self): + """Last updated datetime.""" + if not self._last_updated: + 
self._last_updated = process_timestamp(self._row.last_updated) + return self._last_updated + + @last_updated.setter + def last_updated(self, value): + """Set last updated datetime.""" + self._last_updated = value + + def __eq__(self, other): + """Return the comparison.""" + return ( + other.__class__ in [self.__class__, State] + and self.entity_id == other.entity_id + and self.state == other.state + and self.attributes == other.attributes + ) diff --git a/homeassistant/components/homeassistant/__init__.py b/homeassistant/components/homeassistant/__init__.py index e0a4d88ec6a..83166ba4cce 100644 --- a/homeassistant/components/homeassistant/__init__.py +++ b/homeassistant/components/homeassistant/__init__.py @@ -78,7 +78,9 @@ async def async_setup(hass: ha.HomeAssistant, config: dict) -> bool: data[ATTR_ENTITY_ID] = list(ent_ids) tasks.append( - hass.services.async_call(domain, service.service, data, blocking) + hass.services.async_call( + domain, service.service, data, blocking, context=service.context + ) ) if tasks: diff --git a/homeassistant/components/homekit/__init__.py b/homeassistant/components/homekit/__init__.py index a315ddd41e9..c5a921a9dd2 100644 --- a/homeassistant/components/homekit/__init__.py +++ b/homeassistant/components/homekit/__init__.py @@ -21,7 +21,6 @@ from homeassistant.const import ( ATTR_BATTERY_CHARGING, ATTR_BATTERY_LEVEL, ATTR_ENTITY_ID, - ATTR_SERVICE, CONF_IP_ADDRESS, CONF_NAME, CONF_PORT, @@ -33,14 +32,7 @@ from homeassistant.core import CoreState, HomeAssistant, callback from homeassistant.exceptions import ConfigEntryNotReady, Unauthorized from homeassistant.helpers import device_registry, entity_registry import homeassistant.helpers.config_validation as cv -from homeassistant.helpers.entityfilter import ( - BASE_FILTER_SCHEMA, - CONF_EXCLUDE_DOMAINS, - CONF_EXCLUDE_ENTITIES, - CONF_INCLUDE_DOMAINS, - CONF_INCLUDE_ENTITIES, - convert_filter, -) +from homeassistant.helpers.entityfilter import BASE_FILTER_SCHEMA, FILTER_SCHEMA from homeassistant.loader import async_get_integration from homeassistant.util import get_local_ip @@ -48,12 +40,10 @@ from .accessories import get_accessory from .aidmanager import AccessoryAidStorage from .const import ( AID_STORAGE, - ATTR_DISPLAY_NAME, ATTR_INTERGRATION, ATTR_MANUFACTURER, ATTR_MODEL, ATTR_SOFTWARE_VERSION, - ATTR_VALUE, BRIDGE_NAME, BRIDGE_SERIAL_NUMBER, CONF_ADVERTISE_IP, @@ -71,7 +61,6 @@ from .const import ( DEFAULT_PORT, DEFAULT_SAFE_MODE, DOMAIN, - EVENT_HOMEKIT_CHANGED, HOMEKIT, HOMEKIT_PAIRING_QR, HOMEKIT_PAIRING_QR_SECRET, @@ -144,7 +133,6 @@ RESET_ACCESSORY_SERVICE_SCHEMA = vol.Schema( async def async_setup(hass: HomeAssistant, config: dict): """Set up the HomeKit from yaml.""" - hass.data.setdefault(DOMAIN, {}) _async_register_events_and_services(hass) @@ -221,17 +209,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry): entity_config = options.get(CONF_ENTITY_CONFIG, {}).copy() auto_start = options.get(CONF_AUTO_START, DEFAULT_AUTO_START) safe_mode = options.get(CONF_SAFE_MODE, DEFAULT_SAFE_MODE) - entity_filter = convert_filter( - options.get( - CONF_FILTER, - { - CONF_INCLUDE_DOMAINS: [], - CONF_EXCLUDE_DOMAINS: [], - CONF_INCLUDE_ENTITIES: [], - CONF_EXCLUDE_ENTITIES: [], - }, - ) - ) + entity_filter = FILTER_SCHEMA(options.get(CONF_FILTER, {})) homekit = HomeKit( hass, @@ -272,7 +250,6 @@ async def _async_update_listener(hass: HomeAssistant, entry: ConfigEntry): async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry): """Unload a config entry.""" - 
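The `LazyState` class added above defers the expensive part of `States.to_native()`: the database row is kept as-is, and the JSON `attributes` blob is decoded only the first time something actually reads `.attributes`. A rough, self-contained sketch of that idea, using a namedtuple as a stand-in for a database row:

```python
import json
from collections import namedtuple

# Stand-in for a recorder row; the real code receives SQLAlchemy result rows.
Row = namedtuple("Row", ["entity_id", "state", "attributes"])


class LazyAttributes:
    """Keep the raw row and decode the attributes JSON only on first access."""

    __slots__ = ["_row", "entity_id", "state", "_attributes"]

    def __init__(self, row):
        self._row = row
        self.entity_id = row.entity_id
        self.state = row.state
        self._attributes = None  # decoded lazily

    @property
    def attributes(self):
        if self._attributes is None:
            try:
                self._attributes = json.loads(self._row.attributes)
            except ValueError:
                self._attributes = {}
        return self._attributes


row = Row("light.kitchen", "on", '{"brightness": 180}')
state = LazyAttributes(row)
print(state.state)                     # no JSON parsing has happened yet
print(state.attributes["brightness"])  # first access triggers json.loads
```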
dismiss_setup_message(hass, entry.entry_id) hass.data[DOMAIN][entry.entry_id][UNDO_UPDATE_LISTENER]() @@ -319,7 +296,6 @@ def _async_import_options_from_data_if_missing(hass: HomeAssistant, entry: Confi @callback def _async_register_events_and_services(hass: HomeAssistant): """Register events and services for HomeKit.""" - hass.http.register_view(HomeKitPairingQRView) def handle_homekit_reset_accessory(service): @@ -345,26 +321,6 @@ def _async_register_events_and_services(hass: HomeAssistant): schema=RESET_ACCESSORY_SERVICE_SCHEMA, ) - @callback - def async_describe_logbook_event(event): - """Describe a logbook event.""" - data = event.data - entity_id = data.get(ATTR_ENTITY_ID) - value = data.get(ATTR_VALUE) - - value_msg = f" to {value}" if value else "" - message = f"send command {data[ATTR_SERVICE]}{value_msg} for {data[ATTR_DISPLAY_NAME]}" - - return { - "name": "HomeKit", - "message": message, - "entity_id": entity_id, - } - - hass.components.logbook.async_describe_event( - DOMAIN, EVENT_HOMEKIT_CHANGED, async_describe_logbook_event - ) - async def async_handle_homekit_service_start(service): """Handle start HomeKit service call.""" for entry_id in hass.data[DOMAIN]: @@ -504,7 +460,6 @@ class HomeKit: async def async_start(self, *args): """Start the accessory driver.""" - if self.status != STATUS_READY: return self.status = STATUS_WAIT diff --git a/homeassistant/components/homekit/logbook.py b/homeassistant/components/homekit/logbook.py new file mode 100644 index 00000000000..0ea5a5d542a --- /dev/null +++ b/homeassistant/components/homekit/logbook.py @@ -0,0 +1,28 @@ +"""Describe logbook events.""" +from homeassistant.const import ATTR_ENTITY_ID, ATTR_SERVICE +from homeassistant.core import callback + +from .const import ATTR_DISPLAY_NAME, ATTR_VALUE, DOMAIN, EVENT_HOMEKIT_CHANGED + + +@callback +def async_describe_events(hass, async_describe_event): + """Describe logbook events.""" + + @callback + def async_describe_logbook_event(event): + """Describe a logbook event.""" + data = event.data + entity_id = data.get(ATTR_ENTITY_ID) + value = data.get(ATTR_VALUE) + + value_msg = f" to {value}" if value else "" + message = f"send command {data[ATTR_SERVICE]}{value_msg} for {data[ATTR_DISPLAY_NAME]}" + + return { + "name": "HomeKit", + "message": message, + "entity_id": entity_id, + } + + async_describe_event(DOMAIN, EVENT_HOMEKIT_CHANGED, async_describe_logbook_event) diff --git a/homeassistant/components/homekit/manifest.json b/homeassistant/components/homekit/manifest.json index 985fcc1e799..8a5fc90ae07 100644 --- a/homeassistant/components/homekit/manifest.json +++ b/homeassistant/components/homekit/manifest.json @@ -2,9 +2,23 @@ "domain": "homekit", "name": "HomeKit", "documentation": "https://www.home-assistant.io/integrations/homekit", - "requirements": ["HAP-python==2.9.1","fnvhash==0.1.0","PyQRCode==1.2.1","base36==0.1.1","PyTurboJPEG==1.4.0"], - "dependencies": ["http", "camera", "ffmpeg"], - "after_dependencies": ["logbook", "zeroconf"], - "codeowners": ["@bdraco"], + "requirements": [ + "HAP-python==2.9.1", + "fnvhash==0.1.0", + "PyQRCode==1.2.1", + "base36==0.1.1", + "PyTurboJPEG==1.4.0" + ], + "dependencies": [ + "http", + "camera", + "ffmpeg" + ], + "after_dependencies": [ + "zeroconf" + ], + "codeowners": [ + "@bdraco" + ], "config_flow": true } diff --git a/homeassistant/components/homekit_controller/translations/es.json b/homeassistant/components/homekit_controller/translations/es.json index b7639223f51..b48ec79e9db 100644 --- 
a/homeassistant/components/homekit_controller/translations/es.json +++ b/homeassistant/components/homekit_controller/translations/es.json @@ -3,7 +3,7 @@ "abort": { "accessory_not_found_error": "No se puede a\u00f1adir el emparejamiento porque ya no se puede encontrar el dispositivo.", "already_configured": "El accesorio ya est\u00e1 configurado con este controlador.", - "already_in_progress": "El flujo de configuraci\u00f3n del dispositivo ya est\u00e1 en curso.", + "already_in_progress": "El flujo de configuraci\u00f3n del dispositivo ya est\u00e1 en marcha.", "already_paired": "Este accesorio ya est\u00e1 emparejado con otro dispositivo. Por favor, reinicia el accesorio e int\u00e9ntalo de nuevo.", "ignored_model": "El soporte de HomeKit para este modelo est\u00e1 bloqueado ya que est\u00e1 disponible una integraci\u00f3n nativa m\u00e1s completa.", "invalid_config_entry": "Este dispositivo se muestra como listo para vincular, pero ya existe una entrada que causa conflicto en Home Assistant y se debe eliminar primero.", diff --git a/homeassistant/components/homematicip_cloud/cover.py b/homeassistant/components/homematicip_cloud/cover.py index 580e2d21a11..d11a08d80a6 100644 --- a/homeassistant/components/homematicip_cloud/cover.py +++ b/homeassistant/components/homematicip_cloud/cover.py @@ -6,6 +6,7 @@ from homematicip.aio.device import ( AsyncFullFlushBlind, AsyncFullFlushShutter, AsyncGarageDoorModuleTormatic, + AsyncHoermannDrivesModule, ) from homematicip.aio.group import AsyncExtendedLinkedShutterGroup from homematicip.base.enums import DoorCommand, DoorState @@ -40,8 +41,10 @@ async def async_setup_entry( entities.append(HomematicipCoverSlats(hap, device)) elif isinstance(device, AsyncFullFlushShutter): entities.append(HomematicipCoverShutter(hap, device)) - elif isinstance(device, AsyncGarageDoorModuleTormatic): - entities.append(HomematicipGarageDoorModuleTormatic(hap, device)) + elif isinstance( + device, (AsyncHoermannDrivesModule, AsyncGarageDoorModuleTormatic) + ): + entities.append(HomematicipGarageDoorModule(hap, device)) for group in hap.home.groups: if isinstance(group, AsyncExtendedLinkedShutterGroup): @@ -118,8 +121,8 @@ class HomematicipCoverSlats(HomematicipCoverShutter, CoverEntity): await self._device.set_shutter_stop() -class HomematicipGarageDoorModuleTormatic(HomematicipGenericDevice, CoverEntity): - """Representation of a HomematicIP Garage Door Module for Tormatic.""" +class HomematicipGarageDoorModule(HomematicipGenericDevice, CoverEntity): + """Representation of a HomematicIP Garage Door Module.""" @property def current_cover_position(self) -> int: diff --git a/homeassistant/components/homematicip_cloud/manifest.json b/homeassistant/components/homematicip_cloud/manifest.json index ef362300c66..42efeb73821 100644 --- a/homeassistant/components/homematicip_cloud/manifest.json +++ b/homeassistant/components/homematicip_cloud/manifest.json @@ -3,7 +3,7 @@ "name": "HomematicIP Cloud", "config_flow": true, "documentation": "https://www.home-assistant.io/integrations/homematicip_cloud", - "requirements": ["homematicip==0.10.17"], + "requirements": ["homematicip==0.10.18"], "codeowners": ["@SukramJ"], "quality_scale": "platinum" } diff --git a/homeassistant/components/honeywell/climate.py b/homeassistant/components/honeywell/climate.py index 5969dcdcc27..07ac6c9b217 100644 --- a/homeassistant/components/honeywell/climate.py +++ b/homeassistant/components/honeywell/climate.py @@ -47,24 +47,29 @@ ATTR_FAN_ACTION = "fan_action" CONF_COOL_AWAY_TEMPERATURE = 
"away_cool_temperature" CONF_HEAT_AWAY_TEMPERATURE = "away_heat_temperature" +CONF_DEV_ID = "thermostat" +CONF_LOC_ID = "location" DEFAULT_COOL_AWAY_TEMPERATURE = 88 DEFAULT_HEAT_AWAY_TEMPERATURE = 61 -DEFAULT_REGION = "eu" -REGIONS = ["eu", "us"] -PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( - { - vol.Required(CONF_USERNAME): cv.string, - vol.Required(CONF_PASSWORD): cv.string, - vol.Optional( - CONF_COOL_AWAY_TEMPERATURE, default=DEFAULT_COOL_AWAY_TEMPERATURE - ): vol.Coerce(int), - vol.Optional( - CONF_HEAT_AWAY_TEMPERATURE, default=DEFAULT_HEAT_AWAY_TEMPERATURE - ): vol.Coerce(int), - vol.Optional(CONF_REGION, default=DEFAULT_REGION): vol.In(REGIONS), - } +PLATFORM_SCHEMA = vol.All( + cv.deprecated(CONF_REGION), + PLATFORM_SCHEMA.extend( + { + vol.Required(CONF_USERNAME): cv.string, + vol.Required(CONF_PASSWORD): cv.string, + vol.Optional( + CONF_COOL_AWAY_TEMPERATURE, default=DEFAULT_COOL_AWAY_TEMPERATURE + ): vol.Coerce(int), + vol.Optional( + CONF_HEAT_AWAY_TEMPERATURE, default=DEFAULT_HEAT_AWAY_TEMPERATURE + ): vol.Coerce(int), + vol.Optional(CONF_REGION): cv.string, + vol.Optional(CONF_DEV_ID): cv.string, + vol.Optional(CONF_LOC_ID): cv.string, + } + ), ) HVAC_MODE_TO_HW_MODE = { @@ -104,44 +109,36 @@ def setup_platform(hass, config, add_entities, discovery_info=None): username = config.get(CONF_USERNAME) password = config.get(CONF_PASSWORD) - if config.get(CONF_REGION) == "us": - try: - client = somecomfort.SomeComfort(username, password) - except somecomfort.AuthError: - _LOGGER.error("Failed to login to honeywell account %s", username) - return - except somecomfort.SomeComfortError: - _LOGGER.error( - "Failed to initialize the Honeywell client: " - "Check your configuration (username, password), " - "or maybe you have exceeded the API rate limit?" - ) - return - - dev_id = config.get("thermostat") - loc_id = config.get("location") - cool_away_temp = config.get(CONF_COOL_AWAY_TEMPERATURE) - heat_away_temp = config.get(CONF_HEAT_AWAY_TEMPERATURE) - - add_entities( - [ - HoneywellUSThermostat( - client, device, cool_away_temp, heat_away_temp, username, password - ) - for location in client.locations_by_id.values() - for device in location.devices_by_id.values() - if ( - (not loc_id or location.locationid == loc_id) - and (not dev_id or device.deviceid == dev_id) - ) - ] + try: + client = somecomfort.SomeComfort(username, password) + except somecomfort.AuthError: + _LOGGER.error("Failed to login to honeywell account %s", username) + return + except somecomfort.SomeComfortError: + _LOGGER.error( + "Failed to initialize the Honeywell client: " + "Check your configuration (username, password), " + "or maybe you have exceeded the API rate limit?" ) return - _LOGGER.warning( - "The honeywell component has been deprecated for EU (i.e. non-US) " - "systems. 
For EU-based systems, use the evohome component, " - "see: https://www.home-assistant.io/integrations/evohome" + dev_id = config.get(CONF_DEV_ID) + loc_id = config.get(CONF_LOC_ID) + cool_away_temp = config.get(CONF_COOL_AWAY_TEMPERATURE) + heat_away_temp = config.get(CONF_HEAT_AWAY_TEMPERATURE) + + add_entities( + [ + HoneywellUSThermostat( + client, device, cool_away_temp, heat_away_temp, username, password, + ) + for location in client.locations_by_id.values() + for device in location.devices_by_id.values() + if ( + (not loc_id or location.locationid == loc_id) + and (not dev_id or device.deviceid == dev_id) + ) + ] ) diff --git a/homeassistant/components/huawei_lte/config_flow.py b/homeassistant/components/huawei_lte/config_flow.py index 223ca9dc34a..b834f4dab94 100644 --- a/homeassistant/components/huawei_lte/config_flow.py +++ b/homeassistant/components/huawei_lte/config_flow.py @@ -206,13 +206,19 @@ class ConfigFlowHandler(config_entries.ConfigFlow, domain=DOMAIN): user_input=user_input, errors=errors ) - title = await self.hass.async_add_executor_job(get_router_title, conn) + # pylint: disable=no-member + title = self.context.get("title_placeholders", {}).get( + CONF_NAME + ) or await self.hass.async_add_executor_job(get_router_title, conn) await self.hass.async_add_executor_job(logout) return self.async_create_entry(title=title, data=user_input) async def async_step_ssdp(self, discovery_info): """Handle SSDP initiated config flow.""" + await self.async_set_unique_id(discovery_info[ssdp.ATTR_UPNP_UDN]) + self._abort_if_unique_id_configured() + # Attempt to distinguish from other non-LTE Huawei router devices, at least # some ones we are interested in have "Mobile Wi-Fi" friendlyName. if "mobile" not in discovery_info.get(ssdp.ATTR_UPNP_FRIENDLY_NAME, "").lower(): @@ -235,6 +241,10 @@ class ConfigFlowHandler(config_entries.ConfigFlow, domain=DOMAIN): if self._already_configured(user_input): return self.async_abort(reason="already_configured") + # pylint: disable=no-member + self.context["title_placeholders"] = { + CONF_NAME: discovery_info.get(ssdp.ATTR_UPNP_FRIENDLY_NAME) + } return await self._async_show_user_form(user_input) diff --git a/homeassistant/components/huawei_lte/strings.json b/homeassistant/components/huawei_lte/strings.json index 19a37757d5f..554ec0f53ca 100644 --- a/homeassistant/components/huawei_lte/strings.json +++ b/homeassistant/components/huawei_lte/strings.json @@ -16,6 +16,7 @@ "response_error": "Unknown error from device", "unknown_connection_error": "Unknown error connecting to device" }, + "flow_title": "Huawei LTE: {name}", "step": { "user": { "data": { diff --git a/homeassistant/components/huawei_lte/translations/ca.json b/homeassistant/components/huawei_lte/translations/ca.json index a8f4f9584c2..1a7b245c9fe 100644 --- a/homeassistant/components/huawei_lte/translations/ca.json +++ b/homeassistant/components/huawei_lte/translations/ca.json @@ -16,6 +16,7 @@ "response_error": "S'ha produ\u00eft un error desconegut del dispositiu", "unknown_connection_error": "S'ha produ\u00eft un error desconegut en connectar-se al dispositiu" }, + "flow_title": "Huawei LTE: {name}", "step": { "user": { "data": { diff --git a/homeassistant/components/huawei_lte/translations/en.json b/homeassistant/components/huawei_lte/translations/en.json index 4496759d3ac..022328ea2ea 100644 --- a/homeassistant/components/huawei_lte/translations/en.json +++ b/homeassistant/components/huawei_lte/translations/en.json @@ -16,6 +16,7 @@ "response_error": "Unknown error from device", 
"unknown_connection_error": "Unknown error connecting to device" }, + "flow_title": "Huawei LTE: {name}", "step": { "user": { "data": { diff --git a/homeassistant/components/huawei_lte/translations/es.json b/homeassistant/components/huawei_lte/translations/es.json index 495ddb81bc3..b9d4ae2afc8 100644 --- a/homeassistant/components/huawei_lte/translations/es.json +++ b/homeassistant/components/huawei_lte/translations/es.json @@ -16,6 +16,7 @@ "response_error": "Error desconocido del dispositivo", "unknown_connection_error": "Error desconocido al conectarse al dispositivo" }, + "flow_title": "Huawei LTE: {name}", "step": { "user": { "data": { diff --git a/homeassistant/components/huawei_lte/translations/nn.json b/homeassistant/components/huawei_lte/translations/nn.json deleted file mode 100644 index ea06e4158e9..00000000000 --- a/homeassistant/components/huawei_lte/translations/nn.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "title": "Huawei LTE" -} \ No newline at end of file diff --git a/homeassistant/components/huawei_lte/translations/no.json b/homeassistant/components/huawei_lte/translations/no.json index 99dc194763c..414be9048c6 100644 --- a/homeassistant/components/huawei_lte/translations/no.json +++ b/homeassistant/components/huawei_lte/translations/no.json @@ -16,6 +16,7 @@ "response_error": "Ukjent feil fra enheten", "unknown_connection_error": "Ukjent feil under tilkobling til enhet" }, + "flow_title": "Huawei LTE: {name}", "step": { "user": { "data": { diff --git a/homeassistant/components/huawei_lte/translations/ru.json b/homeassistant/components/huawei_lte/translations/ru.json index 6e4c34c095d..5830cb8ccb3 100644 --- a/homeassistant/components/huawei_lte/translations/ru.json +++ b/homeassistant/components/huawei_lte/translations/ru.json @@ -16,6 +16,7 @@ "response_error": "\u041d\u0435\u0438\u0437\u0432\u0435\u0441\u0442\u043d\u0430\u044f \u043e\u0448\u0438\u0431\u043a\u0430.", "unknown_connection_error": "\u041e\u0448\u0438\u0431\u043a\u0430 \u043f\u043e\u0434\u043a\u043b\u044e\u0447\u0435\u043d\u0438\u044f \u043a \u0443\u0441\u0442\u0440\u043e\u0439\u0441\u0442\u0432\u0443." 
}, + "flow_title": "Huawei LTE: {name}", "step": { "user": { "data": { diff --git a/homeassistant/components/huawei_lte/translations/zh-Hant.json b/homeassistant/components/huawei_lte/translations/zh-Hant.json index 4094733ba62..55cc0b6acf6 100644 --- a/homeassistant/components/huawei_lte/translations/zh-Hant.json +++ b/homeassistant/components/huawei_lte/translations/zh-Hant.json @@ -16,6 +16,7 @@ "response_error": "\u4f86\u81ea\u8a2d\u5099\u672a\u77e5\u932f\u8aa4", "unknown_connection_error": "\u9023\u7dda\u81f3\u8a2d\u5099\u672a\u77e5\u932f\u8aa4" }, + "flow_title": "\u83ef\u70ba LTE\uff1a{name}", "step": { "user": { "data": { diff --git a/homeassistant/components/hue/translations/es.json b/homeassistant/components/hue/translations/es.json index f4762560485..08b7ed311c9 100644 --- a/homeassistant/components/hue/translations/es.json +++ b/homeassistant/components/hue/translations/es.json @@ -3,7 +3,7 @@ "abort": { "all_configured": "Ya se han configurado todas las pasarelas Philips Hue", "already_configured": "La pasarela ya esta configurada", - "already_in_progress": "La configuraci\u00f3n del flujo para la pasarela ya est\u00e1 en curso.", + "already_in_progress": "El flujo de configuraci\u00f3n para la pasarela ya est\u00e1 en marcha.", "cannot_connect": "No se puede conectar a la pasarela", "discover_timeout": "Imposible encontrar pasarelas Philips Hue", "no_bridges": "No se han encontrado pasarelas Philips Hue.", diff --git a/homeassistant/components/humidifier/__init__.py b/homeassistant/components/humidifier/__init__.py new file mode 100644 index 00000000000..fc455feb477 --- /dev/null +++ b/homeassistant/components/humidifier/__init__.py @@ -0,0 +1,175 @@ +"""Provides functionality to interact with humidifier devices.""" +from datetime import timedelta +import logging +from typing import Any, Dict, List, Optional + +import voluptuous as vol + +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import ( + SERVICE_TOGGLE, + SERVICE_TURN_OFF, + SERVICE_TURN_ON, + STATE_ON, +) +import homeassistant.helpers.config_validation as cv +from homeassistant.helpers.config_validation import ( # noqa: F401 + PLATFORM_SCHEMA, + PLATFORM_SCHEMA_BASE, +) +from homeassistant.helpers.entity import ToggleEntity +from homeassistant.helpers.entity_component import EntityComponent +from homeassistant.helpers.typing import ConfigType, HomeAssistantType +from homeassistant.loader import bind_hass + +from .const import ( + ATTR_AVAILABLE_MODES, + ATTR_HUMIDITY, + ATTR_MAX_HUMIDITY, + ATTR_MIN_HUMIDITY, + ATTR_MODE, + DEFAULT_MAX_HUMIDITY, + DEFAULT_MIN_HUMIDITY, + DEVICE_CLASS_DEHUMIDIFIER, + DEVICE_CLASS_HUMIDIFIER, + DOMAIN, + SERVICE_SET_HUMIDITY, + SERVICE_SET_MODE, + SUPPORT_MODES, +) + +_LOGGER = logging.getLogger(__name__) + + +SCAN_INTERVAL = timedelta(seconds=60) + +DEVICE_CLASSES = [DEVICE_CLASS_HUMIDIFIER, DEVICE_CLASS_DEHUMIDIFIER] + +DEVICE_CLASSES_SCHEMA = vol.All(vol.Lower, vol.In(DEVICE_CLASSES)) + + +@bind_hass +def is_on(hass, entity_id): + """Return if the humidifier is on based on the statemachine. + + Async friendly. 
+ """ + return hass.states.is_state(entity_id, STATE_ON) + + +async def async_setup(hass: HomeAssistantType, config: ConfigType) -> bool: + """Set up humidifier devices.""" + component = hass.data[DOMAIN] = EntityComponent( + _LOGGER, DOMAIN, hass, SCAN_INTERVAL + ) + await component.async_setup(config) + + component.async_register_entity_service(SERVICE_TURN_ON, {}, "async_turn_on") + component.async_register_entity_service(SERVICE_TURN_OFF, {}, "async_turn_off") + component.async_register_entity_service(SERVICE_TOGGLE, {}, "async_toggle") + component.async_register_entity_service( + SERVICE_SET_MODE, + {vol.Required(ATTR_MODE): cv.string}, + "async_set_mode", + [SUPPORT_MODES], + ) + component.async_register_entity_service( + SERVICE_SET_HUMIDITY, + { + vol.Required(ATTR_HUMIDITY): vol.All( + vol.Coerce(int), vol.Range(min=0, max=100) + ) + }, + "async_set_humidity", + ) + + return True + + +async def async_setup_entry(hass: HomeAssistantType, entry: ConfigEntry) -> bool: + """Set up a config entry.""" + return await hass.data[DOMAIN].async_setup_entry(entry) + + +async def async_unload_entry(hass: HomeAssistantType, entry: ConfigEntry) -> bool: + """Unload a config entry.""" + return await hass.data[DOMAIN].async_unload_entry(entry) + + +class HumidifierEntity(ToggleEntity): + """Representation of a humidifier device.""" + + @property + def capability_attributes(self) -> Dict[str, Any]: + """Return capability attributes.""" + supported_features = self.supported_features or 0 + data = { + ATTR_MIN_HUMIDITY: self.min_humidity, + ATTR_MAX_HUMIDITY: self.max_humidity, + } + + if supported_features & SUPPORT_MODES: + data[ATTR_AVAILABLE_MODES] = self.available_modes + + return data + + @property + def state_attributes(self) -> Dict[str, Any]: + """Return the optional state attributes.""" + supported_features = self.supported_features or 0 + data = {} + + if self.target_humidity is not None: + data[ATTR_HUMIDITY] = self.target_humidity + + if supported_features & SUPPORT_MODES: + data[ATTR_MODE] = self.mode + + return data + + @property + def target_humidity(self) -> Optional[int]: + """Return the humidity we try to reach.""" + return None + + @property + def mode(self) -> Optional[str]: + """Return the current mode, e.g., home, auto, baby. + + Requires SUPPORT_MODES. + """ + raise NotImplementedError + + @property + def available_modes(self) -> Optional[List[str]]: + """Return a list of available modes. + + Requires SUPPORT_MODES. 
+ """ + raise NotImplementedError + + def set_humidity(self, humidity: int) -> None: + """Set new target humidity.""" + raise NotImplementedError() + + async def async_set_humidity(self, humidity: int) -> None: + """Set new target humidity.""" + await self.hass.async_add_executor_job(self.set_humidity, humidity) + + def set_mode(self, mode: str) -> None: + """Set new mode.""" + raise NotImplementedError() + + async def async_set_mode(self, mode: str) -> None: + """Set new mode.""" + await self.hass.async_add_executor_job(self.set_mode, mode) + + @property + def min_humidity(self) -> int: + """Return the minimum humidity.""" + return DEFAULT_MIN_HUMIDITY + + @property + def max_humidity(self) -> int: + """Return the maximum humidity.""" + return DEFAULT_MAX_HUMIDITY diff --git a/homeassistant/components/humidifier/const.py b/homeassistant/components/humidifier/const.py new file mode 100644 index 00000000000..82e87ae5c31 --- /dev/null +++ b/homeassistant/components/humidifier/const.py @@ -0,0 +1,30 @@ +"""Provides the constants needed for component.""" + +MODE_NORMAL = "normal" +MODE_ECO = "eco" +MODE_AWAY = "away" +MODE_BOOST = "boost" +MODE_COMFORT = "comfort" +MODE_HOME = "home" +MODE_SLEEP = "sleep" +MODE_AUTO = "auto" +MODE_BABY = "baby" + +ATTR_MODE = "mode" +ATTR_AVAILABLE_MODES = "available_modes" +ATTR_HUMIDITY = "humidity" +ATTR_MAX_HUMIDITY = "max_humidity" +ATTR_MIN_HUMIDITY = "min_humidity" + +DEFAULT_MIN_HUMIDITY = 0 +DEFAULT_MAX_HUMIDITY = 100 + +DOMAIN = "humidifier" + +DEVICE_CLASS_HUMIDIFIER = "humidifier" +DEVICE_CLASS_DEHUMIDIFIER = "dehumidifier" + +SERVICE_SET_MODE = "set_mode" +SERVICE_SET_HUMIDITY = "set_humidity" + +SUPPORT_MODES = 1 diff --git a/homeassistant/components/humidifier/device_action.py b/homeassistant/components/humidifier/device_action.py new file mode 100644 index 00000000000..a6194994a9c --- /dev/null +++ b/homeassistant/components/humidifier/device_action.py @@ -0,0 +1,121 @@ +"""Provides device actions for Humidifier.""" +from typing import List, Optional + +import voluptuous as vol + +from homeassistant.components.device_automation import toggle_entity +from homeassistant.const import ( + ATTR_ENTITY_ID, + CONF_DEVICE_ID, + CONF_DOMAIN, + CONF_ENTITY_ID, + CONF_TYPE, +) +from homeassistant.core import Context, HomeAssistant +from homeassistant.helpers import entity_registry +import homeassistant.helpers.config_validation as cv + +from . 
import DOMAIN, const + +SET_HUMIDITY_SCHEMA = cv.DEVICE_ACTION_BASE_SCHEMA.extend( + { + vol.Required(CONF_TYPE): "set_humidity", + vol.Required(CONF_ENTITY_ID): cv.entity_domain(DOMAIN), + vol.Required(const.ATTR_HUMIDITY): vol.Coerce(int), + } +) + +SET_MODE_SCHEMA = cv.DEVICE_ACTION_BASE_SCHEMA.extend( + { + vol.Required(CONF_TYPE): "set_mode", + vol.Required(CONF_ENTITY_ID): cv.entity_domain(DOMAIN), + vol.Required(const.ATTR_MODE): cv.string, + } +) + +ONOFF_SCHEMA = toggle_entity.ACTION_SCHEMA.extend({vol.Required(CONF_DOMAIN): DOMAIN}) + +ACTION_SCHEMA = vol.Any(SET_HUMIDITY_SCHEMA, SET_MODE_SCHEMA, ONOFF_SCHEMA) + + +async def async_get_actions(hass: HomeAssistant, device_id: str) -> List[dict]: + """List device actions for Humidifier devices.""" + registry = await entity_registry.async_get_registry(hass) + actions = await toggle_entity.async_get_actions(hass, device_id, DOMAIN) + + # Get all the integrations entities for this device + for entry in entity_registry.async_entries_for_device(registry, device_id): + if entry.domain != DOMAIN: + continue + + state = hass.states.get(entry.entity_id) + + actions.append( + { + CONF_DEVICE_ID: device_id, + CONF_DOMAIN: DOMAIN, + CONF_ENTITY_ID: entry.entity_id, + CONF_TYPE: "set_humidity", + } + ) + + # We need a state or else we can't populate the available modes. + if state is None: + continue + + if state.attributes["supported_features"] & const.SUPPORT_MODES: + actions.append( + { + CONF_DEVICE_ID: device_id, + CONF_DOMAIN: DOMAIN, + CONF_ENTITY_ID: entry.entity_id, + CONF_TYPE: "set_mode", + } + ) + + return actions + + +async def async_call_action_from_config( + hass: HomeAssistant, config: dict, variables: dict, context: Optional[Context] +) -> None: + """Execute a device action.""" + config = ACTION_SCHEMA(config) + + service_data = {ATTR_ENTITY_ID: config[CONF_ENTITY_ID]} + + if config[CONF_TYPE] == "set_humidity": + service = const.SERVICE_SET_HUMIDITY + service_data[const.ATTR_HUMIDITY] = config[const.ATTR_HUMIDITY] + elif config[CONF_TYPE] == "set_mode": + service = const.SERVICE_SET_MODE + service_data[const.ATTR_MODE] = config[const.ATTR_MODE] + else: + return await toggle_entity.async_call_action_from_config( + hass, config, variables, context, DOMAIN + ) + + await hass.services.async_call( + DOMAIN, service, service_data, blocking=True, context=context + ) + + +async def async_get_action_capabilities(hass, config): + """List action capabilities.""" + state = hass.states.get(config[CONF_ENTITY_ID]) + action_type = config[CONF_TYPE] + + fields = {} + + if action_type == "set_humidity": + fields[vol.Required(const.ATTR_HUMIDITY)] = vol.Coerce(int) + elif action_type == "set_mode": + if state: + available_modes = state.attributes.get(const.ATTR_AVAILABLE_MODES, []) + else: + available_modes = [] + fields[vol.Required(const.ATTR_MODE)] = vol.In(available_modes) + else: + return {} + + return {"extra_fields": vol.Schema(fields)} diff --git a/homeassistant/components/humidifier/manifest.json b/homeassistant/components/humidifier/manifest.json new file mode 100644 index 00000000000..b64065a2583 --- /dev/null +++ b/homeassistant/components/humidifier/manifest.json @@ -0,0 +1,7 @@ +{ + "domain": "humidifier", + "name": "Humidifier", + "documentation": "https://www.home-assistant.io/integrations/humidifier", + "codeowners": ["@home-assistant/core", "@Shulyaka"], + "quality_scale": "internal" +} diff --git a/homeassistant/components/humidifier/reproduce_state.py b/homeassistant/components/humidifier/reproduce_state.py new file mode 
100644 index 00000000000..e9b1777d63f --- /dev/null +++ b/homeassistant/components/humidifier/reproduce_state.py @@ -0,0 +1,96 @@ +"""Module that groups code required to handle state restore for component.""" +import asyncio +import logging +from typing import Any, Dict, Iterable, Optional + +from homeassistant.const import SERVICE_TURN_OFF, SERVICE_TURN_ON, STATE_OFF, STATE_ON +from homeassistant.core import Context, State +from homeassistant.helpers.typing import HomeAssistantType + +from .const import ( + ATTR_HUMIDITY, + ATTR_MODE, + DOMAIN, + SERVICE_SET_HUMIDITY, + SERVICE_SET_MODE, +) + +_LOGGER = logging.getLogger(__name__) + + +async def _async_reproduce_states( + hass: HomeAssistantType, + state: State, + *, + context: Optional[Context] = None, + reproduce_options: Optional[Dict[str, Any]] = None, +) -> None: + """Reproduce component states.""" + cur_state = hass.states.get(state.entity_id) + + if cur_state is None: + _LOGGER.warning("Unable to find entity %s", state.entity_id) + return + + async def call_service(service: str, keys: Iterable, data=None): + """Call service with set of attributes given.""" + data = data or {} + data["entity_id"] = state.entity_id + for key in keys: + if key in state.attributes: + data[key] = state.attributes[key] + + await hass.services.async_call( + DOMAIN, service, data, blocking=True, context=context + ) + + if state.state == STATE_OFF: + # Ensure the device is off if it needs to be and exit + if cur_state.state != STATE_OFF: + await call_service(SERVICE_TURN_OFF, []) + return + + if state.state != STATE_ON: + # we can't know how to handle this + _LOGGER.warning( + "Invalid state specified for %s: %s", state.entity_id, state.state + ) + return + + # First of all, turn on if needed, because the device might not + # be able to set mode and humidity while being off + if cur_state.state != STATE_ON: + await call_service(SERVICE_TURN_ON, []) + # refetch the state as turning on might allow us to see some more values + cur_state = hass.states.get(state.entity_id) + + # Then set the mode before target humidity, because switching modes + # may invalidate target humidity + if ATTR_MODE in state.attributes and state.attributes[ + ATTR_MODE + ] != cur_state.attributes.get(ATTR_MODE): + await call_service(SERVICE_SET_MODE, [ATTR_MODE]) + + # Next, restore target humidity for the current mode + if ATTR_HUMIDITY in state.attributes and state.attributes[ + ATTR_HUMIDITY + ] != cur_state.attributes.get(ATTR_HUMIDITY): + await call_service(SERVICE_SET_HUMIDITY, [ATTR_HUMIDITY]) + + +async def async_reproduce_states( + hass: HomeAssistantType, + states: Iterable[State], + *, + context: Optional[Context] = None, + reproduce_options: Optional[Dict[str, Any]] = None, +) -> None: + """Reproduce component states.""" + await asyncio.gather( + *( + _async_reproduce_states( + hass, state, context=context, reproduce_options=reproduce_options + ) + for state in states + ) + ) diff --git a/homeassistant/components/humidifier/services.yaml b/homeassistant/components/humidifier/services.yaml new file mode 100644 index 00000000000..d10f2fb604b --- /dev/null +++ b/homeassistant/components/humidifier/services.yaml @@ -0,0 +1,42 @@ +# Describes the format for available humidifier services + +set_mode: + description: Set mode for humidifier device. + fields: + entity_id: + description: Name(s) of entities to change. + example: 'humidifier.bedroom' + mode: + description: New mode + example: 'away' + +set_humidity: + description: Set target humidity of humidifier device. 
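For context on how the `humidifier` services being described here map onto an entity, this is a minimal sketch of a platform entity built on the new `HumidifierEntity` base class; the entity name, mode list, and stored values are invented for illustration.

```python
from homeassistant.components.humidifier import HumidifierEntity
from homeassistant.components.humidifier.const import (
    DEVICE_CLASS_HUMIDIFIER,
    MODE_AUTO,
    MODE_NORMAL,
    SUPPORT_MODES,
)


class DemoHumidifier(HumidifierEntity):
    """Illustrative humidifier that just remembers what it was told."""

    def __init__(self):
        """Initialize with made-up defaults."""
        self._is_on = False
        self._humidity = 50
        self._mode = MODE_NORMAL

    @property
    def name(self):
        return "Demo humidifier"  # invented name

    @property
    def device_class(self):
        return DEVICE_CLASS_HUMIDIFIER

    @property
    def supported_features(self):
        return SUPPORT_MODES

    @property
    def is_on(self):
        return self._is_on

    @property
    def target_humidity(self):
        """Exposed as the humidity state attribute."""
        return self._humidity

    @property
    def mode(self):
        return self._mode

    @property
    def available_modes(self):
        return [MODE_NORMAL, MODE_AUTO]

    def turn_on(self, **kwargs):
        self._is_on = True

    def turn_off(self, **kwargs):
        self._is_on = False

    def set_humidity(self, humidity):
        """Called by the humidifier.set_humidity service."""
        self._humidity = humidity

    def set_mode(self, mode):
        """Called by the humidifier.set_mode service."""
        self._mode = mode
```

Because `supported_features` includes `SUPPORT_MODES`, the `set_mode` service registered above applies to this entity; `set_humidity`, `turn_on`, `turn_off`, and `toggle` are available regardless.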
+ fields: + entity_id: + description: Name(s) of entities to change. + example: 'humidifier.bedroom' + humidity: + description: New target humidity for humidifier device. + example: 50 + +turn_on: + description: Turn humidifier device on. + fields: + entity_id: + description: Name(s) of entities to change. + example: 'humidifier.bedroom' + +turn_off: + description: Turn humidifier device off. + fields: + entity_id: + description: Name(s) of entities to change. + example: 'humidifier.bedroom' + +toggle: + description: Toggles a humidifier device. + fields: + entity_id: + description: Name(s) of entities to toggle. + example: 'humidifier.bedroom' diff --git a/homeassistant/components/humidifier/strings.json b/homeassistant/components/humidifier/strings.json new file mode 100644 index 00000000000..de7086cd053 --- /dev/null +++ b/homeassistant/components/humidifier/strings.json @@ -0,0 +1,18 @@ +{ + "title": "Humidifier", + "device_automation": { + "action_type": { + "set_humidity": "Set humidity for {entity_name}", + "set_mode": "Change mode on {entity_name}", + "toggle": "Toggle {entity_name}", + "turn_on": "Turn on {entity_name}", + "turn_off": "Turn off {entity_name}" + } + }, + "state": { + "_": { + "off": "[%key:common::state::off%]", + "on": "[%key:common::state::on%]" + } + } +} diff --git a/homeassistant/components/humidifier/translations/en.json b/homeassistant/components/humidifier/translations/en.json new file mode 100644 index 00000000000..5a5f803b2a3 --- /dev/null +++ b/homeassistant/components/humidifier/translations/en.json @@ -0,0 +1,18 @@ +{ + "device_automation": { + "action_type": { + "set_humidity": "Set humidity for {entity_name}", + "set_mode": "Change mode on {entity_name}", + "toggle": "Toggle {entity_name}", + "turn_off": "Turn off {entity_name}", + "turn_on": "Turn on {entity_name}" + } + }, + "state": { + "_": { + "off": "Off", + "on": "On" + } + }, + "title": "Humidifier" +} \ No newline at end of file diff --git a/homeassistant/components/hvv_departures/__init__.py b/homeassistant/components/hvv_departures/__init__.py new file mode 100644 index 00000000000..853ed9460c8 --- /dev/null +++ b/homeassistant/components/hvv_departures/__init__.py @@ -0,0 +1,52 @@ +"""The HVV integration.""" +import asyncio + +from homeassistant.components.sensor import DOMAIN as DOMAIN_SENSOR +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_USERNAME +from homeassistant.core import HomeAssistant +from homeassistant.helpers import aiohttp_client + +from .const import DOMAIN +from .hub import GTIHub + +PLATFORMS = [DOMAIN_SENSOR] + + +async def async_setup(hass: HomeAssistant, config: dict): + """Set up the HVV component.""" + return True + + +async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry): + """Set up HVV from a config entry.""" + + hub = GTIHub( + entry.data[CONF_HOST], + entry.data[CONF_USERNAME], + entry.data[CONF_PASSWORD], + aiohttp_client.async_get_clientsession(hass), + ) + + hass.data.setdefault(DOMAIN, {}) + hass.data[DOMAIN][entry.entry_id] = hub + + for component in PLATFORMS: + hass.async_create_task( + hass.config_entries.async_forward_entry_setup(entry, component) + ) + + return True + + +async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry): + """Unload a config entry.""" + unload_ok = all( + await asyncio.gather( + *[ + hass.config_entries.async_forward_entry_unload(entry, component) + for component in PLATFORMS + ] + ) + ) + return unload_ok diff --git 
a/homeassistant/components/hvv_departures/config_flow.py b/homeassistant/components/hvv_departures/config_flow.py new file mode 100644 index 00000000000..720114413d9 --- /dev/null +++ b/homeassistant/components/hvv_departures/config_flow.py @@ -0,0 +1,218 @@ +"""Config flow for HVV integration.""" +import logging + +from pygti.auth import GTI_DEFAULT_HOST +from pygti.exceptions import CannotConnect, InvalidAuth +import voluptuous as vol + +from homeassistant import config_entries +from homeassistant.const import CONF_HOST, CONF_OFFSET, CONF_PASSWORD, CONF_USERNAME +from homeassistant.core import callback +from homeassistant.helpers import aiohttp_client +import homeassistant.helpers.config_validation as cv + +from .const import ( # pylint:disable=unused-import + CONF_FILTER, + CONF_REAL_TIME, + CONF_STATION, + DOMAIN, +) +from .hub import GTIHub + +_LOGGER = logging.getLogger(__name__) + +SCHEMA_STEP_USER = vol.Schema( + { + vol.Required(CONF_HOST, default=GTI_DEFAULT_HOST): str, + vol.Required(CONF_USERNAME): str, + vol.Required(CONF_PASSWORD): str, + } +) + +SCHEMA_STEP_STATION = vol.Schema({vol.Required(CONF_STATION): str}) + +SCHEMA_STEP_OPTIONS = vol.Schema( + { + vol.Required(CONF_FILTER): vol.In([]), + vol.Required(CONF_OFFSET, default=0): vol.All(int, vol.Range(min=0)), + vol.Optional(CONF_REAL_TIME, default=True): bool, + } +) + + +class ConfigFlow(config_entries.ConfigFlow, domain=DOMAIN): + """Handle a config flow for HVV.""" + + VERSION = 1 + CONNECTION_CLASS = config_entries.CONN_CLASS_CLOUD_POLL + + def __init__(self): + """Initialize component.""" + self.hub = None + self.data = None + self.stations = {} + + async def async_step_user(self, user_input=None): + """Handle the initial step.""" + errors = {} + + if user_input is not None: + session = aiohttp_client.async_get_clientsession(self.hass) + self.hub = GTIHub( + user_input[CONF_HOST], + user_input[CONF_USERNAME], + user_input[CONF_PASSWORD], + session, + ) + + try: + response = await self.hub.authenticate() + _LOGGER.debug("Init gti: %r", response) + except CannotConnect: + errors["base"] = "cannot_connect" + except InvalidAuth: + errors["base"] = "invalid_auth" + + if not errors: + self.data = user_input + return await self.async_step_station() + + return self.async_show_form( + step_id="user", data_schema=SCHEMA_STEP_USER, errors=errors + ) + + async def async_step_station(self, user_input=None): + """Handle the step where the user inputs his/her station.""" + if user_input is not None: + + errors = {} + + check_name = await self.hub.gti.checkName( + {"theName": {"name": user_input[CONF_STATION]}, "maxList": 20} + ) + + stations = check_name.get("results") + + self.stations = { + f"{station.get('name')}": station + for station in stations + if station.get("type") == "STATION" + } + + if not self.stations: + errors["base"] = "no_results" + + return self.async_show_form( + step_id="station", data_schema=SCHEMA_STEP_STATION, errors=errors + ) + + # schema + + return await self.async_step_station_select() + + return self.async_show_form(step_id="station", data_schema=SCHEMA_STEP_STATION) + + async def async_step_station_select(self, user_input=None): + """Handle the step where the user inputs his/her station.""" + + schema = vol.Schema({vol.Required(CONF_STATION): vol.In(list(self.stations))}) + + if user_input is None: + return self.async_show_form(step_id="station_select", data_schema=schema) + + self.data.update({"station": self.stations[user_input[CONF_STATION]]}) + + title = self.data[CONF_STATION]["name"] + + return 
self.async_create_entry(title=title, data=self.data) + + @staticmethod + @callback + def async_get_options_flow(config_entry): + """Get options flow.""" + return OptionsFlowHandler(config_entry) + + +class OptionsFlowHandler(config_entries.OptionsFlow): + """Options flow handler.""" + + def __init__(self, config_entry): + """Initialize HVV Departures options flow.""" + self.config_entry = config_entry + self.options = dict(config_entry.options) + self.departure_filters = {} + self.hub = None + + async def async_step_init(self, user_input=None): + """Manage the options.""" + errors = {} + if not self.departure_filters: + + departure_list = {} + self.hub = self.hass.data[DOMAIN][self.config_entry.entry_id] + + try: + departure_list = await self.hub.gti.departureList( + { + "station": self.config_entry.data[CONF_STATION], + "time": {"date": "heute", "time": "jetzt"}, + "maxList": 5, + "maxTimeOffset": 200, + "useRealtime": True, + "returnFilters": True, + } + ) + except CannotConnect: + errors["base"] = "cannot_connect" + except InvalidAuth: + errors["base"] = "invalid_auth" + + if not errors: + self.departure_filters = { + str(i): departure_filter + for i, departure_filter in enumerate(departure_list.get("filter")) + } + + if user_input is not None and not errors: + + options = { + CONF_FILTER: [ + self.departure_filters[x] for x in user_input[CONF_FILTER] + ], + CONF_OFFSET: user_input[CONF_OFFSET], + CONF_REAL_TIME: user_input[CONF_REAL_TIME], + } + + return self.async_create_entry(title="", data=options) + + if CONF_FILTER in self.config_entry.options: + old_filter = [ + i + for (i, f) in self.departure_filters.items() + if f in self.config_entry.options.get(CONF_FILTER) + ] + else: + old_filter = [] + + return self.async_show_form( + step_id="init", + data_schema=vol.Schema( + { + vol.Optional(CONF_FILTER, default=old_filter): cv.multi_select( + { + key: f"{departure_filter['serviceName']}, {departure_filter['label']}" + for key, departure_filter in self.departure_filters.items() + } + ), + vol.Required( + CONF_OFFSET, + default=self.config_entry.options.get(CONF_OFFSET, 0), + ): vol.All(int, vol.Range(min=0)), + vol.Optional( + CONF_REAL_TIME, + default=self.config_entry.options.get(CONF_REAL_TIME, True), + ): bool, + } + ), + errors=errors, + ) diff --git a/homeassistant/components/hvv_departures/const.py b/homeassistant/components/hvv_departures/const.py new file mode 100644 index 00000000000..ae03d1cf58a --- /dev/null +++ b/homeassistant/components/hvv_departures/const.py @@ -0,0 +1,10 @@ +"""Constants for the HVV Departure integration.""" + +DOMAIN = "hvv_departures" +DEFAULT_NAME = DOMAIN +MANUFACTURER = "HVV" +ATTRIBUTION = "Data provided by www.hvv.de" + +CONF_STATION = "station" +CONF_REAL_TIME = "real_time" +CONF_FILTER = "filter" diff --git a/homeassistant/components/hvv_departures/hub.py b/homeassistant/components/hvv_departures/hub.py new file mode 100644 index 00000000000..7cffbed345c --- /dev/null +++ b/homeassistant/components/hvv_departures/hub.py @@ -0,0 +1,20 @@ +"""Hub.""" + +from pygti.gti import GTI, Auth + + +class GTIHub: + """GTI Hub.""" + + def __init__(self, host, username, password, session): + """Initialize.""" + self.host = host + self.username = username + self.password = password + + self.gti = GTI(Auth(session, self.username, self.password, self.host)) + + async def authenticate(self): + """Test if we can authenticate with the host.""" + + return await self.gti.init() diff --git a/homeassistant/components/hvv_departures/manifest.json 
b/homeassistant/components/hvv_departures/manifest.json new file mode 100644 index 00000000000..cdb8ed2524f --- /dev/null +++ b/homeassistant/components/hvv_departures/manifest.json @@ -0,0 +1,12 @@ +{ + "domain": "hvv_departures", + "name": "HVV Departures", + "config_flow": true, + "documentation": "https://www.home-assistant.io/integrations/hvv_departures", + "requirements": [ + "pygti==0.6.0" + ], + "codeowners": [ + "@vigonotion" + ] +} \ No newline at end of file diff --git a/homeassistant/components/hvv_departures/sensor.py b/homeassistant/components/hvv_departures/sensor.py new file mode 100644 index 00000000000..d3a02462eb9 --- /dev/null +++ b/homeassistant/components/hvv_departures/sensor.py @@ -0,0 +1,201 @@ +"""Sensor platform for hvv.""" +from datetime import timedelta +import logging + +from aiohttp import ClientConnectorError +from pygti.exceptions import InvalidAuth + +from homeassistant.const import ATTR_ATTRIBUTION, ATTR_ID, DEVICE_CLASS_TIMESTAMP +from homeassistant.helpers import aiohttp_client +from homeassistant.helpers.entity import Entity +from homeassistant.util import Throttle +from homeassistant.util.dt import utcnow + +from .const import ATTRIBUTION, CONF_STATION, DOMAIN, MANUFACTURER + +MIN_TIME_BETWEEN_UPDATES = timedelta(minutes=1) +MAX_LIST = 20 +MAX_TIME_OFFSET = 360 +ICON = "mdi:bus" +UNIT_OF_MEASUREMENT = "min" + +ATTR_DEPARTURE = "departure" +ATTR_LINE = "line" +ATTR_ORIGIN = "origin" +ATTR_DIRECTION = "direction" +ATTR_TYPE = "type" +ATTR_DELAY = "delay" +ATTR_NEXT = "next" + +PARALLEL_UPDATES = 0 + +_LOGGER = logging.getLogger(__name__) + + +async def async_setup_entry(hass, config_entry, async_add_devices): + """Set up the sensor platform.""" + hub = hass.data[DOMAIN][config_entry.entry_id] + + session = aiohttp_client.async_get_clientsession(hass) + + sensor = HVVDepartureSensor(hass, config_entry, session, hub) + async_add_devices([sensor], True) + + +class HVVDepartureSensor(Entity): + """HVVDepartureSensor class.""" + + def __init__(self, hass, config_entry, session, hub): + """Initialize.""" + self.config_entry = config_entry + self.station_name = self.config_entry.data[CONF_STATION]["name"] + self.attr = {ATTR_ATTRIBUTION: ATTRIBUTION} + self._available = False + self._state = None + self._name = f"Departures at {self.station_name}" + self._last_error = None + + self.gti = hub.gti + + @Throttle(MIN_TIME_BETWEEN_UPDATES) + async def async_update(self, **kwargs): + """Update the sensor.""" + + departure_time = utcnow() + timedelta( + minutes=self.config_entry.options.get("offset", 0) + ) + + payload = { + "station": self.config_entry.data[CONF_STATION], + "time": { + "date": departure_time.strftime("%d.%m.%Y"), + "time": departure_time.strftime("%H:%M"), + }, + "maxList": MAX_LIST, + "maxTimeOffset": MAX_TIME_OFFSET, + "useRealtime": self.config_entry.options.get("realtime", False), + } + + if "filter" in self.config_entry.options: + payload.update({"filter": self.config_entry.options["filter"]}) + + try: + data = await self.gti.departureList(payload) + except InvalidAuth as error: + if self._last_error != InvalidAuth: + _LOGGER.error("Authentication failed: %r", error) + self._last_error = InvalidAuth + self._available = False + except ClientConnectorError as error: + if self._last_error != ClientConnectorError: + _LOGGER.warning("Network unavailable: %r", error) + self._last_error = ClientConnectorError + self._available = False + except Exception as error: # pylint: disable=broad-except + if self._last_error != error: + _LOGGER.error("Error 
occurred while fetching data: %r", error) + self._last_error = error + self._available = False + + if not (data["returnCode"] == "OK" and data.get("departures")): + self._available = False + return + + if self._last_error == ClientConnectorError: + _LOGGER.debug("Network available again") + + self._last_error = None + + departure = data["departures"][0] + line = departure["line"] + delay = departure.get("delay", 0) + self._available = True + self._state = ( + departure_time + + timedelta(minutes=departure["timeOffset"]) + + timedelta(seconds=delay) + ).isoformat() + + self.attr.update( + { + ATTR_LINE: line["name"], + ATTR_ORIGIN: line["origin"], + ATTR_DIRECTION: line["direction"], + ATTR_TYPE: line["type"]["shortInfo"], + ATTR_ID: line["id"], + ATTR_DELAY: delay, + } + ) + + departures = [] + for departure in data["departures"]: + line = departure["line"] + delay = departure.get("delay", 0) + departures.append( + { + ATTR_DEPARTURE: departure_time + + timedelta(minutes=departure["timeOffset"]) + + timedelta(seconds=delay), + ATTR_LINE: line["name"], + ATTR_ORIGIN: line["origin"], + ATTR_DIRECTION: line["direction"], + ATTR_TYPE: line["type"]["shortInfo"], + ATTR_ID: line["id"], + ATTR_DELAY: delay, + } + ) + self.attr[ATTR_NEXT] = departures + + @property + def unique_id(self): + """Return a unique ID to use for this sensor.""" + station_id = self.config_entry.data[CONF_STATION]["id"] + station_type = self.config_entry.data[CONF_STATION]["type"] + + return f"{self.config_entry.entry_id}-{station_id}-{station_type}" + + @property + def device_info(self): + """Return the device info for this sensor.""" + return { + "identifiers": { + ( + DOMAIN, + self.config_entry.entry_id, + self.config_entry.data[CONF_STATION]["id"], + self.config_entry.data[CONF_STATION]["type"], + ) + }, + "name": self._name, + "manufacturer": MANUFACTURER, + } + + @property + def name(self): + """Return the name of the sensor.""" + return self._name + + @property + def state(self): + """Return the state of the sensor.""" + return self._state + + @property + def icon(self): + """Return the icon of the sensor.""" + return ICON + + @property + def available(self): + """Return True if entity is available.""" + return self._available + + @property + def device_class(self): + """Return the class of this device, from component DEVICE_CLASSES.""" + return DEVICE_CLASS_TIMESTAMP + + @property + def device_state_attributes(self): + """Return the state attributes.""" + return self.attr diff --git a/homeassistant/components/hvv_departures/strings.json b/homeassistant/components/hvv_departures/strings.json new file mode 100644 index 00000000000..dfd6484f7d8 --- /dev/null +++ b/homeassistant/components/hvv_departures/strings.json @@ -0,0 +1,48 @@ +{ + "title": "HVV Departures", + "config": { + "step": { + "user": { + "title": "Connect to the HVV API", + "data": { + "host": "Host", + "username": "Username", + "password": "Password" + } + }, + "station": { + "title": "Enter Station/Address", + "data": { + "station": "Station/Address" + } + }, + "station_select": { + "title": "Select Station/Address", + "data": { + "station": "Station/Address" + } + } + }, + "error": { + "cannot_connect": "Failed to connect, please try again", + "invalid_auth": "Invalid authentication", + "no_results": "No results. 
Try with a different station/address" + }, + "abort": { + "already_configured": "Device is already configured" + } + }, + "options": { + "step": { + "init": { + "title": "Options", + "description": "Change options for this departure sensor", + "data": { + "filter": "Select lines", + "offset": "Offset (minutes)", + "real_time": "Use real time data" + } + } + } + } +} \ No newline at end of file diff --git a/homeassistant/components/hvv_departures/translations/ca.json b/homeassistant/components/hvv_departures/translations/ca.json new file mode 100644 index 00000000000..4b295512feb --- /dev/null +++ b/homeassistant/components/hvv_departures/translations/ca.json @@ -0,0 +1,48 @@ +{ + "config": { + "abort": { + "already_configured": "El dispositiu ja est\u00e0 configurat" + }, + "error": { + "cannot_connect": "No s'ha pogut connectar, torna-ho a provar", + "invalid_auth": "Autenticaci\u00f3 inv\u00e0lida", + "no_results": "Sense resultats. Prova-ho amb una altra estaci\u00f3/adre\u00e7a" + }, + "step": { + "station": { + "data": { + "station": "Estaci\u00f3/adre\u00e7a" + }, + "title": "Introdueix estaci\u00f3/adre\u00e7a" + }, + "station_select": { + "data": { + "station": "Estaci\u00f3/adre\u00e7a" + }, + "title": "Selecciona d'estaci\u00f3/adre\u00e7a" + }, + "user": { + "data": { + "host": "Amfitri\u00f3", + "password": "Contrasenya", + "username": "Nom d'usuari" + }, + "title": "Connexi\u00f3 amb l'API d'HVV" + } + } + }, + "options": { + "step": { + "init": { + "data": { + "filter": "Selecciona l\u00ednies", + "offset": "\u00d2fset (minuts)", + "real_time": "Utilitza dades en temps real" + }, + "description": "Canvia les opcions d'aquest sensor de sortides", + "title": "Opcions" + } + } + }, + "title": "HVV Departures" +} \ No newline at end of file diff --git a/homeassistant/components/hvv_departures/translations/en.json b/homeassistant/components/hvv_departures/translations/en.json new file mode 100644 index 00000000000..ede3ece2f4a --- /dev/null +++ b/homeassistant/components/hvv_departures/translations/en.json @@ -0,0 +1,48 @@ +{ + "config": { + "abort": { + "already_configured": "Device is already configured" + }, + "error": { + "cannot_connect": "Failed to connect, please try again", + "invalid_auth": "Invalid authentication", + "no_results": "No results. 
Try with a different station/address" + }, + "step": { + "station": { + "data": { + "station": "Station/Address" + }, + "title": "Enter Station/Address" + }, + "station_select": { + "data": { + "station": "Station/Address" + }, + "title": "Select Station/Address" + }, + "user": { + "data": { + "host": "Host", + "password": "Password", + "username": "Username" + }, + "title": "Connect to the HVV API" + } + } + }, + "options": { + "step": { + "init": { + "data": { + "filter": "Select lines", + "offset": "Offset (minutes)", + "real_time": "Use real time data" + }, + "description": "Change options for this departure sensor", + "title": "Options" + } + } + }, + "title": "HVV Departures" +} \ No newline at end of file diff --git a/homeassistant/components/hvv_departures/translations/es.json b/homeassistant/components/hvv_departures/translations/es.json new file mode 100644 index 00000000000..7c9e9384e95 --- /dev/null +++ b/homeassistant/components/hvv_departures/translations/es.json @@ -0,0 +1,48 @@ +{ + "config": { + "abort": { + "already_configured": "El dispositivo ya est\u00e1 configurado" + }, + "error": { + "cannot_connect": "No se ha podido conectar, por favor, int\u00e9ntelo de nuevo.", + "invalid_auth": "Autenticaci\u00f3n no v\u00e1lida", + "no_results": "No hay resultados. Int\u00e9ntalo con una estaci\u00f3n/direcci\u00f3n diferente" + }, + "step": { + "station": { + "data": { + "station": "Estacion/Direccion" + }, + "title": "Introducir Estaci\u00f3n/Direcci\u00f3n" + }, + "station_select": { + "data": { + "station": "Estacion/Direccion" + }, + "title": "Seleccionar Estaci\u00f3n/Direcci\u00f3n" + }, + "user": { + "data": { + "host": "Host", + "password": "Contrase\u00f1a", + "username": "Usuario" + }, + "title": "Conectar con el API de HVV" + } + } + }, + "options": { + "step": { + "init": { + "data": { + "filter": "Seleccionar l\u00edneas", + "offset": "Desfase (minutos)", + "real_time": "Usar datos en tiempo real" + }, + "description": "Cambiar opciones para este sensor de salidas", + "title": "Opciones" + } + } + }, + "title": "Salidas de HVV" +} \ No newline at end of file diff --git a/homeassistant/components/hvv_departures/translations/fr.json b/homeassistant/components/hvv_departures/translations/fr.json new file mode 100644 index 00000000000..0e6fcdc19ba --- /dev/null +++ b/homeassistant/components/hvv_departures/translations/fr.json @@ -0,0 +1,27 @@ +{ + "config": { + "step": { + "station_select": { + "title": "S\u00e9lectionner la station/l\u2019adresse" + }, + "user": { + "data": { + "host": "H\u00f4te", + "password": "Mot de passe", + "username": "Nom d'utilisateur" + } + } + } + }, + "options": { + "step": { + "init": { + "data": { + "offset": "D\u00e9calage (minutes)", + "real_time": "Utiliser des donn\u00e9es en temps r\u00e9el" + }, + "title": "Options" + } + } + } +} \ No newline at end of file diff --git a/homeassistant/components/hvv_departures/translations/it.json b/homeassistant/components/hvv_departures/translations/it.json new file mode 100644 index 00000000000..f1dd507a0d5 --- /dev/null +++ b/homeassistant/components/hvv_departures/translations/it.json @@ -0,0 +1,48 @@ +{ + "config": { + "abort": { + "already_configured": "Il dispositivo \u00e8 gi\u00e0 configurato" + }, + "error": { + "cannot_connect": "Impossibile connettersi, si prega di riprovare", + "invalid_auth": "Autenticazione non valida", + "no_results": "Nessun risultato. 
Prova con un'altra stazione/indirizzo" + }, + "step": { + "station": { + "data": { + "station": "Stazione/Indirizzo" + }, + "title": "Inserisci stazione/indirizzo" + }, + "station_select": { + "data": { + "station": "Stazione/Indirizzo" + }, + "title": "Seleziona Stazione/Indirizzo" + }, + "user": { + "data": { + "host": "Host", + "password": "Password", + "username": "Nome utente" + }, + "title": "Connettiti all'API HVV" + } + } + }, + "options": { + "step": { + "init": { + "data": { + "filter": "Seleziona le linee", + "offset": "Offset (minuti)", + "real_time": "Usa dati in tempo reale" + }, + "description": "Modifica le opzioni per questo sensore di partenza", + "title": "Opzioni" + } + } + }, + "title": "Partenze HVV" +} \ No newline at end of file diff --git a/homeassistant/components/hvv_departures/translations/ko.json b/homeassistant/components/hvv_departures/translations/ko.json new file mode 100644 index 00000000000..61b6f5cd49e --- /dev/null +++ b/homeassistant/components/hvv_departures/translations/ko.json @@ -0,0 +1,48 @@ +{ + "config": { + "abort": { + "already_configured": "\uae30\uae30\uac00 \uc774\ubbf8 \uad6c\uc131\ub418\uc5c8\uc2b5\ub2c8\ub2e4" + }, + "error": { + "cannot_connect": "\uc5f0\uacb0\ud558\uc9c0 \ubabb\ud588\uc2b5\ub2c8\ub2e4. \ub2e4\uc2dc \uc2dc\ub3c4\ud574\uc8fc\uc138\uc694", + "invalid_auth": "\uc778\uc99d\uc774 \uc798\ubabb\ub418\uc5c8\uc2b5\ub2c8\ub2e4", + "no_results": "\uacb0\uacfc\ub97c \ucc3e\uc744 \uc218 \uc5c6\uc2b5\ub2c8\ub2e4. \ub2e4\ub978 \uc2a4\ud14c\uc774\uc158\uc774\ub098 \uc8fc\uc18c\ub97c \uc0ac\uc6a9\ud574\uc8fc\uc138\uc694" + }, + "step": { + "station": { + "data": { + "station": "\uc2a4\ud14c\uc774\uc158 / \uc8fc\uc18c" + }, + "title": "\uc2a4\ud14c\uc774\uc158 / \uc8fc\uc18c \uc785\ub825\ud558\uae30" + }, + "station_select": { + "data": { + "station": "\uc2a4\ud14c\uc774\uc158 / \uc8fc\uc18c" + }, + "title": "\uc2a4\ud14c\uc774\uc158 / \uc8fc\uc18c \uc120\ud0dd\ud558\uae30" + }, + "user": { + "data": { + "host": "\ud638\uc2a4\ud2b8", + "password": "\ube44\ubc00\ubc88\ud638", + "username": "\uc0ac\uc6a9\uc790 \uc774\ub984" + }, + "title": "HVV API \uc5d0 \uc5f0\uacb0\ud558\uae30" + } + } + }, + "options": { + "step": { + "init": { + "data": { + "filter": "\ub178\uc120 \uc120\ud0dd", + "offset": "\uc624\ud504\uc14b (\ubd84)", + "real_time": "\uc2e4\uc2dc\uac04 \ub370\uc774\ud130 \uc0ac\uc6a9" + }, + "description": "\ucd9c\ubc1c \uc13c\uc11c \uc635\uc158 \ubcc0\uacbd", + "title": "\uc635\uc158" + } + } + }, + "title": "HVV \ucd9c\ubc1c\ud3b8" +} \ No newline at end of file diff --git a/homeassistant/components/hvv_departures/translations/lb.json b/homeassistant/components/hvv_departures/translations/lb.json new file mode 100644 index 00000000000..5adb1d22b0a --- /dev/null +++ b/homeassistant/components/hvv_departures/translations/lb.json @@ -0,0 +1,45 @@ +{ + "config": { + "abort": { + "already_configured": "Apparat ass scho konfigur\u00e9iert" + }, + "error": { + "cannot_connect": "Feeler beim verbannen, prob\u00e9ier w.e.g. nach emol.", + "invalid_auth": "Ong\u00eblteg Authentifikatioun", + "no_results": "Keng Resultater. 
Prob\u00e9ier mat enger aanerer Statioun/Adress" + }, + "step": { + "station": { + "data": { + "station": "Statioun/Adress" + }, + "title": "Statioun/Adress aginn" + }, + "station_select": { + "data": { + "station": "Statioun/Adress" + }, + "title": "Statioun/Adress auswielen" + }, + "user": { + "data": { + "host": "Apparat", + "password": "Passwuert", + "username": "Benotzernumm" + }, + "title": "Mat der HVV API verbannen" + } + } + }, + "options": { + "step": { + "init": { + "data": { + "filter": "Zeilen auswielen", + "offset": "Offset (Minutten)" + }, + "title": "Optiounen" + } + } + } +} \ No newline at end of file diff --git a/homeassistant/components/hvv_departures/translations/no.json b/homeassistant/components/hvv_departures/translations/no.json new file mode 100644 index 00000000000..52b91ef31d4 --- /dev/null +++ b/homeassistant/components/hvv_departures/translations/no.json @@ -0,0 +1,48 @@ +{ + "config": { + "abort": { + "already_configured": "Enheten er allerede konfigurert" + }, + "error": { + "cannot_connect": "Klarte ikke \u00e5 koble til, vennligst pr\u00f8v igjen", + "invalid_auth": "Ugyldig godkjenning", + "no_results": "Ingen resultater. Pr\u00f8v med en annen stasjon/adresse" + }, + "step": { + "station": { + "data": { + "station": "Stasjon/adresse" + }, + "title": "Angi stasjon/adresse" + }, + "station_select": { + "data": { + "station": "Stasjon/adresse" + }, + "title": "Velg Stasjon/adresse" + }, + "user": { + "data": { + "host": "Vert", + "password": "Passord", + "username": "Brukernavn" + }, + "title": "Koble til HVV API" + } + } + }, + "options": { + "step": { + "init": { + "data": { + "filter": "Velg linjer", + "offset": "Offset (minutter)", + "real_time": "Bruk sanntidsdata" + }, + "description": "Endre alternativer for denne avgangssensoren", + "title": "Alternativer" + } + } + }, + "title": "HVV Avganger" +} \ No newline at end of file diff --git a/homeassistant/components/hvv_departures/translations/pl.json b/homeassistant/components/hvv_departures/translations/pl.json new file mode 100644 index 00000000000..5bf87fc08a8 --- /dev/null +++ b/homeassistant/components/hvv_departures/translations/pl.json @@ -0,0 +1,48 @@ +{ + "config": { + "abort": { + "already_configured": "Urz\u0105dzenie jest ju\u017c skonfigurowane." + }, + "error": { + "cannot_connect": "Nie mo\u017cna nawi\u0105za\u0107 po\u0142\u0105czenia.", + "invalid_auth": "Niepoprawne uwierzytelnienie.", + "no_results": "Brak wynik\u00f3w. Spr\u00f3buj z inn\u0105 stacj\u0105/adresem." 
+ }, + "step": { + "station": { + "data": { + "station": "Stacja/adres" + }, + "title": "Wprowad\u017a stacj\u0119/adres" + }, + "station_select": { + "data": { + "station": "Stacja/adres" + }, + "title": "Wybierz stacj\u0119/adres" + }, + "user": { + "data": { + "host": "Nazwa hosta lub adres IP", + "password": "[%key_id:common::config_flow::data::password%]", + "username": "[%key_id:common::config_flow::data::username%]" + }, + "title": "Po\u0142\u0105czenie z API HVV" + } + } + }, + "options": { + "step": { + "init": { + "data": { + "filter": "Wybierz linie", + "offset": "Przesuni\u0119cie (minuty)", + "real_time": "U\u017cywaj danych czasu rzeczywistego" + }, + "description": "Zmie\u0144 opcje dla tego sensora odjazd\u00f3w", + "title": "Opcje" + } + } + }, + "title": "Odjazdy HVV" +} \ No newline at end of file diff --git a/homeassistant/components/hvv_departures/translations/ru.json b/homeassistant/components/hvv_departures/translations/ru.json new file mode 100644 index 00000000000..b83981ae76f --- /dev/null +++ b/homeassistant/components/hvv_departures/translations/ru.json @@ -0,0 +1,48 @@ +{ + "config": { + "abort": { + "already_configured": "\u041d\u0430\u0441\u0442\u0440\u043e\u0439\u043a\u0430 \u044d\u0442\u043e\u0433\u043e \u0443\u0441\u0442\u0440\u043e\u0439\u0441\u0442\u0432\u0430 \u0443\u0436\u0435 \u0432\u044b\u043f\u043e\u043b\u043d\u0435\u043d\u0430." + }, + "error": { + "cannot_connect": "\u041d\u0435 \u0443\u0434\u0430\u043b\u043e\u0441\u044c \u043f\u043e\u0434\u043a\u043b\u044e\u0447\u0438\u0442\u044c\u0441\u044f, \u043f\u043e\u043f\u0440\u043e\u0431\u0443\u0439\u0442\u0435 \u0435\u0449\u0435 \u0440\u0430\u0437.", + "invalid_auth": "\u041d\u0435\u0432\u0435\u0440\u043d\u0430\u044f \u0430\u0443\u0442\u0435\u043d\u0442\u0438\u0444\u0438\u043a\u0430\u0446\u0438\u044f.", + "no_results": "\u041d\u0435\u0442 \u0440\u0435\u0437\u0443\u043b\u044c\u0442\u0430\u0442\u043e\u0432. \u041f\u043e\u043f\u0440\u043e\u0431\u0443\u0439\u0442\u0435 \u0441 \u0434\u0440\u0443\u0433\u043e\u0439 \u0441\u0442\u0430\u043d\u0446\u0438\u0435\u0439 / \u0430\u0434\u0440\u0435\u0441\u043e\u043c." 
+ }, + "step": { + "station": { + "data": { + "station": "\u0421\u0442\u0430\u043d\u0446\u0438\u044f / \u0410\u0434\u0440\u0435\u0441" + }, + "title": "\u0412\u0432\u0435\u0434\u0438\u0442\u0435 \u0441\u0442\u0430\u043d\u0446\u0438\u044e / \u0430\u0434\u0440\u0435\u0441" + }, + "station_select": { + "data": { + "station": "\u0421\u0442\u0430\u043d\u0446\u0438\u044f / \u0410\u0434\u0440\u0435\u0441" + }, + "title": "\u0412\u044b\u0431\u0435\u0440\u0438\u0442\u0435 \u0441\u0442\u0430\u043d\u0446\u0438\u044e / \u0430\u0434\u0440\u0435\u0441" + }, + "user": { + "data": { + "host": "\u0425\u043e\u0441\u0442", + "password": "\u041f\u0430\u0440\u043e\u043b\u044c", + "username": "\u041b\u043e\u0433\u0438\u043d" + }, + "title": "\u041f\u043e\u0434\u043a\u043b\u044e\u0447\u0435\u043d\u0438\u0435 \u043a API HVV" + } + } + }, + "options": { + "step": { + "init": { + "data": { + "filter": "\u0412\u044b\u0431\u0435\u0440\u0438\u0442\u0435 \u043b\u0438\u043d\u0438\u0438", + "offset": "\u0421\u043c\u0435\u0449\u0435\u043d\u0438\u0435 (\u0432 \u043c\u0438\u043d\u0443\u0442\u0430\u0445)", + "real_time": "\u0418\u0441\u043f\u043e\u043b\u044c\u0437\u043e\u0432\u0430\u0442\u044c \u0434\u0430\u043d\u043d\u044b\u0435 \u0432 \u0440\u0435\u0436\u0438\u043c\u0435 \u0440\u0435\u0430\u043b\u044c\u043d\u043e\u0433\u043e \u0432\u0440\u0435\u043c\u0435\u043d\u0438" + }, + "description": "\u041f\u0430\u0440\u0430\u043c\u0435\u0442\u0440\u044b \u0441\u0435\u043d\u0441\u043e\u0440\u0430 \u043e\u0442\u043f\u0440\u0430\u0432\u043b\u0435\u043d\u0438\u044f", + "title": "\u041f\u0430\u0440\u0430\u043c\u0435\u0442\u0440\u044b" + } + } + }, + "title": "HVV Departures" +} \ No newline at end of file diff --git a/homeassistant/components/hvv_departures/translations/zh-Hant.json b/homeassistant/components/hvv_departures/translations/zh-Hant.json new file mode 100644 index 00000000000..ee22830c030 --- /dev/null +++ b/homeassistant/components/hvv_departures/translations/zh-Hant.json @@ -0,0 +1,48 @@ +{ + "config": { + "abort": { + "already_configured": "\u8a2d\u5099\u5df2\u7d93\u8a2d\u5b9a\u5b8c\u6210" + }, + "error": { + "cannot_connect": "\u9023\u7dda\u5931\u6557\uff0c\u8acb\u518d\u8a66\u4e00\u6b21", + "invalid_auth": "\u9a57\u8b49\u78bc\u7121\u6548", + "no_results": "\u6c92\u6709\u7d50\u679c\u3002\u8acb\u5617\u8a66\u5176\u4ed6\u8eca\u7ad9/\u5730\u5740" + }, + "step": { + "station": { + "data": { + "station": "\u8eca\u7ad9/\u5730\u5740" + }, + "title": "\u8f38\u5165\u8eca\u7ad9/\u5730\u5740" + }, + "station_select": { + "data": { + "station": "\u8eca\u7ad9/\u5730\u5740" + }, + "title": "\u9078\u64c7\u8eca\u7ad9/\u5730\u5740" + }, + "user": { + "data": { + "host": "\u4e3b\u6a5f\u7aef", + "password": "\u5bc6\u78bc", + "username": "\u4f7f\u7528\u8005\u540d\u7a31" + }, + "title": "\u9023\u7dda\u81f3 HVV API" + } + } + }, + "options": { + "step": { + "init": { + "data": { + "filter": "\u9078\u64c7\u73ed\u8eca", + "offset": "\u504f\u79fb\uff08\u5206\uff09", + "real_time": "\u4f7f\u7528\u5373\u6642\u8cc7\u6599" + }, + "description": "\u8b8a\u66f4\u51fa\u767c\u611f\u6e2c\u5668\u9078\u9805", + "title": "\u9078\u9805" + } + } + }, + "title": "HVV \u5217\u8eca\u6642\u523b" +} \ No newline at end of file diff --git a/homeassistant/components/hydrawise/__init__.py b/homeassistant/components/hydrawise/__init__.py index 28b577354d2..08827baae68 100644 --- a/homeassistant/components/hydrawise/__init__.py +++ b/homeassistant/components/hydrawise/__init__.py @@ -6,6 +6,12 @@ from hydrawiser.core import Hydrawiser from requests.exceptions import 
ConnectTimeout, HTTPError import voluptuous as vol +from homeassistant.components.binary_sensor import ( + DEVICE_CLASS_CONNECTIVITY, + DEVICE_CLASS_MOISTURE, +) +from homeassistant.components.sensor import DEVICE_CLASS_TIMESTAMP +from homeassistant.components.switch import DEVICE_CLASS_SWITCH from homeassistant.const import ( ATTR_ATTRIBUTION, CONF_ACCESS_TOKEN, @@ -40,16 +46,15 @@ DEVICE_MAP_INDEX = [ "UNIT_OF_MEASURE_INDEX", ] DEVICE_MAP = { - "auto_watering": ["Automatic Watering", "mdi:autorenew", "", ""], - "is_watering": ["Watering", "", "moisture", ""], - "manual_watering": ["Manual Watering", "mdi:water-pump", "", ""], - "next_cycle": ["Next Cycle", "mdi:calendar-clock", "", ""], - "status": ["Status", "", "connectivity", ""], - "watering_time": ["Watering Time", "mdi:water-pump", "", TIME_MINUTES], - "rain_sensor": ["Rain Sensor", "", "moisture", ""], + "auto_watering": ["Automatic Watering", None, DEVICE_CLASS_SWITCH, None], + "is_watering": ["Watering", None, DEVICE_CLASS_MOISTURE, None], + "manual_watering": ["Manual Watering", None, DEVICE_CLASS_SWITCH, None], + "next_cycle": ["Next Cycle", None, DEVICE_CLASS_TIMESTAMP, None], + "status": ["Status", None, DEVICE_CLASS_CONNECTIVITY, None], + "watering_time": ["Watering Time", "mdi:water-pump", None, TIME_MINUTES], } -BINARY_SENSORS = ["is_watering", "status", "rain_sensor"] +BINARY_SENSORS = ["is_watering", "status"] SENSORS = ["next_cycle", "watering_time"] @@ -149,3 +154,15 @@ class HydrawiseEntity(Entity): def device_state_attributes(self): """Return the state attributes.""" return {ATTR_ATTRIBUTION: ATTRIBUTION, "identifier": self.data.get("relay")} + + @property + def device_class(self): + """Return the device class of the sensor type.""" + return DEVICE_MAP[self._sensor_type][ + DEVICE_MAP_INDEX.index("DEVICE_CLASS_INDEX") + ] + + @property + def icon(self): + """Return the icon to use in the frontend, if any.""" + return DEVICE_MAP[self._sensor_type][DEVICE_MAP_INDEX.index("ICON_INDEX")] diff --git a/homeassistant/components/hydrawise/binary_sensor.py b/homeassistant/components/hydrawise/binary_sensor.py index 389506c6d5a..e39ffce73a9 100644 --- a/homeassistant/components/hydrawise/binary_sensor.py +++ b/homeassistant/components/hydrawise/binary_sensor.py @@ -7,13 +7,7 @@ from homeassistant.components.binary_sensor import PLATFORM_SCHEMA, BinarySensor from homeassistant.const import CONF_MONITORED_CONDITIONS import homeassistant.helpers.config_validation as cv -from . import ( - BINARY_SENSORS, - DATA_HYDRAWISE, - DEVICE_MAP, - DEVICE_MAP_INDEX, - HydrawiseEntity, -) +from . 
import BINARY_SENSORS, DATA_HYDRAWISE, HydrawiseEntity _LOGGER = logging.getLogger(__name__) @@ -32,17 +26,14 @@ def setup_platform(hass, config, add_entities, discovery_info=None): sensors = [] for sensor_type in config.get(CONF_MONITORED_CONDITIONS): - if sensor_type in ["status", "rain_sensor"]: + if sensor_type == "status": sensors.append( - HydrawiseBinarySensor(hydrawise.controller_status, sensor_type) + HydrawiseBinarySensor(hydrawise.current_controller, sensor_type) ) - else: # create a sensor for each zone for zone in hydrawise.relays: - zone_data = zone - zone_data["running"] = hydrawise.controller_status.get("running", False) - sensors.append(HydrawiseBinarySensor(zone_data, sensor_type)) + sensors.append(HydrawiseBinarySensor(zone, sensor_type)) add_entities(sensors, True) @@ -61,21 +52,6 @@ class HydrawiseBinarySensor(HydrawiseEntity, BinarySensorEntity): mydata = self.hass.data[DATA_HYDRAWISE].data if self._sensor_type == "status": self._state = mydata.status == "All good!" - elif self._sensor_type == "rain_sensor": - for sensor in mydata.sensors: - if sensor["name"] == "Rain": - self._state = sensor["active"] == 1 elif self._sensor_type == "is_watering": - if not mydata.running: - self._state = False - elif int(mydata.running[0]["relay"]) == self.data["relay"]: - self._state = True - else: - self._state = False - - @property - def device_class(self): - """Return the device class of the sensor type.""" - return DEVICE_MAP[self._sensor_type][ - DEVICE_MAP_INDEX.index("DEVICE_CLASS_INDEX") - ] + relay_data = mydata.relays[self.data["relay"] - 1] + self._state = relay_data["timestr"] == "Now" diff --git a/homeassistant/components/hydrawise/manifest.json b/homeassistant/components/hydrawise/manifest.json index 1d6a2ee85ea..d5a18620edd 100644 --- a/homeassistant/components/hydrawise/manifest.json +++ b/homeassistant/components/hydrawise/manifest.json @@ -2,6 +2,6 @@ "domain": "hydrawise", "name": "Hunter Hydrawise", "documentation": "https://www.home-assistant.io/integrations/hydrawise", - "requirements": ["hydrawiser==0.1.1"], - "codeowners": [] + "requirements": ["hydrawiser==0.2"], + "codeowners": ["@ptcryan"] } diff --git a/homeassistant/components/hydrawise/sensor.py b/homeassistant/components/hydrawise/sensor.py index 88146dbeb0d..6a0c6ab0d80 100644 --- a/homeassistant/components/hydrawise/sensor.py +++ b/homeassistant/components/hydrawise/sensor.py @@ -6,8 +6,9 @@ import voluptuous as vol from homeassistant.components.sensor import PLATFORM_SCHEMA from homeassistant.const import CONF_MONITORED_CONDITIONS import homeassistant.helpers.config_validation as cv +from homeassistant.util import dt -from . import DATA_HYDRAWISE, DEVICE_MAP, DEVICE_MAP_INDEX, SENSORS, HydrawiseEntity +from . 
import DATA_HYDRAWISE, SENSORS, HydrawiseEntity _LOGGER = logging.getLogger(__name__) @@ -19,6 +20,9 @@ PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( } ) +TWO_YEAR_SECONDS = 60 * 60 * 24 * 365 * 2 +WATERING_TIME_ICON = "mdi:water-pump" + def setup_platform(hass, config, add_entities, discovery_info=None): """Set up a sensor for a Hydrawise device.""" @@ -44,23 +48,15 @@ class HydrawiseSensor(HydrawiseEntity): """Get the latest data and updates the states.""" mydata = self.hass.data[DATA_HYDRAWISE].data _LOGGER.debug("Updating Hydrawise sensor: %s", self._name) + relay_data = mydata.relays[self.data["relay"] - 1] if self._sensor_type == "watering_time": - if not mydata.running: - self._state = 0 + if relay_data["timestr"] == "Now": + self._state = int(relay_data["run"] / 60) else: - if int(mydata.running[0]["relay"]) == self.data["relay"]: - self._state = int(mydata.running[0]["time_left"] / 60) - else: - self._state = 0 + self._state = 0 else: # _sensor_type == 'next_cycle' - for relay in mydata.relays: - if relay["relay"] == self.data["relay"]: - if relay["nicetime"] == "Not scheduled": - self._state = "not_scheduled" - else: - self._state = f"{relay['nicetime'].split(',')[0]} {relay['nicetime'].split(' ')[3]}" - - @property - def icon(self): - """Icon to use in the frontend, if any.""" - return DEVICE_MAP[self._sensor_type][DEVICE_MAP_INDEX.index("ICON_INDEX")] + next_cycle = min(relay_data["time"], TWO_YEAR_SECONDS) + _LOGGER.debug("New cycle time: %s", next_cycle) + self._state = dt.utc_from_timestamp( + dt.as_timestamp(dt.now()) + next_cycle + ).isoformat() diff --git a/homeassistant/components/hydrawise/switch.py b/homeassistant/components/hydrawise/switch.py index 577fde85d37..a385e504d7f 100644 --- a/homeassistant/components/hydrawise/switch.py +++ b/homeassistant/components/hydrawise/switch.py @@ -12,8 +12,6 @@ from . 
import ( CONF_WATERING_TIME, DATA_HYDRAWISE, DEFAULT_WATERING_TIME, - DEVICE_MAP, - DEVICE_MAP_INDEX, SWITCHES, HydrawiseEntity, ) @@ -62,43 +60,30 @@ class HydrawiseSwitch(HydrawiseEntity, SwitchEntity): def turn_on(self, **kwargs): """Turn the device on.""" + relay_data = self.data["relay"] - 1 if self._sensor_type == "manual_watering": self.hass.data[DATA_HYDRAWISE].data.run_zone( - self._default_watering_timer, (self.data["relay"] - 1) + self._default_watering_timer, relay_data ) elif self._sensor_type == "auto_watering": - self.hass.data[DATA_HYDRAWISE].data.suspend_zone( - 0, (self.data["relay"] - 1) - ) + self.hass.data[DATA_HYDRAWISE].data.suspend_zone(0, relay_data) def turn_off(self, **kwargs): """Turn the device off.""" + relay_data = self.data["relay"] - 1 if self._sensor_type == "manual_watering": - self.hass.data[DATA_HYDRAWISE].data.run_zone(0, (self.data["relay"] - 1)) + self.hass.data[DATA_HYDRAWISE].data.run_zone(0, relay_data) elif self._sensor_type == "auto_watering": - self.hass.data[DATA_HYDRAWISE].data.suspend_zone( - 365, (self.data["relay"] - 1) - ) + self.hass.data[DATA_HYDRAWISE].data.suspend_zone(365, relay_data) def update(self): """Update device state.""" + relay_data = self.data["relay"] - 1 mydata = self.hass.data[DATA_HYDRAWISE].data _LOGGER.debug("Updating Hydrawise switch: %s", self._name) if self._sensor_type == "manual_watering": - if not mydata.running: - self._state = False - else: - self._state = int(mydata.running[0]["relay"]) == self.data["relay"] + self._state = mydata.relays[relay_data]["timestr"] == "Now" elif self._sensor_type == "auto_watering": - for relay in mydata.relays: - if relay["relay"] == self.data["relay"]: - if relay.get("suspended") is not None: - self._state = False - else: - self._state = True - break - - @property - def icon(self): - """Return the icon to use in the frontend, if any.""" - return DEVICE_MAP[self._sensor_type][DEVICE_MAP_INDEX.index("ICON_INDEX")] + self._state = (mydata.relays[relay_data]["timestr"] != "") and ( + mydata.relays[relay_data]["timestr"] != "Now" + ) diff --git a/homeassistant/components/iaqualink/translations/nn.json b/homeassistant/components/iaqualink/translations/nn.json deleted file mode 100644 index 57b177c0572..00000000000 --- a/homeassistant/components/iaqualink/translations/nn.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "title": "Jandy iAqualink" -} \ No newline at end of file diff --git a/homeassistant/components/icloud/translations/fr.json b/homeassistant/components/icloud/translations/fr.json index 61aacd004ea..0ede270fd79 100644 --- a/homeassistant/components/icloud/translations/fr.json +++ b/homeassistant/components/icloud/translations/fr.json @@ -20,6 +20,7 @@ "user": { "data": { "password": "Mot de passe", + "username": "Email", "with_family": "Avec la famille" }, "description": "Entrez vos identifiants", diff --git a/homeassistant/components/ifttt/translations/et.json b/homeassistant/components/ifttt/translations/et.json deleted file mode 100644 index d8a4c453015..00000000000 --- a/homeassistant/components/ifttt/translations/et.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "title": "" -} \ No newline at end of file diff --git a/homeassistant/components/ifttt/translations/hr.json b/homeassistant/components/ifttt/translations/hr.json deleted file mode 100644 index 8e00d0bfe2d..00000000000 --- a/homeassistant/components/ifttt/translations/hr.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "title": "IFTTT" -} \ No newline at end of file diff --git a/homeassistant/components/ifttt/translations/th.json 
b/homeassistant/components/ifttt/translations/th.json deleted file mode 100644 index 8e00d0bfe2d..00000000000 --- a/homeassistant/components/ifttt/translations/th.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "title": "IFTTT" -} \ No newline at end of file diff --git a/homeassistant/components/ifttt/translations/tr.json b/homeassistant/components/ifttt/translations/tr.json deleted file mode 100644 index 0061b9f6166..00000000000 --- a/homeassistant/components/ifttt/translations/tr.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "title": "IFTT" -} \ No newline at end of file diff --git a/homeassistant/components/ihc/manifest.json b/homeassistant/components/ihc/manifest.json index 6403d800a16..fe54117e56a 100644 --- a/homeassistant/components/ihc/manifest.json +++ b/homeassistant/components/ihc/manifest.json @@ -2,6 +2,6 @@ "domain": "ihc", "name": "IHC Controller", "documentation": "https://www.home-assistant.io/integrations/ihc", - "requirements": ["defusedxml==0.6.0", "ihcsdk==2.6.0"], + "requirements": ["defusedxml==0.6.0", "ihcsdk==2.7.0"], "codeowners": [] } diff --git a/homeassistant/components/influxdb/__init__.py b/homeassistant/components/influxdb/__init__.py index 0d1999e0d7b..9823d57e200 100644 --- a/homeassistant/components/influxdb/__init__.py +++ b/homeassistant/components/influxdb/__init__.py @@ -5,12 +5,17 @@ import queue import re import threading import time +from typing import Dict from influxdb import InfluxDBClient, exceptions +from influxdb_client import InfluxDBClient as InfluxDBClientV2 +from influxdb_client.client.write_api import ASYNCHRONOUS, SYNCHRONOUS +from influxdb_client.rest import ApiException import requests.exceptions import voluptuous as vol from homeassistant.const import ( + CONF_API_VERSION, CONF_DOMAINS, CONF_ENTITIES, CONF_EXCLUDE, @@ -20,6 +25,8 @@ from homeassistant.const import ( CONF_PATH, CONF_PORT, CONF_SSL, + CONF_TOKEN, + CONF_URL, CONF_USERNAME, CONF_VERIFY_SSL, EVENT_HOMEASSISTANT_STOP, @@ -34,6 +41,8 @@ from homeassistant.helpers.entity_values import EntityValues _LOGGER = logging.getLogger(__name__) CONF_DB_NAME = "database" +CONF_BUCKET = "bucket" +CONF_ORG = "organization" CONF_TAGS = "tags" CONF_DEFAULT_MEASUREMENT = "default_measurement" CONF_OVERRIDE_MEASUREMENT = "override_measurement" @@ -44,9 +53,14 @@ CONF_COMPONENT_CONFIG_DOMAIN = "component_config_domain" CONF_RETRY_COUNT = "max_retries" DEFAULT_DATABASE = "home_assistant" +DEFAULT_HOST_V2 = "us-west-2-1.aws.cloud2.influxdata.com" +DEFAULT_SSL_V2 = True +DEFAULT_BUCKET = "Home Assistant" DEFAULT_VERIFY_SSL = True -DOMAIN = "influxdb" +DEFAULT_API_VERSION = "1" +DOMAIN = "influxdb" +API_VERSION_2 = "2" TIMEOUT = 5 RETRY_DELAY = 20 QUEUE_BACKLOG_SECONDS = 30 @@ -55,62 +69,122 @@ RETRY_INTERVAL = 60 # seconds BATCH_TIMEOUT = 1 BATCH_BUFFER_SIZE = 100 -COMPONENT_CONFIG_SCHEMA_ENTRY = vol.Schema( - {vol.Optional(CONF_OVERRIDE_MEASUREMENT): cv.string} +DB_CONNECTION_FAILURE_MSG = () + + +def create_influx_url(conf: Dict) -> Dict: + """Build URL used from config inputs and default when necessary.""" + if conf[CONF_API_VERSION] == API_VERSION_2: + if CONF_SSL not in conf: + conf[CONF_SSL] = DEFAULT_SSL_V2 + if CONF_HOST not in conf: + conf[CONF_HOST] = DEFAULT_HOST_V2 + + url = conf[CONF_HOST] + if conf[CONF_SSL]: + url = f"https://{url}" + else: + url = f"http://{url}" + + if CONF_PORT in conf: + url = f"{url}:{conf[CONF_PORT]}" + + if CONF_PATH in conf: + url = f"{url}{conf[CONF_PATH]}" + + conf[CONF_URL] = url + + return conf + + +def validate_version_specific_config(conf: Dict) -> Dict: + 
"""Ensure correct config fields are provided based on API version used.""" + if conf[CONF_API_VERSION] == API_VERSION_2: + if CONF_TOKEN not in conf: + raise vol.Invalid( + f"{CONF_TOKEN} and {CONF_BUCKET} are required when {CONF_API_VERSION} is {API_VERSION_2}" + ) + + if CONF_USERNAME in conf: + raise vol.Invalid( + f"{CONF_USERNAME} and {CONF_PASSWORD} are only allowed when {CONF_API_VERSION} is {DEFAULT_API_VERSION}" + ) + + else: + if CONF_TOKEN in conf: + raise vol.Invalid( + f"{CONF_TOKEN} and {CONF_BUCKET} are only allowed when {CONF_API_VERSION} is {API_VERSION_2}" + ) + + return conf + + +COMPONENT_CONFIG_SCHEMA_CONNECTION = { + # Connection config for V1 and V2 APIs. + vol.Optional(CONF_API_VERSION, default=DEFAULT_API_VERSION): vol.All( + vol.Coerce(str), vol.In([DEFAULT_API_VERSION, API_VERSION_2]), + ), + vol.Optional(CONF_HOST): cv.string, + vol.Optional(CONF_PATH): cv.string, + vol.Optional(CONF_PORT): cv.port, + vol.Optional(CONF_SSL): cv.boolean, + # Connection config for V1 API only. + vol.Inclusive(CONF_USERNAME, "authentication"): cv.string, + vol.Inclusive(CONF_PASSWORD, "authentication"): cv.string, + vol.Optional(CONF_DB_NAME, default=DEFAULT_DATABASE): cv.string, + vol.Optional(CONF_VERIFY_SSL, default=DEFAULT_VERIFY_SSL): cv.boolean, + # Connection config for V2 API only. + vol.Inclusive(CONF_TOKEN, "v2_authentication"): cv.string, + vol.Inclusive(CONF_ORG, "v2_authentication"): cv.string, + vol.Optional(CONF_BUCKET, default=DEFAULT_BUCKET): cv.string, +} + +_CONFIG_SCHEMA_ENTRY = vol.Schema({vol.Optional(CONF_OVERRIDE_MEASUREMENT): cv.string}) + +_CONFIG_SCHEMA = vol.Schema( + { + vol.Optional(CONF_EXCLUDE, default={}): vol.Schema( + { + vol.Optional(CONF_ENTITIES, default=[]): cv.entity_ids, + vol.Optional(CONF_DOMAINS, default=[]): vol.All( + cv.ensure_list, [cv.string] + ), + } + ), + vol.Optional(CONF_INCLUDE, default={}): vol.Schema( + { + vol.Optional(CONF_ENTITIES, default=[]): cv.entity_ids, + vol.Optional(CONF_DOMAINS, default=[]): vol.All( + cv.ensure_list, [cv.string] + ), + } + ), + vol.Optional(CONF_RETRY_COUNT, default=0): cv.positive_int, + vol.Optional(CONF_DEFAULT_MEASUREMENT): cv.string, + vol.Optional(CONF_OVERRIDE_MEASUREMENT): cv.string, + vol.Optional(CONF_TAGS, default={}): vol.Schema({cv.string: cv.string}), + vol.Optional(CONF_TAGS_ATTRIBUTES, default=[]): vol.All( + cv.ensure_list, [cv.string] + ), + vol.Optional(CONF_COMPONENT_CONFIG, default={}): vol.Schema( + {cv.entity_id: _CONFIG_SCHEMA_ENTRY} + ), + vol.Optional(CONF_COMPONENT_CONFIG_GLOB, default={}): vol.Schema( + {cv.string: _CONFIG_SCHEMA_ENTRY} + ), + vol.Optional(CONF_COMPONENT_CONFIG_DOMAIN, default={}): vol.Schema( + {cv.string: _CONFIG_SCHEMA_ENTRY} + ), + } ) CONFIG_SCHEMA = vol.Schema( { DOMAIN: vol.All( - vol.Schema( - { - vol.Optional(CONF_HOST): cv.string, - vol.Inclusive(CONF_USERNAME, "authentication"): cv.string, - vol.Inclusive(CONF_PASSWORD, "authentication"): cv.string, - vol.Optional(CONF_EXCLUDE, default={}): vol.Schema( - { - vol.Optional(CONF_ENTITIES, default=[]): cv.entity_ids, - vol.Optional(CONF_DOMAINS, default=[]): vol.All( - cv.ensure_list, [cv.string] - ), - } - ), - vol.Optional(CONF_INCLUDE, default={}): vol.Schema( - { - vol.Optional(CONF_ENTITIES, default=[]): cv.entity_ids, - vol.Optional(CONF_DOMAINS, default=[]): vol.All( - cv.ensure_list, [cv.string] - ), - } - ), - vol.Optional(CONF_DB_NAME, default=DEFAULT_DATABASE): cv.string, - vol.Optional(CONF_PATH): cv.string, - vol.Optional(CONF_PORT): cv.port, - vol.Optional(CONF_SSL): cv.boolean, - 
vol.Optional(CONF_RETRY_COUNT, default=0): cv.positive_int, - vol.Optional(CONF_DEFAULT_MEASUREMENT): cv.string, - vol.Optional(CONF_OVERRIDE_MEASUREMENT): cv.string, - vol.Optional(CONF_TAGS, default={}): vol.Schema( - {cv.string: cv.string} - ), - vol.Optional(CONF_TAGS_ATTRIBUTES, default=[]): vol.All( - cv.ensure_list, [cv.string] - ), - vol.Optional( - CONF_VERIFY_SSL, default=DEFAULT_VERIFY_SSL - ): cv.boolean, - vol.Optional(CONF_COMPONENT_CONFIG, default={}): vol.Schema( - {cv.entity_id: COMPONENT_CONFIG_SCHEMA_ENTRY} - ), - vol.Optional(CONF_COMPONENT_CONFIG_GLOB, default={}): vol.Schema( - {cv.string: COMPONENT_CONFIG_SCHEMA_ENTRY} - ), - vol.Optional(CONF_COMPONENT_CONFIG_DOMAIN, default={}): vol.Schema( - {cv.string: COMPONENT_CONFIG_SCHEMA_ENTRY} - ), - } - ) - ) + _CONFIG_SCHEMA.extend(COMPONENT_CONFIG_SCHEMA_CONNECTION), + validate_version_specific_config, + create_influx_url, + ), }, extra=vol.ALLOW_EXTRA, ) @@ -119,34 +193,65 @@ RE_DIGIT_TAIL = re.compile(r"^[^\.]*\d+\.?\d+[^\.]*$") RE_DECIMAL = re.compile(r"[^\d.]+") +def get_influx_connection(client_kwargs, bucket): + """Create and check the correct influx connection for the API version.""" + if bucket is not None: + # Test connection by synchronously writing nothing. + # If config is valid this will generate a `Bad Request` exception but not make anything. + # If config is invalid we will output an error. + # Hopefully a better way to test connection is added in the future. + try: + influx = InfluxDBClientV2(**client_kwargs) + influx.write_api(write_options=SYNCHRONOUS).write(bucket=bucket) + + except ApiException as exc: + # 400 is the success state since it means we can write we just gave a bad point. + if exc.status != 400: + raise exc + + else: + influx = InfluxDBClient(**client_kwargs) + influx.write_points([]) + + return influx + + def setup(hass, config): """Set up the InfluxDB component.""" - conf = config[DOMAIN] - + use_v2_api = conf[CONF_API_VERSION] == API_VERSION_2 + bucket = None kwargs = { - "database": conf[CONF_DB_NAME], - "verify_ssl": conf[CONF_VERIFY_SSL], "timeout": TIMEOUT, } - if CONF_HOST in conf: - kwargs["host"] = conf[CONF_HOST] + if use_v2_api: + kwargs["url"] = conf[CONF_URL] + kwargs["token"] = conf[CONF_TOKEN] + kwargs["org"] = conf[CONF_ORG] + bucket = conf[CONF_BUCKET] - if CONF_PATH in conf: - kwargs["path"] = conf[CONF_PATH] + else: + kwargs["database"] = conf[CONF_DB_NAME] + kwargs["verify_ssl"] = conf[CONF_VERIFY_SSL] - if CONF_PORT in conf: - kwargs["port"] = conf[CONF_PORT] + if CONF_USERNAME in conf: + kwargs["username"] = conf[CONF_USERNAME] - if CONF_USERNAME in conf: - kwargs["username"] = conf[CONF_USERNAME] + if CONF_PASSWORD in conf: + kwargs["password"] = conf[CONF_PASSWORD] - if CONF_PASSWORD in conf: - kwargs["password"] = conf[CONF_PASSWORD] + if CONF_HOST in conf: + kwargs["host"] = conf[CONF_HOST] - if CONF_SSL in conf: - kwargs["ssl"] = conf[CONF_SSL] + if CONF_PATH in conf: + kwargs["path"] = conf[CONF_PATH] + + if CONF_PORT in conf: + kwargs["port"] = conf[CONF_PORT] + + if CONF_SSL in conf: + kwargs["ssl"] = conf[CONF_SSL] include = conf.get(CONF_INCLUDE, {}) exclude = conf.get(CONF_EXCLUDE, {}) @@ -166,10 +271,11 @@ def setup(hass, config): max_tries = conf.get(CONF_RETRY_COUNT) try: - influx = InfluxDBClient(**kwargs) - influx.write_points([]) + influx = get_influx_connection(kwargs, bucket) + if use_v2_api: + write_api = influx.write_api(write_options=ASYNCHRONOUS) except (exceptions.InfluxDBClientError, requests.exceptions.ConnectionError) as exc: - 
_LOGGER.warning( + _LOGGER.error( "Database host is not accessible due to '%s', please " "check your entries in the configuration file (host, " "port, etc.) and verify that the database exists and is " @@ -179,6 +285,17 @@ def setup(hass, config): ) event_helper.call_later(hass, RETRY_INTERVAL, lambda _: setup(hass, config)) return True + except ApiException as exc: + _LOGGER.error( + "Bucket is not accessible due to '%s', please " + "check your entries in the configuration file (url, org, " + "bucket, etc.) and verify that the org and bucket exist and the " + "provided token has WRITE access. Retrying again in %s seconds.", + exc, + RETRY_INTERVAL, + ) + event_helper.call_later(hass, RETRY_INTERVAL, lambda _: setup(hass, config)) + return True def event_to_json(event): """Add an event to the outgoing Influx list.""" @@ -270,7 +387,15 @@ def setup(hass, config): return json - instance = hass.data[DOMAIN] = InfluxThread(hass, influx, event_to_json, max_tries) + if use_v2_api: + instance = hass.data[DOMAIN] = InfluxThread( + hass, None, bucket, write_api, event_to_json, max_tries + ) + else: + instance = hass.data[DOMAIN] = InfluxThread( + hass, influx, None, None, event_to_json, max_tries + ) + instance.start() def shutdown(event): @@ -287,11 +412,13 @@ def setup(hass, config): class InfluxThread(threading.Thread): """A threaded event handler class.""" - def __init__(self, hass, influx, event_to_json, max_tries): + def __init__(self, hass, influx, bucket, write_api, event_to_json, max_tries): """Initialize the listener.""" threading.Thread.__init__(self, name="InfluxDB") self.queue = queue.Queue() self.influx = influx + self.bucket = bucket + self.write_api = write_api self.event_to_json = event_to_json self.max_tries = max_tries self.write_errors = 0 @@ -346,10 +473,12 @@ class InfluxThread(threading.Thread): def write_to_influxdb(self, json): """Write preprocessed events to influxdb, with retry.""" - for retry in range(self.max_tries + 1): try: - self.influx.write_points(json) + if self.write_api is not None: + self.write_api.write(bucket=self.bucket, record=json) + else: + self.influx.write_points(json) if self.write_errors: _LOGGER.error("Resumed, lost %d events", self.write_errors) @@ -361,6 +490,7 @@ class InfluxThread(threading.Thread): exceptions.InfluxDBClientError, exceptions.InfluxDBServerError, OSError, + ApiException, ) as err: if retry < self.max_tries: time.sleep(RETRY_DELAY) diff --git a/homeassistant/components/influxdb/manifest.json b/homeassistant/components/influxdb/manifest.json index 94577f5735f..596c0ecc6ce 100644 --- a/homeassistant/components/influxdb/manifest.json +++ b/homeassistant/components/influxdb/manifest.json @@ -2,6 +2,6 @@ "domain": "influxdb", "name": "InfluxDB", "documentation": "https://www.home-assistant.io/integrations/influxdb", - "requirements": ["influxdb==5.2.3"], + "requirements": ["influxdb==5.2.3", "influxdb-client==1.6.0"], "codeowners": ["@fabaff"] } diff --git a/homeassistant/components/influxdb/sensor.py b/homeassistant/components/influxdb/sensor.py index 64ab1174b8b..0cf25c0b2f4 100644 --- a/homeassistant/components/influxdb/sensor.py +++ b/homeassistant/components/influxdb/sensor.py @@ -1,18 +1,25 @@ """InfluxDB component which allows you to get data from an Influx database.""" from datetime import timedelta import logging +from typing import Dict from influxdb import InfluxDBClient, exceptions +from influxdb_client import InfluxDBClient as InfluxDBClientV2 +from influxdb_client.rest import ApiException import voluptuous as vol from 
homeassistant.components.sensor import PLATFORM_SCHEMA from homeassistant.const import ( + CONF_API_VERSION, CONF_HOST, CONF_NAME, CONF_PASSWORD, + CONF_PATH, CONF_PORT, CONF_SSL, + CONF_TOKEN, CONF_UNIT_OF_MEASUREMENT, + CONF_URL, CONF_USERNAME, CONF_VALUE_TEMPLATE, CONF_VERIFY_SSL, @@ -23,79 +30,161 @@ import homeassistant.helpers.config_validation as cv from homeassistant.helpers.entity import Entity from homeassistant.util import Throttle -from . import CONF_DB_NAME +from . import ( + API_VERSION_2, + COMPONENT_CONFIG_SCHEMA_CONNECTION, + CONF_BUCKET, + CONF_DB_NAME, + CONF_ORG, + DEFAULT_API_VERSION, + create_influx_url, + validate_version_specific_config, +) _LOGGER = logging.getLogger(__name__) -DEFAULT_HOST = "localhost" -DEFAULT_PORT = 8086 -DEFAULT_DATABASE = "home_assistant" -DEFAULT_SSL = False -DEFAULT_VERIFY_SSL = False DEFAULT_GROUP_FUNCTION = "mean" DEFAULT_FIELD = "value" CONF_QUERIES = "queries" +CONF_QUERIES_FLUX = "queries_flux" CONF_GROUP_FUNCTION = "group_function" CONF_FIELD = "field" CONF_MEASUREMENT_NAME = "measurement" CONF_WHERE = "where" +CONF_RANGE_START = "range_start" +CONF_RANGE_STOP = "range_stop" +CONF_FUNCTION = "function" +CONF_QUERY = "query" +CONF_IMPORTS = "imports" + +DEFAULT_RANGE_START = "-15m" +DEFAULT_RANGE_STOP = "now()" + MIN_TIME_BETWEEN_UPDATES = timedelta(seconds=60) -_QUERY_SCHEME = vol.Schema( +_QUERY_SENSOR_SCHEMA = vol.Schema( { vol.Required(CONF_NAME): cv.string, - vol.Required(CONF_MEASUREMENT_NAME): cv.string, - vol.Required(CONF_WHERE): cv.template, - vol.Optional(CONF_UNIT_OF_MEASUREMENT): cv.string, vol.Optional(CONF_VALUE_TEMPLATE): cv.template, - vol.Optional(CONF_DB_NAME, default=DEFAULT_DATABASE): cv.string, - vol.Optional(CONF_GROUP_FUNCTION, default=DEFAULT_GROUP_FUNCTION): cv.string, - vol.Optional(CONF_FIELD, default=DEFAULT_FIELD): cv.string, + vol.Optional(CONF_UNIT_OF_MEASUREMENT): cv.string, } ) -PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( - { - vol.Required(CONF_QUERIES): [_QUERY_SCHEME], - vol.Optional(CONF_HOST, default=DEFAULT_HOST): cv.string, - vol.Optional(CONF_PORT, default=DEFAULT_PORT): cv.port, - vol.Inclusive(CONF_USERNAME, "authentication"): cv.string, - vol.Inclusive(CONF_PASSWORD, "authentication"): cv.string, - vol.Optional(CONF_SSL, default=DEFAULT_SSL): cv.boolean, - vol.Optional(CONF_VERIFY_SSL, default=DEFAULT_VERIFY_SSL): cv.boolean, - } +_QUERY_SCHEMA = { + "InfluxQL": _QUERY_SENSOR_SCHEMA.extend( + { + vol.Optional(CONF_DB_NAME): cv.string, + vol.Required(CONF_MEASUREMENT_NAME): cv.string, + vol.Optional( + CONF_GROUP_FUNCTION, default=DEFAULT_GROUP_FUNCTION + ): cv.string, + vol.Optional(CONF_FIELD, default=DEFAULT_FIELD): cv.string, + vol.Required(CONF_WHERE): cv.template, + } + ), + "Flux": _QUERY_SENSOR_SCHEMA.extend( + { + vol.Optional(CONF_BUCKET): cv.string, + vol.Optional(CONF_RANGE_START, default=DEFAULT_RANGE_START): cv.string, + vol.Optional(CONF_RANGE_STOP, default=DEFAULT_RANGE_STOP): cv.string, + vol.Required(CONF_QUERY): cv.template, + vol.Optional(CONF_IMPORTS): vol.All(cv.ensure_list, [cv.string]), + vol.Optional(CONF_GROUP_FUNCTION): cv.string, + } + ), +} + + +def validate_query_format_for_version(conf: Dict) -> Dict: + """Ensure queries are provided in correct format based on API version.""" + if conf[CONF_API_VERSION] == API_VERSION_2: + if CONF_QUERIES_FLUX not in conf: + raise vol.Invalid( + f"{CONF_QUERIES_FLUX} is required when {CONF_API_VERSION} is {API_VERSION_2}" + ) + + else: + if CONF_QUERIES not in conf: + raise vol.Invalid( + f"{CONF_QUERIES} is required when 
{CONF_API_VERSION} is {DEFAULT_API_VERSION}" + ) + + return conf + + +PLATFORM_SCHEMA = vol.All( + PLATFORM_SCHEMA.extend(COMPONENT_CONFIG_SCHEMA_CONNECTION).extend( + { + vol.Exclusive(CONF_QUERIES, "queries"): [_QUERY_SCHEMA["InfluxQL"]], + vol.Exclusive(CONF_QUERIES_FLUX, "queries"): [_QUERY_SCHEMA["Flux"]], + } + ), + validate_version_specific_config, + validate_query_format_for_version, + create_influx_url, ) def setup_platform(hass, config, add_entities, discovery_info=None): """Set up the InfluxDB component.""" - influx_conf = { - "host": config[CONF_HOST], - "password": config.get(CONF_PASSWORD), - "port": config.get(CONF_PORT), - "ssl": config[CONF_SSL], - "username": config.get(CONF_USERNAME), - "verify_ssl": config.get(CONF_VERIFY_SSL), - } + use_v2_api = config[CONF_API_VERSION] == API_VERSION_2 + queries = None - dev = [] + if use_v2_api: + influx_conf = { + "url": config[CONF_URL], + "token": config[CONF_TOKEN], + "org": config[CONF_ORG], + } + bucket = config[CONF_BUCKET] + queries = config[CONF_QUERIES_FLUX] - for query in config.get(CONF_QUERIES): - sensor = InfluxSensor(hass, influx_conf, query) + for v2_query in queries: + if CONF_BUCKET not in v2_query: + v2_query[CONF_BUCKET] = bucket + + else: + influx_conf = { + "database": config[CONF_DB_NAME], + "verify_ssl": config[CONF_VERIFY_SSL], + } + + if CONF_USERNAME in config: + influx_conf["username"] = config[CONF_USERNAME] + + if CONF_PASSWORD in config: + influx_conf["password"] = config[CONF_PASSWORD] + + if CONF_HOST in config: + influx_conf["host"] = config[CONF_HOST] + + if CONF_PATH in config: + influx_conf["path"] = config[CONF_PATH] + + if CONF_PORT in config: + influx_conf["port"] = config[CONF_PORT] + + if CONF_SSL in config: + influx_conf["ssl"] = config[CONF_SSL] + + queries = config[CONF_QUERIES] + + entities = [] + for query in queries: + sensor = InfluxSensor(hass, influx_conf, query, use_v2_api) if sensor.connected: - dev.append(sensor) + entities.append(sensor) - add_entities(dev, True) + add_entities(entities, True) class InfluxSensor(Entity): """Implementation of a Influxdb sensor.""" - def __init__(self, hass, influx_conf, query): + def __init__(self, hass, influx_conf, query, use_v2_api): """Initialize the sensor.""" - self._name = query.get(CONF_NAME) self._unit_of_measurement = query.get(CONF_UNIT_OF_MEASUREMENT) value_template = query.get(CONF_VALUE_TEMPLATE) @@ -104,32 +193,54 @@ class InfluxSensor(Entity): self._value_template.hass = hass else: self._value_template = None - database = query.get(CONF_DB_NAME) self._state = None self._hass = hass - where_clause = query.get(CONF_WHERE) - where_clause.hass = hass + if use_v2_api: + influx = InfluxDBClientV2(**influx_conf) + query_api = influx.query_api() + query_clause = query.get(CONF_QUERY) + query_clause.hass = hass + bucket = query[CONF_BUCKET] + + else: + if CONF_DB_NAME in query: + kwargs = influx_conf.copy() + kwargs[CONF_DB_NAME] = query[CONF_DB_NAME] + else: + kwargs = influx_conf + + influx = InfluxDBClient(**kwargs) + where_clause = query.get(CONF_WHERE) + where_clause.hass = hass + query_api = None - influx = InfluxDBClient( - host=influx_conf["host"], - port=influx_conf["port"], - username=influx_conf["username"], - password=influx_conf["password"], - database=database, - ssl=influx_conf["ssl"], - verify_ssl=influx_conf["verify_ssl"], - ) try: - influx.query("SHOW SERIES LIMIT 1;") - self.connected = True - self.data = InfluxSensorData( - influx, - query.get(CONF_GROUP_FUNCTION), - query.get(CONF_FIELD), - 
query.get(CONF_MEASUREMENT_NAME), - where_clause, - ) + if query_api is not None: + query_api.query( + f'from(bucket: "{bucket}") |> range(start: -1ms) |> keep(columns: ["_time"]) |> limit(n: 1)' + ) + self.connected = True + self.data = InfluxSensorDataV2( + query_api, + bucket, + query.get(CONF_RANGE_START), + query.get(CONF_RANGE_STOP), + query_clause, + query.get(CONF_IMPORTS), + query.get(CONF_GROUP_FUNCTION), + ) + + else: + influx.query("SHOW SERIES LIMIT 1;") + self.connected = True + self.data = InfluxSensorDataV1( + influx, + query.get(CONF_GROUP_FUNCTION), + query.get(CONF_FIELD), + query.get(CONF_MEASUREMENT_NAME), + where_clause, + ) except exceptions.InfluxDBClientError as exc: _LOGGER.error( "Database host is not accessible due to '%s', please" @@ -138,6 +249,15 @@ class InfluxSensor(Entity): exc, ) self.connected = False + except ApiException as exc: + _LOGGER.error( + "Bucket is not accessible due to '%s', please " + "check your entries in the configuration file (url, org, " + "bucket, etc.) and verify that the org and bucket exist and the " + "provided token has READ access.", + exc, + ) + self.connected = False @property def name(self): @@ -173,8 +293,76 @@ class InfluxSensor(Entity): self._state = value -class InfluxSensorData: - """Class for handling the data retrieval.""" +class InfluxSensorDataV2: + """Class for handling the data retrieval with v2 API.""" + + def __init__( + self, query_api, bucket, range_start, range_stop, query, imports, group + ): + """Initialize the data object.""" + self.query_api = query_api + self.bucket = bucket + self.range_start = range_start + self.range_stop = range_stop + self.query = query + self.imports = imports + self.group = group + self.value = None + self.full_query = None + + self.query_prefix = f'from(bucket:"{bucket}") |> range(start: {range_start}, stop: {range_stop}) |>' + if imports is not None: + for i in imports: + self.query_prefix = f'import "{i}" {self.query_prefix}' + + if group is None: + self.query_postfix = "|> limit(n: 1)" + else: + self.query_postfix = f'|> {group}(column: "_value")' + + @Throttle(MIN_TIME_BETWEEN_UPDATES) + def update(self): + """Get the latest data by querying influx.""" + _LOGGER.debug("Rendering query: %s", self.query) + try: + rendered_query = self.query.render() + except TemplateError as ex: + _LOGGER.error("Could not render query template: %s", ex) + return + + self.full_query = f"{self.query_prefix} {rendered_query} {self.query_postfix}" + + _LOGGER.info("Running query: %s", self.full_query) + + try: + tables = self.query_api.query(self.full_query) + except ApiException as exc: + _LOGGER.error( + "Could not execute query '%s' due to '%s', " + "Check the syntax of your query", + self.full_query, + exc, + ) + self.value = None + return + + if not tables: + _LOGGER.warning( + "Query returned no results, sensor state set to UNKNOWN: %s", + self.full_query, + ) + self.value = None + else: + if len(tables) > 1: + _LOGGER.warning( + "Query returned multiple tables, only value from first one is shown: %s", + self.full_query, + ) + self.value = tables[0].records[0].values["_value"] + + +class InfluxSensorDataV1: + """Class for handling the data retrieval with v1 API.""" def __init__(self, influx, group, field, measurement, where): """Initialize the data object.""" @@ -200,7 +388,18 @@ class InfluxSensorData: _LOGGER.info("Running query: %s", self.query) - points = list(self.influx.query(self.query).get_points()) + try: + points = list(self.influx.query(self.query).get_points()) + except 
exceptions.InfluxDBClientError as exc: + _LOGGER.error( + "Could not execute query '%s' due to '%s', " + "Check the syntax of your query", + self.query, + exc, + ) + self.value = None + return + if not points: _LOGGER.warning( "Query returned no points, sensor state set to UNKNOWN: %s", self.query diff --git a/homeassistant/components/insteon/manifest.json b/homeassistant/components/insteon/manifest.json index 70a7375d51f..d1a31117fb9 100644 --- a/homeassistant/components/insteon/manifest.json +++ b/homeassistant/components/insteon/manifest.json @@ -2,6 +2,6 @@ "domain": "insteon", "name": "Insteon", "documentation": "https://www.home-assistant.io/integrations/insteon", - "requirements": ["pyinsteon==1.0.4"], + "requirements": ["pyinsteon==1.0.5"], "codeowners": ["@teharris1"] } \ No newline at end of file diff --git a/homeassistant/components/intesishome/climate.py b/homeassistant/components/intesishome/climate.py index ecd00bde986..781117e8b71 100644 --- a/homeassistant/components/intesishome/climate.py +++ b/homeassistant/components/intesishome/climate.py @@ -199,6 +199,7 @@ class IntesisAC(ClimateEntity): await self._controller.connect() except IHConnectionError as ex: _LOGGER.error("Exception connecting to IntesisHome: %s", ex) + raise PlatformNotReady @property def name(self): diff --git a/homeassistant/components/intesishome/manifest.json b/homeassistant/components/intesishome/manifest.json index b6170225320..4131811807a 100644 --- a/homeassistant/components/intesishome/manifest.json +++ b/homeassistant/components/intesishome/manifest.json @@ -3,5 +3,5 @@ "name": "IntesisHome", "documentation": "https://www.home-assistant.io/integrations/intesishome", "codeowners": ["@jnimmo"], - "requirements": ["pyintesishome==1.7.4"] + "requirements": ["pyintesishome==1.7.5"] } diff --git a/homeassistant/components/ios/translations/et.json b/homeassistant/components/ios/translations/et.json deleted file mode 100644 index 0e652624ef6..00000000000 --- a/homeassistant/components/ios/translations/et.json +++ /dev/null @@ -1,9 +0,0 @@ -{ - "config": { - "step": { - "confirm": { - "title": "" - } - } - } -} \ No newline at end of file diff --git a/homeassistant/components/ios/translations/fi.json b/homeassistant/components/ios/translations/fi.json deleted file mode 100644 index f88bd919e33..00000000000 --- a/homeassistant/components/ios/translations/fi.json +++ /dev/null @@ -1,9 +0,0 @@ -{ - "config": { - "step": { - "confirm": { - "title": "Home Assistant iOS" - } - } - } -} \ No newline at end of file diff --git a/homeassistant/components/ipp/config_flow.py b/homeassistant/components/ipp/config_flow.py index ba12d7ec8e2..671bb2dd4cd 100644 --- a/homeassistant/components/ipp/config_flow.py +++ b/homeassistant/components/ipp/config_flow.py @@ -152,16 +152,17 @@ class IPPFlowHandler(ConfigFlow, domain=DOMAIN): _LOGGER.debug( "Unable to determine unique id from discovery info and IPP response" ) - return self.async_abort(reason="unique_id_required") - await self.async_set_unique_id(unique_id) - self._abort_if_unique_id_configured( - updates={ - CONF_HOST: self.discovery_info[CONF_HOST], - CONF_NAME: self.discovery_info[CONF_NAME], - }, - ) + if unique_id: + await self.async_set_unique_id(unique_id) + self._abort_if_unique_id_configured( + updates={ + CONF_HOST: self.discovery_info[CONF_HOST], + CONF_NAME: self.discovery_info[CONF_NAME], + }, + ) + await self._async_handle_discovery_without_unique_id() return await self.async_step_zeroconf_confirm() async def async_step_zeroconf_confirm( diff --git 
a/homeassistant/components/ipp/translations/no.json b/homeassistant/components/ipp/translations/no.json index c031864cf4d..543deee14fa 100644 --- a/homeassistant/components/ipp/translations/no.json +++ b/homeassistant/components/ipp/translations/no.json @@ -1,8 +1,8 @@ { "config": { "abort": { - "already_configured": "Denne skriveren er allerede konfigurert.", - "connection_error": "Klarte ikke \u00e5 koble til skriveren.", + "already_configured": "Enheten er allerede konfigurert.", + "connection_error": "Tilkobling mislyktes", "connection_upgrade": "Kunne ikke koble til skriveren fordi tilkoblingsoppgradering var n\u00f8dvendig.", "ipp_error": "Oppdaget IPP-feil.", "ipp_version_error": "IPP-versjon st\u00f8ttes ikke av skriveren.", @@ -10,7 +10,7 @@ "unique_id_required": "Enheten mangler unik identifikasjon som kreves for oppdagelse." }, "error": { - "connection_error": "Klarte ikke \u00e5 koble til skriveren.", + "connection_error": "Tilkobling mislyktes", "connection_upgrade": "Kunne ikke koble til skriveren. Vennligst pr\u00f8v igjen med alternativet SSL / TLS merket." }, "flow_title": "Skriver: {name}", @@ -18,7 +18,7 @@ "user": { "data": { "base_path": "Relativ bane til skriveren", - "host": "Vert eller IP-adresse", + "host": "Vert", "port": "Port", "ssl": "Skriveren st\u00f8tter kommunikasjon over SSL/TLS", "verify_ssl": "Skriveren bruker et riktig SSL-sertifikat" diff --git a/homeassistant/components/iqvia/manifest.json b/homeassistant/components/iqvia/manifest.json index 0acbecddf8d..5d880888ef5 100644 --- a/homeassistant/components/iqvia/manifest.json +++ b/homeassistant/components/iqvia/manifest.json @@ -3,6 +3,6 @@ "name": "IQVIA", "config_flow": true, "documentation": "https://www.home-assistant.io/integrations/iqvia", - "requirements": ["numpy==1.18.4", "pyiqvia==0.2.1"], + "requirements": ["numpy==1.19.0", "pyiqvia==0.2.1"], "codeowners": ["@bachya"] } diff --git a/homeassistant/components/isy994/const.py b/homeassistant/components/isy994/const.py index afbe44011d8..b2748223f51 100644 --- a/homeassistant/components/isy994/const.py +++ b/homeassistant/components/isy994/const.py @@ -316,8 +316,8 @@ UOM_FRIENDLY_NAME = { "3": f"btu/{TIME_HOURS}", "4": TEMP_CELSIUS, "5": LENGTH_CENTIMETERS, - "6": "ft³", - "7": f"ft³/{TIME_MINUTES}", + "6": f"{LENGTH_FEET}³", + "7": f"{LENGTH_FEET}³/{TIME_MINUTES}", "8": "m³", "9": TIME_DAYS, "10": TIME_DAYS, diff --git a/homeassistant/components/isy994/translations/no.json b/homeassistant/components/isy994/translations/no.json index 0e2d4fec686..7864a6916cd 100644 --- a/homeassistant/components/isy994/translations/no.json +++ b/homeassistant/components/isy994/translations/no.json @@ -1,5 +1,8 @@ { "config": { + "abort": { + "already_configured": "Enheten er allerede konfigurert" + }, "error": { "invalid_host": "Vertsoppf\u00f8ringen var ikke i fullstendig URL-format, for eksempel http://192.168.10.100:80", "unknown": "[%key:common::config_flow::error::unknown%" diff --git a/homeassistant/components/itach/remote.py b/homeassistant/components/itach/remote.py index 8f1f642e49e..6d08826ea53 100644 --- a/homeassistant/components/itach/remote.py +++ b/homeassistant/components/itach/remote.py @@ -5,7 +5,11 @@ import pyitachip2ir import voluptuous as vol from homeassistant.components import remote -from homeassistant.components.remote import ATTR_NUM_REPEATS, PLATFORM_SCHEMA +from homeassistant.components.remote import ( + ATTR_NUM_REPEATS, + DEFAULT_NUM_REPEATS, + PLATFORM_SCHEMA, +) from homeassistant.const import ( CONF_DEVICES, CONF_HOST, @@ -20,11 
+24,15 @@ _LOGGER = logging.getLogger(__name__) DEFAULT_PORT = 4998 CONNECT_TIMEOUT = 5000 +DEFAULT_MODADDR = 1 +DEFAULT_CONNADDR = 1 +DEFAULT_IR_COUNT = 1 CONF_MODADDR = "modaddr" CONF_CONNADDR = "connaddr" CONF_COMMANDS = "commands" CONF_DATA = "data" +CONF_IR_COUNT = "ir_count" PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( { @@ -36,8 +44,9 @@ PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( [ { vol.Optional(CONF_NAME): cv.string, - vol.Optional(CONF_MODADDR): vol.Coerce(int), - vol.Required(CONF_CONNADDR): vol.Coerce(int), + vol.Optional(CONF_MODADDR): cv.positive_int, + vol.Required(CONF_CONNADDR): cv.positive_int, + vol.Optional(CONF_IR_COUNT): cv.positive_int, vol.Required(CONF_COMMANDS): vol.All( cv.ensure_list, [ @@ -67,8 +76,9 @@ def setup_platform(hass, config, add_entities, discovery_info=None): devices = [] for data in config.get(CONF_DEVICES): name = data.get(CONF_NAME) - modaddr = int(data.get(CONF_MODADDR, 1)) - connaddr = int(data.get(CONF_CONNADDR, 1)) + modaddr = int(data.get(CONF_MODADDR, DEFAULT_MODADDR)) + connaddr = int(data.get(CONF_CONNADDR, DEFAULT_CONNADDR)) + ir_count = int(data.get(CONF_IR_COUNT, DEFAULT_IR_COUNT)) cmddatas = "" for cmd in data.get(CONF_COMMANDS): cmdname = cmd[CONF_NAME].strip() @@ -79,7 +89,7 @@ def setup_platform(hass, config, add_entities, discovery_info=None): cmddata = '""' cmddatas += f"{cmdname}\n{cmddata}\n" itachip2ir.addDevice(name, modaddr, connaddr, cmddatas) - devices.append(ITachIP2IRRemote(itachip2ir, name)) + devices.append(ITachIP2IRRemote(itachip2ir, name, ir_count)) add_entities(devices, True) return True @@ -87,11 +97,12 @@ def setup_platform(hass, config, add_entities, discovery_info=None): class ITachIP2IRRemote(remote.RemoteEntity): """Device that sends commands to an ITachIP2IR device.""" - def __init__(self, itachip2ir, name): + def __init__(self, itachip2ir, name, ir_count): """Initialize device.""" self.itachip2ir = itachip2ir self._power = False self._name = name or DEVICE_DEFAULT_NAME + self._ir_count = ir_count or DEFAULT_IR_COUNT @property def name(self): @@ -106,22 +117,22 @@ class ITachIP2IRRemote(remote.RemoteEntity): def turn_on(self, **kwargs): """Turn the device on.""" self._power = True - num_repeats = kwargs.get(ATTR_NUM_REPEATS, 1) - self.itachip2ir.send(self._name, "ON", num_repeats) + self.itachip2ir.send(self._name, "ON", self._ir_count) self.schedule_update_ha_state() def turn_off(self, **kwargs): """Turn the device off.""" self._power = False - num_repeats = kwargs.get(ATTR_NUM_REPEATS, 1) - self.itachip2ir.send(self._name, "OFF", num_repeats) + self.itachip2ir.send(self._name, "OFF", self._ir_count) self.schedule_update_ha_state() def send_command(self, command, **kwargs): """Send a command to one device.""" - num_repeats = kwargs.get(ATTR_NUM_REPEATS, 1) + num_repeats = kwargs.get(ATTR_NUM_REPEATS, DEFAULT_NUM_REPEATS) for single_command in command: - self.itachip2ir.send(self._name, single_command, num_repeats) + self.itachip2ir.send( + self._name, single_command, self._ir_count * num_repeats + ) def update(self): """Update the device.""" diff --git a/homeassistant/components/izone/translations/nn.json b/homeassistant/components/izone/translations/nn.json deleted file mode 100644 index af7146230d0..00000000000 --- a/homeassistant/components/izone/translations/nn.json +++ /dev/null @@ -1,9 +0,0 @@ -{ - "config": { - "step": { - "confirm": { - "title": "iZone" - } - } - } -} \ No newline at end of file diff --git a/homeassistant/components/kef/manifest.json b/homeassistant/components/kef/manifest.json index 
1eb9a9e19c2..586629765f6 100644 --- a/homeassistant/components/kef/manifest.json +++ b/homeassistant/components/kef/manifest.json @@ -3,5 +3,5 @@ "name": "KEF", "documentation": "https://www.home-assistant.io/integrations/kef", "codeowners": ["@basnijholt"], - "requirements": ["aiokef==0.2.10", "getmac==0.8.2"] + "requirements": ["aiokef==0.2.12", "getmac==0.8.2"] } diff --git a/homeassistant/components/kef/media_player.py b/homeassistant/components/kef/media_player.py index 1ba4d63ae4f..d033592cb3f 100644 --- a/homeassistant/components/kef/media_player.py +++ b/homeassistant/components/kef/media_player.py @@ -133,7 +133,7 @@ async def async_setup_platform(hass, config, async_add_entities, discovery_info= supports_on, sources, speaker_type, - ioloop=hass.loop, + loop=hass.loop, unique_id=unique_id, ) @@ -191,7 +191,7 @@ class KefMediaPlayer(MediaPlayerEntity): supports_on, sources, speaker_type, - ioloop, + loop, unique_id, ): """Initialize the media player.""" @@ -204,7 +204,7 @@ class KefMediaPlayer(MediaPlayerEntity): maximum_volume, standby_time, inverse_speaker_mode, - ioloop=ioloop, + loop=loop, ) self._unique_id = unique_id self._supports_on = supports_on diff --git a/homeassistant/components/konnected/config_flow.py b/homeassistant/components/konnected/config_flow.py index a6b01560c50..f545c5f2f2a 100644 --- a/homeassistant/components/konnected/config_flow.py +++ b/homeassistant/components/konnected/config_flow.py @@ -185,7 +185,7 @@ class KonnectedFlowHandler(config_entries.ConfigFlow, domain=DOMAIN): self.data[CONF_PORT] = port try: status = await get_status(self.hass, host, port) - self.data[CONF_ID] = status["mac"].replace(":", "") + self.data[CONF_ID] = status.get("chipId", status["mac"].replace(":", "")) except (CannotConnect, KeyError): raise CannotConnect else: @@ -293,7 +293,9 @@ class KonnectedFlowHandler(config_entries.ConfigFlow, domain=DOMAIN): except CannotConnect: errors["base"] = "cannot_connect" else: - self.data[CONF_ID] = status["mac"].replace(":", "") + self.data[CONF_ID] = status.get( + "chipId", status["mac"].replace(":", "") + ) self.data[CONF_MODEL] = status.get("model", KONN_MODEL) # save off our discovered host info diff --git a/homeassistant/components/konnected/panel.py b/homeassistant/components/konnected/panel.py index 793a5ee3d21..3b19a700837 100644 --- a/homeassistant/components/konnected/panel.py +++ b/homeassistant/components/konnected/panel.py @@ -76,7 +76,7 @@ class AlarmPanel: @property def device_id(self): - """Device id is the MAC address as string with punctuation removed.""" + """Device id is the chipId (pro) or MAC address as string with punctuation removed.""" return self.config.get(CONF_ID) @property diff --git a/homeassistant/components/konnected/translations/es.json b/homeassistant/components/konnected/translations/es.json index eae14b2ca1a..0b3bff1ff92 100644 --- a/homeassistant/components/konnected/translations/es.json +++ b/homeassistant/components/konnected/translations/es.json @@ -2,7 +2,7 @@ "config": { "abort": { "already_configured": "El dispositivo ya est\u00e1 configurado", - "already_in_progress": "El flujo de configuraci\u00f3n para el dispositivo ya est\u00e1 en curso.", + "already_in_progress": "El flujo de configuraci\u00f3n para el dispositivo ya est\u00e1 en marcha.", "not_konn_panel": "No es un dispositivo Konnected.io reconocido", "unknown": "Se produjo un error desconocido" }, diff --git a/homeassistant/components/lametric/notify.py b/homeassistant/components/lametric/notify.py index 052eb3bceac..d04225f2ce4 100644 
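A minimal illustrative sketch of the device-ID fallback introduced in the konnected config flow change above (not part of the diff); the helper name and the sample payloads are assumptions made for illustration only.

    # Mirrors status.get("chipId", status["mac"].replace(":", "")) from the change above.
    def device_id_from_status(status: dict) -> str:
        """Prefer the Pro panel's chipId, else fall back to the MAC with colons stripped."""
        return status.get("chipId", status["mac"].replace(":", ""))

    assert device_id_from_status({"mac": "aa:bb:cc:dd:ee:ff"}) == "aabbccddeeff"
    assert device_id_from_status({"chipId": "8bc58b", "mac": "aa:bb:cc:dd:ee:ff"}) == "8bc58b"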
--- a/homeassistant/components/lametric/notify.py +++ b/homeassistant/components/lametric/notify.py @@ -20,10 +20,12 @@ from . import DOMAIN as LAMETRIC_DOMAIN _LOGGER = logging.getLogger(__name__) AVAILABLE_PRIORITIES = ["info", "warning", "critical"] +AVAILABLE_ICON_TYPES = ["none", "info", "alert"] CONF_CYCLES = "cycles" CONF_LIFETIME = "lifetime" CONF_PRIORITY = "priority" +CONF_ICON_TYPE = "icon_type" PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( { @@ -31,6 +33,7 @@ PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( vol.Optional(CONF_LIFETIME, default=10): cv.positive_int, vol.Optional(CONF_CYCLES, default=1): cv.positive_int, vol.Optional(CONF_PRIORITY, default="warning"): vol.In(AVAILABLE_PRIORITIES), + vol.Optional(CONF_ICON_TYPE, default="info"): vol.In(AVAILABLE_ICON_TYPES), } ) @@ -44,19 +47,23 @@ def get_service(hass, config, discovery_info=None): config[CONF_LIFETIME] * 1000, config[CONF_CYCLES], config[CONF_PRIORITY], + config[CONF_ICON_TYPE], ) class LaMetricNotificationService(BaseNotificationService): """Implement the notification service for LaMetric.""" - def __init__(self, hasslametricmanager, icon, lifetime, cycles, priority): + def __init__( + self, hasslametricmanager, icon, lifetime, cycles, priority, icon_type + ): """Initialize the service.""" self.hasslametricmanager = hasslametricmanager self._icon = icon self._lifetime = lifetime self._cycles = cycles self._priority = priority + self._icon_type = icon_type self._devices = [] def send_message(self, message="", **kwargs): @@ -69,6 +76,7 @@ class LaMetricNotificationService(BaseNotificationService): cycles = self._cycles sound = None priority = self._priority + icon_type = self._icon_type # Additional data? if data is not None: @@ -82,6 +90,15 @@ class LaMetricNotificationService(BaseNotificationService): _LOGGER.error("Sound ID %s unknown, ignoring", data["sound"]) if "cycles" in data: cycles = int(data["cycles"]) + if "icon_type" in data: + if data["icon_type"] in AVAILABLE_ICON_TYPES: + icon_type = data["icon_type"] + else: + _LOGGER.warning( + "Icon type %s invalid, using default %s", + data["icon_type"], + icon_type, + ) if "priority" in data: if data["priority"] in AVAILABLE_PRIORITIES: priority = data["priority"] @@ -91,7 +108,6 @@ class LaMetricNotificationService(BaseNotificationService): data["priority"], priority, ) - text_frame = SimpleFrame(icon, message) _LOGGER.debug( "Icon/Message/Cycles/Lifetime: %s, %s, %d, %d", @@ -120,7 +136,10 @@ class LaMetricNotificationService(BaseNotificationService): try: lmn.set_device(dev) lmn.send_notification( - model, lifetime=self._lifetime, priority=priority + model, + lifetime=self._lifetime, + priority=priority, + icon_type=icon_type, ) _LOGGER.debug("Sent notification to LaMetric %s", dev["name"]) except OSError: diff --git a/homeassistant/components/lcn/cover.py b/homeassistant/components/lcn/cover.py index a5bea69e9ae..a4fa0a19b76 100644 --- a/homeassistant/components/lcn/cover.py +++ b/homeassistant/components/lcn/cover.py @@ -49,9 +49,10 @@ class LcnOutputsCover(LcnDevice, CoverEntity): ] else: self.reverse_time = None - self._closed = None - self.state_up = False - self.state_down = False + + self._is_closed = False + self._is_closing = False + self._is_opening = False async def async_added_to_hass(self): """Run when entity about to be added to hass.""" @@ -66,26 +67,44 @@ class LcnOutputsCover(LcnDevice, CoverEntity): @property def is_closed(self): """Return if the cover is closed.""" - return self._closed + return self._is_closed + + @property + def is_opening(self): +
"""Return if the cover is opening or not.""" + return self._is_opening + + @property + def is_closing(self): + """Return if the cover is closing or not.""" + return self._is_closing + + @property + def assumed_state(self): + """Return True if unable to access real state of the entity.""" + return True async def async_close_cover(self, **kwargs): """Close the cover.""" - self._closed = True - + self._is_opening = False + self._is_closing = True state = pypck.lcn_defs.MotorStateModifier.DOWN self.address_connection.control_motors_outputs(state) self.async_write_ha_state() async def async_open_cover(self, **kwargs): """Open the cover.""" - self._closed = False + self._is_closed = False + self._is_opening = True + self._is_closing = False state = pypck.lcn_defs.MotorStateModifier.UP self.address_connection.control_motors_outputs(state, self.reverse_time) self.async_write_ha_state() async def async_stop_cover(self, **kwargs): """Stop the cover.""" - self._closed = None + self._is_closing = False + self._is_opening = False state = pypck.lcn_defs.MotorStateModifier.STOP self.address_connection.control_motors_outputs(state, self.reverse_time) self.async_write_ha_state() @@ -98,15 +117,19 @@ class LcnOutputsCover(LcnDevice, CoverEntity): ): return - if input_obj.get_output_id() == self.output_ids[0]: - self.state_up = input_obj.get_percent() > 0 - else: # self.output_ids[1] - self.state_down = input_obj.get_percent() > 0 - - if self.state_up and not self.state_down: - self._closed = False # Cover open - elif self.state_down and not self.state_up: - self._closed = True # Cover closed + if input_obj.get_percent() > 0: # motor is on + if input_obj.get_output_id() == self.output_ids[0]: + self._is_opening = True + self._is_closing = False + else: # self.output_ids[1] + self._is_opening = False + self._is_closing = True + self._is_closed = self._is_closing + else: # motor is off + # cover is assumed to be closed if we were in closing state before + self._is_closed = self._is_closing + self._is_closing = False + self._is_opening = False self.async_write_ha_state() @@ -153,7 +176,6 @@ class LcnRelayCover(LcnDevice, CoverEntity): async def async_close_cover(self, **kwargs): """Close the cover.""" - self._is_closed = True self._is_opening = False self._is_closing = True states = [pypck.lcn_defs.MotorStateModifier.NOCHANGE] * 4 @@ -173,8 +195,6 @@ class LcnRelayCover(LcnDevice, CoverEntity): async def async_stop_cover(self, **kwargs): """Stop the cover.""" - if self._is_opening or self._is_closing: - self._is_closed = self._is_closing self._is_closing = False self._is_opening = False states = [pypck.lcn_defs.MotorStateModifier.NOCHANGE] * 4 @@ -191,9 +211,9 @@ class LcnRelayCover(LcnDevice, CoverEntity): if states[self.motor_port_onoff]: # motor is on self._is_opening = not states[self.motor_port_updown] # set direction self._is_closing = states[self.motor_port_updown] # set direction - self._is_closed = self._is_closing - else: + else: # motor is off self._is_opening = False self._is_closing = False + self._is_closed = states[self.motor_port_updown] self.async_write_ha_state() diff --git a/homeassistant/components/lifx/light.py b/homeassistant/components/lifx/light.py index 2b7629cdaf2..26a2acfa517 100644 --- a/homeassistant/components/lifx/light.py +++ b/homeassistant/components/lifx/light.py @@ -641,7 +641,9 @@ class LIFXLight(LightEntity): """Start an effect with default parameters.""" service = kwargs[ATTR_EFFECT] data = {ATTR_ENTITY_ID: self.entity_id} - await self.hass.services.async_call(LIFX_DOMAIN, 
service, data) + await self.hass.services.async_call( + LIFX_DOMAIN, service, data, context=self._context + ) async def async_update(self): """Update bulb status.""" diff --git a/homeassistant/components/lifx/translations/nn.json b/homeassistant/components/lifx/translations/nn.json deleted file mode 100644 index c189eb1e180..00000000000 --- a/homeassistant/components/lifx/translations/nn.json +++ /dev/null @@ -1,9 +0,0 @@ -{ - "config": { - "step": { - "confirm": { - "title": "LIFX" - } - } - } -} \ No newline at end of file diff --git a/homeassistant/components/lockitron/__init__.py b/homeassistant/components/lockitron/__init__.py deleted file mode 100644 index d2f9f749533..00000000000 --- a/homeassistant/components/lockitron/__init__.py +++ /dev/null @@ -1 +0,0 @@ -"""The lockitron component.""" diff --git a/homeassistant/components/lockitron/lock.py b/homeassistant/components/lockitron/lock.py deleted file mode 100644 index e1ece3da725..00000000000 --- a/homeassistant/components/lockitron/lock.py +++ /dev/null @@ -1,87 +0,0 @@ -"""Lockitron lock platform.""" -import logging - -import requests -import voluptuous as vol - -from homeassistant.components.lock import PLATFORM_SCHEMA, LockEntity -from homeassistant.const import CONF_ACCESS_TOKEN, CONF_ID, HTTP_OK -import homeassistant.helpers.config_validation as cv - -_LOGGER = logging.getLogger(__name__) - -DOMAIN = "lockitron" - -PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( - {vol.Required(CONF_ACCESS_TOKEN): cv.string, vol.Required(CONF_ID): cv.string} -) -BASE_URL = "https://api.lockitron.com" - - -def setup_platform(hass, config, add_entities, discovery_info=None): - """Set up the Lockitron platform.""" - access_token = config.get(CONF_ACCESS_TOKEN) - device_id = config.get(CONF_ID) - response = requests.get( - f"{BASE_URL}/v2/locks/{device_id}?access_token={access_token}", timeout=5 - ) - if response.status_code == HTTP_OK: - add_entities([Lockitron(response.json()["state"], access_token, device_id)]) - else: - _LOGGER.error("Error retrieving lock status during init: %s", response.text) - - -class Lockitron(LockEntity): - """Representation of a Lockitron lock.""" - - LOCK_STATE = "lock" - UNLOCK_STATE = "unlock" - - def __init__(self, state, access_token, device_id): - """Initialize the lock.""" - self._state = state - self.access_token = access_token - self.device_id = device_id - - @property - def name(self): - """Return the name of the device.""" - return DOMAIN - - @property - def is_locked(self): - """Return True if the lock is currently locked, else False.""" - return self._state == Lockitron.LOCK_STATE - - def lock(self, **kwargs): - """Lock the device.""" - self._state = self.do_change_request(Lockitron.LOCK_STATE) - - def unlock(self, **kwargs): - """Unlock the device.""" - self._state = self.do_change_request(Lockitron.UNLOCK_STATE) - - def update(self): - """Update the internal state of the device.""" - response = requests.get( - f"{BASE_URL}/v2/locks/{self.device_id}?access_token={self.access_token}", - timeout=5, - ) - if response.status_code == HTTP_OK: - self._state = response.json()["state"] - else: - _LOGGER.error("Error retrieving lock status: %s", response.text) - - def do_change_request(self, requested_state): - """Execute the change request and pull out the new state.""" - response = requests.put( - f"{BASE_URL}/v2/locks/{self.device_id}?access_token={self.access_token}&state={requested_state}", - timeout=5, - ) - if response.status_code == HTTP_OK: - return response.json()["state"] - - _LOGGER.error( - "Error setting 
lock state: %s\n%s", requested_state, response.text - ) - return self._state diff --git a/homeassistant/components/lockitron/manifest.json b/homeassistant/components/lockitron/manifest.json deleted file mode 100644 index 088bc847621..00000000000 --- a/homeassistant/components/lockitron/manifest.json +++ /dev/null @@ -1,6 +0,0 @@ -{ - "domain": "lockitron", - "name": "Lockitron", - "documentation": "https://www.home-assistant.io/integrations/lockitron", - "codeowners": [] -} diff --git a/homeassistant/components/logbook/__init__.py b/homeassistant/components/logbook/__init__.py index d9310da23fd..28d6c7fcd48 100644 --- a/homeassistant/components/logbook/__init__.py +++ b/homeassistant/components/logbook/__init__.py @@ -1,40 +1,55 @@ """Event parser and human readable log generator.""" from datetime import timedelta from itertools import groupby +import json import logging import time from sqlalchemy.exc import SQLAlchemyError +from sqlalchemy.orm import aliased import voluptuous as vol from homeassistant.components import sun from homeassistant.components.http import HomeAssistantView -from homeassistant.components.recorder.models import Events, States +from homeassistant.components.recorder.models import ( + Events, + States, + process_timestamp, + process_timestamp_to_utc_isoformat, +) from homeassistant.components.recorder.util import ( QUERY_RETRY_WAIT, RETRIES, session_scope, ) from homeassistant.const import ( + ATTR_DEVICE_CLASS, ATTR_DOMAIN, ATTR_ENTITY_ID, - ATTR_HIDDEN, + ATTR_FRIENDLY_NAME, ATTR_NAME, + ATTR_UNIT_OF_MEASUREMENT, CONF_EXCLUDE, CONF_INCLUDE, EVENT_HOMEASSISTANT_START, EVENT_HOMEASSISTANT_STOP, EVENT_LOGBOOK_ENTRY, - EVENT_SCRIPT_STARTED, EVENT_STATE_CHANGED, HTTP_BAD_REQUEST, STATE_NOT_HOME, STATE_OFF, STATE_ON, ) -from homeassistant.core import DOMAIN as HA_DOMAIN, State, callback, split_entity_id +from homeassistant.core import DOMAIN as HA_DOMAIN, callback, split_entity_id import homeassistant.helpers.config_validation as cv -from homeassistant.helpers.entityfilter import generate_filter +from homeassistant.helpers.entityfilter import ( + INCLUDE_EXCLUDE_BASE_FILTER_SCHEMA, + convert_include_exclude_filter, + generate_filter, +) +from homeassistant.helpers.integration_platform import ( + async_process_integration_platforms, +) from homeassistant.loader import bind_hass import homeassistant.util.dt as dt_util @@ -50,40 +65,18 @@ DOMAIN = "logbook" GROUP_BY_MINUTES = 15 +EMPTY_JSON_OBJECT = "{}" CONFIG_SCHEMA = vol.Schema( - { - DOMAIN: vol.Schema( - { - CONF_EXCLUDE: vol.Schema( - { - vol.Optional(CONF_ENTITIES, default=[]): cv.entity_ids, - vol.Optional(CONF_DOMAINS, default=[]): vol.All( - cv.ensure_list, [cv.string] - ), - } - ), - CONF_INCLUDE: vol.Schema( - { - vol.Optional(CONF_ENTITIES, default=[]): cv.entity_ids, - vol.Optional(CONF_DOMAINS, default=[]): vol.All( - cv.ensure_list, [cv.string] - ), - } - ), - } - ) - }, - extra=vol.ALLOW_EXTRA, + {DOMAIN: INCLUDE_EXCLUDE_BASE_FILTER_SCHEMA}, extra=vol.ALLOW_EXTRA ) -ALL_EVENT_TYPES = [ - EVENT_STATE_CHANGED, - EVENT_LOGBOOK_ENTRY, +HOMEASSISTANT_EVENTS = [ EVENT_HOMEASSISTANT_START, EVENT_HOMEASSISTANT_STOP, - EVENT_SCRIPT_STARTED, ] +ALL_EVENT_TYPES = [EVENT_STATE_CHANGED, EVENT_LOGBOOK_ENTRY, *HOMEASSISTANT_EVENTS] + LOG_MESSAGE_SCHEMA = vol.Schema( { vol.Required(ATTR_NAME): cv.string, @@ -112,14 +105,9 @@ def async_log_entry(hass, name, message, domain=None, entity_id=None): hass.bus.async_fire(EVENT_LOGBOOK_ENTRY, data) -@bind_hass -def async_describe_event(hass, domain, event_name, 
describe_callback): - """Teach logbook how to describe a new event.""" - hass.data.setdefault(DOMAIN, {})[event_name] = (domain, describe_callback) - - async def async_setup(hass, config): - """Listen for download events to download files.""" + """Logbook setup.""" + hass.data[DOMAIN] = {} @callback def log_message(service): @@ -140,9 +128,23 @@ async def async_setup(hass, config): ) hass.services.async_register(DOMAIN, "log", log_message, schema=LOG_MESSAGE_SCHEMA) + + await async_process_integration_platforms(hass, DOMAIN, _process_logbook_platform) + return True +async def _process_logbook_platform(hass, domain, platform): + """Process a logbook platform.""" + + @callback + def _async_describe_event(domain, event_name, describe_callback): + """Teach logbook how to describe a new event.""" + hass.data[DOMAIN][event_name] = (domain, describe_callback) + + platform.async_describe_events(hass, _async_describe_event) + + class LogbookView(HomeAssistantView): """Handle logbook view requests.""" @@ -171,8 +173,17 @@ class LogbookView(HomeAssistantView): period = int(period) entity_id = request.query.get("entity") - start_day = dt_util.as_utc(datetime) - timedelta(days=period - 1) - end_day = start_day + timedelta(days=period) + + end_time = request.query.get("end_time") + if end_time is None: + start_day = dt_util.as_utc(datetime) - timedelta(days=period - 1) + end_day = start_day + timedelta(days=period) + else: + start_day = datetime + end_day = dt_util.parse_datetime(end_time) + if end_day is None: + return self.json_message("Invalid end_time", HTTP_BAD_REQUEST) + hass = request.app["hass"] def json_events(): @@ -184,18 +195,19 @@ class LogbookView(HomeAssistantView): return await hass.async_add_job(json_events) -def humanify(hass, events): +def humanify(hass, events, entity_attr_cache, prev_states=None): """Generate a converted list of events into Entry objects. Will try to group events if possible: - if 2+ sensor updates in GROUP_BY_MINUTES, show last - if Home Assistant stop and start happen in same minute call it restarted """ - domain_prefixes = tuple(f"{dom}." 
for dom in CONTINUOUS_DOMAINS) + if prev_states is None: + prev_states = {} # Group events in batches of GROUP_BY_MINUTES for _, g_events in groupby( - events, lambda event: event.time_fired.minute // GROUP_BY_MINUTES + events, lambda event: event.time_fired_minute // GROUP_BY_MINUTES ): events_batch = list(g_events) @@ -210,22 +222,20 @@ def humanify(hass, events): # Process events for event in events_batch: if event.event_type == EVENT_STATE_CHANGED: - entity_id = event.data.get("entity_id") - - if entity_id.startswith(domain_prefixes): - last_sensor_event[entity_id] = event + if event.domain in CONTINUOUS_DOMAINS: + last_sensor_event[event.entity_id] = event elif event.event_type == EVENT_HOMEASSISTANT_STOP: - if event.time_fired.minute in start_stop_events: + if event.time_fired_minute in start_stop_events: continue - start_stop_events[event.time_fired.minute] = 1 + start_stop_events[event.time_fired_minute] = 1 elif event.event_type == EVENT_HOMEASSISTANT_START: - if event.time_fired.minute not in start_stop_events: + if event.time_fired_minute not in start_stop_events: continue - start_stop_events[event.time_fired.minute] = 2 + start_stop_events[event.time_fired_minute] = 2 # Yield entries external_events = hass.data.get(DOMAIN, {}) @@ -233,71 +243,73 @@ def humanify(hass, events): if event.event_type in external_events: domain, describe_event = external_events[event.event_type] data = describe_event(event) - data["when"] = event.time_fired + data["when"] = event.time_fired_isoformat data["domain"] = domain - data["context_id"] = event.context.id - data["context_user_id"] = event.context.user_id + data["context_user_id"] = event.context_user_id yield data if event.event_type == EVENT_STATE_CHANGED: - to_state = State.from_dict(event.data.get("new_state")) + entity_id = event.entity_id - domain = to_state.domain + # Skip events that have not changed state + if entity_id in prev_states and prev_states[entity_id] == event.state: + continue + + prev_states[entity_id] = event.state + domain = event.domain - # Skip all but the last sensor state if ( domain in CONTINUOUS_DOMAINS - and event != last_sensor_event[to_state.entity_id] + and event != last_sensor_event[entity_id] ): + # Skip all but the last sensor state continue - # Don't show continuous sensor value changes in the logbook - if domain in CONTINUOUS_DOMAINS and to_state.attributes.get( - "unit_of_measurement" - ): - continue + name = entity_attr_cache.get( + entity_id, ATTR_FRIENDLY_NAME, event + ) or split_entity_id(entity_id)[1].replace("_", " ") yield { - "when": event.time_fired, - "name": to_state.name, - "message": _entry_message_from_state(domain, to_state), + "when": event.time_fired_isoformat, + "name": name, + "message": _entry_message_from_event( + hass, entity_id, domain, event, entity_attr_cache + ), "domain": domain, - "entity_id": to_state.entity_id, - "context_id": event.context.id, - "context_user_id": event.context.user_id, + "entity_id": entity_id, + "context_user_id": event.context_user_id, } elif event.event_type == EVENT_HOMEASSISTANT_START: - if start_stop_events.get(event.time_fired.minute) == 2: + if start_stop_events.get(event.time_fired_minute) == 2: continue yield { - "when": event.time_fired, + "when": event.time_fired_isoformat, "name": "Home Assistant", "message": "started", "domain": HA_DOMAIN, - "context_id": event.context.id, - "context_user_id": event.context.user_id, + "context_user_id": event.context_user_id, } elif event.event_type == EVENT_HOMEASSISTANT_STOP: - if 
start_stop_events.get(event.time_fired.minute) == 2: + if start_stop_events.get(event.time_fired_minute) == 2: action = "restarted" else: action = "stopped" yield { - "when": event.time_fired, + "when": event.time_fired_isoformat, "name": "Home Assistant", "message": action, "domain": HA_DOMAIN, - "context_id": event.context.id, - "context_user_id": event.context.user_id, + "context_user_id": event.context_user_id, } elif event.event_type == EVENT_LOGBOOK_ENTRY: - domain = event.data.get(ATTR_DOMAIN) - entity_id = event.data.get(ATTR_ENTITY_ID) + event_data = event.data + domain = event_data.get(ATTR_DOMAIN) + entity_id = event_data.get(ATTR_ENTITY_ID) if domain is None and entity_id is not None: try: domain = split_entity_id(str(entity_id))[0] @@ -305,24 +317,11 @@ def humanify(hass, events): pass yield { - "when": event.time_fired, - "name": event.data.get(ATTR_NAME), - "message": event.data.get(ATTR_MESSAGE), + "when": event.time_fired_isoformat, + "name": event_data.get(ATTR_NAME), + "message": event_data.get(ATTR_MESSAGE), "domain": domain, "entity_id": entity_id, - "context_id": event.context.id, - "context_user_id": event.context.user_id, - } - - elif event.event_type == EVENT_SCRIPT_STARTED: - yield { - "when": event.time_fired, - "name": event.data.get(ATTR_NAME), - "message": "started", - "domain": "script", - "entity_id": event.data.get(ATTR_ENTITY_ID), - "context_id": event.context.id, - "context_user_id": event.context.user_id, } @@ -352,170 +351,188 @@ def _get_related_entity_ids(session, entity_filter): time.sleep(QUERY_RETRY_WAIT) -def _generate_filter_from_config(config): - excluded_entities = [] - excluded_domains = [] - included_entities = [] - included_domains = [] - - exclude = config.get(CONF_EXCLUDE) - if exclude: - excluded_entities = exclude.get(CONF_ENTITIES, []) - excluded_domains = exclude.get(CONF_DOMAINS, []) - include = config.get(CONF_INCLUDE) - if include: - included_entities = include.get(CONF_ENTITIES, []) - included_domains = include.get(CONF_DOMAINS, []) - - return generate_filter( - included_domains, included_entities, excluded_domains, excluded_entities - ) +def _all_entities_filter(_): + """Filter that accepts all entities.""" + return True def _get_events(hass, config, start_day, end_day, entity_id=None): """Get events for a period of time.""" - entities_filter = _generate_filter_from_config(config) + entity_attr_cache = EntityAttributeCache(hass) def yield_events(query): """Yield Events that are not filtered away.""" - for row in query.yield_per(500): - event = row.to_native() - if _keep_event(hass, event, entities_filter): + for row in query.yield_per(1000): + event = LazyEventPartialState(row) + if _keep_event(hass, event, entities_filter, entity_attr_cache): yield event with session_scope(hass=hass) as session: if entity_id is not None: entity_ids = [entity_id.lower()] - else: + entities_filter = generate_filter([], entity_ids, [], []) + elif config.get(CONF_EXCLUDE) or config.get(CONF_INCLUDE): + entities_filter = convert_include_exclude_filter(config) entity_ids = _get_related_entity_ids(session, entities_filter) + else: + entities_filter = _all_entities_filter + entity_ids = None + + old_state = aliased(States, name="old_state") query = ( - session.query(Events) + session.query( + Events.event_type, + Events.event_data, + Events.time_fired, + Events.context_user_id, + States.state_id, + States.state, + States.entity_id, + States.domain, + States.attributes, + old_state.state_id.label("old_state_id"), + ) .order_by(Events.time_fired) 
.outerjoin(States, (Events.event_id == States.event_id)) + .outerjoin(old_state, (States.old_state_id == old_state.state_id)) + # The below filter removes state change events that do not have + # an old_state or new_state, or where the old and + # new state are the same, for v8 schema or later. + # + # If the events/states were stored before v8 schema, we rely on the + # prev_states dict to remove them. + # + # When all data is schema v8 or later, the check for EMPTY_JSON_OBJECT + # can be removed. + .filter( + (Events.event_type != EVENT_STATE_CHANGED) + | (Events.event_data != EMPTY_JSON_OBJECT) + | ( + (States.state_id.isnot(None)) + & (old_state.state_id.isnot(None)) + & (States.state != old_state.state) + ) + ) .filter( Events.event_type.in_(ALL_EVENT_TYPES + list(hass.data.get(DOMAIN, {}))) ) .filter((Events.time_fired > start_day) & (Events.time_fired < end_day)) - .filter( + ) + + if entity_ids: + query = query.filter( ( (States.last_updated == States.last_changed) & States.entity_id.in_(entity_ids) ) | (States.state_id.is_(None)) ) - ) + else: + query = query.filter( + (States.last_updated == States.last_changed) + | (States.state_id.is_(None)) + ) - return list(humanify(hass, yield_events(query))) + # When all data is schema v8 or later, prev_states can be removed + prev_states = {} + return list(humanify(hass, yield_events(query), entity_attr_cache, prev_states)) -def _keep_event(hass, event, entities_filter): - domain = event.data.get(ATTR_DOMAIN) - entity_id = event.data.get("entity_id") - if entity_id: - domain = split_entity_id(entity_id)[0] - +def _keep_event(hass, event, entities_filter, entity_attr_cache): if event.event_type == EVENT_STATE_CHANGED: + entity_id = event.entity_id if entity_id is None: return False # Do not report on new entities - old_state = event.data.get("old_state") - if old_state is None: - return False - # Do not report on entity removal - new_state = event.data.get("new_state") - if new_state is None: + if not event.has_old_and_new_state: return False - # Do not report on only attribute changes - if new_state.get("state") == old_state.get("state"): + if event.domain in CONTINUOUS_DOMAINS and entity_attr_cache.get( + entity_id, ATTR_UNIT_OF_MEASUREMENT, event + ): + # Don't show continuous sensor value changes in the logbook return False - - attributes = new_state.get("attributes", {}) - - # Also filter auto groups. - if domain == "group" and attributes.get("auto", False): - return False - - # exclude entities which are customized hidden - hidden = attributes.get(ATTR_HIDDEN, False) - if hidden: - return False - - elif event.event_type == EVENT_LOGBOOK_ENTRY: - domain = event.data.get(ATTR_DOMAIN) - - elif event.event_type == EVENT_SCRIPT_STARTED: - domain = "script" - - elif not entity_id and event.event_type in hass.data.get(DOMAIN, {}): + elif event.event_type in HOMEASSISTANT_EVENTS: + entity_id = f"{HA_DOMAIN}." + elif event.event_type in hass.data[DOMAIN] and ATTR_ENTITY_ID not in event.data: # If the entity_id isn't described, use the domain that describes # the event for filtering. domain = hass.data[DOMAIN][event.event_type][0] - - if not entity_id and domain: + if domain is None: + return False entity_id = f"{domain}." + else: + event_data = event.data + entity_id = event_data.get(ATTR_ENTITY_ID) + if entity_id is None: + domain = event_data.get(ATTR_DOMAIN) + if domain is None: + return False + entity_id = f"{domain}."
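A rough, standalone sketch of the intent behind the combined query filter above (not part of the diff): a plain-Python predicate over a hypothetical row carrying the query's column names, assuming EVENT_STATE_CHANGED is "state_changed" and EMPTY_JSON_OBJECT is "{}"; the "old_state" key stands in for the joined previous state's value.

    # Illustrative only; column names mirror the SELECT list in the query above.
    def keeps_row(row: dict) -> bool:
        if row["event_type"] != "state_changed":
            return True  # events other than state_changed always pass this filter
        if row["event_data"] != "{}":
            return True  # pre-v8 rows still embed old_state/new_state in event_data
        # v8+ rows: keep only real state changes that have both states joined in
        return (
            row["state_id"] is not None
            and row["old_state_id"] is not None
            and row["state"] != row["old_state"]
        )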
- return not entity_id or entities_filter(entity_id) + return entities_filter(entity_id) -def _entry_message_from_state(domain, state): +def _entry_message_from_event(hass, entity_id, domain, event, entity_attr_cache): """Convert a state to a message for the logbook.""" # We pass domain in so we don't have to split entity_id again + state_state = event.state + if domain in ["device_tracker", "person"]: - if state.state == STATE_NOT_HOME: + if state_state == STATE_NOT_HOME: return "is away" - return f"is at {state.state}" + return f"is at {state_state}" if domain == "sun": - if state.state == sun.STATE_ABOVE_HORIZON: + if state_state == sun.STATE_ABOVE_HORIZON: return "has risen" return "has set" - device_class = state.attributes.get("device_class") - if domain == "binary_sensor" and device_class: + if domain == "binary_sensor": + device_class = entity_attr_cache.get(entity_id, ATTR_DEVICE_CLASS, event) if device_class == "battery": - if state.state == STATE_ON: + if state_state == STATE_ON: return "is low" - if state.state == STATE_OFF: + if state_state == STATE_OFF: return "is normal" if device_class == "connectivity": - if state.state == STATE_ON: + if state_state == STATE_ON: return "is connected" - if state.state == STATE_OFF: + if state_state == STATE_OFF: return "is disconnected" if device_class in ["door", "garage_door", "opening", "window"]: - if state.state == STATE_ON: + if state_state == STATE_ON: return "is opened" - if state.state == STATE_OFF: + if state_state == STATE_OFF: return "is closed" if device_class == "lock": - if state.state == STATE_ON: + if state_state == STATE_ON: return "is unlocked" - if state.state == STATE_OFF: + if state_state == STATE_OFF: return "is locked" if device_class == "plug": - if state.state == STATE_ON: + if state_state == STATE_ON: return "is plugged in" - if state.state == STATE_OFF: + if state_state == STATE_OFF: return "is unplugged" if device_class == "presence": - if state.state == STATE_ON: + if state_state == STATE_ON: return "is at home" - if state.state == STATE_OFF: + if state_state == STATE_OFF: return "is away" if device_class == "safety": - if state.state == STATE_ON: + if state_state == STATE_ON: return "is unsafe" - if state.state == STATE_OFF: + if state_state == STATE_OFF: return "is safe" if device_class in [ @@ -532,16 +549,149 @@ def _entry_message_from_state(domain, state): "sound", "vibration", ]: - if state.state == STATE_ON: + if state_state == STATE_ON: return f"detected {device_class}" - if state.state == STATE_OFF: + if state_state == STATE_OFF: return f"cleared (no {device_class} detected)" - if state.state == STATE_ON: + if state_state == STATE_ON: # Future: combine groups and its entity entries ? 
return "turned on" - if state.state == STATE_OFF: + if state_state == STATE_OFF: return "turned off" - return f"changed to {state.state}" + return f"changed to {state_state}" + + +class LazyEventPartialState: + """A lazy version of core Event with limited State joined in.""" + + __slots__ = [ + "_row", + "_event_data", + "_time_fired", + "_time_fired_isoformat", + "_attributes", + "event_type", + "entity_id", + "state", + "domain", + ] + + def __init__(self, row): + """Init the lazy event.""" + self._row = row + self._event_data = None + self._time_fired = None + self._time_fired_isoformat = None + self._attributes = None + self.event_type = self._row.event_type + self.entity_id = self._row.entity_id + self.state = self._row.state + self.domain = self._row.domain + + @property + def context_user_id(self): + """Context user id of event.""" + return self._row.context_user_id + + @property + def attributes(self): + """State attributes.""" + if not self._attributes: + if ( + self._row.attributes is None + or self._row.attributes == EMPTY_JSON_OBJECT + ): + self._attributes = {} + else: + self._attributes = json.loads(self._row.attributes) + return self._attributes + + @property + def data(self): + """Event data.""" + if not self._event_data: + if self._row.event_data == EMPTY_JSON_OBJECT: + self._event_data = {} + else: + self._event_data = json.loads(self._row.event_data) + return self._event_data + + @property + def time_fired_minute(self): + """Minute the event was fired not converted.""" + return self._row.time_fired.minute + + @property + def time_fired(self): + """Time event was fired in utc.""" + if not self._time_fired: + self._time_fired = ( + process_timestamp(self._row.time_fired) or dt_util.utcnow() + ) + return self._time_fired + + @property + def time_fired_isoformat(self): + """Time event was fired in utc isoformat.""" + if not self._time_fired_isoformat: + if self._time_fired: + self._time_fired_isoformat = self._time_fired.isoformat() + else: + self._time_fired_isoformat = process_timestamp_to_utc_isoformat( + self._row.time_fired or dt_util.utcnow() + ) + return self._time_fired_isoformat + + @property + def has_old_and_new_state(self): + """Check the json data to see if new_state and old_state is present without decoding.""" + # Delete this check once all states are saved in the v8 schema + # format or later (they have the old_state_id column). + + # New events in v8 schema format + if self._row.event_data == EMPTY_JSON_OBJECT: + return self._row.state_id is not None and self._row.old_state_id is not None + + # Old events not in v8 schema format + return ( + '"old_state": {' in self._row.event_data + and '"new_state": {' in self._row.event_data + ) + + +class EntityAttributeCache: + """A cache to lookup static entity_id attribute. + + This class should not be used to lookup attributes + that are expected to change state. 
+ """ + + def __init__(self, hass): + """Init the cache.""" + self._hass = hass + self._cache = {} + + def get(self, entity_id, attribute, event): + """Lookup an attribute for an entity or get it from the cache.""" + if entity_id in self._cache: + if attribute in self._cache[entity_id]: + return self._cache[entity_id][attribute] + else: + self._cache[entity_id] = {} + + current_state = self._hass.states.get(entity_id) + if current_state: + # Try the current state as its faster than decoding the + # attributes + self._cache[entity_id][attribute] = current_state.attributes.get( + attribute, None + ) + else: + # If the entity has been removed, decode the attributes + # instead + self._cache[entity_id][attribute] = event.attributes.get(attribute) + + return self._cache[entity_id][attribute] diff --git a/homeassistant/components/luftdaten/translations/nn.json b/homeassistant/components/luftdaten/translations/nn.json deleted file mode 100644 index 2639d90be2d..00000000000 --- a/homeassistant/components/luftdaten/translations/nn.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "title": "Luftdaten" -} \ No newline at end of file diff --git a/homeassistant/components/lutron_caseta/__init__.py b/homeassistant/components/lutron_caseta/__init__.py index ff7ec61ecc8..40b65293f1d 100644 --- a/homeassistant/components/lutron_caseta/__init__.py +++ b/homeassistant/components/lutron_caseta/__init__.py @@ -127,6 +127,16 @@ class LutronCasetaDevice(Entity): """Return the unique ID of the device (serial).""" return str(self.serial) + @property + def device_info(self): + """Return the device info.""" + return { + "identifiers": {(DOMAIN, self.serial)}, + "name": self.name, + "manufacturer": "Lutron", + "model": self._device["model"], + } + @property def device_state_attributes(self): """Return the state attributes.""" diff --git a/homeassistant/components/lutron_caseta/translations/bg.json b/homeassistant/components/lutron_caseta/translations/bg.json deleted file mode 100644 index 970d722fe4c..00000000000 --- a/homeassistant/components/lutron_caseta/translations/bg.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "title": "Lutron Cas\u00e9ta" -} \ No newline at end of file diff --git a/homeassistant/components/lutron_caseta/translations/da.json b/homeassistant/components/lutron_caseta/translations/da.json deleted file mode 100644 index 970d722fe4c..00000000000 --- a/homeassistant/components/lutron_caseta/translations/da.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "title": "Lutron Cas\u00e9ta" -} \ No newline at end of file diff --git a/homeassistant/components/lutron_caseta/translations/de.json b/homeassistant/components/lutron_caseta/translations/de.json deleted file mode 100644 index 970d722fe4c..00000000000 --- a/homeassistant/components/lutron_caseta/translations/de.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "title": "Lutron Cas\u00e9ta" -} \ No newline at end of file diff --git a/homeassistant/components/lutron_caseta/translations/es-419.json b/homeassistant/components/lutron_caseta/translations/es-419.json deleted file mode 100644 index 970d722fe4c..00000000000 --- a/homeassistant/components/lutron_caseta/translations/es-419.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "title": "Lutron Cas\u00e9ta" -} \ No newline at end of file diff --git a/homeassistant/components/lutron_caseta/translations/fr.json b/homeassistant/components/lutron_caseta/translations/fr.json deleted file mode 100644 index 970d722fe4c..00000000000 --- a/homeassistant/components/lutron_caseta/translations/fr.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "title": "Lutron 
Cas\u00e9ta" -} \ No newline at end of file diff --git a/homeassistant/components/lutron_caseta/translations/hu.json b/homeassistant/components/lutron_caseta/translations/hu.json deleted file mode 100644 index 970d722fe4c..00000000000 --- a/homeassistant/components/lutron_caseta/translations/hu.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "title": "Lutron Cas\u00e9ta" -} \ No newline at end of file diff --git a/homeassistant/components/lutron_caseta/translations/nl.json b/homeassistant/components/lutron_caseta/translations/nl.json deleted file mode 100644 index 970d722fe4c..00000000000 --- a/homeassistant/components/lutron_caseta/translations/nl.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "title": "Lutron Cas\u00e9ta" -} \ No newline at end of file diff --git a/homeassistant/components/lutron_caseta/translations/sl.json b/homeassistant/components/lutron_caseta/translations/sl.json deleted file mode 100644 index 970d722fe4c..00000000000 --- a/homeassistant/components/lutron_caseta/translations/sl.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "title": "Lutron Cas\u00e9ta" -} \ No newline at end of file diff --git a/homeassistant/components/lutron_caseta/translations/sv.json b/homeassistant/components/lutron_caseta/translations/sv.json deleted file mode 100644 index 970d722fe4c..00000000000 --- a/homeassistant/components/lutron_caseta/translations/sv.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "title": "Lutron Cas\u00e9ta" -} \ No newline at end of file diff --git a/homeassistant/components/mailgun/translations/nn.json b/homeassistant/components/mailgun/translations/nn.json deleted file mode 100644 index 9e8d91a8874..00000000000 --- a/homeassistant/components/mailgun/translations/nn.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "title": "Mailgun" -} \ No newline at end of file diff --git a/homeassistant/components/media_extractor/manifest.json b/homeassistant/components/media_extractor/manifest.json index b70729a7435..6e3717481cf 100644 --- a/homeassistant/components/media_extractor/manifest.json +++ b/homeassistant/components/media_extractor/manifest.json @@ -2,7 +2,7 @@ "domain": "media_extractor", "name": "Media Extractor", "documentation": "https://www.home-assistant.io/integrations/media_extractor", - "requirements": ["youtube_dl==2020.05.29"], + "requirements": ["youtube_dl==2020.06.16.1"], "dependencies": ["media_player"], "codeowners": [], "quality_scale": "internal" diff --git a/homeassistant/components/metoffice/__init__.py b/homeassistant/components/metoffice/__init__.py index 94cc8b636d4..8a68646240a 100644 --- a/homeassistant/components/metoffice/__init__.py +++ b/homeassistant/components/metoffice/__init__.py @@ -1 +1,86 @@ -"""The metoffice component.""" +"""The Met Office integration.""" + +import asyncio +import logging + +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import CONF_API_KEY, CONF_LATITUDE, CONF_LONGITUDE, CONF_NAME +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import ConfigEntryNotReady +from homeassistant.helpers.update_coordinator import DataUpdateCoordinator + +from .const import ( + DEFAULT_SCAN_INTERVAL, + DOMAIN, + METOFFICE_COORDINATOR, + METOFFICE_DATA, + METOFFICE_NAME, +) +from .data import MetOfficeData + +_LOGGER = logging.getLogger(__name__) + +PLATFORMS = ["sensor", "weather"] + + +async def async_setup(hass: HomeAssistant, config: dict): + """Set up the Met Office weather component.""" + return True + + +async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry): + """Set up a Met Office entry.""" + 
+ latitude = entry.data[CONF_LATITUDE] + longitude = entry.data[CONF_LONGITUDE] + api_key = entry.data[CONF_API_KEY] + site_name = entry.data[CONF_NAME] + + metoffice_data = MetOfficeData(hass, api_key, latitude, longitude) + await metoffice_data.async_update_site() + if metoffice_data.site_name is None: + raise ConfigEntryNotReady() + + metoffice_coordinator = DataUpdateCoordinator( + hass, + _LOGGER, + name=f"MetOffice Coordinator for {site_name}", + update_method=metoffice_data.async_update, + update_interval=DEFAULT_SCAN_INTERVAL, + ) + + metoffice_hass_data = hass.data.setdefault(DOMAIN, {}) + metoffice_hass_data[entry.entry_id] = { + METOFFICE_DATA: metoffice_data, + METOFFICE_COORDINATOR: metoffice_coordinator, + METOFFICE_NAME: site_name, + } + + # Fetch initial data so we have data when entities subscribe + await metoffice_coordinator.async_refresh() + if metoffice_data.now is None: + raise ConfigEntryNotReady() + + for component in PLATFORMS: + hass.async_create_task( + hass.config_entries.async_forward_entry_setup(entry, component) + ) + + return True + + +async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry): + """Unload a config entry.""" + unload_ok = all( + await asyncio.gather( + *[ + hass.config_entries.async_forward_entry_unload(entry, component) + for component in PLATFORMS + ] + ) + ) + if unload_ok: + hass.data[DOMAIN].pop(entry.entry_id) + if not hass.data[DOMAIN]: + hass.data.pop(DOMAIN) + return unload_ok diff --git a/homeassistant/components/metoffice/config_flow.py b/homeassistant/components/metoffice/config_flow.py new file mode 100644 index 00000000000..b71c3de67e3 --- /dev/null +++ b/homeassistant/components/metoffice/config_flow.py @@ -0,0 +1,79 @@ +"""Config flow for Met Office integration.""" +import logging + +import voluptuous as vol + +from homeassistant import config_entries, core, exceptions +from homeassistant.const import CONF_API_KEY, CONF_LATITUDE, CONF_LONGITUDE, CONF_NAME +from homeassistant.helpers import config_validation as cv + +from .const import DOMAIN # pylint: disable=unused-import +from .data import MetOfficeData + +_LOGGER = logging.getLogger(__name__) + + +async def validate_input(hass: core.HomeAssistant, data): + """Validate that the user input allows us to connect to DataPoint. + + Data has the keys from DATA_SCHEMA with values provided by the user. 
+ """ + latitude = data[CONF_LATITUDE] + longitude = data[CONF_LONGITUDE] + api_key = data[CONF_API_KEY] + + metoffice_data = MetOfficeData(hass, api_key, latitude, longitude) + await metoffice_data.async_update_site() + if metoffice_data.site_name is None: + raise CannotConnect() + + return {"site_name": metoffice_data.site_name} + + +class MetOfficeConfigFlow(config_entries.ConfigFlow, domain=DOMAIN): + """Handle a config flow for Met Office weather integration.""" + + VERSION = 1 + CONNECTION_CLASS = config_entries.CONN_CLASS_CLOUD_POLL + + async def async_step_user(self, user_input=None): + """Handle the initial step.""" + errors = {} + if user_input is not None: + await self.async_set_unique_id( + f"{user_input[CONF_LATITUDE]}_{user_input[CONF_LONGITUDE]}" + ) + self._abort_if_unique_id_configured() + + try: + info = await validate_input(self.hass, user_input) + except CannotConnect: + errors["base"] = "cannot_connect" + except Exception: # pylint: disable=broad-except + _LOGGER.exception("Unexpected exception") + errors["base"] = "unknown" + else: + user_input[CONF_NAME] = info["site_name"] + return self.async_create_entry( + title=user_input[CONF_NAME], data=user_input + ) + + data_schema = vol.Schema( + { + vol.Required(CONF_API_KEY): str, + vol.Required( + CONF_LATITUDE, default=self.hass.config.latitude + ): cv.latitude, + vol.Required( + CONF_LONGITUDE, default=self.hass.config.longitude + ): cv.longitude, + }, + ) + + return self.async_show_form( + step_id="user", data_schema=data_schema, errors=errors + ) + + +class CannotConnect(exceptions.HomeAssistantError): + """Error to indicate we cannot connect.""" diff --git a/homeassistant/components/metoffice/const.py b/homeassistant/components/metoffice/const.py new file mode 100644 index 00000000000..b088672b8a5 --- /dev/null +++ b/homeassistant/components/metoffice/const.py @@ -0,0 +1,51 @@ +"""Constants for Met Office Integration.""" +from datetime import timedelta + +DOMAIN = "metoffice" + +DEFAULT_NAME = "Met Office" +ATTRIBUTION = "Data provided by the Met Office" + +DEFAULT_SCAN_INTERVAL = timedelta(minutes=15) + +METOFFICE_DATA = "metoffice_data" +METOFFICE_COORDINATOR = "metoffice_coordinator" +METOFFICE_MONITORED_CONDITIONS = "metoffice_monitored_conditions" +METOFFICE_NAME = "metoffice_name" + +MODE_3HOURLY = "3hourly" + +CONDITION_CLASSES = { + "cloudy": ["7", "8"], + "fog": ["5", "6"], + "hail": ["19", "20", "21"], + "lightning": ["30"], + "lightning-rainy": ["28", "29"], + "partlycloudy": ["2", "3"], + "pouring": ["13", "14", "15"], + "rainy": ["9", "10", "11", "12"], + "snowy": ["22", "23", "24", "25", "26", "27"], + "snowy-rainy": ["16", "17", "18"], + "sunny": ["0", "1"], + "windy": [], + "windy-variant": [], + "exceptional": [], +} + +VISIBILITY_CLASSES = { + "VP": "Very Poor", + "PO": "Poor", + "MO": "Moderate", + "GO": "Good", + "VG": "Very Good", + "EX": "Excellent", +} + +VISIBILITY_DISTANCE_CLASSES = { + "VP": "<1", + "PO": "1-4", + "MO": "4-10", + "GO": "10-20", + "VG": "20-40", + "EX": ">40", +} diff --git a/homeassistant/components/metoffice/data.py b/homeassistant/components/metoffice/data.py new file mode 100644 index 00000000000..8f718b8d4b8 --- /dev/null +++ b/homeassistant/components/metoffice/data.py @@ -0,0 +1,78 @@ +"""Common Met Office Data class used by both sensor and entity.""" + +import logging + +import datapoint + +from .const import MODE_3HOURLY + +_LOGGER = logging.getLogger(__name__) + + +class MetOfficeData: + """Get current and forecast data from Datapoint. 
+ + Please note that the 'datapoint' library is not asyncio-friendly, so some + calls have had to be wrapped with the standard hassio helper + async_add_executor_job. + """ + + def __init__(self, hass, api_key, latitude, longitude): + """Initialize the data object.""" + self._hass = hass + self._datapoint = datapoint.connection(api_key=api_key) + self._site = None + + # Public attributes + self.latitude = latitude + self.longitude = longitude + + # Holds the current data from the Met Office + self.site_id = None + self.site_name = None + self.now = None + + async def async_update_site(self): + """Async wrapper for getting the DataPoint site.""" + return await self._hass.async_add_executor_job(self._update_site) + + def _update_site(self): + """Return the nearest DataPoint Site to the held latitude/longitude.""" + try: + new_site = self._datapoint.get_nearest_forecast_site( + latitude=self.latitude, longitude=self.longitude + ) + if self._site is None or self._site.id != new_site.id: + self._site = new_site + self.now = None + + self.site_id = self._site.id + self.site_name = self._site.name + + except datapoint.exceptions.APIException as err: + _LOGGER.error("Received error from Met Office Datapoint: %s", err) + self._site = None + self.site_id = None + self.site_name = None + self.now = None + + return self._site + + async def async_update(self): + """Async wrapper for update method.""" + return await self._hass.async_add_executor_job(self._update) + + def _update(self): + """Get the latest data from DataPoint.""" + if self._site is None: + _LOGGER.error("No Met Office forecast site held, check logs for problems") + return + + try: + forecast = self._datapoint.get_forecast_for_site( + self._site.id, MODE_3HOURLY + ) + self.now = forecast.now() + except (ValueError, datapoint.exceptions.APIException) as err: + _LOGGER.error("Check Met Office connection: %s", err.args) + self.now = None diff --git a/homeassistant/components/metoffice/manifest.json b/homeassistant/components/metoffice/manifest.json index 20120d90b18..0c5d4e1d625 100644 --- a/homeassistant/components/metoffice/manifest.json +++ b/homeassistant/components/metoffice/manifest.json @@ -3,5 +3,6 @@ "name": "Met Office", "documentation": "https://www.home-assistant.io/integrations/metoffice", "requirements": ["datapoint==0.9.5"], - "codeowners": [] + "codeowners": ["@MrHarcombe"], + "config_flow": true } diff --git a/homeassistant/components/metoffice/sensor.py b/homeassistant/components/metoffice/sensor.py index b594517ac50..e314423a0a5 100644 --- a/homeassistant/components/metoffice/sensor.py +++ b/homeassistant/components/metoffice/sensor.py @@ -1,27 +1,31 @@ """Support for UK Met Office weather service.""" -from datetime import timedelta + import logging -import datapoint as dp -import voluptuous as vol - -from homeassistant.components.sensor import PLATFORM_SCHEMA from homeassistant.const import ( ATTR_ATTRIBUTION, - CONF_API_KEY, - CONF_LATITUDE, - CONF_LONGITUDE, - CONF_MONITORED_CONDITIONS, - CONF_NAME, + DEVICE_CLASS_HUMIDITY, + DEVICE_CLASS_TEMPERATURE, LENGTH_KILOMETERS, SPEED_MILES_PER_HOUR, TEMP_CELSIUS, UNIT_PERCENTAGE, UV_INDEX, ) -import homeassistant.helpers.config_validation as cv +from homeassistant.core import callback from homeassistant.helpers.entity import Entity -from homeassistant.util import Throttle +from homeassistant.helpers.typing import ConfigType, HomeAssistantType + +from .const import ( + ATTRIBUTION, + CONDITION_CLASSES, + DOMAIN, + METOFFICE_COORDINATOR, + METOFFICE_DATA, + METOFFICE_NAME, + 
VISIBILITY_CLASSES, + VISIBILITY_DISTANCE_CLASSES, +) _LOGGER = logging.getLogger(__name__) @@ -30,175 +34,190 @@ ATTR_SENSOR_ID = "sensor_id" ATTR_SITE_ID = "site_id" ATTR_SITE_NAME = "site_name" -ATTRIBUTION = "Data provided by the Met Office" - -CONDITION_CLASSES = { - "cloudy": ["7", "8"], - "fog": ["5", "6"], - "hail": ["19", "20", "21"], - "lightning": ["30"], - "lightning-rainy": ["28", "29"], - "partlycloudy": ["2", "3"], - "pouring": ["13", "14", "15"], - "rainy": ["9", "10", "11", "12"], - "snowy": ["22", "23", "24", "25", "26", "27"], - "snowy-rainy": ["16", "17", "18"], - "sunny": ["0", "1"], - "windy": [], - "windy-variant": [], - "exceptional": [], -} - -DEFAULT_NAME = "Met Office" - -VISIBILITY_CLASSES = { - "VP": "<1", - "PO": "1-4", - "MO": "4-10", - "GO": "10-20", - "VG": "20-40", - "EX": ">40", -} - -MIN_TIME_BETWEEN_UPDATES = timedelta(minutes=35) - -# Sensor types are defined like: Name, units +# Sensor types are defined as: +# variable -> [0]title, [1]device_class, [2]units, [3]icon, [4]enabled_by_default SENSOR_TYPES = { - "name": ["Station Name", None], - "weather": ["Weather", None], - "temperature": ["Temperature", TEMP_CELSIUS], - "feels_like_temperature": ["Feels Like Temperature", TEMP_CELSIUS], - "wind_speed": ["Wind Speed", SPEED_MILES_PER_HOUR], - "wind_direction": ["Wind Direction", None], - "wind_gust": ["Wind Gust", SPEED_MILES_PER_HOUR], - "visibility": ["Visibility", None], - "visibility_distance": ["Visibility Distance", LENGTH_KILOMETERS], - "uv": ["UV", UV_INDEX], - "precipitation": ["Probability of Precipitation", UNIT_PERCENTAGE], - "humidity": ["Humidity", UNIT_PERCENTAGE], + "name": ["Station Name", None, None, "mdi:label-outline", False], + "weather": [ + "Weather", + None, + None, + "mdi:weather-sunny", # but will adapt to current conditions + True, + ], + "temperature": ["Temperature", DEVICE_CLASS_TEMPERATURE, TEMP_CELSIUS, None, True], + "feels_like_temperature": [ + "Feels Like Temperature", + DEVICE_CLASS_TEMPERATURE, + TEMP_CELSIUS, + None, + False, + ], + "wind_speed": [ + "Wind Speed", + None, + SPEED_MILES_PER_HOUR, + "mdi:weather-windy", + True, + ], + "wind_direction": ["Wind Direction", None, None, "mdi:compass-outline", False], + "wind_gust": ["Wind Gust", None, SPEED_MILES_PER_HOUR, "mdi:weather-windy", False], + "visibility": ["Visibility", None, None, "mdi:eye", False], + "visibility_distance": [ + "Visibility Distance", + None, + LENGTH_KILOMETERS, + "mdi:eye", + False, + ], + "uv": ["UV Index", None, UV_INDEX, "mdi:weather-sunny-alert", True], + "precipitation": [ + "Probability of Precipitation", + None, + UNIT_PERCENTAGE, + "mdi:weather-rainy", + True, + ], + "humidity": ["Humidity", DEVICE_CLASS_HUMIDITY, UNIT_PERCENTAGE, None, False], } -PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( - { - vol.Required(CONF_API_KEY): cv.string, - vol.Required(CONF_MONITORED_CONDITIONS, default=[]): vol.All( - cv.ensure_list, [vol.In(SENSOR_TYPES)] - ), - vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string, - vol.Inclusive( - CONF_LATITUDE, "coordinates", "Latitude and longitude must exist together" - ): cv.latitude, - vol.Inclusive( - CONF_LONGITUDE, "coordinates", "Latitude and longitude must exist together" - ): cv.longitude, - } -) +async def async_setup_entry( + hass: HomeAssistantType, entry: ConfigType, async_add_entities +) -> None: + """Set up the Met Office weather sensor platform.""" + hass_data = hass.data[DOMAIN][entry.entry_id] -def setup_platform(hass, config, add_entities, discovery_info=None): - """Set up the Met Office 
sensor platform.""" - api_key = config.get(CONF_API_KEY) - latitude = config.get(CONF_LATITUDE, hass.config.latitude) - longitude = config.get(CONF_LONGITUDE, hass.config.longitude) - name = config.get(CONF_NAME) - - datapoint = dp.connection(api_key=api_key) - - if None in (latitude, longitude): - _LOGGER.error("Latitude or longitude not set in Home Assistant config") - return - - try: - site = datapoint.get_nearest_site(latitude=latitude, longitude=longitude) - except dp.exceptions.APIException as err: - _LOGGER.error("Received error from Met Office Datapoint: %s", err) - return - - if not site: - _LOGGER.error("Unable to get nearest Met Office forecast site") - return - - data = MetOfficeCurrentData(hass, datapoint, site) - data.update() - if data.data is None: - return - - sensors = [] - for variable in config[CONF_MONITORED_CONDITIONS]: - sensors.append(MetOfficeCurrentSensor(site, data, variable, name)) - - add_entities(sensors, True) + async_add_entities( + [ + MetOfficeCurrentSensor(entry.data, hass_data, sensor_type) + for sensor_type in SENSOR_TYPES + ], + False, + ) class MetOfficeCurrentSensor(Entity): - """Implementation of a Met Office current sensor.""" + """Implementation of a Met Office current weather condition sensor.""" - def __init__(self, site, data, condition, name): + def __init__(self, entry_data, hass_data, sensor_type): """Initialize the sensor.""" - self._condition = condition - self.data = data - self._name = name - self.site = site + self._data = hass_data[METOFFICE_DATA] + self._coordinator = hass_data[METOFFICE_COORDINATOR] + + self._type = sensor_type + self._name = f"{hass_data[METOFFICE_NAME]} {SENSOR_TYPES[self._type][0]}" + self._unique_id = f"{SENSOR_TYPES[self._type][0]}_{self._data.latitude}_{self._data.longitude}" + + self.metoffice_site_id = None + self.metoffice_site_name = None + self.metoffice_now = None @property def name(self): """Return the name of the sensor.""" - return f"{self._name} {SENSOR_TYPES[self._condition][0]}" + return self._name + + @property + def unique_id(self): + """Return the unique of the sensor.""" + return self._unique_id @property def state(self): """Return the state of the sensor.""" - if self._condition == "visibility_distance" and hasattr( - self.data.data, "visibility" + value = None + + if self._type == "visibility_distance" and hasattr( + self.metoffice_now, "visibility" ): - return VISIBILITY_CLASSES.get(self.data.data.visibility.value) - if hasattr(self.data.data, self._condition): - variable = getattr(self.data.data, self._condition) - if self._condition == "weather": - return [ - k - for k, v in CONDITION_CLASSES.items() - if self.data.data.weather.value in v - ][0] - return variable.value - return None + value = VISIBILITY_DISTANCE_CLASSES.get(self.metoffice_now.visibility.value) + + if self._type == "visibility" and hasattr(self.metoffice_now, "visibility"): + value = VISIBILITY_CLASSES.get(self.metoffice_now.visibility.value) + + elif self._type == "weather" and hasattr(self.metoffice_now, self._type): + value = [ + k + for k, v in CONDITION_CLASSES.items() + if self.metoffice_now.weather.value in v + ][0] + + elif hasattr(self.metoffice_now, self._type): + value = getattr(self.metoffice_now, self._type) + + if not isinstance(value, int): + value = value.value + + return value @property def unit_of_measurement(self): """Return the unit of measurement.""" - return SENSOR_TYPES[self._condition][1] + return SENSOR_TYPES[self._type][2] + + @property + def icon(self): + """Return the icon for the entity card.""" 
+ value = SENSOR_TYPES[self._type][3] + if self._type == "weather": + value = self.state + if value is None: + value = "sunny" + elif value == "partlycloudy": + value = "partly-cloudy" + value = f"mdi:weather-{value}" + + return value + + @property + def device_class(self): + """Return the device class of the sensor.""" + return SENSOR_TYPES[self._type][1] @property def device_state_attributes(self): """Return the state attributes of the device.""" - attr = {} - attr[ATTR_ATTRIBUTION] = ATTRIBUTION - attr[ATTR_LAST_UPDATE] = self.data.data.date - attr[ATTR_SENSOR_ID] = self._condition - attr[ATTR_SITE_ID] = self.site.id - attr[ATTR_SITE_NAME] = self.site.name - return attr + return { + ATTR_ATTRIBUTION: ATTRIBUTION, + ATTR_LAST_UPDATE: self.metoffice_now.date if self.metoffice_now else None, + ATTR_SENSOR_ID: self._type, + ATTR_SITE_ID: self.metoffice_site_id if self.metoffice_site_id else None, + ATTR_SITE_NAME: self.metoffice_site_name + if self.metoffice_site_name + else None, + } - def update(self): - """Update current conditions.""" - self.data.update() + async def async_added_to_hass(self) -> None: + """Set up a listener and load data.""" + self.async_on_remove( + self._coordinator.async_add_listener(self._update_callback) + ) + self._update_callback() + async def async_update(self): + """Schedule a custom update via the common entity update service.""" + await self._coordinator.async_request_refresh() -class MetOfficeCurrentData: - """Get data from Datapoint.""" + @callback + def _update_callback(self) -> None: + """Load data from integration.""" + self.metoffice_site_id = self._data.site_id + self.metoffice_site_name = self._data.site_name + self.metoffice_now = self._data.now + self.async_write_ha_state() - def __init__(self, hass, datapoint, site): - """Initialize the data object.""" - self._datapoint = datapoint - self._site = site - self.data = None + @property + def should_poll(self) -> bool: + """Entities do not individually poll.""" + return False - @Throttle(MIN_TIME_BETWEEN_UPDATES) - def update(self): - """Get the latest data from Datapoint.""" - try: - forecast = self._datapoint.get_forecast_for_site(self._site.id, "3hourly") - self.data = forecast.now() - except (ValueError, dp.exceptions.APIException) as err: - _LOGGER.error("Check Met Office %s", err.args) - self.data = None + @property + def entity_registry_enabled_default(self) -> bool: + """Return if the entity should be enabled when first added to the entity registry.""" + return SENSOR_TYPES[self._type][4] + + @property + def available(self): + """Return if state is available.""" + return self.metoffice_site_id is not None and self.metoffice_now is not None diff --git a/homeassistant/components/metoffice/strings.json b/homeassistant/components/metoffice/strings.json new file mode 100644 index 00000000000..74d8b16542a --- /dev/null +++ b/homeassistant/components/metoffice/strings.json @@ -0,0 +1,22 @@ +{ + "config": { + "step": { + "user": { + "description": "The latitude and longitude will be used to find the closest weather station.", + "title": "Connect to the UK Met Office", + "data": { + "api_key": "Met Office DataPoint API key", + "latitude": "Latitude", + "longitude": "Longitude" + } + } + }, + "error": { + "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]", + "unknown": "[%key:common::config_flow::error::unknown%]" + }, + "abort": { + "already_configured": "[%key:common::config_flow::abort::already_configured_device%]" + } + } +} \ No newline at end of file diff --git 
a/homeassistant/components/metoffice/translations/ca.json b/homeassistant/components/metoffice/translations/ca.json new file mode 100644 index 00000000000..6b90228c254 --- /dev/null +++ b/homeassistant/components/metoffice/translations/ca.json @@ -0,0 +1,22 @@ +{ + "config": { + "abort": { + "already_configured": "El dispositiu ja est\u00e0 configurat" + }, + "error": { + "cannot_connect": "No s'ha pogut connectar", + "unknown": "Error inesperat" + }, + "step": { + "user": { + "data": { + "api_key": "Clau API DataPoint de Met Office", + "latitude": "Latitud", + "longitude": "Longitud" + }, + "description": "La latitud i la longitud s'utilitzaran per trobar l'estaci\u00f3 meteorol\u00f2gica m\u00e9s propera.", + "title": "Connecta't amb Met Office (UK)" + } + } + } +} \ No newline at end of file diff --git a/homeassistant/components/metoffice/translations/en.json b/homeassistant/components/metoffice/translations/en.json new file mode 100644 index 00000000000..341102b688d --- /dev/null +++ b/homeassistant/components/metoffice/translations/en.json @@ -0,0 +1,22 @@ +{ + "config": { + "abort": { + "already_configured": "Device is already configured" + }, + "error": { + "cannot_connect": "Failed to connect", + "unknown": "Unexpected error" + }, + "step": { + "user": { + "data": { + "api_key": "Met Office DataPoint API key", + "latitude": "Latitude", + "longitude": "Longitude" + }, + "description": "The latitude and longitude will be used to find the closest weather station.", + "title": "Connect to the UK Met Office" + } + } + } +} \ No newline at end of file diff --git a/homeassistant/components/metoffice/translations/es.json b/homeassistant/components/metoffice/translations/es.json new file mode 100644 index 00000000000..8412a08d55d --- /dev/null +++ b/homeassistant/components/metoffice/translations/es.json @@ -0,0 +1,22 @@ +{ + "config": { + "abort": { + "already_configured": "El dispositivo ya est\u00e1 configurado" + }, + "error": { + "cannot_connect": "No se pudo conectar", + "unknown": "Error inesperado" + }, + "step": { + "user": { + "data": { + "api_key": "Clave API de Met Office DataPoint", + "latitude": "Latitud", + "longitude": "Longitud" + }, + "description": "La latitud y la longitud se utilizar\u00e1n para encontrar la estaci\u00f3n meteorol\u00f3gica m\u00e1s cercana.", + "title": "Con\u00e9ctar con la Oficina Meteorol\u00f3gica del Reino Unido" + } + } + } +} \ No newline at end of file diff --git a/homeassistant/components/metoffice/translations/fr.json b/homeassistant/components/metoffice/translations/fr.json new file mode 100644 index 00000000000..9d687394cd1 --- /dev/null +++ b/homeassistant/components/metoffice/translations/fr.json @@ -0,0 +1,13 @@ +{ + "config": { + "step": { + "user": { + "data": { + "latitude": "Latitude", + "longitude": "Longitude" + }, + "description": "La latitude et la longitude seront utilis\u00e9es pour trouver la station m\u00e9t\u00e9o la plus proche." 
+ } + } + } +} \ No newline at end of file diff --git a/homeassistant/components/metoffice/translations/it.json b/homeassistant/components/metoffice/translations/it.json new file mode 100644 index 00000000000..ef2a25b5eea --- /dev/null +++ b/homeassistant/components/metoffice/translations/it.json @@ -0,0 +1,22 @@ +{ + "config": { + "abort": { + "already_configured": "Il dispositivo \u00e8 gi\u00e0 configurato" + }, + "error": { + "cannot_connect": "Impossibile connettersi", + "unknown": "Errore imprevisto" + }, + "step": { + "user": { + "data": { + "api_key": "Chiave API Met Office DataPoint", + "latitude": "Latitudine", + "longitude": "Logitudine" + }, + "description": "La latitudine e la longitudine verranno utilizzate per trovare la stazione meteorologica pi\u00f9 vicina.", + "title": "Connettiti al Met Office del Regno Unito" + } + } + } +} \ No newline at end of file diff --git a/homeassistant/components/metoffice/translations/ko.json b/homeassistant/components/metoffice/translations/ko.json new file mode 100644 index 00000000000..b1af2afaf30 --- /dev/null +++ b/homeassistant/components/metoffice/translations/ko.json @@ -0,0 +1,22 @@ +{ + "config": { + "abort": { + "already_configured": "\uae30\uae30\uac00 \uc774\ubbf8 \uad6c\uc131\ub418\uc5c8\uc2b5\ub2c8\ub2e4" + }, + "error": { + "cannot_connect": "\uc5f0\uacb0\ud558\uc9c0 \ubabb\ud588\uc2b5\ub2c8\ub2e4", + "unknown": "\uc608\uc0c1\uce58 \ubabb\ud55c \uc624\ub958\uac00 \ubc1c\uc0dd\ud588\uc2b5\ub2c8\ub2e4" + }, + "step": { + "user": { + "data": { + "api_key": "\uc601\uad6d \uae30\uc0c1\uccad DataPoint API \ud0a4", + "latitude": "\uc704\ub3c4", + "longitude": "\uacbd\ub3c4" + }, + "description": "\uc704\ub3c4\uc640 \uacbd\ub3c4\ub97c \uae30\ubc18\uc73c\ub85c \uac00\uc7a5 \uac00\uae4c\uc6b4 \uae30\uc0c1 \uad00\uce21\uc18c\ub97c \ucc3e\uc544 \uc0ac\uc6a9\ud569\ub2c8\ub2e4.", + "title": "\uc601\uad6d \uae30\uc0c1\uccad\uc5d0 \uc5f0\uacb0\ud558\uae30" + } + } + } +} \ No newline at end of file diff --git a/homeassistant/components/metoffice/translations/lb.json b/homeassistant/components/metoffice/translations/lb.json new file mode 100644 index 00000000000..26ee66d9786 --- /dev/null +++ b/homeassistant/components/metoffice/translations/lb.json @@ -0,0 +1,14 @@ +{ + "config": { + "step": { + "user": { + "data": { + "api_key": "Met Office DataPoint API Schl\u00ebssel", + "latitude": "Breedegrad", + "longitude": "L\u00e4ngegrad" + }, + "description": "L\u00e4ngegrad a Breedegrad gi benotzt fir d\u00e9i nooste Statioun auszewielen." 
+ } + } + } +} \ No newline at end of file diff --git a/homeassistant/components/metoffice/translations/no.json b/homeassistant/components/metoffice/translations/no.json new file mode 100644 index 00000000000..0711e25b73f --- /dev/null +++ b/homeassistant/components/metoffice/translations/no.json @@ -0,0 +1,15 @@ +{ + "config": { + "step": { + "user": { + "data": { + "api_key": "Met Office DataPoint API-n\u00f8kkel", + "latitude": "Breddegrad", + "longitude": "Lengdegrad" + }, + "description": "Breddegraden og lengdegraden vil bli brukt til \u00e5 finne den n\u00e6rmeste v\u00e6rstasjonen.", + "title": "Koble til UK Met Office" + } + } + } +} \ No newline at end of file diff --git a/homeassistant/components/metoffice/translations/pl.json b/homeassistant/components/metoffice/translations/pl.json new file mode 100644 index 00000000000..7167faf5494 --- /dev/null +++ b/homeassistant/components/metoffice/translations/pl.json @@ -0,0 +1,22 @@ +{ + "config": { + "abort": { + "already_configured": "Urz\u0105dzenie jest ju\u017c skonfigurowane." + }, + "error": { + "cannot_connect": "Nie mo\u017cna nawi\u0105za\u0107 po\u0142\u0105czenia.", + "unknown": "Nieoczekiwany b\u0142\u0105d." + }, + "step": { + "user": { + "data": { + "api_key": "Klucz API", + "latitude": "Szeroko\u015b\u0107 geograficzna", + "longitude": "D\u0142ugo\u015b\u0107 geograficzna" + }, + "description": "Szeroko\u015b\u0107 i d\u0142ugo\u015b\u0107 geograficzna zostan\u0105 wykorzystane do znalezienia najbli\u017cszej stacji pogodowej.", + "title": "Po\u0142\u0105czenie z UK Met Office" + } + } + } +} \ No newline at end of file diff --git a/homeassistant/components/metoffice/translations/ru.json b/homeassistant/components/metoffice/translations/ru.json new file mode 100644 index 00000000000..2b9716439eb --- /dev/null +++ b/homeassistant/components/metoffice/translations/ru.json @@ -0,0 +1,22 @@ +{ + "config": { + "abort": { + "already_configured": "\u041d\u0430\u0441\u0442\u0440\u043e\u0439\u043a\u0430 \u044d\u0442\u043e\u0433\u043e \u0443\u0441\u0442\u0440\u043e\u0439\u0441\u0442\u0432\u0430 \u0443\u0436\u0435 \u0432\u044b\u043f\u043e\u043b\u043d\u0435\u043d\u0430." + }, + "error": { + "cannot_connect": "\u041e\u0448\u0438\u0431\u043a\u0430 \u043f\u043e\u0434\u043a\u043b\u044e\u0447\u0435\u043d\u0438\u044f.", + "unknown": "\u041d\u0435\u043f\u0440\u0435\u0434\u0432\u0438\u0434\u0435\u043d\u043d\u0430\u044f \u043e\u0448\u0438\u0431\u043a\u0430." 
+ }, + "step": { + "user": { + "data": { + "api_key": "\u041a\u043b\u044e\u0447 API Met Office DataPoint", + "latitude": "\u0428\u0438\u0440\u043e\u0442\u0430", + "longitude": "\u0414\u043e\u043b\u0433\u043e\u0442\u0430" + }, + "description": "\u0428\u0438\u0440\u043e\u0442\u0430 \u0438 \u0434\u043e\u043b\u0433\u043e\u0442\u0430 \u0431\u0443\u0434\u0443\u0442 \u0438\u0441\u043f\u043e\u043b\u044c\u0437\u043e\u0432\u0430\u043d\u044b \u0434\u043b\u044f \u043f\u043e\u0438\u0441\u043a\u0430 \u0431\u043b\u0438\u0436\u0430\u0439\u0448\u0435\u0439 \u043c\u0435\u0442\u0435\u043e\u0441\u0442\u0430\u043d\u0446\u0438\u0438.", + "title": "\u041f\u043e\u0434\u043a\u043b\u044e\u0447\u0435\u043d\u0438\u0435 \u043a Met Office UK" + } + } + } +} \ No newline at end of file diff --git a/homeassistant/components/metoffice/translations/zh-Hant.json b/homeassistant/components/metoffice/translations/zh-Hant.json new file mode 100644 index 00000000000..3b89a94f6b0 --- /dev/null +++ b/homeassistant/components/metoffice/translations/zh-Hant.json @@ -0,0 +1,22 @@ +{ + "config": { + "abort": { + "already_configured": "\u8a2d\u5099\u5df2\u7d93\u8a2d\u5b9a\u5b8c\u6210" + }, + "error": { + "cannot_connect": "\u9023\u7dda\u5931\u6557", + "unknown": "\u672a\u9810\u671f\u932f\u8aa4" + }, + "step": { + "user": { + "data": { + "api_key": "Met Office DataPoint API \u5bc6\u9470", + "latitude": "\u7def\u5ea6", + "longitude": "\u7d93\u5ea6" + }, + "description": "\u5c07\u6703\u4f7f\u7528\u7d93\u7def\u5ea6\u8cc7\u8a0a\u5c0b\u627e\u6700\u8fd1\u7684\u6c23\u8c61\u89c0\u6e2c\u7ad9\u3002", + "title": "\u9023\u7dda\u81f3 UK Met Office" + } + } + } +} \ No newline at end of file diff --git a/homeassistant/components/metoffice/weather.py b/homeassistant/components/metoffice/weather.py index 09350588d46..f94c2a4ad7a 100644 --- a/homeassistant/components/metoffice/weather.py +++ b/homeassistant/components/metoffice/weather.py @@ -1,127 +1,161 @@ """Support for UK Met Office weather service.""" + import logging -import datapoint as dp -import voluptuous as vol +from homeassistant.components.weather import WeatherEntity +from homeassistant.const import LENGTH_KILOMETERS, TEMP_CELSIUS +from homeassistant.core import callback +from homeassistant.helpers.typing import ConfigType, HomeAssistantType -from homeassistant.components.weather import PLATFORM_SCHEMA, WeatherEntity -from homeassistant.const import ( - CONF_API_KEY, - CONF_LATITUDE, - CONF_LONGITUDE, - CONF_NAME, - TEMP_CELSIUS, +from .const import ( + ATTRIBUTION, + CONDITION_CLASSES, + DEFAULT_NAME, + DOMAIN, + METOFFICE_COORDINATOR, + METOFFICE_DATA, + METOFFICE_NAME, + VISIBILITY_CLASSES, + VISIBILITY_DISTANCE_CLASSES, ) -from homeassistant.helpers import config_validation as cv - -from .sensor import ATTRIBUTION, CONDITION_CLASSES, MetOfficeCurrentData _LOGGER = logging.getLogger(__name__) -DEFAULT_NAME = "Met Office" -PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( - { - vol.Required(CONF_API_KEY): cv.string, - vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string, - vol.Inclusive( - CONF_LATITUDE, "coordinates", "Latitude and longitude must exist together" - ): cv.latitude, - vol.Inclusive( - CONF_LONGITUDE, "coordinates", "Latitude and longitude must exist together" - ): cv.longitude, - } -) +async def async_setup_entry( + hass: HomeAssistantType, entry: ConfigType, async_add_entities +) -> None: + """Set up the Met Office weather sensor platform.""" + hass_data = hass.data[DOMAIN][entry.entry_id] - -def setup_platform(hass, config, add_entities, discovery_info=None): - """Set up 
the Met Office weather platform.""" - name = config.get(CONF_NAME) - datapoint = dp.connection(api_key=config.get(CONF_API_KEY)) - - latitude = config.get(CONF_LATITUDE, hass.config.latitude) - longitude = config.get(CONF_LONGITUDE, hass.config.longitude) - - if None in (latitude, longitude): - _LOGGER.error("Latitude or longitude not set in Home Assistant config") - return - - try: - site = datapoint.get_nearest_site(latitude=latitude, longitude=longitude) - except dp.exceptions.APIException as err: - _LOGGER.error("Received error from Met Office Datapoint: %s", err) - return - - if not site: - _LOGGER.error("Unable to get nearest Met Office forecast site") - return - - data = MetOfficeCurrentData(hass, datapoint, site) - try: - data.update() - except (ValueError, dp.exceptions.APIException) as err: - _LOGGER.error("Received error from Met Office Datapoint: %s", err) - return - - add_entities([MetOfficeWeather(site, data, name)], True) + async_add_entities( + [MetOfficeWeather(entry.data, hass_data,)], False, + ) class MetOfficeWeather(WeatherEntity): """Implementation of a Met Office weather condition.""" - def __init__(self, site, data, name): - """Initialise the platform with a data instance and site.""" - self._name = name - self.data = data - self.site = site + def __init__(self, entry_data, hass_data): + """Initialise the platform with a data instance.""" + self._data = hass_data[METOFFICE_DATA] + self._coordinator = hass_data[METOFFICE_COORDINATOR] - def update(self): - """Update current conditions.""" - self.data.update() + self._name = f"{DEFAULT_NAME} {hass_data[METOFFICE_NAME]}" + self._unique_id = f"{self._data.latitude}_{self._data.longitude}" + + self.metoffice_now = None @property def name(self): """Return the name of the sensor.""" - return f"{self._name} {self.site.name}" + return self._name + + @property + def unique_id(self): + """Return the unique of the sensor.""" + return self._unique_id @property def condition(self): """Return the current condition.""" - return [ - k for k, v in CONDITION_CLASSES.items() if self.data.data.weather.value in v - ][0] + return ( + [ + k + for k, v in CONDITION_CLASSES.items() + if self.metoffice_now.weather.value in v + ][0] + if self.metoffice_now + else None + ) @property def temperature(self): """Return the platform temperature.""" - return self.data.data.temperature.value + return ( + self.metoffice_now.temperature.value + if self.metoffice_now and self.metoffice_now.temperature + else None + ) @property def temperature_unit(self): """Return the unit of measurement.""" return TEMP_CELSIUS + @property + def visibility(self): + """Return the platform visibility.""" + _visibility = None + if hasattr(self.metoffice_now, "visibility"): + _visibility = f"{VISIBILITY_CLASSES.get(self.metoffice_now.visibility.value)} - {VISIBILITY_DISTANCE_CLASSES.get(self.metoffice_now.visibility.value)}" + return _visibility + + @property + def visibility_unit(self): + """Return the unit of measurement.""" + return LENGTH_KILOMETERS + @property def pressure(self): """Return the mean sea-level pressure.""" - return None + return ( + self.metoffice_now.pressure.value + if self.metoffice_now and self.metoffice_now.pressure + else None + ) @property def humidity(self): """Return the relative humidity.""" - return self.data.data.humidity.value + return ( + self.metoffice_now.humidity.value + if self.metoffice_now and self.metoffice_now.humidity + else None + ) @property def wind_speed(self): """Return the wind speed.""" - return self.data.data.wind_speed.value 
+ return ( + self.metoffice_now.wind_speed.value + if self.metoffice_now and self.metoffice_now.wind_speed + else None + ) @property def wind_bearing(self): """Return the wind bearing.""" - return self.data.data.wind_direction.value + return ( + self.metoffice_now.wind_direction.value + if self.metoffice_now and self.metoffice_now.wind_direction + else None + ) @property def attribution(self): """Return the attribution.""" return ATTRIBUTION + + async def async_added_to_hass(self) -> None: + """Set up a listener and load data.""" + self.async_on_remove( + self._coordinator.async_add_listener(self._update_callback) + ) + self._update_callback() + + @callback + def _update_callback(self) -> None: + """Load data from integration.""" + self.metoffice_now = self._data.now + self.async_write_ha_state() + + @property + def should_poll(self) -> bool: + """Entities do not individually poll.""" + return False + + @property + def available(self): + """Return if state is available.""" + return self.metoffice_now is not None diff --git a/homeassistant/components/mobile_app/const.py b/homeassistant/components/mobile_app/const.py index 6e83a08c508..6174e34f57a 100644 --- a/homeassistant/components/mobile_app/const.py +++ b/homeassistant/components/mobile_app/const.py @@ -72,3 +72,5 @@ ATTR_SENSOR_UOM = "unit_of_measurement" SIGNAL_SENSOR_UPDATE = f"{DOMAIN}_sensor_update" SIGNAL_LOCATION_UPDATE = DOMAIN + "_location_update_{}" + +ATTR_CAMERA_ENTITY_ID = "camera_entity_id" diff --git a/homeassistant/components/mobile_app/http_api.py b/homeassistant/components/mobile_app/http_api.py index 7d8d6c28243..a5a96b83bc6 100644 --- a/homeassistant/components/mobile_app/http_api.py +++ b/homeassistant/components/mobile_app/http_api.py @@ -3,6 +3,7 @@ import secrets from typing import Dict from aiohttp.web import Request, Response +import emoji from nacl.secret import SecretBox import voluptuous as vol @@ -10,6 +11,7 @@ from homeassistant.components.http import HomeAssistantView from homeassistant.components.http.data_validator import RequestDataValidator from homeassistant.const import CONF_WEBHOOK_ID, HTTP_CREATED from homeassistant.helpers import config_validation as cv +from homeassistant.util import slugify from .const import ( ATTR_APP_DATA, @@ -75,6 +77,20 @@ class RegistrationsView(HomeAssistantView): data[CONF_USER_ID] = request["hass_user"].id + if slugify(data[ATTR_DEVICE_NAME], separator=""): + # if slug is not empty and would not only be underscores + # use DEVICE_NAME + pass + elif emoji.emoji_count(data[ATTR_DEVICE_NAME]): + # If otherwise empty string contains emoji + # use descriptive name of the first emoji + data[ATTR_DEVICE_NAME] = emoji.demojize( + emoji.emoji_lis(data[ATTR_DEVICE_NAME])[0]["emoji"] + ).replace(":", "") + else: + # Fallback to DEVICE_ID + data[ATTR_DEVICE_NAME] = data[ATTR_DEVICE_ID] + await hass.async_create_task( hass.config_entries.flow.async_init( DOMAIN, data=data, context={"source": "registration"} diff --git a/homeassistant/components/mobile_app/manifest.json b/homeassistant/components/mobile_app/manifest.json index 0576a466d7e..61e90e6bd8e 100644 --- a/homeassistant/components/mobile_app/manifest.json +++ b/homeassistant/components/mobile_app/manifest.json @@ -3,9 +3,9 @@ "name": "Mobile App", "config_flow": true, "documentation": "https://www.home-assistant.io/integrations/mobile_app", - "requirements": ["PyNaCl==1.3.0"], + "requirements": ["PyNaCl==1.3.0", "emoji==0.5.4"], "dependencies": ["http", "webhook", "person"], - "after_dependencies": ["cloud"], + 
"after_dependencies": ["cloud", "camera"], "codeowners": ["@robbiet480"], "quality_scale": "internal" } diff --git a/homeassistant/components/mobile_app/translations/fi.json b/homeassistant/components/mobile_app/translations/fi.json deleted file mode 100644 index 373ae986d8d..00000000000 --- a/homeassistant/components/mobile_app/translations/fi.json +++ /dev/null @@ -1,9 +0,0 @@ -{ - "config": { - "step": { - "confirm": { - "title": "Mobiilisovellus" - } - } - } -} \ No newline at end of file diff --git a/homeassistant/components/mobile_app/translations/nn.json b/homeassistant/components/mobile_app/translations/nn.json deleted file mode 100644 index 25828e48db5..00000000000 --- a/homeassistant/components/mobile_app/translations/nn.json +++ /dev/null @@ -1,9 +0,0 @@ -{ - "config": { - "step": { - "confirm": { - "title": "Mobilapp" - } - } - } -} \ No newline at end of file diff --git a/homeassistant/components/mobile_app/translations/pt.json b/homeassistant/components/mobile_app/translations/pt.json deleted file mode 100644 index d2c326270e1..00000000000 --- a/homeassistant/components/mobile_app/translations/pt.json +++ /dev/null @@ -1,9 +0,0 @@ -{ - "config": { - "step": { - "confirm": { - "title": "Aplica\u00e7\u00e3o m\u00f3vel" - } - } - } -} \ No newline at end of file diff --git a/homeassistant/components/mobile_app/webhook.py b/homeassistant/components/mobile_app/webhook.py index c71f3699019..ca9c31011ed 100644 --- a/homeassistant/components/mobile_app/webhook.py +++ b/homeassistant/components/mobile_app/webhook.py @@ -11,6 +11,7 @@ import voluptuous as vol from homeassistant.components.binary_sensor import ( DEVICE_CLASSES as BINARY_SENSOR_CLASSES, ) +from homeassistant.components.camera import SUPPORT_STREAM as CAMERA_SUPPORT_STREAM from homeassistant.components.device_tracker import ( ATTR_BATTERY, ATTR_GPS, @@ -29,7 +30,7 @@ from homeassistant.const import ( HTTP_CREATED, ) from homeassistant.core import EventOrigin -from homeassistant.exceptions import ServiceNotFound, TemplateError +from homeassistant.exceptions import HomeAssistantError, ServiceNotFound, TemplateError from homeassistant.helpers import config_validation as cv, device_registry as dr from homeassistant.helpers.dispatcher import async_dispatcher_send from homeassistant.helpers.template import attach @@ -40,6 +41,7 @@ from .const import ( ATTR_ALTITUDE, ATTR_APP_DATA, ATTR_APP_VERSION, + ATTR_CAMERA_ENTITY_ID, ATTR_COURSE, ATTR_DEVICE_ID, ATTR_DEVICE_NAME, @@ -240,6 +242,32 @@ async def webhook_fire_event(hass, config_entry, data): return empty_okay_response() +@WEBHOOK_COMMANDS.register("stream_camera") +@validate_schema({vol.Required(ATTR_CAMERA_ENTITY_ID): cv.string}) +async def webhook_stream_camera(hass, config_entry, data): + """Handle a request to HLS-stream a camera.""" + camera = hass.states.get(data[ATTR_CAMERA_ENTITY_ID]) + + if camera is None: + return webhook_response( + {"success": False}, registration=config_entry.data, status=HTTP_BAD_REQUEST, + ) + + resp = {"mjpeg_path": "/api/camera_proxy_stream/%s" % (camera.entity_id)} + + if camera.attributes["supported_features"] & CAMERA_SUPPORT_STREAM: + try: + resp["hls_path"] = await hass.components.camera.async_request_stream( + camera.entity_id, "hls" + ) + except HomeAssistantError: + resp["hls_path"] = None + else: + resp["hls_path"] = None + + return webhook_response(resp, registration=config_entry.data) + + @WEBHOOK_COMMANDS.register("render_template") @validate_schema( { diff --git a/homeassistant/components/monoprice/translations/no.json 
b/homeassistant/components/monoprice/translations/no.json index b95b9496951..3de551f073a 100644 --- a/homeassistant/components/monoprice/translations/no.json +++ b/homeassistant/components/monoprice/translations/no.json @@ -10,7 +10,7 @@ "step": { "user": { "data": { - "port": "Seriell port", + "port": "Port", "source_1": "Navn p\u00e5 kilden #1", "source_2": "Navn p\u00e5 kilden #2", "source_3": "Navn p\u00e5 kilden #3", diff --git a/homeassistant/components/moon/translations/sensor.ar.json b/homeassistant/components/moon/translations/sensor.ar.json deleted file mode 100644 index 94af741f5f4..00000000000 --- a/homeassistant/components/moon/translations/sensor.ar.json +++ /dev/null @@ -1,6 +0,0 @@ -{ - "state": { - "first_quarter": "\u0627\u0644\u0631\u0628\u0639 \u0627\u0644\u0623\u0648\u0644", - "full_moon": "\u0627\u0644\u0642\u0645\u0631 \u0627\u0644\u0643\u0627\u0645\u0644" - } -} \ No newline at end of file diff --git a/homeassistant/components/moon/translations/sensor.et.json b/homeassistant/components/moon/translations/sensor.et.json deleted file mode 100644 index 0d82e0d8f94..00000000000 --- a/homeassistant/components/moon/translations/sensor.et.json +++ /dev/null @@ -1,12 +0,0 @@ -{ - "state": { - "first_quarter": "Esimene veerand", - "full_moon": "T\u00e4iskuu", - "last_quarter": "Viimane veerand", - "new_moon": "Kuu loomine", - "waning_crescent": "Vanakuu", - "waning_gibbous": "Kahanev kuu", - "waxing_crescent": "Noorkuu", - "waxing_gibbous": "Kasvav kuu" - } -} \ No newline at end of file diff --git a/homeassistant/components/moon/translations/sensor.he.json b/homeassistant/components/moon/translations/sensor.he.json deleted file mode 100644 index 6531d3c8265..00000000000 --- a/homeassistant/components/moon/translations/sensor.he.json +++ /dev/null @@ -1,12 +0,0 @@ -{ - "state": { - "first_quarter": "\u05e8\u05d1\u05e2\u05d5\u05df \u05e8\u05d0\u05e9\u05d5\u05df", - "full_moon": "\u05d9\u05e8\u05d7 \u05de\u05dc\u05d0", - "last_quarter": "\u05e8\u05d1\u05e2\u05d5\u05df \u05d0\u05d7\u05e8\u05d5\u05df", - "new_moon": "\u05e8\u05d0\u05e9 \u05d7\u05d5\u05d3\u05e9", - "waning_crescent": "Waning crescent", - "waning_gibbous": "Waning gibbous", - "waxing_crescent": "Waxing crescent", - "waxing_gibbous": "Waxing gibbous" - } -} \ No newline at end of file diff --git a/homeassistant/components/moon/translations/sensor.id.json b/homeassistant/components/moon/translations/sensor.id.json deleted file mode 100644 index 3ce14204fb5..00000000000 --- a/homeassistant/components/moon/translations/sensor.id.json +++ /dev/null @@ -1,12 +0,0 @@ -{ - "state": { - "first_quarter": "Babak pertama", - "full_moon": "Bulan purnama", - "last_quarter": "Kuartal terakhir", - "new_moon": "Bulan baru", - "waning_crescent": "Waning crescent", - "waning_gibbous": "Waning gibbous", - "waxing_crescent": "Waxing crescent", - "waxing_gibbous": "Waxing gibbous" - } -} \ No newline at end of file diff --git a/homeassistant/components/moon/translations/sensor.nn.json b/homeassistant/components/moon/translations/sensor.nn.json deleted file mode 100644 index 7c516bcce50..00000000000 --- a/homeassistant/components/moon/translations/sensor.nn.json +++ /dev/null @@ -1,12 +0,0 @@ -{ - "state": { - "first_quarter": "Fyrste kvartal", - "full_moon": "Fullm\u00e5ne", - "last_quarter": "Siste kvartal", - "new_moon": "Nym\u00e5ne", - "waning_crescent": "Minkande halvm\u00e5ne", - "waning_gibbous": "Minkande m\u00e5ne", - "waxing_crescent": "Veksande halvm\u00e5ne", - "waxing_gibbous": "Veksande m\u00e5ne" - } -} \ No newline at 
end of file diff --git a/homeassistant/components/moon/translations/sensor.ro.json b/homeassistant/components/moon/translations/sensor.ro.json deleted file mode 100644 index 6f64e497c74..00000000000 --- a/homeassistant/components/moon/translations/sensor.ro.json +++ /dev/null @@ -1,6 +0,0 @@ -{ - "state": { - "full_moon": "Lun\u0103 plin\u0103", - "new_moon": "Lun\u0103 nou\u0103" - } -} \ No newline at end of file diff --git a/homeassistant/components/mqtt/__init__.py b/homeassistant/components/mqtt/__init__.py index 2be31895979..bb2ec7a8bcb 100644 --- a/homeassistant/components/mqtt/__init__.py +++ b/homeassistant/components/mqtt/__init__.py @@ -8,11 +8,10 @@ import logging from operator import attrgetter import os import ssl -import sys from typing import Any, Callable, List, Optional, Union import attr -import requests.certs +import certifi import voluptuous as vol from homeassistant import config_entries @@ -32,7 +31,7 @@ from homeassistant.const import ( from homeassistant.core import Event, ServiceCall, callback from homeassistant.exceptions import HomeAssistantError, Unauthorized from homeassistant.helpers import config_validation as cv, event, template -from homeassistant.helpers.dispatcher import async_dispatcher_connect +from homeassistant.helpers.dispatcher import async_dispatcher_connect, dispatcher_send from homeassistant.helpers.entity import Entity from homeassistant.helpers.typing import ConfigType, HomeAssistantType, ServiceDataType from homeassistant.loader import bind_hass @@ -42,21 +41,33 @@ from homeassistant.util.logging import catch_log_exception # Loading the config flow file will register the flow from . import config_flow # noqa: F401 pylint: disable=unused-import -from . import debug_info, discovery, server +from . import debug_info, discovery from .const import ( ATTR_DISCOVERY_HASH, ATTR_DISCOVERY_TOPIC, + ATTR_PAYLOAD, + ATTR_QOS, + ATTR_RETAIN, + ATTR_TOPIC, + CONF_BIRTH_MESSAGE, CONF_BROKER, CONF_DISCOVERY, + CONF_QOS, + CONF_RETAIN, CONF_STATE_TOPIC, + CONF_WILL_MESSAGE, DEFAULT_DISCOVERY, DEFAULT_QOS, + DEFAULT_RETAIN, + MQTT_CONNECTED, + MQTT_DISCONNECTED, PROTOCOL_311, ) from .debug_info import log_messages from .discovery import MQTT_DISCOVERY_UPDATED, clear_discovery_hash, set_discovery_hash from .models import Message, MessageCallbackType, PublishPayloadType from .subscription import async_subscribe_topics, async_unsubscribe_topics +from .util import _VALID_QOS_SCHEMA, valid_publish_topic, valid_subscribe_topic _LOGGER = logging.getLogger(__name__) @@ -64,13 +75,10 @@ DOMAIN = "mqtt" DATA_MQTT = "mqtt" DATA_MQTT_CONFIG = "mqtt_config" -DATA_MQTT_HASS_CONFIG = "mqtt_hass_config" SERVICE_PUBLISH = "publish" SERVICE_DUMP = "dump" -CONF_EMBEDDED = "embedded" - CONF_DISCOVERY_PREFIX = "discovery_prefix" CONF_KEEPALIVE = "keepalive" CONF_CERTIFICATE = "certificate" @@ -79,17 +87,12 @@ CONF_CLIENT_CERT = "client_cert" CONF_TLS_INSECURE = "tls_insecure" CONF_TLS_VERSION = "tls_version" -CONF_BIRTH_MESSAGE = "birth_message" -CONF_WILL_MESSAGE = "will_message" - CONF_COMMAND_TOPIC = "command_topic" CONF_AVAILABILITY_TOPIC = "availability_topic" CONF_PAYLOAD_AVAILABLE = "payload_available" CONF_PAYLOAD_NOT_AVAILABLE = "payload_not_available" CONF_JSON_ATTRS_TOPIC = "json_attributes_topic" CONF_JSON_ATTRS_TEMPLATE = "json_attributes_template" -CONF_QOS = "qos" -CONF_RETAIN = "retain" CONF_UNIQUE_ID = "unique_id" CONF_IDENTIFIERS = "identifiers" @@ -104,18 +107,13 @@ PROTOCOL_31 = "3.1" DEFAULT_PORT = 1883 DEFAULT_KEEPALIVE = 60 -DEFAULT_RETAIN = False 
DEFAULT_PROTOCOL = PROTOCOL_311 DEFAULT_DISCOVERY_PREFIX = "homeassistant" DEFAULT_TLS_PROTOCOL = "auto" DEFAULT_PAYLOAD_AVAILABLE = "online" DEFAULT_PAYLOAD_NOT_AVAILABLE = "offline" -ATTR_TOPIC = "topic" -ATTR_PAYLOAD = "payload" ATTR_PAYLOAD_TEMPLATE = "payload_template" -ATTR_QOS = CONF_QOS -ATTR_RETAIN = CONF_RETAIN MAX_RECONNECT_WAIT = 300 # seconds @@ -124,59 +122,6 @@ CONNECTION_FAILED = "connection_failed" CONNECTION_FAILED_RECOVERABLE = "connection_failed_recoverable" -def valid_topic(value: Any) -> str: - """Validate that this is a valid topic name/filter.""" - value = cv.string(value) - try: - raw_value = value.encode("utf-8") - except UnicodeError: - raise vol.Invalid("MQTT topic name/filter must be valid UTF-8 string.") - if not raw_value: - raise vol.Invalid("MQTT topic name/filter must not be empty.") - if len(raw_value) > 65535: - raise vol.Invalid( - "MQTT topic name/filter must not be longer than 65535 encoded bytes." - ) - if "\0" in value: - raise vol.Invalid("MQTT topic name/filter must not contain null character.") - return value - - -def valid_subscribe_topic(value: Any) -> str: - """Validate that we can subscribe using this MQTT topic.""" - value = valid_topic(value) - for i in (i for i, c in enumerate(value) if c == "+"): - if (i > 0 and value[i - 1] != "/") or ( - i < len(value) - 1 and value[i + 1] != "/" - ): - raise vol.Invalid( - "Single-level wildcard must occupy an entire level of the filter" - ) - - index = value.find("#") - if index != -1: - if index != len(value) - 1: - # If there are multiple wildcards, this will also trigger - raise vol.Invalid( - "Multi-level wildcard must be the last " - "character in the topic filter." - ) - if len(value) > 1 and value[index - 1] != "/": - raise vol.Invalid( - "Multi-level wildcard must be after a topic level separator." 
- ) - - return value - - -def valid_publish_topic(value: Any) -> str: - """Validate that we can publish using this MQTT topic.""" - value = valid_topic(value) - if "+" in value or "#" in value: - raise vol.Invalid("Wildcards can not be used in topic names") - return value - - def validate_device_has_at_least_one_identifier(value: ConfigType) -> ConfigType: """Validate that a device info entry has at least one identifying value.""" if not value.get(CONF_IDENTIFIERS) and not value.get(CONF_CONNECTIONS): @@ -187,8 +132,6 @@ def validate_device_has_at_least_one_identifier(value: ConfigType) -> ConfigType return value -_VALID_QOS_SCHEMA = vol.All(vol.Coerce(int), vol.In([0, 1, 2])) - CLIENT_KEY_AUTH_MSG = ( "client_key and client_cert must both be present in " "the MQTT broker configuration" @@ -217,42 +160,42 @@ def embedded_broker_deprecated(value): CONFIG_SCHEMA = vol.Schema( { - DOMAIN: vol.Schema( - { - vol.Optional(CONF_CLIENT_ID): cv.string, - vol.Optional(CONF_KEEPALIVE, default=DEFAULT_KEEPALIVE): vol.All( - vol.Coerce(int), vol.Range(min=15) - ), - vol.Optional(CONF_BROKER): cv.string, - vol.Optional(CONF_PORT, default=DEFAULT_PORT): cv.port, - vol.Optional(CONF_USERNAME): cv.string, - vol.Optional(CONF_PASSWORD): cv.string, - vol.Optional(CONF_CERTIFICATE): vol.Any("auto", cv.isfile), - vol.Inclusive( - CONF_CLIENT_KEY, "client_key_auth", msg=CLIENT_KEY_AUTH_MSG - ): cv.isfile, - vol.Inclusive( - CONF_CLIENT_CERT, "client_key_auth", msg=CLIENT_KEY_AUTH_MSG - ): cv.isfile, - vol.Optional(CONF_TLS_INSECURE): cv.boolean, - vol.Optional(CONF_TLS_VERSION, default=DEFAULT_TLS_PROTOCOL): vol.Any( - "auto", "1.0", "1.1", "1.2" - ), - vol.Optional(CONF_PROTOCOL, default=DEFAULT_PROTOCOL): vol.All( - cv.string, vol.In([PROTOCOL_31, PROTOCOL_311]) - ), - vol.Optional(CONF_EMBEDDED): vol.All( - server.HBMQTT_CONFIG_SCHEMA, embedded_broker_deprecated - ), - vol.Optional(CONF_WILL_MESSAGE): MQTT_WILL_BIRTH_SCHEMA, - vol.Optional(CONF_BIRTH_MESSAGE): MQTT_WILL_BIRTH_SCHEMA, - vol.Optional(CONF_DISCOVERY, default=DEFAULT_DISCOVERY): cv.boolean, - # discovery_prefix must be a valid publish topic because if no - # state topic is specified, it will be created with the given prefix. 
- vol.Optional( - CONF_DISCOVERY_PREFIX, default=DEFAULT_DISCOVERY_PREFIX - ): valid_publish_topic, - } + DOMAIN: vol.All( + cv.deprecated(CONF_TLS_VERSION, invalidation_version="0.115"), + vol.Schema( + { + vol.Optional(CONF_CLIENT_ID): cv.string, + vol.Optional(CONF_KEEPALIVE, default=DEFAULT_KEEPALIVE): vol.All( + vol.Coerce(int), vol.Range(min=15) + ), + vol.Optional(CONF_BROKER): cv.string, + vol.Optional(CONF_PORT, default=DEFAULT_PORT): cv.port, + vol.Optional(CONF_USERNAME): cv.string, + vol.Optional(CONF_PASSWORD): cv.string, + vol.Optional(CONF_CERTIFICATE): vol.Any("auto", cv.isfile), + vol.Inclusive( + CONF_CLIENT_KEY, "client_key_auth", msg=CLIENT_KEY_AUTH_MSG + ): cv.isfile, + vol.Inclusive( + CONF_CLIENT_CERT, "client_key_auth", msg=CLIENT_KEY_AUTH_MSG + ): cv.isfile, + vol.Optional(CONF_TLS_INSECURE): cv.boolean, + vol.Optional( + CONF_TLS_VERSION, default=DEFAULT_TLS_PROTOCOL + ): vol.Any("auto", "1.0", "1.1", "1.2"), + vol.Optional(CONF_PROTOCOL, default=DEFAULT_PROTOCOL): vol.All( + cv.string, vol.In([PROTOCOL_31, PROTOCOL_311]) + ), + vol.Optional(CONF_WILL_MESSAGE): MQTT_WILL_BIRTH_SCHEMA, + vol.Optional(CONF_BIRTH_MESSAGE): MQTT_WILL_BIRTH_SCHEMA, + vol.Optional(CONF_DISCOVERY, default=DEFAULT_DISCOVERY): cv.boolean, + # discovery_prefix must be a valid publish topic because if no + # state topic is specified, it will be created with the given prefix. + vol.Optional( + CONF_DISCOVERY_PREFIX, default=DEFAULT_DISCOVERY_PREFIX + ): valid_publish_topic, + } + ), ) }, extra=vol.ALLOW_EXTRA, @@ -365,11 +308,19 @@ def async_publish( @bind_hass def publish_template( hass: HomeAssistantType, topic, payload_template, qos=None, retain=None +) -> None: + """Publish message to an MQTT topic.""" + hass.add_job(async_publish_template, hass, topic, payload_template, qos, retain) + + +@bind_hass +def async_publish_template( + hass: HomeAssistantType, topic, payload_template, qos=None, retain=None ) -> None: """Publish message to an MQTT topic using a template payload.""" data = _build_publish_data(topic, qos, retain) data[ATTR_PAYLOAD_TEMPLATE] = payload_template - hass.services.call(DOMAIN, SERVICE_PUBLISH, data) + hass.async_create_task(hass.services.async_call(DOMAIN, SERVICE_PUBLISH, data)) def wrap_msg_callback(msg_callback: MessageCallbackType) -> MessageCallbackType: @@ -464,36 +415,15 @@ def subscribe( return remove -async def _async_setup_server(hass: HomeAssistantType, config: ConfigType): - """Try to start embedded MQTT broker. - - This method is a coroutine. - """ - conf: ConfigType = config.get(DOMAIN, {}) - - success, broker_config = await server.async_start( - hass, conf.get(CONF_PASSWORD), conf.get(CONF_EMBEDDED) - ) - - if not success: - return None - - return broker_config - - async def _async_setup_discovery( - hass: HomeAssistantType, conf: ConfigType, hass_config: ConfigType, config_entry + hass: HomeAssistantType, conf: ConfigType, config_entry ) -> bool: """Try to start the discovery of MQTT devices. This method is a coroutine. 
""" - if discovery is None: - _LOGGER.error("Unable to load MQTT discovery") - return False - success: bool = await discovery.async_start( - hass, conf[CONF_DISCOVERY_PREFIX], hass_config, config_entry + hass, conf[CONF_DISCOVERY_PREFIX], config_entry ) return success @@ -503,11 +433,6 @@ async def async_setup(hass: HomeAssistantType, config: ConfigType) -> bool: """Start the MQTT protocol service.""" conf: Optional[ConfigType] = config.get(DOMAIN) - # We need this because discovery can cause components to be set up and - # otherwise it will not load the users config. - # This needs a better solution. - hass.data[DATA_MQTT_HASS_CONFIG] = config - websocket_api.async_register_command(hass, websocket_subscribe) websocket_api.async_register_command(hass, websocket_remove_device) websocket_api.async_register_command(hass, websocket_mqtt_info) @@ -519,28 +444,6 @@ async def async_setup(hass: HomeAssistantType, config: ConfigType) -> bool: conf = dict(conf) - if CONF_EMBEDDED in conf or CONF_BROKER not in conf: - - broker_config = await _async_setup_server(hass, config) - - if broker_config is None: - _LOGGER.error("Unable to start embedded MQTT broker") - return False - - conf.update( - { - CONF_BROKER: broker_config[0], - CONF_PORT: broker_config[1], - CONF_USERNAME: broker_config[2], - CONF_PASSWORD: broker_config[3], - CONF_CERTIFICATE: broker_config[4], - CONF_PROTOCOL: broker_config[5], - CONF_CLIENT_KEY: None, - CONF_CLIENT_CERT: None, - CONF_TLS_INSECURE: None, - } - ) - hass.data[DATA_MQTT_CONFIG] = conf # Only import if we haven't before. @@ -554,6 +457,11 @@ async def async_setup(hass: HomeAssistantType, config: ConfigType) -> bool: return True +def _merge_config(entry, conf): + """Merge configuration.yaml config with config entry.""" + return {**conf, **entry.data} + + async def async_setup_entry(hass, entry): """Load a config entry.""" conf = hass.data.get(DATA_MQTT_CONFIG) @@ -574,76 +482,9 @@ async def async_setup_entry(hass, entry): entry.data, ) - conf.update(entry.data) + conf = _merge_config(entry, conf) - broker = conf[CONF_BROKER] - port = conf[CONF_PORT] - client_id = conf.get(CONF_CLIENT_ID) - keepalive = conf[CONF_KEEPALIVE] - username = conf.get(CONF_USERNAME) - password = conf.get(CONF_PASSWORD) - certificate = conf.get(CONF_CERTIFICATE) - client_key = conf.get(CONF_CLIENT_KEY) - client_cert = conf.get(CONF_CLIENT_CERT) - tls_insecure = conf.get(CONF_TLS_INSECURE) - protocol = conf[CONF_PROTOCOL] - - # For cloudmqtt.com, secured connection, auto fill in certificate - if ( - certificate is None - and 19999 < conf[CONF_PORT] < 30000 - and broker.endswith(".cloudmqtt.com") - ): - certificate = os.path.join( - os.path.dirname(__file__), "addtrustexternalcaroot.crt" - ) - - # When the certificate is set to auto, use bundled certs from requests - elif certificate == "auto": - certificate = requests.certs.where() - - if CONF_WILL_MESSAGE in conf: - will_message = Message(**conf[CONF_WILL_MESSAGE]) - else: - will_message = None - - if CONF_BIRTH_MESSAGE in conf: - birth_message = Message(**conf[CONF_BIRTH_MESSAGE]) - else: - birth_message = None - - # Be able to override versions other than TLSv1.0 under Python3.6 - conf_tls_version: str = conf.get(CONF_TLS_VERSION) - if conf_tls_version == "1.2": - tls_version = ssl.PROTOCOL_TLSv1_2 - elif conf_tls_version == "1.1": - tls_version = ssl.PROTOCOL_TLSv1_1 - elif conf_tls_version == "1.0": - tls_version = ssl.PROTOCOL_TLSv1 - else: - # Python3.6 supports automatic negotiation of highest TLS version - if sys.hexversion >= 
0x03060000: - tls_version = ssl.PROTOCOL_TLS # pylint: disable=no-member - else: - tls_version = ssl.PROTOCOL_TLSv1 - - hass.data[DATA_MQTT] = MQTT( - hass, - broker=broker, - port=port, - client_id=client_id, - keepalive=keepalive, - username=username, - password=password, - certificate=certificate, - client_key=client_key, - client_cert=client_cert, - tls_insecure=tls_insecure, - protocol=protocol, - will_message=will_message, - birth_message=birth_message, - tls_version=tls_version, - ) + hass.data[DATA_MQTT] = MQTT(hass, entry, conf,) await hass.data[DATA_MQTT].async_connect() @@ -714,9 +555,7 @@ async def async_setup_entry(hass, entry): ) if conf.get(CONF_DISCOVERY): - await _async_setup_discovery( - hass, conf, hass.data[DATA_MQTT_HASS_CONFIG], entry - ) + await _async_setup_discovery(hass, conf, entry) return True @@ -734,59 +573,92 @@ class Subscription: class MQTT: """Home Assistant MQTT client.""" - def __init__( - self, - hass: HomeAssistantType, - broker: str, - port: int, - client_id: Optional[str], - keepalive: Optional[int], - username: Optional[str], - password: Optional[str], - certificate: Optional[str], - client_key: Optional[str], - client_cert: Optional[str], - tls_insecure: Optional[bool], - protocol: Optional[str], - will_message: Optional[Message], - birth_message: Optional[Message], - tls_version: Optional[int], - ) -> None: + def __init__(self, hass: HomeAssistantType, config_entry, conf,) -> None: """Initialize Home Assistant MQTT client.""" - # We don't import them on the top because some integrations + # We don't import on the top because some integrations # should be able to optionally rely on MQTT. - # pylint: disable=import-outside-toplevel - import paho.mqtt.client as mqtt + import paho.mqtt.client as mqtt # pylint: disable=import-outside-toplevel self.hass = hass - self.broker = broker - self.port = port - self.keepalive = keepalive + self.config_entry = config_entry + self.conf = conf self.subscriptions: List[Subscription] = [] - self.birth_message = birth_message self.connected = False self._mqttc: mqtt.Client = None self._paho_lock = asyncio.Lock() - if protocol == PROTOCOL_31: + self.init_client() + self.config_entry.add_update_listener(self.async_config_entry_updated) + + @staticmethod + async def async_config_entry_updated(hass, entry) -> None: + """Handle signals of config entry being updated. + + This is a static method because a class method (bound method), can not be used with weak references. + Causes for this is config entry options changing. + """ + self = hass.data[DATA_MQTT] + + conf = hass.data.get(DATA_MQTT_CONFIG) + if conf is None: + conf = CONFIG_SCHEMA({DOMAIN: dict(entry.data)})[DOMAIN] + + self.conf = _merge_config(entry, conf) + await self.async_disconnect() + self.init_client() + await self.async_connect() + + await discovery.async_stop(hass) + if self.conf.get(CONF_DISCOVERY): + await _async_setup_discovery(hass, self.conf, entry) + + def init_client(self): + """Initialize paho client.""" + # We don't import on the top because some integrations + # should be able to optionally rely on MQTT. 
+ import paho.mqtt.client as mqtt # pylint: disable=import-outside-toplevel + + if self.conf[CONF_PROTOCOL] == PROTOCOL_31: proto: int = mqtt.MQTTv31 else: proto = mqtt.MQTTv311 + client_id = self.conf.get(CONF_CLIENT_ID) if client_id is None: self._mqttc = mqtt.Client(protocol=proto) else: self._mqttc = mqtt.Client(client_id, protocol=proto) + username = self.conf.get(CONF_USERNAME) + password = self.conf.get(CONF_PASSWORD) if username is not None: self._mqttc.username_pw_set(username, password) + certificate = self.conf.get(CONF_CERTIFICATE) + + # For cloudmqtt.com, secured connection, auto fill in certificate + if ( + certificate is None + and 19999 < self.conf[CONF_PORT] < 30000 + and self.conf[CONF_BROKER].endswith(".cloudmqtt.com") + ): + certificate = os.path.join( + os.path.dirname(__file__), "addtrustexternalcaroot.crt" + ) + + # When the certificate is set to auto, use bundled certs from certifi + elif certificate == "auto": + certificate = certifi.where() + + client_key = self.conf.get(CONF_CLIENT_KEY) + client_cert = self.conf.get(CONF_CLIENT_CERT) + tls_insecure = self.conf.get(CONF_TLS_INSECURE) if certificate is not None: self._mqttc.tls_set( certificate, certfile=client_cert, keyfile=client_key, - tls_version=tls_version, + tls_version=ssl.PROTOCOL_TLS, ) if tls_insecure is not None: @@ -796,6 +668,11 @@ class MQTT: self._mqttc.on_disconnect = self._mqtt_on_disconnect self._mqttc.on_message = self._mqtt_on_message + if CONF_WILL_MESSAGE in self.conf: + will_message = Message(**self.conf[CONF_WILL_MESSAGE]) + else: + will_message = None + if will_message is not None: self._mqttc.will_set( # pylint: disable=no-value-for-parameter *attr.astuple( @@ -815,14 +692,17 @@ class MQTT: ) async def async_connect(self) -> str: - """Connect to the host. Does process messages yet.""" + """Connect to the host. Does not process messages yet.""" # pylint: disable=import-outside-toplevel import paho.mqtt.client as mqtt result: int = None try: result = await self.hass.async_add_executor_job( - self._mqttc.connect, self.broker, self.port, self.keepalive + self._mqttc.connect, + self.conf[CONF_BROKER], + self.conf[CONF_PORT], + self.conf[CONF_KEEPALIVE], ) except OSError as err: _LOGGER.error("Failed to connect to MQTT server due to exception: %s", err) @@ -923,7 +803,13 @@ class MQTT: return self.connected = True - _LOGGER.info("Connected to MQTT server (%s)", result_code) + dispatcher_send(self.hass, MQTT_CONNECTED) + _LOGGER.info( + "Connected to MQTT server %s:%s (%s)", + self.conf[CONF_BROKER], + self.conf[CONF_PORT], + result_code, + ) # Group subscriptions to only re-subscribe once for each topic. 
keyfunc = attrgetter("topic") @@ -932,11 +818,12 @@ class MQTT: max_qos = max(subscription.qos for subscription in subs) self.hass.add_job(self._async_perform_subscription, topic, max_qos) - if self.birth_message: + if CONF_BIRTH_MESSAGE in self.conf: + birth_message = Message(**self.conf[CONF_BIRTH_MESSAGE]) self.hass.add_job( self.async_publish( # pylint: disable=no-value-for-parameter *attr.astuple( - self.birth_message, + birth_message, filter=lambda attr, value: attr.name not in ["subscribed_topic", "timestamp"], ) @@ -990,7 +877,13 @@ class MQTT: def _mqtt_on_disconnect(self, _mqttc, _userdata, result_code: int) -> None: """Disconnected callback.""" self.connected = False - _LOGGER.warning("Disconnected from MQTT server (%s)", result_code) + dispatcher_send(self.hass, MQTT_DISCONNECTED) + _LOGGER.warning( + "Disconnected from MQTT server %s:%s (%s)", + self.conf[CONF_BROKER], + self.conf[CONF_PORT], + result_code, + ) def _raise_on_error(result_code: int) -> None: @@ -1099,6 +992,14 @@ class MqttAvailability(Entity): """Subscribe MQTT events.""" await super().async_added_to_hass() await self._availability_subscribe_topics() + self.async_on_remove( + async_dispatcher_connect(self.hass, MQTT_CONNECTED, self.async_mqtt_connect) + ) + self.async_on_remove( + async_dispatcher_connect( + self.hass, MQTT_DISCONNECTED, self.async_mqtt_connect + ) + ) async def availability_discovery_update(self, config: dict): """Handle updated discovery message.""" @@ -1131,6 +1032,12 @@ class MqttAvailability(Entity): }, ) + @callback + def async_mqtt_connect(self): + """Update state on connection/disconnection to MQTT broker.""" + if self.hass.is_running: + self.async_write_ha_state() + async def async_will_remove_from_hass(self): """Unsubscribe when removed.""" self._availability_sub_state = await async_unsubscribe_topics( @@ -1141,6 +1048,8 @@ class MqttAvailability(Entity): def available(self) -> bool: """Return if the device is available.""" availability_topic = self._avail_config.get(CONF_AVAILABILITY_TOPIC) + if not self.hass.data[DATA_MQTT].connected: + return False return availability_topic is None or self._available diff --git a/homeassistant/components/mqtt/config_flow.py b/homeassistant/components/mqtt/config_flow.py index b0ba58158e0..2f4feaed5e9 100644 --- a/homeassistant/components/mqtt/config_flow.py +++ b/homeassistant/components/mqtt/config_flow.py @@ -1,5 +1,6 @@ """Config flow for MQTT.""" from collections import OrderedDict +import logging import queue import voluptuous as vol @@ -13,7 +14,22 @@ from homeassistant.const import ( CONF_USERNAME, ) -from .const import CONF_BROKER, CONF_DISCOVERY, DEFAULT_DISCOVERY +from .const import ( + ATTR_PAYLOAD, + ATTR_QOS, + ATTR_RETAIN, + ATTR_TOPIC, + CONF_BIRTH_MESSAGE, + CONF_BROKER, + CONF_DISCOVERY, + CONF_WILL_MESSAGE, + DEFAULT_DISCOVERY, + DEFAULT_QOS, + DEFAULT_RETAIN, +) +from .util import MQTT_WILL_BIRTH_SCHEMA + +_LOGGER = logging.getLogger(__name__) @config_entries.HANDLERS.register("mqtt") @@ -25,6 +41,11 @@ class FlowHandler(config_entries.ConfigFlow): _hassio_discovery = None + @staticmethod + def async_get_options_flow(config_entry): + """Get the options flow for this handler.""" + return MQTTOptionsFlowHandler(config_entry) + async def async_step_user(self, user_input=None): """Handle a flow initialized by the user.""" if self._async_current_entries(): @@ -74,12 +95,12 @@ class FlowHandler(config_entries.ConfigFlow): return self.async_create_entry(title="configuration.yaml", data={}) -
async def async_step_hassio(self, user_input=None): + async def async_step_hassio(self, discovery_info): """Receive a Hass.io discovery.""" if self._async_current_entries(): return self.async_abort(reason="single_instance_allowed") - self._hassio_discovery = user_input + self._hassio_discovery = discovery_info return await self.async_step_hassio_confirm() @@ -123,6 +144,163 @@ class FlowHandler(config_entries.ConfigFlow): ) +class MQTTOptionsFlowHandler(config_entries.OptionsFlow): + """Handle MQTT options.""" + + def __init__(self, config_entry): + """Initialize MQTT options flow.""" + self.config_entry = config_entry + self.broker_config = {} + self.options = dict(config_entry.options) + + async def async_step_init(self, user_input=None): + """Manage the MQTT options.""" + return await self.async_step_broker() + + async def async_step_broker(self, user_input=None): + """Manage the MQTT options.""" + errors = {} + current_config = self.config_entry.data + if user_input is not None: + can_connect = await self.hass.async_add_executor_job( + try_connection, + user_input[CONF_BROKER], + user_input[CONF_PORT], + user_input.get(CONF_USERNAME), + user_input.get(CONF_PASSWORD), + ) + + if can_connect: + self.broker_config.update(user_input) + return await self.async_step_options() + + errors["base"] = "cannot_connect" + + fields = OrderedDict() + fields[vol.Required(CONF_BROKER, default=current_config[CONF_BROKER])] = str + fields[vol.Required(CONF_PORT, default=current_config[CONF_PORT])] = vol.Coerce( + int + ) + fields[ + vol.Optional( + CONF_USERNAME, + description={"suggested_value": current_config.get(CONF_USERNAME)}, + ) + ] = str + fields[ + vol.Optional( + CONF_PASSWORD, + description={"suggested_value": current_config.get(CONF_PASSWORD)}, + ) + ] = str + + return self.async_show_form( + step_id="broker", data_schema=vol.Schema(fields), errors=errors, + ) + + async def async_step_options(self, user_input=None): + """Manage the MQTT options.""" + errors = {} + current_config = self.config_entry.data + options_config = {} + if user_input is not None: + bad_birth = False + bad_will = False + + if "birth_topic" in user_input: + birth_message = { + ATTR_TOPIC: user_input["birth_topic"], + ATTR_PAYLOAD: user_input.get("birth_payload", ""), + ATTR_QOS: user_input["birth_qos"], + ATTR_RETAIN: user_input["birth_retain"], + } + try: + birth_message = MQTT_WILL_BIRTH_SCHEMA(birth_message) + options_config[CONF_BIRTH_MESSAGE] = birth_message + except vol.Invalid: + errors["base"] = "bad_birth" + bad_birth = True + + if "will_topic" in user_input: + will_message = { + ATTR_TOPIC: user_input["will_topic"], + ATTR_PAYLOAD: user_input.get("will_payload", ""), + ATTR_QOS: user_input["will_qos"], + ATTR_RETAIN: user_input["will_retain"], + } + try: + will_message = MQTT_WILL_BIRTH_SCHEMA(will_message) + options_config[CONF_WILL_MESSAGE] = will_message + except vol.Invalid: + errors["base"] = "bad_will" + bad_will = True + + options_config[CONF_DISCOVERY] = user_input[CONF_DISCOVERY] + + if not bad_birth and not bad_will: + updated_config = {} + updated_config.update(self.broker_config) + updated_config.update(options_config) + self.hass.config_entries.async_update_entry( + self.config_entry, data=updated_config + ) + return self.async_create_entry(title="", data=None) + + birth_topic = None + birth_payload = None + birth_qos = DEFAULT_QOS + birth_retain = DEFAULT_RETAIN + if CONF_BIRTH_MESSAGE in current_config: + birth_topic = current_config[CONF_BIRTH_MESSAGE][ATTR_TOPIC] + birth_payload = 
current_config[CONF_BIRTH_MESSAGE][ATTR_PAYLOAD] + birth_qos = current_config[CONF_BIRTH_MESSAGE].get(ATTR_QOS, DEFAULT_QOS) + birth_retain = current_config[CONF_BIRTH_MESSAGE].get( + ATTR_RETAIN, DEFAULT_RETAIN + ) + + will_topic = None + will_payload = None + will_qos = DEFAULT_QOS + will_retain = DEFAULT_RETAIN + if CONF_WILL_MESSAGE in current_config: + will_topic = current_config[CONF_WILL_MESSAGE][ATTR_TOPIC] + will_payload = current_config[CONF_WILL_MESSAGE][ATTR_PAYLOAD] + will_qos = current_config[CONF_WILL_MESSAGE].get(ATTR_QOS, DEFAULT_QOS) + will_retain = current_config[CONF_WILL_MESSAGE].get( + ATTR_RETAIN, DEFAULT_RETAIN + ) + + fields = OrderedDict() + fields[ + vol.Optional( + CONF_DISCOVERY, + default=current_config.get(CONF_DISCOVERY, DEFAULT_DISCOVERY), + ) + ] = bool + fields[ + vol.Optional("birth_topic", description={"suggested_value": birth_topic}) + ] = str + fields[ + vol.Optional( + "birth_payload", description={"suggested_value": birth_payload} + ) + ] = str + fields[vol.Optional("birth_qos", default=birth_qos)] = vol.In([0, 1, 2]) + fields[vol.Optional("birth_retain", default=birth_retain)] = bool + fields[ + vol.Optional("will_topic", description={"suggested_value": will_topic}) + ] = str + fields[ + vol.Optional("will_payload", description={"suggested_value": will_payload}) + ] = str + fields[vol.Optional("will_qos", default=will_qos)] = vol.In([0, 1, 2]) + fields[vol.Optional("will_retain", default=will_retain)] = bool + + return self.async_show_form( + step_id="options", data_schema=vol.Schema(fields), errors=errors, + ) + + def try_connection(broker, port, username, password, protocol="3.1"): """Test if we can connect to an MQTT broker.""" # pylint: disable=import-outside-toplevel diff --git a/homeassistant/components/mqtt/const.py b/homeassistant/components/mqtt/const.py index 5d1fe2e2505..62d2643bc91 100644 --- a/homeassistant/components/mqtt/const.py +++ b/homeassistant/components/mqtt/const.py @@ -1,11 +1,25 @@ """Constants used by multiple MQTT modules.""" -CONF_BROKER = "broker" -CONF_DISCOVERY = "discovery" -DEFAULT_DISCOVERY = False - ATTR_DISCOVERY_HASH = "discovery_hash" ATTR_DISCOVERY_PAYLOAD = "discovery_payload" ATTR_DISCOVERY_TOPIC = "discovery_topic" +ATTR_PAYLOAD = "payload" +ATTR_QOS = "qos" +ATTR_RETAIN = "retain" +ATTR_TOPIC = "topic" + +CONF_BROKER = "broker" +CONF_BIRTH_MESSAGE = "birth_message" +CONF_DISCOVERY = "discovery" +CONF_QOS = ATTR_QOS +CONF_RETAIN = ATTR_RETAIN CONF_STATE_TOPIC = "state_topic" -PROTOCOL_311 = "3.1.1" +CONF_WILL_MESSAGE = "will_message" + +DEFAULT_DISCOVERY = False DEFAULT_QOS = 0 +DEFAULT_RETAIN = False + +MQTT_CONNECTED = "mqtt_connected" +MQTT_DISCONNECTED = "mqtt_disconnected" + +PROTOCOL_311 = "3.1.1" diff --git a/homeassistant/components/mqtt/discovery.py b/homeassistant/components/mqtt/discovery.py index 1f2b7162556..281172b6332 100644 --- a/homeassistant/components/mqtt/discovery.py +++ b/homeassistant/components/mqtt/discovery.py @@ -35,8 +35,9 @@ SUPPORTED_COMPONENTS = [ ] ALREADY_DISCOVERED = "mqtt_discovered_components" -DATA_CONFIG_ENTRY_LOCK = "mqtt_config_entry_lock" CONFIG_ENTRY_IS_SETUP = "mqtt_config_entry_is_setup" +DATA_CONFIG_ENTRY_LOCK = "mqtt_config_entry_lock" +DISCOVERY_UNSUBSCRIBE = "mqtt_discovery_unsubscribe" MQTT_DISCOVERY_UPDATED = "mqtt_discovery_updated_{}" MQTT_DISCOVERY_NEW = "mqtt_discovery_new_{}_{}" @@ -58,9 +59,9 @@ class MQTTConfig(dict): async def async_start( - hass: HomeAssistantType, discovery_topic, hass_config, config_entry=None + hass: HomeAssistantType, 
discovery_topic, config_entry=None ) -> bool: - """Initialize of MQTT Discovery.""" + """Start MQTT Discovery.""" async def async_device_message_received(msg): """Process the received message.""" @@ -163,8 +164,15 @@ async def async_start( hass.data[DATA_CONFIG_ENTRY_LOCK] = asyncio.Lock() hass.data[CONFIG_ENTRY_IS_SETUP] = set() - await mqtt.async_subscribe( + hass.data[DISCOVERY_UNSUBSCRIBE] = await mqtt.async_subscribe( hass, f"{discovery_topic}/#", async_device_message_received, 0 ) return True + + +async def async_stop(hass: HomeAssistantType) -> bool: + """Stop MQTT Discovery.""" + if DISCOVERY_UNSUBSCRIBE in hass.data and hass.data[DISCOVERY_UNSUBSCRIBE]: + hass.data[DISCOVERY_UNSUBSCRIBE]() + hass.data[DISCOVERY_UNSUBSCRIBE] = None diff --git a/homeassistant/components/mqtt/manifest.json b/homeassistant/components/mqtt/manifest.json index 37070627477..8b293eb06f6 100644 --- a/homeassistant/components/mqtt/manifest.json +++ b/homeassistant/components/mqtt/manifest.json @@ -3,7 +3,7 @@ "name": "MQTT", "config_flow": true, "documentation": "https://www.home-assistant.io/integrations/mqtt", - "requirements": ["hbmqtt==0.9.5", "paho-mqtt==1.5.0"], + "requirements": ["paho-mqtt==1.5.0"], "dependencies": ["http"], "codeowners": ["@home-assistant/core", "@emontnemery"] } diff --git a/homeassistant/components/mqtt/server.py b/homeassistant/components/mqtt/server.py deleted file mode 100644 index 1b2a56a2195..00000000000 --- a/homeassistant/components/mqtt/server.py +++ /dev/null @@ -1,99 +0,0 @@ -"""Support for a local MQTT broker.""" -import logging -import tempfile - -import voluptuous as vol - -from homeassistant.const import EVENT_HOMEASSISTANT_STOP -import homeassistant.helpers.config_validation as cv - -from .const import PROTOCOL_311 - -_LOGGER = logging.getLogger(__name__) - -# None allows custom config to be created through generate_config -HBMQTT_CONFIG_SCHEMA = vol.Any( - None, - vol.Schema( - { - vol.Optional("auth"): vol.Schema( - {vol.Optional("password-file"): cv.isfile}, extra=vol.ALLOW_EXTRA - ), - vol.Optional("listeners"): vol.Schema( - {vol.Required("default"): vol.Schema(dict), str: vol.Schema(dict)} - ), - }, - extra=vol.ALLOW_EXTRA, - ), -) - - -async def async_start(hass, password, server_config): - """Initialize MQTT Server. - - This method is a coroutine. 
- """ - # pylint: disable=import-outside-toplevel - from hbmqtt.broker import Broker, BrokerException - - passwd = tempfile.NamedTemporaryFile() - - gen_server_config, client_config = generate_config(hass, passwd, password) - - try: - if server_config is None: - server_config = gen_server_config - - broker = Broker(server_config, hass.loop) - await broker.start() - except BrokerException: - _LOGGER.exception("Error initializing MQTT server") - return False, None - finally: - passwd.close() - - async def async_shutdown_mqtt_server(event): - """Shut down the MQTT server.""" - await broker.shutdown() - - hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, async_shutdown_mqtt_server) - - return True, client_config - - -def generate_config(hass, passwd, password): - """Generate a configuration based on current Home Assistant instance.""" - # pylint: disable=import-outside-toplevel - from passlib.apps import custom_app_context - - config = { - "listeners": { - "default": { - "max-connections": 50000, - "bind": "0.0.0.0:1883", - "type": "tcp", - }, - "ws-1": {"bind": "0.0.0.0:8080", "type": "ws"}, - }, - "auth": {"allow-anonymous": password is None}, - "plugins": ["auth_anonymous"], - "topic-check": {"enabled": True, "plugins": ["topic_taboo"]}, - } - - if password: - username = "homeassistant" - - # Encrypt with what hbmqtt uses to verify - passwd.write( - f"homeassistant:{custom_app_context.encrypt(password)}\n".encode("utf-8") - ) - passwd.flush() - - config["auth"]["password-file"] = passwd.name - config["plugins"].append("auth_file") - else: - username = None - - client_config = ("localhost", 1883, username, password, None, PROTOCOL_311) - - return config, client_config diff --git a/homeassistant/components/mqtt/strings.json b/homeassistant/components/mqtt/strings.json index 305f3a206a7..d10bc8bc4e6 100644 --- a/homeassistant/components/mqtt/strings.json +++ b/homeassistant/components/mqtt/strings.json @@ -47,5 +47,37 @@ "button_5": "Fifth button", "button_6": "Sixth button" } + }, + "options": { + "step": { + "broker": { + "description": "Please enter the connection information of your MQTT broker.", + "data": { + "broker": "Broker", + "port": "[%key:common::config_flow::data::port%]", + "username": "[%key:common::config_flow::data::username%]", + "password": "[%key:common::config_flow::data::password%]" + } + }, + "options": { + "description": "Please select MQTT options.", + "data": { + "discovery": "Enable discovery", + "birth_topic": "Birth message topic", + "birth_payload": "Birth message payload", + "birth_qos": "Birth message QoS", + "birth_retain": "Birth message retain", + "will_topic": "Will message topic", + "will_payload": "Will message payload", + "will_qos": "Will message QoS", + "will_retain": "Will message retain" + } + } + }, + "error": { + "cannot_connect": "Unable to connect to the broker.", + "bad_birth": "Invalid birth topic.", + "bad_will": "Invalid will topic." 
+ } } } \ No newline at end of file diff --git a/homeassistant/components/mqtt/translations/ca.json b/homeassistant/components/mqtt/translations/ca.json index f0c9b5d50d0..715327d004a 100644 --- a/homeassistant/components/mqtt/translations/ca.json +++ b/homeassistant/components/mqtt/translations/ca.json @@ -47,5 +47,37 @@ "button_short_release": "\"{subtype}\" alliberat", "button_triple_press": "\"{subtype}\" clicat tres vegades" } + }, + "options": { + "error": { + "bad_birth": "Topic missatge de naixement inv\u00e0lid.", + "bad_will": "Topic missatge d'\u00faltima voluntat inv\u00e0lid.", + "cannot_connect": "No es pot connectar amb el broker." + }, + "step": { + "broker": { + "data": { + "broker": "Broker", + "password": "Contrasenya", + "port": "Port", + "username": "Nom d'usuari" + }, + "description": "Introdueix la informaci\u00f3 de connexi\u00f3 del teu broker MQTT." + }, + "options": { + "data": { + "birth_payload": "Dades (payload) missatge de naixement", + "birth_qos": "QoS missatge de naixement", + "birth_retain": "Retenci\u00f3 missatge de naixement", + "birth_topic": "Topic missatge de naixement", + "discovery": "Activar descobriment", + "will_payload": "Dades (payload) missatge d'\u00faltima voluntat", + "will_qos": "QoS missatge d'\u00faltima voluntat", + "will_retain": "Retenci\u00f3 missatge d'\u00faltima voluntat", + "will_topic": "Topic missatge d'\u00faltima voluntat" + }, + "description": "Selecciona les opcions MQTT." + } + } } } \ No newline at end of file diff --git a/homeassistant/components/mqtt/translations/en.json b/homeassistant/components/mqtt/translations/en.json index dc3231533d0..99cd59be13b 100644 --- a/homeassistant/components/mqtt/translations/en.json +++ b/homeassistant/components/mqtt/translations/en.json @@ -47,5 +47,37 @@ "button_short_release": "\"{subtype}\" released", "button_triple_press": "\"{subtype}\" triple clicked" } + }, + "options": { + "error": { + "bad_birth": "Invalid birth topic.", + "bad_will": "Invalid will topic.", + "cannot_connect": "Unable to connect to the broker." + }, + "step": { + "broker": { + "data": { + "broker": "Broker", + "password": "Password", + "port": "Port", + "username": "Username" + }, + "description": "Please enter the connection information of your MQTT broker." + }, + "options": { + "data": { + "birth_payload": "Birth message payload", + "birth_qos": "Birth message QoS", + "birth_retain": "Birth message retain", + "birth_topic": "Birth message topic", + "discovery": "Enable discovery", + "will_payload": "Will message payload", + "will_qos": "Will message QoS", + "will_retain": "Will message retain", + "will_topic": "Will message topic" + }, + "description": "Please select MQTT options." + } + } } } \ No newline at end of file diff --git a/homeassistant/components/mqtt/translations/es.json b/homeassistant/components/mqtt/translations/es.json index a55d2d7bd07..52dda70695a 100644 --- a/homeassistant/components/mqtt/translations/es.json +++ b/homeassistant/components/mqtt/translations/es.json @@ -47,5 +47,37 @@ "button_short_release": "\"{subtype}\" soltado", "button_triple_press": "\"{subtype}\" triple pulsaci\u00f3n" } + }, + "options": { + "error": { + "bad_birth": "Tema de nacimiento inv\u00e1lido.", + "bad_will": "Tema deseado inv\u00e1lido.", + "cannot_connect": "No se puede conectar con el agente." 
+ }, + "step": { + "broker": { + "data": { + "broker": "Agente", + "password": "Contrase\u00f1a", + "port": "Puerto", + "username": "Usuario" + }, + "description": "Por favor, introduce la informaci\u00f3n de tu agente MQTT." + }, + "options": { + "data": { + "birth_payload": "Carga del mensaje de nacimiento", + "birth_qos": "QoS del mensaje de nacimiento", + "birth_retain": "Retenci\u00f3n del mensaje de nacimiento", + "birth_topic": "Tema del mensaje de nacimiento", + "discovery": "Habilitar descubrimiento", + "will_payload": "Enviar\u00e1 la carga", + "will_qos": "El mensaje usar\u00e1 el QoS", + "will_retain": "Retendr\u00e1 el mensaje", + "will_topic": "Enviar\u00e1 un mensaje al tema" + }, + "description": "Por favor, selecciona las opciones para MQTT." + } + } } } \ No newline at end of file diff --git a/homeassistant/components/mqtt/translations/et.json b/homeassistant/components/mqtt/translations/et.json deleted file mode 100644 index 20ff3db6518..00000000000 --- a/homeassistant/components/mqtt/translations/et.json +++ /dev/null @@ -1,9 +0,0 @@ -{ - "config": { - "step": { - "broker": { - "title": "" - } - } - } -} \ No newline at end of file diff --git a/homeassistant/components/mqtt/translations/no.json b/homeassistant/components/mqtt/translations/no.json index 962da69062b..1871b2bffe7 100644 --- a/homeassistant/components/mqtt/translations/no.json +++ b/homeassistant/components/mqtt/translations/no.json @@ -47,5 +47,33 @@ "button_short_release": "\"{subtype}\" utgitt", "button_triple_press": "\"{subtype}\" trippel klikket" } + }, + "options": { + "error": { + "bad_birth": "Ugyldig f\u00f8dselsemne.", + "bad_will": "Ugyldig emne.", + "cannot_connect": "Kan ikke koble til megleren." + }, + "step": { + "broker": { + "data": { + "broker": "Megler" + }, + "description": "Vennligst oppgi tilkoblingsinformasjonen for din MQTT megler." + }, + "options": { + "data": { + "birth_qos": "F\u00f8dselsmelding QoS", + "birth_retain": "F\u00f8dselsmelding beholder", + "birth_topic": "F\u00f8dselsmeldingsemne", + "discovery": "Aktiver oppdagelse", + "will_payload": "Vil melde nyttelast", + "will_qos": "Vil melding til QoS", + "will_retain": "Vil meldingen beholde", + "will_topic": "Vil melding emne" + }, + "description": "Vennligst velg MQTT-alternativer." 
+ } + } } } \ No newline at end of file diff --git a/homeassistant/components/mqtt/translations/ru.json b/homeassistant/components/mqtt/translations/ru.json index e1c5c0d979e..21f5e11322f 100644 --- a/homeassistant/components/mqtt/translations/ru.json +++ b/homeassistant/components/mqtt/translations/ru.json @@ -47,5 +47,27 @@ "button_short_release": "{subtype} \u043e\u0442\u043f\u0443\u0449\u0435\u043d\u0430", "button_triple_press": "{subtype} \u043d\u0430\u0436\u0430\u0442\u0430 \u0442\u0440\u0438 \u0440\u0430\u0437\u0430" } + }, + "options": { + "error": { + "cannot_connect": "\u041d\u0435 \u0443\u0434\u0430\u043b\u043e\u0441\u044c \u043f\u043e\u0434\u043a\u043b\u044e\u0447\u0438\u0442\u044c\u0441\u044f \u043a \u0431\u0440\u043e\u043a\u0435\u0440\u0443" + }, + "step": { + "broker": { + "data": { + "broker": "\u0411\u0440\u043e\u043a\u0435\u0440", + "password": "\u041f\u0430\u0440\u043e\u043b\u044c", + "port": "\u041f\u043e\u0440\u0442", + "username": "\u041b\u043e\u0433\u0438\u043d" + }, + "description": "\u0412\u0432\u0435\u0434\u0438\u0442\u0435 \u0438\u043d\u0444\u043e\u0440\u043c\u0430\u0446\u0438\u044e \u043e \u043f\u043e\u0434\u043a\u043b\u044e\u0447\u0435\u043d\u0438\u0438 \u043a \u0412\u0430\u0448\u0435\u043c\u0443 \u0431\u0440\u043e\u043a\u0435\u0440\u0443 MQTT." + }, + "options": { + "data": { + "discovery": "\u0420\u0430\u0437\u0440\u0435\u0448\u0438\u0442\u044c \u043e\u0431\u043d\u0430\u0440\u0443\u0436\u0435\u043d\u0438\u0435" + }, + "description": "\u041d\u0430\u0441\u0442\u0440\u043e\u0439\u043a\u0430 \u043f\u0430\u0440\u0430\u043c\u0435\u0442\u0440\u043e\u0432 MQTT." + } + } } } \ No newline at end of file diff --git a/homeassistant/components/mqtt/translations/zh-Hant.json b/homeassistant/components/mqtt/translations/zh-Hant.json index 2b50e38ae7e..07458569f73 100644 --- a/homeassistant/components/mqtt/translations/zh-Hant.json +++ b/homeassistant/components/mqtt/translations/zh-Hant.json @@ -47,5 +47,37 @@ "button_short_release": "\"{subtype}\" \u91cb\u653e", "button_triple_press": "\"{subtype}\" \u4e09\u9023\u64ca" } + }, + "options": { + "error": { + "bad_birth": "Birth \u4e3b\u984c\u7121\u6548\u3002", + "bad_will": "Will \u4e3b\u984c\u7121\u6548\u3002", + "cannot_connect": "\u7121\u6cd5\u9023\u7dda\u81f3 Broker\u3002" + }, + "step": { + "broker": { + "data": { + "broker": "Broker", + "password": "\u5bc6\u78bc", + "port": "\u901a\u8a0a\u57e0", + "username": "\u4f7f\u7528\u8005\u540d\u7a31" + }, + "description": "\u8acb\u8f38\u5165 MQTT Broker \u9023\u7dda\u8cc7\u8a0a\u3002" + }, + "options": { + "data": { + "birth_payload": "Birth \u8a0a\u606f payload", + "birth_qos": "Birth \u8a0a\u606f QoS", + "birth_retain": "Birth \u8a0a\u606f Retain", + "birth_topic": "Birth \u8a0a\u606f\u4e3b\u984c", + "discovery": "\u958b\u555f\u63a2\u7d22", + "will_payload": "Will \u8a0a\u606f payload", + "will_qos": "Will \u8a0a\u606f QoS", + "will_retain": "Will \u8a0a\u606f Retain", + "will_topic": "Will \u8a0a\u606f\u4e3b\u984c" + }, + "description": "\u8acb\u9078\u64c7 MQTT \u9078\u9805\u3002" + } + } } } \ No newline at end of file diff --git a/homeassistant/components/mqtt/util.py b/homeassistant/components/mqtt/util.py new file mode 100644 index 00000000000..568dbabd7b0 --- /dev/null +++ b/homeassistant/components/mqtt/util.py @@ -0,0 +1,82 @@ +"""Utility functions for the MQTT integration.""" +from typing import Any + +import voluptuous as vol + +from homeassistant.const import CONF_PAYLOAD +from homeassistant.helpers import config_validation as cv + +from .const import ( + 
ATTR_PAYLOAD, + ATTR_QOS, + ATTR_RETAIN, + ATTR_TOPIC, + DEFAULT_QOS, + DEFAULT_RETAIN, +) + + +def valid_topic(value: Any) -> str: + """Validate that this is a valid topic name/filter.""" + value = cv.string(value) + try: + raw_value = value.encode("utf-8") + except UnicodeError: + raise vol.Invalid("MQTT topic name/filter must be valid UTF-8 string.") + if not raw_value: + raise vol.Invalid("MQTT topic name/filter must not be empty.") + if len(raw_value) > 65535: + raise vol.Invalid( + "MQTT topic name/filter must not be longer than 65535 encoded bytes." + ) + if "\0" in value: + raise vol.Invalid("MQTT topic name/filter must not contain null character.") + return value + + +def valid_subscribe_topic(value: Any) -> str: + """Validate that we can subscribe using this MQTT topic.""" + value = valid_topic(value) + for i in (i for i, c in enumerate(value) if c == "+"): + if (i > 0 and value[i - 1] != "/") or ( + i < len(value) - 1 and value[i + 1] != "/" + ): + raise vol.Invalid( + "Single-level wildcard must occupy an entire level of the filter" + ) + + index = value.find("#") + if index != -1: + if index != len(value) - 1: + # If there are multiple wildcards, this will also trigger + raise vol.Invalid( + "Multi-level wildcard must be the last " + "character in the topic filter." + ) + if len(value) > 1 and value[index - 1] != "/": + raise vol.Invalid( + "Multi-level wildcard must be after a topic level separator." + ) + + return value + + +def valid_publish_topic(value: Any) -> str: + """Validate that we can publish using this MQTT topic.""" + value = valid_topic(value) + if "+" in value or "#" in value: + raise vol.Invalid("Wildcards can not be used in topic names") + return value + + +_VALID_QOS_SCHEMA = vol.All(vol.Coerce(int), vol.In([0, 1, 2])) + +MQTT_WILL_BIRTH_SCHEMA = vol.Schema( + { + vol.Required(ATTR_TOPIC): valid_publish_topic, + vol.Required(ATTR_PAYLOAD, CONF_PAYLOAD): cv.string, + vol.Optional(ATTR_QOS, default=DEFAULT_QOS): _VALID_QOS_SCHEMA, + vol.Optional(ATTR_RETAIN, default=DEFAULT_RETAIN): cv.boolean, + }, + required=True, +) diff --git a/homeassistant/components/mqtt_statestream/__init__.py b/homeassistant/components/mqtt_statestream/__init__.py index 8e63bffe568..d7c971b7d35 100644 --- a/homeassistant/components/mqtt_statestream/__init__.py +++ b/homeassistant/components/mqtt_statestream/__init__.py @@ -4,16 +4,13 @@ import json import voluptuous as vol from homeassistant.components.mqtt import valid_publish_topic -from homeassistant.const import ( - CONF_DOMAINS, - CONF_ENTITIES, - CONF_EXCLUDE, - CONF_INCLUDE, - MATCH_ALL, -) +from homeassistant.const import MATCH_ALL from homeassistant.core import callback import homeassistant.helpers.config_validation as cv -from homeassistant.helpers.entityfilter import generate_filter +from homeassistant.helpers.entityfilter import ( + INCLUDE_EXCLUDE_BASE_FILTER_SCHEMA, + convert_include_exclude_filter, +) from homeassistant.helpers.event import async_track_state_change from homeassistant.helpers.json import JSONEncoder @@ -25,29 +22,13 @@ DOMAIN = "mqtt_statestream" CONFIG_SCHEMA = vol.Schema( { - DOMAIN: vol.Schema( + DOMAIN: INCLUDE_EXCLUDE_BASE_FILTER_SCHEMA.extend( { - vol.Optional(CONF_EXCLUDE, default={}): vol.Schema( - { - vol.Optional(CONF_ENTITIES, default=[]): cv.entity_ids, - vol.Optional(CONF_DOMAINS, default=[]): vol.All( - cv.ensure_list, [cv.string] - ), - } - ), - vol.Optional(CONF_INCLUDE, default={}): vol.Schema( - { - vol.Optional(CONF_ENTITIES, default=[]): cv.entity_ids, - vol.Optional(CONF_DOMAINS, 
default=[]): vol.All( - cv.ensure_list, [cv.string] - ), - } - ), vol.Required(CONF_BASE_TOPIC): valid_publish_topic, vol.Optional(CONF_PUBLISH_ATTRIBUTES, default=False): cv.boolean, vol.Optional(CONF_PUBLISH_TIMESTAMPS, default=False): cv.boolean, } - ) + ), }, extra=vol.ALLOW_EXTRA, ) @@ -55,18 +36,11 @@ CONFIG_SCHEMA = vol.Schema( async def async_setup(hass, config): """Set up the MQTT state feed.""" - conf = config.get(DOMAIN, {}) + conf = config.get(DOMAIN) + publish_filter = convert_include_exclude_filter(conf) base_topic = conf.get(CONF_BASE_TOPIC) - pub_include = conf.get(CONF_INCLUDE, {}) - pub_exclude = conf.get(CONF_EXCLUDE, {}) publish_attributes = conf.get(CONF_PUBLISH_ATTRIBUTES) publish_timestamps = conf.get(CONF_PUBLISH_TIMESTAMPS) - publish_filter = generate_filter( - pub_include.get(CONF_DOMAINS, []), - pub_include.get(CONF_ENTITIES, []), - pub_exclude.get(CONF_DOMAINS, []), - pub_exclude.get(CONF_ENTITIES, []), - ) if not base_topic.endswith("/"): base_topic = f"{base_topic}/" diff --git a/homeassistant/components/myq/manifest.json b/homeassistant/components/myq/manifest.json index 10107967056..540f08c0776 100644 --- a/homeassistant/components/myq/manifest.json +++ b/homeassistant/components/myq/manifest.json @@ -2,7 +2,7 @@ "domain": "myq", "name": "MyQ", "documentation": "https://www.home-assistant.io/integrations/myq", - "requirements": ["pymyq==2.0.4"], + "requirements": ["pymyq==2.0.5"], "codeowners": ["@bdraco"], "config_flow": true, "homekit": { diff --git a/homeassistant/components/neato/translations/pt-BR.json b/homeassistant/components/neato/translations/pt-BR.json deleted file mode 100644 index d8a4c453015..00000000000 --- a/homeassistant/components/neato/translations/pt-BR.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "title": "" -} \ No newline at end of file diff --git a/homeassistant/components/nest/translations/ca.json b/homeassistant/components/nest/translations/ca.json index 92fd386c5c3..375b6847f56 100644 --- a/homeassistant/components/nest/translations/ca.json +++ b/homeassistant/components/nest/translations/ca.json @@ -24,7 +24,7 @@ "data": { "code": "Codi PIN" }, - "description": "Per enlla\u00e7ar el teu compte de Nest, [autoritza el vostre compte]({url}). \n\nDespr\u00e9s de l'autoritzaci\u00f3, copia i enganxa el codi pin que es mostra a sota.", + "description": "Per enlla\u00e7ar el teu compte de Nest, [autoritza el teu compte]({url}). 
\n\nDespr\u00e9s de l'autoritzaci\u00f3, copia i enganxa el codi pin que es mostra a sota.", "title": "Enlla\u00e7 amb el compte de Nest" } } diff --git a/homeassistant/components/nest/translations/ja.json b/homeassistant/components/nest/translations/ja.json deleted file mode 100644 index 2efbf376648..00000000000 --- a/homeassistant/components/nest/translations/ja.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "title": "Nest" -} \ No newline at end of file diff --git a/homeassistant/components/netatmo/translations/cs.json b/homeassistant/components/netatmo/translations/cs.json deleted file mode 100644 index bab99c32124..00000000000 --- a/homeassistant/components/netatmo/translations/cs.json +++ /dev/null @@ -1,7 +0,0 @@ -{ - "config": { - "abort": { - "authorize_url_timeout": "\u010casov\u00fd limit autoriza\u010dn\u00edho URL vypr\u0161el" - } - } -} \ No newline at end of file diff --git a/homeassistant/components/netdata/manifest.json b/homeassistant/components/netdata/manifest.json index d1394f5526f..02a5bbddacd 100644 --- a/homeassistant/components/netdata/manifest.json +++ b/homeassistant/components/netdata/manifest.json @@ -2,6 +2,6 @@ "domain": "netdata", "name": "Netdata", "documentation": "https://www.home-assistant.io/integrations/netdata", - "requirements": ["netdata==0.1.2"], + "requirements": ["netdata==0.2.0"], "codeowners": ["@fabaff"] } diff --git a/homeassistant/components/netdata/sensor.py b/homeassistant/components/netdata/sensor.py index 4406734b094..83d70b0742f 100644 --- a/homeassistant/components/netdata/sensor.py +++ b/homeassistant/components/netdata/sensor.py @@ -93,6 +93,7 @@ async def async_setup_platform(hass, config, async_add_entities, discovery_info= ) ) + dev.append(NetdataAlarms(netdata, name, host, port)) async_add_entities(dev, True) @@ -145,6 +146,62 @@ class NetdataSensor(Entity): ) +class NetdataAlarms(Entity): + """Implementation of a Netdata alarm sensor.""" + + def __init__(self, netdata, name, host, port): + """Initialize the Netdata alarm sensor.""" + self.netdata = netdata + self._state = None + self._name = name + self._host = host + self._port = port + + @property + def name(self): + """Return the name of the sensor.""" + return f"{self._name} Alarms" + + @property + def state(self): + """Return the state of the resources.""" + return self._state + + @property + def icon(self): + """Status symbol if type is symbol.""" + if self._state == "ok": + return "mdi:check" + if self._state == "warning": + return "mdi:alert-outline" + if self._state == "critical": + return "mdi:alert" + return "mdi:crosshairs-question" + + @property + def available(self): + """Could the resource be accessed during the last update call.""" + return self.netdata.available + + async def async_update(self): + """Get the latest alarms from Netdata REST API.""" + await self.netdata.async_update() + alarms = self.netdata.api.alarms["alarms"] + self._state = None + number_of_alarms = len(alarms) + number_of_relevant_alarms = number_of_alarms + + _LOGGER.debug("Host %s has %s alarms", self.name, number_of_alarms) + + for alarm in alarms: + if alarms[alarm]["recipient"] == "silent": + number_of_relevant_alarms = number_of_relevant_alarms - 1 + elif alarms[alarm]["status"] == "CRITICAL": + self._state = "critical" + return + self._state = "ok" if number_of_relevant_alarms == 0 else "warning" + + class NetdataData: """The class for handling the data retrieval.""" @@ -159,6 +216,7 @@ class NetdataData: try: await self.api.get_allmetrics() + await self.api.get_alarms() self.available = 
True except NetdataError: _LOGGER.error("Unable to retrieve data from Netdata") diff --git a/homeassistant/components/notify_events/__init__.py b/homeassistant/components/notify_events/__init__.py new file mode 100644 index 00000000000..98702f75cba --- /dev/null +++ b/homeassistant/components/notify_events/__init__.py @@ -0,0 +1,20 @@ +"""The notify_events component.""" +import voluptuous as vol + +from homeassistant.const import CONF_TOKEN +from homeassistant.helpers import discovery +import homeassistant.helpers.config_validation as cv + +from .const import DOMAIN + +CONFIG_SCHEMA = vol.Schema( + {DOMAIN: vol.Schema({vol.Required(CONF_TOKEN): cv.string})}, extra=vol.ALLOW_EXTRA +) + + +def setup(hass, config): + """Set up the notify_events component.""" + + hass.data[DOMAIN] = config[DOMAIN] + discovery.load_platform(hass, "notify", DOMAIN, {}, config) + return True diff --git a/homeassistant/components/notify_events/const.py b/homeassistant/components/notify_events/const.py new file mode 100644 index 00000000000..811e0fca5f5 --- /dev/null +++ b/homeassistant/components/notify_events/const.py @@ -0,0 +1,3 @@ +"""Const for notify_events.""" + +DOMAIN = "notify_events" diff --git a/homeassistant/components/notify_events/manifest.json b/homeassistant/components/notify_events/manifest.json new file mode 100644 index 00000000000..9f0055e0164 --- /dev/null +++ b/homeassistant/components/notify_events/manifest.json @@ -0,0 +1,7 @@ +{ + "domain": "notify_events", + "name": "Notify.Events", + "documentation": "https://www.home-assistant.io/integrations/notify_events", + "codeowners": ["@matrozov", "@papajojo"], + "requirements": ["notify-events==1.0.4"] +} diff --git a/homeassistant/components/notify_events/notify.py b/homeassistant/components/notify_events/notify.py new file mode 100644 index 00000000000..23df01a128b --- /dev/null +++ b/homeassistant/components/notify_events/notify.py @@ -0,0 +1,120 @@ +"""Notify.Events platform for notify component.""" +import logging +import os.path + +from notify_events import Message + +from homeassistant.components.notify import ( + ATTR_DATA, + ATTR_TITLE, + BaseNotificationService, +) +from homeassistant.const import CONF_TOKEN + +from .const import DOMAIN + +ATTR_LEVEL = "level" +ATTR_PRIORITY = "priority" + +ATTR_FILES = "files" +ATTR_IMAGES = "images" + +ATTR_FILE_URL = "url" +ATTR_FILE_PATH = "path" +ATTR_FILE_CONTENT = "content" +ATTR_FILE_NAME = "name" +ATTR_FILE_MIME_TYPE = "mime_type" + +ATTR_FILE_KIND_FILE = "file" +ATTR_FILE_KIND_IMAGE = "image" + +_LOGGER = logging.getLogger(__name__) + + +def get_service(hass, config, discovery_info=None): + """Get the Notify.Events notification service.""" + return NotifyEventsNotificationService(hass.data[DOMAIN][CONF_TOKEN]) + + +class NotifyEventsNotificationService(BaseNotificationService): + """Implement the notification service for Notify.Events.""" + + def __init__(self, token): + """Initialize the service.""" + self.token = token + + def file_exists(self, filename) -> bool: + """Check if a file exists on disk and is in authorized path.""" + if not self.hass.config.is_allowed_path(filename): + return False + return os.path.isfile(filename) + + def attach_file(self, msg: Message, item: dict, kind: str = ATTR_FILE_KIND_FILE): + """Append a file or image to message.""" + file_name = None + mime_type = None + + if ATTR_FILE_NAME in item: + file_name = item[ATTR_FILE_NAME] + + if ATTR_FILE_MIME_TYPE in item: + mime_type = item[ATTR_FILE_MIME_TYPE] + + if ATTR_FILE_URL in item: + if kind == 
ATTR_FILE_KIND_IMAGE: + msg.add_image_from_url(item[ATTR_FILE_URL], file_name, mime_type) + else: + msg.add_file_from_url(item[ATTR_FILE_URL], file_name, mime_type) + elif ATTR_FILE_CONTENT in item: + if kind == ATTR_FILE_KIND_IMAGE: + msg.add_image_from_content( + item[ATTR_FILE_CONTENT], file_name, mime_type + ) + else: + msg.add_file_from_content(item[ATTR_FILE_CONTENT], file_name, mime_type) + elif ATTR_FILE_PATH in item: + file_exists = self.file_exists(item[ATTR_FILE_PATH]) + + if file_exists: + if kind == ATTR_FILE_KIND_IMAGE: + msg.add_image(item[ATTR_FILE_PATH], file_name, mime_type) + else: + msg.add_file(item[ATTR_FILE_PATH], file_name, mime_type) + else: + _LOGGER.error("File does not exist: %s", item[ATTR_FILE_PATH]) + + def prepare_message(self, message, data) -> Message: + """Prepare a message to send.""" + msg = Message(message) + + if ATTR_TITLE in data: + msg.set_title(data[ATTR_TITLE]) + + if ATTR_LEVEL in data: + try: + msg.set_level(data[ATTR_LEVEL]) + except ValueError as error: + _LOGGER.warning("Setting level error: %s", error) + + if ATTR_PRIORITY in data: + try: + msg.set_priority(data[ATTR_PRIORITY]) + except ValueError as error: + _LOGGER.warning("Setting priority error: %s", error) + + if ATTR_IMAGES in data: + for image in data[ATTR_IMAGES]: + self.attach_file(msg, image, ATTR_FILE_KIND_IMAGE) + + if ATTR_FILES in data: + for file in data[ATTR_FILES]: + self.attach_file(msg, file) + + return msg + + def send_message(self, message, **kwargs): + """Send a message.""" + data = kwargs.get(ATTR_DATA) or {} + + msg = self.prepare_message(message, data) + msg.send(self.token) diff --git a/homeassistant/components/numato/manifest.json b/homeassistant/components/numato/manifest.json index 4e9857cd579..8696151eecc 100644 --- a/homeassistant/components/numato/manifest.json +++ b/homeassistant/components/numato/manifest.json @@ -3,6 +3,5 @@ "name": "Numato USB GPIO Expander", "documentation": "https://www.home-assistant.io/integrations/numato", "requirements": ["numato-gpio==0.7.1"], - "codeowners": ["@clssn"], - "quality_scale": "internal" + "codeowners": ["@clssn"] } diff --git a/homeassistant/components/nut/config_flow.py b/homeassistant/components/nut/config_flow.py index ba005f04a6a..5d90d16f157 100644 --- a/homeassistant/components/nut/config_flow.py +++ b/homeassistant/components/nut/config_flow.py @@ -36,14 +36,22 @@ SENSOR_DICT = { for sensor_id, sensor_spec in SENSOR_TYPES.items() } -DATA_SCHEMA = vol.Schema( - { - vol.Optional(CONF_HOST, default=DEFAULT_HOST): str, - vol.Optional(CONF_PORT, default=DEFAULT_PORT): int, - vol.Optional(CONF_USERNAME): str, - vol.Optional(CONF_PASSWORD): str, - } -) + +def _base_schema(discovery_info): + """Generate base schema.""" + base_schema = {} + if not discovery_info: + base_schema.update( + { + vol.Optional(CONF_HOST, default=DEFAULT_HOST): str, + vol.Optional(CONF_PORT, default=DEFAULT_PORT): int, + } + ) + base_schema.update( + {vol.Optional(CONF_USERNAME): str, vol.Optional(CONF_PASSWORD): str} + ) + + return vol.Schema(base_schema) def _resource_schema_base(available_resources, selected_resources): @@ -75,7 +83,7 @@ def _ups_schema(ups_list): async def validate_input(hass: core.HomeAssistant, data): """Validate the user input allows us to connect. - Data has the keys from DATA_SCHEMA with values provided by the user. + Data has the keys from _base_schema with values provided by the user. 
""" host = data[CONF_HOST] @@ -113,9 +121,21 @@ class ConfigFlow(config_entries.ConfigFlow, domain=DOMAIN): """Initialize the nut config flow.""" self.nut_config = {} self.available_resources = {} + self.discovery_info = {} self.ups_list = None self.title = None + async def async_step_zeroconf(self, discovery_info): + """Prepare configuration for a discovered nut device.""" + self.discovery_info = discovery_info + await self._async_handle_discovery_without_unique_id() + # pylint: disable=no-member # https://github.com/PyCQA/pylint/issues/3167 + self.context["title_placeholders"] = { + CONF_PORT: discovery_info.get(CONF_PORT, DEFAULT_PORT), + CONF_HOST: discovery_info[CONF_HOST], + } + return await self.async_step_user() + async def async_step_import(self, user_input=None): """Handle the import.""" errors = {} @@ -129,13 +149,20 @@ class ConfigFlow(config_entries.ConfigFlow, domain=DOMAIN): return self.async_create_entry(title=title, data=user_input) return self.async_show_form( - step_id="user", data_schema=DATA_SCHEMA, errors=errors + step_id="user", data_schema=_base_schema({}), errors=errors ) async def async_step_user(self, user_input=None): """Handle the user input.""" errors = {} if user_input is not None: + if self.discovery_info: + user_input.update( + { + CONF_HOST: self.discovery_info[CONF_HOST], + CONF_PORT: self.discovery_info.get(CONF_PORT, DEFAULT_PORT), + } + ) info, errors = await self._async_validate_or_error(user_input) if not errors: @@ -150,7 +177,7 @@ class ConfigFlow(config_entries.ConfigFlow, domain=DOMAIN): return await self.async_step_resources() return self.async_show_form( - step_id="user", data_schema=DATA_SCHEMA, errors=errors + step_id="user", data_schema=_base_schema(self.discovery_info), errors=errors ) async def async_step_ups(self, user_input=None): diff --git a/homeassistant/components/nut/manifest.json b/homeassistant/components/nut/manifest.json index 226250b9a52..693b225c6dd 100644 --- a/homeassistant/components/nut/manifest.json +++ b/homeassistant/components/nut/manifest.json @@ -4,5 +4,6 @@ "documentation": "https://www.home-assistant.io/integrations/nut", "requirements": ["pynut2==2.1.2"], "codeowners": ["@bdraco"], - "config_flow": true + "config_flow": true, + "zeroconf": ["_nut._tcp.local."] } diff --git a/homeassistant/components/nws/translations/fr.json b/homeassistant/components/nws/translations/fr.json index 59fd1d9fedb..8b1b01ec74b 100644 --- a/homeassistant/components/nws/translations/fr.json +++ b/homeassistant/components/nws/translations/fr.json @@ -14,7 +14,8 @@ "latitude": "Latitude", "longitude": "Longitude", "station": "Code de la station METAR" - } + }, + "title": "Se connecter au National Weather Service" } } } diff --git a/homeassistant/components/nws/translations/no.json b/homeassistant/components/nws/translations/no.json index bd14f1bb653..f26abdeaa2e 100644 --- a/homeassistant/components/nws/translations/no.json +++ b/homeassistant/components/nws/translations/no.json @@ -10,7 +10,7 @@ "step": { "user": { "data": { - "api_key": "API-n\u00f8kkel (e-post)", + "api_key": "API-n\u00f8kkel", "latitude": "Breddegrad", "longitude": "Lengdegrad", "station": "METAR stasjonskode" diff --git a/homeassistant/components/nws/weather.py b/homeassistant/components/nws/weather.py index 7e1ca37ab6b..f7890190490 100644 --- a/homeassistant/components/nws/weather.py +++ b/homeassistant/components/nws/weather.py @@ -190,17 +190,16 @@ class NWSWeather(WeatherEntity): @property def wind_speed(self): """Return the current windspeed.""" - wind_m_s = None 
+ wind_km_hr = None if self.observation: - wind_m_s = self.observation.get("windSpeed") - if wind_m_s is None: + wind_km_hr = self.observation.get("windSpeed") + if wind_km_hr is None: return None - wind_m_hr = wind_m_s * 3600 if self.is_metric: - wind = convert_distance(wind_m_hr, LENGTH_METERS, LENGTH_KILOMETERS) + wind = wind_km_hr else: - wind = convert_distance(wind_m_hr, LENGTH_METERS, LENGTH_MILES) + wind = convert_distance(wind_km_hr, LENGTH_KILOMETERS, LENGTH_MILES) return round(wind) @property diff --git a/homeassistant/components/nx584/alarm_control_panel.py b/homeassistant/components/nx584/alarm_control_panel.py index bc2c5034ed1..23dbfbb090c 100644 --- a/homeassistant/components/nx584/alarm_control_panel.py +++ b/homeassistant/components/nx584/alarm_control_panel.py @@ -1,4 +1,5 @@ """Support for NX584 alarm control panels.""" +from datetime import timedelta import logging from nx584 import client @@ -20,13 +21,19 @@ from homeassistant.const import ( STATE_ALARM_DISARMED, STATE_ALARM_TRIGGERED, ) -import homeassistant.helpers.config_validation as cv +from homeassistant.exceptions import PlatformNotReady +from homeassistant.helpers import config_validation as cv, entity_platform _LOGGER = logging.getLogger(__name__) +SCAN_INTERVAL = timedelta(seconds=10) + DEFAULT_HOST = "localhost" DEFAULT_NAME = "NX584" DEFAULT_PORT = 5007 +SERVICE_BYPASS_ZONE = "bypass_zone" +SERVICE_UNBYPASS_ZONE = "unbypass_zone" +ATTR_ZONE = "zone" PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( { @@ -37,7 +44,7 @@ PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( ) -def setup_platform(hass, config, add_entities, discovery_info=None): +async def async_setup_platform(hass, config, async_add_entities, discovery_info=None): """Set up the NX584 platform.""" name = config.get(CONF_NAME) host = config.get(CONF_HOST) @@ -46,27 +53,39 @@ def setup_platform(hass, config, add_entities, discovery_info=None): url = f"http://{host}:{port}" try: - add_entities([NX584Alarm(hass, url, name)]) + alarm_client = client.Client(url) + await hass.async_add_executor_job(alarm_client.list_zones) except requests.exceptions.ConnectionError as ex: - _LOGGER.error("Unable to connect to NX584: %s", str(ex)) - return + _LOGGER.error( + "Unable to connect to %(host)s: %(reason)s", dict(host=url, reason=ex), + ) + raise PlatformNotReady + + entity = NX584Alarm(name, alarm_client, url) + async_add_entities([entity]) + + platform = entity_platform.current_platform.get() + + platform.async_register_entity_service( + SERVICE_BYPASS_ZONE, {vol.Required(ATTR_ZONE): cv.positive_int}, "alarm_bypass", + ) + + platform.async_register_entity_service( + SERVICE_UNBYPASS_ZONE, + {vol.Required(ATTR_ZONE): cv.positive_int}, + "alarm_unbypass", + ) class NX584Alarm(alarm.AlarmControlPanelEntity): """Representation of a NX584-based alarm panel.""" - def __init__(self, hass, url, name): + def __init__(self, name, alarm_client, url): """Init the nx584 alarm panel.""" - - self._hass = hass self._name = name - self._url = url - self._alarm = client.Client(self._url) - # Do an initial list operation so that we will try to actually - # talk to the API and trigger a requests exception for setup_platform() - # to catch - self._alarm.list_zones() self._state = None + self._alarm = alarm_client + self._url = url @property def name(self): @@ -137,3 +156,11 @@ class NX584Alarm(alarm.AlarmControlPanelEntity): def alarm_arm_away(self, code=None): """Send arm away command.""" self._alarm.arm("exit") + + def alarm_bypass(self, zone): + """Send bypass command.""" + 
self._alarm.set_bypass(zone, True) + + def alarm_unbypass(self, zone): + """Send unbypass command.""" + self._alarm.set_bypass(zone, False) diff --git a/homeassistant/components/nx584/binary_sensor.py b/homeassistant/components/nx584/binary_sensor.py index d12f337c171..127ce02b371 100644 --- a/homeassistant/components/nx584/binary_sensor.py +++ b/homeassistant/components/nx584/binary_sensor.py @@ -101,6 +101,11 @@ class NX584ZoneSensor(BinarySensorEntity): # True means "faulted" or "open" or "abnormal state" return self._zone["state"] + @property + def device_state_attributes(self): + """Return the state attributes.""" + return {"zone_number": self._zone["number"]} + class NX584Watcher(threading.Thread): """Event listener thread to process NX584 events.""" diff --git a/homeassistant/components/nx584/manifest.json b/homeassistant/components/nx584/manifest.json index 3246280b63d..57676870ce7 100644 --- a/homeassistant/components/nx584/manifest.json +++ b/homeassistant/components/nx584/manifest.json @@ -2,6 +2,6 @@ "domain": "nx584", "name": "NX584", "documentation": "https://www.home-assistant.io/integrations/nx584", - "requirements": ["pynx584==0.4"], + "requirements": ["pynx584==0.5"], "codeowners": [] } diff --git a/homeassistant/components/nx584/services.yaml b/homeassistant/components/nx584/services.yaml new file mode 100644 index 00000000000..13f5da8db25 --- /dev/null +++ b/homeassistant/components/nx584/services.yaml @@ -0,0 +1,21 @@ +# Describes the format for available nx584 services + +bypass_zone: + description: Bypass a zone. + fields: + entity_id: + description: Name of the alarm control panel whose state has to be updated. + example: "alarm_control_panel.downstairs" + zone: + description: The number of the zone to be bypassed. + example: "1" + +unbypass_zone: + description: Unbypass a zone. + fields: + entity_id: + description: Name of the alarm control panel whose state has to be updated. + example: "alarm_control_panel.downstairs" + zone: + description: The number of the zone to be unbypassed. 
+ example: "1" diff --git a/homeassistant/components/onboarding/views.py b/homeassistant/components/onboarding/views.py index f371c7aa9cb..a2a4fb15fd7 100644 --- a/homeassistant/components/onboarding/views.py +++ b/homeassistant/components/onboarding/views.py @@ -4,9 +4,10 @@ import asyncio import voluptuous as vol from homeassistant.auth.const import GROUP_ID_ADMIN +from homeassistant.components.auth import indieauth from homeassistant.components.http.data_validator import RequestDataValidator from homeassistant.components.http.view import HomeAssistantView -from homeassistant.const import HTTP_FORBIDDEN +from homeassistant.const import HTTP_BAD_REQUEST, HTTP_FORBIDDEN from homeassistant.core import callback from .const import ( @@ -168,7 +169,9 @@ class IntegrationOnboardingView(_BaseOnboardingView): name = "api:onboarding:integration" step = STEP_INTEGRATION - @RequestDataValidator(vol.Schema({vol.Required("client_id"): str})) + @RequestDataValidator( + vol.Schema({vol.Required("client_id"): str, vol.Required("redirect_uri"): str}) + ) async def post(self, request, data): """Handle token creation.""" hass = request.app["hass"] @@ -182,6 +185,14 @@ class IntegrationOnboardingView(_BaseOnboardingView): await self._async_mark_done(hass) + # Validate client ID and redirect uri + if not await indieauth.verify_redirect_uri( + request.app["hass"], data["client_id"], data["redirect_uri"] + ): + return self.json_message( + "invalid client id or redirect uri", HTTP_BAD_REQUEST + ) + # Return authorization code so we can redirect user and log them in auth_code = hass.components.auth.create_auth_code(data["client_id"], user) return self.json({"auth_code": auth_code}) diff --git a/homeassistant/components/onvif/translations/es.json b/homeassistant/components/onvif/translations/es.json index dd65094838d..af283fe038b 100644 --- a/homeassistant/components/onvif/translations/es.json +++ b/homeassistant/components/onvif/translations/es.json @@ -2,7 +2,7 @@ "config": { "abort": { "already_configured": "El dispositivo ONVIF ya est\u00e1 configurado.", - "already_in_progress": "El flujo de configuraci\u00f3n para el dispositivo ONVIF ya est\u00e1 en progreso.", + "already_in_progress": "El flujo de configuraci\u00f3n para el dispositivo ONVIF ya est\u00e1 en marcha.", "no_h264": "No hab\u00eda transmisiones H264 disponibles. Verifique la configuraci\u00f3n del perfil en su dispositivo.", "no_mac": "No se pudo configurar una identificaci\u00f3n \u00fanica para el dispositivo ONVIF.", "onvif_error": "Error de configuraci\u00f3n del dispositivo ONVIF. Revise los registros para m\u00e1s informaci\u00f3n." diff --git a/homeassistant/components/onvif/translations/fr.json b/homeassistant/components/onvif/translations/fr.json index 87b54bbeb97..51302258089 100644 --- a/homeassistant/components/onvif/translations/fr.json +++ b/homeassistant/components/onvif/translations/fr.json @@ -4,6 +4,7 @@ "already_configured": "Le p\u00e9riph\u00e9rique ONVIF est d\u00e9j\u00e0 configur\u00e9.", "already_in_progress": "Le flux de configuration pour le p\u00e9riph\u00e9rique ONVIF est d\u00e9j\u00e0 en cours.", "no_h264": "Aucun flux H264 n'\u00e9tait disponible. V\u00e9rifiez la configuration du profil sur votre appareil.", + "no_mac": "Impossible de configurer l'ID unique pour le p\u00e9riph\u00e9rique ONVIF.", "onvif_error": "Erreur lors de la configuration du p\u00e9riph\u00e9rique ONVIF. Consultez les journaux pour plus d'informations." 
}, "error": { diff --git a/homeassistant/components/opencv/manifest.json b/homeassistant/components/opencv/manifest.json index 37398c61686..ed8fd9c662c 100644 --- a/homeassistant/components/opencv/manifest.json +++ b/homeassistant/components/opencv/manifest.json @@ -2,6 +2,6 @@ "domain": "opencv", "name": "OpenCV", "documentation": "https://www.home-assistant.io/integrations/opencv", - "requirements": ["numpy==1.18.4", "opencv-python-headless==4.2.0.32"], + "requirements": ["numpy==1.19.0", "opencv-python-headless==4.2.0.32"], "codeowners": [] } diff --git a/homeassistant/components/opentherm_gw/__init__.py b/homeassistant/components/opentherm_gw/__init__.py index c6cf14bfdce..71fd104bd2f 100644 --- a/homeassistant/components/opentherm_gw/__init__.py +++ b/homeassistant/components/opentherm_gw/__init__.py @@ -43,6 +43,7 @@ from .const import ( SERVICE_SET_CONTROL_SETPOINT, SERVICE_SET_GPIO_MODE, SERVICE_SET_HOT_WATER_OVRD, + SERVICE_SET_HOT_WATER_SETPOINT, SERVICE_SET_LED_MODE, SERVICE_SET_MAX_MOD, SERVICE_SET_OAT, @@ -145,6 +146,7 @@ def register_services(hass): ), } ) + service_set_hot_water_setpoint_schema = service_set_control_setpoint_schema service_set_hot_water_ovrd_schema = vol.Schema( { vol.Required(ATTR_GW_ID): vol.All( @@ -263,6 +265,21 @@ def register_services(hass): service_set_hot_water_ovrd_schema, ) + async def set_dhw_setpoint(call): + """Set the domestic hot water setpoint on the OpenTherm Gateway.""" + gw_dev = hass.data[DATA_OPENTHERM_GW][DATA_GATEWAYS][call.data[ATTR_GW_ID]] + gw_var = gw_vars.DATA_DHW_SETPOINT + value = await gw_dev.gateway.set_dhw_setpoint(call.data[ATTR_TEMPERATURE]) + gw_dev.status.update({gw_var: value}) + async_dispatcher_send(hass, gw_dev.update_signal, gw_dev.status) + + hass.services.async_register( + DOMAIN, + SERVICE_SET_HOT_WATER_SETPOINT, + set_dhw_setpoint, + service_set_hot_water_setpoint_schema, + ) + async def set_device_clock(call): """Set the clock on the OpenTherm Gateway.""" gw_dev = hass.data[DATA_OPENTHERM_GW][DATA_GATEWAYS][call.data[ATTR_GW_ID]] diff --git a/homeassistant/components/opentherm_gw/config_flow.py b/homeassistant/components/opentherm_gw/config_flow.py index dc1b943686f..4afc508b8ee 100644 --- a/homeassistant/components/opentherm_gw/config_flow.py +++ b/homeassistant/components/opentherm_gw/config_flow.py @@ -68,9 +68,9 @@ class OpenThermGwConfigFlow(config_entries.ConfigFlow, domain=DOMAIN): return self._show_form() - async def async_step_user(self, info=None): + async def async_step_user(self, user_input=None): """Handle manual initiation of the config flow.""" - return await self.async_step_init(info) + return await self.async_step_init(user_input) async def async_step_import(self, import_config): """ diff --git a/homeassistant/components/opentherm_gw/const.py b/homeassistant/components/opentherm_gw/const.py index b8d427ba193..14b54366b4a 100644 --- a/homeassistant/components/opentherm_gw/const.py +++ b/homeassistant/components/opentherm_gw/const.py @@ -29,6 +29,7 @@ DOMAIN = "opentherm_gw" SERVICE_RESET_GATEWAY = "reset_gateway" SERVICE_SET_CLOCK = "set_clock" SERVICE_SET_CONTROL_SETPOINT = "set_control_setpoint" +SERVICE_SET_HOT_WATER_SETPOINT = "set_hot_water_setpoint" SERVICE_SET_HOT_WATER_OVRD = "set_hot_water_ovrd" SERVICE_SET_GPIO_MODE = "set_gpio_mode" SERVICE_SET_LED_MODE = "set_led_mode" diff --git a/homeassistant/components/opentherm_gw/services.yaml b/homeassistant/components/opentherm_gw/services.yaml index 227c6d423bc..f60648ee8d4 100644 --- a/homeassistant/components/opentherm_gw/services.yaml +++ 
b/homeassistant/components/opentherm_gw/services.yaml @@ -52,6 +52,20 @@ set_hot_water_ovrd: state, or "A" to disable the override. example: "1" +set_hot_water_setpoint: + description: > + Set the domestic hot water setpoint on the gateway. + fields: + gateway_id: + description: The gateway_id of the OpenTherm Gateway. + example: "opentherm_gateway" + temperature: + description: > + The domestic hot water setpoint to set on the gateway. Not all boilers support this feature. + Values between 0 and 90 are accepted, but not all boilers support this range. + Check the values of the slave_dhw_min_setp and slave_dhw_max_setp sensors to see the supported range on your boiler. + example: "60" + set_gpio_mode: description: Change the function of the GPIO pins of the gateway. fields: diff --git a/homeassistant/components/openuv/translations/ar.json b/homeassistant/components/openuv/translations/ar.json deleted file mode 100644 index 968addd26bf..00000000000 --- a/homeassistant/components/openuv/translations/ar.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "title": "OpenUV" -} \ No newline at end of file diff --git a/homeassistant/components/openuv/translations/fa.json b/homeassistant/components/openuv/translations/fa.json deleted file mode 100644 index 968addd26bf..00000000000 --- a/homeassistant/components/openuv/translations/fa.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "title": "OpenUV" -} \ No newline at end of file diff --git a/homeassistant/components/openuv/translations/no.json b/homeassistant/components/openuv/translations/no.json index 6ef1d389794..4b9f875a8b9 100644 --- a/homeassistant/components/openuv/translations/no.json +++ b/homeassistant/components/openuv/translations/no.json @@ -10,7 +10,7 @@ "step": { "user": { "data": { - "api_key": "OpenUV API-n\u00f8kkel", + "api_key": "API-n\u00f8kkel", "elevation": "Elevasjon", "latitude": "Breddegrad", "longitude": "Lengdegrad" diff --git a/homeassistant/components/owntracks/device_tracker.py b/homeassistant/components/owntracks/device_tracker.py index b1204082887..d4a5399a0ff 100644 --- a/homeassistant/components/owntracks/device_tracker.py +++ b/homeassistant/components/owntracks/device_tracker.py @@ -24,6 +24,19 @@ _LOGGER = logging.getLogger(__name__) async def async_setup_entry(hass, entry, async_add_entities): """Set up OwnTracks based off an entry.""" + # Restore previously loaded devices + dev_reg = await device_registry.async_get_registry(hass) + dev_ids = { + identifier[1] + for device in dev_reg.devices.values() + for identifier in device.identifiers + if identifier[0] == OT_DOMAIN + } + + entities = [] + for dev_id in dev_ids: + entity = hass.data[OT_DOMAIN]["devices"][dev_id] = OwnTracksEntity(dev_id) + entities.append(entity) @callback def _receive_data(dev_id, **data): @@ -39,24 +52,8 @@ async def async_setup_entry(hass, entry, async_add_entities): hass.data[OT_DOMAIN]["context"].set_async_see(_receive_data) - # Restore previously loaded devices - dev_reg = await device_registry.async_get_registry(hass) - dev_ids = { - identifier[1] - for device in dev_reg.devices.values() - for identifier in device.identifiers - if identifier[0] == OT_DOMAIN - } - - if not dev_ids: - return True - - entities = [] - for dev_id in dev_ids: - entity = hass.data[OT_DOMAIN]["devices"][dev_id] = OwnTracksEntity(dev_id) - entities.append(entity) - - async_add_entities(entities) + if entities: + async_add_entities(entities) return True diff --git a/homeassistant/components/owntracks/translations/it.json b/homeassistant/components/owntracks/translations/it.json 
index 1c2af9e8d73..a198bc33fda 100644 --- a/homeassistant/components/owntracks/translations/it.json +++ b/homeassistant/components/owntracks/translations/it.json @@ -4,7 +4,7 @@ "one_instance_allowed": "\u00c8 necessaria una sola istanza." }, "create_entry": { - "default": "\n\nSu Android, apri l'[app OwnTracks]({android_url}), vai su preferenze -> connessione. Modifica le seguenti impostazioni: \n - Modalit\u00e0: HTTP privato \n - Host: {webhook_url} \n - Identificazione: \n - Nome utente: `` \n - ID dispositivo: ``\n\nSu iOS, apri l'[app OwnTracks]({ios_url}), tocca l'icona (i) in alto a sinistra -> impostazioni. Modifica le seguenti impostazioni: \n - Modalit\u00e0: HTTP \n - URL: {webhook_url} \n - Attiva autenticazione \n - UserID: `` \n\n {secret} \n \n Vedi [la documentazione]({docs_url}) per maggiori informazioni." + "default": "\n\nSu Android, apri l'[app OwnTracks]({android_url}), vai su preferenze -> connessione. Modifica le seguenti impostazioni: \n - Modalit\u00e0: HTTP privato \n - Host: {webhook_url} \n - Identificazione: \n - Nome utente: `''` \n - ID dispositivo: `''`\n\nSu iOS, apri l'[app OwnTracks]({ios_url}), tocca l'icona (i) in alto a sinistra -> impostazioni. Modifica le seguenti impostazioni: \n - Modalit\u00e0: HTTP \n - URL: {webhook_url} \n - Attiva autenticazione \n - UserID: `''` \n\n {secret} \n \n Vedi [la documentazione]({docs_url}) per maggiori informazioni." }, "step": { "user": { diff --git a/homeassistant/components/owntracks/translations/ko.json b/homeassistant/components/owntracks/translations/ko.json index 107e73b98a9..c1a5ce50e52 100644 --- a/homeassistant/components/owntracks/translations/ko.json +++ b/homeassistant/components/owntracks/translations/ko.json @@ -4,7 +4,7 @@ "one_instance_allowed": "\ud558\ub098\uc758 \uc778\uc2a4\ud134\uc2a4\ub9cc \ud544\uc694\ud569\ub2c8\ub2e4." }, "create_entry": { - "default": "\n\nAndroid \uc778 \uacbd\uc6b0, [OwnTracks \uc571]({android_url}) \uc744 \uc5f4\uace0 preferences -> connection \uc73c\ub85c \uc774\ub3d9\ud558\uc5ec \ub2e4\uc74c\uacfc \uac19\uc774 \uc124\uc815\ud574\uc8fc\uc138\uc694:\n - Mode: Private HTTP\n - Host: {webhook_url}\n - Identification:\n - Username: ``\n - Device ID: ``\n\niOS \uc778 \uacbd\uc6b0, [OwnTracks \uc571]({ios_url}) \uc744 \uc5f4\uace0 \uc67c\ucabd \uc0c1\ub2e8\uc758 (i) \uc544\uc774\ucf58\uc744 \ud0ed\ud558\uc5ec \uc124\uc815\uc73c\ub85c \uc774\ub3d9\ud558\uc5ec \ub2e4\uc74c\uacfc \uac19\uc774 \uc124\uc815\ud574\uc8fc\uc138\uc694:\n - Mode: HTTP\n - URL: {webhook_url}\n - Turn on authentication\n - UserID: ``\n\n{secret} \n \n\uc790\uc138\ud55c \uc815\ubcf4\ub294 [\uc548\ub0b4]({docs_url}) \ub97c \ucc38\uc870\ud574\uc8fc\uc138\uc694." + "default": "\n\nAndroid \uc778 \uacbd\uc6b0, [OwnTracks \uc571]({android_url}) \uc744 \uc5f4\uace0 preferences -> connection \uc73c\ub85c \uc774\ub3d9\ud558\uc5ec \ub2e4\uc74c\uacfc \uac19\uc774 \uc124\uc815\ud574\uc8fc\uc138\uc694:\n - Mode: Private HTTP\n - Host: {webhook_url}\n - Identification:\n - Username: `''`\n - Device ID: `''`\n\niOS \uc778 \uacbd\uc6b0, [OwnTracks \uc571]({ios_url}) \uc744 \uc5f4\uace0 \uc67c\ucabd \uc0c1\ub2e8\uc758 (i) \uc544\uc774\ucf58\uc744 \ud0ed\ud558\uc5ec \uc124\uc815\uc73c\ub85c \uc774\ub3d9\ud558\uc5ec \ub2e4\uc74c\uacfc \uac19\uc774 \uc124\uc815\ud574\uc8fc\uc138\uc694:\n - Mode: HTTP\n - URL: {webhook_url}\n - Turn on authentication\n - UserID: `''`\n\n{secret} \n \n\uc790\uc138\ud55c \uc815\ubcf4\ub294 [\uc548\ub0b4]({docs_url}) \ub97c \ucc38\uc870\ud574\uc8fc\uc138\uc694." 
}, "step": { "user": { diff --git a/homeassistant/components/owntracks/translations/nn.json b/homeassistant/components/owntracks/translations/nn.json deleted file mode 100644 index 0e0a71c8968..00000000000 --- a/homeassistant/components/owntracks/translations/nn.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "title": "OwnTracks" -} \ No newline at end of file diff --git a/homeassistant/components/panel_custom/__init__.py b/homeassistant/components/panel_custom/__init__.py index 82572d7396c..6b8079f577f 100644 --- a/homeassistant/components/panel_custom/__init__.py +++ b/homeassistant/components/panel_custom/__init__.py @@ -23,8 +23,6 @@ CONF_TRUST_EXTERNAL_SCRIPT = "trust_external_script" CONF_URL_EXCLUSIVE_GROUP = "url_exclusive_group" CONF_REQUIRE_ADMIN = "require_admin" -MSG_URL_CONFLICT = "Pass in only one of webcomponent_path, module_url or js_url" - DEFAULT_EMBED_IFRAME = False DEFAULT_TRUST_EXTERNAL = False @@ -33,39 +31,48 @@ LEGACY_URL = "/api/panel_custom/{}" PANEL_DIR = "panels" + +def url_validator(value): + """Validate required urls are specified.""" + has_js_url = CONF_JS_URL in value + has_html_url = CONF_WEBCOMPONENT_PATH in value + has_module_url = CONF_MODULE_URL in value + + if has_html_url and (has_js_url or has_module_url): + raise vol.Invalid("You cannot specify other urls besides a webcomponent path") + + return value + + CONFIG_SCHEMA = vol.Schema( { DOMAIN: vol.All( cv.ensure_list, [ - vol.Schema( - { - vol.Required(CONF_COMPONENT_NAME): cv.string, - vol.Optional(CONF_SIDEBAR_TITLE): cv.string, - vol.Optional(CONF_SIDEBAR_ICON, default=DEFAULT_ICON): cv.icon, - vol.Optional(CONF_URL_PATH): cv.string, - vol.Optional(CONF_CONFIG): dict, - vol.Exclusive( - CONF_WEBCOMPONENT_PATH, - CONF_URL_EXCLUSIVE_GROUP, - msg=MSG_URL_CONFLICT, - ): cv.string, - vol.Exclusive( - CONF_JS_URL, CONF_URL_EXCLUSIVE_GROUP, msg=MSG_URL_CONFLICT - ): cv.string, - vol.Exclusive( - CONF_MODULE_URL, - CONF_URL_EXCLUSIVE_GROUP, - msg=MSG_URL_CONFLICT, - ): cv.string, - vol.Optional( - CONF_EMBED_IFRAME, default=DEFAULT_EMBED_IFRAME - ): cv.boolean, - vol.Optional( - CONF_TRUST_EXTERNAL_SCRIPT, default=DEFAULT_TRUST_EXTERNAL - ): cv.boolean, - vol.Optional(CONF_REQUIRE_ADMIN, default=False): cv.boolean, - } + vol.All( + vol.Schema( + { + vol.Required(CONF_COMPONENT_NAME): cv.string, + vol.Optional(CONF_SIDEBAR_TITLE): cv.string, + vol.Optional( + CONF_SIDEBAR_ICON, default=DEFAULT_ICON + ): cv.icon, + vol.Optional(CONF_URL_PATH): cv.string, + vol.Optional(CONF_CONFIG): dict, + vol.Optional(CONF_WEBCOMPONENT_PATH,): cv.string, + vol.Optional(CONF_JS_URL,): cv.string, + vol.Optional(CONF_MODULE_URL,): cv.string, + vol.Optional( + CONF_EMBED_IFRAME, default=DEFAULT_EMBED_IFRAME + ): cv.boolean, + vol.Optional( + CONF_TRUST_EXTERNAL_SCRIPT, + default=DEFAULT_TRUST_EXTERNAL, + ): cv.boolean, + vol.Optional(CONF_REQUIRE_ADMIN, default=False): cv.boolean, + } + ), + url_validator, ) ], ) @@ -102,12 +109,14 @@ async def async_register_panel( """Register a new custom panel.""" if js_url is None and html_url is None and module_url is None: raise ValueError("Either js_url, module_url or html_url is required.") - if (js_url and html_url) or (module_url and html_url): - raise ValueError("Pass in only one of JS url, Module url or HTML url.") - + if html_url and (js_url or module_url): + raise ValueError("You cannot specify other paths with an HTML url") if config is not None and not isinstance(config, dict): raise ValueError("Config needs to be a dictionary.") + if html_url: + _LOGGER.warning("HTML custom panels 
have been deprecated") + custom_panel_config = { "name": webcomponent_name, "embed_iframe": embed_iframe, @@ -146,6 +155,8 @@ async def async_setup(hass, config): if DOMAIN not in config: return True + seen = set() + for panel in config[DOMAIN]: name = panel[CONF_COMPONENT_NAME] @@ -160,22 +171,31 @@ async def async_setup(hass, config): "require_admin": panel[CONF_REQUIRE_ADMIN], } - panel_path = panel.get(CONF_WEBCOMPONENT_PATH) - - if panel_path is None: - panel_path = hass.config.path(PANEL_DIR, f"{name}.html") - if CONF_JS_URL in panel: kwargs["js_url"] = panel[CONF_JS_URL] - elif CONF_MODULE_URL in panel: + if CONF_MODULE_URL in panel: kwargs["module_url"] = panel[CONF_MODULE_URL] - elif not await hass.async_add_job(os.path.isfile, panel_path): - _LOGGER.error("Unable to find webcomponent for %s: %s", name, panel_path) - continue + if CONF_MODULE_URL not in panel and CONF_JS_URL not in panel: + if name in seen: + _LOGGER.warning( + "Got HTML panel with duplicate name %s. Not registering", name + ) + continue + + seen.add(name) + panel_path = panel.get(CONF_WEBCOMPONENT_PATH) + + if panel_path is None: + panel_path = hass.config.path(PANEL_DIR, f"{name}.html") + + if not await hass.async_add_executor_job(os.path.isfile, panel_path): + _LOGGER.error( + "Unable to find webcomponent for %s: %s", name, panel_path + ) + continue - else: url = LEGACY_URL.format(name) hass.http.register_static_path(url, panel_path) kwargs["html_url"] = url diff --git a/homeassistant/components/pi_hole/__init__.py b/homeassistant/components/pi_hole/__init__.py index a0d6c5da6d1..eba9053183b 100644 --- a/homeassistant/components/pi_hole/__init__.py +++ b/homeassistant/components/pi_hole/__init__.py @@ -17,10 +17,12 @@ from homeassistant.const import ( from homeassistant.exceptions import ConfigEntryNotReady from homeassistant.helpers import config_validation as cv from homeassistant.helpers.aiohttp_client import async_get_clientsession -from homeassistant.util import Throttle +from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed from .const import ( CONF_LOCATION, + DATA_KEY_API, + DATA_KEY_COORDINATOR, DEFAULT_LOCATION, DEFAULT_NAME, DEFAULT_SSL, @@ -34,7 +36,7 @@ from .const import ( SERVICE_ENABLE_ATTR_NAME, ) -LOGGER = logging.getLogger(__name__) +_LOGGER = logging.getLogger(__name__) PI_HOLE_SCHEMA = vol.Schema( vol.All( @@ -56,7 +58,7 @@ CONFIG_SCHEMA = vol.Schema( async def async_setup(hass, config): - """Set up the pi_hole integration.""" + """Set up the Pi_hole integration.""" service_disable_schema = vol.Schema( vol.All( @@ -82,37 +84,36 @@ async def async_setup(hass, config): ) ) - def get_pi_hole_from_name(name): - pi_hole = hass.data[DOMAIN].get(name) - if pi_hole is None: - LOGGER.error("Unknown Pi-hole name %s", name) + def get_api_from_name(name): + """Get Pi-hole API object from user configured name.""" + hole_data = hass.data[DOMAIN].get(name) + if hole_data is None: + _LOGGER.error("Unknown Pi-hole name %s", name) return None - if not pi_hole.api.api_token: - LOGGER.error( + api = hole_data[DATA_KEY_API] + if not api.api_token: + _LOGGER.error( "Pi-hole %s must have an api_key provided in configuration to be enabled", name, ) return None - return pi_hole + return api async def disable_service_handler(call): - """Handle the service call to disable a single Pi-Hole or all configured Pi-Holes.""" + """Handle the service call to disable a single Pi-hole or all configured Pi-holes.""" duration = call.data[SERVICE_DISABLE_ATTR_DURATION].total_seconds() name = 
call.data.get(SERVICE_DISABLE_ATTR_NAME) async def do_disable(name): - """Disable the named Pi-Hole.""" - pi_hole = get_pi_hole_from_name(name) - if pi_hole is None: + """Disable the named Pi-hole.""" + api = get_api_from_name(name) + if api is None: return - LOGGER.debug( - "Disabling Pi-hole '%s' (%s) for %d seconds", - name, - pi_hole.api.host, - duration, + _LOGGER.debug( + "Disabling Pi-hole '%s' (%s) for %d seconds", name, api.host, duration, ) - await pi_hole.api.disable(duration) + await api.disable(duration) if name is not None: await do_disable(name) @@ -121,18 +122,18 @@ async def async_setup(hass, config): await do_disable(name) async def enable_service_handler(call): - """Handle the service call to enable a single Pi-Hole or all configured Pi-Holes.""" + """Handle the service call to enable a single Pi-hole or all configured Pi-holes.""" name = call.data.get(SERVICE_ENABLE_ATTR_NAME) async def do_enable(name): - """Enable the named Pi-Hole.""" - pi_hole = get_pi_hole_from_name(name) - if pi_hole is None: + """Enable the named Pi-hole.""" + api = get_api_from_name(name) + if api is None: return - LOGGER.debug("Enabling Pi-hole '%s' (%s)", name, pi_hole.api.host) - await pi_hole.api.enable() + _LOGGER.debug("Enabling Pi-hole '%s' (%s)", name, api.host) + await api.enable() if name is not None: await do_enable(name) @@ -160,27 +161,37 @@ async def async_setup(hass, config): location = entry.data[CONF_LOCATION] api_key = entry.data.get(CONF_API_KEY) - LOGGER.debug("Setting up %s integration with host %s", DOMAIN, host) + _LOGGER.debug("Setting up %s integration with host %s", DOMAIN, host) try: session = async_get_clientsession(hass, verify_tls) - pi_hole = PiHoleData( - Hole( - host, - hass.loop, - session, - location=location, - tls=use_tls, - api_token=api_key, - ), - name, + api = Hole( + host, hass.loop, session, location=location, tls=use_tls, api_token=api_key, ) - await pi_hole.async_update() - hass.data[DOMAIN][name] = pi_hole + await api.get_data() except HoleError as ex: - LOGGER.warning("Failed to connect: %s", ex) + _LOGGER.warning("Failed to connect: %s", ex) raise ConfigEntryNotReady + async def async_update_data(): + """Fetch data from API endpoint.""" + try: + await api.get_data() + except HoleError as err: + raise UpdateFailed(f"Failed to communicate with API: {err}") + + coordinator = DataUpdateCoordinator( + hass, + _LOGGER, + name=name, + update_method=async_update_data, + update_interval=MIN_TIME_BETWEEN_UPDATES, + ) + hass.data[DOMAIN][name] = { + DATA_KEY_API: api, + DATA_KEY_COORDINATOR: coordinator, + } + hass.async_create_task( hass.config_entries.async_forward_entry_setup(entry, SENSOR_DOMAIN) ) @@ -192,24 +203,3 @@ async def async_unload_entry(hass, entry): """Unload pi-hole entry.""" hass.data[DOMAIN].pop(entry.data[CONF_NAME]) return await hass.config_entries.async_forward_entry_unload(entry, SENSOR_DOMAIN) - - -class PiHoleData: - """Get the latest data and update the states.""" - - def __init__(self, api, name): - """Initialize the data object.""" - self.api = api - self.name = name - self.available = True - - @Throttle(MIN_TIME_BETWEEN_UPDATES) - async def async_update(self): - """Get the latest data from the Pi-hole.""" - - try: - await self.api.get_data() - self.available = True - except HoleError: - LOGGER.error("Unable to fetch data from Pi-hole") - self.available = False diff --git a/homeassistant/components/pi_hole/const.py b/homeassistant/components/pi_hole/const.py index eec71ca441d..a5807de5575 100644 ---
a/homeassistant/components/pi_hole/const.py +++ b/homeassistant/components/pi_hole/const.py @@ -23,6 +23,9 @@ ATTR_BLOCKED_DOMAINS = "domains_blocked" MIN_TIME_BETWEEN_UPDATES = timedelta(minutes=5) +DATA_KEY_API = "api" +DATA_KEY_COORDINATOR = "coordinator" + SENSOR_DICT = { "ads_blocked_today": ["Ads Blocked Today", "ads", "mdi:close-octagon-outline"], "ads_percentage_today": [ diff --git a/homeassistant/components/pi_hole/sensor.py b/homeassistant/components/pi_hole/sensor.py index bbc42cdd8a5..d0009f1ebba 100644 --- a/homeassistant/components/pi_hole/sensor.py +++ b/homeassistant/components/pi_hole/sensor.py @@ -6,6 +6,8 @@ from homeassistant.helpers.entity import Entity from .const import ( ATTR_BLOCKED_DOMAINS, + DATA_KEY_API, + DATA_KEY_COORDINATOR, DOMAIN as PIHOLE_DOMAIN, SENSOR_DICT, SENSOR_LIST, @@ -15,10 +17,17 @@ LOGGER = logging.getLogger(__name__) async def async_setup_entry(hass, entry, async_add_entities): - """Set up the pi-hole sensor.""" - pi_hole = hass.data[PIHOLE_DOMAIN][entry.data[CONF_NAME]] + """Set up the Pi-hole sensor.""" + name = entry.data[CONF_NAME] + hole_data = hass.data[PIHOLE_DOMAIN][name] sensors = [ - PiHoleSensor(pi_hole, sensor_name, entry.entry_id) + PiHoleSensor( + hole_data[DATA_KEY_API], + hole_data[DATA_KEY_COORDINATOR], + name, + sensor_name, + entry.entry_id, + ) for sensor_name in SENSOR_LIST ] async_add_entities(sensors, True) @@ -27,10 +36,11 @@ async def async_setup_entry(hass, entry, async_add_entities): class PiHoleSensor(Entity): """Representation of a Pi-hole sensor.""" - def __init__(self, pi_hole, sensor_name, server_unique_id): + def __init__(self, api, coordinator, name, sensor_name, server_unique_id): """Initialize a Pi-hole sensor.""" - self.pi_hole = pi_hole - self._name = pi_hole.name + self.api = api + self.coordinator = coordinator + self._name = name self._condition = sensor_name self._server_unique_id = server_unique_id @@ -38,7 +48,12 @@ class PiHoleSensor(Entity): self._condition_name = variable_info[0] self._unit_of_measurement = variable_info[1] self._icon = variable_info[2] - self.data = {} + + async def async_added_to_hass(self): + """When entity is added to hass.""" + self.async_on_remove( + self.coordinator.async_add_listener(self.async_write_ha_state) + ) @property def name(self): @@ -73,21 +88,25 @@ class PiHoleSensor(Entity): def state(self): """Return the state of the device.""" try: - return round(self.data[self._condition], 2) + return round(self.api.data[self._condition], 2) except TypeError: - return self.data[self._condition] + return self.api.data[self._condition] @property def device_state_attributes(self): - """Return the state attributes of the Pi-Hole.""" - return {ATTR_BLOCKED_DOMAINS: self.data["domains_being_blocked"]} + """Return the state attributes of the Pi-hole.""" + return {ATTR_BLOCKED_DOMAINS: self.api.data["domains_being_blocked"]} @property def available(self): """Could the device be accessed during the last update call.""" - return self.pi_hole.available + return self.coordinator.last_update_success + + @property + def should_poll(self): + """No need to poll. 
Coordinator notifies entity of updates.""" + return False async def async_update(self): """Get the latest data from the Pi-hole API.""" - await self.pi_hole.async_update() - self.data = self.pi_hole.api.data + await self.coordinator.async_request_refresh() diff --git a/homeassistant/components/pi_hole/translations/fr.json b/homeassistant/components/pi_hole/translations/fr.json index ddd63a02062..77d7882dd95 100644 --- a/homeassistant/components/pi_hole/translations/fr.json +++ b/homeassistant/components/pi_hole/translations/fr.json @@ -1,5 +1,8 @@ { "config": { + "abort": { + "duplicated_name": "Le nom existe d\u00e9j\u00e0" + }, "step": { "user": { "data": { diff --git a/homeassistant/components/pjlink/media_player.py b/homeassistant/components/pjlink/media_player.py index 6316ea421e0..e601650d08c 100644 --- a/homeassistant/components/pjlink/media_player.py +++ b/homeassistant/components/pjlink/media_player.py @@ -28,6 +28,7 @@ CONF_ENCODING = "encoding" DEFAULT_PORT = 4352 DEFAULT_ENCODING = "utf-8" +DEFAULT_TIMEOUT = 10 PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( { @@ -93,7 +94,9 @@ class PjLinkDevice(MediaPlayerEntity): def projector(self): """Create PJLink Projector instance.""" - projector = Projector.from_address(self._host, self._port, self._encoding) + projector = Projector.from_address( + self._host, self._port, self._encoding, DEFAULT_TIMEOUT + ) projector.authenticate(self._password) return projector diff --git a/homeassistant/components/plaato/translations/nn.json b/homeassistant/components/plaato/translations/nn.json deleted file mode 100644 index 5492aabed83..00000000000 --- a/homeassistant/components/plaato/translations/nn.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "title": "Plaato Airlock" -} \ No newline at end of file diff --git a/homeassistant/components/plant/__init__.py b/homeassistant/components/plant/__init__.py index 5364fbf2e21..02d6186d79d 100644 --- a/homeassistant/components/plant/__init__.py +++ b/homeassistant/components/plant/__init__.py @@ -311,7 +311,7 @@ class Plant(Entity): ) .order_by(States.last_updated.asc()) ) - states = execute(query) + states = execute(query, to_native=True, validate_entity_ids=False) for state in states: # filter out all None, NaN and "unknown" states diff --git a/homeassistant/components/plex/__init__.py b/homeassistant/components/plex/__init__.py index 89a3570dd10..01f80ed0d2b 100644 --- a/homeassistant/components/plex/__init__.py +++ b/homeassistant/components/plex/__init__.py @@ -161,12 +161,20 @@ async def async_setup_entry(hass, entry): } ) - hass.services.async_register( - PLEX_DOMAIN, - SERVICE_PLAY_ON_SONOS, - async_play_on_sonos_service, - schema=play_on_sonos_schema, - ) + def get_plex_account(plex_server): + try: + return plex_server.account + except (plexapi.exceptions.BadRequest, plexapi.exceptions.Unauthorized): + return None + + plex_account = await hass.async_add_executor_job(get_plex_account, plex_server) + if plex_account: + hass.services.async_register( + PLEX_DOMAIN, + SERVICE_PLAY_ON_SONOS, + async_play_on_sonos_service, + schema=play_on_sonos_schema, + ) return True diff --git a/homeassistant/components/plex/config_flow.py b/homeassistant/components/plex/config_flow.py index 5057b535ea6..ffadba63d3a 100644 --- a/homeassistant/components/plex/config_flow.py +++ b/homeassistant/components/plex/config_flow.py @@ -96,7 +96,9 @@ class PlexFlowHandler(config_entries.ConfigFlow, domain=DOMAIN): self.client_id = None self._manual = False - async def async_step_user(self, user_input=None, errors=None): + async def 
async_step_user( + self, user_input=None, errors=None + ): # pylint: disable=arguments-differ """Handle a flow initialized by the user.""" if user_input is not None: return await self.async_step_plex_website_auth() diff --git a/homeassistant/components/plex/const.py b/homeassistant/components/plex/const.py index 9d9b8ed8915..e8077a00983 100644 --- a/homeassistant/components/plex/const.py +++ b/homeassistant/components/plex/const.py @@ -9,10 +9,13 @@ DEFAULT_PORT = 32400 DEFAULT_SSL = False DEFAULT_VERIFY_SSL = True +PLEXTV_THROTTLE = 60 + DEBOUNCE_TIMEOUT = 1 DISPATCHERS = "dispatchers" PLATFORMS = frozenset(["media_player", "sensor"]) PLATFORMS_COMPLETED = "platforms_completed" +PLAYER_SOURCE = "player_source" SERVERS = "servers" WEBSOCKETS = "websockets" diff --git a/homeassistant/components/plex/manifest.json b/homeassistant/components/plex/manifest.json index 386f772947a..7ea19a7a157 100644 --- a/homeassistant/components/plex/manifest.json +++ b/homeassistant/components/plex/manifest.json @@ -6,7 +6,7 @@ "requirements": [ "plexapi==4.0.0", "plexauth==0.0.5", - "plexwebsocket==0.0.10" + "plexwebsocket==0.0.11" ], "dependencies": ["http"], "after_dependencies": ["sonos"], diff --git a/homeassistant/components/plex/media_player.py b/homeassistant/components/plex/media_player.py index a25765ec588..d467b962dad 100644 --- a/homeassistant/components/plex/media_player.py +++ b/homeassistant/components/plex/media_player.py @@ -36,6 +36,8 @@ from .const import ( SERVERS, ) +LIVE_TV_SECTION = "-4" + _LOGGER = logging.getLogger(__name__) @@ -88,11 +90,12 @@ def _async_add_entities( class PlexMediaPlayer(MediaPlayerEntity): """Representation of a Plex device.""" - def __init__(self, plex_server, device, session=None): + def __init__(self, plex_server, device, player_source, session=None): """Initialize the Plex device.""" self.plex_server = plex_server self.device = device self.session = session + self.player_source = player_source self._app_name = "" self._available = False self._device_protocol_capabilities = None @@ -246,17 +249,23 @@ class PlexMediaPlayer(MediaPlayerEntity): if self._is_player_active and self.session is not None: self._session_type = self.session.type - self._media_duration = int(self.session.duration / 1000) + if self.session.duration: + self._media_duration = int(self.session.duration / 1000) + else: + self._media_duration = None # title (movie name, tv episode name, music song name) self._media_summary = self.session.summary self._media_title = self.session.title # media type self._set_media_type() - self._app_name = ( - self.session.section().title - if self.session.section() is not None - else "" - ) + if self.session.librarySectionID == LIVE_TV_SECTION: + self._app_name = "Live TV" + else: + self._app_name = ( + self.session.section().title + if self.session.section() is not None + else "" + ) self._set_media_image() else: self._session_type = None @@ -267,7 +276,10 @@ class PlexMediaPlayer(MediaPlayerEntity): self.media_content_type is MEDIA_TYPE_TVSHOW and not self.plex_server.option_use_episode_art ): - thumb_url = self.session.url(self.session.grandparentThumb) + if self.session.librarySectionID == LIVE_TV_SECTION: + thumb_url = self.session.grandparentThumb + else: + thumb_url = self.session.url(self.session.grandparentThumb) if thumb_url is None: _LOGGER.debug( @@ -301,7 +313,7 @@ class PlexMediaPlayer(MediaPlayerEntity): self._media_series_title = self.session.grandparentTitle # episode number (00) if self.session.index is not None: - self._media_episode = 
str(self.session.index).zfill(2) + self._media_episode = self.session.index elif self._session_type == "movie": self._media_content_type = MEDIA_TYPE_MOVIE @@ -585,6 +597,7 @@ class PlexMediaPlayer(MediaPlayerEntity): "session_username": self.username, "media_library_name": self._app_name, "summary": self.media_summary, + "player_source": self.player_source, } return attr diff --git a/homeassistant/components/plex/sensor.py b/homeassistant/components/plex/sensor.py index 30761f11bdd..1db7eb3b6f8 100644 --- a/homeassistant/components/plex/sensor.py +++ b/homeassistant/components/plex/sensor.py @@ -87,14 +87,19 @@ class PlexSensor(Entity): # "Supernatural (2005) - s01e13 - Route 666" def sync_io_attributes(session): - return (session.show(), session.seasonEpisode) + year = None + try: + year = session.show().year + except TypeError: + pass + return (year, session.seasonEpisode) - show, season_episode = await self.hass.async_add_executor_job( + year, season_episode = await self.hass.async_add_executor_job( sync_io_attributes, sess ) season_title = sess.grandparentTitle - if show.year is not None: - season_title += f" ({show.year!s})" + if year is not None: + season_title += f" ({year!s})" episode_title = sess.title now_playing_title = ( f"{season_title} - {season_episode} - {episode_title}" diff --git a/homeassistant/components/plex/server.py b/homeassistant/components/plex/server.py index dda4c0a46b5..94ba9b6950d 100644 --- a/homeassistant/components/plex/server.py +++ b/homeassistant/components/plex/server.py @@ -1,9 +1,10 @@ """Shared class to maintain Plex server instances.""" import logging import ssl +import time from urllib.parse import urlparse -from plexapi.exceptions import NotFound, Unauthorized +from plexapi.exceptions import BadRequest, NotFound, Unauthorized import plexapi.myplex import plexapi.playqueue import plexapi.server @@ -31,9 +32,11 @@ from .const import ( DEBOUNCE_TIMEOUT, DEFAULT_VERIFY_SSL, DOMAIN, + PLAYER_SOURCE, PLEX_NEW_MP_SIGNAL, PLEX_UPDATE_MEDIA_PLAYER_SIGNAL, PLEX_UPDATE_SENSOR_SIGNAL, + PLEXTV_THROTTLE, X_PLEX_DEVICE_NAME, X_PLEX_PLATFORM, X_PLEX_PRODUCT, @@ -70,6 +73,10 @@ class PlexServer: self.server_choice = None self._accounts = [] self._owner_username = None + self._plextv_clients = None + self._plextv_client_timestamp = 0 + self._plextv_device_cache = {} + self._use_plex_tv = self._token is not None self._version = None self.async_update_platforms = Debouncer( hass, @@ -88,19 +95,49 @@ class PlexServer: @property def account(self): """Return a MyPlexAccount instance.""" - if not self._plex_account: - self._plex_account = plexapi.myplex.MyPlexAccount(token=self._token) + if not self._plex_account and self._use_plex_tv: + try: + self._plex_account = plexapi.myplex.MyPlexAccount(token=self._token) + except (BadRequest, Unauthorized): + self._use_plex_tv = False + _LOGGER.error("Not authorized to access plex.tv with provided token") + raise return self._plex_account + @property + def plextv_resources(self): + """Return all resources linked to Plex account.""" + if self.account is None: + return [] + + return self.account.resources() + + def plextv_clients(self): + """Return available clients linked to Plex account.""" + if self.account is None: + return [] + + now = time.time() + if now - self._plextv_client_timestamp > PLEXTV_THROTTLE: + self._plextv_client_timestamp = now + self._plextv_clients = [ + x + for x in self.plextv_resources + if "player" in x.provides and x.presence + ] + _LOGGER.debug( + "Current available clients from plex.tv: %s", 
self._plextv_clients + ) + return self._plextv_clients + def connect(self): """Connect to a Plex server directly, obtaining direct URL if necessary.""" config_entry_update_needed = False def _connect_with_token(): - account = plexapi.myplex.MyPlexAccount(token=self._token) available_servers = [ (x.name, x.clientIdentifier) - for x in account.resources() + for x in self.plextv_resources if "server" in x.provides ] @@ -112,7 +149,9 @@ class PlexServer: self.server_choice = ( self._server_name if self._server_name else available_servers[0][0] ) - self._plex_server = account.resource(self.server_choice).connect(timeout=10) + self._plex_server = self.account.resource(self.server_choice).connect( + timeout=10 + ) def _connect_with_url(): session = None @@ -124,13 +163,18 @@ class PlexServer: ) def _update_plexdirect_hostname(): - account = plexapi.myplex.MyPlexAccount(token=self._token) - matching_server = [ + matching_servers = [ x.name - for x in account.resources() + for x in self.plextv_resources if x.clientIdentifier == self._server_id - ][0] - self._plex_server = account.resource(matching_server).connect(timeout=10) + ] + if matching_servers: + self._plex_server = self.account.resource(matching_servers[0]).connect( + timeout=10 + ) + return True + _LOGGER.error("Attempt to update plex.direct hostname failed") + return False if self._url: try: @@ -146,8 +190,12 @@ class PlexServer: _LOGGER.warning( "Plex SSL certificate's hostname changed, updating." ) - _update_plexdirect_hostname() - config_entry_update_needed = True + if _update_plexdirect_hostname(): + config_entry_update_needed = True + else: + raise Unauthorized( + "New certificate cannot be validated with provided token" + ) else: raise else: @@ -193,7 +241,11 @@ class PlexServer: def _fetch_platform_data(self): """Fetch all data from the Plex server in a single method.""" - return (self._plex_server.clients(), self._plex_server.sessions()) + return ( + self._plex_server.clients(), + self._plex_server.sessions(), + self.plextv_clients(), + ) async def _async_update_platforms(self): """Update the platform entities.""" @@ -217,7 +269,7 @@ class PlexServer: monitored_users.add(new_user) try: - devices, sessions = await self.hass.async_add_executor_job( + devices, sessions, plextv_clients = await self.hass.async_add_executor_job( self._fetch_platform_data ) except ( @@ -232,6 +284,9 @@ class PlexServer: def process_device(source, device): self._known_idle.discard(device.machineIdentifier) available_clients.setdefault(device.machineIdentifier, {"device": device}) + available_clients[device.machineIdentifier].setdefault( + PLAYER_SOURCE, source + ) if device.machineIdentifier not in ignored_clients: if self.option_ignore_plexweb_clients and device.product == "Plex Web": @@ -245,18 +300,43 @@ class PlexServer: ) return - if ( - device.machineIdentifier not in self._created_clients - and device.machineIdentifier not in ignored_clients - and device.machineIdentifier not in new_clients + if device.machineIdentifier not in ( + self._created_clients | ignored_clients | new_clients ): new_clients.add(device.machineIdentifier) _LOGGER.debug( - "New %s %s: %s", device.product, source, device.machineIdentifier + "New %s from %s: %s", + device.product, + source, + device.machineIdentifier, ) for device in devices: - process_device("device", device) + process_device("PMS", device) + + def connect_to_resource(resource): + """Connect to a plex.tv resource and return a Plex client.""" + client_id = resource.clientIdentifier + if client_id in 
self._plextv_device_cache: + return self._plextv_device_cache[client_id] + + client = None + try: + client = resource.connect(timeout=3) + _LOGGER.debug("plex.tv resource connection successful: %s", client) + except NotFound: + _LOGGER.error("plex.tv resource connection failed: %s", resource.name) + + self._plextv_device_cache[client_id] = client + return client + + for plextv_client in plextv_clients: + if plextv_client.clientIdentifier not in available_clients: + device = await self.hass.async_add_executor_job( + connect_to_resource, plextv_client + ) + if device: + process_device("plex.tv", device) for session in sessions: if session.TYPE == "photo": @@ -296,6 +376,7 @@ class PlexServer: for client_id in idle_clients: self.async_refresh_entity(client_id, None, None) self._known_idle.add(client_id) + self._plextv_device_cache.pop(client_id, None) if new_entity_configs: async_dispatcher_send( @@ -390,7 +471,7 @@ class PlexServer: key = kwargs["plex_key"] try: return self.fetch_item(key) - except plexapi.exceptions.NotFound: + except NotFound: _LOGGER.error("Media for key %s not found", key) return None diff --git a/homeassistant/components/plex/translations/cs.json b/homeassistant/components/plex/translations/cs.json deleted file mode 100644 index dc84548da7f..00000000000 --- a/homeassistant/components/plex/translations/cs.json +++ /dev/null @@ -1,10 +0,0 @@ -{ - "config": { - "step": { - "start_website_auth": { - "description": "Pokra\u010dujte v autorizaci na plex.tv.", - "title": "P\u0159ipojit server plex" - } - } - } -} \ No newline at end of file diff --git a/homeassistant/components/plex/translations/nn.json b/homeassistant/components/plex/translations/nn.json deleted file mode 100644 index 9158b0c83e9..00000000000 --- a/homeassistant/components/plex/translations/nn.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "title": "Plex" -} \ No newline at end of file diff --git a/homeassistant/components/plex/translations/ro.json b/homeassistant/components/plex/translations/ro.json deleted file mode 100644 index 537bd5e3fac..00000000000 --- a/homeassistant/components/plex/translations/ro.json +++ /dev/null @@ -1,24 +0,0 @@ -{ - "config": { - "error": { - "no_token": "Furniza\u021bi un token sau selecta\u021bi configurarea manual\u0103" - }, - "step": { - "manual_setup": { - "data": { - "host": "Gazd\u0103", - "port": "Port", - "ssl": "Folosi\u021bi SSL", - "token": "Token-ul (dac\u0103 este necesar)", - "verify_ssl": "Verifica\u021bi certificatul SSL" - }, - "title": "Server Plex" - }, - "user": { - "data": { - "manual_setup": "Configurare manual\u0103" - } - } - } - } -} \ No newline at end of file diff --git a/homeassistant/components/plugwise/__init__.py b/homeassistant/components/plugwise/__init__.py index a0b98f9d1c0..efb97f51c41 100644 --- a/homeassistant/components/plugwise/__init__.py +++ b/homeassistant/components/plugwise/__init__.py @@ -58,10 +58,9 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: _LOGGER.error("Timeout while connecting to Smile") raise ConfigEntryNotReady + update_interval = timedelta(seconds=60) if api.smile_type == "power": update_interval = timedelta(seconds=10) - else: - update_interval = timedelta(seconds=60) async def async_update_data(): """Update data via API endpoint.""" @@ -102,9 +101,9 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: sw_version=api.smile_version[0], ) - platforms = ALL_PLATFORMS - single_master_thermostat = api.single_master_thermostat() + + platforms = ALL_PLATFORMS if 
single_master_thermostat is None: platforms = SENSOR_PLATFORMS @@ -165,8 +164,6 @@ class SmileGateway(Entity): @property def name(self): """Return the name of the entity, if any.""" - if not self._name: - return None return self._name @property diff --git a/homeassistant/components/plugwise/binary_sensor.py b/homeassistant/components/plugwise/binary_sensor.py index a2156cd37f9..d6b6424c7ce 100644 --- a/homeassistant/components/plugwise/binary_sensor.py +++ b/homeassistant/components/plugwise/binary_sensor.py @@ -26,20 +26,24 @@ async def async_setup_entry(hass, config_entry, async_add_entities): all_devices = api.get_all_devices() for dev_id, device_properties in all_devices.items(): - if device_properties["class"] == "heater_central": - data = api.get_device_data(dev_id) - for binary_sensor, dummy in BINARY_SENSOR_MAP.items(): - if binary_sensor in data: - entities.append( - PwBinarySensor( - api, - coordinator, - device_properties["name"], - binary_sensor, - dev_id, - device_properties["class"], - ) - ) + if device_properties["class"] != "heater_central": + continue + + data = api.get_device_data(dev_id) + for binary_sensor, dummy in BINARY_SENSOR_MAP.items(): + if binary_sensor not in data: + continue + + entities.append( + PwBinarySensor( + api, + coordinator, + device_properties["name"], + binary_sensor, + dev_id, + device_properties["class"], + ) + ) async_add_entities(entities, True) @@ -74,23 +78,26 @@ class PwBinarySensor(SmileSensor, BinarySensorEntity): data = self._api.get_device_data(self._dev_id) if not data: - _LOGGER.error("Received no data for device %s.", self._binary_sensor) + _LOGGER.error("Received no data for device %s", self._binary_sensor) self.async_write_ha_state() return - if self._binary_sensor in data: - self._is_on = data[self._binary_sensor] + if self._binary_sensor not in data: + self.async_write_ha_state() + return - self._state = STATE_OFF + self._is_on = data[self._binary_sensor] + + self._state = STATE_OFF + if self._binary_sensor == "dhw_state": + self._icon = FLOW_OFF_ICON + if self._binary_sensor == "slave_boiler_state": + self._icon = IDLE_ICON + if self._is_on: + self._state = STATE_ON if self._binary_sensor == "dhw_state": - self._icon = FLOW_OFF_ICON + self._icon = FLOW_ON_ICON if self._binary_sensor == "slave_boiler_state": - self._icon = IDLE_ICON - if self._is_on: - self._state = STATE_ON - if self._binary_sensor == "dhw_state": - self._icon = FLOW_ON_ICON - if self._binary_sensor == "slave_boiler_state": - self._icon = FLAME_ICON + self._icon = FLAME_ICON self.async_write_ha_state() diff --git a/homeassistant/components/plugwise/climate.py b/homeassistant/components/plugwise/climate.py index 42d4aa462b6..dbc9e54e0d7 100644 --- a/homeassistant/components/plugwise/climate.py +++ b/homeassistant/components/plugwise/climate.py @@ -122,8 +122,7 @@ class PwThermostat(SmileGateway, ClimateEntity): """Return the device specific state attributes.""" attributes = {} if self._schema_names: - if len(self._schema_names) > 1: - attributes["available_schemas"] = self._schema_names + attributes["available_schemas"] = self._schema_names if self._selected_schema: attributes["selected_schema"] = self._selected_schema return attributes @@ -238,10 +237,9 @@ class PwThermostat(SmileGateway, ClimateEntity): self._schema_names = climate_data["available_schedules"] if "selected_schedule" in climate_data: self._selected_schema = climate_data["selected_schedule"] + self._schema_status = False if self._selected_schema is not None: self._schema_status = True - else: - 
self._schema_status = False if "last_used" in climate_data: self._last_active_schema = climate_data["last_used"] if "presets" in climate_data: diff --git a/homeassistant/components/plugwise/manifest.json b/homeassistant/components/plugwise/manifest.json index fa4cf32a2ec..67456aca3bd 100644 --- a/homeassistant/components/plugwise/manifest.json +++ b/homeassistant/components/plugwise/manifest.json @@ -2,7 +2,7 @@ "domain": "plugwise", "name": "Plugwise", "documentation": "https://www.home-assistant.io/integrations/plugwise", - "requirements": ["Plugwise_Smile==0.2.13"], + "requirements": ["Plugwise_Smile==1.1.0"], "codeowners": ["@CoMPaTech", "@bouwew"], "config_flow": true } diff --git a/homeassistant/components/plugwise/sensor.py b/homeassistant/components/plugwise/sensor.py index eabb5c6655f..39c6b6e5010 100644 --- a/homeassistant/components/plugwise/sensor.py +++ b/homeassistant/components/plugwise/sensor.py @@ -69,11 +69,31 @@ ENERGY_SENSOR_MAP = { ENERGY_WATT_HOUR, DEVICE_CLASS_POWER, ], + "electricity_consumed_peak_interval": [ + "Consumed Power Interval", + ENERGY_WATT_HOUR, + DEVICE_CLASS_POWER, + ], + "electricity_consumed_off_peak_interval": [ + "Consumed Power Interval (off peak)", + ENERGY_WATT_HOUR, + DEVICE_CLASS_POWER, + ], "electricity_produced_interval": [ "Produced Power Interval", ENERGY_WATT_HOUR, DEVICE_CLASS_POWER, ], + "electricity_produced_peak_interval": [ + "Produced Power Interval", + ENERGY_WATT_HOUR, + DEVICE_CLASS_POWER, + ], + "electricity_produced_off_peak_interval": [ + "Produced Power Interval (off peak)", + ENERGY_WATT_HOUR, + DEVICE_CLASS_POWER, + ], "electricity_consumed_off_peak_point": [ "Current Consumed Power (off peak)", POWER_WATT, @@ -137,6 +157,13 @@ INDICATE_ACTIVE_LOCAL_DEVICE = [ "flame_state", ] +CUSTOM_ICONS = { + "gas_consumed_interval": "mdi:fire", + "gas_consumed_cumulative": "mdi:fire", + "modulation_level": "mdi:percent", + "valve_position": "mdi:valve", +} + async def async_setup_entry(hass, config_entry, async_add_entities): """Set up the Smile sensors from a config entry.""" @@ -153,52 +180,53 @@ async def async_setup_entry(hass, config_entry, async_add_entities): **ENERGY_SENSOR_MAP, **MISC_SENSOR_MAP, }.items(): - if sensor in data: - if data[sensor] is None: - continue + if data.get(sensor) is None: + continue - if "power" in device_properties["types"]: - model = None + if "power" in device_properties["types"]: + model = None - if "plug" in device_properties["types"]: - model = "Metered Switch" + if "plug" in device_properties["types"]: + model = "Metered Switch" - entities.append( - PwPowerSensor( - api, - coordinator, - device_properties["name"], - dev_id, - sensor, - sensor_type, - model, - ) + entities.append( + PwPowerSensor( + api, + coordinator, + device_properties["name"], + dev_id, + sensor, + sensor_type, + model, ) - else: - entities.append( - PwThermostatSensor( - api, - coordinator, - device_properties["name"], - dev_id, - sensor, - sensor_type, - ) + ) + else: + entities.append( + PwThermostatSensor( + api, + coordinator, + device_properties["name"], + dev_id, + sensor, + sensor_type, ) + ) if single_thermostat is False: for state in INDICATE_ACTIVE_LOCAL_DEVICE: - if state in data: - entities.append( - PwAuxDeviceSensor( - api, - coordinator, - device_properties["name"], - dev_id, - DEVICE_STATE, - ) + if state not in data: + continue + + entities.append( + PwAuxDeviceSensor( + api, + coordinator, + device_properties["name"], + dev_id, + DEVICE_STATE, ) - break + ) + break async_add_entities(entities, True) @@ 
-250,6 +278,7 @@ class PwThermostatSensor(SmileSensor, Entity): """Set up the Plugwise API.""" super().__init__(api, coordinator, name, dev_id, sensor) + self._icon = None self._model = sensor_type[SENSOR_MAP_MODEL] self._unit_of_measurement = sensor_type[SENSOR_MAP_UOM] self._dev_class = sensor_type[SENSOR_MAP_DEVICE_CLASS] @@ -260,7 +289,7 @@ class PwThermostatSensor(SmileSensor, Entity): data = self._api.get_device_data(self._dev_id) if not data: - _LOGGER.error("Received no data for device %s.", self._entity_name) + _LOGGER.error("Received no data for device %s", self._entity_name) self.async_write_ha_state() return @@ -271,6 +300,7 @@ class PwThermostatSensor(SmileSensor, Entity): if self._unit_of_measurement == UNIT_PERCENTAGE: measurement = int(measurement) self._state = measurement + self._icon = CUSTOM_ICONS.get(self._sensor, self._icon) self.async_write_ha_state() @@ -297,7 +327,7 @@ class PwAuxDeviceSensor(SmileSensor, Entity): data = self._api.get_device_data(self._dev_id) if not data: - _LOGGER.error("Received no data for device %s.", self._entity_name) + _LOGGER.error("Received no data for device %s", self._entity_name) self.async_write_ha_state() return @@ -325,6 +355,7 @@ class PwPowerSensor(SmileSensor, Entity): """Set up the Plugwise API.""" super().__init__(api, coordinator, name, dev_id, sensor) + self._icon = None self._model = model if model is None: self._model = sensor_type[SENSOR_MAP_MODEL] @@ -341,7 +372,7 @@ class PwPowerSensor(SmileSensor, Entity): data = self._api.get_device_data(self._dev_id) if not data: - _LOGGER.error("Received no data for device %s.", self._entity_name) + _LOGGER.error("Received no data for device %s", self._entity_name) self.async_write_ha_state() return @@ -350,5 +381,6 @@ class PwPowerSensor(SmileSensor, Entity): if self._unit_of_measurement == ENERGY_KILO_WATT_HOUR: measurement = int(measurement / 1000) self._state = measurement + self._icon = CUSTOM_ICONS.get(self._sensor, self._icon) self.async_write_ha_state() diff --git a/homeassistant/components/plugwise/switch.py b/homeassistant/components/plugwise/switch.py index 50b704e36ac..bd831e2f9aa 100644 --- a/homeassistant/components/plugwise/switch.py +++ b/homeassistant/components/plugwise/switch.py @@ -74,7 +74,7 @@ class PwSwitch(SmileGateway, SwitchEntity): data = self._api.get_device_data(self._dev_id) if not data: - _LOGGER.error("Received no data for device %s.", self._name) + _LOGGER.error("Received no data for device %s", self._name) self.async_write_ha_state() return diff --git a/homeassistant/components/plum_lightpad/__init__.py b/homeassistant/components/plum_lightpad/__init__.py index bfdf67a0f40..8e7596bd7e0 100644 --- a/homeassistant/components/plum_lightpad/__init__.py +++ b/homeassistant/components/plum_lightpad/__init__.py @@ -1,18 +1,20 @@ """Support for Plum Lightpad devices.""" -import asyncio import logging -from plumlightpad import Plum +from aiohttp import ContentTypeError +from requests.exceptions import ConnectTimeout, HTTPError import voluptuous as vol +from homeassistant.config_entries import SOURCE_IMPORT, ConfigEntry from homeassistant.const import CONF_PASSWORD, CONF_USERNAME, EVENT_HOMEASSISTANT_STOP -from homeassistant.helpers import discovery -from homeassistant.helpers.aiohttp_client import async_get_clientsession +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import ConfigEntryNotReady import homeassistant.helpers.config_validation as cv -_LOGGER = logging.getLogger(__name__) +from .const import DOMAIN +from .utils import 
load_plum -DOMAIN = "plum_lightpad" +_LOGGER = logging.getLogger(__name__) CONFIG_SCHEMA = vol.Schema( { @@ -26,58 +28,53 @@ CONFIG_SCHEMA = vol.Schema( extra=vol.ALLOW_EXTRA, ) -PLUM_DATA = "plum" +PLATFORMS = ["light"] -async def async_setup(hass, config): +async def async_setup(hass: HomeAssistant, config: dict): """Plum Lightpad Platform initialization.""" + if DOMAIN not in config: + return True conf = config[DOMAIN] - plum = Plum(conf[CONF_USERNAME], conf[CONF_PASSWORD]) - hass.data[PLUM_DATA] = plum + _LOGGER.info("Found Plum Lightpad configuration in config, importing...") + hass.async_create_task( + hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_IMPORT}, data=conf + ) + ) + + return True + + +async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry): + """Set up Plum Lightpad from a config entry.""" + _LOGGER.debug("Setting up config entry with ID = %s", entry.unique_id) + + username = entry.data.get(CONF_USERNAME) + password = entry.data.get(CONF_PASSWORD) + + try: + plum = await load_plum(username, password, hass) + except ContentTypeError as ex: + _LOGGER.error("Unable to authenticate to Plum cloud: %s", ex) + return False + except (ConnectTimeout, HTTPError) as ex: + _LOGGER.error("Unable to connect to Plum cloud: %s", ex) + raise ConfigEntryNotReady + + hass.data.setdefault(DOMAIN, {}) + hass.data[DOMAIN][entry.entry_id] = plum + + for component in PLATFORMS: + hass.async_create_task( + hass.config_entries.async_forward_entry_setup(entry, component) + ) def cleanup(event): """Clean up resources.""" plum.cleanup() hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, cleanup) - - cloud_web_sesison = async_get_clientsession(hass, verify_ssl=True) - await plum.loadCloudData(cloud_web_sesison) - - async def new_load(device): - """Load light and sensor platforms when LogicalLoad is detected.""" - await asyncio.wait( - [ - hass.async_create_task( - discovery.async_load_platform( - hass, "light", DOMAIN, discovered=device, hass_config=conf - ) - ) - ] - ) - - async def new_lightpad(device): - """Load light and binary sensor platforms when Lightpad detected.""" - await asyncio.wait( - [ - hass.async_create_task( - discovery.async_load_platform( - hass, "light", DOMAIN, discovered=device, hass_config=conf - ) - ) - ] - ) - - device_web_session = async_get_clientsession(hass, verify_ssl=False) - hass.async_create_task( - plum.discover( - hass.loop, - loadListener=new_load, - lightpadListener=new_lightpad, - websession=device_web_session, - ) - ) - return True diff --git a/homeassistant/components/plum_lightpad/config_flow.py b/homeassistant/components/plum_lightpad/config_flow.py new file mode 100644 index 00000000000..acf9380bf71 --- /dev/null +++ b/homeassistant/components/plum_lightpad/config_flow.py @@ -0,0 +1,62 @@ +"""Config flow for Plum Lightpad.""" +import logging +from typing import Any, Dict, Optional + +from aiohttp import ContentTypeError +from requests.exceptions import ConnectTimeout, HTTPError +import voluptuous as vol + +from homeassistant import config_entries +from homeassistant.const import CONF_PASSWORD, CONF_USERNAME +from homeassistant.helpers.typing import ConfigType + +from .const import DOMAIN # pylint: disable=unused-import +from .utils import load_plum + +_LOGGER = logging.getLogger(__name__) + + +class PlumLightpadConfigFlow(config_entries.ConfigFlow, domain=DOMAIN): + """Config flow for Plum Lightpad integration.""" + + VERSION = 1 + + def _show_form(self, errors=None): + schema = { + vol.Required(CONF_USERNAME): str,
vol.Required(CONF_PASSWORD): str, + } + + return self.async_show_form( + step_id="user", data_schema=vol.Schema(schema), errors=errors or {}, + ) + + async def async_step_user( + self, user_input: Optional[ConfigType] = None + ) -> Dict[str, Any]: + """Handle a flow initialized by the user or redirected to by import.""" + if not user_input: + return self._show_form() + + username = user_input[CONF_USERNAME] + password = user_input[CONF_PASSWORD] + + # load Plum just so we know username/password work + try: + await load_plum(username, password, self.hass) + except (ContentTypeError, ConnectTimeout, HTTPError) as ex: + _LOGGER.error("Unable to connect/authenticate to Plum cloud: %s", str(ex)) + return self._show_form({"base": "cannot_connect"}) + + await self.async_set_unique_id(username) + self._abort_if_unique_id_configured() + + return self.async_create_entry( + title=username, data={CONF_USERNAME: username, CONF_PASSWORD: password} + ) + + async def async_step_import( + self, import_config: Optional[ConfigType] + ) -> Dict[str, Any]: + """Import a config entry from configuration.yaml.""" + return await self.async_step_user(import_config) diff --git a/homeassistant/components/plum_lightpad/const.py b/homeassistant/components/plum_lightpad/const.py new file mode 100644 index 00000000000..efea35d0a7a --- /dev/null +++ b/homeassistant/components/plum_lightpad/const.py @@ -0,0 +1,3 @@ +"""Constants for the Plum Lightpad component.""" + +DOMAIN = "plum_lightpad" diff --git a/homeassistant/components/plum_lightpad/light.py b/homeassistant/components/plum_lightpad/light.py index 737c6f2bfad..a94014ff1f9 100644 --- a/homeassistant/components/plum_lightpad/light.py +++ b/homeassistant/components/plum_lightpad/light.py @@ -1,4 +1,10 @@ """Support for Plum Lightpad lights.""" +import asyncio +import logging +from typing import Callable, List + +from plumlightpad import Plum + from homeassistant.components.light import ( ATTR_BRIGHTNESS, ATTR_HS_COLOR, @@ -6,30 +12,55 @@ from homeassistant.components.light import ( SUPPORT_COLOR, LightEntity, ) +from homeassistant.config_entries import ConfigEntry +from homeassistant.core import HomeAssistant +from homeassistant.helpers.aiohttp_client import async_get_clientsession +from homeassistant.helpers.entity import Entity import homeassistant.util.color as color_util -from . 
import PLUM_DATA +from .const import DOMAIN + +_LOGGER = logging.getLogger(__name__) -async def async_setup_platform(hass, config, async_add_entities, discovery_info=None): - """Initialize the Plum Lightpad Light and GlowRing.""" - if discovery_info is None: - return +async def async_setup_entry( + hass: HomeAssistant, + entry: ConfigEntry, + async_add_entities: Callable[[List[Entity]], None], +) -> None: + """Set up Plum Lightpad dimmer lights and glow rings.""" - plum = hass.data[PLUM_DATA] + plum: Plum = hass.data[DOMAIN][entry.entry_id] - entities = [] + def setup_entities(device) -> None: + entities = [] - if "lpid" in discovery_info: - lightpad = plum.get_lightpad(discovery_info["lpid"]) - entities.append(GlowRing(lightpad=lightpad)) + if "lpid" in device: + lightpad = plum.get_lightpad(device["lpid"]) + entities.append(GlowRing(lightpad=lightpad)) - if "llid" in discovery_info: - logical_load = plum.get_load(discovery_info["llid"]) - entities.append(PlumLight(load=logical_load)) + if "llid" in device: + logical_load = plum.get_load(device["llid"]) + entities.append(PlumLight(load=logical_load)) - if entities: - async_add_entities(entities) + if entities: + async_add_entities(entities) + + async def new_load(device): + setup_entities(device) + + async def new_lightpad(device): + setup_entities(device) + + device_web_session = async_get_clientsession(hass, verify_ssl=False) + asyncio.create_task( + plum.discover( + hass.loop, + loadListener=new_load, + lightpadListener=new_lightpad, + websession=device_web_session, + ) + ) class PlumLight(LightEntity): @@ -54,11 +85,26 @@ class PlumLight(LightEntity): """No polling needed.""" return False + @property + def unique_id(self): + """Combine logical load ID with .light to guarantee it is unique.""" + return f"{self._load.llid}.light" + @property def name(self): """Return the name of the switch if any.""" return self._load.name + @property + def device_info(self): + """Return the device info.""" + return { + "name": self.name, + "identifiers": {(DOMAIN, self.unique_id)}, + "model": "Dimmer", + "manufacturer": "Plum", + } + @property def brightness(self) -> int: """Return the brightness of this switch between 0..255.""" @@ -130,11 +176,26 @@ class GlowRing(LightEntity): """No polling needed.""" return False + @property + def unique_id(self): + """Combine LightPad ID with .glow to guarantee it is unique.""" + return f"{self._lightpad.lpid}.glow" + @property def name(self): """Return the name of the switch if any.""" return self._name + @property + def device_info(self): + """Return the device info.""" + return { + "name": self.name, + "identifiers": {(DOMAIN, self.unique_id)}, + "model": "Glow Ring", + "manufacturer": "Plum", + } + @property def brightness(self) -> int: """Return the brightness of this switch between 0..255.""" diff --git a/homeassistant/components/plum_lightpad/manifest.json b/homeassistant/components/plum_lightpad/manifest.json index 5c846d41ad1..ed9bb9c2eb4 100644 --- a/homeassistant/components/plum_lightpad/manifest.json +++ b/homeassistant/components/plum_lightpad/manifest.json @@ -2,6 +2,12 @@ "domain": "plum_lightpad", "name": "Plum Lightpad", "documentation": "https://www.home-assistant.io/integrations/plum_lightpad", - "requirements": ["plumlightpad==0.0.11"], - "codeowners": ["@ColinHarrington"] + "requirements": [ + "plumlightpad==0.0.11" + ], + "codeowners": [ + "@ColinHarrington", + "@prystupa" + ], + "config_flow": true } diff --git a/homeassistant/components/plum_lightpad/strings.json 
b/homeassistant/components/plum_lightpad/strings.json new file mode 100644 index 00000000000..935e1614696 --- /dev/null +++ b/homeassistant/components/plum_lightpad/strings.json @@ -0,0 +1,18 @@ +{ + "config": { + "step": { + "user": { + "data": { + "username": "[%key:common::config_flow::data::email%]", + "password": "[%key:common::config_flow::data::password%]" + } + } + }, + "error": { + "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]" + }, + "abort": { + "already_configured": "[%key:common::config_flow::abort::already_configured_account%]" + } + } +} diff --git a/homeassistant/components/plum_lightpad/translations/en.json b/homeassistant/components/plum_lightpad/translations/en.json new file mode 100644 index 00000000000..95cafaa7313 --- /dev/null +++ b/homeassistant/components/plum_lightpad/translations/en.json @@ -0,0 +1,20 @@ +{ + "config": { + "abort": { + "single_instance_per_username_allowed": "Only one config entry per unique username is supported" + }, + "error": { + "cannot_connect": "Unable to connect to Plum Cloud." + }, + "step": { + "user": { + "data": { + "password": "Password", + "username": "Email" + }, + "title": "Fill in your Plum Cloud login information" + } + } + }, + "title": "Plum Lightpad" +} diff --git a/homeassistant/components/plum_lightpad/utils.py b/homeassistant/components/plum_lightpad/utils.py new file mode 100644 index 00000000000..6704b443d72 --- /dev/null +++ b/homeassistant/components/plum_lightpad/utils.py @@ -0,0 +1,14 @@ +"""Reusable utilities for the Plum Lightpad component.""" + +from plumlightpad import Plum + +from homeassistant.core import HomeAssistant +from homeassistant.helpers.aiohttp_client import async_get_clientsession + + +async def load_plum(username: str, password: str, hass: HomeAssistant) -> Plum: + """Initialize Plum Lightpad API and load metadata stored in the cloud.""" + plum = Plum(username, password) + cloud_web_session = async_get_clientsession(hass, verify_ssl=True) + await plum.loadCloudData(cloud_web_session) + return plum diff --git a/homeassistant/components/point/translations/de.json b/homeassistant/components/point/translations/de.json index bc2e346b007..1e224e5ac51 100644 --- a/homeassistant/components/point/translations/de.json +++ b/homeassistant/components/point/translations/de.json @@ -12,7 +12,7 @@ }, "error": { "follow_link": "Bitte folgen dem Link und authentifiziere dich, bevor du auf Senden klickst", - "no_token": "Nicht mit Minut authentifiziert" + "no_token": "Ung\u00fcltiger Access Token" }, "step": { "auth": { diff --git a/homeassistant/components/point/translations/nn.json b/homeassistant/components/point/translations/nn.json deleted file mode 100644 index e5a6ea182ff..00000000000 --- a/homeassistant/components/point/translations/nn.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "title": "Minut Point" -} \ No newline at end of file diff --git a/homeassistant/components/point/translations/no.json b/homeassistant/components/point/translations/no.json index eb3fef66166..d3060562273 100644 --- a/homeassistant/components/point/translations/no.json +++ b/homeassistant/components/point/translations/no.json @@ -1,14 +1,14 @@ { "config": { "abort": { - "already_setup": "Du kan kun konfigurere \u00e9n Point-konto.", + "already_setup": "Allerede konfigurert. 
Bare en enkelt konfigurasjon mulig.", "authorize_url_fail": "Ukjent feil ved oppretting av godkjenningsadresse.", "authorize_url_timeout": "Tidsavbrudd ved oppretting av godkjenningsadresse.", "external_setup": "Punktet er konfigurert fra en annen flyt.", - "no_flows": "Du m\u00e5 konfigurere Point f\u00f8r du kangodkjenne den. [Vennligst les instruksjonene](https://www.home-assistant.io/components/point/)." + "no_flows": "Komponenten er ikke konfigurert. F\u00f8lg dokumentasjonen." }, "create_entry": { - "default": "Vellykket godkjenning med Minut for din(e) Point enhet(er)" + "default": "Vellykket godkjenning" }, "error": { "follow_link": "Vennligst f\u00f8lg lenken og godkjenn f\u00f8r du trykker p\u00e5 Send", diff --git a/homeassistant/components/powerwall/__init__.py b/homeassistant/components/powerwall/__init__.py index fa9c81533e7..0f25a14546d 100644 --- a/homeassistant/components/powerwall/__init__.py +++ b/homeassistant/components/powerwall/__init__.py @@ -93,7 +93,7 @@ async def _async_handle_api_changed_error(hass: HomeAssistant, error: APIChanged _LOGGER.error(str(error)) hass.components.persistent_notification.async_create( "It seems like your powerwall uses an unsupported version. " - "Please update the software of your powerwall or if it is" + "Please update the software of your powerwall or if it is " "already the newest consider reporting this issue.\nSee logs for more information", title="Unknown powerwall software version", ) diff --git a/homeassistant/components/powerwall/manifest.json b/homeassistant/components/powerwall/manifest.json index da5f6e4b7ed..1ba9562c4b7 100644 --- a/homeassistant/components/powerwall/manifest.json +++ b/homeassistant/components/powerwall/manifest.json @@ -3,6 +3,6 @@ "name": "Tesla Powerwall", "config_flow": true, "documentation": "https://www.home-assistant.io/integrations/powerwall", - "requirements": ["tesla-powerwall==0.2.10"], + "requirements": ["tesla-powerwall==0.2.11"], "codeowners": ["@bdraco", "@jrester"] } diff --git a/homeassistant/components/prometheus/__init__.py b/homeassistant/components/prometheus/__init__.py index aea26414bee..845178cdbd2 100644 --- a/homeassistant/components/prometheus/__init__.py +++ b/homeassistant/components/prometheus/__init__.py @@ -167,7 +167,7 @@ class PrometheusMetrics: try: value = float(value) metric.labels(**self._labels(state)).set(value) - except ValueError: + except (ValueError, TypeError): pass def _metric(self, metric, factory, documentation, extra_labels=None): diff --git a/homeassistant/components/prometheus/manifest.json b/homeassistant/components/prometheus/manifest.json index 9632b5b8a43..9b4df619fb5 100644 --- a/homeassistant/components/prometheus/manifest.json +++ b/homeassistant/components/prometheus/manifest.json @@ -4,5 +4,5 @@ "documentation": "https://www.home-assistant.io/integrations/prometheus", "requirements": ["prometheus_client==0.7.1"], "dependencies": ["http"], - "codeowners": [] + "codeowners": ["@knyar"] } diff --git a/homeassistant/components/proximity/__init__.py b/homeassistant/components/proximity/__init__.py index 4b6ff477053..7beaaaf00e1 100644 --- a/homeassistant/components/proximity/__init__.py +++ b/homeassistant/components/proximity/__init__.py @@ -7,6 +7,7 @@ from homeassistant.const import ( CONF_DEVICES, CONF_UNIT_OF_MEASUREMENT, CONF_ZONE, + LENGTH_FEET, LENGTH_KILOMETERS, LENGTH_METERS, ) @@ -34,7 +35,7 @@ DEFAULT_PROXIMITY_ZONE = "home" DEFAULT_TOLERANCE = 1 DOMAIN = "proximity" -UNITS = [LENGTH_KILOMETERS, LENGTH_METERS, "mi", "ft"] +UNITS = 
[LENGTH_KILOMETERS, LENGTH_METERS, "mi", LENGTH_FEET] ZONE_SCHEMA = vol.Schema( { diff --git a/homeassistant/components/ps4/manifest.json b/homeassistant/components/ps4/manifest.json index cd27a587d7f..4cc86f109f8 100644 --- a/homeassistant/components/ps4/manifest.json +++ b/homeassistant/components/ps4/manifest.json @@ -3,6 +3,6 @@ "name": "Sony PlayStation 4", "config_flow": true, "documentation": "https://www.home-assistant.io/integrations/ps4", - "requirements": ["pyps4-2ndscreen==1.0.7"], + "requirements": ["pyps4-2ndscreen==1.1.0"], "codeowners": ["@ktnrg45"] } diff --git a/homeassistant/components/ptvsd/manifest.json b/homeassistant/components/ptvsd/manifest.json index 0c487c4bb24..5feb04e92bb 100644 --- a/homeassistant/components/ptvsd/manifest.json +++ b/homeassistant/components/ptvsd/manifest.json @@ -2,6 +2,6 @@ "domain": "ptvsd", "name": "PTVSD - Python Tools for Visual Studio Debug Server", "documentation": "https://www.home-assistant.io/integrations/ptvsd", - "requirements": ["ptvsd==4.2.8"], + "requirements": ["ptvsd==4.3.2"], "codeowners": ["@swamp-ig"] } diff --git a/homeassistant/components/rachio/const.py b/homeassistant/components/rachio/const.py index 7f8111bd5e5..c9605712522 100644 --- a/homeassistant/components/rachio/const.py +++ b/homeassistant/components/rachio/const.py @@ -30,6 +30,7 @@ KEY_MODEL = "model" KEY_ON = "on" KEY_DURATION = "totalDuration" KEY_RAIN_DELAY = "rainDelayExpirationDate" +KEY_RAIN_DELAY_END = "endTime" KEY_RAIN_SENSOR_TRIPPED = "rainSensorTripped" KEY_STATUS = "status" KEY_SUBTYPE = "subType" diff --git a/homeassistant/components/rachio/switch.py b/homeassistant/components/rachio/switch.py index b16e3ce529e..9a656b5feb0 100644 --- a/homeassistant/components/rachio/switch.py +++ b/homeassistant/components/rachio/switch.py @@ -6,7 +6,8 @@ import logging from homeassistant.components.switch import SwitchEntity from homeassistant.core import callback from homeassistant.helpers.dispatcher import async_dispatcher_connect -from homeassistant.util.dt import as_timestamp, now +from homeassistant.helpers.event import async_track_point_in_utc_time +from homeassistant.util.dt import as_timestamp, now, parse_datetime, utc_from_timestamp from .const import ( CONF_MANUAL_RUN_MINS, @@ -23,6 +24,7 @@ from .const import ( KEY_NAME, KEY_ON, KEY_RAIN_DELAY, + KEY_RAIN_DELAY_END, KEY_SCHEDULE_ID, KEY_SUBTYPE, KEY_SUMMARY, @@ -177,6 +179,11 @@ class RachioStandbySwitch(RachioSwitch): class RachioRainDelay(RachioSwitch): """Representation of a rain delay status/switch.""" + def __init__(self, controller): + """Set up a Rachio rain delay switch.""" + self._cancel_update = None + super().__init__(controller) + @property def name(self) -> str: """Return the name of the switch.""" @@ -195,13 +202,29 @@ class RachioRainDelay(RachioSwitch): @callback def _async_handle_update(self, *args, **kwargs) -> None: """Update the state using webhook data.""" + if self._cancel_update: + self._cancel_update() + self._cancel_update = None + if args[0][0][KEY_SUBTYPE] == SUBTYPE_RAIN_DELAY_ON: + endtime = parse_datetime(args[0][0][KEY_RAIN_DELAY_END]) + _LOGGER.debug("Rain delay expires at %s", endtime) self._state = True + self._cancel_update = async_track_point_in_utc_time( + self.hass, self._delay_expiration, endtime + ) elif args[0][0][KEY_SUBTYPE] == SUBTYPE_RAIN_DELAY_OFF: self._state = False self.async_write_ha_state() + @callback + def _delay_expiration(self, *args) -> None: + """Trigger when a rain delay expires.""" + self._state = False + self._cancel_update = None + 
self.async_write_ha_state() + def turn_on(self, **kwargs) -> None: """Activate a 24 hour rain delay on the controller.""" self._controller.rachio.device.rainDelay(self._controller.controller_id, 86400) @@ -219,6 +242,16 @@ class RachioRainDelay(RachioSwitch): KEY_RAIN_DELAY ] / 1000 > as_timestamp(now()) + # If the controller was in a rain delay state during a reboot, this re-sets the timer + if self._state is True: + delay_end = utc_from_timestamp( + self._controller.init_data[KEY_RAIN_DELAY] / 1000 + ) + _LOGGER.debug("Re-setting rain delay timer for %s", delay_end) + self._cancel_update = async_track_point_in_utc_time( + self.hass, self._delay_expiration, delay_end + ) + self.async_on_remove( async_dispatcher_connect( self.hass, @@ -392,7 +425,6 @@ class RachioSchedule(RachioSwitch): def turn_on(self, **kwargs) -> None: """Start this schedule.""" - self._controller.rachio.schedulerule.start(self._schedule_id) _LOGGER.debug( "Schedule %s started on %s", self.name, self._controller.name, diff --git a/homeassistant/components/rainmachine/translations/nn.json b/homeassistant/components/rainmachine/translations/nn.json deleted file mode 100644 index 3aabaabc3d6..00000000000 --- a/homeassistant/components/rainmachine/translations/nn.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "title": "RainMachine" -} \ No newline at end of file diff --git a/homeassistant/components/recorder/__init__.py b/homeassistant/components/recorder/__init__.py index 8cceedb3985..aadc8e61fa1 100644 --- a/homeassistant/components/recorder/__init__.py +++ b/homeassistant/components/recorder/__init__.py @@ -7,7 +7,7 @@ import logging import queue import threading import time -from typing import Any, Dict, Optional +from typing import Any, Callable, List, Optional from sqlalchemy import create_engine, event as sqlalchemy_event, exc, select from sqlalchemy.orm import scoped_session, sessionmaker @@ -17,10 +17,7 @@ import voluptuous as vol from homeassistant.components import persistent_notification from homeassistant.const import ( ATTR_ENTITY_ID, - CONF_DOMAINS, - CONF_ENTITIES, CONF_EXCLUDE, - CONF_INCLUDE, EVENT_HOMEASSISTANT_START, EVENT_HOMEASSISTANT_STOP, EVENT_STATE_CHANGED, @@ -29,7 +26,11 @@ from homeassistant.const import ( ) from homeassistant.core import CoreState, HomeAssistant, callback import homeassistant.helpers.config_validation as cv -from homeassistant.helpers.entityfilter import generate_filter +from homeassistant.helpers.entityfilter import ( + INCLUDE_EXCLUDE_BASE_FILTER_SCHEMA, + INCLUDE_EXCLUDE_FILTER_SCHEMA_INNER, + convert_include_exclude_filter, +) from homeassistant.helpers.typing import ConfigType import homeassistant.util.dt as dt_util @@ -69,22 +70,12 @@ CONF_PURGE_INTERVAL = "purge_interval" CONF_EVENT_TYPES = "event_types" CONF_COMMIT_INTERVAL = "commit_interval" -FILTER_SCHEMA = vol.Schema( - { - vol.Optional(CONF_EXCLUDE, default={}): vol.Schema( - { - vol.Optional(CONF_DOMAINS): vol.All(cv.ensure_list, [cv.string]), - vol.Optional(CONF_ENTITIES): cv.entity_ids, - vol.Optional(CONF_EVENT_TYPES): vol.All(cv.ensure_list, [cv.string]), - } - ), - vol.Optional(CONF_INCLUDE, default={}): vol.Schema( - { - vol.Optional(CONF_DOMAINS): vol.All(cv.ensure_list, [cv.string]), - vol.Optional(CONF_ENTITIES): cv.entity_ids, - } - ), - } +EXCLUDE_SCHEMA = INCLUDE_EXCLUDE_FILTER_SCHEMA_INNER.extend( + {vol.Optional(CONF_EVENT_TYPES): vol.All(cv.ensure_list, [cv.string])} +) + +FILTER_SCHEMA = INCLUDE_EXCLUDE_BASE_FILTER_SCHEMA.extend( + {vol.Optional(CONF_EXCLUDE, default=EXCLUDE_SCHEMA({})): EXCLUDE_SCHEMA} 
) CONFIG_SCHEMA = vol.Schema( @@ -146,13 +137,13 @@ def run_information_with_session(session, point_in_time: Optional[datetime] = No """Return information about current run from the database.""" recorder_runs = RecorderRuns - res = ( - session.query(recorder_runs) - .filter( + query = session.query(recorder_runs) + if point_in_time: + query = query.filter( (recorder_runs.start < point_in_time) & (recorder_runs.end > point_in_time) ) - .first() - ) + + res = query.first() if res: session.expunge(res) return res @@ -161,6 +152,7 @@ def run_information_with_session(session, point_in_time: Optional[datetime] = No async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: """Set up the recorder.""" conf = config[DOMAIN] + entity_filter = convert_include_exclude_filter(conf) auto_purge = conf[CONF_AUTO_PURGE] keep_days = conf[CONF_PURGE_KEEP_DAYS] commit_interval = conf[CONF_COMMIT_INTERVAL] @@ -170,9 +162,8 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: db_url = conf.get(CONF_DB_URL) if not db_url: db_url = DEFAULT_URL.format(hass_config_path=hass.config.path(DEFAULT_DB_FILE)) - - include = conf.get(CONF_INCLUDE, {}) - exclude = conf.get(CONF_EXCLUDE, {}) + exclude = conf[CONF_EXCLUDE] + exclude_t = exclude.get(CONF_EVENT_TYPES, []) instance = hass.data[DATA_INSTANCE] = Recorder( hass=hass, auto_purge=auto_purge, @@ -181,8 +172,8 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: uri=db_url, db_max_retries=db_max_retries, db_retry_wait=db_retry_wait, - include=include, - exclude=exclude, + entity_filter=entity_filter, + exclude_t=exclude_t, ) instance.async_initialize() instance.start() @@ -213,8 +204,8 @@ class Recorder(threading.Thread): uri: str, db_max_retries: int, db_retry_wait: int, - include: Dict, - exclude: Dict, + entity_filter: Callable[[str], bool], + exclude_t: List[str], ) -> None: """Initialize the recorder.""" threading.Thread.__init__(self, name="Recorder") @@ -232,18 +223,15 @@ class Recorder(threading.Thread): self.engine: Any = None self.run_info: Any = None - self.entity_filter = generate_filter( - include.get(CONF_DOMAINS, []), - include.get(CONF_ENTITIES, []), - exclude.get(CONF_DOMAINS, []), - exclude.get(CONF_ENTITIES, []), - ) - self.exclude_t = exclude.get(CONF_EVENT_TYPES, []) + self.entity_filter = entity_filter + self.exclude_t = exclude_t self._timechanges_seen = 0 self._keepalive_count = 0 + self._old_state_ids = {} self.event_session = None self.get_session = None + self._completed_database_setup = False @callback def async_initialize(self): @@ -347,7 +335,7 @@ class Recorder(threading.Thread): self.event_session = self.get_session() # Use a session for the event read loop # with a commit every time the event time - # has changed. This reduces the disk io. + # has changed. This reduces the disk io. 
while True: event = self.queue.get() if event is None: self._close_run() self._close_connection() self.queue.task_done() return if isinstance(event, PurgeTask): - purge.purge_old_data(self, event.keep_days, event.repack) + # Schedule a new purge task if this one didn't finish + if not purge.purge_old_data(self, event.keep_days, event.repack): + self.queue.put(PurgeTask(event.keep_days, event.repack)) self.queue.task_done() continue if event.event_type == EVENT_TIME_CHANGED: @@ -383,6 +373,8 @@ class Recorder(threading.Thread): try: dbevent = Events.from_event(event) + if event.event_type == EVENT_STATE_CHANGED: + dbevent.event_data = "{}" self.event_session.add(dbevent) self.event_session.flush() except (TypeError, ValueError): @@ -394,8 +386,14 @@ class Recorder(threading.Thread): if dbevent and event.event_type == EVENT_STATE_CHANGED: try: dbstate = States.from_event(event) + dbstate.old_state_id = self._old_state_ids.get(dbstate.entity_id) dbstate.event_id = dbevent.event_id self.event_session.add(dbstate) + self.event_session.flush() + if "new_state" in event.data: + self._old_state_ids[dbstate.entity_id] = dbstate.state_id + elif dbstate.entity_id in self._old_state_ids: + del self._old_state_ids[dbstate.entity_id] except (TypeError, ValueError): _LOGGER.warning( "State is not JSON serializable: %s", @@ -503,6 +501,8 @@ class Recorder(threading.Thread): def setup_recorder_connection(dbapi_connection, connection_record): """Dbapi specific connection settings.""" + if self._completed_database_setup: + return # We do not import sqlite3 here so mysql/other # users do not have to pay for it to be loaded in @@ -514,6 +514,10 @@ class Recorder(threading.Thread): cursor.execute("PRAGMA journal_mode=WAL") cursor.close() dbapi_connection.isolation_level = old_isolation + # WAL mode only needs to be set up once + # instead of every time we open the sqlite connection + # as it's persistent and isn't free to call every time. + self._completed_database_setup = True elif self.db_url.startswith("mysql"): cursor = dbapi_connection.cursor() cursor.execute("SET session wait_timeout=28800") diff --git a/homeassistant/components/recorder/migration.py b/homeassistant/components/recorder/migration.py index 3a5ef2729be..1cff6d178ad 100644 --- a/homeassistant/components/recorder/migration.py +++ b/homeassistant/components/recorder/migration.py @@ -4,7 +4,7 @@ import os from sqlalchemy import Table, text from sqlalchemy.engine import reflection -from sqlalchemy.exc import OperationalError, SQLAlchemyError +from sqlalchemy.exc import InternalError, OperationalError, SQLAlchemyError from .models import SCHEMA_VERSION, Base, SchemaChanges from .util import session_scope @@ -64,11 +64,15 @@ def _create_index(engine, table_name, index_name): within the table definition described in the models """ table = Table(table_name, Base.metadata) - _LOGGER.debug("Looking up index for table %s", table_name) + _LOGGER.debug("Looking up index %s for table %s", index_name, table_name) # Look up the index object by name from the table in the models - index = next(idx for idx in table.indexes if idx.name == index_name) + index_list = [idx for idx in table.indexes if idx.name == index_name] + if not index_list: + _LOGGER.debug("The index %s no longer exists", index_name) + return + index = index_list[0] _LOGGER.debug("Creating %s index", index_name) - _LOGGER.info( + _LOGGER.warning( "Adding index `%s` to database. Note: this can take several " "minutes on large databases and slow computers. 
Please " "be patient!", @@ -83,6 +87,13 @@ def _create_index(engine, table_name, index_name): _LOGGER.warning( "Index %s already exists on %s, continuing", index_name, table_name ) + except InternalError as err: + if "duplicate" not in str(err).lower(): + raise + + _LOGGER.warning( + "Index %s already exists on %s, continuing", index_name, table_name + ) _LOGGER.debug("Finished creating %s", index_name) @@ -144,6 +155,11 @@ def _drop_index(engine, table_name, index_name): "Finished dropping index %s from table %s", index_name, table_name ) else: + if index_name == "ix_states_context_parent_id": + # Was only there on nightly so we do not want + # to generate log noise or issues about it. + return + _LOGGER.warning( "Failed to drop index %s from table %s. Schema " "Migration will continue; this is not a " @@ -155,7 +171,7 @@ def _drop_index(engine, table_name, index_name): def _add_columns(engine, table_name, columns_def): """Add columns to a table.""" - _LOGGER.info( + _LOGGER.warning( "Adding columns %s to table %s. Note: this can take several " "minutes on large databases and slow computers. Please " "be patient!", @@ -174,7 +190,7 @@ def _add_columns(engine, table_name, columns_def): ) ) return - except OperationalError: + except (InternalError, OperationalError): # Some engines support adding all columns at once, # this error is when they don't _LOGGER.info("Unable to use quick column add. Adding 1 by 1.") @@ -188,7 +204,7 @@ def _add_columns(engine, table_name, columns_def): ) ) ) - except OperationalError as err: + except (InternalError, OperationalError) as err: if "duplicate" not in str(err).lower(): raise @@ -249,14 +265,28 @@ def _apply_update(engine, new_version, old_version): elif new_version == 7: _create_index(engine, "states", "ix_states_entity_id") elif new_version == 8: - # Pending migration, want to group a few. 
- pass - # _add_columns(engine, "events", [ - # 'context_parent_id CHARACTER(36)', - # ]) - # _add_columns(engine, "states", [ - # 'context_parent_id CHARACTER(36)', - # ]) + _add_columns(engine, "events", ["context_parent_id CHARACTER(36)"]) + _add_columns(engine, "states", ["old_state_id INTEGER"]) + _create_index(engine, "events", "ix_events_context_parent_id") + elif new_version == 9: + # We now get the context from events with a join + # since it's always there on state_changed events + # + # Ideally we would drop the columns from the states + # table as well but sqlite doesn't support that + # and we would have to move to something like + # sqlalchemy alembic to make that work + # + _drop_index(engine, "states", "ix_states_context_id") + _drop_index(engine, "states", "ix_states_context_user_id") + # This index won't be there if they were not running + # nightly but we don't treat that as a critical issue + _drop_index(engine, "states", "ix_states_context_parent_id") + # Redundant keys on composite index: + # We already have ix_states_entity_id_last_updated + _drop_index(engine, "states", "ix_states_entity_id") + _create_index(engine, "events", "ix_events_event_type_time_fired") + _drop_index(engine, "events", "ix_events_event_type") else: raise ValueError(f"No schema migration defined for version {new_version}") diff --git a/homeassistant/components/recorder/models.py b/homeassistant/components/recorder/models.py index ce46ae25476..0566faf1c4d 100644 --- a/homeassistant/components/recorder/models.py +++ b/homeassistant/components/recorder/models.py @@ -24,11 +24,11 @@ import homeassistant.util.dt as dt_util # pylint: disable=invalid-name Base = declarative_base() -SCHEMA_VERSION = 7 +SCHEMA_VERSION = 9 _LOGGER = logging.getLogger(__name__) -DB_TIMEZONE = "Z" +DB_TIMEZONE = "+00:00" class Events(Base): # type: ignore @@ -36,14 +36,20 @@ class Events(Base): # type: ignore __tablename__ = "events" event_id = Column(Integer, primary_key=True) - event_type = Column(String(32), index=True) + event_type = Column(String(32)) event_data = Column(Text) origin = Column(String(32)) time_fired = Column(DateTime(timezone=True), index=True) created = Column(DateTime(timezone=True), default=dt_util.utcnow) context_id = Column(String(36), index=True) context_user_id = Column(String(36), index=True) - # context_parent_id = Column(String(36), index=True) + context_parent_id = Column(String(36), index=True) + + __table_args__ = ( + # Used for fetching events at a specific time + # see logbook + Index("ix_events_event_type_time_fired", "event_type", "time_fired"), + ) @staticmethod def from_event(event): @@ -55,12 +61,16 @@ class Events(Base): # type: ignore time_fired=event.time_fired, context_id=event.context.id, context_user_id=event.context.user_id, - # context_parent_id=event.context.parent_id, + context_parent_id=event.context.parent_id, ) - def to_native(self): + def to_native(self, validate_entity_id=True): """Convert to a native HA Event.""" - context = Context(id=self.context_id, user_id=self.context_user_id) + context = Context( + id=self.context_id, + user_id=self.context_user_id, + parent_id=self.context_parent_id, + ) try: return Event( self.event_type, @@ -81,16 +91,14 @@ class Events(Base): # type: ignore __tablename__ = "states" state_id = Column(Integer, primary_key=True) domain = Column(String(64)) - entity_id = Column(String(255), index=True) + entity_id = Column(String(255)) state = Column(String(255)) attributes = Column(Text) event_id = Column(Integer, ForeignKey("events.event_id"), 
index=True) last_changed = Column(DateTime(timezone=True), default=dt_util.utcnow) last_updated = Column(DateTime(timezone=True), default=dt_util.utcnow, index=True) created = Column(DateTime(timezone=True), default=dt_util.utcnow) - context_id = Column(String(36), index=True) - context_user_id = Column(String(36), index=True) - # context_parent_id = Column(String(36), index=True) + old_state_id = Column(Integer) __table_args__ = ( # Used for fetching the state of entities at a specific time @@ -104,12 +112,7 @@ class States(Base): # type: ignore entity_id = event.data["entity_id"] state = event.data.get("new_state") - dbstate = States( - entity_id=entity_id, - context_id=event.context.id, - context_user_id=event.context.user_id, - # context_parent_id=event.context.parent_id, - ) + dbstate = States(entity_id=entity_id) # State got deleted if state is None: @@ -127,9 +130,8 @@ class States(Base): # type: ignore return dbstate - def to_native(self): + def to_native(self, validate_entity_id=True): """Convert to an HA state object.""" - context = Context(id=self.context_id, user_id=self.context_user_id) try: return State( self.entity_id, @@ -137,10 +139,10 @@ class States(Base): # type: ignore json.loads(self.attributes), process_timestamp(self.last_changed), process_timestamp(self.last_updated), - context=context, - # Temp, because database can still store invalid entity IDs - # Remove with 1.0 or in 2020. - temp_invalid_id_bypass=True, + # Join the events table on event_id to get the context instead + # as it will always be there for state_changed events + context=Context(id=None), + validate_entity_id=validate_entity_id, ) except ValueError: # When json.loads fails @@ -181,7 +183,7 @@ class RecorderRuns(Base): # type: ignore return [row[0] for row in query] - def to_native(self): + def to_native(self, validate_entity_id=True): """Return self, native format is this model.""" return self @@ -200,6 +202,16 @@ def process_timestamp(ts): if ts is None: return None if ts.tzinfo is None: - return dt_util.UTC.localize(ts) + return ts.replace(tzinfo=dt_util.UTC) return dt_util.as_utc(ts) + + +def process_timestamp_to_utc_isoformat(ts): + """Process a timestamp into UTC isotime.""" + if ts is None: + return None + if ts.tzinfo is None: + return f"{ts.isoformat()}{DB_TIMEZONE}" + + return dt_util.as_utc(ts).isoformat() diff --git a/homeassistant/components/recorder/purge.py b/homeassistant/components/recorder/purge.py index 0c247c96126..19c2db47768 100644 --- a/homeassistant/components/recorder/purge.py +++ b/homeassistant/components/recorder/purge.py @@ -1,42 +1,100 @@ """Purge old data helper.""" from datetime import timedelta import logging +import time -from sqlalchemy.exc import SQLAlchemyError +from sqlalchemy.exc import OperationalError, SQLAlchemyError import homeassistant.util.dt as dt_util -from .models import Events, States -from .util import session_scope +from .models import Events, RecorderRuns, States +from .util import execute, session_scope _LOGGER = logging.getLogger(__name__) -def purge_old_data(instance, purge_days, repack): - """Purge events and states older than purge_days ago.""" +def purge_old_data(instance, purge_days: int, repack: bool) -> bool: + """Purge events and states older than purge_days ago. + + Cleans up a timeframe of an hour, based on the oldest record. 
+ """ purge_before = dt_util.utcnow() - timedelta(days=purge_days) - _LOGGER.debug("Purging events before %s", purge_before) + _LOGGER.debug("Purging states and events before target %s", purge_before) try: with session_scope(session=instance.get_session()) as session: + # Purge a max of 1 hour, based on the oldest states or events record + batch_purge_before = purge_before + + query = session.query(States).order_by(States.last_updated.asc()).limit(1) + states = execute(query, to_native=True, validate_entity_ids=False) + if states: + batch_purge_before = min( + batch_purge_before, states[0].last_updated + timedelta(hours=1), + ) + + query = session.query(Events).order_by(Events.time_fired.asc()).limit(1) + events = execute(query, to_native=True) + if events: + batch_purge_before = min( + batch_purge_before, events[0].time_fired + timedelta(hours=1), + ) + + _LOGGER.debug("Purging states and events before %s", batch_purge_before) + deleted_rows = ( session.query(States) - .filter(States.last_updated < purge_before) + .filter(States.last_updated < batch_purge_before) .delete(synchronize_session=False) ) _LOGGER.debug("Deleted %s states", deleted_rows) deleted_rows = ( session.query(Events) - .filter(Events.time_fired < purge_before) + .filter(Events.time_fired < batch_purge_before) .delete(synchronize_session=False) ) _LOGGER.debug("Deleted %s events", deleted_rows) - # Execute sqlite vacuum command to free up space on disk - if repack and instance.engine.driver in ("pysqlite", "postgresql"): - _LOGGER.debug("Vacuuming SQL DB to free space") - instance.engine.execute("VACUUM") + # If states or events purging isn't processing the purge_before yet, + # return false, as we are not done yet. + if batch_purge_before != purge_before: + _LOGGER.debug("Purging hasn't fully completed yet.") + return False + # Recorder runs is small, no need to batch run it + deleted_rows = ( + session.query(RecorderRuns) + .filter(RecorderRuns.start < purge_before) + .delete(synchronize_session=False) + ) + _LOGGER.debug("Deleted %s recorder_runs", deleted_rows) + + if repack: + # Execute sqlite or postgresql vacuum command to free up space on disk + if instance.engine.driver in ("pysqlite", "postgresql"): + _LOGGER.debug("Vacuuming SQL DB to free space") + instance.engine.execute("VACUUM") + # Optimize mysql / mariadb tables to free up space on disk + elif instance.engine.driver in ("mysqldb", "pymysql"): + _LOGGER.debug("Optimizing SQL DB to free space") + instance.engine.execute("OPTIMIZE TABLE states, events, recorder_runs") + + except OperationalError as err: + # Retry when one of the following MySQL errors occurred: + # 1205: Lock wait timeout exceeded; try restarting transaction + # 1206: The total number of locks exceeds the lock table size + # 1213: Deadlock found when trying to get lock; try restarting transaction + if instance.engine.driver in ("mysqldb", "pymysql") and err.orig.args[0] in ( + 1205, + 1206, + 1213, + ): + _LOGGER.info("%s; purge not completed, retrying", err.orig.args[1]) + time.sleep(instance.db_retry_wait) + return False + + _LOGGER.warning("Error purging history: %s.", err) except SQLAlchemyError as err: _LOGGER.warning("Error purging history: %s.", err) + return True diff --git a/homeassistant/components/recorder/util.py b/homeassistant/components/recorder/util.py index d7f0771b6f5..883bc41e71b 100644 --- a/homeassistant/components/recorder/util.py +++ b/homeassistant/components/recorder/util.py @@ -54,7 +54,7 @@ def commit(session, work): return False -def execute(qry, to_native=True): 
+def execute(qry, to_native=False, validate_entity_ids=True): """Query the database and convert the objects to HA native form. This method also retries a few times in the case of stale connections. @@ -64,7 +64,12 @@ def execute(qry, to_native=True): timer_start = time.perf_counter() if to_native: result = [ - row for row in (row.to_native() for row in qry) if row is not None + row + for row in ( + row.to_native(validate_entity_id=validate_entity_ids) + for row in qry + ) + if row is not None ] else: result = list(qry) diff --git a/homeassistant/components/remote_rpi_gpio/__init__.py b/homeassistant/components/remote_rpi_gpio/__init__.py index e1b66128e3f..aa0a75c3331 100644 --- a/homeassistant/components/remote_rpi_gpio/__init__.py +++ b/homeassistant/components/remote_rpi_gpio/__init__.py @@ -26,7 +26,9 @@ def setup_output(address, port, invert_logic): """Set up a GPIO as output.""" try: - return LED(port, active_high=invert_logic, pin_factory=PiGPIOFactory(address)) + return LED( + port, active_high=not invert_logic, pin_factory=PiGPIOFactory(address) + ) except (ValueError, IndexError, KeyError): return None diff --git a/homeassistant/components/remote_rpi_gpio/switch.py b/homeassistant/components/remote_rpi_gpio/switch.py index a5b255179cd..42ce258ef98 100644 --- a/homeassistant/components/remote_rpi_gpio/switch.py +++ b/homeassistant/components/remote_rpi_gpio/switch.py @@ -37,7 +37,7 @@ def setup_platform(hass, config, add_entities, discovery_info=None): led = remote_rpi_gpio.setup_output(address, port, invert_logic) except (ValueError, IndexError, KeyError, OSError): return - new_switch = RemoteRPiGPIOSwitch(name, led, invert_logic) + new_switch = RemoteRPiGPIOSwitch(name, led) devices.append(new_switch) add_entities(devices) @@ -46,11 +46,10 @@ def setup_platform(hass, config, add_entities, discovery_info=None): class RemoteRPiGPIOSwitch(SwitchDevice): """Representation of a Remote Raspberry Pi GPIO.""" - def __init__(self, name, led, invert_logic): + def __init__(self, name, led): """Initialize the pin.""" self._name = name or DEVICE_DEFAULT_NAME self._state = False - self._invert_logic = invert_logic self._switch = led @property @@ -75,12 +74,12 @@ class RemoteRPiGPIOSwitch(SwitchDevice): def turn_on(self, **kwargs): """Turn the device on.""" - remote_rpi_gpio.write_output(self._switch, 0 if self._invert_logic else 1) + remote_rpi_gpio.write_output(self._switch, 1) self._state = True self.schedule_update_ha_state() def turn_off(self, **kwargs): """Turn the device off.""" - remote_rpi_gpio.write_output(self._switch, 1 if self._invert_logic else 0) + remote_rpi_gpio.write_output(self._switch, 0) self._state = False self.schedule_update_ha_state() diff --git a/homeassistant/components/rest/binary_sensor.py b/homeassistant/components/rest/binary_sensor.py index 6d797dfd834..a78c6aa5f2b 100644 --- a/homeassistant/components/rest/binary_sensor.py +++ b/homeassistant/components/rest/binary_sensor.py @@ -12,12 +12,14 @@ from homeassistant.components.binary_sensor import ( from homeassistant.const import ( CONF_AUTHENTICATION, CONF_DEVICE_CLASS, + CONF_FORCE_UPDATE, CONF_HEADERS, CONF_METHOD, CONF_NAME, CONF_PASSWORD, CONF_PAYLOAD, CONF_RESOURCE, + CONF_RESOURCE_TEMPLATE, CONF_TIMEOUT, CONF_USERNAME, CONF_VALUE_TEMPLATE, @@ -35,11 +37,13 @@ _LOGGER = logging.getLogger(__name__) DEFAULT_METHOD = "GET" DEFAULT_NAME = "REST Binary Sensor" DEFAULT_VERIFY_SSL = True +DEFAULT_FORCE_UPDATE = False DEFAULT_TIMEOUT = 10 PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( { - vol.Required(CONF_RESOURCE): 
cv.url, + vol.Exclusive(CONF_RESOURCE, CONF_RESOURCE): cv.url, + vol.Exclusive(CONF_RESOURCE_TEMPLATE, CONF_RESOURCE): cv.template, vol.Optional(CONF_AUTHENTICATION): vol.In( [HTTP_BASIC_AUTHENTICATION, HTTP_DIGEST_AUTHENTICATION] ), @@ -52,15 +56,21 @@ PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( vol.Optional(CONF_USERNAME): cv.string, vol.Optional(CONF_VALUE_TEMPLATE): cv.template, vol.Optional(CONF_VERIFY_SSL, default=DEFAULT_VERIFY_SSL): cv.boolean, + vol.Optional(CONF_FORCE_UPDATE, default=DEFAULT_FORCE_UPDATE): cv.boolean, vol.Optional(CONF_TIMEOUT, default=DEFAULT_TIMEOUT): cv.positive_int, } ) +PLATFORM_SCHEMA = vol.All( + cv.has_at_least_one_key(CONF_RESOURCE, CONF_RESOURCE_TEMPLATE), PLATFORM_SCHEMA +) + def setup_platform(hass, config, add_entities, discovery_info=None): """Set up the REST binary sensor.""" name = config.get(CONF_NAME) resource = config.get(CONF_RESOURCE) + resource_template = config.get(CONF_RESOURCE_TEMPLATE) method = config.get(CONF_METHOD) payload = config.get(CONF_PAYLOAD) verify_ssl = config.get(CONF_VERIFY_SSL) @@ -70,6 +80,12 @@ def setup_platform(hass, config, add_entities, discovery_info=None): headers = config.get(CONF_HEADERS) device_class = config.get(CONF_DEVICE_CLASS) value_template = config.get(CONF_VALUE_TEMPLATE) + force_update = config.get(CONF_FORCE_UPDATE) + + if resource_template is not None: + resource_template.hass = hass + resource = resource_template.render() + if value_template is not None: value_template.hass = hass @@ -86,15 +102,34 @@ def setup_platform(hass, config, add_entities, discovery_info=None): if rest.data is None: raise PlatformNotReady - # No need to update the sensor now because it will determine its state - # based in the rest resource that has just been retrieved. - add_entities([RestBinarySensor(hass, rest, name, device_class, value_template)]) + add_entities( + [ + RestBinarySensor( + hass, + rest, + name, + device_class, + value_template, + force_update, + resource_template, + ) + ] + ) class RestBinarySensor(BinarySensorEntity): """Representation of a REST binary sensor.""" - def __init__(self, hass, rest, name, device_class, value_template): + def __init__( + self, + hass, + rest, + name, + device_class, + value_template, + force_update, + resource_template, + ): """Initialize a REST binary sensor.""" self._hass = hass self.rest = rest @@ -103,6 +138,8 @@ class RestBinarySensor(BinarySensorEntity): self._state = False self._previous_data = None self._value_template = value_template + self._force_update = force_update + self._resource_template = resource_template @property def name(self): @@ -139,6 +176,14 @@ class RestBinarySensor(BinarySensorEntity): response.lower(), False ) + @property + def force_update(self): + """Force update.""" + return self._force_update + def update(self): """Get the latest data from REST API and updates the state.""" + if self._resource_template is not None: + self.rest.set_url(self._resource_template.render()) + self.rest.update() diff --git a/homeassistant/components/roku/__init__.py b/homeassistant/components/roku/__init__.py index a3357ec4cf9..1a46fd9471c 100644 --- a/homeassistant/components/roku/__init__.py +++ b/homeassistant/components/roku/__init__.py @@ -38,7 +38,7 @@ CONFIG_SCHEMA = vol.Schema( ) PLATFORMS = [MEDIA_PLAYER_DOMAIN, REMOTE_DOMAIN] -SCAN_INTERVAL = timedelta(seconds=20) +SCAN_INTERVAL = timedelta(seconds=15) _LOGGER = logging.getLogger(__name__) diff --git a/homeassistant/components/roku/const.py b/homeassistant/components/roku/const.py index 
dc51e5d6f9b..4abbd9e109a 100644 --- a/homeassistant/components/roku/const.py +++ b/homeassistant/components/roku/const.py @@ -3,9 +3,13 @@ DOMAIN = "roku" # Attributes ATTR_IDENTIFIERS = "identifiers" +ATTR_KEYWORD = "keyword" ATTR_MANUFACTURER = "manufacturer" ATTR_MODEL = "model" ATTR_SOFTWARE_VERSION = "sw_version" # Default Values DEFAULT_PORT = 8060 + +# Services +SERVICE_SEARCH = "search" diff --git a/homeassistant/components/roku/manifest.json b/homeassistant/components/roku/manifest.json index 276fe2332f5..a5bed4530a8 100644 --- a/homeassistant/components/roku/manifest.json +++ b/homeassistant/components/roku/manifest.json @@ -2,7 +2,7 @@ "domain": "roku", "name": "Roku", "documentation": "https://www.home-assistant.io/integrations/roku", - "requirements": ["rokuecp==0.4.2"], + "requirements": ["rokuecp==0.5.0"], "ssdp": [ { "st": "roku:ecp", diff --git a/homeassistant/components/roku/media_player.py b/homeassistant/components/roku/media_player.py index 168d4a4a6fe..9a46d189486 100644 --- a/homeassistant/components/roku/media_player.py +++ b/homeassistant/components/roku/media_player.py @@ -2,6 +2,8 @@ import logging from typing import List +import voluptuous as vol + from homeassistant.components.media_player import MediaPlayerEntity from homeassistant.components.media_player.const import ( MEDIA_TYPE_APP, @@ -17,10 +19,17 @@ from homeassistant.components.media_player.const import ( SUPPORT_VOLUME_MUTE, SUPPORT_VOLUME_STEP, ) -from homeassistant.const import STATE_HOME, STATE_IDLE, STATE_PLAYING, STATE_STANDBY +from homeassistant.const import ( + STATE_HOME, + STATE_IDLE, + STATE_PAUSED, + STATE_PLAYING, + STATE_STANDBY, +) +from homeassistant.helpers import entity_platform from . import RokuDataUpdateCoordinator, RokuEntity, roku_exception_handler -from .const import DOMAIN +from .const import ATTR_KEYWORD, DOMAIN, SERVICE_SEARCH _LOGGER = logging.getLogger(__name__) @@ -37,6 +46,8 @@ SUPPORT_ROKU = ( | SUPPORT_TURN_OFF ) +SEARCH_SCHEMA = {vol.Required(ATTR_KEYWORD): str} + async def async_setup_entry(hass, entry, async_add_entities): """Set up the Roku config entry.""" @@ -44,6 +55,12 @@ async def async_setup_entry(hass, entry, async_add_entities): unique_id = coordinator.data.info.serial_number async_add_entities([RokuMediaPlayer(unique_id, coordinator)], True) + platform = entity_platform.current_platform.get() + + platform.async_register_entity_service( + SERVICE_SEARCH, SEARCH_SCHEMA, "search", + ) + class RokuMediaPlayer(RokuEntity, MediaPlayerEntity): """Representation of a Roku media player on the network.""" @@ -81,7 +98,10 @@ class RokuMediaPlayer(RokuEntity, MediaPlayerEntity): if self.coordinator.data.app.name == "Roku": return STATE_HOME - if self.coordinator.data.app.name is not None: + if self.coordinator.data.media and self.coordinator.data.media.paused: + return STATE_PAUSED + + if self.coordinator.data.app.name: return STATE_PLAYING return None @@ -161,6 +181,11 @@ class RokuMediaPlayer(RokuEntity, MediaPlayerEntity): """List of available input sources.""" return ["Home"] + sorted(app.name for app in self.coordinator.data.apps) + @roku_exception_handler + async def search(self, keyword): + """Emulate opening the search screen and entering the search keyword.""" + await self.coordinator.roku.search(keyword) + @roku_exception_handler async def async_turn_on(self) -> None: """Turn on the Roku.""" @@ -174,13 +199,13 @@ class RokuMediaPlayer(RokuEntity, MediaPlayerEntity): @roku_exception_handler async def async_media_pause(self) -> None: """Send pause command.""" - 
if self.state != STATE_STANDBY: + if self.state not in (STATE_STANDBY, STATE_PAUSED): await self.coordinator.roku.remote("play") @roku_exception_handler async def async_media_play(self) -> None: """Send play command.""" - if self.state != STATE_STANDBY: + if self.state not in (STATE_STANDBY, STATE_PLAYING): await self.coordinator.roku.remote("play") @roku_exception_handler diff --git a/homeassistant/components/roku/services.yaml b/homeassistant/components/roku/services.yaml new file mode 100644 index 00000000000..1d215306157 --- /dev/null +++ b/homeassistant/components/roku/services.yaml @@ -0,0 +1,9 @@ +search: + description: Emulates opening the search screen and entering the search keyword. + fields: + entity_id: + description: The entities to search on. + example: "media_player.roku" + keyword: + description: The keyword to search for. + example: "Space Jam" diff --git a/homeassistant/components/roku/translations/no.json b/homeassistant/components/roku/translations/no.json index e2c637ac957..43e0ea1f1c8 100644 --- a/homeassistant/components/roku/translations/no.json +++ b/homeassistant/components/roku/translations/no.json @@ -1,11 +1,11 @@ { "config": { "abort": { - "already_configured": "Roku-enheten er allerede konfigurert", + "already_configured": "Enheten er allerede konfigurert", "unknown": "Uventet feil" }, "error": { - "cannot_connect": "Klarte ikke \u00e5 koble til, vennligst pr\u00f8v igjen" + "cannot_connect": "Tilkobling mislyktes" }, "flow_title": "Roku: {name}", "step": { @@ -15,7 +15,7 @@ }, "user": { "data": { - "host": "Vert eller IP-adresse" + "host": "Vert " }, "description": "Fyll inn Roku-informasjonen din." } diff --git a/homeassistant/components/roomba/translations/fr.json b/homeassistant/components/roomba/translations/fr.json index d4025d2cddf..a8ff72b2ed5 100644 --- a/homeassistant/components/roomba/translations/fr.json +++ b/homeassistant/components/roomba/translations/fr.json @@ -1,8 +1,12 @@ { "config": { + "error": { + "cannot_connect": "Impossible de se connecter, veuillez r\u00e9essayer" + }, "step": { "user": { "data": { + "blid": "BLID", "continuous": "En continu", "delay": "D\u00e9lai", "host": "Nom d'h\u00f4te ou adresse IP", diff --git a/homeassistant/components/safe_mode/manifest.json b/homeassistant/components/safe_mode/manifest.json index 6da29c94790..78a656511bd 100644 --- a/homeassistant/components/safe_mode/manifest.json +++ b/homeassistant/components/safe_mode/manifest.json @@ -3,6 +3,6 @@ "name": "Safe Mode", "config_flow": false, "documentation": "https://www.home-assistant.io/integrations/safe_mode", - "dependencies": ["frontend", "config", "persistent_notification", "cloud"], + "dependencies": ["frontend", "persistent_notification", "cloud"], "codeowners": ["@home-assistant/core"] } diff --git a/homeassistant/components/samsungtv/config_flow.py b/homeassistant/components/samsungtv/config_flow.py index 95283d9606c..b939479a45a 100644 --- a/homeassistant/components/samsungtv/config_flow.py +++ b/homeassistant/components/samsungtv/config_flow.py @@ -116,17 +116,17 @@ class SamsungTVConfigFlow(config_entries.ConfigFlow, domain=DOMAIN): return self.async_show_form(step_id="user", data_schema=DATA_SCHEMA) - async def async_step_ssdp(self, user_input=None): + async def async_step_ssdp(self, discovery_info): """Handle a flow initialized by discovery.""" - host = urlparse(user_input[ATTR_SSDP_LOCATION]).hostname + host = urlparse(discovery_info[ATTR_SSDP_LOCATION]).hostname ip_address = await self.hass.async_add_executor_job(_get_ip, host) 
self._host = host self._ip = self.context[CONF_IP_ADDRESS] = ip_address - self._manufacturer = user_input.get(ATTR_UPNP_MANUFACTURER) - self._model = user_input.get(ATTR_UPNP_MODEL_NAME) + self._manufacturer = discovery_info.get(ATTR_UPNP_MANUFACTURER) + self._model = discovery_info.get(ATTR_UPNP_MODEL_NAME) self._name = f"Samsung {self._model}" - self._id = user_input.get(ATTR_UPNP_UDN) + self._id = discovery_info.get(ATTR_UPNP_UDN) self._title = self._model # probably access denied diff --git a/homeassistant/components/samsungtv/translations/es.json b/homeassistant/components/samsungtv/translations/es.json index 20bfe052924..b8f789420e5 100644 --- a/homeassistant/components/samsungtv/translations/es.json +++ b/homeassistant/components/samsungtv/translations/es.json @@ -2,7 +2,7 @@ "config": { "abort": { "already_configured": "Este televisor Samsung ya est\u00e1 configurado.", - "already_in_progress": "La configuraci\u00f3n del televisor Samsung ya est\u00e1 en progreso.", + "already_in_progress": "La configuraci\u00f3n del televisor Samsung ya est\u00e1 en marcha.", "auth_missing": "Home Assistant no est\u00e1 autenticado para conectarse a este televisor Samsung.", "not_successful": "No se puede conectar a este dispositivo Samsung TV.", "not_supported": "Esta televisi\u00f3n Samsung actualmente no es compatible." diff --git a/homeassistant/components/samsungtv/translations/no.json b/homeassistant/components/samsungtv/translations/no.json index afd5f7c633f..e0420ba74af 100644 --- a/homeassistant/components/samsungtv/translations/no.json +++ b/homeassistant/components/samsungtv/translations/no.json @@ -15,7 +15,7 @@ }, "user": { "data": { - "host": "Vert eller IP-adresse", + "host": "Vert ", "name": "Navn" }, "description": "Fyll inn Samsung TV-informasjonen din. Hvis du aldri har koblet til Home Assistant f\u00f8r, vil en popup p\u00e5 TVen be om godkjenning." 
diff --git a/homeassistant/components/script/__init__.py b/homeassistant/components/script/__init__.py index 6efd4c849aa..e80dcfa8027 100644 --- a/homeassistant/components/script/__init__.py +++ b/homeassistant/components/script/__init__.py @@ -10,7 +10,6 @@ from homeassistant.const import ( ATTR_NAME, CONF_ALIAS, CONF_ICON, - EVENT_SCRIPT_STARTED, SERVICE_RELOAD, SERVICE_TOGGLE, SERVICE_TURN_OFF, @@ -41,6 +40,8 @@ CONF_SEQUENCE = "sequence" ENTITY_ID_FORMAT = DOMAIN + ".{}" +EVENT_SCRIPT_STARTED = "script_started" + SCRIPT_ENTRY_SCHEMA = vol.Schema( { vol.Optional(CONF_ALIAS): cv.string, @@ -81,13 +82,11 @@ def scripts_with_entity(hass: HomeAssistant, entity_id: str) -> List[str]: component = hass.data[DOMAIN] - results = [] - - for script_entity in component.entities: - if entity_id in script_entity.script.referenced_entities: - results.append(script_entity.entity_id) - - return results + return [ + script_entity.entity_id + for script_entity in component.entities + if entity_id in script_entity.script.referenced_entities + ] @callback @@ -114,13 +113,11 @@ def scripts_with_device(hass: HomeAssistant, device_id: str) -> List[str]: component = hass.data[DOMAIN] - results = [] - - for script_entity in component.entities: - if device_id in script_entity.script.referenced_devices: - results.append(script_entity.entity_id) - - return results + return [ + script_entity.entity_id + for script_entity in component.entities + if device_id in script_entity.script.referenced_devices + ] @callback @@ -259,8 +256,7 @@ class ScriptEntity(ToggleEntity): @property def state_attributes(self): """Return the state attributes.""" - attrs = {} - attrs[ATTR_LAST_TRIGGERED] = self.script.last_triggered + attrs = {ATTR_LAST_TRIGGERED: self.script.last_triggered} if self.script.can_cancel: attrs[ATTR_CAN_CANCEL] = self.script.can_cancel if self.script.last_action: diff --git a/homeassistant/components/script/logbook.py b/homeassistant/components/script/logbook.py new file mode 100644 index 00000000000..72ff0d15fc7 --- /dev/null +++ b/homeassistant/components/script/logbook.py @@ -0,0 +1,21 @@ +"""Describe logbook events.""" +from homeassistant.const import ATTR_ENTITY_ID, ATTR_NAME +from homeassistant.core import callback + +from . 
import DOMAIN, EVENT_SCRIPT_STARTED + + +@callback +def async_describe_events(hass, async_describe_event): + """Describe logbook events.""" + + @callback + def async_describe_logbook_event(event): + """Describe the logbook event.""" + return { + "name": event.data.get(ATTR_NAME), + "message": "started", + "entity_id": event.data.get(ATTR_ENTITY_ID), + } + + async_describe_event(DOMAIN, EVENT_SCRIPT_STARTED, async_describe_logbook_event) diff --git a/homeassistant/components/script/manifest.json b/homeassistant/components/script/manifest.json index 32acfcbb93b..b9d333ce553 100644 --- a/homeassistant/components/script/manifest.json +++ b/homeassistant/components/script/manifest.json @@ -2,6 +2,8 @@ "domain": "script", "name": "Scripts", "documentation": "https://www.home-assistant.io/integrations/script", - "codeowners": ["@home-assistant/core"], + "codeowners": [ + "@home-assistant/core" + ], "quality_scale": "internal" } diff --git a/homeassistant/components/season/translations/sensor.af.json b/homeassistant/components/season/translations/sensor.af.json deleted file mode 100644 index 0dbe4a131ee..00000000000 --- a/homeassistant/components/season/translations/sensor.af.json +++ /dev/null @@ -1,8 +0,0 @@ -{ - "state": { - "autumn": "Herfs", - "spring": "Lente", - "summer": "Somer", - "winter": "Winter" - } -} \ No newline at end of file diff --git a/homeassistant/components/season/translations/sensor.cy.json b/homeassistant/components/season/translations/sensor.cy.json deleted file mode 100644 index 0d1553ac3ea..00000000000 --- a/homeassistant/components/season/translations/sensor.cy.json +++ /dev/null @@ -1,8 +0,0 @@ -{ - "state": { - "autumn": "Hydref", - "spring": "Gwanwyn", - "summer": "Haf", - "winter": "Gaeaf" - } -} \ No newline at end of file diff --git a/homeassistant/components/season/translations/sensor.et.json b/homeassistant/components/season/translations/sensor.et.json deleted file mode 100644 index 1415a3b907b..00000000000 --- a/homeassistant/components/season/translations/sensor.et.json +++ /dev/null @@ -1,8 +0,0 @@ -{ - "state": { - "autumn": "S\u00fcgis", - "spring": "Kevad", - "summer": "Suvi", - "winter": "Talv" - } -} \ No newline at end of file diff --git a/homeassistant/components/season/translations/sensor.eu.json b/homeassistant/components/season/translations/sensor.eu.json deleted file mode 100644 index f226d920043..00000000000 --- a/homeassistant/components/season/translations/sensor.eu.json +++ /dev/null @@ -1,8 +0,0 @@ -{ - "state": { - "autumn": "Udazkeneko", - "spring": "Spring", - "summer": "Uda", - "winter": "Winter" - } -} \ No newline at end of file diff --git a/homeassistant/components/season/translations/sensor.he.json b/homeassistant/components/season/translations/sensor.he.json deleted file mode 100644 index 282c24f3ad9..00000000000 --- a/homeassistant/components/season/translations/sensor.he.json +++ /dev/null @@ -1,8 +0,0 @@ -{ - "state": { - "autumn": "\u05e1\u05ea\u05d9\u05d5", - "spring": "\u05d0\u05d1\u05d9\u05d1", - "summer": "\u05e7\u05d9\u05e5", - "winter": "\u05d7\u05d5\u05e8\u05e3" - } -} \ No newline at end of file diff --git a/homeassistant/components/season/translations/sensor.id.json b/homeassistant/components/season/translations/sensor.id.json deleted file mode 100644 index ed0666aee36..00000000000 --- a/homeassistant/components/season/translations/sensor.id.json +++ /dev/null @@ -1,8 +0,0 @@ -{ - "state": { - "autumn": "Musim gugur", - "spring": "Musim semi", - "summer": "Musim panas", - "winter": "Musim dingin" - } -} \ No newline at 
end of file diff --git a/homeassistant/components/season/translations/sensor.ja.json b/homeassistant/components/season/translations/sensor.ja.json deleted file mode 100644 index e441b1aa8ac..00000000000 --- a/homeassistant/components/season/translations/sensor.ja.json +++ /dev/null @@ -1,8 +0,0 @@ -{ - "state": { - "autumn": "\u79cb", - "spring": "\u6625", - "summer": "\u590f", - "winter": "\u51ac" - } -} \ No newline at end of file diff --git a/homeassistant/components/season/translations/sensor.lv.json b/homeassistant/components/season/translations/sensor.lv.json deleted file mode 100644 index a96e1112f71..00000000000 --- a/homeassistant/components/season/translations/sensor.lv.json +++ /dev/null @@ -1,8 +0,0 @@ -{ - "state": { - "autumn": "Rudens", - "spring": "Pavasaris", - "summer": "Vasara", - "winter": "Ziema" - } -} \ No newline at end of file diff --git a/homeassistant/components/season/translations/sensor.ro.json b/homeassistant/components/season/translations/sensor.ro.json deleted file mode 100644 index 04f90318290..00000000000 --- a/homeassistant/components/season/translations/sensor.ro.json +++ /dev/null @@ -1,8 +0,0 @@ -{ - "state": { - "autumn": "Toamn\u0103", - "spring": "Prim\u0103var\u0103", - "summer": "Var\u0103", - "winter": "Iarn\u0103" - } -} \ No newline at end of file diff --git a/homeassistant/components/season/translations/sensor.vi.json b/homeassistant/components/season/translations/sensor.vi.json deleted file mode 100644 index a3bb21dee27..00000000000 --- a/homeassistant/components/season/translations/sensor.vi.json +++ /dev/null @@ -1,8 +0,0 @@ -{ - "state": { - "autumn": "M\u00f9a thu", - "spring": "M\u00f9a xu\u00e2n", - "summer": "M\u00f9a h\u00e8", - "winter": "M\u00f9a \u0111\u00f4ng" - } -} \ No newline at end of file diff --git a/homeassistant/components/sensor/translations/moon.ar.json b/homeassistant/components/sensor/translations/moon.ar.json deleted file mode 100644 index 94af741f5f4..00000000000 --- a/homeassistant/components/sensor/translations/moon.ar.json +++ /dev/null @@ -1,6 +0,0 @@ -{ - "state": { - "first_quarter": "\u0627\u0644\u0631\u0628\u0639 \u0627\u0644\u0623\u0648\u0644", - "full_moon": "\u0627\u0644\u0642\u0645\u0631 \u0627\u0644\u0643\u0627\u0645\u0644" - } -} \ No newline at end of file diff --git a/homeassistant/components/sensor/translations/moon.bg.json b/homeassistant/components/sensor/translations/moon.bg.json deleted file mode 100644 index c764ccbc3e0..00000000000 --- a/homeassistant/components/sensor/translations/moon.bg.json +++ /dev/null @@ -1,12 +0,0 @@ -{ - "state": { - "first_quarter": "\u041f\u044a\u0440\u0432\u0430 \u0447\u0435\u0442\u0432\u044a\u0440\u0442\u0438\u043d\u0430", - "full_moon": "\u041f\u044a\u043b\u043d\u043e\u043b\u0443\u043d\u0438\u0435", - "last_quarter": "\u041f\u043e\u0441\u043b\u0435\u0434\u043d\u0430 \u0447\u0435\u0442\u0432\u044a\u0440\u0442\u0438\u043d\u0430", - "new_moon": "\u041d\u043e\u0432\u043e\u043b\u0443\u043d\u0438\u0435", - "waning_crescent": "\u041d\u0430\u043c\u0430\u043b\u044f\u0432\u0430\u0449 \u043f\u043e\u043b\u0443\u043c\u0435\u0441\u0435\u0446", - "waning_gibbous": "\u041d\u0430\u043c\u0430\u043b\u044f\u0432\u0430\u0449 \u043f\u043e\u043b\u0443\u043c\u0435\u0441\u0435\u0446", - "waxing_crescent": "\u041d\u0430\u0440\u0430\u0441\u0442\u0432\u0430\u0449 \u043f\u043e\u043b\u0443\u043c\u0435\u0441\u0435\u0446", - "waxing_gibbous": "\u041d\u0430\u0440\u0430\u0441\u0442\u0432\u0430\u0449 \u043f\u043e\u043b\u0443\u043c\u0435\u0441\u0435\u0446" - } -} \ No newline at end of file diff 
--git a/homeassistant/components/sensor/translations/moon.ca.json b/homeassistant/components/sensor/translations/moon.ca.json deleted file mode 100644 index e294579da09..00000000000 --- a/homeassistant/components/sensor/translations/moon.ca.json +++ /dev/null @@ -1,12 +0,0 @@ -{ - "state": { - "first_quarter": "Quart creixent", - "full_moon": "Lluna plena", - "last_quarter": "Quart minvant", - "new_moon": "Lluna nova", - "waning_crescent": "Minvant (Lluna vella)", - "waning_gibbous": "Gibosa minvant", - "waxing_crescent": "Lluna nova visible", - "waxing_gibbous": "Gibosa creixent" - } -} \ No newline at end of file diff --git a/homeassistant/components/sensor/translations/moon.cs.json b/homeassistant/components/sensor/translations/moon.cs.json deleted file mode 100644 index ef1d5bf5f13..00000000000 --- a/homeassistant/components/sensor/translations/moon.cs.json +++ /dev/null @@ -1,12 +0,0 @@ -{ - "state": { - "first_quarter": "Prvn\u00ed \u010dtvr\u0165", - "full_moon": "\u00dapln\u011bk", - "last_quarter": "Posledn\u00ed \u010dtvr\u0165", - "new_moon": "Nov", - "waning_crescent": "Couvaj\u00edc\u00ed srpek", - "waning_gibbous": "Couvaj\u00edc\u00ed m\u011bs\u00edc", - "waxing_crescent": "Dor\u016fstaj\u00edc\u00ed srpek", - "waxing_gibbous": "Dor\u016fstaj\u00edc\u00ed m\u011bs\u00edc" - } -} \ No newline at end of file diff --git a/homeassistant/components/sensor/translations/moon.da.json b/homeassistant/components/sensor/translations/moon.da.json deleted file mode 100644 index c2406de68bb..00000000000 --- a/homeassistant/components/sensor/translations/moon.da.json +++ /dev/null @@ -1,12 +0,0 @@ -{ - "state": { - "first_quarter": "F\u00f8rste kvartal", - "full_moon": "Fuldm\u00e5ne", - "last_quarter": "Sidste kvartal", - "new_moon": "Nym\u00e5ne", - "waning_crescent": "Aftagende halvm\u00e5ne", - "waning_gibbous": "Aftagende m\u00e5ne", - "waxing_crescent": "Tiltagende halvm\u00e5ne", - "waxing_gibbous": "Tiltagende m\u00e5ne" - } -} \ No newline at end of file diff --git a/homeassistant/components/sensor/translations/moon.de.json b/homeassistant/components/sensor/translations/moon.de.json deleted file mode 100644 index 310ebf9c359..00000000000 --- a/homeassistant/components/sensor/translations/moon.de.json +++ /dev/null @@ -1,12 +0,0 @@ -{ - "state": { - "first_quarter": "Erstes Viertel", - "full_moon": "Vollmond", - "last_quarter": "Letztes Viertel", - "new_moon": "Neumond", - "waning_crescent": "Abnehmende Sichel", - "waning_gibbous": "Drittes Viertel", - "waxing_crescent": "Zunehmende Sichel", - "waxing_gibbous": "Zweites Viertel" - } -} \ No newline at end of file diff --git a/homeassistant/components/sensor/translations/moon.en.json b/homeassistant/components/sensor/translations/moon.en.json deleted file mode 100644 index 587b9496114..00000000000 --- a/homeassistant/components/sensor/translations/moon.en.json +++ /dev/null @@ -1,12 +0,0 @@ -{ - "state": { - "first_quarter": "First quarter", - "full_moon": "Full moon", - "last_quarter": "Last quarter", - "new_moon": "New moon", - "waning_crescent": "Waning crescent", - "waning_gibbous": "Waning gibbous", - "waxing_crescent": "Waxing crescent", - "waxing_gibbous": "Waxing gibbous" - } -} \ No newline at end of file diff --git a/homeassistant/components/sensor/translations/moon.es-419.json b/homeassistant/components/sensor/translations/moon.es-419.json deleted file mode 100644 index 71cfab736cb..00000000000 --- a/homeassistant/components/sensor/translations/moon.es-419.json +++ /dev/null @@ -1,12 +0,0 @@ -{ - "state": { - 
"first_quarter": "Cuarto creciente", - "full_moon": "Luna llena", - "last_quarter": "Cuarto menguante", - "new_moon": "Luna nueva", - "waning_crescent": "Luna menguante", - "waning_gibbous": "Luna menguante gibosa", - "waxing_crescent": "Luna creciente", - "waxing_gibbous": "Luna creciente gibosa" - } -} \ No newline at end of file diff --git a/homeassistant/components/sensor/translations/moon.es.json b/homeassistant/components/sensor/translations/moon.es.json deleted file mode 100644 index bf8cacca21c..00000000000 --- a/homeassistant/components/sensor/translations/moon.es.json +++ /dev/null @@ -1,12 +0,0 @@ -{ - "state": { - "first_quarter": "Primer cuarto", - "full_moon": "Luna llena", - "last_quarter": "\u00daltimo cuarto", - "new_moon": "Luna nueva", - "waning_crescent": "Luna menguante", - "waning_gibbous": "Gibosa menguante", - "waxing_crescent": "Luna creciente", - "waxing_gibbous": "Gibosa creciente" - } -} \ No newline at end of file diff --git a/homeassistant/components/sensor/translations/moon.et.json b/homeassistant/components/sensor/translations/moon.et.json deleted file mode 100644 index 0d82e0d8f94..00000000000 --- a/homeassistant/components/sensor/translations/moon.et.json +++ /dev/null @@ -1,12 +0,0 @@ -{ - "state": { - "first_quarter": "Esimene veerand", - "full_moon": "T\u00e4iskuu", - "last_quarter": "Viimane veerand", - "new_moon": "Kuu loomine", - "waning_crescent": "Vanakuu", - "waning_gibbous": "Kahanev kuu", - "waxing_crescent": "Noorkuu", - "waxing_gibbous": "Kasvav kuu" - } -} \ No newline at end of file diff --git a/homeassistant/components/sensor/translations/moon.fi.json b/homeassistant/components/sensor/translations/moon.fi.json deleted file mode 100644 index 10f8bb9b8a6..00000000000 --- a/homeassistant/components/sensor/translations/moon.fi.json +++ /dev/null @@ -1,12 +0,0 @@ -{ - "state": { - "first_quarter": "Ensimm\u00e4inen nelj\u00e4nnes", - "full_moon": "T\u00e4ysikuu", - "last_quarter": "Viimeinen nelj\u00e4nnes", - "new_moon": "Uusikuu", - "waning_crescent": "V\u00e4henev\u00e4 sirppi", - "waning_gibbous": "V\u00e4henev\u00e4 kuperakuu", - "waxing_crescent": "Kasvava sirppi", - "waxing_gibbous": "Kasvava kuperakuu" - } -} \ No newline at end of file diff --git a/homeassistant/components/sensor/translations/moon.fr.json b/homeassistant/components/sensor/translations/moon.fr.json deleted file mode 100644 index fac2b654a46..00000000000 --- a/homeassistant/components/sensor/translations/moon.fr.json +++ /dev/null @@ -1,12 +0,0 @@ -{ - "state": { - "first_quarter": "Premier quartier", - "full_moon": "Pleine lune", - "last_quarter": "Dernier quartier", - "new_moon": "Nouvelle lune", - "waning_crescent": "Dernier croissant", - "waning_gibbous": "Gibbeuse d\u00e9croissante", - "waxing_crescent": "Premier croissant", - "waxing_gibbous": "Gibbeuse croissante" - } -} \ No newline at end of file diff --git a/homeassistant/components/sensor/translations/moon.he.json b/homeassistant/components/sensor/translations/moon.he.json deleted file mode 100644 index 6531d3c8265..00000000000 --- a/homeassistant/components/sensor/translations/moon.he.json +++ /dev/null @@ -1,12 +0,0 @@ -{ - "state": { - "first_quarter": "\u05e8\u05d1\u05e2\u05d5\u05df \u05e8\u05d0\u05e9\u05d5\u05df", - "full_moon": "\u05d9\u05e8\u05d7 \u05de\u05dc\u05d0", - "last_quarter": "\u05e8\u05d1\u05e2\u05d5\u05df \u05d0\u05d7\u05e8\u05d5\u05df", - "new_moon": "\u05e8\u05d0\u05e9 \u05d7\u05d5\u05d3\u05e9", - "waning_crescent": "Waning crescent", - "waning_gibbous": "Waning gibbous", - 
"waxing_crescent": "Waxing crescent", - "waxing_gibbous": "Waxing gibbous" - } -} \ No newline at end of file diff --git a/homeassistant/components/sensor/translations/moon.hu.json b/homeassistant/components/sensor/translations/moon.hu.json deleted file mode 100644 index fff9f51f50d..00000000000 --- a/homeassistant/components/sensor/translations/moon.hu.json +++ /dev/null @@ -1,12 +0,0 @@ -{ - "state": { - "first_quarter": "Els\u0151 negyed", - "full_moon": "Telihold", - "last_quarter": "Utols\u00f3 negyed", - "new_moon": "\u00dajhold", - "waning_crescent": "Fogy\u00f3 holdsarl\u00f3", - "waning_gibbous": "Fogy\u00f3 hold", - "waxing_crescent": "N\u00f6v\u0151 holdsarl\u00f3", - "waxing_gibbous": "N\u00f6v\u0151 hold" - } -} \ No newline at end of file diff --git a/homeassistant/components/sensor/translations/moon.id.json b/homeassistant/components/sensor/translations/moon.id.json deleted file mode 100644 index 3ce14204fb5..00000000000 --- a/homeassistant/components/sensor/translations/moon.id.json +++ /dev/null @@ -1,12 +0,0 @@ -{ - "state": { - "first_quarter": "Babak pertama", - "full_moon": "Bulan purnama", - "last_quarter": "Kuartal terakhir", - "new_moon": "Bulan baru", - "waning_crescent": "Waning crescent", - "waning_gibbous": "Waning gibbous", - "waxing_crescent": "Waxing crescent", - "waxing_gibbous": "Waxing gibbous" - } -} \ No newline at end of file diff --git a/homeassistant/components/sensor/translations/moon.it.json b/homeassistant/components/sensor/translations/moon.it.json deleted file mode 100644 index 39c7f22f7af..00000000000 --- a/homeassistant/components/sensor/translations/moon.it.json +++ /dev/null @@ -1,12 +0,0 @@ -{ - "state": { - "first_quarter": "Primo quarto", - "full_moon": "Luna piena", - "last_quarter": "Ultimo quarto", - "new_moon": "Luna nuova", - "waning_crescent": "Luna calante", - "waning_gibbous": "Gibbosa calante", - "waxing_crescent": "Luna crescente", - "waxing_gibbous": "Gibbosa crescente" - } -} \ No newline at end of file diff --git a/homeassistant/components/sensor/translations/moon.ko.json b/homeassistant/components/sensor/translations/moon.ko.json deleted file mode 100644 index 7e62250b892..00000000000 --- a/homeassistant/components/sensor/translations/moon.ko.json +++ /dev/null @@ -1,12 +0,0 @@ -{ - "state": { - "first_quarter": "\ubc18\ub2ec(\ucc28\uc624\ub974\ub294)", - "full_moon": "\ubcf4\ub984\ub2ec", - "last_quarter": "\ubc18\ub2ec(\uc904\uc5b4\ub4dc\ub294)", - "new_moon": "\uc0ad\uc6d4", - "waning_crescent": "\uadf8\ubbd0\ub2ec", - "waning_gibbous": "\ud558\ud604\ub2ec", - "waxing_crescent": "\ucd08\uc2b9\ub2ec", - "waxing_gibbous": "\uc0c1\ud604\ub2ec" - } -} \ No newline at end of file diff --git a/homeassistant/components/sensor/translations/moon.lb.json b/homeassistant/components/sensor/translations/moon.lb.json deleted file mode 100644 index 2aa7ea03db7..00000000000 --- a/homeassistant/components/sensor/translations/moon.lb.json +++ /dev/null @@ -1,12 +0,0 @@ -{ - "state": { - "first_quarter": "\u00c9ischt V\u00e9ierel", - "full_moon": "Vollmound", - "last_quarter": "L\u00e4scht V\u00e9ierel", - "new_moon": "Neimound", - "waning_crescent": "Ofhuelende Mound", - "waning_gibbous": "Dr\u00ebtt V\u00e9ierel", - "waxing_crescent": "Zouhuelende Mound", - "waxing_gibbous": "Zweet V\u00e9ierel" - } -} \ No newline at end of file diff --git a/homeassistant/components/sensor/translations/moon.nl.json b/homeassistant/components/sensor/translations/moon.nl.json deleted file mode 100644 index 5e78d429b9f..00000000000 --- 
a/homeassistant/components/sensor/translations/moon.nl.json +++ /dev/null @@ -1,12 +0,0 @@ -{ - "state": { - "first_quarter": "Eerste kwartier", - "full_moon": "Volle maan", - "last_quarter": "Laatste kwartier", - "new_moon": "Nieuwe maan", - "waning_crescent": "Krimpende, sikkelvormige maan", - "waning_gibbous": "Krimpende, vooruitspringende maan", - "waxing_crescent": "Wassende, sikkelvormige maan", - "waxing_gibbous": "Wassende, vooruitspringende maan" - } -} \ No newline at end of file diff --git a/homeassistant/components/sensor/translations/moon.nn.json b/homeassistant/components/sensor/translations/moon.nn.json deleted file mode 100644 index 7c516bcce50..00000000000 --- a/homeassistant/components/sensor/translations/moon.nn.json +++ /dev/null @@ -1,12 +0,0 @@ -{ - "state": { - "first_quarter": "Fyrste kvartal", - "full_moon": "Fullm\u00e5ne", - "last_quarter": "Siste kvartal", - "new_moon": "Nym\u00e5ne", - "waning_crescent": "Minkande halvm\u00e5ne", - "waning_gibbous": "Minkande m\u00e5ne", - "waxing_crescent": "Veksande halvm\u00e5ne", - "waxing_gibbous": "Veksande m\u00e5ne" - } -} \ No newline at end of file diff --git a/homeassistant/components/sensor/translations/moon.no.json b/homeassistant/components/sensor/translations/moon.no.json deleted file mode 100644 index 19f9985accb..00000000000 --- a/homeassistant/components/sensor/translations/moon.no.json +++ /dev/null @@ -1,12 +0,0 @@ -{ - "state": { - "first_quarter": "F\u00f8rste kvarter", - "full_moon": "Fullm\u00e5ne", - "last_quarter": "Siste kvarter", - "new_moon": "Nym\u00e5ne", - "waning_crescent": "Minkende halvm\u00e5ne", - "waning_gibbous": "Minkende trekvartm\u00e5ne", - "waxing_crescent": "Voksende halvm\u00e5ne", - "waxing_gibbous": "Voksende trekvartm\u00e5ne" - } -} \ No newline at end of file diff --git a/homeassistant/components/sensor/translations/moon.pl.json b/homeassistant/components/sensor/translations/moon.pl.json deleted file mode 100644 index 85dfe79bae4..00000000000 --- a/homeassistant/components/sensor/translations/moon.pl.json +++ /dev/null @@ -1,12 +0,0 @@ -{ - "state": { - "first_quarter": "pierwsza kwadra", - "full_moon": "pe\u0142nia", - "last_quarter": "ostatnia kwadra", - "new_moon": "n\u00f3w", - "waning_crescent": "sierp ubywaj\u0105cy", - "waning_gibbous": "ubywaj\u0105cy garbaty", - "waxing_crescent": "sierp przybywaj\u0105cy", - "waxing_gibbous": "przybywaj\u0105cy garbaty" - } -} \ No newline at end of file diff --git a/homeassistant/components/sensor/translations/moon.pt-BR.json b/homeassistant/components/sensor/translations/moon.pt-BR.json deleted file mode 100644 index 93b17784a4e..00000000000 --- a/homeassistant/components/sensor/translations/moon.pt-BR.json +++ /dev/null @@ -1,12 +0,0 @@ -{ - "state": { - "first_quarter": "Quarto crescente", - "full_moon": "Lua cheia", - "last_quarter": "Quarto minguante", - "new_moon": "Lua Nova", - "waning_crescent": "Minguante", - "waning_gibbous": "Minguante gibosa", - "waxing_crescent": "Crescente", - "waxing_gibbous": "Crescente gibosa" - } -} \ No newline at end of file diff --git a/homeassistant/components/sensor/translations/moon.pt.json b/homeassistant/components/sensor/translations/moon.pt.json deleted file mode 100644 index 14961ab98f0..00000000000 --- a/homeassistant/components/sensor/translations/moon.pt.json +++ /dev/null @@ -1,12 +0,0 @@ -{ - "state": { - "first_quarter": "Quarto crescente", - "full_moon": "Lua cheia", - "last_quarter": "Quarto minguante", - "new_moon": "Lua nova", - "waning_crescent": "Lua crescente", - 
"waning_gibbous": "Minguante convexa", - "waxing_crescent": "Lua minguante", - "waxing_gibbous": "Crescente convexa" - } -} \ No newline at end of file diff --git a/homeassistant/components/sensor/translations/moon.ro.json b/homeassistant/components/sensor/translations/moon.ro.json deleted file mode 100644 index 6f64e497c74..00000000000 --- a/homeassistant/components/sensor/translations/moon.ro.json +++ /dev/null @@ -1,6 +0,0 @@ -{ - "state": { - "full_moon": "Lun\u0103 plin\u0103", - "new_moon": "Lun\u0103 nou\u0103" - } -} \ No newline at end of file diff --git a/homeassistant/components/sensor/translations/moon.ru.json b/homeassistant/components/sensor/translations/moon.ru.json deleted file mode 100644 index 6db932a1aed..00000000000 --- a/homeassistant/components/sensor/translations/moon.ru.json +++ /dev/null @@ -1,12 +0,0 @@ -{ - "state": { - "first_quarter": "\u041f\u0435\u0440\u0432\u0430\u044f \u0447\u0435\u0442\u0432\u0435\u0440\u0442\u044c", - "full_moon": "\u041f\u043e\u043b\u043d\u043e\u043b\u0443\u043d\u0438\u0435", - "last_quarter": "\u041f\u043e\u0441\u043b\u0435\u0434\u043d\u044f\u044f \u0447\u0435\u0442\u0432\u0435\u0440\u0442\u044c", - "new_moon": "\u041d\u043e\u0432\u043e\u043b\u0443\u043d\u0438\u0435", - "waning_crescent": "\u0421\u0442\u0430\u0440\u0430\u044f \u043b\u0443\u043d\u0430", - "waning_gibbous": "\u0423\u0431\u044b\u0432\u0430\u044e\u0449\u0430\u044f \u043b\u0443\u043d\u0430", - "waxing_crescent": "\u041c\u043e\u043b\u043e\u0434\u0430\u044f \u043b\u0443\u043d\u0430", - "waxing_gibbous": "\u041f\u0440\u0438\u0431\u044b\u0432\u0430\u044e\u0449\u0430\u044f \u043b\u0443\u043d\u0430" - } -} \ No newline at end of file diff --git a/homeassistant/components/sensor/translations/moon.sl.json b/homeassistant/components/sensor/translations/moon.sl.json deleted file mode 100644 index 1b69e10e6f9..00000000000 --- a/homeassistant/components/sensor/translations/moon.sl.json +++ /dev/null @@ -1,12 +0,0 @@ -{ - "state": { - "first_quarter": "Prvi krajec", - "full_moon": "Polna luna", - "last_quarter": "Zadnji krajec", - "new_moon": "Mlaj", - "waning_crescent": "Zadnji izbo\u010dec", - "waning_gibbous": "Zadnji srpec", - "waxing_crescent": "Prvi izbo\u010dec", - "waxing_gibbous": "Prvi srpec" - } -} \ No newline at end of file diff --git a/homeassistant/components/sensor/translations/moon.sv.json b/homeassistant/components/sensor/translations/moon.sv.json deleted file mode 100644 index ae69c1c9654..00000000000 --- a/homeassistant/components/sensor/translations/moon.sv.json +++ /dev/null @@ -1,12 +0,0 @@ -{ - "state": { - "first_quarter": "F\u00f6rsta kvartalet", - "full_moon": "Fullm\u00e5ne", - "last_quarter": "Sista kvartalet", - "new_moon": "Nym\u00e5ne", - "waning_crescent": "Avtagande halvm\u00e5ne", - "waning_gibbous": "Avtagande halvm\u00e5ne", - "waxing_crescent": "Tilltagande halvm\u00e5ne", - "waxing_gibbous": "Tilltagande halvm\u00e5ne" - } -} \ No newline at end of file diff --git a/homeassistant/components/sensor/translations/moon.th.json b/homeassistant/components/sensor/translations/moon.th.json deleted file mode 100644 index 5d65c23226d..00000000000 --- a/homeassistant/components/sensor/translations/moon.th.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "state": { - "full_moon": "\u0e1e\u0e23\u0e30\u0e08\u0e31\u0e19\u0e17\u0e23\u0e4c\u0e40\u0e15\u0e47\u0e21\u0e14\u0e27\u0e07" - } -} \ No newline at end of file diff --git a/homeassistant/components/sensor/translations/moon.uk.json b/homeassistant/components/sensor/translations/moon.uk.json deleted file mode 100644 index 
2467a705d50..00000000000 --- a/homeassistant/components/sensor/translations/moon.uk.json +++ /dev/null @@ -1,12 +0,0 @@ -{ - "state": { - "first_quarter": "\u041f\u0435\u0440\u0448\u0430 \u0447\u0432\u0435\u0440\u0442\u044c", - "full_moon": "\u041f\u043e\u0432\u043d\u0438\u0439 \u043c\u0456\u0441\u044f\u0446\u044c", - "last_quarter": "\u041e\u0441\u0442\u0430\u043d\u043d\u044f \u0447\u0432\u0435\u0440\u0442\u044c", - "new_moon": "\u041d\u043e\u0432\u0438\u0439 \u043c\u0456\u0441\u044f\u0446\u044c", - "waning_crescent": "\u0417\u0440\u043e\u0441\u0442\u0430\u044e\u0447\u0438\u0439 \u043f\u0456\u0432\u043c\u0456\u0441\u044f\u0446\u044c", - "waning_gibbous": "\u041c\u043e\u043b\u043e\u0434\u0438\u0439 \u043c\u0456\u0441\u044f\u0446\u044c", - "waxing_crescent": "\u041c\u043e\u043b\u043e\u0434\u0438\u0439 \u043c\u0456\u0441\u044f\u0446\u044c", - "waxing_gibbous": "\u041c\u043e\u043b\u043e\u0434\u0438\u0439 \u043c\u0456\u0441\u044f\u0446\u044c" - } -} \ No newline at end of file diff --git a/homeassistant/components/sensor/translations/moon.zh-Hans.json b/homeassistant/components/sensor/translations/moon.zh-Hans.json deleted file mode 100644 index 22ab0d49f62..00000000000 --- a/homeassistant/components/sensor/translations/moon.zh-Hans.json +++ /dev/null @@ -1,12 +0,0 @@ -{ - "state": { - "first_quarter": "\u4e0a\u5f26\u6708", - "full_moon": "\u6ee1\u6708", - "last_quarter": "\u4e0b\u5f26\u6708", - "new_moon": "\u65b0\u6708", - "waning_crescent": "\u6b8b\u6708", - "waning_gibbous": "\u4e8f\u51f8\u6708", - "waxing_crescent": "\u5ce8\u7709\u6708", - "waxing_gibbous": "\u76c8\u51f8\u6708" - } -} \ No newline at end of file diff --git a/homeassistant/components/sensor/translations/moon.zh-Hant.json b/homeassistant/components/sensor/translations/moon.zh-Hant.json deleted file mode 100644 index 9cf4aad011e..00000000000 --- a/homeassistant/components/sensor/translations/moon.zh-Hant.json +++ /dev/null @@ -1,12 +0,0 @@ -{ - "state": { - "first_quarter": "\u4e0a\u5f26\u6708", - "full_moon": "\u6eff\u6708", - "last_quarter": "\u4e0b\u5f26\u6708", - "new_moon": "\u65b0\u6708", - "waning_crescent": "\u6b98\u6708", - "waning_gibbous": "\u8667\u51f8\u6708", - "waxing_crescent": "\u86fe\u7709\u6708", - "waxing_gibbous": "\u76c8\u51f8\u6708" - } -} \ No newline at end of file diff --git a/homeassistant/components/sensor/translations/season.af.json b/homeassistant/components/sensor/translations/season.af.json deleted file mode 100644 index 0dbe4a131ee..00000000000 --- a/homeassistant/components/sensor/translations/season.af.json +++ /dev/null @@ -1,8 +0,0 @@ -{ - "state": { - "autumn": "Herfs", - "spring": "Lente", - "summer": "Somer", - "winter": "Winter" - } -} \ No newline at end of file diff --git a/homeassistant/components/sensor/translations/season.bg.json b/homeassistant/components/sensor/translations/season.bg.json deleted file mode 100644 index e3865ca42e5..00000000000 --- a/homeassistant/components/sensor/translations/season.bg.json +++ /dev/null @@ -1,8 +0,0 @@ -{ - "state": { - "autumn": "\u0415\u0441\u0435\u043d", - "spring": "\u041f\u0440\u043e\u043b\u0435\u0442", - "summer": "\u041b\u044f\u0442\u043e", - "winter": "\u0417\u0438\u043c\u0430" - } -} \ No newline at end of file diff --git a/homeassistant/components/sensor/translations/season.ca.json b/homeassistant/components/sensor/translations/season.ca.json deleted file mode 100644 index 9bce187ec65..00000000000 --- a/homeassistant/components/sensor/translations/season.ca.json +++ /dev/null @@ -1,8 +0,0 @@ -{ - "state": { - "autumn": "Tardor", - 
"spring": "Primavera", - "summer": "Estiu", - "winter": "Hivern" - } -} \ No newline at end of file diff --git a/homeassistant/components/sensor/translations/season.cs.json b/homeassistant/components/sensor/translations/season.cs.json deleted file mode 100644 index e2d7e7919be..00000000000 --- a/homeassistant/components/sensor/translations/season.cs.json +++ /dev/null @@ -1,8 +0,0 @@ -{ - "state": { - "autumn": "Podzim", - "spring": "Jaro", - "summer": "L\u00e9to", - "winter": "Zima" - } -} \ No newline at end of file diff --git a/homeassistant/components/sensor/translations/season.cy.json b/homeassistant/components/sensor/translations/season.cy.json deleted file mode 100644 index 0d1553ac3ea..00000000000 --- a/homeassistant/components/sensor/translations/season.cy.json +++ /dev/null @@ -1,8 +0,0 @@ -{ - "state": { - "autumn": "Hydref", - "spring": "Gwanwyn", - "summer": "Haf", - "winter": "Gaeaf" - } -} \ No newline at end of file diff --git a/homeassistant/components/sensor/translations/season.da.json b/homeassistant/components/sensor/translations/season.da.json deleted file mode 100644 index 9cded2f9c0f..00000000000 --- a/homeassistant/components/sensor/translations/season.da.json +++ /dev/null @@ -1,8 +0,0 @@ -{ - "state": { - "autumn": "Efter\u00e5r", - "spring": "For\u00e5r", - "summer": "Sommer", - "winter": "Vinter" - } -} \ No newline at end of file diff --git a/homeassistant/components/sensor/translations/season.de.json b/homeassistant/components/sensor/translations/season.de.json deleted file mode 100644 index 50d702340b9..00000000000 --- a/homeassistant/components/sensor/translations/season.de.json +++ /dev/null @@ -1,8 +0,0 @@ -{ - "state": { - "autumn": "Herbst", - "spring": "Fr\u00fchling", - "summer": "Sommer", - "winter": "Winter" - } -} \ No newline at end of file diff --git a/homeassistant/components/sensor/translations/season.en.json b/homeassistant/components/sensor/translations/season.en.json deleted file mode 100644 index b42100215ca..00000000000 --- a/homeassistant/components/sensor/translations/season.en.json +++ /dev/null @@ -1,8 +0,0 @@ -{ - "state": { - "autumn": "Autumn", - "spring": "Spring", - "summer": "Summer", - "winter": "Winter" - } -} \ No newline at end of file diff --git a/homeassistant/components/sensor/translations/season.es-419.json b/homeassistant/components/sensor/translations/season.es-419.json deleted file mode 100644 index 65df6a58b10..00000000000 --- a/homeassistant/components/sensor/translations/season.es-419.json +++ /dev/null @@ -1,8 +0,0 @@ -{ - "state": { - "autumn": "Oto\u00f1o", - "spring": "Primavera", - "summer": "Verano", - "winter": "Invierno" - } -} \ No newline at end of file diff --git a/homeassistant/components/sensor/translations/season.es.json b/homeassistant/components/sensor/translations/season.es.json deleted file mode 100644 index 65df6a58b10..00000000000 --- a/homeassistant/components/sensor/translations/season.es.json +++ /dev/null @@ -1,8 +0,0 @@ -{ - "state": { - "autumn": "Oto\u00f1o", - "spring": "Primavera", - "summer": "Verano", - "winter": "Invierno" - } -} \ No newline at end of file diff --git a/homeassistant/components/sensor/translations/season.et.json b/homeassistant/components/sensor/translations/season.et.json deleted file mode 100644 index 1415a3b907b..00000000000 --- a/homeassistant/components/sensor/translations/season.et.json +++ /dev/null @@ -1,8 +0,0 @@ -{ - "state": { - "autumn": "S\u00fcgis", - "spring": "Kevad", - "summer": "Suvi", - "winter": "Talv" - } -} \ No newline at end of file diff --git 
a/homeassistant/components/sensor/translations/season.eu.json b/homeassistant/components/sensor/translations/season.eu.json deleted file mode 100644 index f226d920043..00000000000 --- a/homeassistant/components/sensor/translations/season.eu.json +++ /dev/null @@ -1,8 +0,0 @@ -{ - "state": { - "autumn": "Udazkeneko", - "spring": "Spring", - "summer": "Uda", - "winter": "Winter" - } -} \ No newline at end of file diff --git a/homeassistant/components/sensor/translations/season.fi.json b/homeassistant/components/sensor/translations/season.fi.json deleted file mode 100644 index f01f6451549..00000000000 --- a/homeassistant/components/sensor/translations/season.fi.json +++ /dev/null @@ -1,8 +0,0 @@ -{ - "state": { - "autumn": "Syksy", - "spring": "Kev\u00e4t", - "summer": "Kes\u00e4", - "winter": "Talvi" - } -} \ No newline at end of file diff --git a/homeassistant/components/sensor/translations/season.fr.json b/homeassistant/components/sensor/translations/season.fr.json deleted file mode 100644 index ec9f9657428..00000000000 --- a/homeassistant/components/sensor/translations/season.fr.json +++ /dev/null @@ -1,8 +0,0 @@ -{ - "state": { - "autumn": "Automne", - "spring": "Printemps", - "summer": "\u00c9t\u00e9", - "winter": "Hiver" - } -} \ No newline at end of file diff --git a/homeassistant/components/sensor/translations/season.he.json b/homeassistant/components/sensor/translations/season.he.json deleted file mode 100644 index 282c24f3ad9..00000000000 --- a/homeassistant/components/sensor/translations/season.he.json +++ /dev/null @@ -1,8 +0,0 @@ -{ - "state": { - "autumn": "\u05e1\u05ea\u05d9\u05d5", - "spring": "\u05d0\u05d1\u05d9\u05d1", - "summer": "\u05e7\u05d9\u05e5", - "winter": "\u05d7\u05d5\u05e8\u05e3" - } -} \ No newline at end of file diff --git a/homeassistant/components/sensor/translations/season.hr.json b/homeassistant/components/sensor/translations/season.hr.json deleted file mode 100644 index ff36d1ca66b..00000000000 --- a/homeassistant/components/sensor/translations/season.hr.json +++ /dev/null @@ -1,8 +0,0 @@ -{ - "state": { - "autumn": "Jesen", - "spring": "Prolje\u0107e", - "summer": "Ljeto", - "winter": "Zima" - } -} \ No newline at end of file diff --git a/homeassistant/components/sensor/translations/season.hu.json b/homeassistant/components/sensor/translations/season.hu.json deleted file mode 100644 index 63596b09784..00000000000 --- a/homeassistant/components/sensor/translations/season.hu.json +++ /dev/null @@ -1,8 +0,0 @@ -{ - "state": { - "autumn": "\u0150sz", - "spring": "Tavasz", - "summer": "Ny\u00e1r", - "winter": "T\u00e9l" - } -} \ No newline at end of file diff --git a/homeassistant/components/sensor/translations/season.id.json b/homeassistant/components/sensor/translations/season.id.json deleted file mode 100644 index ed0666aee36..00000000000 --- a/homeassistant/components/sensor/translations/season.id.json +++ /dev/null @@ -1,8 +0,0 @@ -{ - "state": { - "autumn": "Musim gugur", - "spring": "Musim semi", - "summer": "Musim panas", - "winter": "Musim dingin" - } -} \ No newline at end of file diff --git a/homeassistant/components/sensor/translations/season.it.json b/homeassistant/components/sensor/translations/season.it.json deleted file mode 100644 index d9138f6b16e..00000000000 --- a/homeassistant/components/sensor/translations/season.it.json +++ /dev/null @@ -1,8 +0,0 @@ -{ - "state": { - "autumn": "Autunno", - "spring": "Primavera", - "summer": "Estate", - "winter": "Inverno" - } -} \ No newline at end of file diff --git 
a/homeassistant/components/sensor/translations/season.ja.json b/homeassistant/components/sensor/translations/season.ja.json deleted file mode 100644 index e441b1aa8ac..00000000000 --- a/homeassistant/components/sensor/translations/season.ja.json +++ /dev/null @@ -1,8 +0,0 @@ -{ - "state": { - "autumn": "\u79cb", - "spring": "\u6625", - "summer": "\u590f", - "winter": "\u51ac" - } -} \ No newline at end of file diff --git a/homeassistant/components/sensor/translations/season.ko.json b/homeassistant/components/sensor/translations/season.ko.json deleted file mode 100644 index f2bf0a7bae5..00000000000 --- a/homeassistant/components/sensor/translations/season.ko.json +++ /dev/null @@ -1,8 +0,0 @@ -{ - "state": { - "autumn": "\uac00\uc744", - "spring": "\ubd04", - "summer": "\uc5ec\ub984", - "winter": "\uaca8\uc6b8" - } -} \ No newline at end of file diff --git a/homeassistant/components/sensor/translations/season.lb.json b/homeassistant/components/sensor/translations/season.lb.json deleted file mode 100644 index f33afde7a07..00000000000 --- a/homeassistant/components/sensor/translations/season.lb.json +++ /dev/null @@ -1,8 +0,0 @@ -{ - "state": { - "autumn": "Hierscht", - "spring": "Fr\u00e9ijoer", - "summer": "Summer", - "winter": "Wanter" - } -} \ No newline at end of file diff --git a/homeassistant/components/sensor/translations/season.lv.json b/homeassistant/components/sensor/translations/season.lv.json deleted file mode 100644 index a96e1112f71..00000000000 --- a/homeassistant/components/sensor/translations/season.lv.json +++ /dev/null @@ -1,8 +0,0 @@ -{ - "state": { - "autumn": "Rudens", - "spring": "Pavasaris", - "summer": "Vasara", - "winter": "Ziema" - } -} \ No newline at end of file diff --git a/homeassistant/components/sensor/translations/season.nl.json b/homeassistant/components/sensor/translations/season.nl.json deleted file mode 100644 index 6054a8e2be5..00000000000 --- a/homeassistant/components/sensor/translations/season.nl.json +++ /dev/null @@ -1,8 +0,0 @@ -{ - "state": { - "autumn": "Herfst", - "spring": "Lente", - "summer": "Zomer", - "winter": "Winter" - } -} \ No newline at end of file diff --git a/homeassistant/components/sensor/translations/season.nn.json b/homeassistant/components/sensor/translations/season.nn.json deleted file mode 100644 index dbcff7ef819..00000000000 --- a/homeassistant/components/sensor/translations/season.nn.json +++ /dev/null @@ -1,8 +0,0 @@ -{ - "state": { - "autumn": "Haust", - "spring": "V\u00e5r", - "summer": "Sommar", - "winter": "Vinter" - } -} \ No newline at end of file diff --git a/homeassistant/components/sensor/translations/season.no.json b/homeassistant/components/sensor/translations/season.no.json deleted file mode 100644 index 9d520dae6a5..00000000000 --- a/homeassistant/components/sensor/translations/season.no.json +++ /dev/null @@ -1,8 +0,0 @@ -{ - "state": { - "autumn": "H\u00f8st", - "spring": "V\u00e5r", - "summer": "Sommer", - "winter": "Vinter" - } -} \ No newline at end of file diff --git a/homeassistant/components/sensor/translations/season.pl.json b/homeassistant/components/sensor/translations/season.pl.json deleted file mode 100644 index 9b313e511c9..00000000000 --- a/homeassistant/components/sensor/translations/season.pl.json +++ /dev/null @@ -1,8 +0,0 @@ -{ - "state": { - "autumn": "jesie\u0144", - "spring": "wiosna", - "summer": "lato", - "winter": "zima" - } -} \ No newline at end of file diff --git a/homeassistant/components/sensor/translations/season.pt-BR.json 
b/homeassistant/components/sensor/translations/season.pt-BR.json deleted file mode 100644 index fde45ad6c8e..00000000000 --- a/homeassistant/components/sensor/translations/season.pt-BR.json +++ /dev/null @@ -1,8 +0,0 @@ -{ - "state": { - "autumn": "Outono", - "spring": "Primavera", - "summer": "Ver\u00e3o", - "winter": "Inverno" - } -} \ No newline at end of file diff --git a/homeassistant/components/sensor/translations/season.pt.json b/homeassistant/components/sensor/translations/season.pt.json deleted file mode 100644 index fde45ad6c8e..00000000000 --- a/homeassistant/components/sensor/translations/season.pt.json +++ /dev/null @@ -1,8 +0,0 @@ -{ - "state": { - "autumn": "Outono", - "spring": "Primavera", - "summer": "Ver\u00e3o", - "winter": "Inverno" - } -} \ No newline at end of file diff --git a/homeassistant/components/sensor/translations/season.ro.json b/homeassistant/components/sensor/translations/season.ro.json deleted file mode 100644 index 04f90318290..00000000000 --- a/homeassistant/components/sensor/translations/season.ro.json +++ /dev/null @@ -1,8 +0,0 @@ -{ - "state": { - "autumn": "Toamn\u0103", - "spring": "Prim\u0103var\u0103", - "summer": "Var\u0103", - "winter": "Iarn\u0103" - } -} \ No newline at end of file diff --git a/homeassistant/components/sensor/translations/season.ru.json b/homeassistant/components/sensor/translations/season.ru.json deleted file mode 100644 index 2b04886b72d..00000000000 --- a/homeassistant/components/sensor/translations/season.ru.json +++ /dev/null @@ -1,8 +0,0 @@ -{ - "state": { - "autumn": "\u041e\u0441\u0435\u043d\u044c", - "spring": "\u0412\u0435\u0441\u043d\u0430", - "summer": "\u041b\u0435\u0442\u043e", - "winter": "\u0417\u0438\u043c\u0430" - } -} \ No newline at end of file diff --git a/homeassistant/components/sensor/translations/season.sl.json b/homeassistant/components/sensor/translations/season.sl.json deleted file mode 100644 index f715a3ec13a..00000000000 --- a/homeassistant/components/sensor/translations/season.sl.json +++ /dev/null @@ -1,8 +0,0 @@ -{ - "state": { - "autumn": "Jesen", - "spring": "Pomlad", - "summer": "Poletje", - "winter": "Zima" - } -} \ No newline at end of file diff --git a/homeassistant/components/sensor/translations/season.sv.json b/homeassistant/components/sensor/translations/season.sv.json deleted file mode 100644 index 02332d76906..00000000000 --- a/homeassistant/components/sensor/translations/season.sv.json +++ /dev/null @@ -1,8 +0,0 @@ -{ - "state": { - "autumn": "H\u00f6st", - "spring": "V\u00e5r", - "summer": "Sommar", - "winter": "Vinter" - } -} \ No newline at end of file diff --git a/homeassistant/components/sensor/translations/season.th.json b/homeassistant/components/sensor/translations/season.th.json deleted file mode 100644 index 09799730389..00000000000 --- a/homeassistant/components/sensor/translations/season.th.json +++ /dev/null @@ -1,8 +0,0 @@ -{ - "state": { - "autumn": "\u0e24\u0e14\u0e39\u0e43\u0e1a\u0e44\u0e21\u0e49\u0e23\u0e48\u0e27\u0e07", - "spring": "\u0e24\u0e14\u0e39\u0e43\u0e1a\u0e44\u0e21\u0e49\u0e1c\u0e25\u0e34", - "summer": "\u0e24\u0e14\u0e39\u0e23\u0e49\u0e2d\u0e19", - "winter": "\u0e24\u0e14\u0e39\u0e2b\u0e19\u0e32\u0e27" - } -} \ No newline at end of file diff --git a/homeassistant/components/sensor/translations/season.uk.json b/homeassistant/components/sensor/translations/season.uk.json deleted file mode 100644 index 766e59a43da..00000000000 --- a/homeassistant/components/sensor/translations/season.uk.json +++ /dev/null @@ -1,8 +0,0 @@ -{ - "state": { - "autumn": 
"\u041e\u0441\u0456\u043d\u044c", - "spring": "\u0412\u0435\u0441\u043d\u0430", - "summer": "\u041b\u0456\u0442\u043e", - "winter": "\u0417\u0438\u043c\u0430" - } -} \ No newline at end of file diff --git a/homeassistant/components/sensor/translations/season.vi.json b/homeassistant/components/sensor/translations/season.vi.json deleted file mode 100644 index a3bb21dee27..00000000000 --- a/homeassistant/components/sensor/translations/season.vi.json +++ /dev/null @@ -1,8 +0,0 @@ -{ - "state": { - "autumn": "M\u00f9a thu", - "spring": "M\u00f9a xu\u00e2n", - "summer": "M\u00f9a h\u00e8", - "winter": "M\u00f9a \u0111\u00f4ng" - } -} \ No newline at end of file diff --git a/homeassistant/components/sensor/translations/season.zh-Hans.json b/homeassistant/components/sensor/translations/season.zh-Hans.json deleted file mode 100644 index 78801f4b1df..00000000000 --- a/homeassistant/components/sensor/translations/season.zh-Hans.json +++ /dev/null @@ -1,8 +0,0 @@ -{ - "state": { - "autumn": "\u79cb\u5b63", - "spring": "\u6625\u5b63", - "summer": "\u590f\u5b63", - "winter": "\u51ac\u5b63" - } -} \ No newline at end of file diff --git a/homeassistant/components/sensor/translations/season.zh-Hant.json b/homeassistant/components/sensor/translations/season.zh-Hant.json deleted file mode 100644 index 78801f4b1df..00000000000 --- a/homeassistant/components/sensor/translations/season.zh-Hant.json +++ /dev/null @@ -1,8 +0,0 @@ -{ - "state": { - "autumn": "\u79cb\u5b63", - "spring": "\u6625\u5b63", - "summer": "\u590f\u5b63", - "winter": "\u51ac\u5b63" - } -} \ No newline at end of file diff --git a/homeassistant/components/simplisafe/translations/nn.json b/homeassistant/components/simplisafe/translations/nn.json deleted file mode 100644 index 1bcfd463ce8..00000000000 --- a/homeassistant/components/simplisafe/translations/nn.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "title": "SimpliSafe" -} \ No newline at end of file diff --git a/homeassistant/components/smappee/__init__.py b/homeassistant/components/smappee/__init__.py index d230661a9f2..c0c6c41ad54 100644 --- a/homeassistant/components/smappee/__init__.py +++ b/homeassistant/components/smappee/__init__.py @@ -1,334 +1,109 @@ -"""Support for Smappee energy monitor.""" -from datetime import datetime, timedelta -import logging -import re +"""The Smappee integration.""" +import asyncio -from requests.exceptions import RequestException -import smappy +from pysmappee import Smappee import voluptuous as vol -from homeassistant.const import ( - CONF_CLIENT_ID, - CONF_CLIENT_SECRET, - CONF_HOST, - CONF_PASSWORD, - CONF_USERNAME, -) -import homeassistant.helpers.config_validation as cv -from homeassistant.helpers.discovery import load_platform +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import CONF_CLIENT_ID, CONF_CLIENT_SECRET +from homeassistant.core import HomeAssistant +from homeassistant.helpers import config_entry_oauth2_flow, config_validation as cv from homeassistant.util import Throttle -_LOGGER = logging.getLogger(__name__) - -DEFAULT_NAME = "Smappee" -DEFAULT_HOST_PASSWORD = "admin" - -CONF_HOST_PASSWORD = "host_password" - -DOMAIN = "smappee" -DATA_SMAPPEE = "SMAPPEE" - -_SENSOR_REGEX = re.compile(r"(?P([A-Za-z]+))\=(?P([0-9\.]+))") +from . 
import api, config_flow +from .const import ( + AUTHORIZE_URL, + BASE, + DOMAIN, + MIN_TIME_BETWEEN_UPDATES, + SMAPPEE_PLATFORMS, + TOKEN_URL, +) CONFIG_SCHEMA = vol.Schema( { DOMAIN: vol.Schema( { - vol.Inclusive(CONF_CLIENT_ID, "Server credentials"): cv.string, - vol.Inclusive(CONF_CLIENT_SECRET, "Server credentials"): cv.string, - vol.Inclusive(CONF_USERNAME, "Server credentials"): cv.string, - vol.Inclusive(CONF_PASSWORD, "Server credentials"): cv.string, - vol.Optional(CONF_HOST): cv.string, - vol.Optional( - CONF_HOST_PASSWORD, default=DEFAULT_HOST_PASSWORD - ): cv.string, + vol.Required(CONF_CLIENT_ID): cv.string, + vol.Required(CONF_CLIENT_SECRET): cv.string, } ) }, extra=vol.ALLOW_EXTRA, ) -MIN_TIME_BETWEEN_UPDATES = timedelta(seconds=30) +async def async_setup(hass: HomeAssistant, config: dict): + """Set up the Smappee component.""" + hass.data[DOMAIN] = {} -def setup(hass, config): - """Set up the Smapee component.""" - client_id = config.get(DOMAIN).get(CONF_CLIENT_ID) - client_secret = config.get(DOMAIN).get(CONF_CLIENT_SECRET) - username = config.get(DOMAIN).get(CONF_USERNAME) - password = config.get(DOMAIN).get(CONF_PASSWORD) - host = config.get(DOMAIN).get(CONF_HOST) - host_password = config.get(DOMAIN).get(CONF_HOST_PASSWORD) + if DOMAIN not in config: + return True - smappee = Smappee(client_id, client_secret, username, password, host, host_password) + config_flow.SmappeeFlowHandler.async_register_implementation( + hass, + config_entry_oauth2_flow.LocalOAuth2Implementation( + hass, + DOMAIN, + config[DOMAIN][CONF_CLIENT_ID], + config[DOMAIN][CONF_CLIENT_SECRET], + AUTHORIZE_URL, + TOKEN_URL, + ), + ) - if not smappee.is_local_active and not smappee.is_remote_active: - _LOGGER.error("Neither Smappee server or local integration enabled.") - return False - - hass.data[DATA_SMAPPEE] = smappee - load_platform(hass, "switch", DOMAIN, {}, config) - load_platform(hass, "sensor", DOMAIN, {}, config) return True -class Smappee: - """Stores data retrieved from Smappee sensor.""" +async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry): + """Set up Smappee from a config entry.""" + implementation = await config_entry_oauth2_flow.async_get_config_entry_implementation( + hass, entry + ) - def __init__( - self, client_id, client_secret, username, password, host, host_password - ): - """Initialize the data.""" + smappee_api = api.ConfigEntrySmappeeApi(hass, entry, implementation) - self._remote_active = False - self._local_active = False - if client_id is not None: - try: - self._smappy = smappy.Smappee(client_id, client_secret) - self._smappy.authenticate(username, password) - self._remote_active = True - except RequestException as error: - self._smappy = None - _LOGGER.exception("Smappee server authentication failed (%s)", error) - else: - _LOGGER.warning("Smappee server integration init skipped.") + smappee = Smappee(smappee_api) + await hass.async_add_executor_job(smappee.load_service_locations) - if host is not None: - try: - self._localsmappy = smappy.LocalSmappee(host) - self._localsmappy.logon(host_password) - self._local_active = True - except RequestException as error: - self._localsmappy = None - _LOGGER.exception( - "Local Smappee device authentication failed (%s)", error - ) - else: - _LOGGER.warning("Smappee local integration init skipped.") + hass.data[DOMAIN][BASE] = SmappeeBase(hass, smappee) - self.locations = {} - self.info = {} - self.consumption = {} - self.sensor_consumption = {} - self.instantaneous = {} + for component in SMAPPEE_PLATFORMS: + 
hass.async_create_task( + hass.config_entries.async_forward_entry_setup(entry, component) + ) - if self._remote_active or self._local_active: - self.update() + return True + + +async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry): + """Unload a config entry.""" + unload_ok = all( + await asyncio.gather( + *[ + hass.config_entries.async_forward_entry_unload(entry, component) + for component in SMAPPEE_PLATFORMS + ] + ) + ) + + if unload_ok: + hass.data[DOMAIN].pop(BASE, None) + + return unload_ok + + +class SmappeeBase: + """An object to hold the PySmappee instance.""" + + def __init__(self, hass, smappee): + """Initialize the Smappee API wrapper class.""" + self.hass = hass + self.smappee = smappee @Throttle(MIN_TIME_BETWEEN_UPDATES) - def update(self): - """Update data from Smappee API.""" - if self.is_remote_active: - service_locations = self._smappy.get_service_locations().get( - "serviceLocations" - ) - for location in service_locations: - location_id = location.get("serviceLocationId") - if location_id is not None: - self.sensor_consumption[location_id] = {} - self.locations[location_id] = location.get("name") - self.info[location_id] = self._smappy.get_service_location_info( - location_id - ) - _LOGGER.debug( - "Remote info %s %s", self.locations, self.info[location_id] - ) - - for sensors in self.info[location_id].get("sensors"): - sensor_id = sensors.get("id") - self.sensor_consumption[location_id].update( - { - sensor_id: self.get_sensor_consumption( - location_id, sensor_id, aggregation=3, delta=1440 - ) - } - ) - _LOGGER.debug( - "Remote sensors %s %s", - self.locations, - self.sensor_consumption[location_id], - ) - - self.consumption[location_id] = self.get_consumption( - location_id, aggregation=3, delta=1440 - ) - _LOGGER.debug( - "Remote consumption %s %s", - self.locations, - self.consumption[location_id], - ) - - if self.is_local_active: - self.local_devices = self.get_switches() - _LOGGER.debug("Local switches %s", self.local_devices) - - self.instantaneous = self.load_instantaneous() - _LOGGER.debug("Local values %s", self.instantaneous) - - @property - def is_remote_active(self): - """Return true if Smappe server is configured and working.""" - return self._remote_active - - @property - def is_local_active(self): - """Return true if Smappe local device is configured and working.""" - return self._local_active - - def get_switches(self): - """Get switches from local Smappee.""" - if not self.is_local_active: - return - - try: - return self._localsmappy.load_command_control_config() - except RequestException as error: - _LOGGER.error("Error getting switches from local Smappee. (%s)", error) - - def get_consumption(self, location_id, aggregation, delta): - """Update data from Smappee.""" - # Start & End accept epoch (in milliseconds), - # datetime and pandas timestamps - # Aggregation: - # 1 = 5 min values (only available for the last 14 days), - # 2 = hourly values, - # 3 = daily values, - # 4 = monthly values, - # 5 = quarterly values - if not self.is_remote_active: - return - - end = datetime.utcnow() - start = end - timedelta(minutes=delta) - try: - return self._smappy.get_consumption(location_id, start, end, aggregation) - except RequestException as error: - _LOGGER.error("Error getting consumption from Smappee cloud. 
(%s)", error) - - def get_sensor_consumption(self, location_id, sensor_id, aggregation, delta): - """Update data from Smappee.""" - # Start & End accept epoch (in milliseconds), - # datetime and pandas timestamps - # Aggregation: - # 1 = 5 min values (only available for the last 14 days), - # 2 = hourly values, - # 3 = daily values, - # 4 = monthly values, - # 5 = quarterly values - if not self.is_remote_active: - return - - end = datetime.utcnow() - start = end - timedelta(minutes=delta) - try: - return self._smappy.get_sensor_consumption( - location_id, sensor_id, start, end, aggregation - ) - except RequestException as error: - _LOGGER.error("Error getting consumption from Smappee cloud. (%s)", error) - - def actuator_on(self, location_id, actuator_id, is_remote_switch, duration=None): - """Turn on actuator.""" - # Duration = 300,900,1800,3600 - # or any other value for an undetermined period of time. - # - # The comport plugs have a tendency to ignore the on/off signal. - # And because you can't read the status of a plug, it's more - # reliable to execute the command twice. - try: - if is_remote_switch: - self._smappy.actuator_on(location_id, actuator_id, duration) - self._smappy.actuator_on(location_id, actuator_id, duration) - else: - self._localsmappy.on_command_control(actuator_id) - self._localsmappy.on_command_control(actuator_id) - except RequestException as error: - _LOGGER.error("Error turning actuator on. (%s)", error) - return False - - return True - - def actuator_off(self, location_id, actuator_id, is_remote_switch, duration=None): - """Turn off actuator.""" - # Duration = 300,900,1800,3600 - # or any other value for an undetermined period of time. - # - # The comport plugs have a tendency to ignore the on/off signal. - # And because you can't read the status of a plug, it's more - # reliable to execute the command twice. - try: - if is_remote_switch: - self._smappy.actuator_off(location_id, actuator_id, duration) - self._smappy.actuator_off(location_id, actuator_id, duration) - else: - self._localsmappy.off_command_control(actuator_id) - self._localsmappy.off_command_control(actuator_id) - except RequestException as error: - _LOGGER.error("Error turning actuator on. (%s)", error) - return False - - return True - - def active_power(self): - """Get sum of all instantaneous active power values from local hub.""" - if not self.is_local_active: - return - - try: - return self._localsmappy.active_power() - except RequestException as error: - _LOGGER.error("Error getting data from Local Smappee unit. (%s)", error) - - def active_cosfi(self): - """Get the average of all instantaneous cosfi values.""" - if not self.is_local_active: - return - - try: - return self._localsmappy.active_cosfi() - except RequestException as error: - _LOGGER.error("Error getting data from Local Smappee unit. (%s)", error) - - def instantaneous_values(self): - """ReportInstantaneousValues.""" - if not self.is_local_active: - return - - report_instantaneous_values = self._localsmappy.report_instantaneous_values() - - report_result = report_instantaneous_values["report"].split("
") - properties = {} - for lines in report_result: - lines_result = lines.split(",") - for prop in lines_result: - match = _SENSOR_REGEX.search(prop) - if match: - properties[match.group("key")] = match.group("value") - _LOGGER.debug(properties) - return properties - - def active_current(self): - """Get current active Amps.""" - if not self.is_local_active: - return - - properties = self.instantaneous_values() - return float(properties["current"]) - - def active_voltage(self): - """Get current active Voltage.""" - if not self.is_local_active: - return - - properties = self.instantaneous_values() - return float(properties["voltage"]) - - def load_instantaneous(self): - """LoadInstantaneous.""" - if not self.is_local_active: - return - - try: - return self._localsmappy.load_instantaneous() - except RequestException as error: - _LOGGER.error("Error getting data from Local Smappee unit. (%s)", error) + async def async_update(self): + """Update all Smappee trends and appliance states.""" + await self.hass.async_add_executor_job( + self.smappee.update_trends_and_appliance_states + ) diff --git a/homeassistant/components/smappee/api.py b/homeassistant/components/smappee/api.py new file mode 100644 index 00000000000..703dd581d31 --- /dev/null +++ b/homeassistant/components/smappee/api.py @@ -0,0 +1,33 @@ +"""API for Smappee bound to Home Assistant OAuth.""" +from asyncio import run_coroutine_threadsafe + +from pysmappee import api + +from homeassistant import config_entries, core +from homeassistant.helpers import config_entry_oauth2_flow + + +class ConfigEntrySmappeeApi(api.SmappeeApi): + """Provide Smappee authentication tied to an OAuth2 based config entry.""" + + def __init__( + self, + hass: core.HomeAssistant, + config_entry: config_entries.ConfigEntry, + implementation: config_entry_oauth2_flow.AbstractOAuth2Implementation, + ): + """Initialize Smappee Auth.""" + self.hass = hass + self.config_entry = config_entry + self.session = config_entry_oauth2_flow.OAuth2Session( + hass, config_entry, implementation + ) + super().__init__(None, None, token=self.session.token) + + def refresh_tokens(self) -> dict: + """Refresh and return new Smappee tokens using Home Assistant OAuth2 session.""" + run_coroutine_threadsafe( + self.session.async_ensure_token_valid(), self.hass.loop + ).result() + + return self.session.token diff --git a/homeassistant/components/smappee/binary_sensor.py b/homeassistant/components/smappee/binary_sensor.py new file mode 100644 index 00000000000..7de6766bcb3 --- /dev/null +++ b/homeassistant/components/smappee/binary_sensor.py @@ -0,0 +1,173 @@ +"""Support for monitoring a Smappee appliance binary sensor.""" +import logging + +from homeassistant.components.binary_sensor import BinarySensorEntity + +from .const import BASE, DOMAIN + +_LOGGER = logging.getLogger(__name__) + +BINARY_SENSOR_PREFIX = "Appliance" +PRESENCE_PREFIX = "Presence" + + +async def async_setup_entry(hass, config_entry, async_add_entities): + """Set up the Smappee binary sensor.""" + smappee_base = hass.data[DOMAIN][BASE] + + entities = [] + for service_location in smappee_base.smappee.service_locations.values(): + for appliance_id, appliance in service_location.appliances.items(): + if appliance.type != "Find me" and appliance.source_type == "NILM": + entities.append( + SmappeeAppliance( + smappee_base=smappee_base, + service_location=service_location, + appliance_id=appliance_id, + appliance_name=appliance.name, + appliance_type=appliance.type, + ) + ) + + entities.append(SmappeePresence(smappee_base, 
service_location)) + + async_add_entities(entities, True) + + +class SmappeePresence(BinarySensorEntity): + """Implementation of a Smappee presence binary sensor.""" + + def __init__(self, smappee_base, service_location): + """Initialize the Smappee sensor.""" + self._smappee_base = smappee_base + self._service_location = service_location + self._state = self._service_location.is_present + + @property + def name(self): + """Return the name of the binary sensor.""" + return f"{self._service_location.service_location_name} - {PRESENCE_PREFIX}" + + @property + def is_on(self): + """Return if the binary sensor is turned on.""" + return self._state + + @property + def device_class(self): + """Return the class of this device, from component DEVICE_CLASSES.""" + return "presence" + + @property + def unique_id(self,): + """Return the unique ID for this binary sensor.""" + return ( + f"{self._service_location.device_serial_number}-" + f"{self._service_location.service_location_id}-" + f"presence" + ) + + @property + def device_info(self): + """Return the device info for this binary sensor.""" + return { + "identifiers": {(DOMAIN, self._service_location.device_serial_number)}, + "name": self._service_location.service_location_name, + "manufacturer": "Smappee", + "model": self._service_location.device_model, + "sw_version": self._service_location.firmware_version, + } + + async def async_update(self): + """Get the latest data from Smappee and update the state.""" + await self._smappee_base.async_update() + + self._state = self._service_location.is_present + + +class SmappeeAppliance(BinarySensorEntity): + """Implementation of a Smappee binary sensor.""" + + def __init__( + self, + smappee_base, + service_location, + appliance_id, + appliance_name, + appliance_type, + ): + """Initialize the Smappee sensor.""" + self._smappee_base = smappee_base + self._service_location = service_location + self._appliance_id = appliance_id + self._appliance_name = appliance_name + self._appliance_type = appliance_type + self._state = False + + @property + def name(self): + """Return the name of the sensor.""" + return ( + f"{self._service_location.service_location_name} - " + f"{BINARY_SENSOR_PREFIX} - " + f"{self._appliance_name if self._appliance_name != '' else self._appliance_type}" + ) + + @property + def is_on(self): + """Return if the binary sensor is turned on.""" + return self._state + + @property + def icon(self): + """Icon to use in the frontend.""" + icon_mapping = { + "Car Charger": "mdi:car", + "Coffeemaker": "mdi:coffee", + "Clothes Dryer": "mdi:tumble-dryer", + "Clothes Iron": "mdi:hanger", + "Dishwasher": "mdi:dishwasher", + "Lights": "mdi:lightbulb", + "Fan": "mdi:fan", + "Freezer": "mdi:fridge", + "Microwave": "mdi:microwave", + "Oven": "mdi:stove", + "Refrigerator": "mdi:fridge", + "Stove": "mdi:stove", + "Washing Machine": "mdi:washing-machine", + "Water Pump": "mdi:water-pump", + } + return icon_mapping.get(self._appliance_type) + + @property + def device_class(self): + """Return the class of this device, from component DEVICE_CLASSES.""" + # Only lights can be mapped onto the generic list of binary sensors + return "light" if self._appliance_type == "Lights" else "power" + + @property + def unique_id(self,): + """Return the unique ID for this binary sensor.""" + return ( + f"{self._service_location.device_serial_number}-" + f"{self._service_location.service_location_id}-" + f"appliance-{self._appliance_id}" + ) + + @property + def device_info(self): + """Return the device info for this binary 
sensor.""" + return { + "identifiers": {(DOMAIN, self._service_location.device_serial_number)}, + "name": self._service_location.service_location_name, + "manufacturer": "Smappee", + "model": self._service_location.device_model, + "sw_version": self._service_location.firmware_version, + } + + async def async_update(self): + """Get the latest data from Smappee and update the state.""" + await self._smappee_base.async_update() + + appliance = self._service_location.appliances.get(self._appliance_id) + self._state = bool(appliance.state) diff --git a/homeassistant/components/smappee/config_flow.py b/homeassistant/components/smappee/config_flow.py new file mode 100644 index 00000000000..48aca5449ad --- /dev/null +++ b/homeassistant/components/smappee/config_flow.py @@ -0,0 +1,30 @@ +"""Config flow for Smappee.""" +import logging + +from homeassistant import config_entries +from homeassistant.helpers import config_entry_oauth2_flow + +from .const import DOMAIN # pylint: disable=unused-import + +_LOGGER = logging.getLogger(__name__) + + +class SmappeeFlowHandler( + config_entry_oauth2_flow.AbstractOAuth2FlowHandler, domain=DOMAIN +): + """Config flow to handle Smappee OAuth2 authentication.""" + + DOMAIN = DOMAIN + CONNECTION_CLASS = config_entries.CONN_CLASS_CLOUD_POLL + + @property + def logger(self) -> logging.Logger: + """Return logger.""" + return logging.getLogger(__name__) + + async def async_step_user(self, user_input=None): + """Handle a flow start.""" + if self.hass.config_entries.async_entries(DOMAIN): + return self.async_abort(reason="single_instance_allowed") + + return await super().async_step_user(user_input) diff --git a/homeassistant/components/smappee/const.py b/homeassistant/components/smappee/const.py new file mode 100644 index 00000000000..05998ac6716 --- /dev/null +++ b/homeassistant/components/smappee/const.py @@ -0,0 +1,15 @@ +"""Constants for the Smappee integration.""" + +from datetime import timedelta + +DOMAIN = "smappee" +DATA_CLIENT = "smappee_data" + +BASE = "BASE" + +SMAPPEE_PLATFORMS = ["binary_sensor", "sensor", "switch"] + +MIN_TIME_BETWEEN_UPDATES = timedelta(seconds=5) + +AUTHORIZE_URL = "https://app1pub.smappee.net/dev/v1/oauth2/authorize" +TOKEN_URL = "https://app1pub.smappee.net/dev/v3/oauth2/token" diff --git a/homeassistant/components/smappee/manifest.json b/homeassistant/components/smappee/manifest.json index e2c24bf6d71..e316273f3ed 100644 --- a/homeassistant/components/smappee/manifest.json +++ b/homeassistant/components/smappee/manifest.json @@ -1,7 +1,13 @@ { "domain": "smappee", "name": "Smappee", + "config_flow": true, "documentation": "https://www.home-assistant.io/integrations/smappee", - "requirements": ["smappy==0.2.16"], - "codeowners": [] + "dependencies": ["http"], + "requirements": [ + "pysmappee==0.1.0" + ], + "codeowners": [ + "@bsmappee" + ] } diff --git a/homeassistant/components/smappee/sensor.py b/homeassistant/components/smappee/sensor.py index 9558bbc2e62..77578533749 100644 --- a/homeassistant/components/smappee/sensor.py +++ b/homeassistant/components/smappee/sensor.py @@ -1,170 +1,248 @@ """Support for monitoring a Smappee energy sensor.""" -from datetime import timedelta import logging -from homeassistant.const import ( - DEGREE, - ELECTRICAL_CURRENT_AMPERE, - ENERGY_KILO_WATT_HOUR, - POWER_WATT, - UNIT_PERCENTAGE, - VOLT, - VOLUME_CUBIC_METERS, -) +from homeassistant.const import DEVICE_CLASS_POWER, ENERGY_WATT_HOUR, POWER_WATT, VOLT from homeassistant.helpers.entity import Entity -from . 
import DATA_SMAPPEE +from .const import BASE, DOMAIN _LOGGER = logging.getLogger(__name__) -SENSOR_PREFIX = "Smappee" -SENSOR_TYPES = { - "solar": ["Solar", "mdi:white-balance-sunny", "local", POWER_WATT, "solar"], - "active_power": [ - "Active Power", - "mdi:power-plug", - "local", +TREND_SENSORS = { + "total_power": [ + "Total consumption - Active power", + None, POWER_WATT, - "active_power", + "total_power", + DEVICE_CLASS_POWER, ], - "current": ["Current", "mdi:gauge", "local", ELECTRICAL_CURRENT_AMPERE, "current"], - "voltage": ["Voltage", "mdi:gauge", "local", VOLT, "voltage"], - "active_cosfi": [ - "Power Factor", - "mdi:gauge", - "local", - UNIT_PERCENTAGE, - "active_cosfi", + "total_reactive_power": [ + "Total consumption - Reactive power", + None, + POWER_WATT, + "total_reactive_power", + DEVICE_CLASS_POWER, ], - "alwayson_today": [ - "Always On Today", - "mdi:gauge", - "remote", - ENERGY_KILO_WATT_HOUR, - "alwaysOn", - ], - "solar_today": [ - "Solar Today", - "mdi:white-balance-sunny", - "remote", - ENERGY_KILO_WATT_HOUR, - "solar", + "alwayson": [ + "Always on - Active power", + None, + POWER_WATT, + "alwayson", + DEVICE_CLASS_POWER, ], "power_today": [ - "Power Today", + "Total consumption - Today", "mdi:power-plug", - "remote", - ENERGY_KILO_WATT_HOUR, - "consumption", + ENERGY_WATT_HOUR, + "power_today", + None, ], - "water_sensor_1": [ - "Water Sensor 1", - "mdi:water", - "water", - VOLUME_CUBIC_METERS, - "value1", + "power_current_hour": [ + "Total consumption - Current hour", + "mdi:power-plug", + ENERGY_WATT_HOUR, + "power_current_hour", + None, ], - "water_sensor_2": [ - "Water Sensor 2", - "mdi:water", - "water", - VOLUME_CUBIC_METERS, - "value2", + "power_last_5_minutes": [ + "Total consumption - Last 5 minutes", + "mdi:power-plug", + ENERGY_WATT_HOUR, + "power_last_5_minutes", + None, ], - "water_sensor_temperature": [ - "Water Sensor Temperature", - "mdi:temperature-celsius", - "water", - DEGREE, - "temperature", + "alwayson_today": [ + "Always on - Today", + "mdi:sleep", + ENERGY_WATT_HOUR, + "alwayson_today", + None, ], - "water_sensor_humidity": [ - "Water Sensor Humidity", - "mdi:water-percent", - "water", - UNIT_PERCENTAGE, - "humidity", +} +SOLAR_SENSORS = { + "solar_power": [ + "Total production - Active power", + None, + POWER_WATT, + "solar_power", + DEVICE_CLASS_POWER, ], - "water_sensor_battery": [ - "Water Sensor Battery", - "mdi:battery", - "water", - UNIT_PERCENTAGE, - "battery", + "solar_today": [ + "Total production - Today", + "mdi:white-balance-sunny", + ENERGY_WATT_HOUR, + "solar_today", + None, + ], + "solar_current_hour": [ + "Total production - Current hour", + "mdi:white-balance-sunny", + ENERGY_WATT_HOUR, + "solar_current_hour", + None, + ], +} +VOLTAGE_SENSORS = { + "phase_voltages_a": [ + "Phase voltages - A", + "mdi:flash", + VOLT, + "phase_voltage_a", + ["ONE", "TWO", "THREE_STAR", "THREE_DELTA"], + None, + ], + "phase_voltages_b": [ + "Phase voltages - B", + "mdi:flash", + VOLT, + "phase_voltage_b", + ["TWO", "THREE_STAR", "THREE_DELTA"], + None, + ], + "phase_voltages_c": [ + "Phase voltages - C", + "mdi:flash", + VOLT, + "phase_voltage_c", + ["THREE_STAR"], + None, + ], + "line_voltages_a": [ + "Line voltages - A", + "mdi:flash", + VOLT, + "line_voltage_a", + ["ONE", "TWO", "THREE_STAR", "THREE_DELTA"], + None, + ], + "line_voltages_b": [ + "Line voltages - B", + "mdi:flash", + VOLT, + "line_voltage_b", + ["TWO", "THREE_STAR", "THREE_DELTA"], + None, + ], + "line_voltages_c": [ + "Line voltages - C", + "mdi:flash", + VOLT, + 
"line_voltage_c", + ["THREE_STAR", "THREE_DELTA"], + None, ], } -SCAN_INTERVAL = timedelta(seconds=30) - -def setup_platform(hass, config, add_entities, discovery_info=None): +async def async_setup_entry(hass, config_entry, async_add_entities): """Set up the Smappee sensor.""" - smappee = hass.data[DATA_SMAPPEE] + smappee_base = hass.data[DOMAIN][BASE] - dev = [] - if smappee.is_remote_active: - for location_id in smappee.locations.keys(): - for sensor in SENSOR_TYPES: - if "remote" in SENSOR_TYPES[sensor]: - dev.append( - SmappeeSensor( - smappee, location_id, sensor, SENSOR_TYPES[sensor] - ) + entities = [] + for service_location in smappee_base.smappee.service_locations.values(): + # Add all basic sensors (realtime values and aggregators) + for sensor in TREND_SENSORS: + entities.append( + SmappeeSensor( + smappee_base=smappee_base, + service_location=service_location, + sensor=sensor, + attributes=TREND_SENSORS[sensor], + ) + ) + + # Add solar sensors + if service_location.has_solar_production: + for sensor in SOLAR_SENSORS: + entities.append( + SmappeeSensor( + smappee_base=smappee_base, + service_location=service_location, + sensor=sensor, + attributes=SOLAR_SENSORS[sensor], ) - elif "water" in SENSOR_TYPES[sensor]: - for items in smappee.info[location_id].get("sensors"): - dev.append( - SmappeeSensor( - smappee, - location_id, - "{}:{}".format(sensor, items.get("id")), - SENSOR_TYPES[sensor], - ) - ) + ) - if smappee.is_local_active: - if smappee.is_remote_active: - location_keys = smappee.locations.keys() - else: - location_keys = [None] - for location_id in location_keys: - for sensor in SENSOR_TYPES: - if "local" in SENSOR_TYPES[sensor]: - dev.append( - SmappeeSensor( - smappee, location_id, sensor, SENSOR_TYPES[sensor] - ) + # Add all CT measurements + for measurement_id, measurement in service_location.measurements.items(): + entities.append( + SmappeeSensor( + smappee_base=smappee_base, + service_location=service_location, + sensor="load", + attributes=[ + measurement.name, + None, + POWER_WATT, + measurement_id, + DEVICE_CLASS_POWER, + ], + ) + ) + + # Add phase- and line voltages + for sensor_name, sensor in VOLTAGE_SENSORS.items(): + if service_location.phase_type in sensor[4]: + entities.append( + SmappeeSensor( + smappee_base=smappee_base, + service_location=service_location, + sensor=sensor_name, + attributes=sensor, ) + ) - add_entities(dev, True) + # Add Gas and Water sensors + for sensor_id, sensor in service_location.sensors.items(): + for channel in sensor.channels: + gw_icon = "mdi:gas-cylinder" + if channel.get("type") == "water": + gw_icon = "mdi:water" + + entities.append( + SmappeeSensor( + smappee_base=smappee_base, + service_location=service_location, + sensor="sensor", + attributes=[ + channel.get("name"), + gw_icon, + channel.get("uom"), + f"{sensor_id}-{channel.get('channel')}", + None, + ], + ) + ) + + async_add_entities(entities, True) class SmappeeSensor(Entity): """Implementation of a Smappee sensor.""" - def __init__(self, smappee, location_id, sensor, attributes): + def __init__(self, smappee_base, service_location, sensor, attributes): """Initialize the Smappee sensor.""" - self._smappee = smappee - self._location_id = location_id - self._attributes = attributes + self._smappee_base = smappee_base + self._service_location = service_location self._sensor = sensor self.data = None self._state = None - self._name = self._attributes[0] - self._icon = self._attributes[1] - self._type = self._attributes[2] - self._unit_of_measurement = self._attributes[3] 
- self._smappe_name = self._attributes[4] + self._name = attributes[0] + self._icon = attributes[1] + self._unit_of_measurement = attributes[2] + self._sensor_id = attributes[3] + self._device_class = attributes[4] @property def name(self): - """Return the name of the sensor.""" - if self._location_id: - location_name = self._smappee.locations[self._location_id] - else: - location_name = "Local" + """Return the name for this sensor.""" + if self._sensor in ["sensor", "load"]: + return ( + f"{self._service_location.service_location_name} - " + f"{self._sensor.title()} - {self._name}" + ) - return f"{SENSOR_PREFIX} {location_name} {self._name}" + return f"{self._service_location.service_location_name} - {self._name}" @property def icon(self): @@ -176,97 +254,94 @@ class SmappeeSensor(Entity): """Return the state of the sensor.""" return self._state + @property + def device_class(self): + """Return the class of this device, from component DEVICE_CLASSES.""" + return self._device_class + @property def unit_of_measurement(self): """Return the unit of measurement of this entity, if any.""" return self._unit_of_measurement @property - def device_state_attributes(self): - """Return the state attributes of the device.""" - attr = {} - if self._location_id: - attr["Location Id"] = self._location_id - attr["Location Name"] = self._smappee.locations[self._location_id] - return attr - - def update(self): - """Get the latest data from Smappee and update the state.""" - self._smappee.update() - - if self._sensor in ["alwayson_today", "solar_today", "power_today"]: - data = self._smappee.consumption[self._location_id] - if data: - consumption = data.get("consumptions")[-1] - _LOGGER.debug("%s %s", self._sensor, consumption) - value = consumption.get(self._smappe_name) - self._state = round(value / 1000, 2) - elif self._sensor == "active_cosfi": - cosfi = self._smappee.active_cosfi() - _LOGGER.debug("%s %s", self._sensor, cosfi) - if cosfi: - self._state = round(cosfi, 2) - elif self._sensor == "current": - current = self._smappee.active_current() - _LOGGER.debug("%s %s", self._sensor, current) - if current: - self._state = round(current, 2) - elif self._sensor == "voltage": - voltage = self._smappee.active_voltage() - _LOGGER.debug("%s %s", self._sensor, voltage) - if voltage: - self._state = round(voltage, 3) - elif self._sensor == "active_power": - data = self._smappee.instantaneous - _LOGGER.debug("%s %s", self._sensor, data) - if data: - value1 = [ - float(i["value"]) - for i in data - if i["key"].endswith("phase0ActivePower") - ] - value2 = [ - float(i["value"]) - for i in data - if i["key"].endswith("phase1ActivePower") - ] - value3 = [ - float(i["value"]) - for i in data - if i["key"].endswith("phase2ActivePower") - ] - active_power = sum(value1 + value2 + value3) / 1000 - self._state = round(active_power, 2) - elif self._sensor == "solar": - data = self._smappee.instantaneous - _LOGGER.debug("%s %s", self._sensor, data) - if data: - value1 = [ - float(i["value"]) - for i in data - if i["key"].endswith("phase3ActivePower") - ] - value2 = [ - float(i["value"]) - for i in data - if i["key"].endswith("phase4ActivePower") - ] - value3 = [ - float(i["value"]) - for i in data - if i["key"].endswith("phase5ActivePower") - ] - power = sum(value1 + value2 + value3) / 1000 - self._state = round(power, 2) - elif self._type == "water": - sensor_name, sensor_id = self._sensor.split(":") - data = self._smappee.sensor_consumption[self._location_id].get( - int(sensor_id) + def unique_id(self,): + """Return the 
unique ID for this sensor.""" + if self._sensor in ["load", "sensor"]: + return ( + f"{self._service_location.device_serial_number}-" + f"{self._service_location.service_location_id}-" + f"{self._sensor}-{self._sensor_id}" ) - if data: - tempdata = data.get("records") - if tempdata: - consumption = tempdata[-1] - _LOGGER.debug("%s (%s) %s", sensor_name, sensor_id, consumption) - value = consumption.get(self._smappe_name) - self._state = value + + return ( + f"{self._service_location.device_serial_number}-" + f"{self._service_location.service_location_id}-" + f"{self._sensor}" + ) + + @property + def device_info(self): + """Return the device info for this sensor.""" + return { + "identifiers": {(DOMAIN, self._service_location.device_serial_number)}, + "name": self._service_location.service_location_name, + "manufacturer": "Smappee", + "model": self._service_location.device_model, + "sw_version": self._service_location.firmware_version, + } + + async def async_update(self): + """Get the latest data from Smappee and update the state.""" + await self._smappee_base.async_update() + + if self._sensor == "total_power": + self._state = self._service_location.total_power + elif self._sensor == "total_reactive_power": + self._state = self._service_location.total_reactive_power + elif self._sensor == "solar_power": + self._state = self._service_location.solar_power + elif self._sensor == "alwayson": + self._state = self._service_location.alwayson + elif self._sensor in [ + "phase_voltages_a", + "phase_voltages_b", + "phase_voltages_c", + ]: + phase_voltages = self._service_location.phase_voltages + if phase_voltages is not None: + if self._sensor == "phase_voltages_a": + self._state = phase_voltages[0] + elif self._sensor == "phase_voltages_b": + self._state = phase_voltages[1] + elif self._sensor == "phase_voltages_c": + self._state = phase_voltages[2] + elif self._sensor in ["line_voltages_a", "line_voltages_b", "line_voltages_c"]: + line_voltages = self._service_location.line_voltages + if line_voltages is not None: + if self._sensor == "line_voltages_a": + self._state = line_voltages[0] + elif self._sensor == "line_voltages_b": + self._state = line_voltages[1] + elif self._sensor == "line_voltages_c": + self._state = line_voltages[2] + elif self._sensor in [ + "power_today", + "power_current_hour", + "power_last_5_minutes", + "solar_today", + "solar_current_hour", + "alwayson_today", + ]: + trend_value = self._service_location.aggregated_values.get(self._sensor) + self._state = round(trend_value) if trend_value is not None else None + elif self._sensor == "load": + self._state = self._service_location.measurements.get( + self._sensor_id + ).active_total + elif self._sensor == "sensor": + sensor_id, channel_id = self._sensor_id.split("-") + sensor = self._service_location.sensors.get(int(sensor_id)) + for channel in sensor.channels: + if channel.get("channel") == int(channel_id): + self._state = channel.get("value_today") diff --git a/homeassistant/components/smappee/strings.json b/homeassistant/components/smappee/strings.json new file mode 100644 index 00000000000..d89d15a92c6 --- /dev/null +++ b/homeassistant/components/smappee/strings.json @@ -0,0 +1,14 @@ +{ + "config": { + "step": { + "pick_implementation": { + "title": "Pick Authentication Method" + } + }, + "abort": { + "authorize_url_timeout": "Timeout generating authorize url.", + "single_instance_allowed": "[%key:common::config_flow::abort::single_instance_allowed%]", + "missing_configuration": "The component is not configured. 
Please follow the documentation." + } + } +} diff --git a/homeassistant/components/smappee/switch.py b/homeassistant/components/smappee/switch.py index 6f6481d65f9..7d6a7f2405f 100644 --- a/homeassistant/components/smappee/switch.py +++ b/homeassistant/components/smappee/switch.py @@ -1,62 +1,105 @@ -"""Support for interacting with Smappee Comport Plugs.""" +"""Support for interacting with Smappee Comport Plugs, Switches and Output Modules.""" +from datetime import timedelta import logging from homeassistant.components.switch import SwitchEntity -from . import DATA_SMAPPEE +from .const import BASE, DOMAIN _LOGGER = logging.getLogger(__name__) -ICON = "mdi:power-plug" +SWITCH_PREFIX = "Switch" +ICON = "mdi:toggle-switch" +SCAN_INTERVAL = timedelta(seconds=5) -def setup_platform(hass, config, add_entities, discovery_info=None): +async def async_setup_entry(hass, config_entry, async_add_entities): """Set up the Smappee Comfort Plugs.""" - smappee = hass.data[DATA_SMAPPEE] + smappee_base = hass.data[DOMAIN][BASE] - dev = [] - if smappee.is_remote_active: - for location_id in smappee.locations.keys(): - for items in smappee.info[location_id].get("actuators"): - if items.get("name") != "": - _LOGGER.debug("Remote actuator %s", items) - dev.append( - SmappeeSwitch( - smappee, items.get("name"), location_id, items.get("id") + entities = [] + for service_location in smappee_base.smappee.service_locations.values(): + for actuator_id, actuator in service_location.actuators.items(): + if actuator.type in ["SWITCH", "COMFORT_PLUG"]: + entities.append( + SmappeeActuator( + smappee_base, + service_location, + actuator.name, + actuator_id, + actuator.type, + actuator.serialnumber, + ) + ) + elif actuator.type == "INFINITY_OUTPUT_MODULE": + for option in actuator.state_options: + entities.append( + SmappeeActuator( + smappee_base, + service_location, + actuator.name, + actuator_id, + actuator.type, + actuator.serialnumber, + actuator_state_option=option, ) ) - elif smappee.is_local_active: - for items in smappee.local_devices: - _LOGGER.debug("Local actuator %s", items) - dev.append( - SmappeeSwitch(smappee, items.get("value"), None, items.get("key")) - ) - add_entities(dev) + + async_add_entities(entities, True) -class SmappeeSwitch(SwitchEntity): +class SmappeeActuator(SwitchEntity): """Representation of a Smappee Comport Plug.""" - def __init__(self, smappee, name, location_id, switch_id): + def __init__( + self, + smappee_base, + service_location, + name, + actuator_id, + actuator_type, + actuator_serialnumber, + actuator_state_option=None, + ): """Initialize a new Smappee Comfort Plug.""" - self._name = name - self._state = False - self._smappee = smappee - self._location_id = location_id - self._switch_id = switch_id - self._remoteswitch = True - if location_id is None: - self._remoteswitch = False + self._smappee_base = smappee_base + self._service_location = service_location + self._actuator_name = name + self._actuator_id = actuator_id + self._actuator_type = actuator_type + self._actuator_serialnumber = actuator_serialnumber + self._actuator_state_option = actuator_state_option + self._state = self._service_location.actuators.get(actuator_id).state + self._connection_state = self._service_location.actuators.get( + actuator_id + ).connection_state @property def name(self): """Return the name of the switch.""" - return self._name + if self._actuator_type == "INFINITY_OUTPUT_MODULE": + return ( + f"{self._service_location.service_location_name} - " + f"Output module - {self._actuator_name} - 
{self._actuator_state_option}" + ) + + # Switch or comfort plug + return ( + f"{self._service_location.service_location_name} - " + f"{self._actuator_type.title()} - {self._actuator_name}" + ) @property def is_on(self): """Return true if switch is on.""" - return self._state + if self._actuator_type == "INFINITY_OUTPUT_MODULE": + return ( + self._service_location.actuators.get(self._actuator_id).state + == self._actuator_state_option + ) + + # Switch or comfort plug + return self._state == "ON_ON" @property def icon(self): @@ -65,24 +108,80 @@ class SmappeeSwitch(SwitchEntity): def turn_on(self, **kwargs): """Turn on Comport Plug.""" - if self._smappee.actuator_on( - self._location_id, self._switch_id, self._remoteswitch - ): - self._state = True + if self._actuator_type in ["SWITCH", "COMFORT_PLUG"]: + self._service_location.set_actuator_state(self._actuator_id, state="ON_ON") + elif self._actuator_type == "INFINITY_OUTPUT_MODULE": + self._service_location.set_actuator_state( + self._actuator_id, state=self._actuator_state_option + ) def turn_off(self, **kwargs): """Turn off Comport Plug.""" - if self._smappee.actuator_off( - self._location_id, self._switch_id, self._remoteswitch - ): - self._state = False + if self._actuator_type in ["SWITCH", "COMFORT_PLUG"]: + self._service_location.set_actuator_state( + self._actuator_id, state="OFF_OFF" + ) + elif self._actuator_type == "INFINITY_OUTPUT_MODULE": + self._service_location.set_actuator_state( + self._actuator_id, state="PLACEHOLDER", api=False + ) @property - def device_state_attributes(self): - """Return the state attributes of the device.""" - attr = {} - if self._remoteswitch: - attr["Location Id"] = self._location_id - attr["Location Name"] = self._smappee.locations[self._location_id] - attr["Switch Id"] = self._switch_id - return attr + def available(self): + """Return True if entity is available. 
Unavailable for COMFORT_PLUGS.""" + return ( + self._connection_state == "CONNECTED" + or self._actuator_type == "COMFORT_PLUG" + ) + + @property + def today_energy_kwh(self): + """Return the today total energy usage in kWh.""" + if self._actuator_type == "SWITCH": + cons = self._service_location.actuators.get( + self._actuator_id + ).consumption_today + if cons is not None: + return round(cons / 1000.0, 2) + return None + + @property + def unique_id(self,): + """Return the unique ID for this switch.""" + if self._actuator_type == "INFINITY_OUTPUT_MODULE": + return ( + f"{self._service_location.device_serial_number}-" + f"{self._service_location.service_location_id}-actuator-" + f"{self._actuator_id}-{self._actuator_state_option}" + ) + + # Switch or comfort plug + return ( + f"{self._service_location.device_serial_number}-" + f"{self._service_location.service_location_id}-actuator-" + f"{self._actuator_id}" + ) + + @property + def device_info(self): + """Return the device info for this switch.""" + return { + "identifiers": {(DOMAIN, self._service_location.device_serial_number)}, + "name": self._service_location.service_location_name, + "manufacturer": "Smappee", + "model": self._service_location.device_model, + "sw_version": self._service_location.firmware_version, + } + + async def async_update(self): + """Get the latest data from Smappee and update the state.""" + await self._smappee_base.async_update() + + new_state = self._service_location.actuators.get(self._actuator_id).state + if new_state != self._state: + self._state = new_state + self.async_write_ha_state() + + self._connection_state = self._service_location.actuators.get( + self._actuator_id + ).connection_state diff --git a/homeassistant/components/smappee/translations/ca.json b/homeassistant/components/smappee/translations/ca.json new file mode 100644 index 00000000000..b34b7b86d6f --- /dev/null +++ b/homeassistant/components/smappee/translations/ca.json @@ -0,0 +1,14 @@ +{ + "config": { + "abort": { + "authorize_url_timeout": "Temps d'espera esgotat generant l'URL d'autoritzaci\u00f3.", + "missing_configuration": "El component no est\u00e0 configurat. Mira'n la documentaci\u00f3.", + "single_instance_allowed": "Ja configurat. Nom\u00e9s \u00e9s possible una sola configuraci\u00f3." + }, + "step": { + "pick_implementation": { + "title": "Selecciona un m\u00e8tode d'autenticaci\u00f3" + } + } + } +} \ No newline at end of file diff --git a/homeassistant/components/smappee/translations/en.json b/homeassistant/components/smappee/translations/en.json new file mode 100644 index 00000000000..fee105bf825 --- /dev/null +++ b/homeassistant/components/smappee/translations/en.json @@ -0,0 +1,14 @@ +{ + "config": { + "abort": { + "authorize_url_timeout": "Timeout generating authorize url.", + "missing_configuration": "The component is not configured. Please follow the documentation.", + "single_instance_allowed": "Already configured. Only a single configuration possible." 
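Review note on the SmappeeActuator entities above: the smappee library reports plug state as combined strings, so a SWITCH or COMFORT_PLUG is considered on when its state is "ON_ON" and turn_off sends "OFF_OFF", while an INFINITY_OUTPUT_MODULE gets one switch entity per state option and that entity is on when the actuator state equals its option. COMFORT_PLUG actuators apparently expose no usable connection state, which is why available treats them as always available. A rough sketch of the convention (option names invented):

    plug_state = "ON_ON"            # SWITCH / COMFORT_PLUG
    plug_is_on = plug_state == "ON_ON"
    module_state = "OPTION_A"       # INFINITY_OUTPUT_MODULE actuator state
    entity_option = "OPTION_A"      # the option this particular entity represents
    module_entity_is_on = module_state == entity_option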
+ }, + "step": { + "pick_implementation": { + "title": "Pick Authentication Method" + } + } + } +} \ No newline at end of file diff --git a/homeassistant/components/smappee/translations/es.json b/homeassistant/components/smappee/translations/es.json new file mode 100644 index 00000000000..57595e99da8 --- /dev/null +++ b/homeassistant/components/smappee/translations/es.json @@ -0,0 +1,14 @@ +{ + "config": { + "abort": { + "authorize_url_timeout": "Tiempo de espera agotado para la autorizaci\u00f3n de la url.", + "missing_configuration": "El componente no est\u00e1 configurado. Por favor, siga la documentaci\u00f3n.", + "single_instance_allowed": "Ya est\u00e1 configurado. S\u00f3lo es posible una \u00fanica configuraci\u00f3n." + }, + "step": { + "pick_implementation": { + "title": "Elija el m\u00e9todo de autenticaci\u00f3n" + } + } + } +} \ No newline at end of file diff --git a/homeassistant/components/smappee/translations/it.json b/homeassistant/components/smappee/translations/it.json new file mode 100644 index 00000000000..095557aeb5d --- /dev/null +++ b/homeassistant/components/smappee/translations/it.json @@ -0,0 +1,14 @@ +{ + "config": { + "abort": { + "authorize_url_timeout": "Tempo scaduto nel generare l'URL di autorizzazione.", + "missing_configuration": "Il componente non \u00e8 configurato. Si prega di seguire la documentazione.", + "single_instance_allowed": "Gi\u00e0 configurato. \u00c8 possibile una sola configurazione." + }, + "step": { + "pick_implementation": { + "title": "Scegliere il metodo di autenticazione" + } + } + } +} \ No newline at end of file diff --git a/homeassistant/components/smappee/translations/ko.json b/homeassistant/components/smappee/translations/ko.json new file mode 100644 index 00000000000..6e6331f30f1 --- /dev/null +++ b/homeassistant/components/smappee/translations/ko.json @@ -0,0 +1,14 @@ +{ + "config": { + "abort": { + "authorize_url_timeout": "\uc778\uc99d url \uc0dd\uc131 \uc2dc\uac04\uc774 \ucd08\uacfc\ub418\uc5c8\uc2b5\ub2c8\ub2e4.", + "missing_configuration": "\uad6c\uc131\uc694\uc18c\uac00 \uad6c\uc131\ub418\uc9c0 \uc54a\uc558\uc2b5\ub2c8\ub2e4. \uc124\uba85\uc11c\ub97c \ucc38\uace0\ud574\uc8fc\uc138\uc694.", + "single_instance_allowed": "\uc774\ubbf8 \uad6c\uc131\ub418\uc5c8\uc2b5\ub2c8\ub2e4. \ud558\ub098\uc758 \uad6c\uc131\ub9cc \uac00\ub2a5\ud569\ub2c8\ub2e4." + }, + "step": { + "pick_implementation": { + "title": "\uc778\uc99d \ubc29\ubc95 \uc120\ud0dd\ud558\uae30" + } + } + } +} \ No newline at end of file diff --git a/homeassistant/components/smappee/translations/lb.json b/homeassistant/components/smappee/translations/lb.json new file mode 100644 index 00000000000..8169e17a6de --- /dev/null +++ b/homeassistant/components/smappee/translations/lb.json @@ -0,0 +1,13 @@ +{ + "config": { + "abort": { + "authorize_url_timeout": "Z\u00e4it Iwwerschreidung beim gener\u00e9ieren vun der Autorisatiouns URL.", + "missing_configuration": "Komponent ass nach net konfigur\u00e9iert. Follegt w.e.g der Dokumentatioun." 
+ }, + "step": { + "pick_implementation": { + "title": "Wiel Authentifikatiouns Method aus" + } + } + } +} \ No newline at end of file diff --git a/homeassistant/components/smappee/translations/no.json b/homeassistant/components/smappee/translations/no.json new file mode 100644 index 00000000000..6b2141fd61e --- /dev/null +++ b/homeassistant/components/smappee/translations/no.json @@ -0,0 +1,13 @@ +{ + "config": { + "abort": { + "authorize_url_timeout": "Tidsavbrudd ved generering av autoriseringsadresse.", + "missing_configuration": "Komponenten er ikke konfigurert. Vennligst f\u00f8lg dokumentasjonen." + }, + "step": { + "pick_implementation": { + "title": "Velg godkjenningsmetode" + } + } + } +} \ No newline at end of file diff --git a/homeassistant/components/smappee/translations/pl.json b/homeassistant/components/smappee/translations/pl.json new file mode 100644 index 00000000000..8f9f0d9803d --- /dev/null +++ b/homeassistant/components/smappee/translations/pl.json @@ -0,0 +1,14 @@ +{ + "config": { + "abort": { + "authorize_url_timeout": "Przekroczono limit czasu generowania URL autoryzacji.", + "missing_configuration": "Komponent nie jest skonfigurowany. Post\u0119puj zgodnie z dokumentacj\u0105.", + "single_instance_allowed": "Ju\u017c skonfigurowano. Mo\u017cliwa jest tylko jedna konfiguracja." + }, + "step": { + "pick_implementation": { + "title": "Wybierz metod\u0119 uwierzytelniania" + } + } + } +} \ No newline at end of file diff --git a/homeassistant/components/smappee/translations/ru.json b/homeassistant/components/smappee/translations/ru.json new file mode 100644 index 00000000000..abed7656da7 --- /dev/null +++ b/homeassistant/components/smappee/translations/ru.json @@ -0,0 +1,14 @@ +{ + "config": { + "abort": { + "authorize_url_timeout": "\u0418\u0441\u0442\u0435\u043a\u043b\u043e \u0432\u0440\u0435\u043c\u044f \u0433\u0435\u043d\u0435\u0440\u0430\u0446\u0438\u0438 \u0441\u0441\u044b\u043b\u043a\u0438 \u0430\u0432\u0442\u043e\u0440\u0438\u0437\u0430\u0446\u0438\u0438.", + "missing_configuration": "\u041d\u0435 \u0443\u0434\u0430\u043b\u043e\u0441\u044c \u0437\u0430\u0432\u0435\u0440\u0448\u0438\u0442\u044c \u043d\u0430\u0441\u0442\u0440\u043e\u0439\u043a\u0443. \u041f\u043e\u0436\u0430\u043b\u0443\u0439\u0441\u0442\u0430, \u043e\u0437\u043d\u0430\u043a\u043e\u043c\u044c\u0442\u0435\u0441\u044c \u0441 \u0438\u043d\u0441\u0442\u0440\u0443\u043a\u0446\u0438\u044f\u043c\u0438.", + "single_instance_allowed": "\u041d\u0430\u0441\u0442\u0440\u043e\u0439\u043a\u0430 \u0443\u0436\u0435 \u0432\u044b\u043f\u043e\u043b\u043d\u0435\u043d\u0430. \u0412\u043e\u0437\u043c\u043e\u0436\u043d\u043e \u0434\u043e\u0431\u0430\u0432\u0438\u0442\u044c \u0442\u043e\u043b\u044c\u043a\u043e \u043e\u0434\u043d\u0443 \u043a\u043e\u043d\u0444\u0438\u0433\u0443\u0440\u0430\u0446\u0438\u044e." 
+ }, + "step": { + "pick_implementation": { + "title": "\u0412\u044b\u0431\u0435\u0440\u0438\u0442\u0435 \u0441\u043f\u043e\u0441\u043e\u0431 \u0430\u0443\u0442\u0435\u043d\u0442\u0438\u0444\u0438\u043a\u0430\u0446\u0438\u0438" + } + } + } +} \ No newline at end of file diff --git a/homeassistant/components/smappee/translations/zh-Hant.json b/homeassistant/components/smappee/translations/zh-Hant.json new file mode 100644 index 00000000000..3ff9da90cfb --- /dev/null +++ b/homeassistant/components/smappee/translations/zh-Hant.json @@ -0,0 +1,14 @@ +{ + "config": { + "abort": { + "authorize_url_timeout": "\u7522\u751f\u8a8d\u8b49 URL \u6642\u903e\u6642\u3002", + "missing_configuration": "\u5143\u4ef6\u5c1a\u672a\u8a2d\u7f6e\uff0c\u8acb\u53c3\u95b1\u6587\u4ef6\u8aaa\u660e\u3002", + "single_instance_allowed": "\u50c5\u80fd\u8a2d\u5b9a\u4e00\u7d44\u8a2d\u5099\u3002" + }, + "step": { + "pick_implementation": { + "title": "\u9078\u64c7\u9a57\u8b49\u6a21\u5f0f" + } + } + } +} \ No newline at end of file diff --git a/homeassistant/components/smartthings/translations/nn.json b/homeassistant/components/smartthings/translations/nn.json deleted file mode 100644 index 68782329fac..00000000000 --- a/homeassistant/components/smartthings/translations/nn.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "title": "SmartThings" -} \ No newline at end of file diff --git a/homeassistant/components/smartthings/translations/th.json b/homeassistant/components/smartthings/translations/th.json deleted file mode 100644 index 47b04ed2c6a..00000000000 --- a/homeassistant/components/smartthings/translations/th.json +++ /dev/null @@ -1,11 +0,0 @@ -{ - "config": { - "step": { - "wait_install": { - "description": "\u0e42\u0e1b\u0e23\u0e14\u0e15\u0e34\u0e14\u0e15\u0e31\u0e49\u0e07 Home Assistant SmartApp \u0e43\u0e19\u0e15\u0e33\u0e41\u0e2b\u0e19\u0e48\u0e07\u0e2d\u0e22\u0e48\u0e32\u0e07\u0e19\u0e49\u0e2d\u0e22\u0e2b\u0e19\u0e36\u0e48\u0e07\u0e41\u0e2b\u0e48\u0e07\u0e41\u0e25\u0e49\u0e27\u0e04\u0e25\u0e34\u0e01\u0e2a\u0e48\u0e07", - "title": "\u0e15\u0e34\u0e14\u0e15\u0e31\u0e49\u0e07 SmartApp" - } - } - }, - "title": "SmartThings" -} \ No newline at end of file diff --git a/homeassistant/components/sms/__init__.py b/homeassistant/components/sms/__init__.py index b8d46a4aec5..0b8f9d986e3 100644 --- a/homeassistant/components/sms/__init__.py +++ b/homeassistant/components/sms/__init__.py @@ -1,33 +1,73 @@ """The sms component.""" +import asyncio import logging -import gammu # pylint: disable=import-error, no-member import voluptuous as vol +from homeassistant.config_entries import SOURCE_IMPORT, ConfigEntry from homeassistant.const import CONF_DEVICE +from homeassistant.core import HomeAssistant from homeassistant.helpers import config_validation as cv -from .const import DOMAIN +from .const import DOMAIN, SMS_GATEWAY +from .gateway import create_sms_gateway _LOGGER = logging.getLogger(__name__) +PLATFORMS = ["sensor"] + CONFIG_SCHEMA = vol.Schema( {DOMAIN: vol.Schema({vol.Required(CONF_DEVICE): cv.isdevice})}, extra=vol.ALLOW_EXTRA, ) -def setup(hass, config): +async def async_setup(hass, config): """Configure Gammu state machine.""" - conf = config[DOMAIN] - device = conf.get(CONF_DEVICE) - gateway = gammu.StateMachine() # pylint: disable=no-member - try: - gateway.SetConfig(0, dict(Device=device, Connection="at")) - gateway.Init() - except gammu.GSMError as exc: # pylint: disable=no-member - _LOGGER.error("Failed to initialize, error %s", exc) - return False - else: - hass.data[DOMAIN] = gateway + hass.data.setdefault(DOMAIN, {}) + 
sms_config = config.get(DOMAIN, {}) + if not sms_config: return True + + hass.async_create_task( + hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_IMPORT}, data=sms_config, + ) + ) + + return True + + +async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry): + """Configure Gammu state machine.""" + + device = entry.data[CONF_DEVICE] + config = {"Device": device, "Connection": "at"} + gateway = await create_sms_gateway(config, hass) + if not gateway: + return False + hass.data[DOMAIN][SMS_GATEWAY] = gateway + for component in PLATFORMS: + hass.async_create_task( + hass.config_entries.async_forward_entry_setup(entry, component) + ) + + return True + + +async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry): + """Unload a config entry.""" + unload_ok = all( + await asyncio.gather( + *[ + hass.config_entries.async_forward_entry_unload(entry, component) + for component in PLATFORMS + ] + ) + ) + + if unload_ok: + gateway = hass.data[DOMAIN].pop(SMS_GATEWAY) + await gateway.terminate_async() + + return unload_ok diff --git a/homeassistant/components/sms/config_flow.py b/homeassistant/components/sms/config_flow.py new file mode 100644 index 00000000000..148360416a2 --- /dev/null +++ b/homeassistant/components/sms/config_flow.py @@ -0,0 +1,74 @@ +"""Config flow for SMS integration.""" +import logging + +import gammu # pylint: disable=import-error, no-member +import voluptuous as vol + +from homeassistant import config_entries, core, exceptions +from homeassistant.const import CONF_DEVICE + +from .const import DOMAIN # pylint:disable=unused-import +from .gateway import create_sms_gateway + +_LOGGER = logging.getLogger(__name__) + +DATA_SCHEMA = vol.Schema({vol.Required(CONF_DEVICE): str}) + + +async def get_imei_from_config(hass: core.HomeAssistant, data): + """Validate the user input allows us to connect. + + Data has the keys from DATA_SCHEMA with values provided by the user. + """ + device = data[CONF_DEVICE] + config = {"Device": device, "Connection": "at"} + gateway = await create_sms_gateway(config, hass) + if not gateway: + raise CannotConnect + try: + imei = await gateway.get_imei_async() + except gammu.GSMError: # pylint: disable=no-member + raise CannotConnect + finally: + await gateway.terminate_async() + + # Return info that you want to store in the config entry. 
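Review note: get_imei_from_config above is the shared connection check for both the user step and the YAML import step of the flow handler added below; the IMEI it returns is used as the config entry's unique ID and as its title. Roughly (device path and IMEI invented for illustration):

    user_input = {"device": "/dev/ttyUSB0"}   # shape of DATA_SCHEMA
    imei = "356938035643809"                  # result of get_imei_from_config()
    # async_set_unique_id(imei), then async_create_entry(title=imei, data=user_input)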
+ return imei + + +class SMSFlowHandler(config_entries.ConfigFlow, domain=DOMAIN): + """Handle a config flow for SMS integration.""" + + VERSION = 1 + CONNECTION_CLASS = config_entries.CONN_CLASS_LOCAL_POLL + + async def async_step_user(self, user_input=None): + """Handle the initial step.""" + if self._async_current_entries(): + return self.async_abort(reason="single_instance_allowed") + errors = {} + if user_input is not None: + try: + imei = await get_imei_from_config(self.hass, user_input) + except CannotConnect: + errors["base"] = "cannot_connect" + except Exception: # pylint: disable=broad-except + _LOGGER.exception("Unexpected exception") + errors["base"] = "unknown" + + if not errors: + await self.async_set_unique_id(imei) + self._abort_if_unique_id_configured() + return self.async_create_entry(title=imei, data=user_input) + + return self.async_show_form( + step_id="user", data_schema=DATA_SCHEMA, errors=errors + ) + + async def async_step_import(self, user_input): + """Handle import.""" + return await self.async_step_user(user_input) + + +class CannotConnect(exceptions.HomeAssistantError): + """Error to indicate we cannot connect.""" diff --git a/homeassistant/components/sms/const.py b/homeassistant/components/sms/const.py index aff2b704e05..b73e7954fc1 100644 --- a/homeassistant/components/sms/const.py +++ b/homeassistant/components/sms/const.py @@ -1,3 +1,4 @@ """Constants for sms Component.""" DOMAIN = "sms" +SMS_GATEWAY = "SMS_GATEWAY" diff --git a/homeassistant/components/sms/gateway.py b/homeassistant/components/sms/gateway.py new file mode 100644 index 00000000000..000434561bc --- /dev/null +++ b/homeassistant/components/sms/gateway.py @@ -0,0 +1,165 @@ +"""The sms gateway to interact with a GSM modem.""" +import logging + +import gammu # pylint: disable=import-error, no-member +from gammu.asyncworker import ( # pylint: disable=import-error, no-member + GammuAsyncWorker, +) + +from homeassistant.core import callback + +from .const import DOMAIN + +_LOGGER = logging.getLogger(__name__) + + +class Gateway: + """SMS gateway to interact with a GSM modem.""" + + def __init__(self, worker, hass): + """Initialize the sms gateway.""" + self._worker = worker + self._hass = hass + + async def init_async(self): + """Initialize the sms gateway asynchronously.""" + try: + await self._worker.set_incoming_sms_async() + except gammu.ERR_NOTSUPPORTED: + _LOGGER.warning("Your phone does not support incoming SMS notifications!") + else: + await self._worker.set_incoming_callback_async(self.sms_callback) + + def sms_callback(self, state_machine, callback_type, callback_data): + """Receive notification about incoming event. 
+ + @param state_machine: state machine which invoked action + @type state_machine: gammu.StateMachine + @param callback_type: type of action, one of Call, SMS, CB, USSD + @type callback_type: string + @param data: event data + @type data: hash + """ + _LOGGER.debug( + "Received incoming event type:%s,data:%s", callback_type, callback_data + ) + entries = self.get_and_delete_all_sms(state_machine) + _LOGGER.debug("SMS entries:%s", entries) + data = list() + + for entry in entries: + decoded_entry = gammu.DecodeSMS(entry) + message = entry[0] + _LOGGER.debug("Processing sms:%s,decoded:%s", message, decoded_entry) + if decoded_entry is None: + text = message["Text"] + else: + text = "" + for inner_entry in decoded_entry["Entries"]: + if inner_entry["Buffer"] is not None: + text = text + inner_entry["Buffer"] + + event_data = dict( + phone=message["Number"], date=str(message["DateTime"]), message=text + ) + + _LOGGER.debug("Append event data:%s", event_data) + data.append(event_data) + + self._hass.add_job(self._notify_incoming_sms, data) + + # pylint: disable=no-self-use + def get_and_delete_all_sms(self, state_machine, force=False): + """Read and delete all SMS in the modem.""" + # Read SMS memory status ... + memory = state_machine.GetSMSStatus() + # ... and calculate number of messages + remaining = memory["SIMUsed"] + memory["PhoneUsed"] + start_remaining = remaining + # Get all sms + start = True + entries = list() + all_parts = -1 + all_parts_arrived = False + _LOGGER.debug("Start remaining:%i", start_remaining) + + try: + while remaining > 0: + if start: + entry = state_machine.GetNextSMS(Folder=0, Start=True) + all_parts = entry[0]["UDH"]["AllParts"] + part_number = entry[0]["UDH"]["PartNumber"] + is_single_part = all_parts == 0 + is_multi_part = 0 <= all_parts < start_remaining + _LOGGER.debug("All parts:%i", all_parts) + _LOGGER.debug("Part Number:%i", part_number) + _LOGGER.debug("Remaining:%i", remaining) + all_parts_arrived = is_multi_part or is_single_part + _LOGGER.debug("Start all_parts_arrived:%s", all_parts_arrived) + start = False + else: + entry = state_machine.GetNextSMS( + Folder=0, Location=entry[0]["Location"] + ) + + if all_parts_arrived or force: + remaining = remaining - 1 + entries.append(entry) + + # delete retrieved sms + _LOGGER.debug("Deleting message") + state_machine.DeleteSMS(Folder=0, Location=entry[0]["Location"]) + else: + _LOGGER.debug("Not all parts have arrived") + break + + except gammu.ERR_EMPTY: + # error is raised if memory is empty (this induces wrong reported + # memory status) + _LOGGER.info("Failed to read messages!") + + # Link all SMS when there are concatenated messages + entries = gammu.LinkSMS(entries) + + return entries + + @callback + def _notify_incoming_sms(self, messages): + """Notify hass when an incoming SMS message is received.""" + for message in messages: + event_data = { + "phone": message["phone"], + "date": message["date"], + "text": message["message"], + } + self._hass.bus.async_fire(f"{DOMAIN}.incoming_sms", event_data) + + async def send_sms_async(self, message): + """Send sms message via the worker.""" + return await self._worker.send_sms_async(message) + + async def get_imei_async(self): + """Get the IMEI of the device.""" + return await self._worker.get_imei_async() + + async def get_signal_quality_async(self): + """Get the current signal level of the modem.""" + return await self._worker.get_signal_quality_async() + + async def terminate_async(self): + """Terminate modem connection.""" + return await 
self._worker.terminate_async() + + +async def create_sms_gateway(config, hass): + """Create the sms gateway.""" + try: + worker = GammuAsyncWorker() + worker.configure(config) + await worker.init_async() + gateway = Gateway(worker, hass) + await gateway.init_async() + return gateway + except gammu.GSMError as exc: # pylint: disable=no-member + _LOGGER.error("Failed to initialize, error %s", exc) + return None diff --git a/homeassistant/components/sms/manifest.json b/homeassistant/components/sms/manifest.json index 8b65ac77e59..c3c7db2aa61 100644 --- a/homeassistant/components/sms/manifest.json +++ b/homeassistant/components/sms/manifest.json @@ -1,7 +1,8 @@ { "domain": "sms", "name": "SMS notifications via GSM-modem", + "config_flow": true, "documentation": "https://www.home-assistant.io/integrations/sms", - "requirements": ["python-gammu==2.12"], + "requirements": ["python-gammu==3.0"], "codeowners": ["@ocalvo"] } diff --git a/homeassistant/components/sms/notify.py b/homeassistant/components/sms/notify.py index f39ae8153bd..0b867b2e0a5 100644 --- a/homeassistant/components/sms/notify.py +++ b/homeassistant/components/sms/notify.py @@ -8,7 +8,7 @@ from homeassistant.components.notify import PLATFORM_SCHEMA, BaseNotificationSer from homeassistant.const import CONF_NAME, CONF_RECIPIENT import homeassistant.helpers.config_validation as cv -from .const import DOMAIN +from .const import DOMAIN, SMS_GATEWAY _LOGGER = logging.getLogger(__name__) @@ -19,8 +19,18 @@ PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( def get_service(hass, config, discovery_info=None): """Get the SMS notification service.""" - gateway = hass.data[DOMAIN] - number = config[CONF_RECIPIENT] + + if SMS_GATEWAY not in hass.data[DOMAIN]: + _LOGGER.error("SMS gateway not found, cannot initialize service") + return + + gateway = hass.data[DOMAIN][SMS_GATEWAY] + + if discovery_info is None: + number = config[CONF_RECIPIENT] + else: + number = discovery_info[CONF_RECIPIENT] + return SMSNotificationService(gateway, number) @@ -32,7 +42,7 @@ class SMSNotificationService(BaseNotificationService): self.gateway = gateway self.number = number - def send_message(self, message="", **kwargs): + async def send_message(self, message="", **kwargs): """Send SMS message.""" smsinfo = { "Class": -1, @@ -53,6 +63,6 @@ class SMSNotificationService(BaseNotificationService): encoded_message["Number"] = self.number try: # Actually send the message - self.gateway.SendSMS(encoded_message) + await self.gateway.send_sms_async(encoded_message) except gammu.GSMError as exc: # pylint: disable=no-member _LOGGER.error("Sending to %s failed: %s", self.number, exc) diff --git a/homeassistant/components/sms/sensor.py b/homeassistant/components/sms/sensor.py new file mode 100644 index 00000000000..08168994b07 --- /dev/null +++ b/homeassistant/components/sms/sensor.py @@ -0,0 +1,76 @@ +"""Support for SMS dongle sensor.""" +import logging + +import gammu # pylint: disable=import-error, no-member + +from homeassistant.const import DEVICE_CLASS_SIGNAL_STRENGTH +from homeassistant.helpers.entity import Entity + +from .const import DOMAIN, SMS_GATEWAY + +_LOGGER = logging.getLogger(__name__) + + +async def async_setup_entry(hass, config_entry, async_add_entities): + """Set up the GSM Signal Sensor sensor.""" + gateway = hass.data[DOMAIN][SMS_GATEWAY] + entities = [] + imei = await gateway.get_imei_async() + name = f"gsm_signal_imei_{imei}" + entities.append(GSMSignalSensor(hass, gateway, name,)) + async_add_entities(entities, True) + + +class GSMSignalSensor(Entity): + 
"""Implementation of a GSM Signal sensor.""" + + def __init__( + self, hass, gateway, name, + ): + """Initialize the GSM Signal sensor.""" + self._hass = hass + self._gateway = gateway + self._name = name + self._state = None + + @property + def name(self): + """Return the name of the sensor.""" + return self._name + + @property + def unit_of_measurement(self): + """Return the unit the value is expressed in.""" + return "dB" + + @property + def device_class(self): + """Return the class of this sensor.""" + return DEVICE_CLASS_SIGNAL_STRENGTH + + @property + def available(self): + """Return if the sensor data are available.""" + return self._state is not None + + @property + def state(self): + """Return the state of the device.""" + return self._state["SignalStrength"] + + async def async_update(self): + """Get the latest data from the modem.""" + try: + self._state = await self._gateway.get_signal_quality_async() + except gammu.GSMError as exc: # pylint: disable=no-member + _LOGGER.error("Failed to read signal quality: %s", exc) + + @property + def device_state_attributes(self): + """Return the sensor attributes.""" + return self._state + + @property + def entity_registry_enabled_default(self) -> bool: + """Return if the entity should be enabled when first added to the entity registry.""" + return False diff --git a/homeassistant/components/sms/strings.json b/homeassistant/components/sms/strings.json new file mode 100644 index 00000000000..872cb17cbea --- /dev/null +++ b/homeassistant/components/sms/strings.json @@ -0,0 +1,18 @@ +{ + "config": { + "step": { + "user": { + "title": "Connect to the modem", + "data": { "device": "Device" } + } + }, + "error": { + "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]", + "unknown": "[%key:common::config_flow::error::unknown%]" + }, + "abort": { + "already_configured": "[%key:common::config_flow::abort::already_configured_device%]", + "single_instance_allowed": "[%key:common::config_flow::abort::single_instance_allowed%]" + } + } +} diff --git a/homeassistant/components/sms/translations/ca.json b/homeassistant/components/sms/translations/ca.json new file mode 100644 index 00000000000..2d2a609c43e --- /dev/null +++ b/homeassistant/components/sms/translations/ca.json @@ -0,0 +1,20 @@ +{ + "config": { + "abort": { + "already_configured": "El dispositiu ja est\u00e0 configurat", + "single_instance_allowed": "Ja configurat. Nom\u00e9s \u00e9s possible una sola configuraci\u00f3." + }, + "error": { + "cannot_connect": "No s'ha pogut connectar", + "unknown": "Error inesperat" + }, + "step": { + "user": { + "data": { + "device": "Dispositiu" + }, + "title": "Connexi\u00f3 al m\u00f2dem" + } + } + } +} \ No newline at end of file diff --git a/homeassistant/components/sms/translations/en.json b/homeassistant/components/sms/translations/en.json new file mode 100644 index 00000000000..dbbac1871c7 --- /dev/null +++ b/homeassistant/components/sms/translations/en.json @@ -0,0 +1,20 @@ +{ + "config": { + "abort": { + "already_configured": "Device is already configured", + "single_instance_allowed": "Already configured. Only a single configuration possible." 
+ }, + "error": { + "cannot_connect": "Failed to connect", + "unknown": "Unexpected error" + }, + "step": { + "user": { + "data": { + "device": "Device" + }, + "title": "Connect to the modem" + } + } + } +} \ No newline at end of file diff --git a/homeassistant/components/sms/translations/es.json b/homeassistant/components/sms/translations/es.json new file mode 100644 index 00000000000..9256ffecc28 --- /dev/null +++ b/homeassistant/components/sms/translations/es.json @@ -0,0 +1,20 @@ +{ + "config": { + "abort": { + "already_configured": "El dispositivo ya est\u00e1 configurado", + "single_instance_allowed": "Ya est\u00e1 configurado. S\u00f3lo es posible una \u00fanica configuraci\u00f3n." + }, + "error": { + "cannot_connect": "No se pudo conectar", + "unknown": "Error inesperado" + }, + "step": { + "user": { + "data": { + "device": "Dispositivo" + }, + "title": "Conectar con el m\u00f3dem" + } + } + } +} \ No newline at end of file diff --git a/homeassistant/components/upnp/translations/tr.json b/homeassistant/components/sms/translations/no.json similarity index 52% rename from homeassistant/components/upnp/translations/tr.json rename to homeassistant/components/sms/translations/no.json index 91503c17a07..98af331c1dd 100644 --- a/homeassistant/components/upnp/translations/tr.json +++ b/homeassistant/components/sms/translations/no.json @@ -3,8 +3,9 @@ "step": { "user": { "data": { - "enable_sensors": "Trafik sens\u00f6rleri ekleyin" - } + "device": "Enhet" + }, + "title": "Koble til modemet" } } } diff --git a/homeassistant/components/sms/translations/ru.json b/homeassistant/components/sms/translations/ru.json new file mode 100644 index 00000000000..85a99a37528 --- /dev/null +++ b/homeassistant/components/sms/translations/ru.json @@ -0,0 +1,20 @@ +{ + "config": { + "abort": { + "already_configured": "\u041d\u0430\u0441\u0442\u0440\u043e\u0439\u043a\u0430 \u044d\u0442\u043e\u0433\u043e \u0443\u0441\u0442\u0440\u043e\u0439\u0441\u0442\u0432\u0430 \u0443\u0436\u0435 \u0432\u044b\u043f\u043e\u043b\u043d\u0435\u043d\u0430.", + "single_instance_allowed": "\u041d\u0430\u0441\u0442\u0440\u043e\u0439\u043a\u0430 \u0443\u0436\u0435 \u0432\u044b\u043f\u043e\u043b\u043d\u0435\u043d\u0430. \u0412\u043e\u0437\u043c\u043e\u0436\u043d\u043e \u0434\u043e\u0431\u0430\u0432\u0438\u0442\u044c \u0442\u043e\u043b\u044c\u043a\u043e \u043e\u0434\u043d\u0443 \u043a\u043e\u043d\u0444\u0438\u0433\u0443\u0440\u0430\u0446\u0438\u044e." + }, + "error": { + "cannot_connect": "\u041e\u0448\u0438\u0431\u043a\u0430 \u043f\u043e\u0434\u043a\u043b\u044e\u0447\u0435\u043d\u0438\u044f.", + "unknown": "\u041d\u0435\u043f\u0440\u0435\u0434\u0432\u0438\u0434\u0435\u043d\u043d\u0430\u044f \u043e\u0448\u0438\u0431\u043a\u0430." 
+ }, + "step": { + "user": { + "data": { + "device": "\u0423\u0441\u0442\u0440\u043e\u0439\u0441\u0442\u0432\u043e" + }, + "title": "\u041f\u043e\u0434\u043a\u043b\u044e\u0447\u0435\u043d\u0438\u0435 \u043a \u0443\u0441\u0442\u0440\u043e\u0439\u0441\u0442\u0432\u0443" + } + } + } +} \ No newline at end of file diff --git a/homeassistant/components/sms/translations/zh-Hant.json b/homeassistant/components/sms/translations/zh-Hant.json new file mode 100644 index 00000000000..30951f88d0d --- /dev/null +++ b/homeassistant/components/sms/translations/zh-Hant.json @@ -0,0 +1,20 @@ +{ + "config": { + "abort": { + "already_configured": "\u8a2d\u5099\u5df2\u7d93\u8a2d\u5b9a\u5b8c\u6210", + "single_instance_allowed": "\u50c5\u80fd\u8a2d\u5b9a\u4e00\u7d44\u8a2d\u5099\u3002" + }, + "error": { + "cannot_connect": "\u9023\u7dda\u5931\u6557", + "unknown": "\u672a\u9810\u671f\u932f\u8aa4" + }, + "step": { + "user": { + "data": { + "device": "\u8a2d\u5099" + }, + "title": "\u9023\u7dda\u81f3\u6578\u64da\u6a5f" + } + } + } +} \ No newline at end of file diff --git a/homeassistant/components/solaredge/const.py b/homeassistant/components/solaredge/const.py index 6fec88c42d5..811e4144d0a 100644 --- a/homeassistant/components/solaredge/const.py +++ b/homeassistant/components/solaredge/const.py @@ -10,12 +10,13 @@ CONF_SITE_ID = "site_id" DEFAULT_NAME = "SolarEdge" -OVERVIEW_UPDATE_DELAY = timedelta(minutes=10) +OVERVIEW_UPDATE_DELAY = timedelta(minutes=15) DETAILS_UPDATE_DELAY = timedelta(hours=12) INVENTORY_UPDATE_DELAY = timedelta(hours=12) -POWER_FLOW_UPDATE_DELAY = timedelta(minutes=10) +POWER_FLOW_UPDATE_DELAY = timedelta(minutes=15) +ENERGY_DETAILS_DELAY = timedelta(minutes=15) -SCAN_INTERVAL = timedelta(minutes=10) +SCAN_INTERVAL = timedelta(minutes=15) # Supported overview sensor types: # Key: ['json_key', 'name', unit, icon, default] @@ -65,4 +66,15 @@ SENSOR_TYPES = { "solar_power": ["PV", "Solar Power", None, "mdi:solar-power", False], "grid_power": ["GRID", "Grid Power", None, "mdi:power-plug", False], "storage_power": ["STORAGE", "Storage Power", None, "mdi:car-battery", False], + "purchased_power": ["Purchased", "Imported Power", None, "mdi:flash", False], + "production_power": ["Production", "Production Power", None, "mdi:flash", False], + "consumption_power": ["Consumption", "Cosumption Power", None, "mdi:flash", False], + "selfconsumption_power": [ + "SelfConsumption", + "SelfConsumption Power", + None, + "mdi:flash", + False, + ], + "feedin_power": ["FeedIn", "Exported Power", None, "mdi:flash", False], } diff --git a/homeassistant/components/solaredge/sensor.py b/homeassistant/components/solaredge/sensor.py index f2464489627..469f8ef64a2 100644 --- a/homeassistant/components/solaredge/sensor.py +++ b/homeassistant/components/solaredge/sensor.py @@ -1,4 +1,5 @@ """Support for SolarEdge Monitoring API.""" +from datetime import date, datetime import logging from requests.exceptions import ConnectTimeout, HTTPError @@ -12,6 +13,7 @@ from homeassistant.util import Throttle from .const import ( CONF_SITE_ID, DETAILS_UPDATE_DELAY, + ENERGY_DETAILS_DELAY, INVENTORY_UPDATE_DELAY, OVERVIEW_UPDATE_DELAY, POWER_FLOW_UPDATE_DELAY, @@ -62,6 +64,7 @@ class SolarEdgeSensorFactory: overview = SolarEdgeOverviewDataService(api, site_id) inventory = SolarEdgeInventoryDataService(api, site_id) flow = SolarEdgePowerFlowDataService(api, site_id) + energy = SolarEdgeEnergyDetailsService(api, site_id) self.services = {"site_details": (SolarEdgeDetailsSensor, details)} @@ -80,6 +83,15 @@ class SolarEdgeSensorFactory: 
for key in ["power_consumption", "solar_power", "grid_power", "storage_power"]: self.services[key] = (SolarEdgePowerFlowSensor, flow) + for key in [ + "purchased_power", + "production_power", + "feedin_power", + "consumption_power", + "selfconsumption_power", + ]: + self.services[key] = (SolarEdgeEnergyDetailsSensor, energy) + def create_sensor(self, sensor_key): """Create and return a sensor based on the sensor_key.""" sensor_class, service = self.services[sensor_key] @@ -181,6 +193,30 @@ class SolarEdgeInventorySensor(SolarEdgeSensor): self._attributes = self.data_service.attributes[self._json_key] +class SolarEdgeEnergyDetailsSensor(SolarEdgeSensor): + """Representation of an SolarEdge Monitoring API power flow sensor.""" + + def __init__(self, platform_name, sensor_key, data_service): + """Initialize the power flow sensor.""" + super().__init__(platform_name, sensor_key, data_service) + + self._json_key = SENSOR_TYPES[self.sensor_key][0] + + self._attributes = {} + + @property + def device_state_attributes(self): + """Return the state attributes.""" + return self._attributes + + def update(self): + """Get the latest inventory data and update state and attributes.""" + self.data_service.update() + self._state = self.data_service.data.get(self._json_key) + self._attributes = self.data_service.attributes.get(self._json_key) + self._unit_of_measurement = self.data_service.unit + + class SolarEdgePowerFlowSensor(SolarEdgeSensor): """Representation of an SolarEdge Monitoring API power flow sensor.""" @@ -319,6 +355,68 @@ class SolarEdgeInventoryDataService(SolarEdgeDataService): _LOGGER.debug("Updated SolarEdge inventory: %s, %s", self.data, self.attributes) +class SolarEdgeEnergyDetailsService(SolarEdgeDataService): + """Get and update the latest power flow data.""" + + def __init__(self, api, site_id): + """Initialize the power flow data service.""" + super().__init__(api, site_id) + + self.unit = None + + @Throttle(ENERGY_DETAILS_DELAY) + def update(self): + """Update the data from the SolarEdge Monitoring API.""" + try: + now = datetime.now() + today = date.today() + midnight = datetime.combine(today, datetime.min.time()) + data = self.api.get_energy_details( + self.site_id, + midnight, + now.strftime("%Y-%m-%d %H:%M:%S"), + meters=None, + time_unit="DAY", + ) + energy_details = data["energyDetails"] + except KeyError: + _LOGGER.error("Missing power flow data, skipping update") + return + except (ConnectTimeout, HTTPError): + _LOGGER.error("Could not retrieve data, skipping update") + return + + if "meters" not in energy_details: + _LOGGER.debug( + "Missing meters in energy details data. 
Assuming site does not have any" + ) + return + + self.data = {} + self.attributes = {} + self.unit = energy_details["unit"] + meters = energy_details["meters"] + + for entity in meters: + for key, data in entity.items(): + if key == "type" and data in [ + "Production", + "SelfConsumption", + "FeedIn", + "Purchased", + "Consumption", + ]: + energy_type = data + if key == "values": + for row in data: + self.data[energy_type] = row["value"] + self.attributes[energy_type] = {"date": row["date"]} + + _LOGGER.debug( + "Updated SolarEdge energy details: %s, %s", self.data, self.attributes + ) + + class SolarEdgePowerFlowDataService(SolarEdgeDataService): """Get and update the latest power flow data.""" diff --git a/homeassistant/components/solarlog/translations/nn.json b/homeassistant/components/solarlog/translations/nn.json deleted file mode 100644 index 7ea0915a124..00000000000 --- a/homeassistant/components/solarlog/translations/nn.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "title": "Solar-Log" -} \ No newline at end of file diff --git a/homeassistant/components/solarlog/translations/no.json b/homeassistant/components/solarlog/translations/no.json index 5b7f2e43e3b..5c0bc0524eb 100644 --- a/homeassistant/components/solarlog/translations/no.json +++ b/homeassistant/components/solarlog/translations/no.json @@ -10,7 +10,7 @@ "step": { "user": { "data": { - "host": "Vertsnavnet eller ip-adressen til din Solar-Log-enhet", + "host": "Vert", "name": "Prefikset som skal brukes til dine Solar-Log sensorer" }, "title": "Definer din Solar-Log tilkobling" diff --git a/homeassistant/components/solax/manifest.json b/homeassistant/components/solax/manifest.json index 296c1b73ba9..5d8590389d8 100644 --- a/homeassistant/components/solax/manifest.json +++ b/homeassistant/components/solax/manifest.json @@ -2,6 +2,6 @@ "domain": "solax", "name": "SolaX Power", "documentation": "https://www.home-assistant.io/integrations/solax", - "requirements": ["solax==0.2.2"], + "requirements": ["solax==0.2.3"], "codeowners": ["@squishykid"] } diff --git a/homeassistant/components/soma/translations/nn.json b/homeassistant/components/soma/translations/nn.json deleted file mode 100644 index 961b47595e7..00000000000 --- a/homeassistant/components/soma/translations/nn.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "title": "Soma" -} \ No newline at end of file diff --git a/homeassistant/components/somfy/translations/nn.json b/homeassistant/components/somfy/translations/nn.json deleted file mode 100644 index ea066156c71..00000000000 --- a/homeassistant/components/somfy/translations/nn.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "title": "Somfy" -} \ No newline at end of file diff --git a/homeassistant/components/songpal/translations/da.json b/homeassistant/components/songpal/translations/da.json deleted file mode 100644 index 00c41b04a2e..00000000000 --- a/homeassistant/components/songpal/translations/da.json +++ /dev/null @@ -1,12 +0,0 @@ -{ - "config": { - "step": { - "init": { - "title": "Sony Songpal" - }, - "user": { - "title": "Sony Songpal" - } - } - } -} \ No newline at end of file diff --git a/homeassistant/components/sonos/media_player.py b/homeassistant/components/sonos/media_player.py index 15a168047e9..4e4f7338a10 100644 --- a/homeassistant/components/sonos/media_player.py +++ b/homeassistant/components/sonos/media_player.py @@ -87,6 +87,7 @@ SERVICE_CLEAR_TIMER = "clear_sleep_timer" SERVICE_UPDATE_ALARM = "update_alarm" SERVICE_SET_OPTION = "set_option" SERVICE_PLAY_QUEUE = "play_queue" +SERVICE_REMOVE_FROM_QUEUE = 
"remove_from_queue" ATTR_SLEEP_TIME = "sleep_time" ATTR_ALARM_ID = "alarm_id" @@ -98,6 +99,7 @@ ATTR_WITH_GROUP = "with_group" ATTR_NIGHT_SOUND = "night_sound" ATTR_SPEECH_ENHANCE = "speech_enhance" ATTR_QUEUE_POSITION = "queue_position" +ATTR_STATUS_LIGHT = "status_light" UNAVAILABLE_VALUES = {"", "NOT_IMPLEMENTED", None} @@ -280,6 +282,7 @@ async def async_setup_entry(hass, config_entry, async_add_entities): { vol.Optional(ATTR_NIGHT_SOUND): cv.boolean, vol.Optional(ATTR_SPEECH_ENHANCE): cv.boolean, + vol.Optional(ATTR_STATUS_LIGHT): cv.boolean, }, "set_option", ) @@ -290,6 +293,12 @@ async def async_setup_entry(hass, config_entry, async_add_entities): "play_queue", ) + platform.async_register_entity_service( + SERVICE_REMOVE_FROM_QUEUE, + {vol.Optional(ATTR_QUEUE_POSITION): cv.positive_int}, + "remove_from_queue", + ) + class _ProcessSonosEventQueue: """Queue like object for dispatching sonos events.""" @@ -382,6 +391,8 @@ class SonosEntity(MediaPlayerEntity): self._media_artist = None self._media_album_name = None self._media_title = None + self._is_playing_local_queue = None + self._queue_position = None self._night_sound = None self._speech_enhance = None self._source_name = None @@ -592,6 +603,8 @@ class SonosEntity(MediaPlayerEntity): update_position = new_status != self._status self._status = new_status + self._is_playing_local_queue = self.soco.is_playing_local_queue + if self.soco.is_playing_tv: self.update_media_linein(SOURCE_TV) elif self.soco.is_playing_line_in: @@ -690,6 +703,8 @@ class SonosEntity(MediaPlayerEntity): self._media_image_url = track_info.get("album_art") + self._queue_position = int(track_info.get("playlist_position")) - 1 + def update_volume(self, event=None): """Update information about currently volume settings.""" if event: @@ -860,6 +875,15 @@ class SonosEntity(MediaPlayerEntity): """Title of current playing media.""" return self._media_title or None + @property + @soco_coordinator + def queue_position(self): + """If playing local queue return the position in the queue else None.""" + if self._is_playing_local_queue: + return self._queue_position + + return None + @property @soco_coordinator def source(self): @@ -929,7 +953,7 @@ class SonosEntity(MediaPlayerEntity): sources += [SOURCE_LINEIN] elif "PLAYBAR" in model: sources += [SOURCE_LINEIN, SOURCE_TV] - elif "BEAM" in model: + elif "BEAM" in model or "PLAYBASE" in model: sources += [SOURCE_TV] return sources @@ -1220,7 +1244,7 @@ class SonosEntity(MediaPlayerEntity): alarm.save() @soco_error() - def set_option(self, night_sound=None, speech_enhance=None): + def set_option(self, night_sound=None, speech_enhance=None, status_light=None): """Modify playback options.""" if night_sound is not None and self._night_sound is not None: self.soco.night_mode = night_sound @@ -1228,11 +1252,20 @@ class SonosEntity(MediaPlayerEntity): if speech_enhance is not None and self._speech_enhance is not None: self.soco.dialog_mode = speech_enhance + if status_light is not None: + self.soco.status_light = status_light + @soco_error() def play_queue(self, queue_position=0): """Start playing the queue.""" self.soco.play_from_queue(queue_position) + @soco_error() + @soco_coordinator + def remove_from_queue(self, queue_position=0): + """Remove item from the queue.""" + self.soco.remove_from_queue(queue_position) + @property def device_state_attributes(self): """Return entity specific state attributes.""" @@ -1244,4 +1277,7 @@ class SonosEntity(MediaPlayerEntity): if self._speech_enhance is not None: 
attributes[ATTR_SPEECH_ENHANCE] = self._speech_enhance + if self.queue_position is not None: + attributes[ATTR_QUEUE_POSITION] = self.queue_position + return attributes diff --git a/homeassistant/components/sonos/services.yaml b/homeassistant/components/sonos/services.yaml index 37effe7d6ab..7a8e2195bf4 100644 --- a/homeassistant/components/sonos/services.yaml +++ b/homeassistant/components/sonos/services.yaml @@ -64,6 +64,9 @@ set_option: speech_enhance: description: Enable Speech Enhancement mode example: "true" + status_light: + description: Enable Status (LED) Light + example: "true" play_queue: description: Starts playing the queue from the first item. @@ -74,3 +77,13 @@ play_queue: queue_position: description: Position of the song in the queue to start playing from. example: "0" + +remove_from_queue: + description: Removes an item from the queue. + fields: + entity_id: + description: Name(s) of entities that will remove an item. + example: "media_player.living_room_sonos" + queue_position: + description: Position in the queue to remove. + example: "0" diff --git a/homeassistant/components/sonos/translations/et.json b/homeassistant/components/sonos/translations/et.json deleted file mode 100644 index 0e652624ef6..00000000000 --- a/homeassistant/components/sonos/translations/et.json +++ /dev/null @@ -1,9 +0,0 @@ -{ - "config": { - "step": { - "confirm": { - "title": "" - } - } - } -} \ No newline at end of file diff --git a/homeassistant/components/sonos/translations/hr.json b/homeassistant/components/sonos/translations/hr.json deleted file mode 100644 index 213ae1e4bce..00000000000 --- a/homeassistant/components/sonos/translations/hr.json +++ /dev/null @@ -1,9 +0,0 @@ -{ - "config": { - "step": { - "confirm": { - "title": "Sonos" - } - } - } -} \ No newline at end of file diff --git a/homeassistant/components/speedtestdotnet/__init__.py b/homeassistant/components/speedtestdotnet/__init__.py index afccc71d285..3cad15a0967 100644 --- a/homeassistant/components/speedtestdotnet/__init__.py +++ b/homeassistant/components/speedtestdotnet/__init__.py @@ -5,30 +5,32 @@ import logging import speedtest import voluptuous as vol -from homeassistant.components.sensor import DOMAIN as SENSOR_DOMAIN +from homeassistant.config_entries import SOURCE_IMPORT from homeassistant.const import CONF_MONITORED_CONDITIONS, CONF_SCAN_INTERVAL +from homeassistant.exceptions import ConfigEntryNotReady import homeassistant.helpers.config_validation as cv -from homeassistant.helpers.discovery import async_load_platform -from homeassistant.helpers.dispatcher import dispatcher_send -from homeassistant.helpers.event import async_track_time_interval +from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed -from .const import DATA_UPDATED, DOMAIN, SENSOR_TYPES +from .const import ( + CONF_MANUAL, + CONF_SERVER_ID, + DEFAULT_SCAN_INTERVAL, + DEFAULT_SERVER, + DOMAIN, + SENSOR_TYPES, + SPEED_TEST_SERVICE, +) _LOGGER = logging.getLogger(__name__) -CONF_SERVER_ID = "server_id" -CONF_MANUAL = "manual" - -DEFAULT_INTERVAL = timedelta(hours=1) - CONFIG_SCHEMA = vol.Schema( { DOMAIN: vol.Schema( { vol.Optional(CONF_SERVER_ID): cv.positive_int, - vol.Optional(CONF_SCAN_INTERVAL, default=DEFAULT_INTERVAL): vol.All( - cv.time_period, cv.positive_timedelta - ), + vol.Optional( + CONF_SCAN_INTERVAL, default=timedelta(minutes=DEFAULT_SCAN_INTERVAL) + ): vol.All(cv.time_period, cv.positive_timedelta), vol.Optional(CONF_MANUAL, default=False): cv.boolean, vol.Optional( CONF_MONITORED_CONDITIONS, 
default=list(SENSOR_TYPES) @@ -40,46 +42,147 @@ CONFIG_SCHEMA = vol.Schema( ) +def server_id_valid(server_id): + """Check if server_id is valid.""" + try: + api = speedtest.Speedtest() + api.get_servers([int(server_id)]) + except (speedtest.ConfigRetrievalError, speedtest.NoMatchedServers): + return False + + return True + + async def async_setup(hass, config): + """Import integration from config.""" + + if DOMAIN in config: + hass.async_create_task( + hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_IMPORT}, data=config[DOMAIN] + ) + ) + return True + + +async def async_setup_entry(hass, config_entry): """Set up the Speedtest.net component.""" - conf = config[DOMAIN] - data = hass.data[DOMAIN] = SpeedtestData(hass, conf.get(CONF_SERVER_ID)) + coordinator = SpeedTestDataCoordinator(hass, config_entry) + await coordinator.async_setup() - if not conf[CONF_MANUAL]: - async_track_time_interval(hass, data.update, conf[CONF_SCAN_INTERVAL]) + await coordinator.async_refresh() + if not coordinator.last_update_success: + raise ConfigEntryNotReady - def update(call=None): - """Service call to manually update the data.""" - data.update() - - hass.services.async_register(DOMAIN, "speedtest", update) + hass.data[DOMAIN] = coordinator hass.async_create_task( - async_load_platform( - hass, SENSOR_DOMAIN, DOMAIN, conf[CONF_MONITORED_CONDITIONS], config - ) + hass.config_entries.async_forward_entry_setup(config_entry, "sensor") ) return True -class SpeedtestData: +async def async_unload_entry(hass, config_entry): + """Unload SpeedTest Entry from config_entry.""" + hass.services.async_remove(DOMAIN, SPEED_TEST_SERVICE) + + await hass.config_entries.async_forward_entry_unload(config_entry, "sensor") + + hass.data.pop(DOMAIN) + + return True + + +class SpeedTestDataCoordinator(DataUpdateCoordinator): """Get the latest data from speedtest.net.""" - def __init__(self, hass, server_id): + def __init__(self, hass, config_entry): """Initialize the data object.""" - self.data = None - self._hass = hass - self._servers = [] if server_id is None else [server_id] + self.hass = hass + self.config_entry = config_entry + self.api = None + self.servers = {} + super().__init__( + self.hass, + _LOGGER, + name=DOMAIN, + update_method=self.async_update, + update_interval=timedelta( + minutes=self.config_entry.options.get( + CONF_SCAN_INTERVAL, DEFAULT_SCAN_INTERVAL + ) + ), + ) - def update(self, now=None): + def update_data(self): """Get the latest data from speedtest.net.""" + server_list = self.api.get_servers() - _LOGGER.debug("Executing speedtest.net speed test") - speed = speedtest.Speedtest() - speed.get_servers(self._servers) - speed.get_best_server() - speed.download() - speed.upload() - self.data = speed.results.dict() - dispatcher_send(self._hass, DATA_UPDATED) + self.servers[DEFAULT_SERVER] = {} + for server in sorted( + server_list.values(), key=lambda server: server[0]["country"] + ): + self.servers[f"{server[0]['country']} - {server[0]['sponsor']}"] = server[0] + + if self.config_entry.options.get(CONF_SERVER_ID): + server_id = self.config_entry.options.get(CONF_SERVER_ID) + self.api.closest.clear() + self.api.get_servers(servers=[server_id]) + _LOGGER.debug( + "Executing speedtest.net speed test with server_id: %s", self.api.best["id"] + ) + self.api.get_best_server() + self.api.download() + self.api.upload() + return self.api.results.dict() + + async def async_update(self, *_): + """Update Speedtest data.""" + try: + return await self.hass.async_add_executor_job(self.update_data) + 
except (speedtest.ConfigRetrievalError, speedtest.NoMatchedServers): + raise UpdateFailed + + async def async_set_options(self): + """Set options for entry.""" + if not self.config_entry.options: + data = {**self.config_entry.data} + options = { + CONF_SCAN_INTERVAL: data.pop(CONF_SCAN_INTERVAL, DEFAULT_SCAN_INTERVAL), + CONF_MANUAL: data.pop(CONF_MANUAL, False), + CONF_SERVER_ID: str(data.pop(CONF_SERVER_ID, "")), + } + self.hass.config_entries.async_update_entry( + self.config_entry, data=data, options=options + ) + + async def async_setup(self): + """Set up SpeedTest.""" + try: + self.api = await self.hass.async_add_executor_job(speedtest.Speedtest) + except speedtest.ConfigRetrievalError: + raise ConfigEntryNotReady + + async def request_update(call): + """Request update.""" + await self.async_request_refresh() + + await self.async_set_options() + + self.hass.services.async_register(DOMAIN, SPEED_TEST_SERVICE, request_update) + + self.config_entry.add_update_listener(options_updated_listener) + + +async def options_updated_listener(hass, entry): + """Handle options update.""" + if not entry.options[CONF_MANUAL]: + hass.data[DOMAIN].update_interval = timedelta( + minutes=entry.options[CONF_SCAN_INTERVAL] + ) + await hass.data[DOMAIN].async_request_refresh() + return + # set the update interval to a very long time + # if the user wants to disable auto update + hass.data[DOMAIN].update_interval = timedelta(days=7) diff --git a/homeassistant/components/speedtestdotnet/config_flow.py b/homeassistant/components/speedtestdotnet/config_flow.py new file mode 100644 index 00000000000..1d8f3cf189b --- /dev/null +++ b/homeassistant/components/speedtestdotnet/config_flow.py @@ -0,0 +1,117 @@ +"""Config flow for Speedtest.net.""" +import logging + +import voluptuous as vol + +from homeassistant import config_entries +from homeassistant.const import CONF_MONITORED_CONDITIONS, CONF_SCAN_INTERVAL +from homeassistant.core import callback + +from . 
import server_id_valid +from .const import ( + CONF_MANUAL, + CONF_SERVER_ID, + CONF_SERVER_NAME, + DEFAULT_NAME, + DEFAULT_SCAN_INTERVAL, + DEFAULT_SERVER, +) +from .const import DOMAIN # pylint: disable=unused-import + +_LOGGER = logging.getLogger(__name__) + + +class SpeedTestFlowHandler(config_entries.ConfigFlow, domain=DOMAIN): + """Handle Speedtest.net config flow.""" + + VERSION = 1 + CONNECTION_CLASS = config_entries.CONN_CLASS_LOCAL_POLL + + @staticmethod + @callback + def async_get_options_flow(config_entry): + """Get the options flow for this handler.""" + return SpeedTestOptionsFlowHandler(config_entry) + + async def async_step_user(self, user_input=None): + """Handle a flow initialized by the user.""" + if self._async_current_entries(): + return self.async_abort(reason="one_instance_allowed") + + if user_input is None: + return self.async_show_form(step_id="user") + + return self.async_create_entry(title=DEFAULT_NAME, data=user_input) + + async def async_step_import(self, import_config): + """Import from config.""" + if ( + CONF_SERVER_ID in import_config + and not await self.hass.async_add_executor_job( + server_id_valid, import_config[CONF_SERVER_ID] + ) + ): + return self.async_abort(reason="wrong_server_id") + + import_config[CONF_SCAN_INTERVAL] = int( + import_config[CONF_SCAN_INTERVAL].seconds / 60 + ) + import_config.pop(CONF_MONITORED_CONDITIONS) + + return await self.async_step_user(user_input=import_config) + + +class SpeedTestOptionsFlowHandler(config_entries.OptionsFlow): + """Handle SpeedTest options.""" + + def __init__(self, config_entry): + """Initialize options flow.""" + self.config_entry = config_entry + self._servers = {} + + async def async_step_init(self, user_input=None): + """Manage the options.""" + errors = {} + + if user_input is not None: + server_name = user_input[CONF_SERVER_NAME] + if server_name != "*Auto Detect": + server_id = self._servers[server_name]["id"] + user_input[CONF_SERVER_ID] = server_id + else: + user_input[CONF_SERVER_ID] = None + + return self.async_create_entry(title="", data=user_input) + + self._servers = self.hass.data[DOMAIN].servers + + server_name = DEFAULT_SERVER + if self.config_entry.options.get( + CONF_SERVER_ID + ) and not self.config_entry.options.get(CONF_SERVER_NAME): + server = [ + key + for (key, value) in self._servers.items() + if value.get("id") == self.config_entry.options[CONF_SERVER_ID] + ] + server_name = server[0] if server else "" + + options = { + vol.Optional( + CONF_SERVER_NAME, + default=self.config_entry.options.get(CONF_SERVER_NAME, server_name), + ): vol.In(self._servers.keys()), + vol.Optional( + CONF_SCAN_INTERVAL, + default=self.config_entry.options.get( + CONF_SCAN_INTERVAL, DEFAULT_SCAN_INTERVAL + ), + ): int, + vol.Optional( + CONF_MANUAL, default=self.config_entry.options.get(CONF_MANUAL, False) + ): bool, + } + + return self.async_show_form( + step_id="init", data_schema=vol.Schema(options), errors=errors + ) diff --git a/homeassistant/components/speedtestdotnet/const.py b/homeassistant/components/speedtestdotnet/const.py index 2fed2609fb3..546c7db053b 100644 --- a/homeassistant/components/speedtestdotnet/const.py +++ b/homeassistant/components/speedtestdotnet/const.py @@ -1,8 +1,9 @@ """Consts used by Speedtest.net.""" - from homeassistant.const import DATA_RATE_MEGABITS_PER_SECOND, TIME_MILLISECONDS DOMAIN = "speedtestdotnet" + +SPEED_TEST_SERVICE = "speedtest" DATA_UPDATED = f"{DOMAIN}_data_updated" SENSOR_TYPES = { @@ -10,3 +11,22 @@ SENSOR_TYPES = { "download": ["Download", 
DATA_RATE_MEGABITS_PER_SECOND], "upload": ["Upload", DATA_RATE_MEGABITS_PER_SECOND], } + +CONF_SERVER_NAME = "server_name" +CONF_SERVER_ID = "server_id" +CONF_MANUAL = "manual" + +ATTR_BYTES_RECEIVED = "bytes_received" +ATTR_BYTES_SENT = "bytes_sent" +ATTR_SERVER_COUNTRY = "server_country" +ATTR_SERVER_ID = "server_id" +ATTR_SERVER_NAME = "server_name" + + +DEFAULT_NAME = "SpeedTest" +DEFAULT_SCAN_INTERVAL = 60 +DEFAULT_SERVER = "*Auto Detect" + +ATTRIBUTION = "Data retrieved from Speedtest.net by Ookla" + +ICON = "mdi:speedometer" diff --git a/homeassistant/components/speedtestdotnet/manifest.json b/homeassistant/components/speedtestdotnet/manifest.json index 1ba5f418fc3..d230f03f954 100644 --- a/homeassistant/components/speedtestdotnet/manifest.json +++ b/homeassistant/components/speedtestdotnet/manifest.json @@ -1,7 +1,8 @@ { "domain": "speedtestdotnet", "name": "Speedtest.net", + "config_flow": true, "documentation": "https://www.home-assistant.io/integrations/speedtestdotnet", "requirements": ["speedtest-cli==2.1.2"], - "codeowners": ["@rohankapoorcom"] + "codeowners": ["@rohankapoorcom", "@engrbm87"] } diff --git a/homeassistant/components/speedtestdotnet/sensor.py b/homeassistant/components/speedtestdotnet/sensor.py index 41db6c26930..06868dc1437 100644 --- a/homeassistant/components/speedtestdotnet/sensor.py +++ b/homeassistant/components/speedtestdotnet/sensor.py @@ -2,54 +2,67 @@ import logging from homeassistant.const import ATTR_ATTRIBUTION -from homeassistant.core import callback -from homeassistant.helpers.dispatcher import async_dispatcher_connect -from homeassistant.helpers.restore_state import RestoreEntity +from homeassistant.helpers.entity import Entity -from .const import DATA_UPDATED, DOMAIN as SPEEDTESTDOTNET_DOMAIN, SENSOR_TYPES +from .const import ( + ATTR_BYTES_RECEIVED, + ATTR_BYTES_SENT, + ATTR_SERVER_COUNTRY, + ATTR_SERVER_ID, + ATTR_SERVER_NAME, + ATTRIBUTION, + DEFAULT_NAME, + DOMAIN, + ICON, + SENSOR_TYPES, +) _LOGGER = logging.getLogger(__name__) -ATTR_BYTES_RECEIVED = "bytes_received" -ATTR_BYTES_SENT = "bytes_sent" -ATTR_SERVER_COUNTRY = "server_country" -ATTR_SERVER_HOST = "server_host" -ATTR_SERVER_ID = "server_id" -ATTR_SERVER_LATENCY = "latency" -ATTR_SERVER_NAME = "server_name" -ATTRIBUTION = "Data retrieved from Speedtest.net by Ookla" +async def async_setup_entry(hass, config_entry, async_add_entities): + """Set up the Speedtestdotnet sensors.""" -ICON = "mdi:speedometer" + speedtest_coordinator = hass.data[DOMAIN] + + entities = [] + for sensor_type in SENSOR_TYPES: + entities.append(SpeedtestSensor(speedtest_coordinator, sensor_type)) + + async_add_entities(entities) -async def async_setup_platform(hass, config, async_add_entities, discovery_info): - """Set up the Speedtest.net sensor.""" - data = hass.data[SPEEDTESTDOTNET_DOMAIN] - async_add_entities([SpeedtestSensor(data, sensor) for sensor in discovery_info]) - - -class SpeedtestSensor(RestoreEntity): +class SpeedtestSensor(Entity): """Implementation of a speedtest.net sensor.""" - def __init__(self, speedtest_data, sensor_type): + def __init__(self, coordinator, sensor_type): """Initialize the sensor.""" self._name = SENSOR_TYPES[sensor_type][0] - self.speedtest_client = speedtest_data + self.coordinator = coordinator self.type = sensor_type - self._state = None - self._data = None self._unit_of_measurement = SENSOR_TYPES[self.type][1] @property def name(self): """Return the name of the sensor.""" - return "{} {}".format("Speedtest", self._name) + return f"{DEFAULT_NAME} {self._name}" + + 
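# A brief aside on how these coordinator-backed sensors get fresh data (an
# illustrative sketch, not part of the patch): the coordinator set up in
# __init__.py registers a "speedtest" service under the "speedtestdotnet"
# domain whose handler simply awaits async_request_refresh(), so a manual run
# can be triggered from an automation action roughly like:
#
#     action:
#       - service: speedtestdotnet.speedtest
#
# Once the refresh finishes, every sensor below re-renders from
# self.coordinator.data through the listener added in async_added_to_hass().
# This also keeps the "manual" option usable, since that option only stretches
# the polling interval (to 7 days) rather than disabling updates outright.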
@property + def unique_id(self): + """Return sensor unique_id.""" + return self.type @property def state(self): """Return the state of the device.""" - return self._state + state = None + if self.type == "ping": + state = self.coordinator.data["ping"] + elif self.type == "download": + state = round(self.coordinator.data["download"] / 10 ** 6, 2) + elif self.type == "upload": + state = round(self.coordinator.data["upload"] / 10 ** 6, 2) + return state @property def unit_of_measurement(self): @@ -69,47 +82,27 @@ class SpeedtestSensor(RestoreEntity): @property def device_state_attributes(self): """Return the state attributes.""" - attributes = {ATTR_ATTRIBUTION: ATTRIBUTION} - if self._data is not None: - return attributes.update( - { - ATTR_BYTES_RECEIVED: self._data["bytes_received"], - ATTR_BYTES_SENT: self._data["bytes_sent"], - ATTR_SERVER_COUNTRY: self._data["server"]["country"], - ATTR_SERVER_ID: self._data["server"]["id"], - ATTR_SERVER_LATENCY: self._data["server"]["latency"], - ATTR_SERVER_NAME: self._data["server"]["name"], - } - ) + attributes = { + ATTR_ATTRIBUTION: ATTRIBUTION, + ATTR_SERVER_NAME: self.coordinator.data["server"]["name"], + ATTR_SERVER_COUNTRY: self.coordinator.data["server"]["country"], + ATTR_SERVER_ID: self.coordinator.data["server"]["id"], + } + if self.type == "download": + attributes[ATTR_BYTES_RECEIVED] = self.coordinator.data["bytes_received"] + + if self.type == "upload": + attributes[ATTR_BYTES_SENT] = self.coordinator.data["bytes_sent"] + return attributes async def async_added_to_hass(self): """Handle entity which will be added.""" - await super().async_added_to_hass() - state = await self.async_get_last_state() - if not state: - return - self._state = state.state self.async_on_remove( - async_dispatcher_connect( - self.hass, DATA_UPDATED, self._schedule_immediate_update - ) + self.coordinator.async_add_listener(self.async_write_ha_state) ) - def update(self): - """Get the latest data and update the states.""" - self._data = self.speedtest_client.data - if self._data is None: - return - - if self.type == "ping": - self._state = self._data["ping"] - elif self.type == "download": - self._state = round(self._data["download"] / 10 ** 6, 2) - elif self.type == "upload": - self._state = round(self._data["upload"] / 10 ** 6, 2) - - @callback - def _schedule_immediate_update(self): - self.async_schedule_update_ha_state(True) + async def async_update(self): + """Request coordinator to update data.""" + await self.coordinator.async_request_refresh() diff --git a/homeassistant/components/speedtestdotnet/strings.json b/homeassistant/components/speedtestdotnet/strings.json new file mode 100644 index 00000000000..f638c25a549 --- /dev/null +++ b/homeassistant/components/speedtestdotnet/strings.json @@ -0,0 +1,28 @@ +{ + "config": { + "step": { + "user": { + "title": "Set up SpeedTest", + "description": "Are you sure you want to set up SpeedTest?" 
+ } + }, + "abort": { + "one_instance_allowed": "Only a single instance is necessary.", + "wrong_server_id": "Server id is not valid" + } + }, + "options": { + "step": { + "init": { + "data": { + "scan_interval": "Update frequency (minutes)", + "manual": "Disable auto update", + "server_name": "Select test server" + } + } + }, + "error": { + "retrive_error": "Error retrieving servers list" + } + } +} diff --git a/homeassistant/components/speedtestdotnet/translations/ar.json b/homeassistant/components/speedtestdotnet/translations/ar.json new file mode 100644 index 00000000000..f3c2591d040 --- /dev/null +++ b/homeassistant/components/speedtestdotnet/translations/ar.json @@ -0,0 +1,22 @@ +{ + "config": { + "abort": { + "wrong_server_id": "\u0645\u0639\u0631\u0641 \u0627\u0644\u062e\u0627\u062f\u0645 \u063a\u064a\u0631 \u0635\u0627\u0644\u062d" + }, + "step": { + "user": { + "description": "\u0647\u0644 \u0623\u0646\u062a \u0645\u062a\u0623\u0643\u062f \u0645\u0646 \u0623\u0646\u0643 \u062a\u0631\u064a\u062f \u0625\u0639\u062f\u0627\u062f SpeedTest\u061f", + "title": "\u0625\u0639\u062f\u0627\u062f \u0627\u062e\u062a\u0628\u0627\u0631 \u0627\u0644\u0633\u0631\u0639\u0629" + } + } + }, + "options": { + "step": { + "init": { + "data": { + "manual": "\u062a\u0639\u0637\u064a\u0644 \u0627\u0644\u062a\u062d\u062f\u064a\u062b \u0627\u0644\u062a\u0644\u0642\u0627\u0626\u064a" + } + } + } + } +} \ No newline at end of file diff --git a/homeassistant/components/speedtestdotnet/translations/ca.json b/homeassistant/components/speedtestdotnet/translations/ca.json index 7af50becd61..2cd81f8af2e 100644 --- a/homeassistant/components/speedtestdotnet/translations/ca.json +++ b/homeassistant/components/speedtestdotnet/translations/ca.json @@ -1,9 +1,26 @@ { + "config": { + "abort": { + "one_instance_allowed": "Nom\u00e9s cal una \u00fanica inst\u00e0ncia.", + "wrong_server_id": "L'identificador del servidor no \u00e9s v\u00e0lid" + }, + "step": { + "user": { + "description": "Est\u00e0s segur que vols configurar SpeedTest?", + "title": "Configura SpeedTest" + } + } + }, "options": { + "error": { + "retrive_error": "S'ha produ\u00eft un error en recuperar la llista de servidors" + }, "step": { "init": { "data": { - "scan_interval": "Freq\u00fc\u00e8ncia d'actualitzaci\u00f3 (minuts)" + "manual": "Desactiva l'actualitzaci\u00f3 autom\u00e0tica", + "scan_interval": "Freq\u00fc\u00e8ncia d'actualitzaci\u00f3 (minuts)", + "server_name": "Selecciona el servidor de proves" + } } } } diff --git a/homeassistant/components/speedtestdotnet/translations/fr.json b/homeassistant/components/speedtestdotnet/translations/fr.json new file mode 100644 index 00000000000..68f16cea44f --- /dev/null +++ b/homeassistant/components/speedtestdotnet/translations/fr.json @@ -0,0 +1,28 @@ +{ + "config": { + "abort": { + "one_instance_allowed": "Une seule instance est n\u00e9cessaire.", + "wrong_server_id": "L'ID du serveur n'est pas valide" + }, + "step": { + "user": { + "description": "Voulez-vous vraiment configurer SpeedTest ?", + "title": "Configurer SpeedTest" + } + } + }, + "options": { + "error": { + "retrive_error": "Erreur lors de la r\u00e9cup\u00e9ration de la liste des serveurs" + }, + "step": { + "init": { + "data": { + "manual": "D\u00e9sactiver la mise \u00e0 jour automatique", + "scan_interval": "Fr\u00e9quence de mise \u00e0 jour (minutes)", + "server_name": "S\u00e9lectionner le serveur de test" + } + } + } + } +} \ No newline at end of file diff --git a/homeassistant/components/speedtestdotnet/translations/lb.json
b/homeassistant/components/speedtestdotnet/translations/lb.json new file mode 100644 index 00000000000..9098d499a14 --- /dev/null +++ b/homeassistant/components/speedtestdotnet/translations/lb.json @@ -0,0 +1,18 @@ +{ + "config": { + "abort": { + "one_instance_allowed": "N\u00ebmmen eng eenzeg Instanz ass n\u00e9ideg.", + "wrong_server_id": "Server ID ass ong\u00eblteg" + } + }, + "options": { + "step": { + "init": { + "data": { + "scan_interval": "Intervalle vun de Mise \u00e0 jour (Minutten)", + "server_name": "Test Server auswielen" + } + } + } + } +} \ No newline at end of file diff --git a/homeassistant/components/spotify/translations/lv.json b/homeassistant/components/spotify/translations/lv.json deleted file mode 100644 index 01b2b7a38bd..00000000000 --- a/homeassistant/components/spotify/translations/lv.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "title": "Spotify" -} \ No newline at end of file diff --git a/homeassistant/components/squeezebox/__init__.py b/homeassistant/components/squeezebox/__init__.py index 5250a6dc267..e298bee7b07 100644 --- a/homeassistant/components/squeezebox/__init__.py +++ b/homeassistant/components/squeezebox/__init__.py @@ -1 +1,42 @@ -"""The squeezebox component.""" +"""The Logitech Squeezebox integration.""" + +import logging + +from homeassistant.components.media_player import DOMAIN as MP_DOMAIN +from homeassistant.config_entries import ConfigEntry +from homeassistant.core import HomeAssistant + +from .const import DISCOVERY_TASK, DOMAIN, PLAYER_DISCOVERY_UNSUB + +_LOGGER = logging.getLogger(__name__) + + +async def async_setup(hass: HomeAssistant, config: dict): + """Set up the Logitech Squeezebox component.""" + return True + + +async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry): + """Set up Logitech Squeezebox from a config entry.""" + hass.async_create_task( + hass.config_entries.async_forward_entry_setup(entry, MP_DOMAIN) + ) + return True + + +async def async_unload_entry(hass, entry): + """Unload a config entry.""" + # Stop player discovery task for this config entry. + hass.data[DOMAIN][entry.entry_id][PLAYER_DISCOVERY_UNSUB]() + + # Remove stored data for this config entry + hass.data[DOMAIN].pop(entry.entry_id) + + # Stop server discovery task if this is the last config entry. 
+ current_entries = hass.config_entries.async_entries(DOMAIN) + if len(current_entries) == 1 and current_entries[0] == entry: + _LOGGER.debug("Stopping server discovery task") + hass.data[DOMAIN][DISCOVERY_TASK].cancel() + hass.data[DOMAIN].pop(DISCOVERY_TASK) + + return await hass.config_entries.async_forward_entry_unload(entry, MP_DOMAIN) diff --git a/homeassistant/components/squeezebox/config_flow.py b/homeassistant/components/squeezebox/config_flow.py new file mode 100644 index 00000000000..f5ed6073104 --- /dev/null +++ b/homeassistant/components/squeezebox/config_flow.py @@ -0,0 +1,188 @@ +"""Config flow for Logitech Squeezebox integration.""" +import asyncio +import logging + +from pysqueezebox import Server, async_discover +import voluptuous as vol + +from homeassistant import config_entries +from homeassistant.const import ( + CONF_HOST, + CONF_PASSWORD, + CONF_PORT, + CONF_USERNAME, + HTTP_UNAUTHORIZED, +) +from homeassistant.helpers.aiohttp_client import async_get_clientsession + +# pylint: disable=unused-import +from .const import DEFAULT_PORT, DOMAIN + +_LOGGER = logging.getLogger(__name__) + +TIMEOUT = 5 + + +def _base_schema(discovery_info=None): + """Generate base schema.""" + base_schema = {} + if discovery_info and CONF_HOST in discovery_info: + base_schema.update( + { + vol.Required( + CONF_HOST, + description={"suggested_value": discovery_info[CONF_HOST]}, + ): str, + } + ) + else: + base_schema.update({vol.Required(CONF_HOST): str}) + + if discovery_info and CONF_PORT in discovery_info: + base_schema.update( + { + vol.Required( + CONF_PORT, + default=DEFAULT_PORT, + description={"suggested_value": discovery_info[CONF_PORT]}, + ): int, + } + ) + else: + base_schema.update({vol.Required(CONF_PORT, default=DEFAULT_PORT): int}) + base_schema.update( + {vol.Optional(CONF_USERNAME): str, vol.Optional(CONF_PASSWORD): str} + ) + return vol.Schema(base_schema) + + +class SqueezeboxConfigFlow(config_entries.ConfigFlow, domain=DOMAIN): + """Handle a config flow for Logitech Squeezebox.""" + + VERSION = 1 + CONNECTION_CLASS = config_entries.CONN_CLASS_LOCAL_POLL + + def __init__(self): + """Initialize an instance of the squeezebox config flow.""" + self.data_schema = _base_schema() + self.discovery_info = None + + async def _discover(self, uuid=None): + """Discover an unconfigured LMS server.""" + self.discovery_info = None + discovery_event = asyncio.Event() + + def _discovery_callback(server): + if server.uuid: + # ignore already configured uuids + for entry in self._async_current_entries(): + if entry.unique_id == server.uuid: + return + self.discovery_info = { + CONF_HOST: server.host, + CONF_PORT: server.port, + "uuid": server.uuid, + } + _LOGGER.debug("Discovered server: %s", self.discovery_info) + discovery_event.set() + + discovery_task = self.hass.async_create_task( + async_discover(_discovery_callback) + ) + + await discovery_event.wait() + discovery_task.cancel() # stop searching as soon as we find server + + # update with suggested values from discovery + self.data_schema = _base_schema(self.discovery_info) + + async def _validate_input(self, data): + """ + Validate the user input allows us to connect. + + Retrieve unique id and abort if already configured. 
+ """ + server = Server( + async_get_clientsession(self.hass), + data[CONF_HOST], + data[CONF_PORT], + data.get(CONF_USERNAME), + data.get(CONF_PASSWORD), + ) + + try: + status = await server.async_query("serverstatus") + if not status: + if server.http_status == HTTP_UNAUTHORIZED: + return "invalid_auth" + return "cannot_connect" + except Exception: # pylint: disable=broad-except + return "unknown" + + if "uuid" in status: + await self.async_set_unique_id(status["uuid"]) + self._abort_if_unique_id_configured() + + async def async_step_user(self, user_input=None): + """Handle a flow initialized by the user.""" + errors = {} + if user_input and CONF_HOST in user_input: + # update with host provided by user + self.data_schema = _base_schema(user_input) + return await self.async_step_edit() + + # no host specified, see if we can discover an unconfigured LMS server + try: + await asyncio.wait_for(self._discover(), timeout=TIMEOUT) + return await self.async_step_edit() + except asyncio.TimeoutError: + errors["base"] = "no_server_found" + + # display the form + return self.async_show_form( + step_id="user", + data_schema=vol.Schema({vol.Optional(CONF_HOST): str}), + errors=errors, + ) + + async def async_step_edit(self, user_input=None): + """Edit a discovered or manually inputted server.""" + errors = {} + if user_input: + error = await self._validate_input(user_input) + if not error: + return self.async_create_entry( + title=user_input[CONF_HOST], data=user_input + ) + errors["base"] = error + + return self.async_show_form( + step_id="edit", data_schema=self.data_schema, errors=errors + ) + + async def async_step_import(self, config): + """Import a config flow from configuration.""" + error = await self._validate_input(config) + if error: + return self.async_abort(reason=error) + return self.async_create_entry(title=config[CONF_HOST], data=config) + + async def async_step_discovery(self, discovery_info): + """Handle discovery.""" + _LOGGER.debug("Reached discovery flow with info: %s", discovery_info) + if "uuid" in discovery_info: + await self.async_set_unique_id(discovery_info.pop("uuid")) + self._abort_if_unique_id_configured() + else: + # attempt to connect to server and determine uuid. 
will fail if password required + error = await self._validate_input(discovery_info) + if error: + await self._async_handle_discovery_without_unique_id() + + # update schema with suggested values from discovery + self.data_schema = _base_schema(discovery_info) + + # pylint: disable=no-member # https://github.com/PyCQA/pylint/issues/3167 + self.context.update({"title_placeholders": {"host": discovery_info[CONF_HOST]}}) + + return await self.async_step_edit() diff --git a/homeassistant/components/squeezebox/const.py b/homeassistant/components/squeezebox/const.py index e7e52fe2d80..4664bbeaf43 100644 --- a/homeassistant/components/squeezebox/const.py +++ b/homeassistant/components/squeezebox/const.py @@ -1,10 +1,7 @@ """Constants for the Squeezebox component.""" -from homeassistant.const import STATE_IDLE, STATE_PAUSED, STATE_PLAYING - DOMAIN = "squeezebox" -SERVICE_CALL_METHOD = "call_method" -SQUEEZEBOX_MODE = { - "pause": STATE_PAUSED, - "play": STATE_PLAYING, - "stop": STATE_IDLE, -} +ENTRY_PLAYERS = "entry_players" +KNOWN_PLAYERS = "known_players" +PLAYER_DISCOVERY_UNSUB = "player_discovery_unsub" +DISCOVERY_TASK = "discovery_task" +DEFAULT_PORT = 9000 diff --git a/homeassistant/components/squeezebox/manifest.json b/homeassistant/components/squeezebox/manifest.json index 98456de67b5..b682887779b 100644 --- a/homeassistant/components/squeezebox/manifest.json +++ b/homeassistant/components/squeezebox/manifest.json @@ -2,6 +2,11 @@ "domain": "squeezebox", "name": "Logitech Squeezebox", "documentation": "https://www.home-assistant.io/integrations/squeezebox", - "codeowners": ["@rajlaud"], - "requirements": ["pysqueezebox==0.2.1"] + "codeowners": [ + "@rajlaud" + ], + "requirements": [ + "pysqueezebox==0.2.4" + ], + "config_flow": true } diff --git a/homeassistant/components/squeezebox/media_player.py b/homeassistant/components/squeezebox/media_player.py index 7194959d990..74dfc42f210 100644 --- a/homeassistant/components/squeezebox/media_player.py +++ b/homeassistant/components/squeezebox/media_player.py @@ -1,10 +1,11 @@ """Support for interfacing to the Logitech SqueezeBox API.""" +import asyncio import logging -import socket -from pysqueezebox import Server +from pysqueezebox import Server, async_discover import voluptuous as vol +from homeassistant import config_entries from homeassistant.components.media_player import PLATFORM_SCHEMA, MediaPlayerEntity from homeassistant.components.media_player.const import ( ATTR_MEDIA_ENQUEUE, @@ -22,20 +23,35 @@ from homeassistant.components.media_player.const import ( SUPPORT_VOLUME_MUTE, SUPPORT_VOLUME_SET, ) +from homeassistant.config_entries import SOURCE_DISCOVERY from homeassistant.const import ( ATTR_COMMAND, CONF_HOST, CONF_PASSWORD, CONF_PORT, CONF_USERNAME, + EVENT_HOMEASSISTANT_START, + STATE_IDLE, STATE_OFF, + STATE_PAUSED, + STATE_PLAYING, ) -from homeassistant.exceptions import PlatformNotReady +from homeassistant.core import callback from homeassistant.helpers import config_validation as cv, entity_platform from homeassistant.helpers.aiohttp_client import async_get_clientsession +from homeassistant.helpers.dispatcher import ( + async_dispatcher_connect, + async_dispatcher_send, +) from homeassistant.util.dt import utcnow -from .const import SQUEEZEBOX_MODE +from .const import ( + DEFAULT_PORT, + DISCOVERY_TASK, + DOMAIN, + KNOWN_PLAYERS, + PLAYER_DISCOVERY_UNSUB, +) SERVICE_CALL_METHOD = "call_method" SERVICE_CALL_QUERY = "call_query" @@ -45,10 +61,11 @@ SERVICE_UNSYNC = "unsync" ATTR_QUERY_RESULT = "query_result" ATTR_SYNC_GROUP = 
"sync_group" +SIGNAL_PLAYER_REDISCOVERED = "squeezebox_player_rediscovered" + _LOGGER = logging.getLogger(__name__) -DEFAULT_PORT = 9000 -TIMEOUT = 10 +DISCOVERY_INTERVAL = 60 SUPPORT_SQUEEZEBOX = ( SUPPORT_PAUSE @@ -65,21 +82,23 @@ SUPPORT_SQUEEZEBOX = ( | SUPPORT_CLEAR_PLAYLIST ) -PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( - { - vol.Required(CONF_HOST): cv.string, - vol.Optional(CONF_PASSWORD): cv.string, - vol.Optional(CONF_PORT, default=DEFAULT_PORT): cv.port, - vol.Optional(CONF_USERNAME): cv.string, - } +PLATFORM_SCHEMA = vol.All( + cv.deprecated(CONF_HOST), + cv.deprecated(CONF_PORT), + cv.deprecated(CONF_PASSWORD), + cv.deprecated(CONF_USERNAME), + PLATFORM_SCHEMA.extend( + { + vol.Required(CONF_HOST): cv.string, + vol.Optional(CONF_PASSWORD): cv.string, + vol.Optional(CONF_PORT, default=DEFAULT_PORT): cv.port, + vol.Optional(CONF_USERNAME): cv.string, + } + ), ) -DATA_SQUEEZEBOX = "squeezebox" - -KNOWN_SERVERS = "squeezebox_known_servers" - +KNOWN_SERVERS = "known_servers" ATTR_PARAMETERS = "parameters" - ATTR_OTHER_PLAYER = "other_player" ATTR_TO_PROPERTY = [ @@ -87,57 +106,103 @@ ATTR_TO_PROPERTY = [ ATTR_SYNC_GROUP, ] +SQUEEZEBOX_MODE = { + "pause": STATE_PAUSED, + "play": STATE_PLAYING, + "stop": STATE_IDLE, +} + + +async def start_server_discovery(hass): + """Start a server discovery task.""" + + def _discovered_server(server): + asyncio.create_task( + hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_DISCOVERY}, + data={ + CONF_HOST: server.host, + CONF_PORT: int(server.port), + "uuid": server.uuid, + }, + ) + ) + + hass.data.setdefault(DOMAIN, {}) + if DISCOVERY_TASK not in hass.data[DOMAIN]: + _LOGGER.debug("Adding server discovery task for squeezebox") + hass.data[DOMAIN][DISCOVERY_TASK] = hass.async_create_task( + async_discover(_discovered_server) + ) + async def async_setup_platform(hass, config, async_add_entities, discovery_info=None): - """Set up the squeezebox platform.""" + """Set up squeezebox platform from platform entry in configuration.yaml (deprecated).""" - known_servers = hass.data.get(KNOWN_SERVERS) - if known_servers is None: - hass.data[KNOWN_SERVERS] = known_servers = set() + if config: + await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_IMPORT}, data=config + ) - if DATA_SQUEEZEBOX not in hass.data: - hass.data[DATA_SQUEEZEBOX] = [] + +async def async_setup_entry(hass, config_entry, async_add_entities): + """Set up an LMS Server from a config entry.""" + config = config_entry.data + _LOGGER.debug("Reached async_setup_entry for host=%s", config[CONF_HOST]) username = config.get(CONF_USERNAME) password = config.get(CONF_PASSWORD) + host = config[CONF_HOST] + port = config[CONF_PORT] - if discovery_info is not None: - host = discovery_info.get("host") - port = discovery_info.get("port") - else: - host = config.get(CONF_HOST) - port = config.get(CONF_PORT) + hass.data.setdefault(DOMAIN, {}) + hass.data[DOMAIN].setdefault(config_entry.entry_id, {}) - # In case the port is not discovered - if port is None: - port = DEFAULT_PORT + known_players = hass.data[DOMAIN].setdefault(KNOWN_PLAYERS, []) - # Get IP of host, to prevent duplication of same host (different DNS names) - try: - ipaddr = await hass.async_add_executor_job(socket.gethostbyname, host) - except OSError as error: - _LOGGER.error("Could not communicate with %s:%d: %s", host, port, error) - raise PlatformNotReady from error - - if ipaddr in known_servers: - return - - _LOGGER.debug("Creating LMS object for %s", ipaddr) + 
_LOGGER.debug("Creating LMS object for %s", host) lms = Server(async_get_clientsession(hass), host, port, username, password) - known_servers.add(ipaddr) - players = await lms.async_get_players() - if players is None: - raise PlatformNotReady - media_players = [] - for player in players: - media_players.append(SqueezeBoxDevice(player)) + async def _discovery(now=None): + """Discover squeezebox players by polling server.""" - hass.data[DATA_SQUEEZEBOX].extend(media_players) - async_add_entities(media_players) + async def _discovered_player(player): + """Handle a (re)discovered player.""" + entity = next( + ( + known + for known in known_players + if known.unique_id == player.player_id + ), + None, + ) + if entity: + await player.async_update() + async_dispatcher_send( + hass, SIGNAL_PLAYER_REDISCOVERED, player.player_id, player.connected + ) + if not entity: + _LOGGER.debug("Adding new entity: %s", player) + entity = SqueezeBoxEntity(player) + known_players.append(entity) + async_add_entities([entity]) + + players = await lms.async_get_players() + if players: + for player in players: + hass.async_create_task(_discovered_player(player)) + + hass.data[DOMAIN][config_entry.entry_id][ + PLAYER_DISCOVERY_UNSUB + ] = hass.helpers.event.async_call_later(DISCOVERY_INTERVAL, _discovery) + + _LOGGER.debug("Adding player discovery job for LMS server: %s", host) + asyncio.create_task(_discovery()) + + # Register entity services platform = entity_platform.current_platform.get() - platform.async_register_entity_service( SERVICE_CALL_METHOD, { @@ -148,7 +213,6 @@ async def async_setup_platform(hass, config, async_add_entities, discovery_info= }, "async_call_method", ) - platform.async_register_entity_service( SERVICE_CALL_QUERY, { @@ -159,17 +223,23 @@ async def async_setup_platform(hass, config, async_add_entities, discovery_info= }, "async_call_query", ) - platform.async_register_entity_service( SERVICE_SYNC, {vol.Required(ATTR_OTHER_PLAYER): cv.string}, "async_sync", ) - platform.async_register_entity_service(SERVICE_UNSYNC, None, "async_unsync") + # Start server discovery task if not already running + if hass.is_running: + asyncio.create_task(start_server_discovery(hass)) + else: + hass.bus.async_listen_once( + EVENT_HOMEASSISTANT_START, start_server_discovery(hass) + ) + return True -class SqueezeBoxDevice(MediaPlayerEntity): +class SqueezeBoxEntity(MediaPlayerEntity): """ Representation of a SqueezeBox device. 
@@ -181,6 +251,8 @@ class SqueezeBoxDevice(MediaPlayerEntity): self._player = player self._last_update = None self._query_result = {} + self._available = True + self._remove_dispatcher = None @property def device_state_attributes(self): @@ -203,10 +275,23 @@ class SqueezeBoxDevice(MediaPlayerEntity): """Return a unique ID.""" return self._player.player_id + @property + def available(self): + """Return True if device connected to LMS server.""" + return self._available + + @callback + def rediscovered(self, unique_id, connected): + """Make a player available again.""" + if unique_id == self.unique_id and connected: + self._available = True + _LOGGER.info("Player %s is available again", self.name) + self._remove_dispatcher() + @property def state(self): """Return the state of the device.""" - if self._player.power is not None and not self._player.power: + if not self._player.power: return STATE_OFF if self._player.mode: return SQUEEZEBOX_MODE.get(self._player.mode) @@ -214,13 +299,24 @@ class SqueezeBoxDevice(MediaPlayerEntity): async def async_update(self): """Update the Player() object.""" - last_media_position = self.media_position - await self._player.async_update() - if self.media_position != last_media_position: - _LOGGER.debug( - "Media position updated for %s: %s", self, self.media_position - ) - self._last_update = utcnow() + # only update available players, newly available players will be rediscovered and marked available + if self._available: + last_media_position = self.media_position + await self._player.async_update() + if self.media_position != last_media_position: + self._last_update = utcnow() + if self._player.connected is False: + _LOGGER.info("Player %s is not available", self.name) + self._available = False + + # start listening for restored players + self._remove_dispatcher = async_dispatcher_connect( + self.hass, SIGNAL_PLAYER_REDISCOVERED, self.rediscovered + ) + + async def async_will_remove_from_hass(self): + """Remove from list of known players when removed from hass.""" + self.hass.data[DOMAIN][KNOWN_PLAYERS].remove(self) @property def volume_level(self): @@ -291,7 +387,9 @@ class SqueezeBoxDevice(MediaPlayerEntity): @property def sync_group(self): """List players we are synced with.""" - player_ids = {p.unique_id: p.entity_id for p in self.hass.data[DATA_SQUEEZEBOX]} + player_ids = { + p.unique_id: p.entity_id for p in self.hass.data[DOMAIN][KNOWN_PLAYERS] + } sync_group = [] for player in self._player.sync_group: if player in player_ids: @@ -407,7 +505,9 @@ class SqueezeBoxDevice(MediaPlayerEntity): If the other player is a member of a sync group, it will leave the current sync group without asking. 
""" - player_ids = {p.entity_id: p.unique_id for p in self.hass.data[DATA_SQUEEZEBOX]} + player_ids = { + p.entity_id: p.unique_id for p in self.hass.data[DOMAIN][KNOWN_PLAYERS] + } other_player_id = player_ids.get(other_player) if other_player_id: await self._player.async_sync(other_player_id) diff --git a/homeassistant/components/squeezebox/strings.json b/homeassistant/components/squeezebox/strings.json new file mode 100644 index 00000000000..d905335a6ae --- /dev/null +++ b/homeassistant/components/squeezebox/strings.json @@ -0,0 +1,33 @@ +{ + "title": "Logitech Squeezebox", + "config": { + "flow_title": "Logitech Squeezebox: {host}", + "step": { + "user": { + "title": "Configure Logitech Media Server", + "data": { + "host": "[%key:common::config_flow::data::host%]" + } + }, + "edit": { + "title": "Edit connection information", + "data": { + "host": "[%key:common::config_flow::data::host%]", + "port": "[%key:common::config_flow::data::port%]", + "username": "[%key:common::config_flow::data::username%]", + "password": "[%key:common::config_flow::data::password%]" + } + } + }, + "error": { + "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]", + "invalid_auth": "[%key:common::config_flow::error::invalid_auth%]", + "unknown": "[%key:common::config_flow::error::unknown%]", + "no_server_found": "Could not automatically discover server." + }, + "abort": { + "already_configured": "[%key:common::config_flow::abort::already_configured_device%]", + "no_server_found": "No LMS server found." + } + } +} diff --git a/homeassistant/components/squeezebox/translations/ca.json b/homeassistant/components/squeezebox/translations/ca.json new file mode 100644 index 00000000000..a5ee040b705 --- /dev/null +++ b/homeassistant/components/squeezebox/translations/ca.json @@ -0,0 +1,29 @@ +{ + "config": { + "abort": { + "already_configured": "El dispositiu ja est\u00e0 configurat" + }, + "error": { + "cannot_connect": "No s'ha pogut connectar", + "invalid_auth": "Autenticaci\u00f3 inv\u00e0lida", + "unknown": "Error inesperat" + }, + "flow_title": "Logitech Squeezebox: {host}", + "step": { + "edit": { + "data": { + "host": "Amfitri\u00f3", + "password": "Contrasenya", + "port": "Port", + "username": "Nom d'usuari" + } + }, + "user": { + "data": { + "host": "Amfitri\u00f3" + } + } + } + }, + "title": "Logitech Squeezebox" +} \ No newline at end of file diff --git a/homeassistant/components/squeezebox/translations/en.json b/homeassistant/components/squeezebox/translations/en.json new file mode 100644 index 00000000000..9cbfbcc3664 --- /dev/null +++ b/homeassistant/components/squeezebox/translations/en.json @@ -0,0 +1,33 @@ +{ + "config": { + "abort": { + "already_configured": "Device is already configured", + "no_server_found": "No LMS server found." 
+ }, + "error": { + "cannot_connect": "Failed to connect", + "invalid_auth": "Invalid authentication", + "no_server_found": "Could not automatically discover server.", + "unknown": "Unexpected error" + }, + "flow_title": "Logitech Squeezebox: {host}", + "step": { + "edit": { + "data": { + "host": "Host", + "password": "Password", + "port": "Port", + "username": "Username" + }, + "title": "Edit connection information" + }, + "user": { + "data": { + "host": "Host" + }, + "title": "Configure Logitech Media Server" + } + } + }, + "title": "Logitech Squeezebox" +} \ No newline at end of file diff --git a/homeassistant/components/squeezebox/translations/es.json b/homeassistant/components/squeezebox/translations/es.json new file mode 100644 index 00000000000..33b5f846fc8 --- /dev/null +++ b/homeassistant/components/squeezebox/translations/es.json @@ -0,0 +1,33 @@ +{ + "config": { + "abort": { + "already_configured": "El dispositivo ya est\u00e1 configurado", + "no_server_found": "No se ha encontrado servidor LMS." + }, + "error": { + "cannot_connect": "No se pudo conectar", + "invalid_auth": "Autenticaci\u00f3n no v\u00e1lida", + "no_server_found": "No se pudo descubrir autom\u00e1ticamente el servidor.", + "unknown": "Error inesperado" + }, + "flow_title": "Logitech Squeezebox: {host}", + "step": { + "edit": { + "data": { + "host": "Host", + "password": "Contrase\u00f1a", + "port": "Puerto", + "username": "Usuario" + }, + "title": "Editar la informaci\u00f3n de conexi\u00f3n" + }, + "user": { + "data": { + "host": "Host" + }, + "title": "Configurar Logitech Media Server" + } + } + }, + "title": "Logitech Squeezebox" +} \ No newline at end of file diff --git a/homeassistant/components/squeezebox/translations/no.json b/homeassistant/components/squeezebox/translations/no.json new file mode 100644 index 00000000000..aa71e9a0c52 --- /dev/null +++ b/homeassistant/components/squeezebox/translations/no.json @@ -0,0 +1,13 @@ +{ + "config": { + "abort": { + "no_server_found": "Ingen LMS-server funnet." + }, + "step": { + "user": { + "title": "Konfigurer Logitech Media Server" + } + } + }, + "title": "Logitech Squeezebox" +} \ No newline at end of file diff --git a/homeassistant/components/squeezebox/translations/ru.json b/homeassistant/components/squeezebox/translations/ru.json new file mode 100644 index 00000000000..f12f6bb5e83 --- /dev/null +++ b/homeassistant/components/squeezebox/translations/ru.json @@ -0,0 +1,33 @@ +{ + "config": { + "abort": { + "already_configured": "\u041d\u0430\u0441\u0442\u0440\u043e\u0439\u043a\u0430 \u044d\u0442\u043e\u0433\u043e \u0443\u0441\u0442\u0440\u043e\u0439\u0441\u0442\u0432\u0430 \u0443\u0436\u0435 \u0432\u044b\u043f\u043e\u043b\u043d\u0435\u043d\u0430.", + "no_server_found": "\u0421\u0435\u0440\u0432\u0435\u0440 LMS \u043d\u0435 \u043d\u0430\u0439\u0434\u0435\u043d." + }, + "error": { + "cannot_connect": "\u041e\u0448\u0438\u0431\u043a\u0430 \u043f\u043e\u0434\u043a\u043b\u044e\u0447\u0435\u043d\u0438\u044f.", + "invalid_auth": "\u041d\u0435\u0432\u0435\u0440\u043d\u0430\u044f \u0430\u0443\u0442\u0435\u043d\u0442\u0438\u0444\u0438\u043a\u0430\u0446\u0438\u044f.", + "no_server_found": "\u041d\u0435 \u0443\u0434\u0430\u043b\u043e\u0441\u044c \u0430\u0432\u0442\u043e\u043c\u0430\u0442\u0438\u0447\u0435\u0441\u043a\u0438 \u043e\u0431\u043d\u0430\u0440\u0443\u0436\u0438\u0442\u044c \u0441\u0435\u0440\u0432\u0435\u0440.", + "unknown": "\u041d\u0435\u043f\u0440\u0435\u0434\u0432\u0438\u0434\u0435\u043d\u043d\u0430\u044f \u043e\u0448\u0438\u0431\u043a\u0430." 
+ }, + "flow_title": "Logitech Squeezebox: {host}", + "step": { + "edit": { + "data": { + "host": "\u0425\u043e\u0441\u0442", + "password": "\u041f\u0430\u0440\u043e\u043b\u044c", + "port": "\u041f\u043e\u0440\u0442", + "username": "\u041b\u043e\u0433\u0438\u043d" + }, + "title": "\u0418\u043d\u0444\u043e\u0440\u043c\u0430\u0446\u0438\u044f \u043e \u043f\u043e\u0434\u043a\u043b\u044e\u0447\u0435\u043d\u0438\u0438" + }, + "user": { + "data": { + "host": "\u0425\u043e\u0441\u0442" + }, + "title": "Logitech Media Server" + } + } + }, + "title": "Logitech Squeezebox" +} \ No newline at end of file diff --git a/homeassistant/components/squeezebox/translations/zh-Hant.json b/homeassistant/components/squeezebox/translations/zh-Hant.json new file mode 100644 index 00000000000..54b54195bf4 --- /dev/null +++ b/homeassistant/components/squeezebox/translations/zh-Hant.json @@ -0,0 +1,33 @@ +{ + "config": { + "abort": { + "already_configured": "\u8a2d\u5099\u5df2\u7d93\u8a2d\u5b9a\u5b8c\u6210", + "no_server_found": "\u627e\u4e0d\u5230 LMS \u4f3a\u670d\u5668\u3002" + }, + "error": { + "cannot_connect": "\u9023\u7dda\u5931\u6557", + "invalid_auth": "\u9a57\u8b49\u78bc\u7121\u6548", + "no_server_found": "\u7121\u6cd5\u81ea\u52d5\u63a2\u7d22\u4f3a\u670d\u5668\u3002", + "unknown": "\u672a\u9810\u671f\u932f\u8aa4" + }, + "flow_title": "\u7f85\u6280 Squeezebox\uff1a{host}", + "step": { + "edit": { + "data": { + "host": "\u4e3b\u6a5f\u7aef", + "password": "\u5bc6\u78bc", + "port": "\u901a\u8a0a\u57e0", + "username": "\u4f7f\u7528\u8005\u540d\u7a31" + }, + "title": "\u7de8\u8f2f\u9023\u7dda\u8cc7\u8a0a" + }, + "user": { + "data": { + "host": "\u4e3b\u6a5f\u7aef" + }, + "title": "\u8a2d\u5b9a\u7f85\u6280 Media Server" + } + } + }, + "title": "\u7f85\u6280 Squeezebox" +} \ No newline at end of file diff --git a/homeassistant/components/ssdp/manifest.json b/homeassistant/components/ssdp/manifest.json index 54fac55198e..d73bae27bb2 100644 --- a/homeassistant/components/ssdp/manifest.json +++ b/homeassistant/components/ssdp/manifest.json @@ -2,7 +2,7 @@ "domain": "ssdp", "name": "Simple Service Discovery Protocol (SSDP)", "documentation": "https://www.home-assistant.io/integrations/ssdp", - "requirements": ["defusedxml==0.6.0", "netdisco==2.7.0"], + "requirements": ["defusedxml==0.6.0", "netdisco==2.7.1"], "after_dependencies": ["zeroconf"], "codeowners": [] } diff --git a/homeassistant/components/statistics/sensor.py b/homeassistant/components/statistics/sensor.py index 226d278633b..00e029e49c6 100644 --- a/homeassistant/components/statistics/sensor.py +++ b/homeassistant/components/statistics/sensor.py @@ -332,7 +332,7 @@ class StatisticsSensor(Entity): query = query.order_by(States.last_updated.desc()).limit( self._sampling_size ) - states = execute(query) + states = execute(query, to_native=True, validate_entity_ids=False) for state in reversed(states): self._add_state_to_queue(state) diff --git a/homeassistant/components/switch/light.py b/homeassistant/components/switch/light.py index f40ccde5b0b..c23390a3e3e 100644 --- a/homeassistant/components/switch/light.py +++ b/homeassistant/components/switch/light.py @@ -84,14 +84,22 @@ class LightSwitch(LightEntity): """Forward the turn_on command to the switch in this light switch.""" data = {ATTR_ENTITY_ID: self._switch_entity_id} await self.hass.services.async_call( - switch.DOMAIN, switch.SERVICE_TURN_ON, data, blocking=True + switch.DOMAIN, + switch.SERVICE_TURN_ON, + data, + blocking=True, + context=self._context, ) async def async_turn_off(self, **kwargs): 
"""Forward the turn_off command to the switch in this light switch.""" data = {ATTR_ENTITY_ID: self._switch_entity_id} await self.hass.services.async_call( - switch.DOMAIN, switch.SERVICE_TURN_OFF, data, blocking=True + switch.DOMAIN, + switch.SERVICE_TURN_OFF, + data, + blocking=True, + context=self._context, ) async def async_update(self): diff --git a/homeassistant/components/switcher_kis/__init__.py b/homeassistant/components/switcher_kis/__init__.py index 8369fdd8975..4a9a564ec3b 100644 --- a/homeassistant/components/switcher_kis/__init__.py +++ b/homeassistant/components/switcher_kis/__init__.py @@ -85,18 +85,12 @@ async def _validate_edit_permission( """Use for validating user control permissions.""" splited = split_entity_id(entity_id) if splited[0] != SWITCH_DOMAIN or not splited[1].startswith(DOMAIN): - raise Unauthorized( - context=context, entity_id=entity_id, permission=(POLICY_EDIT,) - ) + raise Unauthorized(context=context, entity_id=entity_id, permission=POLICY_EDIT) user = await hass.auth.async_get_user(context.user_id) if user is None: - raise UnknownUser( - context=context, entity_id=entity_id, permission=(POLICY_EDIT,) - ) + raise UnknownUser(context=context, entity_id=entity_id, permission=POLICY_EDIT) if not user.permissions.check_entity(entity_id, POLICY_EDIT): - raise Unauthorized( - context=context, entity_id=entity_id, permission=(POLICY_EDIT,) - ) + raise Unauthorized(context=context, entity_id=entity_id, permission=POLICY_EDIT) async def async_setup(hass: HomeAssistantType, config: Dict) -> bool: diff --git a/homeassistant/components/synology_dsm/binary_sensor.py b/homeassistant/components/synology_dsm/binary_sensor.py index 3dfc21b8a7b..a75f57db678 100644 --- a/homeassistant/components/synology_dsm/binary_sensor.py +++ b/homeassistant/components/synology_dsm/binary_sensor.py @@ -1,4 +1,6 @@ """Support for Synology DSM binary sensors.""" +from typing import Dict + from homeassistant.components.binary_sensor import BinarySensorEntity from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_DISKS @@ -53,6 +55,11 @@ class SynoDSMSecurityBinarySensor(SynologyDSMEntity, BinarySensorEntity): """Return True if entity is available.""" return bool(self._api.security) + @property + def device_state_attributes(self) -> Dict[str, str]: + """Return security checks details.""" + return self._api.security.status_by_check + class SynoDSMStorageBinarySensor(SynologyDSMDeviceEntity, BinarySensorEntity): """Representation a Synology Storage binary sensor.""" diff --git a/homeassistant/components/synology_dsm/manifest.json b/homeassistant/components/synology_dsm/manifest.json index fcf91bb25b3..6ad926cfb9e 100644 --- a/homeassistant/components/synology_dsm/manifest.json +++ b/homeassistant/components/synology_dsm/manifest.json @@ -2,7 +2,7 @@ "domain": "synology_dsm", "name": "Synology DSM", "documentation": "https://www.home-assistant.io/integrations/synology_dsm", - "requirements": ["python-synology==0.8.1"], + "requirements": ["python-synology==0.8.2"], "codeowners": ["@ProtoThis", "@Quentame"], "config_flow": true, "ssdp": [ diff --git a/homeassistant/components/synology_dsm/translations/no.json b/homeassistant/components/synology_dsm/translations/no.json index 678484d5226..43c3c450f93 100644 --- a/homeassistant/components/synology_dsm/translations/no.json +++ b/homeassistant/components/synology_dsm/translations/no.json @@ -21,7 +21,7 @@ "link": { "data": { "password": "Passord", - "port": "Port (valgfritt)", + "port": "Port", "ssl": "Bruk 
SSL/TLS til \u00e5 koble til NAS-en", "username": "Brukernavn" }, @@ -32,7 +32,7 @@ "data": { "host": "Vert", "password": "Passord", - "port": "Port (valgfritt)", + "port": "Port", "ssl": "Bruk SSL/TLS til \u00e5 koble til NAS-en", "username": "Brukernavn" }, diff --git a/homeassistant/components/tahoma/__init__.py b/homeassistant/components/tahoma/__init__.py index 1a6b326f4e5..d75ccaec414 100644 --- a/homeassistant/components/tahoma/__init__.py +++ b/homeassistant/components/tahoma/__init__.py @@ -68,6 +68,7 @@ TAHOMA_TYPES = { "rts:VenetianBlindRTSComponent": "cover", "somfythermostat:SomfyThermostatTemperatureSensor": "sensor", "somfythermostat:SomfyThermostatHumiditySensor": "sensor", + "zwave:OnOffLightZWaveComponent": "switch", } diff --git a/homeassistant/components/tahoma/switch.py b/homeassistant/components/tahoma/switch.py index 13aa70c66d3..808f80d8cfa 100644 --- a/homeassistant/components/tahoma/switch.py +++ b/homeassistant/components/tahoma/switch.py @@ -47,10 +47,18 @@ class TahomaSwitch(TahomaDevice, SwitchEntity): else: self._state = STATE_OFF + if self.tahoma_device.type == "zwave:OnOffLightZWaveComponent": + if self.tahoma_device.active_states.get("core:OnOffState") == "on": + self._state = STATE_ON + else: + self._state = STATE_OFF + # A RTS power socket doesn't have a feedback channel, # so we must assume the socket is available. if self.tahoma_device.type == "rts:OnOffRTSComponent": self._available = True + elif self.tahoma_device.type == "zwave:OnOffLightZWaveComponent": + self._available = True else: self._available = bool( self.tahoma_device.active_states.get("core:StatusState") == "available" diff --git a/homeassistant/components/tellduslive/config_flow.py b/homeassistant/components/tellduslive/config_flow.py index 893f3b80456..36bc89d115a 100644 --- a/homeassistant/components/tellduslive/config_flow.py +++ b/homeassistant/components/tellduslive/config_flow.py @@ -114,13 +114,14 @@ class FlowHandler(config_entries.ConfigFlow): }, ) - async def async_step_discovery(self, user_input): + async def async_step_discovery(self, discovery_info): """Run when a Tellstick is discovered.""" + await self._async_handle_discovery_without_unique_id() - _LOGGER.info("Discovered tellstick device: %s", user_input) - if supports_local_api(user_input[1]): - _LOGGER.info("%s support local API", user_input[1]) - self._hosts.append(user_input[0]) + _LOGGER.info("Discovered tellstick device: %s", discovery_info) + if supports_local_api(discovery_info[1]): + _LOGGER.info("%s support local API", discovery_info[1]) + self._hosts.append(discovery_info[0]) return await self.async_step_user() diff --git a/homeassistant/components/tellduslive/strings.json b/homeassistant/components/tellduslive/strings.json index c29916be936..aabf00bc1b2 100644 --- a/homeassistant/components/tellduslive/strings.json +++ b/homeassistant/components/tellduslive/strings.json @@ -1,7 +1,7 @@ { "config": { "abort": { - "already_setup": "TelldusLive is already configured", + "already_configured": "TelldusLive is already configured", "authorize_url_fail": "Unknown error generating an authorize url.", "authorize_url_timeout": "Timeout generating authorize url.", "unknown": "Unknown error occurred" @@ -22,4 +22,4 @@ } } } -} \ No newline at end of file +} diff --git a/homeassistant/components/tellduslive/translations/ca.json b/homeassistant/components/tellduslive/translations/ca.json index 88e4dfbbdae..ae2bd468964 100644 --- a/homeassistant/components/tellduslive/translations/ca.json +++ 
b/homeassistant/components/tellduslive/translations/ca.json @@ -1,6 +1,7 @@ { "config": { "abort": { + "already_configured": "TelldusLive ja est\u00e0 configurat", "already_setup": "TelldusLive ja est\u00e0 configurat", "authorize_url_fail": "S'ha produ\u00eft un error desconegut al generar l'URL d'autoritzaci\u00f3.", "authorize_url_timeout": "S'ha acabat el temps d'espera durant la generaci\u00f3 de l'URL d'autoritzaci\u00f3.", diff --git a/homeassistant/components/tellduslive/translations/en.json b/homeassistant/components/tellduslive/translations/en.json index fb7a76de106..04bd2a192c1 100644 --- a/homeassistant/components/tellduslive/translations/en.json +++ b/homeassistant/components/tellduslive/translations/en.json @@ -1,6 +1,7 @@ { "config": { "abort": { + "already_configured": "TelldusLive is already configured", "already_setup": "TelldusLive is already configured", "authorize_url_fail": "Unknown error generating an authorize url.", "authorize_url_timeout": "Timeout generating authorize url.", diff --git a/homeassistant/components/tellduslive/translations/es.json b/homeassistant/components/tellduslive/translations/es.json index 37b1c15f896..378274f63af 100644 --- a/homeassistant/components/tellduslive/translations/es.json +++ b/homeassistant/components/tellduslive/translations/es.json @@ -1,6 +1,7 @@ { "config": { "abort": { + "already_configured": "TelldusLive ya est\u00e1 configurado", "already_setup": "TelldusLive ya est\u00e1 configurado", "authorize_url_fail": "Error desconocido generando la url de autorizaci\u00f3n", "authorize_url_timeout": "Tiempo de espera agotado generando la url de autorizaci\u00f3n", diff --git a/homeassistant/components/tellduslive/translations/it.json b/homeassistant/components/tellduslive/translations/it.json index 177c22e5813..e8f74f5ce29 100644 --- a/homeassistant/components/tellduslive/translations/it.json +++ b/homeassistant/components/tellduslive/translations/it.json @@ -1,6 +1,7 @@ { "config": { "abort": { + "already_configured": "TelldusLive \u00e8 gi\u00e0 configurato", "already_setup": "TelldusLive \u00e8 gi\u00e0 configurato", "authorize_url_fail": "Errore sconosciuto nel generare l'url di autorizzazione", "authorize_url_timeout": "Tempo scaduto nel generare l'url di autorizzazione", diff --git a/homeassistant/components/tellduslive/translations/ko.json b/homeassistant/components/tellduslive/translations/ko.json index 3430b256dda..ffbded23f7f 100644 --- a/homeassistant/components/tellduslive/translations/ko.json +++ b/homeassistant/components/tellduslive/translations/ko.json @@ -1,6 +1,7 @@ { "config": { "abort": { + "already_configured": "TelldusLive \uac00 \uc774\ubbf8 \uad6c\uc131\ub418\uc5c8\uc2b5\ub2c8\ub2e4", "already_setup": "TelldusLive \uac00 \uc774\ubbf8 \uad6c\uc131\ub418\uc5c8\uc2b5\ub2c8\ub2e4", "authorize_url_fail": "\uc778\uc99d url \uc0dd\uc131\uc5d0 \uc54c \uc218 \uc5c6\ub294 \uc624\ub958\uac00 \ubc1c\uc0dd\ud588\uc2b5\ub2c8\ub2e4.", "authorize_url_timeout": "\uc778\uc99d url \uc0dd\uc131 \uc2dc\uac04\uc774 \ucd08\uacfc\ub418\uc5c8\uc2b5\ub2c8\ub2e4.", diff --git a/homeassistant/components/tellduslive/translations/lb.json b/homeassistant/components/tellduslive/translations/lb.json index f9191fe7866..bf91c5d26ea 100644 --- a/homeassistant/components/tellduslive/translations/lb.json +++ b/homeassistant/components/tellduslive/translations/lb.json @@ -1,6 +1,7 @@ { "config": { "abort": { + "already_configured": "TelldusLive ass scho konfigur\u00e9iert", "already_setup": "TelldusLive ass scho konfigur\u00e9iert", 
"authorize_url_fail": "Onbekannte Feeler beim gener\u00e9ieren vun der Autorisatiouns URL.", "authorize_url_timeout": "Z\u00e4it Iwwerschreidung beim gener\u00e9ieren vun der Autorisatiouns URL.", diff --git a/homeassistant/components/tellduslive/translations/nn.json b/homeassistant/components/tellduslive/translations/nn.json deleted file mode 100644 index a98ce99c970..00000000000 --- a/homeassistant/components/tellduslive/translations/nn.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "title": "Telldus Live" -} \ No newline at end of file diff --git a/homeassistant/components/tellduslive/translations/no.json b/homeassistant/components/tellduslive/translations/no.json index 7ba0ed4c208..8c2f3d3489d 100644 --- a/homeassistant/components/tellduslive/translations/no.json +++ b/homeassistant/components/tellduslive/translations/no.json @@ -1,6 +1,7 @@ { "config": { "abort": { + "already_configured": "TelldusLive er allerede konfigurert", "already_setup": "TelldusLive er allerede konfigurert", "authorize_url_fail": "Ukjent feil ved oppretting av godkjenningsadresse.", "authorize_url_timeout": "Tidsavbrudd ved oppretting av godkjenningsadresse.", diff --git a/homeassistant/components/tellduslive/translations/pl.json b/homeassistant/components/tellduslive/translations/pl.json index 49118f70dd8..deee22d9ed8 100644 --- a/homeassistant/components/tellduslive/translations/pl.json +++ b/homeassistant/components/tellduslive/translations/pl.json @@ -1,6 +1,7 @@ { "config": { "abort": { + "already_configured": "TelldusLive jest ju\u017c skonfigurowany", "already_setup": "TelldusLive jest ju\u017c skonfigurowany.", "authorize_url_fail": "Nieznany b\u0142\u0105d podczas generowania url autoryzacji.", "authorize_url_timeout": "Przekroczono limit czasu generowania URL autoryzacji.", diff --git a/homeassistant/components/tellduslive/translations/ru.json b/homeassistant/components/tellduslive/translations/ru.json index 2fb298f781c..4aac3b2a5d5 100644 --- a/homeassistant/components/tellduslive/translations/ru.json +++ b/homeassistant/components/tellduslive/translations/ru.json @@ -1,6 +1,7 @@ { "config": { "abort": { + "already_configured": "\u041d\u0430\u0441\u0442\u0440\u043e\u0439\u043a\u0430 \u0443\u0436\u0435 \u0432\u044b\u043f\u043e\u043b\u043d\u0435\u043d\u0430.", "already_setup": "\u041d\u0430\u0441\u0442\u0440\u043e\u0439\u043a\u0430 \u0443\u0436\u0435 \u0432\u044b\u043f\u043e\u043b\u043d\u0435\u043d\u0430.", "authorize_url_fail": "\u041d\u0435\u0438\u0437\u0432\u0435\u0441\u0442\u043d\u0430\u044f \u043e\u0448\u0438\u0431\u043a\u0430 \u043f\u0440\u0438 \u0433\u0435\u043d\u0435\u0440\u0430\u0446\u0438\u0438 \u0441\u0441\u044b\u043b\u043a\u0438 \u0430\u0432\u0442\u043e\u0440\u0438\u0437\u0430\u0446\u0438\u0438.", "authorize_url_timeout": "\u0418\u0441\u0442\u0435\u043a\u043b\u043e \u0432\u0440\u0435\u043c\u044f \u0433\u0435\u043d\u0435\u0440\u0430\u0446\u0438\u0438 \u0441\u0441\u044b\u043b\u043a\u0438 \u0430\u0432\u0442\u043e\u0440\u0438\u0437\u0430\u0446\u0438\u0438.", diff --git a/homeassistant/components/tellduslive/translations/zh-Hant.json b/homeassistant/components/tellduslive/translations/zh-Hant.json index 0901d927a2b..0683c783677 100644 --- a/homeassistant/components/tellduslive/translations/zh-Hant.json +++ b/homeassistant/components/tellduslive/translations/zh-Hant.json @@ -1,6 +1,7 @@ { "config": { "abort": { + "already_configured": "TelldusLive \u5df2\u7d93\u8a2d\u5b9a\u5b8c\u6210", "already_setup": "TelldusLive \u5df2\u7d93\u8a2d\u5b9a\u5b8c\u6210", "authorize_url_fail": "\u7522\u751f\u8a8d\u8b49 
URL \u6642\u767c\u751f\u672a\u77e5\u932f\u8aa4", "authorize_url_timeout": "\u7522\u751f\u8a8d\u8b49 URL \u6642\u903e\u6642", diff --git a/homeassistant/components/template/vacuum.py b/homeassistant/components/template/vacuum.py index c345663ca98..0a1a0e50ffc 100644 --- a/homeassistant/components/template/vacuum.py +++ b/homeassistant/components/template/vacuum.py @@ -53,6 +53,7 @@ CONF_VACUUMS = "vacuums" CONF_BATTERY_LEVEL_TEMPLATE = "battery_level_template" CONF_FAN_SPEED_LIST = "fan_speeds" CONF_FAN_SPEED_TEMPLATE = "fan_speed_template" +CONF_ATTRIBUTE_TEMPLATES = "attribute_templates" ENTITY_ID_FORMAT = DOMAIN + ".{}" _VALID_STATES = [ @@ -71,6 +72,9 @@ VACUUM_SCHEMA = vol.Schema( vol.Optional(CONF_BATTERY_LEVEL_TEMPLATE): cv.template, vol.Optional(CONF_FAN_SPEED_TEMPLATE): cv.template, vol.Optional(CONF_AVAILABILITY_TEMPLATE): cv.template, + vol.Optional(CONF_ATTRIBUTE_TEMPLATES, default={}): vol.Schema( + {cv.string: cv.template} + ), vol.Required(SERVICE_START): cv.SCRIPT_SCHEMA, vol.Optional(SERVICE_PAUSE): cv.SCRIPT_SCHEMA, vol.Optional(SERVICE_STOP): cv.SCRIPT_SCHEMA, @@ -99,6 +103,7 @@ async def async_setup_platform(hass, config, async_add_entities, discovery_info= battery_level_template = device_config.get(CONF_BATTERY_LEVEL_TEMPLATE) fan_speed_template = device_config.get(CONF_FAN_SPEED_TEMPLATE) availability_template = device_config.get(CONF_AVAILABILITY_TEMPLATE) + attribute_templates = device_config.get(CONF_ATTRIBUTE_TEMPLATES) start_action = device_config[SERVICE_START] pause_action = device_config.get(SERVICE_PAUSE) @@ -117,8 +122,10 @@ async def async_setup_platform(hass, config, async_add_entities, discovery_info= CONF_AVAILABILITY_TEMPLATE: availability_template, } - initialise_templates(hass, templates) - entity_ids = extract_entities(device, "vacuum", None, templates) + initialise_templates(hass, templates, attribute_templates) + entity_ids = extract_entities( + device, "vacuum", None, templates, attribute_templates + ) vacuums.append( TemplateVacuum( @@ -138,6 +145,7 @@ async def async_setup_platform(hass, config, async_add_entities, discovery_info= set_fan_speed_action, fan_speed_list, entity_ids, + attribute_templates, ) ) @@ -165,6 +173,7 @@ class TemplateVacuum(StateVacuumEntity): set_fan_speed_action, fan_speed_list, entity_ids, + attribute_templates, ): """Initialize the vacuum.""" self.hass = hass @@ -178,6 +187,8 @@ class TemplateVacuum(StateVacuumEntity): self._fan_speed_template = fan_speed_template self._availability_template = availability_template self._supported_features = SUPPORT_START + self._attribute_templates = attribute_templates + self._attributes = {} self._start_script = Script(hass, start_action) @@ -265,6 +276,11 @@ class TemplateVacuum(StateVacuumEntity): """Return if the device is available.""" return self._available + @property + def device_state_attributes(self): + """Return the state attributes.""" + return self._attributes + async def async_start(self): """Start or resume the cleaning task.""" await self._start_script.async_run(context=self._context) @@ -419,3 +435,13 @@ class TemplateVacuum(StateVacuumEntity): self._name, ex, ) + # Update attribute if attribute template is defined + if self._attribute_templates is not None: + attrs = {} + for key, value in self._attribute_templates.items(): + try: + attrs[key] = value.async_render() + except TemplateError as err: + _LOGGER.error("Error rendering attribute %s: %s", key, err) + + self._attributes = attrs diff --git a/homeassistant/components/tensorflow/manifest.json 
b/homeassistant/components/tensorflow/manifest.json index cbbd2d6345b..b74633d36d4 100644 --- a/homeassistant/components/tensorflow/manifest.json +++ b/homeassistant/components/tensorflow/manifest.json @@ -4,7 +4,7 @@ "documentation": "https://www.home-assistant.io/integrations/tensorflow", "requirements": [ "tensorflow==1.13.2", - "numpy==1.18.4", + "numpy==1.19.0", "protobuf==3.6.1", "pillow==7.1.2" ], diff --git a/homeassistant/components/tesla/manifest.json b/homeassistant/components/tesla/manifest.json index 39aa00cfb60..9a0d80f9a05 100644 --- a/homeassistant/components/tesla/manifest.json +++ b/homeassistant/components/tesla/manifest.json @@ -3,6 +3,6 @@ "name": "Tesla", "config_flow": true, "documentation": "https://www.home-assistant.io/integrations/tesla", - "requirements": ["teslajsonpy==0.8.1"], + "requirements": ["teslajsonpy==0.9.0"], "codeowners": ["@zabuldon", "@alandtse"] } diff --git a/homeassistant/components/tibber/translations/fr.json b/homeassistant/components/tibber/translations/fr.json index a54522a8585..223c1d44780 100644 --- a/homeassistant/components/tibber/translations/fr.json +++ b/homeassistant/components/tibber/translations/fr.json @@ -12,6 +12,7 @@ "data": { "access_token": "Jeton d'acc\u00e8s" }, + "description": "Entrez votre jeton d'acc\u00e8s depuis https://developer.tibber.com/settings/accesstoken", "title": "Tibber" } } diff --git a/homeassistant/components/tile/__init__.py b/homeassistant/components/tile/__init__.py index f0192d0ed32..4f6411ed368 100644 --- a/homeassistant/components/tile/__init__.py +++ b/homeassistant/components/tile/__init__.py @@ -1 +1,146 @@ -"""The tile component.""" +"""The Tile component.""" +import asyncio +from datetime import timedelta + +from pytile import async_login +from pytile.errors import SessionExpiredError, TileError + +from homeassistant.const import ATTR_ATTRIBUTION, CONF_PASSWORD, CONF_USERNAME +from homeassistant.core import callback +from homeassistant.helpers import aiohttp_client +from homeassistant.helpers.entity import Entity +from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed + +from .const import DATA_COORDINATOR, DOMAIN, LOGGER + +PLATFORMS = ["device_tracker"] +DEVICE_TYPES = ["PHONE", "TILE"] + +DEFAULT_ATTRIBUTION = "Data provided by Tile" +DEFAULT_ICON = "mdi:view-grid" +DEFAULT_UPDATE_INTERVAL = timedelta(minutes=2) + +CONF_SHOW_INACTIVE = "show_inactive" + + +async def async_setup(hass, config): + """Set up the Tile component.""" + hass.data[DOMAIN] = {DATA_COORDINATOR: {}} + + return True + + +async def async_setup_entry(hass, config_entry): + """Set up Tile as config entry.""" + websession = aiohttp_client.async_get_clientsession(hass) + + client = await async_login( + config_entry.data[CONF_USERNAME], + config_entry.data[CONF_PASSWORD], + session=websession, + ) + + async def async_update_data(): + """Get new data from the API.""" + try: + return await client.tiles.all() + except SessionExpiredError: + LOGGER.info("Tile session expired; creating a new one") + await client.async_init() + except TileError as err: + raise UpdateFailed(f"Error while retrieving data: {err}") + + coordinator = DataUpdateCoordinator( + hass, + LOGGER, + name=config_entry.title, + update_interval=DEFAULT_UPDATE_INTERVAL, + update_method=async_update_data, + ) + + await coordinator.async_refresh() + hass.data[DOMAIN][DATA_COORDINATOR][config_entry.entry_id] = coordinator + + for component in PLATFORMS: + hass.async_create_task( + 
hass.config_entries.async_forward_entry_setup(config_entry, component) + ) + + return True + + +async def async_unload_entry(hass, config_entry): + """Unload a Tile config entry.""" + unload_ok = all( + await asyncio.gather( + *[ + hass.config_entries.async_forward_entry_unload(config_entry, component) + for component in PLATFORMS + ] + ) + ) + if unload_ok: + hass.data[DOMAIN][DATA_COORDINATOR].pop(config_entry.entry_id) + + return unload_ok + + +class TileEntity(Entity): + """Define a generic Tile entity.""" + + def __init__(self, coordinator): + """Initialize.""" + self._attrs = {ATTR_ATTRIBUTION: DEFAULT_ATTRIBUTION} + self._name = None + self._unique_id = None + self.coordinator = coordinator + + @property + def device_state_attributes(self): + """Return the device state attributes.""" + return self._attrs + + @property + def icon(self): + """Return the icon.""" + return DEFAULT_ICON + + @property + def name(self): + """Return the name.""" + return self._name + + @property + def should_poll(self): + """Disable polling.""" + return False + + @property + def unique_id(self): + """Return the unique ID of the entity.""" + return self._unique_id + + @callback + def _update_from_latest_data(self): + """Update the entity from the latest data.""" + raise NotImplementedError + + async def async_added_to_hass(self): + """Register callbacks.""" + + @callback + def update(): + """Update the state.""" + self._update_from_latest_data() + self.async_write_ha_state() + + self.async_on_remove(self.coordinator.async_add_listener(update)) + + self._update_from_latest_data() + + async def async_update(self): + """Update the entity. + + Only used by the generic entity update service. + """ + await self.coordinator.async_request_refresh() diff --git a/homeassistant/components/tile/config_flow.py b/homeassistant/components/tile/config_flow.py new file mode 100644 index 00000000000..15ac70eeb2c --- /dev/null +++ b/homeassistant/components/tile/config_flow.py @@ -0,0 +1,52 @@ +"""Config flow to configure the Tile integration.""" +from pytile import async_login +from pytile.errors import TileError +import voluptuous as vol + +from homeassistant import config_entries +from homeassistant.const import CONF_PASSWORD, CONF_USERNAME +from homeassistant.helpers import aiohttp_client + +from .const import DOMAIN # pylint: disable=unused-import + + +class TileFlowHandler(config_entries.ConfigFlow, domain=DOMAIN): + """Handle a Tile config flow.""" + + VERSION = 1 + CONNECTION_CLASS = config_entries.CONN_CLASS_CLOUD_POLL + + def __init__(self): + """Initialize the config flow.""" + self.data_schema = vol.Schema( + {vol.Required(CONF_USERNAME): str, vol.Required(CONF_PASSWORD): str} + ) + + async def _show_form(self, errors=None): + """Show the form to the user.""" + return self.async_show_form( + step_id="user", data_schema=self.data_schema, errors=errors or {} + ) + + async def async_step_import(self, import_config): + """Import a config entry from configuration.yaml.""" + return await self.async_step_user(import_config) + + async def async_step_user(self, user_input=None): + """Handle the start of the config flow.""" + if not user_input: + return await self._show_form() + + await self.async_set_unique_id(user_input[CONF_USERNAME]) + self._abort_if_unique_id_configured() + + session = aiohttp_client.async_get_clientsession(self.hass) + + try: + await async_login( + user_input[CONF_USERNAME], user_input[CONF_PASSWORD], session=session + ) + except TileError: + return await self._show_form({"base": 
"invalid_credentials"}) + + return self.async_create_entry(title=user_input[CONF_USERNAME], data=user_input) diff --git a/homeassistant/components/tile/const.py b/homeassistant/components/tile/const.py new file mode 100644 index 00000000000..91f5b838642 --- /dev/null +++ b/homeassistant/components/tile/const.py @@ -0,0 +1,8 @@ +"""Define Tile constants.""" +import logging + +DOMAIN = "tile" + +DATA_COORDINATOR = "coordinator" + +LOGGER = logging.getLogger(__package__) diff --git a/homeassistant/components/tile/device_tracker.py b/homeassistant/components/tile/device_tracker.py index 6cfe6121ccb..5b0065b2c4e 100644 --- a/homeassistant/components/tile/device_tracker.py +++ b/homeassistant/components/tile/device_tracker.py @@ -1,21 +1,15 @@ -"""Support for Tile® Bluetooth trackers.""" -from datetime import timedelta +"""Support for Tile device trackers.""" import logging -from pytile import async_login -from pytile.errors import SessionExpiredError, TileError -import voluptuous as vol +from homeassistant.components.device_tracker.config_entry import TrackerEntity +from homeassistant.components.device_tracker.const import SOURCE_TYPE_GPS +from homeassistant.config_entries import SOURCE_IMPORT +from homeassistant.const import CONF_PASSWORD, CONF_USERNAME +from homeassistant.core import callback -from homeassistant.components.device_tracker import PLATFORM_SCHEMA -from homeassistant.const import CONF_MONITORED_VARIABLES, CONF_PASSWORD, CONF_USERNAME -from homeassistant.helpers import aiohttp_client, config_validation as cv -from homeassistant.helpers.event import async_track_time_interval -from homeassistant.util import slugify -from homeassistant.util.json import load_json, save_json +from . import DATA_COORDINATOR, DOMAIN, TileEntity _LOGGER = logging.getLogger(__name__) -CLIENT_UUID_CONFIG_FILE = ".tile.conf" -DEVICE_TYPES = ["PHONE", "TILE"] ATTR_ALTITUDE = "altitude" ATTR_CONNECTION_STATE = "connection_state" @@ -23,118 +17,114 @@ ATTR_IS_DEAD = "is_dead" ATTR_IS_LOST = "is_lost" ATTR_RING_STATE = "ring_state" ATTR_VOIP_STATE = "voip_state" -ATTR_TILE_ID = "tile_identifier" ATTR_TILE_NAME = "tile_name" -CONF_SHOW_INACTIVE = "show_inactive" -DEFAULT_ICON = "mdi:view-grid" -DEFAULT_SCAN_INTERVAL = timedelta(minutes=2) +async def async_setup_entry(hass, config_entry, async_add_entities): + """Set up Tile device trackers.""" + coordinator = hass.data[DOMAIN][DATA_COORDINATOR][config_entry.entry_id] -PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( - { - vol.Required(CONF_USERNAME): cv.string, - vol.Required(CONF_PASSWORD): cv.string, - vol.Optional(CONF_SHOW_INACTIVE, default=False): cv.boolean, - vol.Optional(CONF_MONITORED_VARIABLES, default=DEVICE_TYPES): vol.All( - cv.ensure_list, [vol.In(DEVICE_TYPES)] - ), - } -) + async_add_entities( + [ + TileDeviceTracker(coordinator, tile_uuid, tile) + for tile_uuid, tile in coordinator.data.items() + ], + True, + ) async def async_setup_scanner(hass, config, async_see, discovery_info=None): - """Validate the configuration and return a Tile scanner.""" - websession = aiohttp_client.async_get_clientsession(hass) - - config_file = hass.config.path( - ".{}{}".format(slugify(config[CONF_USERNAME]), CLIENT_UUID_CONFIG_FILE) - ) - config_data = await hass.async_add_job(load_json, config_file) - if config_data: - client = await async_login( - config[CONF_USERNAME], - config[CONF_PASSWORD], - websession, - client_uuid=config_data["client_uuid"], + """Detect a legacy configuration and import it.""" + hass.async_create_task( + hass.config_entries.flow.async_init( + 
DOMAIN, + context={"source": SOURCE_IMPORT}, + data={ + CONF_USERNAME: config[CONF_USERNAME], + CONF_PASSWORD: config[CONF_PASSWORD], + }, ) - else: - client = await async_login( - config[CONF_USERNAME], config[CONF_PASSWORD], websession - ) - - config_data = {"client_uuid": client.client_uuid} - await hass.async_add_job(save_json, config_file, config_data) - - scanner = TileScanner( - client, - hass, - async_see, - config[CONF_MONITORED_VARIABLES], - config[CONF_SHOW_INACTIVE], ) - return await scanner.async_init() + + _LOGGER.info( + "Your Tile configuration has been imported into the UI; " + "please remove it from configuration.yaml" + ) + + return True -class TileScanner: - """Define an object to retrieve Tile data.""" +class TileDeviceTracker(TileEntity, TrackerEntity): + """Representation of a network infrastructure device.""" - def __init__(self, client, hass, async_see, types, show_inactive): + def __init__(self, coordinator, tile_uuid, tile): """Initialize.""" - self._async_see = async_see - self._client = client - self._hass = hass - self._show_inactive = show_inactive - self._types = types + super().__init__(coordinator) + self._name = tile["name"] + self._tile = tile + self._tile_uuid = tile_uuid + self._unique_id = f"tile_{tile_uuid}" - async def async_init(self): - """Further initialize connection to the Tile servers.""" - try: - await self._client.async_init() - except TileError as err: - _LOGGER.error("Unable to set up Tile scanner: %s", err) - return False + @property + def available(self): + """Return if entity is available.""" + return self.coordinator.last_update_success and not self._tile["is_dead"] - await self._async_update() + @property + def battery_level(self): + """Return the battery level of the device. - async_track_time_interval(self._hass, self._async_update, DEFAULT_SCAN_INTERVAL) + Percentage from 0-100. + """ + return None - return True + @property + def location_accuracy(self): + """Return the location accuracy of the device. - async def _async_update(self, now=None): - """Update info from Tile.""" - try: - await self._client.async_init() - tiles = await self._client.tiles.all( - whitelist=self._types, show_inactive=self._show_inactive + Value in meters. 
+ """ + state = self._tile["last_tile_state"] + h_accuracy = state.get("h_accuracy") + v_accuracy = state.get("v_accuracy") + + if h_accuracy is not None and v_accuracy is not None: + return round( + ( + self._tile["last_tile_state"]["h_accuracy"] + + self._tile["last_tile_state"]["v_accuracy"] + ) + / 2 ) - except SessionExpiredError: - _LOGGER.info("Session expired; trying again shortly") - return - except TileError as err: - _LOGGER.error("There was an error while updating: %s", err) - return - if not tiles: - _LOGGER.warning("No Tiles found") - return + if h_accuracy is not None: + return h_accuracy - for tile in tiles: - await self._async_see( - dev_id="tile_{}".format(slugify(tile["tile_uuid"])), - gps=( - tile["last_tile_state"]["latitude"], - tile["last_tile_state"]["longitude"], - ), - attributes={ - ATTR_ALTITUDE: tile["last_tile_state"]["altitude"], - ATTR_CONNECTION_STATE: tile["last_tile_state"]["connection_state"], - ATTR_IS_DEAD: tile["is_dead"], - ATTR_IS_LOST: tile["last_tile_state"]["is_lost"], - ATTR_RING_STATE: tile["last_tile_state"]["ring_state"], - ATTR_VOIP_STATE: tile["last_tile_state"]["voip_state"], - ATTR_TILE_ID: tile["tile_uuid"], - ATTR_TILE_NAME: tile["name"], - }, - icon=DEFAULT_ICON, - ) + if v_accuracy is not None: + return v_accuracy + + return None + + @property + def latitude(self) -> float: + """Return latitude value of the device.""" + return self._tile["last_tile_state"]["latitude"] + + @property + def longitude(self) -> float: + """Return longitude value of the device.""" + return self._tile["last_tile_state"]["longitude"] + + @property + def source_type(self): + """Return the source type, eg gps or router, of the device.""" + return SOURCE_TYPE_GPS + + @callback + def _update_from_latest_data(self): + """Update the entity from the latest data.""" + self._tile = self.coordinator.data[self._tile_uuid] + self._attrs[ATTR_ALTITUDE] = self._tile["last_tile_state"]["altitude"] + self._attrs[ATTR_IS_LOST] = self._tile["last_tile_state"]["is_lost"] + self._attrs[ATTR_RING_STATE] = self._tile["last_tile_state"]["ring_state"] + self._attrs[ATTR_VOIP_STATE] = self._tile["last_tile_state"]["voip_state"] diff --git a/homeassistant/components/tile/manifest.json b/homeassistant/components/tile/manifest.json index 553c1e50823..a43a0e229c2 100644 --- a/homeassistant/components/tile/manifest.json +++ b/homeassistant/components/tile/manifest.json @@ -1,7 +1,8 @@ { "domain": "tile", "name": "Tile", + "config_flow": true, "documentation": "https://www.home-assistant.io/integrations/tile", - "requirements": ["pytile==3.0.1"], + "requirements": ["pytile==3.0.6"], "codeowners": ["@bachya"] } diff --git a/homeassistant/components/tile/strings.json b/homeassistant/components/tile/strings.json new file mode 100644 index 00000000000..8a1ee9660d9 --- /dev/null +++ b/homeassistant/components/tile/strings.json @@ -0,0 +1,29 @@ +{ + "config": { + "step": { + "user": { + "title": "Configure Tile", + "data": { + "username": "[%key:common::config_flow::data::email%]", + "password": "[%key:common::config_flow::data::password%]" + } + } + }, + "error": { + "invalid_credentials": "Invalid Tile credentials provided." + }, + "abort": { + "already_configured": "This Tile account is already registered." 
+ } + }, + "options": { + "step": { + "init": { + "title": "Configure Tile", + "data": { + "show_inactive": "Show inactive Tiles" + } + } + } + } +} diff --git a/homeassistant/components/tile/translations/fr.json b/homeassistant/components/tile/translations/fr.json index 3451efee63b..a23d3c2d4d7 100644 --- a/homeassistant/components/tile/translations/fr.json +++ b/homeassistant/components/tile/translations/fr.json @@ -1,5 +1,11 @@ { "config": { + "abort": { + "already_configured": "Ce compte Tile est d\u00e9j\u00e0 enregistr\u00e9." + }, + "error": { + "invalid_credentials": "Informations d'identification de Tile non valides." + }, "step": { "user": { "title": "Configurer Tile" diff --git a/homeassistant/components/doorbird/translations/da.json b/homeassistant/components/tile/translations/pl.json similarity index 56% rename from homeassistant/components/doorbird/translations/da.json rename to homeassistant/components/tile/translations/pl.json index 3e66091d851..b8b737c37a3 100644 --- a/homeassistant/components/doorbird/translations/da.json +++ b/homeassistant/components/tile/translations/pl.json @@ -3,8 +3,8 @@ "step": { "user": { "data": { - "password": "Adgangskode", - "username": "Brugernavn" + "password": "Has\u0142o", + "username": "Nazwa u\u017cytkownika" } } } diff --git a/homeassistant/components/toon/__init__.py b/homeassistant/components/toon/__init__.py index b970ed2221b..bdfe8e35c74 100644 --- a/homeassistant/components/toon/__init__.py +++ b/homeassistant/components/toon/__init__.py @@ -1,289 +1,162 @@ """Support for Toon van Eneco devices.""" -from functools import partial +import asyncio import logging -from typing import Any, Dict -from toonapilib import Toon import voluptuous as vol +from homeassistant.components.binary_sensor import DOMAIN as BINARY_SENSOR_DOMAIN +from homeassistant.components.climate import DOMAIN as CLIMATE_DOMAIN +from homeassistant.components.sensor import DOMAIN as SENSOR_DOMAIN +from homeassistant.components.switch import DOMAIN as SWITCH_DOMAIN +from homeassistant.config_entries import SOURCE_IMPORT, ConfigEntry from homeassistant.const import ( CONF_CLIENT_ID, CONF_CLIENT_SECRET, - CONF_PASSWORD, CONF_SCAN_INTERVAL, - CONF_USERNAME, + EVENT_HOMEASSISTANT_STARTED, ) -from homeassistant.core import callback +from homeassistant.core import CoreState, HomeAssistant +from homeassistant.exceptions import ConfigEntryNotReady from homeassistant.helpers import config_validation as cv, device_registry as dr -from homeassistant.helpers.dispatcher import async_dispatcher_connect, dispatcher_send -from homeassistant.helpers.entity import Entity -from homeassistant.helpers.event import async_track_time_interval -from homeassistant.helpers.typing import ConfigType, HomeAssistantType - -from . 
import config_flow # noqa: F401 -from .const import ( - CONF_DISPLAY, - CONF_TENANT, - DATA_TOON, - DATA_TOON_CLIENT, - DATA_TOON_CONFIG, - DATA_TOON_UPDATED, - DEFAULT_SCAN_INTERVAL, - DOMAIN, +from homeassistant.helpers.config_entry_oauth2_flow import ( + OAuth2Session, + async_get_config_entry_implementation, ) +from homeassistant.helpers.typing import ConfigType + +from .const import CONF_AGREEMENT_ID, CONF_MIGRATE, DEFAULT_SCAN_INTERVAL, DOMAIN +from .coordinator import ToonDataUpdateCoordinator +from .oauth2 import register_oauth2_implementations + +ENTITY_COMPONENTS = { + BINARY_SENSOR_DOMAIN, + CLIMATE_DOMAIN, + SENSOR_DOMAIN, + SWITCH_DOMAIN, +} _LOGGER = logging.getLogger(__name__) # Validation of the user's configuration CONFIG_SCHEMA = vol.Schema( { - DOMAIN: vol.Schema( - { - vol.Required(CONF_CLIENT_ID): cv.string, - vol.Required(CONF_CLIENT_SECRET): cv.string, - vol.Required( - CONF_SCAN_INTERVAL, default=DEFAULT_SCAN_INTERVAL - ): vol.All(cv.time_period, cv.positive_timedelta), - } + DOMAIN: vol.All( + cv.deprecated(CONF_SCAN_INTERVAL), + vol.Schema( + { + vol.Required(CONF_CLIENT_ID): cv.string, + vol.Required(CONF_CLIENT_SECRET): cv.string, + vol.Optional( + CONF_SCAN_INTERVAL, default=DEFAULT_SCAN_INTERVAL + ): vol.All(cv.time_period, cv.positive_timedelta), + } + ), ) }, extra=vol.ALLOW_EXTRA, ) -SERVICE_SCHEMA = vol.Schema({vol.Optional(CONF_DISPLAY): cv.string}) - -async def async_setup(hass: HomeAssistantType, config: ConfigType) -> bool: +async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: """Set up the Toon components.""" if DOMAIN not in config: return True - conf = config[DOMAIN] + register_oauth2_implementations( + hass, config[DOMAIN][CONF_CLIENT_ID], config[DOMAIN][CONF_CLIENT_SECRET] + ) - # Store config to be used during entry setup - hass.data[DATA_TOON_CONFIG] = conf + hass.async_create_task( + hass.config_entries.flow.async_init(DOMAIN, context={"source": SOURCE_IMPORT}) + ) return True -async def async_setup_entry(hass: HomeAssistantType, entry: ConfigType) -> bool: - """Set up Toon from a config entry.""" - - conf = hass.data.get(DATA_TOON_CONFIG) - - toon = await hass.async_add_executor_job( - partial( - Toon, - entry.data[CONF_USERNAME], - entry.data[CONF_PASSWORD], - conf[CONF_CLIENT_ID], - conf[CONF_CLIENT_SECRET], - tenant_id=entry.data[CONF_TENANT], - display_common_name=entry.data[CONF_DISPLAY], +async def async_migrate_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: + """Handle migration of a previous version config entry.""" + if entry.version == 1: + # There is no usable data in version 1 anymore. + # The integration switched to OAuth and because of this, uses + # different unique identifiers as well. + # Force this by removing the existing entry and trigger a new flow. 
+ hass.async_create_task( + hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_IMPORT}, + data={CONF_MIGRATE: entry.entry_id}, + ) ) - ) - hass.data.setdefault(DATA_TOON_CLIENT, {})[entry.entry_id] = toon + return False - toon_data = await hass.async_add_executor_job(ToonData, hass, entry, toon) - hass.data.setdefault(DATA_TOON, {})[entry.entry_id] = toon_data - async_track_time_interval(hass, toon_data.update, conf[CONF_SCAN_INTERVAL]) + return True + + +async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: + """Set up Toon from a config entry.""" + implementation = await async_get_config_entry_implementation(hass, entry) + session = OAuth2Session(hass, entry, implementation) + + coordinator = ToonDataUpdateCoordinator(hass, entry=entry, session=session) + await coordinator.toon.activate_agreement( + agreement_id=entry.data[CONF_AGREEMENT_ID] + ) + await coordinator.async_refresh() + + if not coordinator.last_update_success: + raise ConfigEntryNotReady + + hass.data.setdefault(DOMAIN, {}) + hass.data[DOMAIN][entry.entry_id] = coordinator # Register device for the Meter Adapter, since it will have no entities. device_registry = await dr.async_get_registry(hass) device_registry.async_get_or_create( config_entry_id=entry.entry_id, - identifiers={(DOMAIN, toon.agreement.id, "meter_adapter")}, + identifiers={ + (DOMAIN, coordinator.data.agreement.agreement_id, "meter_adapter") + }, manufacturer="Eneco", name="Meter Adapter", - via_device=(DOMAIN, toon.agreement.id), + via_device=(DOMAIN, coordinator.data.agreement.agreement_id), ) - def update(call): - """Service call to manually update the data.""" - called_display = call.data.get(CONF_DISPLAY) - for toon_data in hass.data[DATA_TOON].values(): - if ( - called_display and called_display == toon_data.display_name - ) or not called_display: - toon_data.update() - - hass.services.async_register(DOMAIN, "update", update, schema=SERVICE_SCHEMA) - - for component in "binary_sensor", "climate", "sensor": + # Spin up the platforms + for component in ENTITY_COMPONENTS: hass.async_create_task( hass.config_entries.async_forward_entry_setup(entry, component) ) + # If Home Assistant is already in a running state, register the webhook + # immediately, else trigger it after Home Assistant has finished starting. 
+ if hass.state == CoreState.running: + await coordinator.register_webhook() + else: + hass.bus.async_listen_once( + EVENT_HOMEASSISTANT_STARTED, coordinator.register_webhook + ) + return True -class ToonData: - """Communication class for interacting with toonapilib.""" +async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: + """Unload Toon config entry.""" - def __init__(self, hass: HomeAssistantType, entry: ConfigType, toon): - """Initialize the Toon data object.""" - self._hass = hass - self._toon = toon - self._entry = entry - self.agreement = toon.agreement - self.gas = toon.gas - self.power = toon.power - self.solar = toon.solar - self.temperature = toon.temperature - self.thermostat = toon.thermostat - self.thermostat_info = toon.thermostat_info - self.thermostat_state = toon.thermostat_state + # Remove webhooks registration + await hass.data[DOMAIN][entry.entry_id].unregister_webhook() - @property - def display_name(self): - """Return the display connected to.""" - return self._entry.data[CONF_DISPLAY] - - def update(self, now=None): - """Update all Toon data and notify entities.""" - # Ignore the TTL mechanism from client library - # It causes a lots of issues, hence we take control over caching - self._toon._clear_cache() # pylint: disable=protected-access - - # Gather data from client library (single API call) - self.gas = self._toon.gas - self.power = self._toon.power - self.solar = self._toon.solar - self.temperature = self._toon.temperature - self.thermostat = self._toon.thermostat - self.thermostat_info = self._toon.thermostat_info - self.thermostat_state = self._toon.thermostat_state - - # Notify all entities - dispatcher_send(self._hass, DATA_TOON_UPDATED, self._entry.data[CONF_DISPLAY]) - - -class ToonEntity(Entity): - """Defines a base Toon entity.""" - - def __init__(self, toon: ToonData, name: str, icon: str) -> None: - """Initialize the Toon entity.""" - self._name = name - self._state = None - self._icon = icon - self.toon = toon - self._unsub_dispatcher = None - - @property - def name(self) -> str: - """Return the name of the entity.""" - return self._name - - @property - def icon(self) -> str: - """Return the mdi icon of the entity.""" - return self._icon - - @property - def should_poll(self) -> bool: - """Return the polling requirement of the entity.""" - return False - - async def async_added_to_hass(self) -> None: - """Connect to dispatcher listening for entity data notifications.""" - self._unsub_dispatcher = async_dispatcher_connect( - self.hass, DATA_TOON_UPDATED, self._schedule_immediate_update + # Unload entities for this entry/device. 
+ unload_ok = all( + await asyncio.gather( + *( + hass.config_entries.async_forward_entry_unload(entry, component) + for component in ENTITY_COMPONENTS + ) ) + ) - async def async_will_remove_from_hass(self) -> None: - """Disconnect from update signal.""" - self._unsub_dispatcher() + # Cleanup + if unload_ok: + del hass.data[DOMAIN][entry.entry_id] - @callback - def _schedule_immediate_update(self, display_name: str) -> None: - """Schedule an immediate update of the entity.""" - if display_name == self.toon.display_name: - self.async_schedule_update_ha_state(True) - - -class ToonDisplayDeviceEntity(ToonEntity): - """Defines a Toon display device entity.""" - - @property - def device_info(self) -> Dict[str, Any]: - """Return device information about this thermostat.""" - agreement = self.toon.agreement - model = agreement.display_hardware_version.rpartition("/")[0] - sw_version = agreement.display_software_version.rpartition("/")[-1] - return { - "identifiers": {(DOMAIN, agreement.id)}, - "name": "Toon Display", - "manufacturer": "Eneco", - "model": model, - "sw_version": sw_version, - } - - -class ToonElectricityMeterDeviceEntity(ToonEntity): - """Defines a Electricity Meter device entity.""" - - @property - def device_info(self) -> Dict[str, Any]: - """Return device information about this entity.""" - return { - "name": "Electricity Meter", - "identifiers": {(DOMAIN, self.toon.agreement.id, "electricity")}, - "via_device": (DOMAIN, self.toon.agreement.id, "meter_adapter"), - } - - -class ToonGasMeterDeviceEntity(ToonEntity): - """Defines a Gas Meter device entity.""" - - @property - def device_info(self) -> Dict[str, Any]: - """Return device information about this entity.""" - via_device = "meter_adapter" - if self.toon.gas.is_smart: - via_device = "electricity" - - return { - "name": "Gas Meter", - "identifiers": {(DOMAIN, self.toon.agreement.id, "gas")}, - "via_device": (DOMAIN, self.toon.agreement.id, via_device), - } - - -class ToonSolarDeviceEntity(ToonEntity): - """Defines a Solar Device device entity.""" - - @property - def device_info(self) -> Dict[str, Any]: - """Return device information about this entity.""" - return { - "name": "Solar Panels", - "identifiers": {(DOMAIN, self.toon.agreement.id, "solar")}, - "via_device": (DOMAIN, self.toon.agreement.id, "meter_adapter"), - } - - -class ToonBoilerModuleDeviceEntity(ToonEntity): - """Defines a Boiler Module device entity.""" - - @property - def device_info(self) -> Dict[str, Any]: - """Return device information about this entity.""" - return { - "name": "Boiler Module", - "manufacturer": "Eneco", - "identifiers": {(DOMAIN, self.toon.agreement.id, "boiler_module")}, - "via_device": (DOMAIN, self.toon.agreement.id), - } - - -class ToonBoilerDeviceEntity(ToonEntity): - """Defines a Boiler device entity.""" - - @property - def device_info(self) -> Dict[str, Any]: - """Return device information about this entity.""" - return { - "name": "Boiler", - "identifiers": {(DOMAIN, self.toon.agreement.id, "boiler")}, - "via_device": (DOMAIN, self.toon.agreement.id, "boiler_module"), - } + return unload_ok diff --git a/homeassistant/components/toon/binary_sensor.py b/homeassistant/components/toon/binary_sensor.py index 500cbec1526..135b25dddff 100644 --- a/homeassistant/components/toon/binary_sensor.py +++ b/homeassistant/components/toon/binary_sensor.py @@ -1,20 +1,29 @@ """Support for Toon binary sensors.""" - import logging -from typing import Any +from typing import Optional from homeassistant.components.binary_sensor import 
BinarySensorEntity from homeassistant.config_entries import ConfigEntry from homeassistant.helpers.typing import HomeAssistantType -from . import ( +from .const import ( + ATTR_DEFAULT_ENABLED, + ATTR_DEVICE_CLASS, + ATTR_ICON, + ATTR_INVERTED, + ATTR_MEASUREMENT, + ATTR_NAME, + ATTR_SECTION, + BINARY_SENSOR_ENTITIES, + DOMAIN, +) +from .coordinator import ToonDataUpdateCoordinator +from .models import ( ToonBoilerDeviceEntity, ToonBoilerModuleDeviceEntity, - ToonData, ToonDisplayDeviceEntity, ToonEntity, ) -from .const import DATA_TOON, DOMAIN _LOGGER = logging.getLogger(__name__) @@ -23,87 +32,27 @@ async def async_setup_entry( hass: HomeAssistantType, entry: ConfigEntry, async_add_entities ) -> None: """Set up a Toon binary sensor based on a config entry.""" - toon = hass.data[DATA_TOON][entry.entry_id] + coordinator = hass.data[DOMAIN][entry.entry_id] sensors = [ ToonBoilerModuleBinarySensor( - toon, - "thermostat_info", - "boiler_connected", - None, - "Boiler Module Connection", - "mdi:check-network-outline", - "connectivity", - ), - ToonDisplayBinarySensor( - toon, - "thermostat_info", - "active_state", - 4, - "Toon Holiday Mode", - "mdi:airport", - None, - ), - ToonDisplayBinarySensor( - toon, - "thermostat_info", - "next_program", - None, - "Toon Program", - "mdi:calendar-clock", - None, + coordinator, key="thermostat_info_boiler_connected_None" ), + ToonDisplayBinarySensor(coordinator, key="thermostat_program_overridden"), ] - if toon.thermostat_info.have_ot_boiler: + if coordinator.data.thermostat.have_opentherm_boiler: sensors.extend( [ - ToonBoilerBinarySensor( - toon, - "thermostat_info", - "ot_communication_error", - "0", - "OpenTherm Connection", - "mdi:check-network-outline", - "connectivity", - ), - ToonBoilerBinarySensor( - toon, - "thermostat_info", - "error_found", - 255, - "Boiler Status", - "mdi:alert", - "problem", - inverted=True, - ), - ToonBoilerBinarySensor( - toon, - "thermostat_info", - "burner_info", - None, - "Boiler Burner", - "mdi:fire", - None, - ), - ToonBoilerBinarySensor( - toon, - "thermostat_info", - "burner_info", - "2", - "Hot Tap Water", - "mdi:water-pump", - None, - ), - ToonBoilerBinarySensor( - toon, - "thermostat_info", - "burner_info", - "3", - "Boiler Preheating", - "mdi:fire", - None, - ), + ToonBoilerBinarySensor(coordinator, key=key) + for key in [ + "thermostat_info_ot_communication_error_0", + "thermostat_info_error_found_255", + "thermostat_info_burner_info_None", + "thermostat_info_burner_info_1", + "thermostat_info_burner_info_2", + "thermostat_info_burner_info_3", + ] ] ) @@ -113,66 +62,46 @@ async def async_setup_entry( class ToonBinarySensor(ToonEntity, BinarySensorEntity): """Defines an Toon binary sensor.""" - def __init__( - self, - toon: ToonData, - section: str, - measurement: str, - on_value: Any, - name: str, - icon: str, - device_class: str, - inverted: bool = False, - ) -> None: + def __init__(self, coordinator: ToonDataUpdateCoordinator, *, key: str) -> None: """Initialize the Toon sensor.""" - self._state = inverted - self._device_class = device_class - self.section = section - self.measurement = measurement - self.on_value = on_value - self.inverted = inverted + self.key = key - super().__init__(toon, name, icon) + super().__init__( + coordinator, + enabled_default=BINARY_SENSOR_ENTITIES[key][ATTR_DEFAULT_ENABLED], + icon=BINARY_SENSOR_ENTITIES[key][ATTR_ICON], + name=BINARY_SENSOR_ENTITIES[key][ATTR_NAME], + ) @property def unique_id(self) -> str: """Return the unique ID for this binary sensor.""" - return "_".join( - 
[ - DOMAIN, - self.toon.agreement.id, - "binary_sensor", - self.section, - self.measurement, - str(self.on_value), - ] - ) + agreement_id = self.coordinator.data.agreement.agreement_id + # This unique ID is a bit ugly and contains unneeded information. + # It is here for legacy / backward compatible reasons. + return f"{DOMAIN}_{agreement_id}_binary_sensor_{self.key}" @property def device_class(self) -> str: """Return the device class.""" - return self._device_class + return BINARY_SENSOR_ENTITIES[self.key][ATTR_DEVICE_CLASS] @property - def is_on(self) -> bool: + def is_on(self) -> Optional[bool]: """Return the status of the binary sensor.""" - if self.on_value is not None: - value = self._state == self.on_value - elif self._state is None: - value = False - else: - value = bool(max(0, int(self._state))) + section = getattr( + self.coordinator.data, BINARY_SENSOR_ENTITIES[self.key][ATTR_SECTION] + ) + value = getattr(section, BINARY_SENSOR_ENTITIES[self.key][ATTR_MEASUREMENT]) - if self.inverted: + if value is None: + return None + + if BINARY_SENSOR_ENTITIES[self.key][ATTR_INVERTED]: return not value return value - def update(self) -> None: - """Get the latest data from the binary sensor.""" - section = getattr(self.toon, self.section) - self._state = getattr(section, self.measurement) - class ToonBoilerBinarySensor(ToonBinarySensor, ToonBoilerDeviceEntity): """Defines a Boiler binary sensor.""" diff --git a/homeassistant/components/toon/climate.py b/homeassistant/components/toon/climate.py index f3c3d9a69bf..06f64262d2b 100644 --- a/homeassistant/components/toon/climate.py +++ b/homeassistant/components/toon/climate.py @@ -1,8 +1,14 @@ """Support for Toon thermostat.""" - import logging from typing import Any, Dict, List, Optional +from toonapi import ( + ACTIVE_STATE_AWAY, + ACTIVE_STATE_COMFORT, + ACTIVE_STATE_HOME, + ACTIVE_STATE_SLEEP, +) + from homeassistant.components.climate import ClimateEntity from homeassistant.components.climate.const import ( CURRENT_HVAC_HEAT, @@ -19,56 +25,38 @@ from homeassistant.config_entries import ConfigEntry from homeassistant.const import ATTR_TEMPERATURE, TEMP_CELSIUS from homeassistant.helpers.typing import HomeAssistantType -from . 
import ToonData, ToonDisplayDeviceEntity -from .const import ( - DATA_TOON, - DATA_TOON_CLIENT, - DEFAULT_MAX_TEMP, - DEFAULT_MIN_TEMP, - DOMAIN, -) +from .const import DEFAULT_MAX_TEMP, DEFAULT_MIN_TEMP, DOMAIN +from .helpers import toon_exception_handler +from .models import ToonDisplayDeviceEntity _LOGGER = logging.getLogger(__name__) -SUPPORT_FLAGS = SUPPORT_TARGET_TEMPERATURE | SUPPORT_PRESET_MODE -SUPPORT_PRESET = [PRESET_AWAY, PRESET_COMFORT, PRESET_HOME, PRESET_SLEEP] - async def async_setup_entry( hass: HomeAssistantType, entry: ConfigEntry, async_add_entities ) -> None: """Set up a Toon binary sensors based on a config entry.""" - toon_client = hass.data[DATA_TOON_CLIENT][entry.entry_id] - toon_data = hass.data[DATA_TOON][entry.entry_id] - async_add_entities([ToonThermostatDevice(toon_client, toon_data)], True) + coordinator = hass.data[DOMAIN][entry.entry_id] + async_add_entities( + [ToonThermostatDevice(coordinator, name="Thermostat", icon="mdi:thermostat")] + ) class ToonThermostatDevice(ToonDisplayDeviceEntity, ClimateEntity): """Representation of a Toon climate device.""" - def __init__(self, toon_client, toon_data: ToonData) -> None: - """Initialize the Toon climate device.""" - self._client = toon_client - - self._current_temperature = None - self._target_temperature = None - self._heating = False - self._next_target_temperature = None - self._preset = None - - self._heating_type = None - - super().__init__(toon_data, "Toon Thermostat", "mdi:thermostat") - @property def unique_id(self) -> str: """Return the unique ID for this thermostat.""" - return "_".join([DOMAIN, self.toon.agreement.id, "climate"]) + agreement_id = self.coordinator.data.agreement.agreement_id + # This unique ID is a bit ugly and contains unneeded information. + # It is here for legacy / backward compatible reasons. 
+ return f"{DOMAIN}_{agreement_id}_climate" @property def supported_features(self) -> int: """Return the list of supported features.""" - return SUPPORT_FLAGS + return SUPPORT_TARGET_TEMPERATURE | SUPPORT_PRESET_MODE @property def hvac_mode(self) -> str: @@ -83,7 +71,7 @@ class ToonThermostatDevice(ToonDisplayDeviceEntity, ClimateEntity): @property def hvac_action(self) -> Optional[str]: """Return the current running hvac operation.""" - if self._heating: + if self.coordinator.data.thermostat.heating: return CURRENT_HVAC_HEAT return CURRENT_HVAC_IDLE @@ -95,24 +83,28 @@ class ToonThermostatDevice(ToonDisplayDeviceEntity, ClimateEntity): @property def preset_mode(self) -> Optional[str]: """Return the current preset mode, e.g., home, away, temp.""" - if self._preset is not None: - return self._preset.lower() - return None + mapping = { + ACTIVE_STATE_AWAY: PRESET_AWAY, + ACTIVE_STATE_COMFORT: PRESET_COMFORT, + ACTIVE_STATE_HOME: PRESET_HOME, + ACTIVE_STATE_SLEEP: PRESET_SLEEP, + } + return mapping.get(self.coordinator.data.thermostat.active_state) @property def preset_modes(self) -> List[str]: """Return a list of available preset modes.""" - return SUPPORT_PRESET + return [PRESET_AWAY, PRESET_COMFORT, PRESET_HOME, PRESET_SLEEP] @property def current_temperature(self) -> Optional[float]: """Return the current temperature.""" - return self._current_temperature + return self.coordinator.data.thermostat.current_display_temperature @property def target_temperature(self) -> Optional[float]: """Return the temperature we try to reach.""" - return self._target_temperature + return self.coordinator.data.thermostat.current_setpoint @property def min_temp(self) -> float: @@ -127,30 +119,27 @@ class ToonThermostatDevice(ToonDisplayDeviceEntity, ClimateEntity): @property def device_state_attributes(self) -> Dict[str, Any]: """Return the current state of the burner.""" - return {"heating_type": self._heating_type} + return {"heating_type": self.coordinator.data.agreement.heating_type} - def set_temperature(self, **kwargs) -> None: + @toon_exception_handler + async def async_set_temperature(self, **kwargs) -> None: """Change the setpoint of the thermostat.""" temperature = kwargs.get(ATTR_TEMPERATURE) - self._client.thermostat = self._target_temperature = temperature - self.schedule_update_ha_state() + await self.coordinator.toon.set_current_setpoint(temperature) - def set_preset_mode(self, preset_mode: str) -> None: + @toon_exception_handler + async def async_set_preset_mode(self, preset_mode: str) -> None: """Set new preset mode.""" - self._client.thermostat_state = self._preset = preset_mode - self.schedule_update_ha_state() + mapping = { + PRESET_AWAY: ACTIVE_STATE_AWAY, + PRESET_COMFORT: ACTIVE_STATE_COMFORT, + PRESET_HOME: ACTIVE_STATE_HOME, + PRESET_SLEEP: ACTIVE_STATE_SLEEP, + } + if preset_mode in mapping: + await self.coordinator.toon.set_active_state(mapping[preset_mode]) def set_hvac_mode(self, hvac_mode: str) -> None: """Set new target hvac mode.""" - - def update(self) -> None: - """Update local state.""" - if self.toon.thermostat_state is None: - self._preset = None - else: - self._preset = self.toon.thermostat_state.name - - self._current_temperature = self.toon.temperature - self._target_temperature = self.toon.thermostat - self._heating_type = self.toon.agreement.heating_type - self._heating = self.toon.thermostat_info.burner_info == 1 + # Intentionally left empty + # The HVAC mode is always HEAT diff --git a/homeassistant/components/toon/config_flow.py 
b/homeassistant/components/toon/config_flow.py index b584b7bd6cb..d1de68ef0b8 100644 --- a/homeassistant/components/toon/config_flow.py +++ b/homeassistant/components/toon/config_flow.py @@ -1,166 +1,103 @@ """Config flow to configure the Toon component.""" -from collections import OrderedDict -from functools import partial import logging +from typing import Any, Dict, List, Optional -from toonapilib import Toon -from toonapilib.toonapilibexceptions import ( - AgreementsRetrievalError, - InvalidConsumerKey, - InvalidConsumerSecret, - InvalidCredentials, -) +from toonapi import Agreement, Toon, ToonError import voluptuous as vol from homeassistant import config_entries -from homeassistant.const import ( - CONF_CLIENT_ID, - CONF_CLIENT_SECRET, - CONF_PASSWORD, - CONF_USERNAME, -) -from homeassistant.core import callback +from homeassistant.helpers.aiohttp_client import async_get_clientsession +from homeassistant.helpers.config_entry_oauth2_flow import AbstractOAuth2FlowHandler -from .const import CONF_DISPLAY, CONF_TENANT, DATA_TOON_CONFIG, DOMAIN - -_LOGGER = logging.getLogger(__name__) +from .const import CONF_AGREEMENT, CONF_AGREEMENT_ID, CONF_MIGRATE, DOMAIN -@callback -def configured_displays(hass): - """Return a set of configured Toon displays.""" - return { - entry.data[CONF_DISPLAY] for entry in hass.config_entries.async_entries(DOMAIN) - } - - -@config_entries.HANDLERS.register(DOMAIN) -class ToonFlowHandler(config_entries.ConfigFlow): +class ToonFlowHandler(AbstractOAuth2FlowHandler, domain=DOMAIN): """Handle a Toon config flow.""" - VERSION = 1 - CONNECTION_CLASS = config_entries.CONN_CLASS_CLOUD_POLL + CONNECTION_CLASS = config_entries.CONN_CLASS_CLOUD_PUSH + DOMAIN = DOMAIN + VERSION = 2 - def __init__(self): - """Initialize the Toon flow.""" - self.displays = None - self.username = None - self.password = None - self.tenant = None + agreements: Optional[List[Agreement]] = None + data: Optional[Dict[str, Any]] = None - async def async_step_user(self, user_input=None): - """Handle a flow initiated by the user.""" - app = self.hass.data.get(DATA_TOON_CONFIG, {}) + @property + def logger(self) -> logging.Logger: + """Return logger.""" + return logging.getLogger(__name__) - if not app: - return self.async_abort(reason="no_app") + async def async_oauth_create_entry(self, data: Dict[str, Any]) -> Dict[str, Any]: + """Test connection and load up agreements.""" + self.data = data - return await self.async_step_authenticate(user_input) - - async def _show_authenticaticate_form(self, errors=None): - """Show the authentication form to the user.""" - fields = OrderedDict() - fields[vol.Required(CONF_USERNAME)] = str - fields[vol.Required(CONF_PASSWORD)] = str - fields[vol.Optional(CONF_TENANT)] = vol.In(["eneco", "electrabel", "viesgo"]) - - return self.async_show_form( - step_id="authenticate", - data_schema=vol.Schema(fields), - errors=errors if errors else {}, + toon = Toon( + token=self.data["token"]["access_token"], + session=async_get_clientsession(self.hass), ) - - async def async_step_authenticate(self, user_input=None): - """Attempt to authenticate with the Toon account.""" - - if user_input is None: - return await self._show_authenticaticate_form() - - app = self.hass.data.get(DATA_TOON_CONFIG, {}) try: - toon = await self.hass.async_add_executor_job( - partial( - Toon, - user_input[CONF_USERNAME], - user_input[CONF_PASSWORD], - app[CONF_CLIENT_ID], - app[CONF_CLIENT_SECRET], - tenant_id=user_input[CONF_TENANT], - ) - ) + self.agreements = await toon.agreements() + except 
ToonError: + return self.async_abort(reason="connection_error") - displays = toon.display_names - - except InvalidConsumerKey: - return self.async_abort(reason=CONF_CLIENT_ID) - - except InvalidConsumerSecret: - return self.async_abort(reason=CONF_CLIENT_SECRET) - - except InvalidCredentials: - return await self._show_authenticaticate_form({"base": "credentials"}) - - except AgreementsRetrievalError: + if not self.agreements: return self.async_abort(reason="no_agreements") - except Exception: # pylint: disable=broad-except - _LOGGER.exception("Unexpected error while authenticating") - return self.async_abort(reason="unknown_auth_fail") + return await self.async_step_agreement() - self.displays = displays - self.username = user_input[CONF_USERNAME] - self.password = user_input[CONF_PASSWORD] - self.tenant = user_input[CONF_TENANT] + async def async_step_import( + self, config: Optional[Dict[str, Any]] = None + ) -> Dict[str, Any]: + """Start a configuration flow based on imported data. - return await self.async_step_display() + This step is merely here to trigger "discovery" when the `toon` + integration is listed in the user configuration, or when migrating from + the version 1 schema. + """ - async def _show_display_form(self, errors=None): - """Show the select display form to the user.""" - fields = OrderedDict() - fields[vol.Required(CONF_DISPLAY)] = vol.In(self.displays) + if config is not None and CONF_MIGRATE in config: + # pylint: disable=no-member # https://github.com/PyCQA/pylint/issues/3167 + self.context.update({CONF_MIGRATE: config[CONF_MIGRATE]}) + else: + await self._async_handle_discovery_without_unique_id() - return self.async_show_form( - step_id="display", - data_schema=vol.Schema(fields), - errors=errors if errors else {}, - ) + return await self.async_step_user() - async def async_step_display(self, user_input=None): - """Select Toon display to add.""" + async def async_step_agreement( + self, user_input: Dict[str, Any] = None + ) -> Dict[str, Any]: + """Select Toon agreement to add.""" + if len(self.agreements) == 1: + return await self._create_entry(self.agreements[0]) - if not self.displays: - return self.async_abort(reason="no_displays") + agreements_list = [ + f"{agreement.street} {agreement.house_number}, {agreement.city}" + for agreement in self.agreements + ] if user_input is None: - return await self._show_display_form() - - if user_input[CONF_DISPLAY] in configured_displays(self.hass): - return await self._show_display_form({"base": "display_exists"}) - - app = self.hass.data.get(DATA_TOON_CONFIG, {}) - try: - await self.hass.async_add_executor_job( - partial( - Toon, - self.username, - self.password, - app[CONF_CLIENT_ID], - app[CONF_CLIENT_SECRET], - tenant_id=self.tenant, - display_common_name=user_input[CONF_DISPLAY], - ) + return self.async_show_form( + step_id="agreement", + data_schema=vol.Schema( + {vol.Required(CONF_AGREEMENT): vol.In(agreements_list)} + ), ) - except Exception: # pylint: disable=broad-except - _LOGGER.exception("Unexpected error while authenticating") - return self.async_abort(reason="unknown_auth_fail") + agreement_index = agreements_list.index(user_input[CONF_AGREEMENT]) + return await self._create_entry(self.agreements[agreement_index]) + async def _create_entry(self, agreement: Agreement) -> Dict[str, Any]: + if ( # pylint: disable=no-member # https://github.com/PyCQA/pylint/issues/3167 + CONF_MIGRATE in self.context + ): + # pylint: disable=no-member # https://github.com/PyCQA/pylint/issues/3167 + await 
self.hass.config_entries.async_remove(self.context[CONF_MIGRATE]) + + await self.async_set_unique_id(agreement.agreement_id) + self._abort_if_unique_id_configured() + + self.data[CONF_AGREEMENT_ID] = agreement.agreement_id return self.async_create_entry( - title=user_input[CONF_DISPLAY], - data={ - CONF_USERNAME: self.username, - CONF_PASSWORD: self.password, - CONF_TENANT: self.tenant, - CONF_DISPLAY: user_input[CONF_DISPLAY], - }, + title=f"{agreement.street} {agreement.house_number}, {agreement.city}", + data=self.data, ) diff --git a/homeassistant/components/toon/const.py b/homeassistant/components/toon/const.py index 5f26035065e..f017d0ae756 100644 --- a/homeassistant/components/toon/const.py +++ b/homeassistant/components/toon/const.py @@ -1,15 +1,27 @@ """Constants for the Toon integration.""" from datetime import timedelta +from homeassistant.components.binary_sensor import ( + DEVICE_CLASS_CONNECTIVITY, + DEVICE_CLASS_PROBLEM, +) +from homeassistant.components.sensor import DEVICE_CLASS_POWER +from homeassistant.const import ( + ATTR_DEVICE_CLASS, + ATTR_ICON, + ATTR_NAME, + ATTR_UNIT_OF_MEASUREMENT, + ENERGY_KILO_WATT_HOUR, + POWER_WATT, + UNIT_PERCENTAGE, +) + DOMAIN = "toon" -DATA_TOON = "toon" -DATA_TOON_CLIENT = "toon_client" -DATA_TOON_CONFIG = "toon_config" -DATA_TOON_UPDATED = "toon_updated" - -CONF_DISPLAY = "display" -CONF_TENANT = "tenant" +CONF_AGREEMENT = "agreement" +CONF_AGREEMENT_ID = "agreement_id" +CONF_CLOUDHOOK_URL = "cloudhook_url" +CONF_MIGRATE = "migrate" DEFAULT_SCAN_INTERVAL = timedelta(seconds=300) DEFAULT_MAX_TEMP = 30.0 @@ -18,3 +30,321 @@ DEFAULT_MIN_TEMP = 6.0 CURRENCY_EUR = "EUR" VOLUME_CM3 = "CM3" VOLUME_M3 = "M3" + +ATTR_DEFAULT_ENABLED = "default_enabled" +ATTR_INVERTED = "inverted" +ATTR_MEASUREMENT = "measurement" +ATTR_SECTION = "section" + +BINARY_SENSOR_ENTITIES = { + "thermostat_info_boiler_connected_None": { + ATTR_NAME: "Boiler Module Connection", + ATTR_SECTION: "thermostat", + ATTR_MEASUREMENT: "boiler_module_connected", + ATTR_INVERTED: False, + ATTR_DEVICE_CLASS: DEVICE_CLASS_CONNECTIVITY, + ATTR_ICON: "mdi:check-network-outline", + ATTR_DEFAULT_ENABLED: False, + }, + "thermostat_info_burner_info_1": { + ATTR_NAME: "Boiler Heating", + ATTR_SECTION: "thermostat", + ATTR_MEASUREMENT: "heating", + ATTR_INVERTED: False, + ATTR_DEVICE_CLASS: None, + ATTR_ICON: "mdi:fire", + ATTR_DEFAULT_ENABLED: False, + }, + "thermostat_info_burner_info_2": { + ATTR_NAME: "Hot Tap Water", + ATTR_SECTION: "thermostat", + ATTR_MEASUREMENT: "hot_tapwater", + ATTR_INVERTED: False, + ATTR_DEVICE_CLASS: None, + ATTR_ICON: "mdi:water-pump", + ATTR_DEFAULT_ENABLED: True, + }, + "thermostat_info_burner_info_3": { + ATTR_NAME: "Boiler Preheating", + ATTR_SECTION: "thermostat", + ATTR_MEASUREMENT: "pre_heating", + ATTR_INVERTED: False, + ATTR_DEVICE_CLASS: None, + ATTR_ICON: "mdi:fire", + ATTR_DEFAULT_ENABLED: False, + }, + "thermostat_info_burner_info_None": { + ATTR_NAME: "Boiler Burner", + ATTR_SECTION: "thermostat", + ATTR_MEASUREMENT: "burner", + ATTR_INVERTED: False, + ATTR_DEVICE_CLASS: None, + ATTR_ICON: "mdi:fire", + ATTR_DEFAULT_ENABLED: True, + }, + "thermostat_info_error_found_255": { + ATTR_NAME: "Boiler Status", + ATTR_SECTION: "thermostat", + ATTR_MEASUREMENT: "error_found", + ATTR_INVERTED: False, + ATTR_DEVICE_CLASS: DEVICE_CLASS_PROBLEM, + ATTR_ICON: "mdi:alert", + ATTR_DEFAULT_ENABLED: True, + }, + "thermostat_info_ot_communication_error_0": { + ATTR_NAME: "OpenTherm Connection", + ATTR_SECTION: "thermostat", + ATTR_MEASUREMENT: 
"opentherm_communication_error", + ATTR_INVERTED: False, + ATTR_DEVICE_CLASS: DEVICE_CLASS_PROBLEM, + ATTR_ICON: "mdi:check-network-outline", + ATTR_DEFAULT_ENABLED: False, + }, + "thermostat_program_overridden": { + ATTR_NAME: "Thermostat Program Override", + ATTR_SECTION: "thermostat", + ATTR_MEASUREMENT: "program_overridden", + ATTR_INVERTED: False, + ATTR_DEVICE_CLASS: None, + ATTR_ICON: "mdi:gesture-tap", + ATTR_DEFAULT_ENABLED: True, + }, +} + +SENSOR_ENTITIES = { + "gas_average": { + ATTR_NAME: "Average Gas Usage", + ATTR_SECTION: "gas_usage", + ATTR_MEASUREMENT: "average", + ATTR_UNIT_OF_MEASUREMENT: VOLUME_CM3, + ATTR_DEVICE_CLASS: None, + ATTR_ICON: "mdi:gas-cylinder", + ATTR_DEFAULT_ENABLED: True, + }, + "gas_average_daily": { + ATTR_NAME: "Average Daily Gas Usage", + ATTR_SECTION: "gas_usage", + ATTR_MEASUREMENT: "day_average", + ATTR_UNIT_OF_MEASUREMENT: VOLUME_M3, + ATTR_DEVICE_CLASS: None, + ATTR_ICON: "mdi:gas-cylinder", + ATTR_DEFAULT_ENABLED: False, + }, + "gas_daily_usage": { + ATTR_NAME: "Gas Usage Today", + ATTR_SECTION: "gas_usage", + ATTR_MEASUREMENT: "day_usage", + ATTR_UNIT_OF_MEASUREMENT: VOLUME_M3, + ATTR_DEVICE_CLASS: None, + ATTR_ICON: "mdi:gas-cylinder", + ATTR_DEFAULT_ENABLED: True, + }, + "gas_daily_cost": { + ATTR_NAME: "Gas Cost Today", + ATTR_SECTION: "gas_usage", + ATTR_MEASUREMENT: "day_cost", + ATTR_UNIT_OF_MEASUREMENT: CURRENCY_EUR, + ATTR_DEVICE_CLASS: None, + ATTR_ICON: "mdi:gas-cylinder", + ATTR_DEFAULT_ENABLED: True, + }, + "gas_meter_reading": { + ATTR_NAME: "Gas Meter", + ATTR_SECTION: "gas_usage", + ATTR_MEASUREMENT: "meter", + ATTR_UNIT_OF_MEASUREMENT: VOLUME_M3, + ATTR_DEVICE_CLASS: None, + ATTR_ICON: "mdi:gas-cylinder", + ATTR_DEFAULT_ENABLED: False, + }, + "gas_value": { + ATTR_NAME: "Current Gas Usage", + ATTR_SECTION: "gas_usage", + ATTR_MEASUREMENT: "current", + ATTR_UNIT_OF_MEASUREMENT: VOLUME_CM3, + ATTR_DEVICE_CLASS: None, + ATTR_ICON: "mdi:gas-cylinder", + ATTR_DEFAULT_ENABLED: True, + }, + "power_average": { + ATTR_NAME: "Average Power Usage", + ATTR_SECTION: "power_usage", + ATTR_MEASUREMENT: "average", + ATTR_UNIT_OF_MEASUREMENT: POWER_WATT, + ATTR_DEVICE_CLASS: DEVICE_CLASS_POWER, + ATTR_ICON: "mdi:power-plug", + ATTR_DEFAULT_ENABLED: False, + }, + "power_average_daily": { + ATTR_NAME: "Average Daily Energy Usage", + ATTR_SECTION: "power_usage", + ATTR_MEASUREMENT: "day_average", + ATTR_UNIT_OF_MEASUREMENT: ENERGY_KILO_WATT_HOUR, + ATTR_DEVICE_CLASS: None, + ATTR_ICON: "mdi:power-plug", + ATTR_DEFAULT_ENABLED: False, + }, + "power_daily_cost": { + ATTR_NAME: "Energy Cost Today", + ATTR_SECTION: "power_usage", + ATTR_MEASUREMENT: "day_cost", + ATTR_UNIT_OF_MEASUREMENT: CURRENCY_EUR, + ATTR_DEVICE_CLASS: None, + ATTR_ICON: "mdi:power-plug", + ATTR_DEFAULT_ENABLED: True, + }, + "power_daily_value": { + ATTR_NAME: "Energy Usage Today", + ATTR_SECTION: "power_usage", + ATTR_MEASUREMENT: "day_usage", + ATTR_UNIT_OF_MEASUREMENT: ENERGY_KILO_WATT_HOUR, + ATTR_DEVICE_CLASS: None, + ATTR_ICON: "mdi:power-plug", + ATTR_DEFAULT_ENABLED: True, + }, + "power_meter_reading": { + ATTR_NAME: "Electricity Meter Feed IN Tariff 1", + ATTR_SECTION: "power_usage", + ATTR_MEASUREMENT: "meter_high", + ATTR_UNIT_OF_MEASUREMENT: ENERGY_KILO_WATT_HOUR, + ATTR_DEVICE_CLASS: None, + ATTR_ICON: "mdi:power-plug", + ATTR_DEFAULT_ENABLED: False, + }, + "power_meter_reading_low": { + ATTR_NAME: "Electricity Meter Feed IN Tariff 2", + ATTR_SECTION: "power_usage", + ATTR_MEASUREMENT: "meter_high", + ATTR_UNIT_OF_MEASUREMENT: ENERGY_KILO_WATT_HOUR, + 
ATTR_DEVICE_CLASS: None, + ATTR_ICON: "mdi:power-plug", + ATTR_DEFAULT_ENABLED: False, + }, + "power_value": { + ATTR_NAME: "Current Power Usage", + ATTR_SECTION: "power_usage", + ATTR_MEASUREMENT: "current", + ATTR_UNIT_OF_MEASUREMENT: POWER_WATT, + ATTR_DEVICE_CLASS: DEVICE_CLASS_POWER, + ATTR_ICON: "mdi:power-plug", + ATTR_DEFAULT_ENABLED: True, + }, + "solar_meter_reading_produced": { + ATTR_NAME: "Electricity Meter Feed OUT Tariff 1", + ATTR_SECTION: "power_usage", + ATTR_MEASUREMENT: "meter_produced_high", + ATTR_UNIT_OF_MEASUREMENT: ENERGY_KILO_WATT_HOUR, + ATTR_DEVICE_CLASS: None, + ATTR_ICON: "mdi:power-plug", + ATTR_DEFAULT_ENABLED: False, + }, + "solar_meter_reading_low_produced": { + ATTR_NAME: "Electricity Meter Feed OUT Tariff 2", + ATTR_SECTION: "power_usage", + ATTR_MEASUREMENT: "meter_produced_low", + ATTR_UNIT_OF_MEASUREMENT: ENERGY_KILO_WATT_HOUR, + ATTR_DEVICE_CLASS: None, + ATTR_ICON: "mdi:power-plug", + ATTR_DEFAULT_ENABLED: False, + }, + "solar_value": { + ATTR_NAME: "Current Solar Power Production", + ATTR_SECTION: "power_usage", + ATTR_MEASUREMENT: "current_solar", + ATTR_UNIT_OF_MEASUREMENT: POWER_WATT, + ATTR_DEVICE_CLASS: DEVICE_CLASS_POWER, + ATTR_ICON: "mdi:solar-power", + ATTR_DEFAULT_ENABLED: True, + }, + "solar_maximum": { + ATTR_NAME: "Max Solar Power Production Today", + ATTR_SECTION: "power_usage", + ATTR_MEASUREMENT: "day_max_solar", + ATTR_UNIT_OF_MEASUREMENT: POWER_WATT, + ATTR_DEVICE_CLASS: None, + ATTR_ICON: "mdi:solar-power", + ATTR_DEFAULT_ENABLED: True, + }, + "solar_produced": { + ATTR_NAME: "Solar Power Production to Grid", + ATTR_SECTION: "power_usage", + ATTR_MEASUREMENT: "current_produced", + ATTR_UNIT_OF_MEASUREMENT: POWER_WATT, + ATTR_DEVICE_CLASS: DEVICE_CLASS_POWER, + ATTR_ICON: "mdi:solar-power", + ATTR_DEFAULT_ENABLED: True, + }, + "power_usage_day_produced_solar": { + ATTR_NAME: "Solar Energy Produced Today", + ATTR_SECTION: "power_usage", + ATTR_MEASUREMENT: "day_produced_solar", + ATTR_UNIT_OF_MEASUREMENT: ENERGY_KILO_WATT_HOUR, + ATTR_DEVICE_CLASS: None, + ATTR_ICON: "mdi:solar-power", + ATTR_DEFAULT_ENABLED: True, + }, + "power_usage_day_to_grid_usage": { + ATTR_NAME: "Energy Produced To Grid Today", + ATTR_SECTION: "power_usage", + ATTR_MEASUREMENT: "day_to_grid_usage", + ATTR_UNIT_OF_MEASUREMENT: ENERGY_KILO_WATT_HOUR, + ATTR_DEVICE_CLASS: None, + ATTR_ICON: "mdi:solar-power", + ATTR_DEFAULT_ENABLED: False, + }, + "power_usage_day_from_grid_usage": { + ATTR_NAME: "Energy Usage From Grid Today", + ATTR_SECTION: "power_usage", + ATTR_MEASUREMENT: "day_from_grid_usage", + ATTR_UNIT_OF_MEASUREMENT: ENERGY_KILO_WATT_HOUR, + ATTR_DEVICE_CLASS: None, + ATTR_ICON: "mdi:power-plug", + ATTR_DEFAULT_ENABLED: False, + }, + "solar_average_produced": { + ATTR_NAME: "Average Solar Power Production to Grid", + ATTR_SECTION: "power_usage", + ATTR_MEASUREMENT: "average_produced", + ATTR_UNIT_OF_MEASUREMENT: POWER_WATT, + ATTR_DEVICE_CLASS: DEVICE_CLASS_POWER, + ATTR_ICON: "mdi:solar-power", + ATTR_DEFAULT_ENABLED: False, + }, + "thermostat_info_current_modulation_level": { + ATTR_NAME: "Boiler Modulation Level", + ATTR_SECTION: "thermostat", + ATTR_MEASUREMENT: "current_modulation_level", + ATTR_UNIT_OF_MEASUREMENT: UNIT_PERCENTAGE, + ATTR_DEVICE_CLASS: None, + ATTR_ICON: "mdi:percent", + ATTR_DEFAULT_ENABLED: False, + }, + "power_usage_current_covered_by_solar": { + ATTR_NAME: "Current Power Usage Covered By Solar", + ATTR_SECTION: "power_usage", + ATTR_MEASUREMENT: "current_covered_by_solar", + ATTR_UNIT_OF_MEASUREMENT: UNIT_PERCENTAGE, + 
ATTR_DEVICE_CLASS: None, + ATTR_ICON: "mdi:solar-power", + ATTR_DEFAULT_ENABLED: True, + }, +} + +SWITCH_ENTITIES = { + "thermostat_holiday_mode": { + ATTR_NAME: "Holiday Mode", + ATTR_SECTION: "thermostat", + ATTR_MEASUREMENT: "holiday_mode", + ATTR_INVERTED: False, + ATTR_ICON: "mdi:airport", + ATTR_DEFAULT_ENABLED: True, + }, + "thermostat_program": { + ATTR_NAME: "Thermostat Program", + ATTR_SECTION: "thermostat", + ATTR_MEASUREMENT: "program", + ATTR_INVERTED: False, + ATTR_ICON: "mdi:calendar-clock", + ATTR_DEFAULT_ENABLED: True, + }, +} diff --git a/homeassistant/components/toon/coordinator.py b/homeassistant/components/toon/coordinator.py new file mode 100644 index 00000000000..8e9722316e2 --- /dev/null +++ b/homeassistant/components/toon/coordinator.py @@ -0,0 +1,141 @@ +"""Provides the Toon DataUpdateCoordinator.""" +import logging +import secrets +from typing import Optional + +from toonapi import Status, Toon, ToonError + +from homeassistant.components.webhook import ( + async_register as webhook_register, + async_unregister as webhook_unregister, +) +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import CONF_WEBHOOK_ID, EVENT_HOMEASSISTANT_STOP +from homeassistant.core import Event, HomeAssistant +from homeassistant.helpers.aiohttp_client import async_get_clientsession +from homeassistant.helpers.config_entry_oauth2_flow import OAuth2Session +from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed + +from .const import CONF_CLOUDHOOK_URL, DEFAULT_SCAN_INTERVAL, DOMAIN + +_LOGGER = logging.getLogger(__name__) + + +class ToonDataUpdateCoordinator(DataUpdateCoordinator): + """Class to manage fetching Toon data from a single endpoint.""" + + def __init__( + self, hass: HomeAssistant, *, entry: ConfigEntry, session: OAuth2Session + ): + """Initialize global Toon data updater.""" + self.session = session + self.entry = entry + + async def async_token_refresh() -> str: + await session.async_ensure_token_valid() + return session.token["access_token"] + + self.toon = Toon( + token=session.token["access_token"], + session=async_get_clientsession(hass), + token_refresh_method=async_token_refresh, + ) + + super().__init__( + hass, _LOGGER, name=DOMAIN, update_interval=DEFAULT_SCAN_INTERVAL + ) + + def update_listeners(self) -> None: + """Call update on all listeners.""" + for update_callback in self._listeners: + update_callback() + + async def register_webhook(self, event: Optional[Event] = None) -> None: + """Register a webhook with Toon to get live updates.""" + if CONF_WEBHOOK_ID not in self.entry.data: + data = {**self.entry.data, CONF_WEBHOOK_ID: secrets.token_hex()} + self.hass.config_entries.async_update_entry(self.entry, data=data) + + if self.hass.components.cloud.async_active_subscription(): + + if CONF_CLOUDHOOK_URL not in self.entry.data: + webhook_url = await self.hass.components.cloud.async_create_cloudhook( + self.entry.data[CONF_WEBHOOK_ID] + ) + data = {**self.entry.data, CONF_CLOUDHOOK_URL: webhook_url} + self.hass.config_entries.async_update_entry(self.entry, data=data) + else: + webhook_url = self.entry.data[CONF_CLOUDHOOK_URL] + else: + webhook_url = self.hass.components.webhook.async_generate_url( + self.entry.data[CONF_WEBHOOK_ID] + ) + + webhook_register( + self.hass, + DOMAIN, + "Toon", + self.entry.data[CONF_WEBHOOK_ID], + self.handle_webhook, + ) + + try: + await self.toon.subscribe_webhook( + application_id=self.entry.entry_id, url=webhook_url + ) + _LOGGER.info("Registered Toon webhook: %s",
webhook_url) + except ToonError as err: + _LOGGER.error("Error during webhook registration - %s", err) + + self.hass.bus.async_listen_once( + EVENT_HOMEASSISTANT_STOP, self.unregister_webhook + ) + + async def handle_webhook( + self, hass: HomeAssistant, webhook_id: str, request + ) -> None: + """Handle webhook callback.""" + try: + data = await request.json() + except ValueError: + return + + _LOGGER.debug("Got webhook data: %s", data) + + # Webhook expired notification, re-register + if data.get("code") == 510: + await self.register_webhook() + return + + if ( + "updateDataSet" not in data + or "commonName" not in data + or self.data.agreement.display_common_name != data["commonName"] + ): + _LOGGER.warning("Received invalid data from Toon webhook - %s", data) + return + + try: + await self.toon.update(data["updateDataSet"]) + self.update_listeners() + except ToonError as err: + _LOGGER.error("Could not process data received from Toon webhook - %s", err) + + async def unregister_webhook(self, event: Optional[Event] = None) -> None: + """Remove / Unregister webhook for toon.""" + _LOGGER.debug( + "Unregistering Toon webhook (%s)", self.entry.data[CONF_WEBHOOK_ID] + ) + try: + await self.toon.unsubscribe_webhook(self.entry.entry_id) + except ToonError as err: + _LOGGER.error("Failed unregistering Toon webhook - %s", err) + + webhook_unregister(self.hass, self.entry.data[CONF_WEBHOOK_ID]) + + async def _async_update_data(self) -> Status: + """Fetch data from Toon.""" + try: + return await self.toon.update() + except ToonError as error: + raise UpdateFailed(f"Invalid response from API: {error}") diff --git a/homeassistant/components/toon/helpers.py b/homeassistant/components/toon/helpers.py new file mode 100644 index 00000000000..405ecc36d7f --- /dev/null +++ b/homeassistant/components/toon/helpers.py @@ -0,0 +1,29 @@ +"""Helpers for Toon.""" +import logging + +from toonapi import ToonConnectionError, ToonError + +_LOGGER = logging.getLogger(__name__) + + +def toon_exception_handler(func): + """Decorate Toon calls to handle Toon exceptions. + + A decorator that wraps the passed in function, catches Toon errors, + and handles the availability of the device in the data coordinator. 
+ """ + + async def handler(self, *args, **kwargs): + try: + await func(self, *args, **kwargs) + self.coordinator.update_listeners() + + except ToonConnectionError as error: + _LOGGER.error("Error communicating with API: %s", error) + self.coordinator.last_update_success = False + self.coordinator.update_listeners() + + except ToonError as error: + _LOGGER.error("Invalid response from API: %s", error) + + return handler diff --git a/homeassistant/components/toon/manifest.json b/homeassistant/components/toon/manifest.json index 230b7986fbd..2ced62ffc6c 100644 --- a/homeassistant/components/toon/manifest.json +++ b/homeassistant/components/toon/manifest.json @@ -3,6 +3,8 @@ "name": "Toon", "config_flow": true, "documentation": "https://www.home-assistant.io/integrations/toon", - "requirements": ["toonapilib==3.2.4"], + "requirements": ["toonapi==0.1.0"], + "dependencies": ["http"], + "after_dependencies": ["cloud"], "codeowners": ["@frenck"] } diff --git a/homeassistant/components/toon/models.py b/homeassistant/components/toon/models.py new file mode 100644 index 00000000000..7634246d1c9 --- /dev/null +++ b/homeassistant/components/toon/models.py @@ -0,0 +1,153 @@ +"""DataUpdate Coordinator, and base Entity and Device models for Toon.""" +import logging +from typing import Any, Dict, Optional + +from homeassistant.helpers.entity import Entity + +from .const import DOMAIN +from .coordinator import ToonDataUpdateCoordinator + +_LOGGER = logging.getLogger(__name__) + + +class ToonEntity(Entity): + """Defines a base Toon entity.""" + + def __init__( + self, + coordinator: ToonDataUpdateCoordinator, + *, + name: str, + icon: str, + enabled_default: bool = True, + ) -> None: + """Initialize the Toon entity.""" + self._enabled_default = enabled_default + self._icon = icon + self._name = name + self._state = None + self.coordinator = coordinator + + @property + def name(self) -> str: + """Return the name of the entity.""" + return self._name + + @property + def icon(self) -> Optional[str]: + """Return the mdi icon of the entity.""" + return self._icon + + @property + def available(self) -> bool: + """Return True if entity is available.""" + return self.coordinator.last_update_success + + @property + def entity_registry_enabled_default(self) -> bool: + """Return if the entity should be enabled when first added to the entity registry.""" + return self._enabled_default + + @property + def should_poll(self) -> bool: + """Return the polling requirement of the entity.""" + return False + + async def async_added_to_hass(self) -> None: + """Connect to dispatcher listening for entity data notifications.""" + self.async_on_remove( + self.coordinator.async_add_listener(self.async_write_ha_state) + ) + + async def async_update(self) -> None: + """Update Toon entity.""" + await self.coordinator.async_request_refresh() + + +class ToonDisplayDeviceEntity(ToonEntity): + """Defines a Toon display device entity.""" + + @property + def device_info(self) -> Dict[str, Any]: + """Return device information about this thermostat.""" + agreement = self.coordinator.data.agreement + model = agreement.display_hardware_version.rpartition("/")[0] + sw_version = agreement.display_software_version.rpartition("/")[-1] + return { + "identifiers": {(DOMAIN, agreement.agreement_id)}, + "name": "Toon Display", + "manufacturer": "Eneco", + "model": model, + "sw_version": sw_version, + } + + +class ToonElectricityMeterDeviceEntity(ToonEntity): + """Defines a Electricity Meter device entity.""" + + @property + def device_info(self) -> 
Dict[str, Any]: + """Return device information about this entity.""" + agreement_id = self.coordinator.data.agreement.agreement_id + return { + "name": "Electricity Meter", + "identifiers": {(DOMAIN, agreement_id, "electricity")}, + "via_device": (DOMAIN, agreement_id, "meter_adapter"), + } + + +class ToonGasMeterDeviceEntity(ToonEntity): + """Defines a Gas Meter device entity.""" + + @property + def device_info(self) -> Dict[str, Any]: + """Return device information about this entity.""" + agreement_id = self.coordinator.data.agreement.agreement_id + return { + "name": "Gas Meter", + "identifiers": {(DOMAIN, agreement_id, "gas")}, + "via_device": (DOMAIN, agreement_id, "electricity"), + } + + +class ToonSolarDeviceEntity(ToonEntity): + """Defines a Solar Device device entity.""" + + @property + def device_info(self) -> Dict[str, Any]: + """Return device information about this entity.""" + agreement_id = self.coordinator.data.agreement.agreement_id + return { + "name": "Solar Panels", + "identifiers": {(DOMAIN, agreement_id, "solar")}, + "via_device": (DOMAIN, agreement_id, "meter_adapter"), + } + + +class ToonBoilerModuleDeviceEntity(ToonEntity): + """Defines a Boiler Module device entity.""" + + @property + def device_info(self) -> Dict[str, Any]: + """Return device information about this entity.""" + agreement_id = self.coordinator.data.agreement.agreement_id + return { + "name": "Boiler Module", + "manufacturer": "Eneco", + "identifiers": {(DOMAIN, agreement_id, "boiler_module")}, + "via_device": (DOMAIN, agreement_id), + } + + +class ToonBoilerDeviceEntity(ToonEntity): + """Defines a Boiler device entity.""" + + @property + def device_info(self) -> Dict[str, Any]: + """Return device information about this entity.""" + agreement_id = self.coordinator.data.agreement.agreement_id + return { + "name": "Boiler", + "identifiers": {(DOMAIN, agreement_id, "boiler")}, + "via_device": (DOMAIN, agreement_id, "boiler_module"), + } diff --git a/homeassistant/components/toon/oauth2.py b/homeassistant/components/toon/oauth2.py new file mode 100644 index 00000000000..fcd4659cea8 --- /dev/null +++ b/homeassistant/components/toon/oauth2.py @@ -0,0 +1,135 @@ +"""OAuth2 implementations for Toon.""" +import logging +from typing import Any, Optional, cast + +from homeassistant.core import HomeAssistant +from homeassistant.helpers import config_entry_oauth2_flow +from homeassistant.helpers.aiohttp_client import async_get_clientsession + +from . 
import config_flow + +_LOGGER = logging.getLogger(__name__) + + +def register_oauth2_implementations( + hass: HomeAssistant, client_id: str, client_secret: str +) -> None: + """Register Toon OAuth2 implementations.""" + config_flow.ToonFlowHandler.async_register_implementation( + hass, + ToonLocalOAuth2Implementation( + hass, + client_id=client_id, + client_secret=client_secret, + name="Eneco Toon", + tenant_id="eneco", + issuer="identity.toon.eu", + ), + ) + config_flow.ToonFlowHandler.async_register_implementation( + hass, + ToonLocalOAuth2Implementation( + hass, + client_id=client_id, + client_secret=client_secret, + name="Engie Electrabel Boxx", + tenant_id="electrabel", + ), + ) + config_flow.ToonFlowHandler.async_register_implementation( + hass, + ToonLocalOAuth2Implementation( + hass, + client_id=client_id, + client_secret=client_secret, + name="Viesgo", + tenant_id="viesgo", + ), + ) + + +class ToonLocalOAuth2Implementation(config_entry_oauth2_flow.LocalOAuth2Implementation): + """Local OAuth2 implementation for Toon.""" + + def __init__( + self, + hass: HomeAssistant, + client_id: str, + client_secret: str, + name: str, + tenant_id: str, + issuer: Optional[str] = None, + ): + """Local Toon Oauth Implementation.""" + self._name = name + self.tenant_id = tenant_id + self.issuer = issuer + + super().__init__( + hass=hass, + domain=tenant_id, + client_id=client_id, + client_secret=client_secret, + authorize_url="https://api.toon.eu/authorize", + token_url="https://api.toon.eu/token", + ) + + @property + def name(self) -> str: + """Name of the implementation.""" + return f"{self._name} via Configuration.yaml" + + @property + def extra_authorize_data(self) -> dict: + """Extra data that needs to be appended to the authorize url.""" + data = {"tenant_id": self.tenant_id} + + if self.issuer is not None: + data["issuer"] = self.issuer + + return data + + async def async_resolve_external_data(self, external_data: Any) -> dict: + """Initialize local Toon auth implementation.""" + data = { + "grant_type": "authorization_code", + "code": external_data, + "redirect_uri": self.redirect_uri, + "tenant_id": self.tenant_id, + } + + if self.issuer is not None: + data["issuer"] = self.issuer + + return await self._token_request(data) + + async def _async_refresh_token(self, token: dict) -> dict: + """Refresh tokens.""" + data = { + "grant_type": "refresh_token", + "client_id": self.client_id, + "refresh_token": token["refresh_token"], + "tenant_id": self.tenant_id, + } + + new_token = await self._token_request(data) + return {**token, **new_token} + + async def _token_request(self, data: dict) -> dict: + """Make a token request.""" + session = async_get_clientsession(self.hass) + headers = {} + + data["client_id"] = self.client_id + data["tenant_id"] = self.tenant_id + + if self.client_secret is not None: + data["client_secret"] = self.client_secret + + if self.issuer is not None: + data["issuer"] = self.issuer + headers["issuer"] = self.issuer + + resp = await session.post(self.token_url, data=data, headers=headers) + resp.raise_for_status() + return cast(dict, await resp.json()) diff --git a/homeassistant/components/toon/sensor.py b/homeassistant/components/toon/sensor.py index 157c357e180..cbe5a4a570b 100644 --- a/homeassistant/components/toon/sensor.py +++ b/homeassistant/components/toon/sensor.py @@ -1,283 +1,136 @@ """Support for Toon sensors.""" import logging +from typing import Optional from homeassistant.config_entries import ConfigEntry -from homeassistant.const import 
ENERGY_KILO_WATT_HOUR, POWER_WATT, UNIT_PERCENTAGE -from homeassistant.helpers.typing import HomeAssistantType +from homeassistant.core import HomeAssistant -from . import ( +from .const import ( + ATTR_DEFAULT_ENABLED, + ATTR_DEVICE_CLASS, + ATTR_ICON, + ATTR_MEASUREMENT, + ATTR_NAME, + ATTR_SECTION, + ATTR_UNIT_OF_MEASUREMENT, + DOMAIN, + SENSOR_ENTITIES, +) +from .coordinator import ToonDataUpdateCoordinator +from .models import ( ToonBoilerDeviceEntity, - ToonData, ToonElectricityMeterDeviceEntity, ToonEntity, ToonGasMeterDeviceEntity, ToonSolarDeviceEntity, ) -from .const import CURRENCY_EUR, DATA_TOON, DOMAIN, VOLUME_CM3, VOLUME_M3 _LOGGER = logging.getLogger(__name__) async def async_setup_entry( - hass: HomeAssistantType, entry: ConfigEntry, async_add_entities + hass: HomeAssistant, entry: ConfigEntry, async_add_entities ) -> None: """Set up Toon sensors based on a config entry.""" - toon = hass.data[DATA_TOON][entry.entry_id] + coordinator = hass.data[DOMAIN][entry.entry_id] sensors = [ - ToonElectricityMeterDeviceSensor( - toon, "power", "value", "Current Power Usage", "mdi:power-plug", POWER_WATT - ), - ToonElectricityMeterDeviceSensor( - toon, - "power", - "average", - "Average Power Usage", - "mdi:power-plug", - POWER_WATT, - ), - ToonElectricityMeterDeviceSensor( - toon, - "power", - "daily_value", - "Power Usage Today", - "mdi:power-plug", - ENERGY_KILO_WATT_HOUR, - ), - ToonElectricityMeterDeviceSensor( - toon, - "power", - "daily_cost", - "Power Cost Today", - "mdi:power-plug", - CURRENCY_EUR, - ), - ToonElectricityMeterDeviceSensor( - toon, - "power", - "average_daily", - "Average Daily Power Usage", - "mdi:power-plug", - ENERGY_KILO_WATT_HOUR, - ), - ToonElectricityMeterDeviceSensor( - toon, - "power", - "meter_reading", - "Power Meter Feed IN Tariff 1", - "mdi:power-plug", - ENERGY_KILO_WATT_HOUR, - ), - ToonElectricityMeterDeviceSensor( - toon, - "power", - "meter_reading_low", - "Power Meter Feed IN Tariff 2", - "mdi:power-plug", - ENERGY_KILO_WATT_HOUR, - ), + ToonElectricityMeterDeviceSensor(coordinator, key=key) + for key in ( + "power_average_daily", + "power_average", + "power_daily_cost", + "power_daily_value", + "power_meter_reading_low", + "power_meter_reading", + "power_value", + "solar_meter_reading_low_produced", + "solar_meter_reading_produced", + ) ] - if toon.gas: + if coordinator.data.gas_usage and coordinator.data.gas_usage.is_smart: sensors.extend( [ - ToonGasMeterDeviceSensor( - toon, - "gas", - "value", - "Current Gas Usage", - "mdi:gas-cylinder", - VOLUME_CM3, - ), - ToonGasMeterDeviceSensor( - toon, - "gas", - "average", - "Average Gas Usage", - "mdi:gas-cylinder", - VOLUME_CM3, - ), - ToonGasMeterDeviceSensor( - toon, - "gas", - "daily_usage", - "Gas Usage Today", - "mdi:gas-cylinder", - VOLUME_M3, - ), - ToonGasMeterDeviceSensor( - toon, - "gas", - "average_daily", - "Average Daily Gas Usage", - "mdi:gas-cylinder", - VOLUME_M3, - ), - ToonGasMeterDeviceSensor( - toon, - "gas", - "meter_reading", - "Gas Meter", - "mdi:gas-cylinder", - VOLUME_M3, - ), - ToonGasMeterDeviceSensor( - toon, - "gas", - "daily_cost", - "Gas Cost Today", - "mdi:gas-cylinder", - CURRENCY_EUR, - ), - ] - ) - - if toon.solar: - sensors.extend( - [ - ToonSolarDeviceSensor( - toon, - "solar", - "value", - "Current Solar Production", - "mdi:solar-power", - POWER_WATT, - ), - ToonSolarDeviceSensor( - toon, - "solar", - "maximum", - "Max Solar Production", - "mdi:solar-power", - POWER_WATT, - ), - ToonSolarDeviceSensor( - toon, - "solar", - "produced", - "Solar Production to 
Grid", - "mdi:solar-power", - POWER_WATT, - ), - ToonSolarDeviceSensor( - toon, - "solar", - "average_produced", - "Average Solar Production to Grid", - "mdi:solar-power", - POWER_WATT, - ), - ToonElectricityMeterDeviceSensor( - toon, - "solar", - "meter_reading_produced", - "Power Meter Feed OUT Tariff 1", - "mdi:solar-power", - ENERGY_KILO_WATT_HOUR, - ), - ToonElectricityMeterDeviceSensor( - toon, - "solar", - "meter_reading_low_produced", - "Power Meter Feed OUT Tariff 2", - "mdi:solar-power", - ENERGY_KILO_WATT_HOUR, - ), - ] - ) - - if toon.thermostat_info.have_ot_boiler: - sensors.extend( - [ - ToonBoilerDeviceSensor( - toon, - "thermostat_info", - "current_modulation_level", - "Boiler Modulation Level", - "mdi:percent", - UNIT_PERCENTAGE, + ToonGasMeterDeviceSensor(coordinator, key=key) + for key in ( + "gas_average_daily", + "gas_average", + "gas_daily_cost", + "gas_daily_usage", + "gas_meter_reading", + "gas_value", ) ] ) + if coordinator.data.agreement.is_toon_solar: + sensors.extend( + [ + ToonSolarDeviceSensor(coordinator, key=key) + for key in [ + "solar_value", + "solar_maximum", + "solar_produced", + "solar_average_produced", + "power_usage_day_produced_solar", + "power_usage_day_from_grid_usage", + "power_usage_day_to_grid_usage", + "power_usage_current_covered_by_solar", + ] + ] + ) + + if coordinator.data.thermostat.have_opentherm_boiler: + sensors.extend( + [ + ToonBoilerDeviceSensor(coordinator, key=key) + for key in ["thermostat_info_current_modulation_level"] + ] + ) + async_add_entities(sensors, True) class ToonSensor(ToonEntity): """Defines a Toon sensor.""" - def __init__( - self, - toon: ToonData, - section: str, - measurement: str, - name: str, - icon: str, - unit_of_measurement: str, - ) -> None: + def __init__(self, coordinator: ToonDataUpdateCoordinator, *, key: str) -> None: """Initialize the Toon sensor.""" - self._state = None - self._unit_of_measurement = unit_of_measurement - self.section = section - self.measurement = measurement + self.key = key - super().__init__(toon, name, icon) + super().__init__( + coordinator, + enabled_default=SENSOR_ENTITIES[key][ATTR_DEFAULT_ENABLED], + icon=SENSOR_ENTITIES[key][ATTR_ICON], + name=SENSOR_ENTITIES[key][ATTR_NAME], + ) @property def unique_id(self) -> str: """Return the unique ID for this sensor.""" - return "_".join( - [DOMAIN, self.toon.agreement.id, "sensor", self.section, self.measurement] - ) + agreement_id = self.coordinator.data.agreement.agreement_id + # This unique ID is a bit ugly and contains unneeded information. + # It is here for legacy / backward compatible reasons. 
+ return f"{DOMAIN}_{agreement_id}_sensor_{self.key}" @property - def state(self): + def state(self) -> Optional[str]: """Return the state of the sensor.""" - return self._state + section = getattr( + self.coordinator.data, SENSOR_ENTITIES[self.key][ATTR_SECTION] + ) + return getattr(section, SENSOR_ENTITIES[self.key][ATTR_MEASUREMENT]) @property - def unit_of_measurement(self) -> str: + def unit_of_measurement(self) -> Optional[str]: """Return the unit this state is expressed in.""" - return self._unit_of_measurement + return SENSOR_ENTITIES[self.key][ATTR_UNIT_OF_MEASUREMENT] - def update(self) -> None: - """Get the latest data from the sensor.""" - section = getattr(self.toon, self.section) - value = None - - if not section: - return - - if self.section == "power" and self.measurement == "daily_value": - value = round( - (float(section.daily_usage) + float(section.daily_usage_low)) / 1000.0, - 2, - ) - - if value is None: - value = getattr(section, self.measurement) - - if self.section == "power" and self.measurement in [ - "meter_reading", - "meter_reading_low", - "average_daily", - ]: - value = round(float(value) / 1000.0, 2) - - if self.section == "solar" and self.measurement in [ - "meter_reading_produced", - "meter_reading_low_produced", - ]: - value = float(value) / 1000.0 - - if self.section == "gas" and self.measurement in [ - "average_daily", - "daily_usage", - "meter_reading", - ]: - value = round(float(value) / 1000.0, 2) - - self._state = max(0, value) + @property + def device_class(self) -> Optional[str]: + """Return the device class.""" + return SENSOR_ENTITIES[self.key][ATTR_DEVICE_CLASS] class ToonElectricityMeterDeviceSensor(ToonSensor, ToonElectricityMeterDeviceEntity): diff --git a/homeassistant/components/toon/strings.json b/homeassistant/components/toon/strings.json index 897c398af9b..05eef817d28 100644 --- a/homeassistant/components/toon/strings.json +++ b/homeassistant/components/toon/strings.json @@ -1,33 +1,23 @@ { "config": { "step": { - "authenticate": { - "title": "Link your Toon account", - "description": "Authenticate with your Eneco Toon account (not the developer account).", - "data": { - "username": "[%key:common::config_flow::data::username%]", - "password": "[%key:common::config_flow::data::password%]", - "tenant": "Tenant" - } + "pick_implementation": { + "title": "Choose your tenant to authenticate with" }, - "display": { - "title": "Select display", - "description": "Select the Toon display to connect with.", + "agreement": { + "title": "Select your agreement", + "description": "Select the agreement address you want to add.", "data": { - "display": "Choose display" + "agreement": "Agreement" } } }, - "error": { - "credentials": "The provided credentials are invalid.", - "display_exists": "The selected display is already configured." - }, "abort": { - "client_id": "The client ID from the configuration is invalid.", - "client_secret": "The client secret from the configuration is invalid.", - "unknown_auth_fail": "Unexpected error occurred, while authenticating.", - "no_agreements": "This account has no Toon displays.", - "no_app": "You need to configure Toon before being able to authenticate with it. [Please read the instructions](https://www.home-assistant.io/components/toon/)." 
+ "already_configured": "The selected agreement is already configured.", + "authorize_url_fail": "Unknown error generating an authorize url.", + "authorize_url_timeout": "[%key:common::config_flow::abort::oauth2_authorize_url_timeout%]", + "missing_configuration": "[%key:common::config_flow::abort::oauth2_missing_configuration%]", + "no_agreements": "This account has no Toon displays." } } -} \ No newline at end of file +} diff --git a/homeassistant/components/toon/switch.py b/homeassistant/components/toon/switch.py new file mode 100644 index 00000000000..22bdd6b100e --- /dev/null +++ b/homeassistant/components/toon/switch.py @@ -0,0 +1,119 @@ +"""Support for Toon switches.""" +import logging +from typing import Any + +from toonapi import ( + ACTIVE_STATE_AWAY, + ACTIVE_STATE_HOLIDAY, + PROGRAM_STATE_OFF, + PROGRAM_STATE_ON, +) + +from homeassistant.components.switch import SwitchEntity +from homeassistant.config_entries import ConfigEntry +from homeassistant.helpers.typing import HomeAssistantType + +from .const import ( + ATTR_DEFAULT_ENABLED, + ATTR_ICON, + ATTR_INVERTED, + ATTR_MEASUREMENT, + ATTR_NAME, + ATTR_SECTION, + DOMAIN, + SWITCH_ENTITIES, +) +from .coordinator import ToonDataUpdateCoordinator +from .helpers import toon_exception_handler +from .models import ToonDisplayDeviceEntity, ToonEntity + +_LOGGER = logging.getLogger(__name__) + + +async def async_setup_entry( + hass: HomeAssistantType, entry: ConfigEntry, async_add_entities +) -> None: + """Set up a Toon switches based on a config entry.""" + coordinator = hass.data[DOMAIN][entry.entry_id] + + async_add_entities( + [ToonProgramSwitch(coordinator), ToonHolidayModeSwitch(coordinator)] + ) + + +class ToonSwitch(ToonEntity, SwitchEntity): + """Defines an Toon switch.""" + + def __init__(self, coordinator: ToonDataUpdateCoordinator, *, key: str) -> None: + """Initialize the Toon switch.""" + self.key = key + + super().__init__( + coordinator, + enabled_default=SWITCH_ENTITIES[key][ATTR_DEFAULT_ENABLED], + icon=SWITCH_ENTITIES[key][ATTR_ICON], + name=SWITCH_ENTITIES[key][ATTR_NAME], + ) + + @property + def unique_id(self) -> str: + """Return the unique ID for this binary sensor.""" + agreement_id = self.coordinator.data.agreement.agreement_id + return f"{agreement_id}_{self.key}" + + @property + def is_on(self) -> bool: + """Return the status of the binary sensor.""" + section = getattr( + self.coordinator.data, SWITCH_ENTITIES[self.key][ATTR_SECTION] + ) + value = getattr(section, SWITCH_ENTITIES[self.key][ATTR_MEASUREMENT]) + + if SWITCH_ENTITIES[self.key][ATTR_INVERTED]: + return not value + + return value + + +class ToonProgramSwitch(ToonSwitch, ToonDisplayDeviceEntity): + """Defines a Toon program switch.""" + + def __init__(self, coordinator: ToonDataUpdateCoordinator) -> None: + """Initialize the Toon program switch.""" + super().__init__(coordinator, key="thermostat_program") + + @toon_exception_handler + async def async_turn_off(self, **kwargs: Any) -> None: + """Turn off the Toon program switch.""" + await self.coordinator.toon.set_active_state( + ACTIVE_STATE_AWAY, PROGRAM_STATE_OFF + ) + + @toon_exception_handler + async def async_turn_on(self, **kwargs: Any) -> None: + """Turn on the Toon program switch.""" + await self.coordinator.toon.set_active_state( + ACTIVE_STATE_AWAY, PROGRAM_STATE_ON + ) + + +class ToonHolidayModeSwitch(ToonSwitch, ToonDisplayDeviceEntity): + """Defines a Toon Holiday mode switch.""" + + def __init__(self, coordinator: ToonDataUpdateCoordinator) -> None: + """Initialize the Toon holiday 
switch.""" + super().__init__(coordinator, key="thermostat_holiday_mode") + + @toon_exception_handler + async def async_turn_off(self, **kwargs: Any) -> None: + """Turn off the Toon holiday mode switch.""" + await self.coordinator.toon.set_active_state( + ACTIVE_STATE_AWAY, PROGRAM_STATE_ON + ) + + @toon_exception_handler + async def async_turn_on(self, **kwargs: Any) -> None: + """Turn on the Toon holiday mode switch.""" + await self.coordinator.toon.set_active_state( + ACTIVE_STATE_HOLIDAY, PROGRAM_STATE_OFF + ) diff --git a/homeassistant/components/toon/translations/en.json b/homeassistant/components/toon/translations/en.json index 8d8b837e987..c2efd5226b5 100644 --- a/homeassistant/components/toon/translations/en.json +++ b/homeassistant/components/toon/translations/en.json @@ -1,8 +1,12 @@ { "config": { "abort": { + "already_configured": "The selected agreement is already configured.", + "authorize_url_fail": "Unknown error generating an authorize url.", + "authorize_url_timeout": "Timeout generating authorize url.", "client_id": "The client ID from the configuration is invalid.", "client_secret": "The client secret from the configuration is invalid.", + "missing_configuration": "The component is not configured. Please follow the documentation.", "no_agreements": "This account has no Toon displays.", "no_app": "You need to configure Toon before being able to authenticate with it. [Please read the instructions](https://www.home-assistant.io/components/toon/).", "unknown_auth_fail": "Unexpected error occurred, while authenticating." @@ -12,6 +16,13 @@ "display_exists": "The selected display is already configured." }, "step": { + "agreement": { + "data": { + "agreement": "Agreement" + }, + "description": "Select the agreement address you want to add.", + "title": "Select your agreement" + }, "authenticate": { "data": { "password": "Password", @@ -27,6 +38,9 @@ }, "description": "Select the Toon display to connect with.", "title": "Select display" + }, + "pick_implementation": { + "title": "Choose your tenant to authenticate with" } } } diff --git a/homeassistant/components/tplink/translations/cs.json b/homeassistant/components/tplink/translations/cs.json deleted file mode 100644 index bc9be34a83e..00000000000 --- a/homeassistant/components/tplink/translations/cs.json +++ /dev/null @@ -1,9 +0,0 @@ -{ - "config": { - "step": { - "confirm": { - "title": "TP-Link Smart Home" - } - } - } -} \ No newline at end of file diff --git a/homeassistant/components/tplink/translations/fi.json b/homeassistant/components/tplink/translations/fi.json deleted file mode 100644 index bc9be34a83e..00000000000 --- a/homeassistant/components/tplink/translations/fi.json +++ /dev/null @@ -1,9 +0,0 @@ -{ - "config": { - "step": { - "confirm": { - "title": "TP-Link Smart Home" - } - } - } -} \ No newline at end of file diff --git a/homeassistant/components/tplink/translations/nn.json b/homeassistant/components/tplink/translations/nn.json deleted file mode 100644 index bc9be34a83e..00000000000 --- a/homeassistant/components/tplink/translations/nn.json +++ /dev/null @@ -1,9 +0,0 @@ -{ - "config": { - "step": { - "confirm": { - "title": "TP-Link Smart Home" - } - } - } -} \ No newline at end of file diff --git a/homeassistant/components/tplink/translations/th.json b/homeassistant/components/tplink/translations/th.json deleted file mode 100644 index 23c027fa4af..00000000000 --- a/homeassistant/components/tplink/translations/th.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "title": "TP-Link Smart Home" -} \ No newline at end 
of file diff --git a/homeassistant/components/traccar/translations/nn.json b/homeassistant/components/traccar/translations/nn.json deleted file mode 100644 index 8db28ed6982..00000000000 --- a/homeassistant/components/traccar/translations/nn.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "title": "Traccar" -} \ No newline at end of file diff --git a/homeassistant/components/tradfri/config_flow.py b/homeassistant/components/tradfri/config_flow.py index 2ade04cff55..e438fd20170 100644 --- a/homeassistant/components/tradfri/config_flow.py +++ b/homeassistant/components/tradfri/config_flow.py @@ -82,12 +82,12 @@ class FlowHandler(config_entries.ConfigFlow): step_id="auth", data_schema=vol.Schema(fields), errors=errors ) - async def async_step_homekit(self, user_input): + async def async_step_homekit(self, discovery_info): """Handle homekit discovery.""" - await self.async_set_unique_id(user_input["properties"]["id"]) - self._abort_if_unique_id_configured({CONF_HOST: user_input["host"]}) + await self.async_set_unique_id(discovery_info["properties"]["id"]) + self._abort_if_unique_id_configured({CONF_HOST: discovery_info["host"]}) - host = user_input["host"] + host = discovery_info["host"] for entry in self._async_current_entries(): if entry.data[CONF_HOST] != host: @@ -96,7 +96,7 @@ class FlowHandler(config_entries.ConfigFlow): # Backwards compat, we update old entries if not entry.unique_id: self.hass.config_entries.async_update_entry( - entry, unique_id=user_input["properties"]["id"] + entry, unique_id=discovery_info["properties"]["id"] ) return self.async_abort(reason="already_configured") diff --git a/homeassistant/components/trend/manifest.json b/homeassistant/components/trend/manifest.json index 0d741bcf264..dabeabd2757 100644 --- a/homeassistant/components/trend/manifest.json +++ b/homeassistant/components/trend/manifest.json @@ -2,7 +2,7 @@ "domain": "trend", "name": "Trend", "documentation": "https://www.home-assistant.io/integrations/trend", - "requirements": ["numpy==1.18.4"], + "requirements": ["numpy==1.19.0"], "codeowners": [], "quality_scale": "internal" } diff --git a/homeassistant/components/tts/__init__.py b/homeassistant/components/tts/__init__.py index 6dc2e9b7d45..39e4702e855 100644 --- a/homeassistant/components/tts/__init__.py +++ b/homeassistant/components/tts/__init__.py @@ -1,6 +1,5 @@ """Provide functionality for TTS.""" import asyncio -import ctypes import functools as ft import hashlib import io @@ -8,7 +7,7 @@ import logging import mimetypes import os import re -from typing import Optional +from typing import Dict, Optional from aiohttp import web import mutagen @@ -176,7 +175,11 @@ async def async_setup(hass, config): } await hass.services.async_call( - DOMAIN_MP, SERVICE_PLAY_MEDIA, data, blocking=True + DOMAIN_MP, + SERVICE_PLAY_MEDIA, + data, + blocking=True, + context=service.context, ) service_name = p_config.get(CONF_SERVICE_NAME, f"{p_type}_{SERVICE_SAY}") @@ -212,6 +215,16 @@ async def async_setup(hass, config): return True +def _hash_options(options: Dict) -> str: + """Hashes an options dictionary.""" + opts_hash = hashlib.blake2s(digest_size=5) + for key, value in sorted(options.items()): + opts_hash.update(str(key).encode()) + opts_hash.update(str(value).encode()) + + return opts_hash.hexdigest() + + class SpeechManager: """Representation of a speech store.""" @@ -304,7 +317,7 @@ class SpeechManager: ] if invalid_opts: raise HomeAssistantError(f"Invalid options found: {invalid_opts}") - options_key = ctypes.c_size_t(hash(frozenset(options))).value + options_key 
= _hash_options(options) else: options_key = "-" diff --git a/homeassistant/components/tuya/translations/ca.json b/homeassistant/components/tuya/translations/ca.json index 89398296e9f..dc07a2d8715 100644 --- a/homeassistant/components/tuya/translations/ca.json +++ b/homeassistant/components/tuya/translations/ca.json @@ -14,7 +14,7 @@ "data": { "country_code": "El teu codi de pa\u00eds (per exemple, 1 per l'EUA o 86 per la Xina)", "password": "Contrasenya", - "platform": "L\u2019aplicaci\u00f3 on es registra el vostre compte", + "platform": "L'aplicaci\u00f3 on es registra el teu compte", "username": "Nom d'usuari" }, "description": "Introdueix la teva credencial de Tuya.", diff --git a/homeassistant/components/twilio/translations/nn.json b/homeassistant/components/twilio/translations/nn.json deleted file mode 100644 index 8831caab476..00000000000 --- a/homeassistant/components/twilio/translations/nn.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "title": "Twilio" -} \ No newline at end of file diff --git a/homeassistant/components/ue_smart_radio/media_player.py b/homeassistant/components/ue_smart_radio/media_player.py index 9f6d01cfb5d..f20a9d3578b 100644 --- a/homeassistant/components/ue_smart_radio/media_player.py +++ b/homeassistant/components/ue_smart_radio/media_player.py @@ -82,10 +82,13 @@ def setup_platform(hass, config, add_entities, discovery_info=None): session = session_request.cookies["sdi_squeezenetwork_session"] player_request = send_request({"params": ["", ["serverstatus"]]}, session) - player_id = player_request["result"]["players_loop"][0]["playerid"] - player_name = player_request["result"]["players_loop"][0]["name"] - add_entities([UERadioDevice(session, player_id, player_name)]) + players = [ + UERadioDevice(session, player["playerid"], player["name"]) + for player in player_request["result"]["players_loop"] + ] + + add_entities(players) class UERadioDevice(MediaPlayerEntity): diff --git a/homeassistant/components/unifi/device_tracker.py b/homeassistant/components/unifi/device_tracker.py index 602795404bb..f1846bbcac3 100644 --- a/homeassistant/components/unifi/device_tracker.py +++ b/homeassistant/components/unifi/device_tracker.py @@ -11,6 +11,9 @@ from aiounifi.events import ( WIRELESS_CLIENT_CONNECTED, WIRELESS_CLIENT_ROAM, WIRELESS_CLIENT_ROAMRADIO, + WIRELESS_GUEST_CONNECTED, + WIRELESS_GUEST_ROAM, + WIRELESS_GUEST_ROAMRADIO, ) from homeassistant.components.device_tracker import DOMAIN @@ -60,6 +63,9 @@ WIRELESS_CONNECTION = ( WIRELESS_CLIENT_CONNECTED, WIRELESS_CLIENT_ROAM, WIRELESS_CLIENT_ROAMRADIO, + WIRELESS_GUEST_CONNECTED, + WIRELESS_GUEST_ROAM, + WIRELESS_GUEST_ROAMRADIO, ) diff --git a/homeassistant/components/unifi/translations/no.json b/homeassistant/components/unifi/translations/no.json index d9260f92640..6e149217f11 100644 --- a/homeassistant/components/unifi/translations/no.json +++ b/homeassistant/components/unifi/translations/no.json @@ -4,8 +4,8 @@ "already_configured": "Kontroller nettstedet er allerede konfigurert" }, "error": { - "faulty_credentials": "Ugyldig brukerlegitimasjon", - "service_unavailable": "Ingen tjeneste tilgjengelig", + "faulty_credentials": "Ugyldig godkjenning", + "service_unavailable": "Tilkobling mislyktes", "unknown_client_mac": "Ingen klient tilgjengelig p\u00e5 den MAC-adressen" }, "step": { diff --git a/homeassistant/components/universal/media_player.py b/homeassistant/components/universal/media_player.py index f1cad7e8abf..ec4b53cd2e0 100644 --- a/homeassistant/components/universal/media_player.py +++ 
b/homeassistant/components/universal/media_player.py @@ -205,7 +205,7 @@ class UniversalMediaPlayer(MediaPlayerEntity): service_data[ATTR_ENTITY_ID] = active_child.entity_id await self.hass.services.async_call( - DOMAIN, service_name, service_data, blocking=True + DOMAIN, service_name, service_data, blocking=True, context=self._context ) @property diff --git a/homeassistant/components/upnp/sensor.py b/homeassistant/components/upnp/sensor.py index 29bdf7429ab..aea0ec40460 100644 --- a/homeassistant/components/upnp/sensor.py +++ b/homeassistant/components/upnp/sensor.py @@ -198,6 +198,8 @@ class RawUpnpSensor(UpnpSensor): """Return the state of the device.""" device_value_key = self._sensor_type["device_value_key"] value = self._coordinator.data[device_value_key] + if value is None: + return None return format(value, "d") @@ -235,6 +237,8 @@ class DerivedUpnpSensor(UpnpSensor): # Can't calculate any derivative if we have only one value. device_value_key = self._sensor_type["device_value_key"] current_value = self._coordinator.data[device_value_key] + if current_value is None: + return None current_timestamp = self._coordinator.data[TIMESTAMP] if self._last_value is None or self._has_overflowed(current_value): self._last_value = current_value diff --git a/homeassistant/components/upnp/translations/et.json b/homeassistant/components/upnp/translations/et.json deleted file mode 100644 index bfffa5783ff..00000000000 --- a/homeassistant/components/upnp/translations/et.json +++ /dev/null @@ -1,11 +0,0 @@ -{ - "config": { - "step": { - "user": { - "data": { - "igd": "" - } - } - } - } -} \ No newline at end of file diff --git a/homeassistant/components/upnp/translations/it.json b/homeassistant/components/upnp/translations/it.json index dacb5023615..ea25f22d301 100644 --- a/homeassistant/components/upnp/translations/it.json +++ b/homeassistant/components/upnp/translations/it.json @@ -14,7 +14,7 @@ "step": { "init": { "one": "uno", - "other": "altro" + "other": "altri" }, "ssdp_confirm": { "description": "Vuoi configurare questo dispositivo UPnP/IGD?" 
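Note on the upnp/sensor.py hunks above: the early return matters because formatting None with the "d" presentation type raises TypeError, so both sensors now report None until the coordinator actually has a counter value. A minimal, self-contained sketch of that guard pattern follows (illustrative only, not part of this diff; the dictionary and its keys are made-up stand-ins for self._coordinator.data, not the component's real device_value_key values):

# Sketch of the None-guard added to RawUpnpSensor / DerivedUpnpSensor above.
# "coordinator_data" and its keys are hypothetical, for illustration only.
coordinator_data = {"bytes_received": None, "packets_sent": 123456}


def raw_state(device_value_key: str):
    """Return the formatted counter, or None while no value has been reported."""
    value = coordinator_data[device_value_key]
    if value is None:
        # Without this guard, format(None, "d") raises TypeError.
        return None
    return format(value, "d")


print(raw_state("bytes_received"))  # -> None
print(raw_state("packets_sent"))  # -> '123456'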
diff --git a/homeassistant/components/uptime/sensor.py b/homeassistant/components/uptime/sensor.py index 1ccc0062461..12c00c7f96d 100644 --- a/homeassistant/components/uptime/sensor.py +++ b/homeassistant/components/uptime/sensor.py @@ -19,7 +19,7 @@ PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( { vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string, vol.Optional(CONF_UNIT_OF_MEASUREMENT, default="days"): vol.All( - cv.string, vol.In(["minutes", "hours", "days"]) + cv.string, vol.In(["minutes", "hours", "days", "seconds"]) ), } ) @@ -72,6 +72,8 @@ class UptimeSensor(Entity): div_factor *= 24 elif self.unit_of_measurement == "minutes": div_factor /= 60 + elif self.unit_of_measurement == "seconds": + div_factor /= 3600 delta = delta.total_seconds() / div_factor self._state = round(delta, 2) diff --git a/homeassistant/components/vera/__init__.py b/homeassistant/components/vera/__init__.py index 1e1538420b5..b636477b16d 100644 --- a/homeassistant/components/vera/__init__.py +++ b/homeassistant/components/vera/__init__.py @@ -24,7 +24,7 @@ from homeassistant.helpers.entity import Entity from homeassistant.util import convert, slugify from homeassistant.util.dt import utc_from_timestamp -from .common import ControllerData, get_configured_platforms +from .common import ControllerData, SubscriptionRegistry, get_configured_platforms from .config_flow import fix_device_id_list, new_options from .const import ( ATTR_CURRENT_ENERGY_KWH, @@ -95,12 +95,11 @@ async def async_setup_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> b ) # Initialize the Vera controller. - controller = veraApi.VeraController(base_url) - controller.start() + subscription_registry = SubscriptionRegistry(hass) + controller = veraApi.VeraController(base_url, subscription_registry) + await hass.async_add_executor_job(controller.start) - hass.bus.async_listen_once( - EVENT_HOMEASSISTANT_STOP, lambda event: controller.stop() - ) + hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, controller.stop) try: all_devices = await hass.async_add_executor_job(controller.get_devices) @@ -143,12 +142,13 @@ async def async_setup_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> b async def async_unload_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> bool: """Unload Withings config entry.""" - controller_data = hass.data[DOMAIN] + controller_data: ControllerData = hass.data[DOMAIN] tasks = [ hass.config_entries.async_forward_entry_unload(config_entry, platform) for platform in get_configured_platforms(controller_data) ] + tasks.append(hass.async_add_executor_job(controller_data.controller.stop)) await asyncio.gather(*tasks) return True diff --git a/homeassistant/components/vera/common.py b/homeassistant/components/vera/common.py index cdfdff404ec..17536bcae69 100644 --- a/homeassistant/components/vera/common.py +++ b/homeassistant/components/vera/common.py @@ -5,6 +5,8 @@ from typing import DefaultDict, List, NamedTuple, Set import pyvera as pv from homeassistant.components.scene import DOMAIN as SCENE_DOMAIN +from homeassistant.core import HomeAssistant +from homeassistant.helpers.event import call_later _LOGGER = logging.getLogger(__name__) @@ -27,3 +29,38 @@ def get_configured_platforms(controller_data: ControllerData) -> Set[str]: platforms.append(SCENE_DOMAIN) return set(platforms) + + +class SubscriptionRegistry(pv.AbstractSubscriptionRegistry): + """Manages polling for data from vera.""" + + def __init__(self, hass: HomeAssistant) -> None: + """Initialize the object.""" + super().__init__() + self._hass = hass + 
self._cancel_poll = None + + def start(self) -> None: + """Start polling for data.""" + self.stop() + self._schedule_poll(1) + + def stop(self) -> None: + """Stop polling for data.""" + if self._cancel_poll: + self._cancel_poll() + self._cancel_poll = None + + def _schedule_poll(self, delay: float) -> None: + self._cancel_poll = call_later(self._hass, delay, self._run_poll_server) + + def _run_poll_server(self, now) -> None: + delay = 1 + + # Long poll for changes. The downstream API instructs the endpoint to wait a + # a minimum of 200ms before returning data and a maximum of 9s before timing out. + if not self.poll_server_once(): + # If an error was encountered, wait a bit longer before trying again. + delay = 60 + + self._schedule_poll(delay) diff --git a/homeassistant/components/vera/manifest.json b/homeassistant/components/vera/manifest.json index 22a5da19d8c..a6afcce65b3 100644 --- a/homeassistant/components/vera/manifest.json +++ b/homeassistant/components/vera/manifest.json @@ -3,6 +3,6 @@ "name": "Vera", "config_flow": true, "documentation": "https://www.home-assistant.io/integrations/vera", - "requirements": ["pyvera==0.3.7"], + "requirements": ["pyvera==0.3.9"], "codeowners": ["@vangorra"] } diff --git a/homeassistant/components/vesync/translations/nn.json b/homeassistant/components/vesync/translations/nn.json deleted file mode 100644 index 3c1ca41456d..00000000000 --- a/homeassistant/components/vesync/translations/nn.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "title": "VeSync" -} \ No newline at end of file diff --git a/homeassistant/components/vicare/__init__.py b/homeassistant/components/vicare/__init__.py index 335e89eb873..d498ada704d 100644 --- a/homeassistant/components/vicare/__init__.py +++ b/homeassistant/components/vicare/__init__.py @@ -19,9 +19,10 @@ from homeassistant.helpers.storage import STORAGE_DIR _LOGGER = logging.getLogger(__name__) -VICARE_PLATFORMS = ["climate", "water_heater"] +VICARE_PLATFORMS = ["climate", "sensor", "binary_sensor", "water_heater"] DOMAIN = "vicare" +PYVICARE_ERROR = "error" VICARE_API = "api" VICARE_NAME = "name" VICARE_HEATING_TYPE = "heating_type" diff --git a/homeassistant/components/vicare/binary_sensor.py b/homeassistant/components/vicare/binary_sensor.py new file mode 100644 index 00000000000..a35339c12a9 --- /dev/null +++ b/homeassistant/components/vicare/binary_sensor.py @@ -0,0 +1,123 @@ +"""Viessmann ViCare sensor device.""" +import logging + +import requests + +from homeassistant.components.binary_sensor import ( + DEVICE_CLASS_POWER, + BinarySensorDevice, +) +from homeassistant.const import CONF_DEVICE_CLASS, CONF_NAME + +from . 
import ( + DOMAIN as VICARE_DOMAIN, + PYVICARE_ERROR, + VICARE_API, + VICARE_HEATING_TYPE, + VICARE_NAME, + HeatingType, +) + +_LOGGER = logging.getLogger(__name__) + +CONF_GETTER = "getter" + +SENSOR_CIRCULATION_PUMP_ACTIVE = "circulationpump_active" +SENSOR_BURNER_ACTIVE = "burner_active" +SENSOR_COMPRESSOR_ACTIVE = "compressor_active" + +SENSOR_TYPES = { + SENSOR_CIRCULATION_PUMP_ACTIVE: { + CONF_NAME: "Circulation pump active", + CONF_DEVICE_CLASS: DEVICE_CLASS_POWER, + CONF_GETTER: lambda api: api.getCirculationPumpActive(), + }, + # gas sensors + SENSOR_BURNER_ACTIVE: { + CONF_NAME: "Burner active", + CONF_DEVICE_CLASS: DEVICE_CLASS_POWER, + CONF_GETTER: lambda api: api.getBurnerActive(), + }, + # heatpump sensors + SENSOR_COMPRESSOR_ACTIVE: { + CONF_NAME: "Compressor active", + CONF_DEVICE_CLASS: DEVICE_CLASS_POWER, + CONF_GETTER: lambda api: api.getCompressorActive(), + }, +} + +SENSORS_GENERIC = [SENSOR_CIRCULATION_PUMP_ACTIVE] + +SENSORS_BY_HEATINGTYPE = { + HeatingType.gas: [SENSOR_BURNER_ACTIVE], + HeatingType.heatpump: [SENSOR_COMPRESSOR_ACTIVE], +} + + +def setup_platform(hass, config, add_entities, discovery_info=None): + """Create the ViCare sensor devices.""" + if discovery_info is None: + return + + vicare_api = hass.data[VICARE_DOMAIN][VICARE_API] + heating_type = hass.data[VICARE_DOMAIN][VICARE_HEATING_TYPE] + + sensors = SENSORS_GENERIC.copy() + + if heating_type != HeatingType.generic: + sensors.extend(SENSORS_BY_HEATINGTYPE[heating_type]) + + add_entities( + [ + ViCareBinarySensor( + hass.data[VICARE_DOMAIN][VICARE_NAME], vicare_api, sensor + ) + for sensor in sensors + ] + ) + + +class ViCareBinarySensor(BinarySensorDevice): + """Representation of a ViCare sensor.""" + + def __init__(self, name, api, sensor_type): + """Initialize the sensor.""" + self._sensor = SENSOR_TYPES[sensor_type] + self._name = f"{name} {self._sensor[CONF_NAME]}" + self._api = api + self._sensor_type = sensor_type + self._state = None + + @property + def available(self): + """Return True if entity is available.""" + return self._state is not None and self._state != PYVICARE_ERROR + + @property + def unique_id(self): + """Return a unique ID.""" + return f"{self._api.service.id}-{self._sensor_type}" + + @property + def name(self): + """Return the name of the sensor.""" + return self._name + + @property + def is_on(self): + """Return the state of the sensor.""" + return self._state + + @property + def device_class(self): + """Return the class of this device, from component DEVICE_CLASSES.""" + return self._sensor[CONF_DEVICE_CLASS] + + def update(self): + """Update state of sensor.""" + try: + self._state = self._sensor[CONF_GETTER](self._api) + except requests.exceptions.ConnectionError: + _LOGGER.error("Unable to retrieve data from ViCare server") + except ValueError: + _LOGGER.error("Unable to decode data from ViCare server") diff --git a/homeassistant/components/vicare/climate.py b/homeassistant/components/vicare/climate.py index ce88ea8e3e7..ddfb28478df 100644 --- a/homeassistant/components/vicare/climate.py +++ b/homeassistant/components/vicare/climate.py @@ -19,6 +19,7 @@ from homeassistant.const import ATTR_TEMPERATURE, PRECISION_WHOLE, TEMP_CELSIUS from . 
import ( DOMAIN as VICARE_DOMAIN, + PYVICARE_ERROR, VICARE_API, VICARE_HEATING_TYPE, VICARE_NAME, @@ -77,8 +78,6 @@ HA_TO_VICARE_PRESET_HEATING = { PRESET_ECO: VICARE_PROGRAM_ECO, } -PYVICARE_ERROR = "error" - def setup_platform(hass, config, add_entities, discovery_info=None): """Create the ViCare climate devices.""" @@ -138,8 +137,6 @@ class ViCareClimate(ClimateEntity): # Update the generic device attributes self._attributes = {} self._attributes["room_temperature"] = _room_temperature - self._attributes["supply_temperature"] = _supply_temperature - self._attributes["outside_temperature"] = self._api.getOutsideTemperature() self._attributes["active_vicare_program"] = self._current_program self._attributes["active_vicare_mode"] = self._current_mode self._attributes["heating_curve_slope"] = self._api.getHeatingCurveSlope() @@ -150,25 +147,14 @@ class ViCareClimate(ClimateEntity): self._attributes["date_last_service"] = self._api.getLastServiceDate() self._attributes["error_history"] = self._api.getErrorHistory() self._attributes["active_error"] = self._api.getActiveError() - self._attributes[ - "circulationpump_active" - ] = self._api.getCirculationPumpActive() # Update the specific device attributes if self._heating_type == HeatingType.gas: self._current_action = self._api.getBurnerActive() - self._attributes["burner_modulation"] = self._api.getBurnerModulation() - self._attributes[ - "boiler_temperature" - ] = self._api.getBoilerTemperature() - elif self._heating_type == HeatingType.heatpump: self._current_action = self._api.getCompressorActive() - self._attributes[ - "return_temperature" - ] = self._api.getReturnTemperature() except requests.exceptions.ConnectionError: _LOGGER.error("Unable to retrieve data from ViCare server") except ValueError: diff --git a/homeassistant/components/vicare/manifest.json b/homeassistant/components/vicare/manifest.json index 6fc0dfdd119..a91867b7a19 100644 --- a/homeassistant/components/vicare/manifest.json +++ b/homeassistant/components/vicare/manifest.json @@ -3,5 +3,5 @@ "name": "Viessmann ViCare", "documentation": "https://www.home-assistant.io/integrations/vicare", "codeowners": ["@oischinger"], - "requirements": ["PyViCare==0.1.10"] + "requirements": ["PyViCare==0.2.0"] } diff --git a/homeassistant/components/vicare/sensor.py b/homeassistant/components/vicare/sensor.py new file mode 100644 index 00000000000..35ce6dc787b --- /dev/null +++ b/homeassistant/components/vicare/sensor.py @@ -0,0 +1,289 @@ +"""Viessmann ViCare sensor device.""" +import logging + +import requests + +from homeassistant.const import ( + CONF_DEVICE_CLASS, + CONF_ICON, + CONF_NAME, + CONF_UNIT_OF_MEASUREMENT, + DEVICE_CLASS_POWER, + DEVICE_CLASS_TEMPERATURE, + ENERGY_KILO_WATT_HOUR, + POWER_WATT, + TEMP_CELSIUS, + UNIT_PERCENTAGE, +) +from homeassistant.helpers.entity import Entity + +from . 
import ( + DOMAIN as VICARE_DOMAIN, + PYVICARE_ERROR, + VICARE_API, + VICARE_HEATING_TYPE, + VICARE_NAME, + HeatingType, +) + +_LOGGER = logging.getLogger(__name__) + +CONF_GETTER = "getter" + +SENSOR_TYPE_TEMPERATURE = "temperature" + +SENSOR_OUTSIDE_TEMPERATURE = "outside_temperature" +SENSOR_SUPPLY_TEMPERATURE = "supply_temperature" +SENSOR_RETURN_TEMPERATURE = "return_temperature" + +# gas sensors +SENSOR_BOILER_TEMPERATURE = "boiler_temperature" +SENSOR_BURNER_MODULATION = "burner_modulation" +SENSOR_BURNER_STARTS = "burner_starts" +SENSOR_BURNER_HOURS = "burner_hours" +SENSOR_BURNER_POWER = "burner_power" +SENSOR_DHW_GAS_CONSUMPTION_TODAY = "hotwater_gas_consumption_today" +SENSOR_DHW_GAS_CONSUMPTION_THIS_WEEK = "hotwater_gas_consumption_heating_this_week" +SENSOR_DHW_GAS_CONSUMPTION_THIS_MONTH = "hotwater_gas_consumption_heating_this_month" +SENSOR_DHW_GAS_CONSUMPTION_THIS_YEAR = "hotwater_gas_consumption_heating_this_year" +SENSOR_GAS_CONSUMPTION_TODAY = "gas_consumption_heating_today" +SENSOR_GAS_CONSUMPTION_THIS_WEEK = "gas_consumption_heating_this_week" +SENSOR_GAS_CONSUMPTION_THIS_MONTH = "gas_consumption_heating_this_month" +SENSOR_GAS_CONSUMPTION_THIS_YEAR = "gas_consumption_heating_this_year" + +# heatpump sensors +SENSOR_COMPRESSOR_STARTS = "compressor_starts" +SENSOR_COMPRESSOR_HOURS = "compressor_hours" + +SENSOR_TYPES = { + SENSOR_OUTSIDE_TEMPERATURE: { + CONF_NAME: "Outside Temperature", + CONF_ICON: None, + CONF_UNIT_OF_MEASUREMENT: TEMP_CELSIUS, + CONF_GETTER: lambda api: api.getOutsideTemperature(), + CONF_DEVICE_CLASS: DEVICE_CLASS_TEMPERATURE, + }, + SENSOR_SUPPLY_TEMPERATURE: { + CONF_NAME: "Supply Temperature", + CONF_ICON: None, + CONF_UNIT_OF_MEASUREMENT: TEMP_CELSIUS, + CONF_GETTER: lambda api: api.getSupplyTemperature(), + CONF_DEVICE_CLASS: DEVICE_CLASS_TEMPERATURE, + }, + # gas sensors + SENSOR_BOILER_TEMPERATURE: { + CONF_NAME: "Boiler Temperature", + CONF_ICON: None, + CONF_UNIT_OF_MEASUREMENT: TEMP_CELSIUS, + CONF_GETTER: lambda api: api.getBoilerTemperature(), + CONF_DEVICE_CLASS: DEVICE_CLASS_TEMPERATURE, + }, + SENSOR_BURNER_MODULATION: { + CONF_NAME: "Burner modulation", + CONF_ICON: "mdi:percent", + CONF_UNIT_OF_MEASUREMENT: UNIT_PERCENTAGE, + CONF_GETTER: lambda api: api.getBurnerModulation(), + CONF_DEVICE_CLASS: None, + }, + SENSOR_DHW_GAS_CONSUMPTION_TODAY: { + CONF_NAME: "Hot water gas consumption today", + CONF_ICON: "mdi:power", + CONF_UNIT_OF_MEASUREMENT: ENERGY_KILO_WATT_HOUR, + CONF_GETTER: lambda api: api.getGasConsumptionDomesticHotWaterToday(), + CONF_DEVICE_CLASS: None, + }, + SENSOR_DHW_GAS_CONSUMPTION_THIS_WEEK: { + CONF_NAME: "Hot water gas consumption this week", + CONF_ICON: "mdi:power", + CONF_UNIT_OF_MEASUREMENT: ENERGY_KILO_WATT_HOUR, + CONF_GETTER: lambda api: api.getGasConsumptionDomesticHotWaterThisWeek(), + CONF_DEVICE_CLASS: None, + }, + SENSOR_DHW_GAS_CONSUMPTION_THIS_MONTH: { + CONF_NAME: "Hot water gas consumption this month", + CONF_ICON: "mdi:power", + CONF_UNIT_OF_MEASUREMENT: ENERGY_KILO_WATT_HOUR, + CONF_GETTER: lambda api: api.getGasConsumptionDomesticHotWaterThisMonth(), + CONF_DEVICE_CLASS: None, + }, + SENSOR_DHW_GAS_CONSUMPTION_THIS_YEAR: { + CONF_NAME: "Hot water gas consumption this year", + CONF_ICON: "mdi:power", + CONF_UNIT_OF_MEASUREMENT: ENERGY_KILO_WATT_HOUR, + CONF_GETTER: lambda api: api.getGasConsumptionDomesticHotWaterThisYear(), + CONF_DEVICE_CLASS: None, + }, + SENSOR_GAS_CONSUMPTION_TODAY: { + CONF_NAME: "Heating gas consumption today", + CONF_ICON: "mdi:power", + CONF_UNIT_OF_MEASUREMENT: 
ENERGY_KILO_WATT_HOUR, + CONF_GETTER: lambda api: api.getGasConsumptionHeatingToday(), + CONF_DEVICE_CLASS: None, + }, + SENSOR_GAS_CONSUMPTION_THIS_WEEK: { + CONF_NAME: "Heating gas consumption this week", + CONF_ICON: "mdi:power", + CONF_UNIT_OF_MEASUREMENT: ENERGY_KILO_WATT_HOUR, + CONF_GETTER: lambda api: api.getGasConsumptionHeatingThisWeek(), + CONF_DEVICE_CLASS: None, + }, + SENSOR_GAS_CONSUMPTION_THIS_MONTH: { + CONF_NAME: "Heating gas consumption this month", + CONF_ICON: "mdi:power", + CONF_UNIT_OF_MEASUREMENT: ENERGY_KILO_WATT_HOUR, + CONF_GETTER: lambda api: api.getGasConsumptionHeatingThisMonth(), + CONF_DEVICE_CLASS: None, + }, + SENSOR_GAS_CONSUMPTION_THIS_YEAR: { + CONF_NAME: "Heating gas consumption this year", + CONF_ICON: "mdi:power", + CONF_UNIT_OF_MEASUREMENT: ENERGY_KILO_WATT_HOUR, + CONF_GETTER: lambda api: api.getGasConsumptionHeatingThisYear(), + CONF_DEVICE_CLASS: None, + }, + SENSOR_BURNER_STARTS: { + CONF_NAME: "Burner Starts", + CONF_ICON: "mdi:counter", + CONF_UNIT_OF_MEASUREMENT: None, + CONF_GETTER: lambda api: api.getBurnerStarts(), + CONF_DEVICE_CLASS: None, + }, + SENSOR_BURNER_HOURS: { + CONF_NAME: "Burner Hours", + CONF_ICON: "mdi:counter", + CONF_UNIT_OF_MEASUREMENT: None, + CONF_GETTER: lambda api: api.getBurnerHours(), + CONF_DEVICE_CLASS: None, + }, + SENSOR_BURNER_POWER: { + CONF_NAME: "Burner Current Power", + CONF_ICON: None, + CONF_UNIT_OF_MEASUREMENT: POWER_WATT, + CONF_GETTER: lambda api: api.getCurrentPower(), + CONF_DEVICE_CLASS: DEVICE_CLASS_POWER, + }, + # heatpump sensors + SENSOR_COMPRESSOR_STARTS: { + CONF_NAME: "Compressor Starts", + CONF_ICON: "mdi:counter", + CONF_UNIT_OF_MEASUREMENT: None, + CONF_GETTER: lambda api: api.getCompressorStarts(), + CONF_DEVICE_CLASS: None, + }, + SENSOR_COMPRESSOR_HOURS: { + CONF_NAME: "Compressor Hours", + CONF_ICON: "mdi:counter", + CONF_UNIT_OF_MEASUREMENT: None, + CONF_GETTER: lambda api: api.getCompressorHours(), + CONF_DEVICE_CLASS: None, + }, + SENSOR_RETURN_TEMPERATURE: { + CONF_NAME: "Return Temperature", + CONF_ICON: None, + CONF_UNIT_OF_MEASUREMENT: TEMP_CELSIUS, + CONF_GETTER: lambda api: api.getReturnTemperature(), + CONF_DEVICE_CLASS: DEVICE_CLASS_TEMPERATURE, + }, +} + +SENSORS_GENERIC = [SENSOR_OUTSIDE_TEMPERATURE, SENSOR_SUPPLY_TEMPERATURE] + +SENSORS_BY_HEATINGTYPE = { + HeatingType.gas: [ + SENSOR_BOILER_TEMPERATURE, + SENSOR_BURNER_HOURS, + SENSOR_BURNER_MODULATION, + SENSOR_BURNER_STARTS, + SENSOR_BURNER_POWER, + SENSOR_DHW_GAS_CONSUMPTION_TODAY, + SENSOR_DHW_GAS_CONSUMPTION_THIS_WEEK, + SENSOR_DHW_GAS_CONSUMPTION_THIS_MONTH, + SENSOR_DHW_GAS_CONSUMPTION_THIS_YEAR, + SENSOR_GAS_CONSUMPTION_TODAY, + SENSOR_GAS_CONSUMPTION_THIS_WEEK, + SENSOR_GAS_CONSUMPTION_THIS_MONTH, + SENSOR_GAS_CONSUMPTION_THIS_YEAR, + ], + HeatingType.heatpump: [ + SENSOR_COMPRESSOR_HOURS, + SENSOR_COMPRESSOR_STARTS, + SENSOR_RETURN_TEMPERATURE, + ], +} + + +def setup_platform(hass, config, add_entities, discovery_info=None): + """Create the ViCare sensor devices.""" + if discovery_info is None: + return + + vicare_api = hass.data[VICARE_DOMAIN][VICARE_API] + heating_type = hass.data[VICARE_DOMAIN][VICARE_HEATING_TYPE] + + sensors = SENSORS_GENERIC.copy() + + if heating_type != HeatingType.generic: + sensors.extend(SENSORS_BY_HEATINGTYPE[heating_type]) + + add_entities( + [ + ViCareSensor(hass.data[VICARE_DOMAIN][VICARE_NAME], vicare_api, sensor) + for sensor in sensors + ] + ) + + +class ViCareSensor(Entity): + """Representation of a ViCare sensor.""" + + def __init__(self, name, api, sensor_type): + 
"""Initialize the sensor.""" + self._sensor = SENSOR_TYPES[sensor_type] + self._name = f"{name} {self._sensor[CONF_NAME]}" + self._api = api + self._sensor_type = sensor_type + self._state = None + + @property + def available(self): + """Return True if entity is available.""" + return self._state is not None and self._state != PYVICARE_ERROR + + @property + def unique_id(self): + """Return a unique ID.""" + return f"{self._api.service.id}-{self._sensor_type}" + + @property + def name(self): + """Return the name of the sensor.""" + return self._name + + @property + def icon(self): + """Icon to use in the frontend, if any.""" + return self._sensor[CONF_ICON] + + @property + def state(self): + """Return the state of the sensor.""" + return self._state + + @property + def unit_of_measurement(self): + """Return the unit of measurement.""" + return self._sensor[CONF_UNIT_OF_MEASUREMENT] + + @property + def device_class(self): + """Return the class of this device, from component DEVICE_CLASSES.""" + return self._sensor[CONF_DEVICE_CLASS] + + def update(self): + """Update state of sensor.""" + try: + self._state = self._sensor[CONF_GETTER](self._api) + except requests.exceptions.ConnectionError: + _LOGGER.error("Unable to retrieve data from ViCare server") + except ValueError: + _LOGGER.error("Unable to decode data from ViCare server") diff --git a/homeassistant/components/vicare/water_heater.py b/homeassistant/components/vicare/water_heater.py index c6aa5205f24..cbecf7fdaf2 100644 --- a/homeassistant/components/vicare/water_heater.py +++ b/homeassistant/components/vicare/water_heater.py @@ -9,7 +9,13 @@ from homeassistant.components.water_heater import ( ) from homeassistant.const import ATTR_TEMPERATURE, PRECISION_WHOLE, TEMP_CELSIUS -from . import DOMAIN as VICARE_DOMAIN, VICARE_API, VICARE_HEATING_TYPE, VICARE_NAME +from . import ( + DOMAIN as VICARE_DOMAIN, + PYVICARE_ERROR, + VICARE_API, + VICARE_HEATING_TYPE, + VICARE_NAME, +) _LOGGER = logging.getLogger(__name__) @@ -40,8 +46,6 @@ HA_TO_VICARE_HVAC_DHW = { OPERATION_MODE_ON: VICARE_MODE_DHW, } -PYVICARE_ERROR = "error" - def setup_platform(hass, config, add_entities, discovery_info=None): """Create the ViCare water_heater devices.""" diff --git a/homeassistant/components/vizio/translations/sk.json b/homeassistant/components/vizio/translations/sk.json deleted file mode 100644 index e0c0076ddc2..00000000000 --- a/homeassistant/components/vizio/translations/sk.json +++ /dev/null @@ -1,22 +0,0 @@ -{ - "config": { - "step": { - "tv_apps": { - "data": { - "apps_to_include_or_exclude": "Aplik\u00e1cie, ktor\u00e9 chcete zahrn\u00fa\u0165 alebo vyl\u00fa\u010di\u0165", - "include_or_exclude": "Zahrn\u00fa\u0165 alebo vyl\u00fa\u010di\u0165 aplik\u00e1cie?" - }, - "description": "Ak m\u00e1te Smart TV, m\u00f4\u017eete volite\u013ene filtrova\u0165 svoj zoznam zdrojov v\u00fdberom aplik\u00e1ci\u00ed, ktor\u00e9 chcete zahrn\u00fa\u0165 alebo vyl\u00fa\u010di\u0165 do zoznamu zdrojov. Tento krok m\u00f4\u017eete presko\u010di\u0165 pre telev\u00edzory, ktor\u00e9 nepodporuj\u00fa aplik\u00e1cie.", - "title": "Konfigur\u00e1cia aplik\u00e1ci\u00ed pre Smart TV" - }, - "user_tv": { - "data": { - "apps_to_include_or_exclude": "Aplik\u00e1cie, ktor\u00e9 chcete zahrn\u00fa\u0165 alebo vyl\u00fa\u010di\u0165", - "include_or_exclude": "Zahrn\u00fa\u0165 alebo vyl\u00fa\u010di\u0165 aplik\u00e1cie?" 
- }, - "description": "Ak m\u00e1te Smart TV, m\u00f4\u017eete volite\u013ene filtrova\u0165 svoj zoznam zdrojov v\u00fdberom aplik\u00e1ci\u00ed, ktor\u00e9 chcete zahrn\u00fa\u0165 alebo vyl\u00fa\u010di\u0165 do zoznamu zdrojov. Tento krok m\u00f4\u017eete presko\u010di\u0165 pre telev\u00edzory, ktor\u00e9 nepodporuj\u00fa aplik\u00e1cie.", - "title": "Konfigur\u00e1cia aplik\u00e1ci\u00ed pre Smart TV" - } - } - } -} \ No newline at end of file diff --git a/homeassistant/components/wake_on_lan/__init__.py b/homeassistant/components/wake_on_lan/__init__.py index d5b8f92a9bc..1600f70a15f 100644 --- a/homeassistant/components/wake_on_lan/__init__.py +++ b/homeassistant/components/wake_on_lan/__init__.py @@ -5,7 +5,7 @@ import logging import voluptuous as vol import wakeonlan -from homeassistant.const import CONF_BROADCAST_ADDRESS, CONF_MAC +from homeassistant.const import CONF_BROADCAST_ADDRESS, CONF_BROADCAST_PORT, CONF_MAC import homeassistant.helpers.config_validation as cv _LOGGER = logging.getLogger(__name__) @@ -15,7 +15,11 @@ DOMAIN = "wake_on_lan" SERVICE_SEND_MAGIC_PACKET = "send_magic_packet" WAKE_ON_LAN_SEND_MAGIC_PACKET_SCHEMA = vol.Schema( - {vol.Required(CONF_MAC): cv.string, vol.Optional(CONF_BROADCAST_ADDRESS): cv.string} + { + vol.Required(CONF_MAC): cv.string, + vol.Optional(CONF_BROADCAST_ADDRESS): cv.string, + vol.Optional(CONF_BROADCAST_PORT): cv.port, + } ) @@ -26,10 +30,12 @@ async def async_setup(hass, config): """Send magic packet to wake up a device.""" mac_address = call.data.get(CONF_MAC) broadcast_address = call.data.get(CONF_BROADCAST_ADDRESS) + broadcast_port = call.data.get(CONF_BROADCAST_PORT) _LOGGER.info( - "Send magic packet to mac %s (broadcast: %s)", + "Send magic packet to mac %s (broadcast: %s, port: %s)", mac_address, broadcast_address, + broadcast_port, ) if broadcast_address is not None: await hass.async_add_job( @@ -37,6 +43,7 @@ async def async_setup(hass, config): wakeonlan.send_magic_packet, mac_address, ip_address=broadcast_address, + port=broadcast_port, ) ) else: diff --git a/homeassistant/components/wake_on_lan/services.yaml b/homeassistant/components/wake_on_lan/services.yaml index 915dd2bce96..54ce72c9432 100644 --- a/homeassistant/components/wake_on_lan/services.yaml +++ b/homeassistant/components/wake_on_lan/services.yaml @@ -1,9 +1,12 @@ send_magic_packet: description: Send a 'magic packet' to wake up a device with 'Wake-On-LAN' capabilities. fields: - broadcast_address: - description: Optional broadcast IP where to send the magic packet. - example: 192.168.255.255 mac: description: MAC address of the device to wake up. example: "aa:bb:cc:dd:ee:ff" + broadcast_address: + description: Optional broadcast IP where to send the magic packet. + example: 192.168.255.255 + broadcast_port: + description: Optional port where to send the magic packet. 
+ example: 9 diff --git a/homeassistant/components/wake_on_lan/switch.py b/homeassistant/components/wake_on_lan/switch.py index e3af8f146f1..8f26e19d2e0 100644 --- a/homeassistant/components/wake_on_lan/switch.py +++ b/homeassistant/components/wake_on_lan/switch.py @@ -7,7 +7,13 @@ import voluptuous as vol import wakeonlan from homeassistant.components.switch import PLATFORM_SCHEMA, SwitchEntity -from homeassistant.const import CONF_BROADCAST_ADDRESS, CONF_HOST, CONF_MAC, CONF_NAME +from homeassistant.const import ( + CONF_BROADCAST_ADDRESS, + CONF_BROADCAST_PORT, + CONF_HOST, + CONF_MAC, + CONF_NAME, +) import homeassistant.helpers.config_validation as cv from homeassistant.helpers.script import Script @@ -22,6 +28,7 @@ PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( { vol.Required(CONF_MAC): cv.string, vol.Optional(CONF_BROADCAST_ADDRESS): cv.string, + vol.Optional(CONF_BROADCAST_PORT): cv.port, vol.Optional(CONF_HOST): cv.string, vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string, vol.Optional(CONF_OFF_ACTION): cv.SCRIPT_SCHEMA, @@ -32,26 +39,48 @@ PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( def setup_platform(hass, config, add_entities, discovery_info=None): """Set up a wake on lan switch.""" broadcast_address = config.get(CONF_BROADCAST_ADDRESS) + broadcast_port = config.get(CONF_BROADCAST_PORT) host = config.get(CONF_HOST) mac_address = config[CONF_MAC] name = config[CONF_NAME] off_action = config.get(CONF_OFF_ACTION) add_entities( - [WolSwitch(hass, name, host, mac_address, off_action, broadcast_address)], True + [ + WolSwitch( + hass, + name, + host, + mac_address, + off_action, + broadcast_address, + broadcast_port, + ) + ], + True, ) class WolSwitch(SwitchEntity): """Representation of a wake on lan switch.""" - def __init__(self, hass, name, host, mac_address, off_action, broadcast_address): + def __init__( + self, + hass, + name, + host, + mac_address, + off_action, + broadcast_address, + broadcast_port, + ): """Initialize the WOL switch.""" self._hass = hass self._name = name self._host = host self._mac_address = mac_address self._broadcast_address = broadcast_address + self._broadcast_port = broadcast_port self._off_script = Script(hass, off_action) if off_action else None self._state = False @@ -69,7 +98,9 @@ class WolSwitch(SwitchEntity): """Turn the device on.""" if self._broadcast_address: wakeonlan.send_magic_packet( - self._mac_address, ip_address=self._broadcast_address + self._mac_address, + ip_address=self._broadcast_address, + port=self._broadcast_port, ) else: wakeonlan.send_magic_packet(self._mac_address) diff --git a/homeassistant/components/weather/__init__.py b/homeassistant/components/weather/__init__.py index 8efb8519636..5a6fcc2d80b 100644 --- a/homeassistant/components/weather/__init__.py +++ b/homeassistant/components/weather/__init__.py @@ -19,6 +19,7 @@ ATTR_CONDITION_CLASS = "condition_class" ATTR_FORECAST = "forecast" ATTR_FORECAST_CONDITION = "condition" ATTR_FORECAST_PRECIPITATION = "precipitation" +ATTR_FORECAST_PRECIPITATION_PROBABILITY = "precipitation_probability" ATTR_FORECAST_TEMP = "temperature" ATTR_FORECAST_TEMP_LOW = "templow" ATTR_FORECAST_TIME = "datetime" diff --git a/homeassistant/components/weather/translations/fr.json b/homeassistant/components/weather/translations/fr.json index a766fcfd9c4..7975b1f396d 100644 --- a/homeassistant/components/weather/translations/fr.json +++ b/homeassistant/components/weather/translations/fr.json @@ -11,11 +11,11 @@ "partlycloudy": "Partiellement nuageux", "pouring": "Averses", "rainy": "Pluie", - "snowy": 
"Neige", - "snowy-rainy": "Neige / Pluie", - "sunny": "Soleil", - "windy": "Vent", - "windy-variant": "Vent" + "snowy": "Neigeux", + "snowy-rainy": "Neigeux, pluvieux", + "sunny": "Ensoleill\u00e9", + "windy": "Venteux", + "windy-variant": "Venteux" } } } \ No newline at end of file diff --git a/homeassistant/components/webostv/media_player.py b/homeassistant/components/webostv/media_player.py index 556ff7a287b..aa5a2a9c79d 100644 --- a/homeassistant/components/webostv/media_player.py +++ b/homeassistant/components/webostv/media_player.py @@ -60,6 +60,7 @@ SUPPORT_WEBOSTV_VOLUME = SUPPORT_VOLUME_MUTE | SUPPORT_VOLUME_STEP MIN_TIME_BETWEEN_SCANS = timedelta(seconds=10) MIN_TIME_BETWEEN_FORCED_SCANS = timedelta(seconds=1) +SCAN_INTERVAL = timedelta(seconds=10) async def async_setup_platform(hass, config, async_add_entities, discovery_info=None): diff --git a/homeassistant/components/wemo/fan.py b/homeassistant/components/wemo/fan.py index f040a9f3845..f2cb46fa32c 100644 --- a/homeassistant/components/wemo/fan.py +++ b/homeassistant/components/wemo/fan.py @@ -304,6 +304,8 @@ class WemoHumidifier(FanEntity): else: self.set_speed(speed) + self.schedule_update_ha_state() + def turn_off(self, **kwargs) -> None: """Turn the switch off.""" try: @@ -312,6 +314,8 @@ class WemoHumidifier(FanEntity): _LOGGER.warning("Error while turning off device %s (%s)", self.name, err) self._available = False + self.schedule_update_ha_state() + def set_speed(self, speed: str) -> None: """Set the fan_mode of the Humidifier.""" try: @@ -322,6 +326,8 @@ class WemoHumidifier(FanEntity): ) self._available = False + self.schedule_update_ha_state() + def set_humidity(self, humidity: float) -> None: """Set the target humidity level for the Humidifier.""" if humidity < 50: @@ -343,6 +349,8 @@ class WemoHumidifier(FanEntity): ) self._available = False + self.schedule_update_ha_state() + def reset_filter_life(self) -> None: """Reset the filter life to 100%.""" try: @@ -352,3 +360,5 @@ class WemoHumidifier(FanEntity): "Error while resetting filter life on device: %s (%s)", self.name, err ) self._available = False + + self.schedule_update_ha_state() diff --git a/homeassistant/components/wemo/light.py b/homeassistant/components/wemo/light.py index 2a05d42f1f7..6aac2be6dda 100644 --- a/homeassistant/components/wemo/light.py +++ b/homeassistant/components/wemo/light.py @@ -32,6 +32,10 @@ SUPPORT_WEMO = ( SUPPORT_BRIGHTNESS | SUPPORT_COLOR_TEMP | SUPPORT_COLOR | SUPPORT_TRANSITION ) +# The WEMO_ constants below come from pywemo itself +WEMO_ON = 1 +WEMO_OFF = 0 + async def async_setup_entry(hass, config_entry, async_add_entities): """Set up WeMo lights.""" @@ -175,21 +179,27 @@ class WemoLight(LightEntity): if color_temp is not None: self.wemo.set_temperature(mireds=color_temp, transition=transition_time) - self.wemo.turn_on(**turn_on_kwargs) + if self.wemo.turn_on(**turn_on_kwargs): + self._state["onoff"] = WEMO_ON except ActionException as err: _LOGGER.warning("Error while turning on device %s (%s)", self.name, err) self._available = False + self.schedule_update_ha_state() + def turn_off(self, **kwargs): """Turn the light off.""" transition_time = int(kwargs.get(ATTR_TRANSITION, 0)) try: - self.wemo.turn_off(transition=transition_time) + if self.wemo.turn_off(transition=transition_time): + self._state["onoff"] = WEMO_OFF except ActionException as err: _LOGGER.warning("Error while turning off device %s (%s)", self.name, err) self._available = False + self.schedule_update_ha_state() + def _update(self, force_update=True): 
"""Synchronize state with bridge.""" try: @@ -200,7 +210,7 @@ class WemoLight(LightEntity): self._available = False self.wemo.reconnect_with_device() else: - self._is_on = self._state.get("onoff") != 0 + self._is_on = self._state.get("onoff") != WEMO_OFF self._brightness = self._state.get("level", 255) self._color_temp = self._state.get("temperature_mireds") self._available = True @@ -355,20 +365,27 @@ class WemoDimmer(LightEntity): brightness = 255 try: - self.wemo.on() + if self.wemo.on(): + self._state = WEMO_ON + self.wemo.set_brightness(brightness) except ActionException as err: _LOGGER.warning("Error while turning on device %s (%s)", self.name, err) self._available = False + self.schedule_update_ha_state() + def turn_off(self, **kwargs): """Turn the dimmer off.""" try: - self.wemo.off() + if self.wemo.off(): + self._state = WEMO_OFF except ActionException as err: _LOGGER.warning("Error while turning on device %s (%s)", self.name, err) self._available = False + self.schedule_update_ha_state() + @property def available(self): """Return if dimmer is available.""" diff --git a/homeassistant/components/wemo/manifest.json b/homeassistant/components/wemo/manifest.json index 96efb140cee..e08e82b3269 100644 --- a/homeassistant/components/wemo/manifest.json +++ b/homeassistant/components/wemo/manifest.json @@ -12,5 +12,5 @@ "homekit": { "models": ["Wemo"] }, - "codeowners": ["@sqldiablo"] + "codeowners": [] } diff --git a/homeassistant/components/wemo/switch.py b/homeassistant/components/wemo/switch.py index 836ddf0730f..7cc88f552bf 100644 --- a/homeassistant/components/wemo/switch.py +++ b/homeassistant/components/wemo/switch.py @@ -18,6 +18,7 @@ PARALLEL_UPDATES = 0 _LOGGER = logging.getLogger(__name__) +# The WEMO_ constants below come from pywemo itself ATTR_SENSOR_STATE = "sensor_state" ATTR_SWITCH_MODE = "switch_mode" ATTR_CURRENT_STATE_DETAIL = "state_detail" @@ -191,19 +192,25 @@ class WemoSwitch(SwitchEntity): def turn_on(self, **kwargs): """Turn the switch on.""" try: - self.wemo.on() + if self.wemo.on(): + self._state = WEMO_ON except ActionException as err: _LOGGER.warning("Error while turning on device %s (%s)", self.name, err) self._available = False + self.schedule_update_ha_state() + def turn_off(self, **kwargs): """Turn the switch off.""" try: - self.wemo.off() + if self.wemo.off(): + self._state = WEMO_OFF except ActionException as err: _LOGGER.warning("Error while turning off device %s (%s)", self.name, err) self._available = False + self.schedule_update_ha_state() + async def async_added_to_hass(self): """Wemo switch added to Home Assistant.""" # Define inside async context so we know our event loop diff --git a/homeassistant/components/wemo/translations/fi.json b/homeassistant/components/wemo/translations/fi.json deleted file mode 100644 index afce1415ba7..00000000000 --- a/homeassistant/components/wemo/translations/fi.json +++ /dev/null @@ -1,9 +0,0 @@ -{ - "config": { - "step": { - "confirm": { - "title": "Wemo" - } - } - } -} \ No newline at end of file diff --git a/homeassistant/components/wemo/translations/hr.json b/homeassistant/components/wemo/translations/hr.json deleted file mode 100644 index 36e06157eb7..00000000000 --- a/homeassistant/components/wemo/translations/hr.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "title": "Wemo" -} \ No newline at end of file diff --git a/homeassistant/components/wemo/translations/nn.json b/homeassistant/components/wemo/translations/nn.json deleted file mode 100644 index afce1415ba7..00000000000 --- 
a/homeassistant/components/wemo/translations/nn.json +++ /dev/null @@ -1,9 +0,0 @@ -{ - "config": { - "step": { - "confirm": { - "title": "Wemo" - } - } - } -} \ No newline at end of file diff --git a/homeassistant/components/withings/__init__.py b/homeassistant/components/withings/__init__.py index 93a6250ce03..29fca81ca79 100644 --- a/homeassistant/components/withings/__init__.py +++ b/homeassistant/components/withings/__init__.py @@ -3,55 +3,80 @@ Support for the Withings API. For more details about this platform, please refer to the documentation at """ +import asyncio +from typing import Optional, cast + +from aiohttp.web import Request, Response import voluptuous as vol from withings_api import WithingsAuth +from withings_api.common import NotifyAppli, enum_or_raise +from homeassistant.components import webhook +from homeassistant.components.binary_sensor import DOMAIN as BINARY_SENSOR_DOMAIN +from homeassistant.components.sensor import DOMAIN as SENSOR_DOMAIN +from homeassistant.components.webhook import ( + async_unregister as async_unregister_webhook, +) from homeassistant.config_entries import ConfigEntry -from homeassistant.const import CONF_CLIENT_ID, CONF_CLIENT_SECRET -from homeassistant.helpers import config_entry_oauth2_flow, config_validation as cv -from homeassistant.helpers.typing import ConfigType, HomeAssistantType +from homeassistant.const import CONF_CLIENT_ID, CONF_CLIENT_SECRET, CONF_WEBHOOK_ID +from homeassistant.core import HomeAssistant, callback +from homeassistant.exceptions import ConfigEntryNotReady +from homeassistant.helpers import config_validation as cv +from homeassistant.helpers.event import async_call_later +from homeassistant.helpers.typing import ConfigType -from . import config_flow +from . import config_flow, const from .common import ( _LOGGER, - NotAuthenticatedError, WithingsLocalOAuth2Implementation, - get_data_manager, + async_get_data_manager, + async_remove_data_manager, + get_data_manager_by_webhook_id, + json_message_response, ) -from .const import CONF_PROFILES, CONFIG, CREDENTIALS, DOMAIN + +DOMAIN = const.DOMAIN CONFIG_SCHEMA = vol.Schema( { - DOMAIN: vol.Schema( - { - vol.Required(CONF_CLIENT_ID): vol.All(cv.string, vol.Length(min=1)), - vol.Required(CONF_CLIENT_SECRET): vol.All(cv.string, vol.Length(min=1)), - vol.Required(CONF_PROFILES): vol.All( - cv.ensure_list, - vol.Unique(), - vol.Length(min=1), - [vol.All(cv.string, vol.Length(min=1))], - ), - } + DOMAIN: vol.All( + cv.deprecated(const.CONF_PROFILES, invalidation_version="0.114"), + vol.Schema( + { + vol.Required(CONF_CLIENT_ID): vol.All(cv.string, vol.Length(min=1)), + vol.Required(CONF_CLIENT_SECRET): vol.All( + cv.string, vol.Length(min=1) + ), + vol.Optional(const.CONF_USE_WEBHOOK, default=False): cv.boolean, + vol.Optional(const.CONF_PROFILES): vol.All( + cv.ensure_list, + vol.Unique(), + vol.Length(min=1), + [vol.All(cv.string, vol.Length(min=1))], + ), + } + ), ) }, extra=vol.ALLOW_EXTRA, ) -async def async_setup(hass: HomeAssistantType, config: ConfigType) -> bool: +async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: """Set up the Withings component.""" conf = config.get(DOMAIN, {}) if not conf: return True - hass.data[DOMAIN] = {CONFIG: conf} + # Make the config available to the oauth2 config flow. + hass.data[DOMAIN] = {const.CONFIG: conf} + # Setup the oauth2 config flow. 
config_flow.WithingsFlowHandler.async_register_implementation( hass, WithingsLocalOAuth2Implementation( hass, - DOMAIN, + const.DOMAIN, conf[CONF_CLIENT_ID], conf[CONF_CLIENT_SECRET], f"{WithingsAuth.URL}/oauth2_user/authorize2", @@ -62,52 +87,129 @@ async def async_setup(hass: HomeAssistantType, config: ConfigType) -> bool: return True -async def async_setup_entry(hass: HomeAssistantType, entry: ConfigEntry) -> bool: +async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Set up Withings from a config entry.""" - # Upgrading existing token information to hass managed tokens. - if "auth_implementation" not in entry.data: - _LOGGER.debug("Upgrading existing config entry") - data = entry.data - creds = data.get(CREDENTIALS, {}) - hass.config_entries.async_update_entry( - entry, - data={ - "auth_implementation": DOMAIN, - "implementation": DOMAIN, - "profile": data.get("profile"), - "token": { - "access_token": creds.get("access_token"), - "refresh_token": creds.get("refresh_token"), - "expires_at": int(creds.get("token_expiry")), - "type": creds.get("token_type"), - "userid": creds.get("userid") or creds.get("user_id"), - }, - }, - ) + config_updates = {} - implementation = await config_entry_oauth2_flow.async_get_config_entry_implementation( - hass, entry + # Add a unique id if it's an older config entry. + if entry.unique_id != entry.data["token"]["userid"] or not isinstance( + entry.unique_id, str + ): + config_updates["unique_id"] = str(entry.data["token"]["userid"]) + + # Add the webhook configuration. + if CONF_WEBHOOK_ID not in entry.data: + webhook_id = webhook.async_generate_id() + config_updates["data"] = { + **entry.data, + **{ + const.CONF_USE_WEBHOOK: hass.data[DOMAIN][const.CONFIG][ + const.CONF_USE_WEBHOOK + ], + CONF_WEBHOOK_ID: webhook_id, + const.CONF_WEBHOOK_URL: entry.data.get( + const.CONF_WEBHOOK_URL, + webhook.async_generate_url(hass, webhook_id), + ), + }, + } + + if config_updates: + hass.config_entries.async_update_entry(entry, **config_updates) + + data_manager = await async_get_data_manager(hass, entry) + + _LOGGER.debug("Confirming %s is authenticated to withings", data_manager.profile) + await data_manager.poll_data_update_coordinator.async_refresh() + if not data_manager.poll_data_update_coordinator.last_update_success: + raise ConfigEntryNotReady() + + webhook.async_register( + hass, + const.DOMAIN, + "Withings notify", + data_manager.webhook_config.id, + async_webhook_handler, ) - data_manager = get_data_manager(hass, entry, implementation) + # Perform first webhook subscription check. + if data_manager.webhook_config.enabled: + data_manager.async_start_polling_webhook_subscriptions() - _LOGGER.debug("Confirming we're authenticated") - try: - await data_manager.check_authenticated() - except NotAuthenticatedError: - _LOGGER.error( - "Withings auth tokens exired for profile %s, remove and re-add the integration", - data_manager.profile, - ) - return False + @callback + def async_call_later_callback(now) -> None: + hass.async_create_task( + data_manager.subscription_update_coordinator.async_refresh() + ) + + # Start subscription check in the background, outside this component's setup. 
+ async_call_later(hass, 1, async_call_later_callback) hass.async_create_task( - hass.config_entries.async_forward_entry_setup(entry, "sensor") + hass.config_entries.async_forward_entry_setup(entry, BINARY_SENSOR_DOMAIN) + ) + hass.async_create_task( + hass.config_entries.async_forward_entry_setup(entry, SENSOR_DOMAIN) ) return True -async def async_unload_entry(hass: HomeAssistantType, entry: ConfigEntry) -> bool: +async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Unload Withings config entry.""" - return await hass.config_entries.async_forward_entry_unload(entry, "sensor") + data_manager = await async_get_data_manager(hass, entry) + data_manager.async_stop_polling_webhook_subscriptions() + + async_unregister_webhook(hass, data_manager.webhook_config.id) + + await asyncio.gather( + data_manager.async_unsubscribe_webhook(), + hass.config_entries.async_forward_entry_unload(entry, BINARY_SENSOR_DOMAIN), + hass.config_entries.async_forward_entry_unload(entry, SENSOR_DOMAIN), + ) + + async_remove_data_manager(hass, entry) + + return True + + +async def async_webhook_handler( + hass: HomeAssistant, webhook_id: str, request: Request +) -> Optional[Response]: + """Handle webhook calls.""" + # Handle http head calls to the path. + # When creating a notify subscription, Withings will check that the endpoint is running by sending a HEAD request. + if request.method.upper() == "HEAD": + return Response() + + if request.method.upper() != "POST": + return json_message_response("Invalid method.", message_code=2) + + # Handle http post calls to the path. + if not request.body_exists: + return json_message_response("No request body.", message_code=12) + + params = await request.post() + + if "appli" not in params: + return json_message_response("Parameter appli not provided", message_code=20) + + try: + appli = cast( + NotifyAppli, enum_or_raise(int(params.getone("appli")), NotifyAppli) + ) + except ValueError: + return json_message_response("Invalid appli provided", message_code=21) + + data_manager = get_data_manager_by_webhook_id(hass, webhook_id) + if not data_manager: + _LOGGER.error( + "Webhook id %s not handled by data manager. This is a bug and should be reported.", + webhook_id, + ) + return json_message_response("User not found", message_code=1) + + # Run this in the background and return immediately.
+ hass.async_create_task(data_manager.async_webhook_data_updated(appli)) + + return json_message_response("Success", message_code=0) diff --git a/homeassistant/components/withings/binary_sensor.py b/homeassistant/components/withings/binary_sensor.py new file mode 100644 index 00000000000..0fb8d8411fd --- /dev/null +++ b/homeassistant/components/withings/binary_sensor.py @@ -0,0 +1,40 @@ +"""Sensors flow for Withings.""" +from typing import Callable, List + +from homeassistant.components.binary_sensor import ( + DEVICE_CLASS_PRESENCE, + DOMAIN as BINARY_SENSOR_DOMAIN, + BinarySensorDevice, +) +from homeassistant.config_entries import ConfigEntry +from homeassistant.core import HomeAssistant +from homeassistant.helpers.entity import Entity + +from .common import BaseWithingsSensor, async_create_entities + + +async def async_setup_entry( + hass: HomeAssistant, + entry: ConfigEntry, + async_add_entities: Callable[[List[Entity], bool], None], +) -> None: + """Set up the sensor config entry.""" + entities = await async_create_entities( + hass, entry, WithingsHealthBinarySensor, BINARY_SENSOR_DOMAIN + ) + + async_add_entities(entities, True) + + +class WithingsHealthBinarySensor(BaseWithingsSensor, BinarySensorDevice): + """Implementation of a Withings binary sensor.""" + + @property + def is_on(self) -> bool: + """Return true if the binary sensor is on.""" + return self._state_data + + @property + def device_class(self) -> str: + """Provide the device class.""" + return DEVICE_CLASS_PRESENCE diff --git a/homeassistant/components/withings/common.py b/homeassistant/components/withings/common.py index 1539b973cb8..b7c3cec6d9b 100644 --- a/homeassistant/components/withings/common.py +++ b/homeassistant/components/withings/common.py @@ -1,41 +1,62 @@ """Common code for Withings.""" -from asyncio import run_coroutine_threadsafe +import asyncio +from dataclasses import dataclass import datetime -from functools import partial +from datetime import timedelta +from enum import Enum, IntEnum import logging import re -import time -from typing import Any, Dict +from typing import Any, Callable, Dict, List, Optional, Tuple, Union +from aiohttp.web import Response import requests -from withings_api import ( - AbstractWithingsApi, - MeasureGetMeasResponse, - SleepGetResponse, +from withings_api import AbstractWithingsApi +from withings_api.common import ( + AuthFailedException, + GetSleepSummaryField, + MeasureGroupAttribs, + MeasureType, + MeasureTypes, + NotifyAppli, SleepGetSummaryResponse, + UnauthorizedException, + query_measure_groups, ) -from withings_api.common import AuthFailedException, UnauthorizedException +from homeassistant.components.binary_sensor import DOMAIN as BINARY_SENSOR_DOMAIN +from homeassistant.components.http import HomeAssistantView +from homeassistant.components.sensor import DOMAIN as SENSOR_DOMAIN from homeassistant.config_entries import ConfigEntry -from homeassistant.core import HomeAssistant -from homeassistant.exceptions import HomeAssistantError, PlatformNotReady +from homeassistant.const import ( + CONF_WEBHOOK_ID, + MASS_KILOGRAMS, + SPEED_METERS_PER_SECOND, + TIME_SECONDS, + UNIT_PERCENTAGE, +) +from homeassistant.core import CALLBACK_TYPE, HomeAssistant, callback +from homeassistant.exceptions import HomeAssistantError +from homeassistant.helpers import config_entry_oauth2_flow from homeassistant.helpers.config_entry_oauth2_flow import ( AUTH_CALLBACK_PATH, AbstractOAuth2Implementation, LocalOAuth2Implementation, OAuth2Session, ) +from homeassistant.helpers.entity import Entity 
+from homeassistant.helpers.entity_registry import EntityRegistry from homeassistant.helpers.network import get_url -from homeassistant.util import dt, slugify +from homeassistant.helpers.update_coordinator import DataUpdateCoordinator +from homeassistant.util import dt from . import const +from .const import Measurement _LOGGER = logging.getLogger(const.LOG_NAMESPACE) -NOT_AUTHENTICATED_ERROR = re.compile( - # ".*(Error Code (100|101|102|200|401)|Missing access token parameter).*", - "^401,.*", - re.IGNORECASE, -) +NOT_AUTHENTICATED_ERROR = re.compile("^401,.*", re.IGNORECASE,) +DATA_UPDATED_SIGNAL = "withings_entity_state_updated" + +MeasurementData = Dict[Measurement, Any] class NotAuthenticatedError(HomeAssistantError): @@ -46,33 +67,403 @@ class ServiceError(HomeAssistantError): """Raise when the service has an error.""" -class ThrottleData: - """Throttle data.""" +class UpdateType(Enum): + """Data update type.""" - def __init__(self, interval: int, data: Any): - """Initialize throttle data.""" - self._time = int(time.time()) - self._interval = interval - self._data = data + POLL = "poll" + WEBHOOK = "webhook" - @property - def time(self) -> int: - """Get time created.""" - return self._time - @property - def interval(self) -> int: - """Get interval.""" - return self._interval +@dataclass +class WithingsAttribute: + """Immutable class for describing withings sensor data.""" - @property - def data(self) -> Any: - """Get data.""" - return self._data + measurement: Measurement + measute_type: Enum + friendly_name: str + unit_of_measurement: str + icon: Optional[str] + platform: str + enabled_by_default: bool + update_type: UpdateType - def is_expired(self) -> bool: - """Is this data expired.""" - return int(time.time()) - self.time > self.interval + +@dataclass +class WithingsData: + """Represents value and meta-data from the withings service.""" + + attribute: WithingsAttribute + value: Any + + +@dataclass +class WebhookConfig: + """Config for a webhook.""" + + id: str + url: str + enabled: bool + + +@dataclass +class StateData: + """State data held by data manager for retrieval by entities.""" + + unique_id: str + state: Any + + +WITHINGS_ATTRIBUTES = [ + WithingsAttribute( + Measurement.WEIGHT_KG, + MeasureType.WEIGHT, + "Weight", + MASS_KILOGRAMS, + "mdi:weight-kilogram", + SENSOR_DOMAIN, + True, + UpdateType.POLL, + ), + WithingsAttribute( + Measurement.FAT_MASS_KG, + MeasureType.FAT_MASS_WEIGHT, + "Fat Mass", + MASS_KILOGRAMS, + "mdi:weight-kilogram", + SENSOR_DOMAIN, + True, + UpdateType.POLL, + ), + WithingsAttribute( + Measurement.FAT_FREE_MASS_KG, + MeasureType.FAT_FREE_MASS, + "Fat Free Mass", + MASS_KILOGRAMS, + "mdi:weight-kilogram", + SENSOR_DOMAIN, + True, + UpdateType.POLL, + ), + WithingsAttribute( + Measurement.MUSCLE_MASS_KG, + MeasureType.MUSCLE_MASS, + "Muscle Mass", + MASS_KILOGRAMS, + "mdi:weight-kilogram", + SENSOR_DOMAIN, + True, + UpdateType.POLL, + ), + WithingsAttribute( + Measurement.BONE_MASS_KG, + MeasureType.BONE_MASS, + "Bone Mass", + MASS_KILOGRAMS, + "mdi:weight-kilogram", + SENSOR_DOMAIN, + True, + UpdateType.POLL, + ), + WithingsAttribute( + Measurement.HEIGHT_M, + MeasureType.HEIGHT, + "Height", + const.UOM_LENGTH_M, + "mdi:ruler", + SENSOR_DOMAIN, + False, + UpdateType.POLL, + ), + WithingsAttribute( + Measurement.TEMP_C, + MeasureType.TEMPERATURE, + "Temperature", + const.UOM_TEMP_C, + "mdi:thermometer", + SENSOR_DOMAIN, + True, + UpdateType.POLL, + ), + WithingsAttribute( + Measurement.BODY_TEMP_C, + MeasureType.BODY_TEMPERATURE, + "Body 
Temperature", + const.UOM_TEMP_C, + "mdi:thermometer", + SENSOR_DOMAIN, + True, + UpdateType.POLL, + ), + WithingsAttribute( + Measurement.SKIN_TEMP_C, + MeasureType.SKIN_TEMPERATURE, + "Skin Temperature", + const.UOM_TEMP_C, + "mdi:thermometer", + SENSOR_DOMAIN, + True, + UpdateType.POLL, + ), + WithingsAttribute( + Measurement.FAT_RATIO_PCT, + MeasureType.FAT_RATIO, + "Fat Ratio", + UNIT_PERCENTAGE, + None, + SENSOR_DOMAIN, + True, + UpdateType.POLL, + ), + WithingsAttribute( + Measurement.DIASTOLIC_MMHG, + MeasureType.DIASTOLIC_BLOOD_PRESSURE, + "Diastolic Blood Pressure", + const.UOM_MMHG, + None, + SENSOR_DOMAIN, + True, + UpdateType.POLL, + ), + WithingsAttribute( + Measurement.SYSTOLIC_MMGH, + MeasureType.SYSTOLIC_BLOOD_PRESSURE, + "Systolic Blood Pressure", + const.UOM_MMHG, + None, + SENSOR_DOMAIN, + True, + UpdateType.POLL, + ), + WithingsAttribute( + Measurement.HEART_PULSE_BPM, + MeasureType.HEART_RATE, + "Heart Pulse", + const.UOM_BEATS_PER_MINUTE, + "mdi:heart-pulse", + SENSOR_DOMAIN, + True, + UpdateType.POLL, + ), + WithingsAttribute( + Measurement.SPO2_PCT, + MeasureType.SP02, + "SP02", + UNIT_PERCENTAGE, + None, + SENSOR_DOMAIN, + True, + UpdateType.POLL, + ), + WithingsAttribute( + Measurement.HYDRATION, + MeasureType.HYDRATION, + "Hydration", + MASS_KILOGRAMS, + "mdi:water", + SENSOR_DOMAIN, + False, + UpdateType.POLL, + ), + WithingsAttribute( + Measurement.PWV, + MeasureType.PULSE_WAVE_VELOCITY, + "Pulse Wave Velocity", + SPEED_METERS_PER_SECOND, + None, + SENSOR_DOMAIN, + True, + UpdateType.POLL, + ), + WithingsAttribute( + Measurement.SLEEP_BREATHING_DISTURBANCES_INTENSITY, + GetSleepSummaryField.BREATHING_DISTURBANCES_INTENSITY, + "Breathing disturbances intensity", + "", + "", + SENSOR_DOMAIN, + False, + UpdateType.POLL, + ), + WithingsAttribute( + Measurement.SLEEP_DEEP_DURATION_SECONDS, + GetSleepSummaryField.DEEP_SLEEP_DURATION, + "Deep sleep", + TIME_SECONDS, + "mdi:sleep", + SENSOR_DOMAIN, + False, + UpdateType.POLL, + ), + WithingsAttribute( + Measurement.SLEEP_TOSLEEP_DURATION_SECONDS, + GetSleepSummaryField.DURATION_TO_SLEEP, + "Time to sleep", + TIME_SECONDS, + "mdi:sleep", + SENSOR_DOMAIN, + False, + UpdateType.POLL, + ), + WithingsAttribute( + Measurement.SLEEP_TOWAKEUP_DURATION_SECONDS, + GetSleepSummaryField.DURATION_TO_WAKEUP, + "Time to wakeup", + TIME_SECONDS, + "mdi:sleep-off", + SENSOR_DOMAIN, + False, + UpdateType.POLL, + ), + WithingsAttribute( + Measurement.SLEEP_HEART_RATE_AVERAGE, + GetSleepSummaryField.HR_AVERAGE, + "Average heart rate", + const.UOM_BEATS_PER_MINUTE, + "mdi:heart-pulse", + SENSOR_DOMAIN, + False, + UpdateType.POLL, + ), + WithingsAttribute( + Measurement.SLEEP_HEART_RATE_MAX, + GetSleepSummaryField.HR_MAX, + "Maximum heart rate", + const.UOM_BEATS_PER_MINUTE, + "mdi:heart-pulse", + SENSOR_DOMAIN, + False, + UpdateType.POLL, + ), + WithingsAttribute( + Measurement.SLEEP_HEART_RATE_MIN, + GetSleepSummaryField.HR_MIN, + "Minimum heart rate", + const.UOM_BEATS_PER_MINUTE, + "mdi:heart-pulse", + SENSOR_DOMAIN, + False, + UpdateType.POLL, + ), + WithingsAttribute( + Measurement.SLEEP_LIGHT_DURATION_SECONDS, + GetSleepSummaryField.LIGHT_SLEEP_DURATION, + "Light sleep", + TIME_SECONDS, + "mdi:sleep", + SENSOR_DOMAIN, + False, + UpdateType.POLL, + ), + WithingsAttribute( + Measurement.SLEEP_REM_DURATION_SECONDS, + GetSleepSummaryField.REM_SLEEP_DURATION, + "REM sleep", + TIME_SECONDS, + "mdi:sleep", + SENSOR_DOMAIN, + False, + UpdateType.POLL, + ), + WithingsAttribute( + Measurement.SLEEP_RESPIRATORY_RATE_AVERAGE, + 
GetSleepSummaryField.RR_AVERAGE, + "Average respiratory rate", + const.UOM_BREATHS_PER_MINUTE, + None, + SENSOR_DOMAIN, + False, + UpdateType.POLL, + ), + WithingsAttribute( + Measurement.SLEEP_RESPIRATORY_RATE_MAX, + GetSleepSummaryField.RR_MAX, + "Maximum respiratory rate", + const.UOM_BREATHS_PER_MINUTE, + None, + SENSOR_DOMAIN, + False, + UpdateType.POLL, + ), + WithingsAttribute( + Measurement.SLEEP_RESPIRATORY_RATE_MIN, + GetSleepSummaryField.RR_MIN, + "Minimum respiratory rate", + const.UOM_BREATHS_PER_MINUTE, + None, + SENSOR_DOMAIN, + False, + UpdateType.POLL, + ), + WithingsAttribute( + Measurement.SLEEP_SCORE, + GetSleepSummaryField.SLEEP_SCORE, + "Sleep score", + "", + None, + SENSOR_DOMAIN, + False, + UpdateType.POLL, + ), + WithingsAttribute( + Measurement.SLEEP_SNORING, + GetSleepSummaryField.SNORING, + "Snoring", + "", + None, + SENSOR_DOMAIN, + False, + UpdateType.POLL, + ), + WithingsAttribute( + Measurement.SLEEP_SNORING_EPISODE_COUNT, + GetSleepSummaryField.SNORING_EPISODE_COUNT, + "Snoring episode count", + "", + None, + SENSOR_DOMAIN, + False, + UpdateType.POLL, + ), + WithingsAttribute( + Measurement.SLEEP_WAKEUP_COUNT, + GetSleepSummaryField.WAKEUP_COUNT, + "Wakeup count", + const.UOM_FREQUENCY, + "mdi:sleep-off", + SENSOR_DOMAIN, + False, + UpdateType.POLL, + ), + WithingsAttribute( + Measurement.SLEEP_WAKEUP_DURATION_SECONDS, + GetSleepSummaryField.WAKEUP_DURATION, + "Wakeup time", + TIME_SECONDS, + "mdi:sleep-off", + SENSOR_DOMAIN, + False, + UpdateType.POLL, + ), + # Webhook measurements. + WithingsAttribute( + Measurement.IN_BED, + NotifyAppli.BED_IN, + "In bed", + "", + "mdi:bed", + BINARY_SENSOR_DOMAIN, + True, + UpdateType.WEBHOOK, + ), +] + +WITHINGS_MEASUREMENTS_MAP: Dict[Measurement, WithingsAttribute] = { + attr.measurement: attr for attr in WITHINGS_ATTRIBUTES +} + +WITHINGS_MEASURE_TYPE_MAP: Dict[ + Union[NotifyAppli, GetSleepSummaryField, MeasureType], WithingsAttribute +] = {attr.measute_type: attr for attr in WITHINGS_ATTRIBUTES} class ConfigEntryWithingsApi(AbstractWithingsApi): @@ -92,184 +483,308 @@ class ConfigEntryWithingsApi(AbstractWithingsApi): def _request( self, path: str, params: Dict[str, Any], method: str = "GET" - ) -> Dict[str, Any]: - return run_coroutine_threadsafe( - self.async_do_request(path, params, method), self._hass.loop - ).result() - - async def async_do_request( - self, path: str, params: Dict[str, Any], method: str = "GET" ) -> Dict[str, Any]: """Perform an async request.""" - await self.session.async_ensure_token_valid() - - response = await self._hass.async_add_executor_job( - partial( - requests.request, - method, - f"{self.URL}/{path}", - params=params, - headers={ - "Authorization": "Bearer %s" - % self._config_entry.data["token"]["access_token"] - }, - ) + asyncio.run_coroutine_threadsafe( + self.session.async_ensure_token_valid(), self._hass.loop ) + access_token = self._config_entry.data["token"]["access_token"] + response = requests.request( + method, + f"{self.URL}/{path}", + params=params, + headers={"Authorization": f"Bearer {access_token}"}, + ) return response.json() -class WithingsDataManager: - """A class representing an Withings cloud service connection.""" +def json_message_response(message: str, message_code: int) -> Response: + """Produce common json output.""" + return HomeAssistantView.json({"message": message, "code": message_code}, 200) - service_available = None - def __init__(self, hass: HomeAssistant, profile: str, api: ConfigEntryWithingsApi): - """Initialize data manager.""" +class 
WebhookAvailability(IntEnum): + """Represents various statuses of webhook availability.""" + + SUCCESS = 0 + CONNECT_ERROR = 1 + HTTP_ERROR = 2 + NOT_WEBHOOK = 3 + + +class WebhookUpdateCoordinator: + """Coordinates webhook data updates across listeners.""" + + def __init__(self, hass: HomeAssistant, user_id: int) -> None: + """Initialize the object.""" + self._hass = hass + self._user_id = user_id + self._listeners: List[CALLBACK_TYPE] = [] + self.data: MeasurementData = {} + + def async_add_listener(self, listener: CALLBACK_TYPE) -> Callable[[], None]: + """Add a listener.""" + self._listeners.append(listener) + + @callback + def remove_listener() -> None: + self.async_remove_listener(listener) + + return remove_listener + + def async_remove_listener(self, listener: CALLBACK_TYPE) -> None: + """Remove a listener.""" + self._listeners.remove(listener) + + def update_data(self, measurement: Measurement, value: Any) -> None: + """Update the data object and notify listeners the data has changed.""" + self.data[measurement] = value + self.notify_data_changed() + + def notify_data_changed(self) -> None: + """Notify all listeners the data has changed.""" + for listener in self._listeners: + listener() + + +class DataManager: + """Manage withing data.""" + + def __init__( + self, + hass: HomeAssistant, + profile: str, + api: ConfigEntryWithingsApi, + user_id: int, + webhook_config: WebhookConfig, + ): + """Initialize the data manager.""" self._hass = hass self._api = api + self._user_id = user_id self._profile = profile - self._slug = slugify(profile) + self._webhook_config = webhook_config + self._notify_subscribe_delay = datetime.timedelta(seconds=5) + self._notify_unsubscribe_delay = datetime.timedelta(seconds=1) - self._measures = None - self._sleep = None - self._sleep_summary = None + self._is_available = True + self._cancel_interval_update_interval: Optional[CALLBACK_TYPE] = None + self._cancel_configure_webhook_subscribe_interval: Optional[ + CALLBACK_TYPE + ] = None + self._api_notification_id = f"withings_{self._user_id}" - self.sleep_summary_last_update_parameter = None - self.throttle_data = {} + self.subscription_update_coordinator = DataUpdateCoordinator( + hass, + _LOGGER, + name="subscription_update_coordinator", + update_interval=timedelta(minutes=120), + update_method=self.async_subscribe_webhook, + ) + self.poll_data_update_coordinator = DataUpdateCoordinator( + hass, + _LOGGER, + name="poll_data_update_coordinator", + update_interval=timedelta(minutes=120) + if self._webhook_config.enabled + else timedelta(minutes=10), + update_method=self.async_get_all_data, + ) + self.webhook_update_coordinator = WebhookUpdateCoordinator( + self._hass, self._user_id + ) + self._cancel_subscription_update: Optional[Callable[[], None]] = None + self._subscribe_webhook_run_count = 0 + + @property + def webhook_config(self) -> WebhookConfig: + """Get the webhook config.""" + return self._webhook_config + + @property + def user_id(self) -> int: + """Get the user_id of the authenticated user.""" + return self._user_id @property def profile(self) -> str: """Get the profile.""" return self._profile - @property - def slug(self) -> str: - """Get the slugified profile the data is for.""" - return self._slug + def async_start_polling_webhook_subscriptions(self) -> None: + """Start polling webhook subscriptions (if enabled) to reconcile their setup.""" + self.async_stop_polling_webhook_subscriptions() - @property - def api(self) -> ConfigEntryWithingsApi: - """Get the api object.""" - return self._api + 
def empty_listener() -> None: + pass - @property - def measures(self) -> MeasureGetMeasResponse: - """Get the current measures data.""" - return self._measures - - @property - def sleep(self) -> SleepGetResponse: - """Get the current sleep data.""" - return self._sleep - - @property - def sleep_summary(self) -> SleepGetSummaryResponse: - """Get the current sleep summary data.""" - return self._sleep_summary - - @staticmethod - def get_throttle_interval() -> int: - """Get the throttle interval.""" - return const.THROTTLE_INTERVAL - - def get_throttle_data(self, domain: str) -> ThrottleData: - """Get throttlel data.""" - return self.throttle_data.get(domain) - - def set_throttle_data(self, domain: str, throttle_data: ThrottleData): - """Set throttle data.""" - self.throttle_data[domain] = throttle_data - - @staticmethod - def print_service_unavailable() -> bool: - """Print the service is unavailable (once) to the log.""" - if WithingsDataManager.service_available is not False: - _LOGGER.error("Looks like the service is not available at the moment") - WithingsDataManager.service_available = False - return True - - return False - - @staticmethod - def print_service_available() -> bool: - """Print the service is available (once) to to the log.""" - if WithingsDataManager.service_available is not True: - _LOGGER.info("Looks like the service is available again") - WithingsDataManager.service_available = True - return True - - return False - - async def call(self, function, throttle_domain=None) -> Any: - """Call an api method and handle the result.""" - throttle_data = self.get_throttle_data(throttle_domain) - - should_throttle = ( - throttle_domain and throttle_data and not throttle_data.is_expired() + self._cancel_subscription_update = self.subscription_update_coordinator.async_add_listener( + empty_listener ) + def async_stop_polling_webhook_subscriptions(self) -> None: + """Stop polling webhook subscriptions.""" + if self._cancel_subscription_update: + self._cancel_subscription_update() + self._cancel_subscription_update = None + + async def _do_retry(self, func, attempts=3) -> Any: + """Retry a function call. + + Withings' API occasionally and incorrectly throws errors. Retrying the call tends to work. + """ + exception = None + for attempt in range(1, attempts + 1): + _LOGGER.debug("Attempt %s of %s", attempt, attempts) + try: + return await func() + except Exception as exception1: # pylint: disable=broad-except + await asyncio.sleep(0.1) + exception = exception1 + continue + + if exception: + raise exception + + async def async_subscribe_webhook(self) -> None: + """Subscribe the webhook to withings data updates.""" + return await self._do_retry(self._async_subscribe_webhook) + + async def _async_subscribe_webhook(self) -> None: + _LOGGER.debug("Configuring withings webhook.") + + # On first startup, perform a fresh re-subscribe. Withings stops pushing data + # if the webhook fails enough times but they don't remove the old subscription + # config. This ensures the subscription is setup correctly and they start + # pushing again. + if self._subscribe_webhook_run_count == 0: + _LOGGER.debug("Refreshing withings webhook configs.") + await self.async_unsubscribe_webhook() + self._subscribe_webhook_run_count += 1 + + # Get the current webhooks. 
+ response = await self._hass.async_add_executor_job(self._api.notify_list) + + subscribed_applis = frozenset( + [ + profile.appli + for profile in response.profiles + if profile.callbackurl == self._webhook_config.url + ] + ) + + # Determine what subscriptions need to be created. + ignored_applis = frozenset({NotifyAppli.USER}) + to_add_applis = frozenset( + [ + appli + for appli in NotifyAppli + if appli not in subscribed_applis and appli not in ignored_applis + ] + ) + + # Subscribe to each one. + for appli in to_add_applis: + _LOGGER.debug( + "Subscribing %s for %s in %s seconds", + self._webhook_config.url, + appli, + self._notify_subscribe_delay.total_seconds(), + ) + # Withings will HTTP HEAD the callback_url and needs some downtime + # between each call or there is a higher chance of failure. + await asyncio.sleep(self._notify_subscribe_delay.total_seconds()) + await self._hass.async_add_executor_job( + self._api.notify_subscribe, self._webhook_config.url, appli + ) + + async def async_unsubscribe_webhook(self) -> None: + """Unsubscribe webhook from withings data updates.""" + return await self._do_retry(self._async_unsubscribe_webhook) + + async def _async_unsubscribe_webhook(self) -> None: + # Get the current webhooks. + response = await self._hass.async_add_executor_job(self._api.notify_list) + + # Revoke subscriptions. + for profile in response.profiles: + _LOGGER.debug( + "Unsubscribing %s for %s in %s seconds", + profile.callbackurl, + profile.appli, + self._notify_unsubscribe_delay.total_seconds(), + ) + # Quick calls to Withings can result in the service returning errors. Give them + # some time to cool down. + await asyncio.sleep(self._notify_subscribe_delay.total_seconds()) + await self._hass.async_add_executor_job( + self._api.notify_revoke, profile.callbackurl, profile.appli + ) + + async def async_get_all_data(self) -> Optional[Dict[MeasureType, Any]]: + """Update all withings data.""" try: - if should_throttle: - _LOGGER.debug("Throttling call for domain: %s", throttle_domain) - result = throttle_data.data - else: - _LOGGER.debug("Running call.") - result = await self._hass.async_add_executor_job(function) + return await self._do_retry(self._async_get_all_data) + except Exception as exception: + # User is not authenticated. + if isinstance( + exception, (UnauthorizedException, AuthFailedException) + ) or NOT_AUTHENTICATED_ERROR.match(str(exception)): + context = { + const.PROFILE: self._profile, + "userid": self._user_id, + "source": "reauth", + } - # Update throttle data. - self.set_throttle_data( - throttle_domain, ThrottleData(self.get_throttle_interval(), result) + # Check if reauth flow already exists. + flow = next( + iter( + flow + for flow in self._hass.config_entries.flow.async_progress() + if flow.context == context + ), + None, ) + if flow: + return - WithingsDataManager.print_service_available() - return result + # Start a reauth flow. + await self._hass.config_entries.flow.async_init( + const.DOMAIN, context=context, + ) + return - except Exception as ex: - # Withings api encountered error. - if isinstance(ex, (UnauthorizedException, AuthFailedException)): - raise NotAuthenticatedError(ex) + raise exception - # Oauth2 config flow failed to authenticate. 
- if NOT_AUTHENTICATED_ERROR.match(str(ex)): - raise NotAuthenticatedError(ex) + async def _async_get_all_data(self) -> Optional[Dict[MeasureType, Any]]: + _LOGGER.info("Updating all withings data.") + return { + **await self.async_get_measures(), + **await self.async_get_sleep_summary(), + } - # Probably a network error. - WithingsDataManager.print_service_unavailable() - raise PlatformNotReady(ex) + async def async_get_measures(self) -> Dict[MeasureType, Any]: + """Get the measures data.""" + _LOGGER.debug("Updating withings measures") - async def check_authenticated(self) -> bool: - """Check if the user is authenticated.""" + response = await self._hass.async_add_executor_job(self._api.measure_get_meas) - def function(): - return bool(self._api.user_get_device()) + groups = query_measure_groups( + response, MeasureTypes.ANY, MeasureGroupAttribs.UNAMBIGUOUS + ) - return await self.call(function) + return { + WITHINGS_MEASURE_TYPE_MAP[measure.type].measurement: round( + float(measure.value * pow(10, measure.unit)), 2 + ) + for group in groups + for measure in group.measures + } - async def update_measures(self) -> MeasureGetMeasResponse: - """Update the measures data.""" - - def function(): - return self._api.measure_get_meas() - - self._measures = await self.call(function, throttle_domain="update_measures") - - return self._measures - - async def update_sleep(self) -> SleepGetResponse: - """Update the sleep data.""" - end_date = dt.now() - start_date = end_date - datetime.timedelta(hours=2) - - def function(): - return self._api.sleep_get(startdate=start_date, enddate=end_date) - - self._sleep = await self.call(function, throttle_domain="update_sleep") - - return self._sleep - - async def update_sleep_summary(self) -> SleepGetSummaryResponse: - """Update the sleep summary data.""" + async def async_get_sleep_summary(self) -> Dict[MeasureType, Any]: + """Get the sleep summary data.""" + _LOGGER.debug("Updating withing sleep summary") now = dt.utcnow() yesterday = now - datetime.timedelta(days=1) yesterday_noon = datetime.datetime( @@ -283,62 +798,275 @@ class WithingsDataManager: datetime.timezone.utc, ) - _LOGGER.debug( - "Getting sleep summary data since: %s", - yesterday.strftime("%Y-%m-%d %H:%M:%S UTC"), - ) - - def function(): + def get_sleep_summary() -> SleepGetSummaryResponse: return self._api.sleep_get_summary(lastupdate=yesterday_noon) - self._sleep_summary = await self.call( - function, throttle_domain="update_sleep_summary" + response = await self._hass.async_add_executor_job(get_sleep_summary) + + # Set the default to empty lists. + raw_values: Dict[GetSleepSummaryField, List[int]] = { + field: [] for field in GetSleepSummaryField + } + + # Collect the raw data. 
+ for serie in response.series: + data = serie.data + + for field in GetSleepSummaryField: + raw_values[field].append(data._asdict()[field.value]) + + values: Dict[GetSleepSummaryField, float] = {} + + def average(data: List[int]) -> float: + return sum(data) / len(data) + + def set_value(field: GetSleepSummaryField, func: Callable) -> None: + non_nones = [ + value for value in raw_values.get(field, []) if value is not None + ] + values[field] = func(non_nones) if non_nones else None + + set_value(GetSleepSummaryField.BREATHING_DISTURBANCES_INTENSITY, average) + set_value(GetSleepSummaryField.DEEP_SLEEP_DURATION, sum) + set_value(GetSleepSummaryField.DURATION_TO_SLEEP, average) + set_value(GetSleepSummaryField.DURATION_TO_WAKEUP, average) + set_value(GetSleepSummaryField.HR_AVERAGE, average) + set_value(GetSleepSummaryField.HR_MAX, average) + set_value(GetSleepSummaryField.HR_MIN, average) + set_value(GetSleepSummaryField.LIGHT_SLEEP_DURATION, sum) + set_value(GetSleepSummaryField.REM_SLEEP_DURATION, sum) + set_value(GetSleepSummaryField.RR_AVERAGE, average) + set_value(GetSleepSummaryField.RR_MAX, average) + set_value(GetSleepSummaryField.RR_MIN, average) + set_value(GetSleepSummaryField.SLEEP_SCORE, max) + set_value(GetSleepSummaryField.SNORING, average) + set_value(GetSleepSummaryField.SNORING_EPISODE_COUNT, sum) + set_value(GetSleepSummaryField.WAKEUP_COUNT, sum) + set_value(GetSleepSummaryField.WAKEUP_DURATION, average) + + return { + WITHINGS_MEASURE_TYPE_MAP[field].measurement: round(value, 4) + if value is not None + else None + for field, value in values.items() + } + + async def async_webhook_data_updated(self, data_category: NotifyAppli) -> None: + """Handle scenario when data is updated from a webhook.""" + _LOGGER.debug("Withings webhook triggered") + if data_category in { + NotifyAppli.WEIGHT, + NotifyAppli.CIRCULATORY, + NotifyAppli.SLEEP, + }: + await self.poll_data_update_coordinator.async_request_refresh() + + elif data_category in {NotifyAppli.BED_IN, NotifyAppli.BED_OUT}: + self.webhook_update_coordinator.update_data( + Measurement.IN_BED, data_category == NotifyAppli.BED_IN + ) + + +def get_attribute_unique_id(attribute: WithingsAttribute, user_id: int) -> str: + """Get an entity unique id for a user's attribute.""" + return f"withings_{user_id}_{attribute.measurement.value}" + + +async def async_get_entity_id( + hass: HomeAssistant, attribute: WithingsAttribute, user_id: int +) -> Optional[str]: + """Get an entity id for a user's attribute.""" + entity_registry: EntityRegistry = await hass.helpers.entity_registry.async_get_registry() + unique_id = get_attribute_unique_id(attribute, user_id) + + entity_id = entity_registry.async_get_entity_id( + attribute.platform, const.DOMAIN, unique_id + ) + + if entity_id is None: + _LOGGER.error("Cannot find entity id for unique_id: %s", unique_id) + return None + + return entity_id + + +class BaseWithingsSensor(Entity): + """Base class for withings sensors.""" + + def __init__(self, data_manager: DataManager, attribute: WithingsAttribute) -> None: + """Initialize the Withings sensor.""" + self._data_manager = data_manager + self._attribute = attribute + self._profile = self._data_manager.profile + self._user_id = self._data_manager.user_id + self._name = f"Withings {self._attribute.measurement.value} {self._profile}" + self._unique_id = get_attribute_unique_id(self._attribute, self._user_id) + self._state_data: Optional[Any] = None + + @property + def should_poll(self) -> bool: + """Return False to indicate HA should not poll for 
changes.""" + return False + + @property + def name(self) -> str: + """Return the name of the sensor.""" + return self._name + + @property + def available(self) -> bool: + """Return True if entity is available.""" + if self._attribute.update_type == UpdateType.POLL: + return self._data_manager.poll_data_update_coordinator.last_update_success + + return True + + @property + def unique_id(self) -> str: + """Return a unique, Home Assistant friendly identifier for this entity.""" + return self._unique_id + + @property + def unit_of_measurement(self) -> str: + """Return the unit of measurement of this entity, if any.""" + return self._attribute.unit_of_measurement + + @property + def icon(self) -> str: + """Icon to use in the frontend, if any.""" + return self._attribute.icon + + @property + def entity_registry_enabled_default(self) -> bool: + """Return if the entity should be enabled when first added to the entity registry.""" + return self._attribute.enabled_by_default + + @callback + def _on_poll_data_updated(self) -> None: + self._update_state_data( + self._data_manager.poll_data_update_coordinator.data or {} ) - return self._sleep_summary + @callback + def _on_webhook_data_updated(self) -> None: + self._update_state_data( + self._data_manager.webhook_update_coordinator.data or {} + ) + + def _update_state_data(self, data: MeasurementData) -> None: + """Update the state data.""" + self._state_data = data.get(self._attribute.measurement) + self.async_write_ha_state() + + async def async_added_to_hass(self) -> None: + """Register update dispatcher.""" + if self._attribute.update_type == UpdateType.POLL: + self.async_on_remove( + self._data_manager.poll_data_update_coordinator.async_add_listener( + self._on_poll_data_updated + ) + ) + self._on_poll_data_updated() + + elif self._attribute.update_type == UpdateType.WEBHOOK: + self.async_on_remove( + self._data_manager.webhook_update_coordinator.async_add_listener( + self._on_webhook_data_updated + ) + ) + self._on_webhook_data_updated() -def create_withings_data_manager( - hass: HomeAssistant, - config_entry: ConfigEntry, - implementation: AbstractOAuth2Implementation, -) -> WithingsDataManager: - """Set up the sensor config entry.""" - profile = config_entry.data.get(const.PROFILE) +async def async_get_data_manager( + hass: HomeAssistant, config_entry: ConfigEntry +) -> DataManager: + """Get the data manager for a config entry.""" + hass.data.setdefault(const.DOMAIN, {}) + hass.data[const.DOMAIN].setdefault(config_entry.entry_id, {}) + config_entry_data = hass.data[const.DOMAIN][config_entry.entry_id] - _LOGGER.debug("Creating withings api instance") - api = ConfigEntryWithingsApi( - hass=hass, config_entry=config_entry, implementation=implementation + if const.DATA_MANAGER not in config_entry_data: + profile = config_entry.data.get(const.PROFILE) + + _LOGGER.debug("Creating withings data manager for profile: %s", profile) + config_entry_data[const.DATA_MANAGER] = DataManager( + hass, + profile, + ConfigEntryWithingsApi( + hass=hass, + config_entry=config_entry, + implementation=await config_entry_oauth2_flow.async_get_config_entry_implementation( + hass, config_entry + ), + ), + config_entry.data["token"]["userid"], + WebhookConfig( + id=config_entry.data[CONF_WEBHOOK_ID], + url=config_entry.data[const.CONF_WEBHOOK_URL], + enabled=config_entry.data[const.CONF_USE_WEBHOOK], + ), + ) + + return config_entry_data[const.DATA_MANAGER] + + +def get_data_manager_by_webhook_id( + hass: HomeAssistant, webhook_id: str +) -> Optional[DataManager]: + 
"""Get a data manager by it's webhook id.""" + return next( + iter( + [ + data_manager + for data_manager in get_all_data_managers(hass) + if data_manager.webhook_config.id == webhook_id + ] + ), + None, ) - _LOGGER.debug("Creating withings data manager for profile: %s", profile) - return WithingsDataManager(hass, profile, api) + +def get_all_data_managers(hass: HomeAssistant) -> Tuple[DataManager, ...]: + """Get all configured data managers.""" + return tuple( + [ + config_entry_data[const.DATA_MANAGER] + for config_entry_data in hass.data[const.DOMAIN].values() + if const.DATA_MANAGER in config_entry_data + ] + ) -def get_data_manager( +def async_remove_data_manager(hass: HomeAssistant, config_entry: ConfigEntry) -> None: + """Remove a data manager for a config entry.""" + del hass.data[const.DOMAIN][config_entry.entry_id][const.DATA_MANAGER] + + +async def async_create_entities( hass: HomeAssistant, entry: ConfigEntry, - implementation: AbstractOAuth2Implementation, -) -> WithingsDataManager: - """Get a data manager for a config entry. + create_func: Callable[[DataManager, WithingsAttribute], Entity], + platform: str, +) -> List[Entity]: + """Create withings entities from config entry.""" + data_manager = await async_get_data_manager(hass, entry) - If the data manager doesn't exist yet, it will be - created and cached for later use. - """ - entry_id = entry.entry_id + return [ + create_func(data_manager, attribute) + for attribute in get_platform_attributes(platform) + ] - hass.data[const.DOMAIN] = hass.data.get(const.DOMAIN, {}) - domain_dict = hass.data[const.DOMAIN] - domain_dict[const.DATA_MANAGER] = domain_dict.get(const.DATA_MANAGER, {}) - - dm_dict = domain_dict[const.DATA_MANAGER] - dm_dict[entry_id] = dm_dict.get(entry_id) or create_withings_data_manager( - hass, entry, implementation +def get_platform_attributes(platform: str) -> Tuple[WithingsAttribute, ...]: + """Get withings attributes used for a specific platform.""" + return tuple( + [ + attribute + for attribute in WITHINGS_ATTRIBUTES + if attribute.platform == platform + ] ) - return dm_dict[entry_id] - class WithingsLocalOAuth2Implementation(LocalOAuth2Implementation): """Oauth2 implementation that only uses the external url.""" diff --git a/homeassistant/components/withings/config_flow.py b/homeassistant/components/withings/config_flow.py index e18a4b0337a..e1a1dee3191 100644 --- a/homeassistant/components/withings/config_flow.py +++ b/homeassistant/components/withings/config_flow.py @@ -1,5 +1,6 @@ """Config flow for Withings.""" import logging +from typing import Dict, Union import voluptuous as vol from withings_api.common import AuthScope @@ -7,17 +8,20 @@ from withings_api.common import AuthScope from homeassistant import config_entries from homeassistant.components.withings import const from homeassistant.helpers import config_entry_oauth2_flow +from homeassistant.util import slugify _LOGGER = logging.getLogger(__name__) -@config_entries.HANDLERS.register(const.DOMAIN) -class WithingsFlowHandler(config_entry_oauth2_flow.AbstractOAuth2FlowHandler): +class WithingsFlowHandler( + config_entry_oauth2_flow.AbstractOAuth2FlowHandler, domain=const.DOMAIN +): """Handle a config flow.""" DOMAIN = const.DOMAIN CONNECTION_CLASS = config_entries.CONN_CLASS_CLOUD_POLL - _current_data = None + # Temporarily holds authorization data during the profile step. 
+ _current_data: Dict[str, Union[None, str, int]] = {} @property def logger(self) -> logging.Logger: @@ -33,6 +37,7 @@ class WithingsFlowHandler(config_entry_oauth2_flow.AbstractOAuth2FlowHandler): AuthScope.USER_INFO.value, AuthScope.USER_METRICS.value, AuthScope.USER_ACTIVITY.value, + AuthScope.USER_SLEEP_EVENTS.value, ] ) } @@ -44,21 +49,58 @@ class WithingsFlowHandler(config_entry_oauth2_flow.AbstractOAuth2FlowHandler): async def async_step_profile(self, data: dict) -> dict: """Prompt the user to select a user profile.""" - profile = data.get(const.PROFILE) + errors = {} + # pylint: disable=no-member # https://github.com/PyCQA/pylint/issues/3167 + reauth_profile = ( + self.context.get(const.PROFILE) + if self.context.get("source") == "reauth" + else None + ) + profile = data.get(const.PROFILE) or reauth_profile if profile: - new_data = {**self._current_data, **{const.PROFILE: profile}} - self._current_data = None - return await self.async_step_finish(new_data) + existing_entries = [ + config_entry + for config_entry in self.hass.config_entries.async_entries(const.DOMAIN) + if slugify(config_entry.data.get(const.PROFILE)) == slugify(profile) + ] + + if reauth_profile or not existing_entries: + new_data = {**self._current_data, **data, const.PROFILE: profile} + self._current_data = {} + return await self.async_step_finish(new_data) + + errors["base"] = "profile_exists" - profiles = self.hass.data[const.DOMAIN][const.CONFIG][const.CONF_PROFILES] return self.async_show_form( step_id="profile", - data_schema=vol.Schema({vol.Required(const.PROFILE): vol.In(profiles)}), + data_schema=vol.Schema({vol.Required(const.PROFILE): str}), + errors=errors, + ) + + async def async_step_reauth(self, data: dict = None) -> dict: + """Prompt user to re-authenticate.""" + if data is not None: + return await self.async_step_user() + + # pylint: disable=no-member # https://github.com/PyCQA/pylint/issues/3167 + placeholders = {const.PROFILE: self.context["profile"]} + + self.context.update({"title_placeholders": placeholders}) + + return self.async_show_form( + step_id="reauth", + # pylint: disable=no-member # https://github.com/PyCQA/pylint/issues/3167 + description_placeholders=placeholders, ) async def async_step_finish(self, data: dict) -> dict: """Finish the flow.""" - self._current_data = None + self._current_data = {} + + await self.async_set_unique_id( + str(data["token"]["userid"]), raise_on_progress=False + ) + self._abort_if_unique_id_configured(data) return self.async_create_entry(title=data[const.PROFILE], data=data) diff --git a/homeassistant/components/withings/const.py b/homeassistant/components/withings/const.py index f2a29cfa3ca..c6cad929f81 100644 --- a/homeassistant/components/withings/const.py +++ b/homeassistant/components/withings/const.py @@ -1,61 +1,59 @@ """Constants used by the Withings component.""" +from enum import Enum + import homeassistant.const as const -DOMAIN = "withings" - CONF_PROFILES = "profiles" +CONF_USE_WEBHOOK = "use_webhook" DATA_MANAGER = "data_manager" -BASE_URL = "base_url" -CODE = "code" CONFIG = "config" -CREDENTIALS = "credentials" +DOMAIN = "withings" LOG_NAMESPACE = "homeassistant.components.withings" -MEASURES = "measures" PROFILE = "profile" +PUSH_HANDLER = "push_handler" +CONF_WEBHOOK_URL = "webhook_url" -AUTH_CALLBACK_PATH = "/api/withings/authorize" -AUTH_CALLBACK_NAME = "withings:authorize" -THROTTLE_INTERVAL = 60 -SCAN_INTERVAL = 60 +class Measurement(Enum): + """Measurement supported by the withings integration.""" -STATE_UNKNOWN = 
const.STATE_UNKNOWN -STATE_AWAKE = "awake" -STATE_DEEP = "deep" -STATE_LIGHT = "light" -STATE_REM = "rem" + BODY_TEMP_C = "body_temperature_c" + BONE_MASS_KG = "bone_mass_kg" + DIASTOLIC_MMHG = "diastolic_blood_pressure_mmhg" + FAT_FREE_MASS_KG = "fat_free_mass_kg" + FAT_MASS_KG = "fat_mass_kg" + FAT_RATIO_PCT = "fat_ratio_pct" + HEART_PULSE_BPM = "heart_pulse_bpm" + HEIGHT_M = "height_m" + HYDRATION = "hydration" + IN_BED = "in_bed" + MUSCLE_MASS_KG = "muscle_mass_kg" + PWV = "pulse_wave_velocity" + SKIN_TEMP_C = "skin_temperature_c" + SLEEP_BREATHING_DISTURBANCES_INTENSITY = "sleep_breathing_disturbances_intensity" + SLEEP_DEEP_DURATION_SECONDS = "sleep_deep_duration_seconds" + SLEEP_HEART_RATE_AVERAGE = "sleep_heart_rate_average_bpm" + SLEEP_HEART_RATE_MAX = "sleep_heart_rate_max_bpm" + SLEEP_HEART_RATE_MIN = "sleep_heart_rate_min_bpm" + SLEEP_LIGHT_DURATION_SECONDS = "sleep_light_duration_seconds" + SLEEP_REM_DURATION_SECONDS = "sleep_rem_duration_seconds" + SLEEP_RESPIRATORY_RATE_AVERAGE = "sleep_respiratory_average_bpm" + SLEEP_RESPIRATORY_RATE_MAX = "sleep_respiratory_max_bpm" + SLEEP_RESPIRATORY_RATE_MIN = "sleep_respiratory_min_bpm" + SLEEP_SCORE = "sleep_score" + SLEEP_SNORING = "sleep_snoring" + SLEEP_SNORING_EPISODE_COUNT = "sleep_snoring_eposode_count" + SLEEP_TOSLEEP_DURATION_SECONDS = "sleep_tosleep_duration_seconds" + SLEEP_TOWAKEUP_DURATION_SECONDS = "sleep_towakeup_duration_seconds" + SLEEP_WAKEUP_COUNT = "sleep_wakeup_count" + SLEEP_WAKEUP_DURATION_SECONDS = "sleep_wakeup_duration_seconds" + SPO2_PCT = "spo2_pct" + SYSTOLIC_MMGH = "systolic_blood_pressure_mmhg" + TEMP_C = "temperature_c" + WEIGHT_KG = "weight_kg" -MEAS_BODY_TEMP_C = "body_temperature_c" -MEAS_BONE_MASS_KG = "bone_mass_kg" -MEAS_DIASTOLIC_MMHG = "diastolic_blood_pressure_mmhg" -MEAS_FAT_FREE_MASS_KG = "fat_free_mass_kg" -MEAS_FAT_MASS_KG = "fat_mass_kg" -MEAS_FAT_RATIO_PCT = "fat_ratio_pct" -MEAS_HEART_PULSE_BPM = "heart_pulse_bpm" -MEAS_HEIGHT_M = "height_m" -MEAS_HYDRATION = "hydration" -MEAS_MUSCLE_MASS_KG = "muscle_mass_kg" -MEAS_PWV = "pulse_wave_velocity" -MEAS_SKIN_TEMP_C = "skin_temperature_c" -MEAS_SLEEP_DEEP_DURATION_SECONDS = "sleep_deep_duration_seconds" -MEAS_SLEEP_HEART_RATE_AVERAGE = "sleep_heart_rate_average_bpm" -MEAS_SLEEP_HEART_RATE_MAX = "sleep_heart_rate_max_bpm" -MEAS_SLEEP_HEART_RATE_MIN = "sleep_heart_rate_min_bpm" -MEAS_SLEEP_LIGHT_DURATION_SECONDS = "sleep_light_duration_seconds" -MEAS_SLEEP_REM_DURATION_SECONDS = "sleep_rem_duration_seconds" -MEAS_SLEEP_RESPIRATORY_RATE_AVERAGE = "sleep_respiratory_average_bpm" -MEAS_SLEEP_RESPIRATORY_RATE_MAX = "sleep_respiratory_max_bpm" -MEAS_SLEEP_RESPIRATORY_RATE_MIN = "sleep_respiratory_min_bpm" -MEAS_SLEEP_TOSLEEP_DURATION_SECONDS = "sleep_tosleep_duration_seconds" -MEAS_SLEEP_TOWAKEUP_DURATION_SECONDS = "sleep_towakeup_duration_seconds" -MEAS_SLEEP_WAKEUP_COUNT = "sleep_wakeup_count" -MEAS_SLEEP_WAKEUP_DURATION_SECONDS = "sleep_wakeup_duration_seconds" -MEAS_SPO2_PCT = "spo2_pct" -MEAS_SYSTOLIC_MMGH = "systolic_blood_pressure_mmhg" -MEAS_TEMP_C = "temperature_c" -MEAS_WEIGHT_KG = "weight_kg" UOM_BEATS_PER_MINUTE = "bpm" UOM_BREATHS_PER_MINUTE = f"br/{const.TIME_MINUTES}" diff --git a/homeassistant/components/withings/manifest.json b/homeassistant/components/withings/manifest.json index 337a98ab404..ec981ff691c 100644 --- a/homeassistant/components/withings/manifest.json +++ b/homeassistant/components/withings/manifest.json @@ -3,7 +3,7 @@ "name": "Withings", "config_flow": true, "documentation": 
"https://www.home-assistant.io/integrations/withings", - "requirements": ["withings-api==2.1.3"], - "dependencies": ["api", "http", "webhook"], + "requirements": ["withings-api==2.1.6"], + "dependencies": ["http", "webhook"], "codeowners": ["@vangorra"] } diff --git a/homeassistant/components/withings/sensor.py b/homeassistant/components/withings/sensor.py index 4061e3207cc..a7580faa3d0 100644 --- a/homeassistant/components/withings/sensor.py +++ b/homeassistant/components/withings/sensor.py @@ -1,34 +1,12 @@ """Sensors flow for Withings.""" from typing import Callable, List, Union -from withings_api.common import ( - GetSleepSummaryField, - MeasureGetMeasResponse, - MeasureGroupAttribs, - MeasureType, - SleepGetSummaryResponse, - get_measure_value, -) - +from homeassistant.components.sensor import DOMAIN as SENSOR_DOMAIN from homeassistant.config_entries import ConfigEntry -from homeassistant.const import ( - MASS_KILOGRAMS, - SPEED_METERS_PER_SECOND, - TIME_SECONDS, - UNIT_PERCENTAGE, -) from homeassistant.core import HomeAssistant -from homeassistant.helpers import config_entry_oauth2_flow from homeassistant.helpers.entity import Entity -from homeassistant.util import slugify -from . import const -from .common import _LOGGER, WithingsDataManager, get_data_manager - -# There's only 3 calls (per profile) made to the withings api every 5 -# minutes (see throttle values). This component wouldn't benefit -# much from parallel updates. -PARALLEL_UPDATES = 1 +from .common import BaseWithingsSensor, async_create_entities async def async_setup_entry( @@ -37,401 +15,18 @@ async def async_setup_entry( async_add_entities: Callable[[List[Entity], bool], None], ) -> None: """Set up the sensor config entry.""" - implementation = await config_entry_oauth2_flow.async_get_config_entry_implementation( - hass, entry + + entities = await async_create_entities( + hass, entry, WithingsHealthSensor, SENSOR_DOMAIN, ) - data_manager = get_data_manager(hass, entry, implementation) - user_id = entry.data["token"]["userid"] - - entities = create_sensor_entities(data_manager, user_id) async_add_entities(entities, True) -class WithingsAttribute: - """Base class for modeling withing data.""" - - def __init__( - self, - measurement: str, - measure_type, - friendly_name: str, - unit_of_measurement: str, - icon: str, - ) -> None: - """Initialize attribute.""" - self.measurement = measurement - self.measure_type = measure_type - self.friendly_name = friendly_name - self.unit_of_measurement = unit_of_measurement - self.icon = icon - - -class WithingsMeasureAttribute(WithingsAttribute): - """Model measure attributes.""" - - -class WithingsSleepSummaryAttribute(WithingsAttribute): - """Models sleep summary attributes.""" - - -WITHINGS_ATTRIBUTES = [ - WithingsMeasureAttribute( - const.MEAS_WEIGHT_KG, - MeasureType.WEIGHT, - "Weight", - MASS_KILOGRAMS, - "mdi:weight-kilogram", - ), - WithingsMeasureAttribute( - const.MEAS_FAT_MASS_KG, - MeasureType.FAT_MASS_WEIGHT, - "Fat Mass", - MASS_KILOGRAMS, - "mdi:weight-kilogram", - ), - WithingsMeasureAttribute( - const.MEAS_FAT_FREE_MASS_KG, - MeasureType.FAT_FREE_MASS, - "Fat Free Mass", - MASS_KILOGRAMS, - "mdi:weight-kilogram", - ), - WithingsMeasureAttribute( - const.MEAS_MUSCLE_MASS_KG, - MeasureType.MUSCLE_MASS, - "Muscle Mass", - MASS_KILOGRAMS, - "mdi:weight-kilogram", - ), - WithingsMeasureAttribute( - const.MEAS_BONE_MASS_KG, - MeasureType.BONE_MASS, - "Bone Mass", - MASS_KILOGRAMS, - "mdi:weight-kilogram", - ), - WithingsMeasureAttribute( - const.MEAS_HEIGHT_M, - 
MeasureType.HEIGHT, - "Height", - const.UOM_LENGTH_M, - "mdi:ruler", - ), - WithingsMeasureAttribute( - const.MEAS_TEMP_C, - MeasureType.TEMPERATURE, - "Temperature", - const.UOM_TEMP_C, - "mdi:thermometer", - ), - WithingsMeasureAttribute( - const.MEAS_BODY_TEMP_C, - MeasureType.BODY_TEMPERATURE, - "Body Temperature", - const.UOM_TEMP_C, - "mdi:thermometer", - ), - WithingsMeasureAttribute( - const.MEAS_SKIN_TEMP_C, - MeasureType.SKIN_TEMPERATURE, - "Skin Temperature", - const.UOM_TEMP_C, - "mdi:thermometer", - ), - WithingsMeasureAttribute( - const.MEAS_FAT_RATIO_PCT, - MeasureType.FAT_RATIO, - "Fat Ratio", - UNIT_PERCENTAGE, - None, - ), - WithingsMeasureAttribute( - const.MEAS_DIASTOLIC_MMHG, - MeasureType.DIASTOLIC_BLOOD_PRESSURE, - "Diastolic Blood Pressure", - const.UOM_MMHG, - None, - ), - WithingsMeasureAttribute( - const.MEAS_SYSTOLIC_MMGH, - MeasureType.SYSTOLIC_BLOOD_PRESSURE, - "Systolic Blood Pressure", - const.UOM_MMHG, - None, - ), - WithingsMeasureAttribute( - const.MEAS_HEART_PULSE_BPM, - MeasureType.HEART_RATE, - "Heart Pulse", - const.UOM_BEATS_PER_MINUTE, - "mdi:heart-pulse", - ), - WithingsMeasureAttribute( - const.MEAS_SPO2_PCT, MeasureType.SP02, "SP02", UNIT_PERCENTAGE, None - ), - WithingsMeasureAttribute( - const.MEAS_HYDRATION, - MeasureType.HYDRATION, - "Hydration", - UNIT_PERCENTAGE, - "mdi:water", - ), - WithingsMeasureAttribute( - const.MEAS_PWV, - MeasureType.PULSE_WAVE_VELOCITY, - "Pulse Wave Velocity", - SPEED_METERS_PER_SECOND, - None, - ), - WithingsSleepSummaryAttribute( - const.MEAS_SLEEP_WAKEUP_DURATION_SECONDS, - GetSleepSummaryField.WAKEUP_DURATION.value, - "Wakeup time", - TIME_SECONDS, - "mdi:sleep-off", - ), - WithingsSleepSummaryAttribute( - const.MEAS_SLEEP_LIGHT_DURATION_SECONDS, - GetSleepSummaryField.LIGHT_SLEEP_DURATION.value, - "Light sleep", - TIME_SECONDS, - "mdi:sleep", - ), - WithingsSleepSummaryAttribute( - const.MEAS_SLEEP_DEEP_DURATION_SECONDS, - GetSleepSummaryField.DEEP_SLEEP_DURATION.value, - "Deep sleep", - TIME_SECONDS, - "mdi:sleep", - ), - WithingsSleepSummaryAttribute( - const.MEAS_SLEEP_REM_DURATION_SECONDS, - GetSleepSummaryField.REM_SLEEP_DURATION.value, - "REM sleep", - TIME_SECONDS, - "mdi:sleep", - ), - WithingsSleepSummaryAttribute( - const.MEAS_SLEEP_WAKEUP_COUNT, - GetSleepSummaryField.WAKEUP_COUNT.value, - "Wakeup count", - const.UOM_FREQUENCY, - "mdi:sleep-off", - ), - WithingsSleepSummaryAttribute( - const.MEAS_SLEEP_TOSLEEP_DURATION_SECONDS, - GetSleepSummaryField.DURATION_TO_SLEEP.value, - "Time to sleep", - TIME_SECONDS, - "mdi:sleep", - ), - WithingsSleepSummaryAttribute( - const.MEAS_SLEEP_TOWAKEUP_DURATION_SECONDS, - GetSleepSummaryField.DURATION_TO_WAKEUP.value, - "Time to wakeup", - TIME_SECONDS, - "mdi:sleep-off", - ), - WithingsSleepSummaryAttribute( - const.MEAS_SLEEP_HEART_RATE_AVERAGE, - GetSleepSummaryField.HR_AVERAGE.value, - "Average heart rate", - const.UOM_BEATS_PER_MINUTE, - "mdi:heart-pulse", - ), - WithingsSleepSummaryAttribute( - const.MEAS_SLEEP_HEART_RATE_MIN, - GetSleepSummaryField.HR_MIN.value, - "Minimum heart rate", - const.UOM_BEATS_PER_MINUTE, - "mdi:heart-pulse", - ), - WithingsSleepSummaryAttribute( - const.MEAS_SLEEP_HEART_RATE_MAX, - GetSleepSummaryField.HR_MAX.value, - "Maximum heart rate", - const.UOM_BEATS_PER_MINUTE, - "mdi:heart-pulse", - ), - WithingsSleepSummaryAttribute( - const.MEAS_SLEEP_RESPIRATORY_RATE_AVERAGE, - GetSleepSummaryField.RR_AVERAGE.value, - "Average respiratory rate", - const.UOM_BREATHS_PER_MINUTE, - None, - ), - WithingsSleepSummaryAttribute( - 
const.MEAS_SLEEP_RESPIRATORY_RATE_MIN, - GetSleepSummaryField.RR_MIN.value, - "Minimum respiratory rate", - const.UOM_BREATHS_PER_MINUTE, - None, - ), - WithingsSleepSummaryAttribute( - const.MEAS_SLEEP_RESPIRATORY_RATE_MAX, - GetSleepSummaryField.RR_MAX.value, - "Maximum respiratory rate", - const.UOM_BREATHS_PER_MINUTE, - None, - ), -] - -WITHINGS_MEASUREMENTS_MAP = {attr.measurement: attr for attr in WITHINGS_ATTRIBUTES} - - -class WithingsHealthSensor(Entity): +class WithingsHealthSensor(BaseWithingsSensor): """Implementation of a Withings sensor.""" - def __init__( - self, - data_manager: WithingsDataManager, - attribute: WithingsAttribute, - user_id: str, - ) -> None: - """Initialize the Withings sensor.""" - self._data_manager = data_manager - self._attribute = attribute - self._state = None - - self._slug = self._data_manager.slug - self._user_id = user_id - @property - def name(self) -> str: - """Return the name of the sensor.""" - return f"Withings {self._attribute.measurement} {self._slug}" - - @property - def unique_id(self) -> str: - """Return a unique, Home Assistant friendly identifier for this entity.""" - return ( - f"withings_{self._slug}_{self._user_id}_" - f"{slugify(self._attribute.measurement)}" - ) - - @property - def state(self) -> Union[str, int, float, None]: - """Return the state of the sensor.""" - return self._state - - @property - def unit_of_measurement(self) -> str: - """Return the unit of measurement of this entity, if any.""" - return self._attribute.unit_of_measurement - - @property - def icon(self) -> str: - """Icon to use in the frontend, if any.""" - return self._attribute.icon - - @property - def device_state_attributes(self) -> None: - """Get withings attributes.""" - return self._attribute.__dict__ - - async def async_update(self) -> None: - """Update the data.""" - _LOGGER.debug( - "Async update slug: %s, measurement: %s, user_id: %s", - self._slug, - self._attribute.measurement, - self._user_id, - ) - - if isinstance(self._attribute, WithingsMeasureAttribute): - _LOGGER.debug("Updating measures state") - await self._data_manager.update_measures() - await self.async_update_measure(self._data_manager.measures) - - elif isinstance(self._attribute, WithingsSleepSummaryAttribute): - _LOGGER.debug("Updating sleep summary state") - await self._data_manager.update_sleep_summary() - await self.async_update_sleep_summary(self._data_manager.sleep_summary) - - async def async_update_measure(self, data: MeasureGetMeasResponse) -> None: - """Update the measures data.""" - measure_type = self._attribute.measure_type - - _LOGGER.debug( - "Finding the unambiguous measure group with measure_type: %s", measure_type - ) - - value = get_measure_value(data, measure_type, MeasureGroupAttribs.UNAMBIGUOUS) - - if value is None: - _LOGGER.debug("Could not find a value, setting state to %s", None) - self._state = None - return - - self._state = round(value, 2) - - async def async_update_sleep_summary(self, data: SleepGetSummaryResponse) -> None: - """Update the sleep summary data.""" - if not data.series: - _LOGGER.debug("Sleep data has no series, setting state to %s", None) - self._state = None - return - - measurement = self._attribute.measurement - measure_type = self._attribute.measure_type - - _LOGGER.debug("Determining total value for: %s", measurement) - total = 0 - for serie in data.series: - data = serie.data - value = 0 - if measure_type == GetSleepSummaryField.REM_SLEEP_DURATION.value: - value = data.remsleepduration - elif measure_type == 
GetSleepSummaryField.WAKEUP_DURATION.value: - value = data.wakeupduration - elif measure_type == GetSleepSummaryField.LIGHT_SLEEP_DURATION.value: - value = data.lightsleepduration - elif measure_type == GetSleepSummaryField.DEEP_SLEEP_DURATION.value: - value = data.deepsleepduration - elif measure_type == GetSleepSummaryField.WAKEUP_COUNT.value: - value = data.wakeupcount - elif measure_type == GetSleepSummaryField.DURATION_TO_SLEEP.value: - value = data.durationtosleep - elif measure_type == GetSleepSummaryField.DURATION_TO_WAKEUP.value: - value = data.durationtowakeup - elif measure_type == GetSleepSummaryField.HR_AVERAGE.value: - value = data.hr_average - elif measure_type == GetSleepSummaryField.HR_MIN.value: - value = data.hr_min - elif measure_type == GetSleepSummaryField.HR_MAX.value: - value = data.hr_max - elif measure_type == GetSleepSummaryField.RR_AVERAGE.value: - value = data.rr_average - elif measure_type == GetSleepSummaryField.RR_MIN.value: - value = data.rr_min - elif measure_type == GetSleepSummaryField.RR_MAX.value: - value = data.rr_max - - # Sometimes a None is provided for value, default to 0. - total += value or 0 - - self._state = round(total, 4) - - -def create_sensor_entities( - data_manager: WithingsDataManager, user_id: str -) -> List[WithingsHealthSensor]: - """Create sensor entities.""" - entities = [] - - for attribute in WITHINGS_ATTRIBUTES: - _LOGGER.debug( - "Creating entity for measurement: %s, measure_type: %s," - "friendly_name: %s, unit_of_measurement: %s", - attribute.measurement, - attribute.measure_type, - attribute.friendly_name, - attribute.unit_of_measurement, - ) - - entity = WithingsHealthSensor(data_manager, attribute, user_id) - - entities.append(entity) - - return entities + def state(self) -> Union[None, str, int, float]: + """Return the state of the entity.""" + return self._state_data diff --git a/homeassistant/components/withings/strings.json b/homeassistant/components/withings/strings.json index b4f5123d5af..e7763a1db0c 100644 --- a/homeassistant/components/withings/strings.json +++ b/homeassistant/components/withings/strings.json @@ -1,16 +1,25 @@ { "config": { + "flow_title": "Withings: {profile}", "step": { "profile": { "title": "User Profile.", - "description": "Which profile did you select on the Withings website? It's important the profiles match, otherwise data will be mis-labeled.", - "data": { "profile": "Profile" } + "description": "Provide a unique profile name for this data. Typically this is the name of the profile you selected in the previous step.", + "data": { "profile": "Profile Name" } }, - "pick_implementation": { "title": "Pick Authentication Method" } + "pick_implementation": { "title": "Pick Authentication Method" }, + "reauth": { + "title": "Re-authenticate Profile", + "description": "The \"{profile}\" profile needs to be re-authenticated in order to continue receiving Withings data." + } + }, + "error": { + "profile_exists": "User profile is already configured. Please provide a unique profile name." }, "abort": { "authorize_url_timeout": "Timeout generating authorize url.", - "missing_configuration": "The Withings integration is not configured. Please follow the documentation." + "missing_configuration": "The Withings integration is not configured. Please follow the documentation.", + "already_configured": "Configuration updated for profile." }, "create_entry": { "default": "Successfully authenticated with Withings." 
} } diff --git a/homeassistant/components/withings/translations/ca.json b/homeassistant/components/withings/translations/ca.json index 40896dd7931..88d3ae7e6e6 100644 --- a/homeassistant/components/withings/translations/ca.json +++ b/homeassistant/components/withings/translations/ca.json @@ -1,22 +1,31 @@ { "config": { "abort": { + "already_configured": "Configuraci\u00f3 de perfil actualitzada.", "authorize_url_timeout": "S'ha acabat el temps d'espera durant la generaci\u00f3 de l'URL d'autoritzaci\u00f3.", "missing_configuration": "La integraci\u00f3 Withings no est\u00e0 configurada. Mira'n la documentaci\u00f3." }, "create_entry": { "default": "Autenticaci\u00f3 exitosa amb Withings." }, + "error": { + "profile_exists": "El perfil ja est\u00e0 configurat. Proporciona un nom de perfil \u00fanic." + }, + "flow_title": "Withings: {profile}", "step": { "pick_implementation": { "title": "Selecci\u00f3 del m\u00e8tode d'autenticaci\u00f3" }, "profile": { "data": { - "profile": "Perfil" + "profile": "Nom de perfil" }, - "description": "Quin perfil has seleccionat al lloc web de Withings? \u00c9s important que els perfils coincideixin sin\u00f3, les dades no s'etiquetaran correctament.", + "description": "Ha de proporcionar un nom de perfil \u00fanic per a aquestes dades. Normalment \u00e9s el nom del perfil seleccionat en el pas anterior.", "title": "Perfil d'usuari." + }, + "reauth": { + "description": "El perfil \"{profile}\" s'ha de tornar a autenticar per poder continuar rebent dades de Withings.", + "title": "Torna a autenticar perfil" } } } diff --git a/homeassistant/components/withings/translations/en.json b/homeassistant/components/withings/translations/en.json index 734a23bc5e0..185bd56153c 100644 --- a/homeassistant/components/withings/translations/en.json +++ b/homeassistant/components/withings/translations/en.json @@ -1,22 +1,31 @@ { "config": { "abort": { + "already_configured": "Configuration updated for profile.", "authorize_url_timeout": "Timeout generating authorize url.", "missing_configuration": "The Withings integration is not configured. Please follow the documentation." }, "create_entry": { "default": "Successfully authenticated with Withings." }, + "error": { + "profile_exists": "User profile is already configured. Please provide a unique profile name." + }, + "flow_title": "Withings: {profile}", "step": { "pick_implementation": { "title": "Pick Authentication Method" }, "profile": { "data": { - "profile": "Profile" + "profile": "Profile Name" }, - "description": "Which profile did you select on the Withings website? It's important the profiles match, otherwise data will be mis-labeled.", + "description": "Provide a unique profile name for this data. Typically this is the name of the profile you selected in the previous step.", "title": "User Profile." 
+ }, + "reauth": { + "description": "The \"{profile}\" profile needs to be re-authenticated in order to continue receiving Withings data.", + "title": "Re-authenticate Profile" } } } diff --git a/homeassistant/components/withings/translations/es.json b/homeassistant/components/withings/translations/es.json index d162c06e761..1285a24b41f 100644 --- a/homeassistant/components/withings/translations/es.json +++ b/homeassistant/components/withings/translations/es.json @@ -1,12 +1,17 @@ { "config": { "abort": { + "already_configured": "Configuraci\u00f3n actualizada para el perfil.", "authorize_url_timeout": "Tiempo de espera agotado para la autorizaci\u00f3n de la url.", "missing_configuration": "La integraci\u00f3n de Withings no est\u00e1 configurada. Por favor, siga la documentaci\u00f3n." }, "create_entry": { "default": "Autenticado correctamente con Withings." }, + "error": { + "profile_exists": "El perfil de usuario ya est\u00e1 configurado. Por favor, proporciona un nombre de perfil \u00fanico." + }, + "flow_title": "Withings: {profile}", "step": { "pick_implementation": { "title": "Elija el m\u00e9todo de autenticaci\u00f3n" @@ -17,6 +22,10 @@ }, "description": "\u00bfQu\u00e9 perfil seleccion\u00f3 en el sitio web de Withings? Es importante que los perfiles coincidan, de lo contrario los datos se etiquetar\u00e1n incorrectamente.", "title": "Perfil de usuario." + }, + "reauth": { + "description": "El perfil \"{profile}\" debe volver a autenticarse para continuar recibiendo datos de Withings.", + "title": "Volver a autenticar a {profile}" } } } diff --git a/homeassistant/components/withings/translations/it.json b/homeassistant/components/withings/translations/it.json index f1d45416988..f6566ccab77 100644 --- a/homeassistant/components/withings/translations/it.json +++ b/homeassistant/components/withings/translations/it.json @@ -7,6 +7,7 @@ "create_entry": { "default": "Autenticazione riuscita con Withings." }, + "flow_title": "Withings: {profile}", "step": { "pick_implementation": { "title": "Scegli il metodo di autenticazione" @@ -17,6 +18,10 @@ }, "description": "Quale profilo hai selezionato sul sito web di Withings? \u00c8 importante che i profili corrispondano, altrimenti i dati avranno con un'errata etichettatura.", "title": "Profilo utente." + }, + "reauth": { + "description": "Il profilo \"{profile}\" deve essere autenticato nuovamente per continuare a ricevere i dati Withings.", + "title": "Riautentica {profile}" } } } diff --git a/homeassistant/components/withings/translations/ko.json b/homeassistant/components/withings/translations/ko.json index da1d3440611..77531d8d313 100644 --- a/homeassistant/components/withings/translations/ko.json +++ b/homeassistant/components/withings/translations/ko.json @@ -7,6 +7,7 @@ "create_entry": { "default": "Withings \ub85c \uc131\uacf5\uc801\uc73c\ub85c \uc778\uc99d\ub418\uc5c8\uc2b5\ub2c8\ub2e4." }, + "flow_title": "Withings: {profile}", "step": { "pick_implementation": { "title": "\uc778\uc99d \ubc29\ubc95 \uc120\ud0dd\ud558\uae30" @@ -17,6 +18,10 @@ }, "description": "Withings \uc6f9 \uc0ac\uc774\ud2b8\uc5d0\uc11c \uc5b4\ub5a4 \ud504\ub85c\ud544\uc744 \uc120\ud0dd\ud558\uc168\ub098\uc694? \ud504\ub85c\ud544\uc774 \uc77c\uce58\ud574\uc57c \ud569\ub2c8\ub2e4. \uadf8\ub807\uc9c0 \uc54a\uc73c\uba74, \ub370\uc774\ud130\uc5d0 \ub808\uc774\ube14\uc774 \uc798\ubabb \uc9c0\uc815\ub429\ub2c8\ub2e4.", "title": "\uc0ac\uc6a9\uc790 \ud504\ub85c\ud544." 
+ }, + "reauth": { + "description": "Withings \ub370\uc774\ud130\ub97c \uacc4\uc18d \uc218\uc2e0\ud558\ub824\uba74 \"{profile}\" \ud504\ub85c\ud544\uc744 \ub2e4\uc2dc \uc778\uc99d\ud574\uc57c \ud569\ub2c8\ub2e4.", + "title": "{profile} \uc7ac\uc778\uc99d" } } } diff --git a/homeassistant/components/withings/translations/lb.json b/homeassistant/components/withings/translations/lb.json index 38bd29b96e4..c8a2d4beae2 100644 --- a/homeassistant/components/withings/translations/lb.json +++ b/homeassistant/components/withings/translations/lb.json @@ -7,6 +7,7 @@ "create_entry": { "default": "Erfollegr\u00e4ich mat Withings authentifiz\u00e9iert." }, + "flow_title": "Withing: {profile}", "step": { "pick_implementation": { "title": "Wielt Authentifikatiouns Method aus" @@ -17,6 +18,10 @@ }, "description": "W\u00e9ie Profil hutt dir op der Withings Webs\u00e4it ausgewielt? Et ass wichteg dass Profiller passen, soss ginn Donn\u00e9e\u00eb falsch gekennzeechent.", "title": "Benotzer Profil." + }, + "reauth": { + "description": "De Profil \"{profile}\" muss fr\u00ebsch authentifi\u00e9iert ginn fir weiderhinn Donn\u00e9e\u00eb vun Withing z'empf\u00e4nken.", + "title": "{profile} fr\u00ebsch authentifiz\u00e9ieren" } } } diff --git a/homeassistant/components/withings/translations/lv.json b/homeassistant/components/withings/translations/lv.json deleted file mode 100644 index a23ba89ea71..00000000000 --- a/homeassistant/components/withings/translations/lv.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "title": "Withings" -} \ No newline at end of file diff --git a/homeassistant/components/withings/translations/nn.json b/homeassistant/components/withings/translations/nn.json deleted file mode 100644 index a23ba89ea71..00000000000 --- a/homeassistant/components/withings/translations/nn.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "title": "Withings" -} \ No newline at end of file diff --git a/homeassistant/components/withings/translations/no.json b/homeassistant/components/withings/translations/no.json index 44c341ca2ca..1581d073ed9 100644 --- a/homeassistant/components/withings/translations/no.json +++ b/homeassistant/components/withings/translations/no.json @@ -1,12 +1,17 @@ { "config": { "abort": { + "already_configured": "Konfigurasjon oppdatert for profil.", "authorize_url_timeout": "Tidsavbrudd ved oppretting av godkjenningsadresse.", "missing_configuration": "Withings-integrasjonen er ikke konfigurert. Vennligst f\u00f8lg dokumentasjonen." }, "create_entry": { "default": "Vellykket godkjenning med Withings." }, + "error": { + "profile_exists": "Brukerprofilen er allerede konfigurert. Oppgi et unikt profilnavn." + }, + "flow_title": "Withings: {profil}", "step": { "pick_implementation": { "title": "Velg godkjenningsmetode" @@ -17,6 +22,10 @@ }, "description": "Hvilken profil valgte du p\u00e5 Withings nettsted? Det er viktig at profilene samsvarer, ellers blir data feilmerket.", "title": "Brukerprofil." 
+ }, + "reauth": { + "description": "Profilen {profile} m\u00e5 godkjennes p\u00e5 nytt for \u00e5 kunne fortsette \u00e5 motta Withings-data.", + "title": "Re-autentisere {profil}" } } } diff --git a/homeassistant/components/withings/translations/pl.json b/homeassistant/components/withings/translations/pl.json index 9896ba3ad5c..626e74a36ac 100644 --- a/homeassistant/components/withings/translations/pl.json +++ b/homeassistant/components/withings/translations/pl.json @@ -7,6 +7,7 @@ "create_entry": { "default": "Pomy\u015blnie uwierzytelniono z Withings dla wybranego profilu" }, + "flow_title": "Withings: {profile}", "step": { "pick_implementation": { "title": "Wybierz metod\u0119 uwierzytelniania" @@ -17,6 +18,10 @@ }, "description": "Kt\u00f3ry profil wybra\u0142e\u015b na stronie Withings? Wa\u017cne jest, aby profile si\u0119 zgadza\u0142y, w przeciwnym razie dane zostan\u0105 b\u0142\u0119dnie oznaczone.", "title": "Profil u\u017cytkownika" + }, + "reauth": { + "description": "Profil \"{profile}\" musi zosta\u0107 ponownie uwierzytelniony, aby nadal otrzymywa\u0107 dane Withings.", + "title": "Ponownie uwierzytelnij {profile}" } } } diff --git a/homeassistant/components/withings/translations/ru.json b/homeassistant/components/withings/translations/ru.json index 5a945c14a84..33c2437df62 100644 --- a/homeassistant/components/withings/translations/ru.json +++ b/homeassistant/components/withings/translations/ru.json @@ -1,12 +1,17 @@ { "config": { "abort": { + "already_configured": "\u041e\u0431\u043d\u043e\u0432\u043b\u0435\u043d\u0430 \u043a\u043e\u043d\u0444\u0438\u0433\u0443\u0440\u0430\u0446\u0438\u044f \u0434\u043b\u044f \u043f\u0440\u043e\u0444\u0438\u043b\u044f.", "authorize_url_timeout": "\u0418\u0441\u0442\u0435\u043a\u043b\u043e \u0432\u0440\u0435\u043c\u044f \u0433\u0435\u043d\u0435\u0440\u0430\u0446\u0438\u0438 \u0441\u0441\u044b\u043b\u043a\u0438 \u0430\u0432\u0442\u043e\u0440\u0438\u0437\u0430\u0446\u0438\u0438.", "missing_configuration": "\u0418\u043d\u0442\u0435\u0433\u0440\u0430\u0446\u0438\u044f Withings \u043d\u0435 \u043d\u0430\u0441\u0442\u0440\u043e\u0435\u043d\u0430. \u041f\u043e\u0436\u0430\u043b\u0443\u0439\u0441\u0442\u0430, \u043e\u0437\u043d\u0430\u043a\u043e\u043c\u044c\u0442\u0435\u0441\u044c \u0441 \u0438\u043d\u0441\u0442\u0440\u0443\u043a\u0446\u0438\u044f\u043c\u0438." }, "create_entry": { "default": "\u0410\u0443\u0442\u0435\u043d\u0442\u0438\u0444\u0438\u043a\u0430\u0446\u0438\u044f \u043f\u0440\u043e\u0439\u0434\u0435\u043d\u0430 \u0443\u0441\u043f\u0435\u0448\u043d\u043e." }, + "error": { + "profile_exists": "\u041f\u0440\u043e\u0444\u0438\u043b\u044c \u044d\u0442\u043e\u0433\u043e \u043f\u043e\u043b\u044c\u0437\u043e\u0432\u0430\u0442\u0435\u043b\u044f \u0443\u0436\u0435 \u043d\u0430\u0441\u0442\u0440\u043e\u0435\u043d. \u0423\u043a\u0430\u0436\u0438\u0442\u0435 \u0443\u043d\u0438\u043a\u0430\u043b\u044c\u043d\u043e\u0435 \u0438\u043c\u044f \u043f\u0440\u043e\u0444\u0438\u043b\u044f." + }, + "flow_title": "Withings: {profile}", "step": { "pick_implementation": { "title": "\u0412\u044b\u0431\u0435\u0440\u0438\u0442\u0435 \u0441\u043f\u043e\u0441\u043e\u0431 \u0430\u0443\u0442\u0435\u043d\u0442\u0438\u0444\u0438\u043a\u0430\u0446\u0438\u0438" @@ -17,6 +22,10 @@ }, "description": "\u041a\u0430\u043a\u043e\u0439 \u043f\u0440\u043e\u0444\u0438\u043b\u044c \u0412\u044b \u0432\u044b\u0431\u0440\u0430\u043b\u0438 \u043d\u0430 \u0441\u0430\u0439\u0442\u0435 Withings? 
\u0412\u0430\u0436\u043d\u043e, \u0447\u0442\u043e\u0431\u044b \u043f\u0440\u043e\u0444\u0438\u043b\u0438 \u0441\u043e\u0432\u043f\u0430\u0434\u0430\u043b\u0438, \u0438\u043d\u0430\u0447\u0435 \u0434\u0430\u043d\u043d\u044b\u0435 \u0431\u0443\u0434\u0443\u0442 \u043d\u0435\u043f\u0440\u0430\u0432\u0438\u043b\u044c\u043d\u043e \u043f\u043e\u043c\u0435\u0447\u0435\u043d\u044b.", "title": "Withings" + }, + "reauth": { + "description": "\u041f\u0440\u043e\u0444\u0438\u043b\u044c \"{profile}\" \u0434\u043e\u043b\u0436\u0435\u043d \u0431\u044b\u0442\u044c \u043f\u043e\u0432\u0442\u043e\u0440\u043d\u043e \u0430\u0443\u0442\u0435\u043d\u0442\u0438\u0444\u0438\u0446\u0438\u0440\u043e\u0432\u0430\u043d \u0434\u043b\u044f \u043f\u0440\u043e\u0434\u043e\u043b\u0436\u0435\u043d\u0438\u044f \u043f\u043e\u043b\u0443\u0447\u0435\u043d\u0438\u044f \u0434\u0430\u043d\u043d\u044b\u0445 Withings.", + "title": "\u041f\u043e\u0432\u0442\u043e\u0440\u043d\u0430\u044f \u0430\u0443\u0442\u0435\u043d\u0442\u0438\u0444\u0438\u043a\u0430\u0446\u0438\u044f \u043f\u0440\u043e\u0444\u0438\u043b\u044f" } } } diff --git a/homeassistant/components/withings/translations/zh-Hans.json b/homeassistant/components/withings/translations/zh-Hans.json deleted file mode 100644 index c7485b09248..00000000000 --- a/homeassistant/components/withings/translations/zh-Hans.json +++ /dev/null @@ -1,10 +0,0 @@ -{ - "config": { - "step": { - "user": { - "description": "\u8bf7\u9009\u62e9\u4f60\u60f3\u8981Home Assistant\u548cWithings\u5bf9\u5e94\u7684\u7528\u6237\u914d\u7f6e\u6587\u4ef6\u3002\u5728Withings\u9875\u9762\u4e0a\uff0c\u8bf7\u52a1\u5fc5\u9009\u62e9\u76f8\u540c\u7684\u7528\u6237\uff0c\u5426\u5219\u6570\u636e\u5c06\u65e0\u6cd5\u6b63\u786e\u6807\u8bb0\u3002", - "title": "\u7528\u6237\u8d44\u6599" - } - } - } -} \ No newline at end of file diff --git a/homeassistant/components/withings/translations/zh-Hant.json b/homeassistant/components/withings/translations/zh-Hant.json index ac644d26418..3f31c0585f8 100644 --- a/homeassistant/components/withings/translations/zh-Hant.json +++ b/homeassistant/components/withings/translations/zh-Hant.json @@ -1,22 +1,31 @@ { "config": { "abort": { + "already_configured": "\u6b64\u500b\u4eba\u8a2d\u7f6e\u8a2d\u5b9a\u5df2\u66f4\u65b0\u3002", "authorize_url_timeout": "\u7522\u751f\u8a8d\u8b49 URL \u6642\u903e\u6642\u3002", "missing_configuration": "Withings \u6574\u5408\u5c1a\u672a\u8a2d\u7f6e\uff0c\u8acb\u53c3\u95b1\u6587\u4ef6\u8aaa\u660e\u3002" }, "create_entry": { "default": "\u5df2\u6210\u529f\u8a8d\u8b49 Withings \u8a2d\u5099\u3002" }, + "error": { + "profile_exists": "\u4f7f\u7528\u8005\u500b\u4eba\u8a2d\u7f6e\u5df2\u7d93\u8a2d\u5b9a\uff0c\u8acb\u63d0\u4f9b\u7368\u4e00\u7684\u540d\u7a31\u3002" + }, + "flow_title": "Withings\uff1a{profile}", "step": { "pick_implementation": { "title": "\u9078\u64c7\u9a57\u8b49\u6a21\u5f0f" }, "profile": { "data": { - "profile": "\u500b\u4eba\u8a2d\u5b9a" + "profile": "\u500b\u4eba\u8a2d\u7f6e\u540d\u7a31" }, - "description": "\u65bc Withings \u7db2\u7ad9\u6240\u9078\u64c7\u7684\u500b\u4eba\u8a2d\u5b9a\u70ba\u4f55\uff1f\u5047\u5982\u500b\u4eba\u8a2d\u5b9a\u4e0d\u7b26\u5408\u7684\u8a71\uff0c\u8cc7\u6599\u5c07\u6703\u6a19\u793a\u932f\u8aa4\u3002", + "description": "\u8acb\u70ba\u8cc7\u6599\u8a2d\u5b9a\u4e00\u7d44\u7368\u4e00\u7684\u500b\u4eba\u8a2d\u7f6e\u540d\u7a31\u3002\u901a\u5e38\u8207\u524d\u4e00\u6b65\u9a5f\u6240\u9078\u64c7\u4e4b\u8a2d\u7f6e\u6587\u4ef6\u540d\u7a31\u76f8\u540c\u3002", "title": "\u500b\u4eba\u8a2d\u5b9a\u3002" + }, + "reauth": { + "description": 
"\"{profile}\" \u8a2d\u5b9a\u6a94\u9700\u8981\u91cd\u65b0\u8a8d\u8b49\u4ee5\u4fdd\u6301\u63a5\u6536 Withings \u8cc7\u6599\u3002", + "title": "\u91cd\u65b0\u8a8d\u8b49\u500b\u4eba\u8a2d\u7f6e" } } } diff --git a/homeassistant/components/wled/__init__.py b/homeassistant/components/wled/__init__.py index 91c130a7a81..70d14895fbc 100644 --- a/homeassistant/components/wled/__init__.py +++ b/homeassistant/components/wled/__init__.py @@ -180,11 +180,9 @@ class WLEDEntity(Entity): async def async_added_to_hass(self) -> None: """Connect to dispatcher listening for entity data notifications.""" - self.coordinator.async_add_listener(self.async_write_ha_state) - - async def async_will_remove_from_hass(self) -> None: - """Disconnect from update signal.""" - self.coordinator.async_remove_listener(self.async_write_ha_state) + self.async_on_remove( + self.coordinator.async_add_listener(self.async_write_ha_state) + ) async def async_update(self) -> None: """Update WLED entity.""" diff --git a/homeassistant/components/wled/translations/af.json b/homeassistant/components/wled/translations/af.json deleted file mode 100644 index 77eafbdb9bd..00000000000 --- a/homeassistant/components/wled/translations/af.json +++ /dev/null @@ -1,21 +0,0 @@ -{ - "config": { - "abort": { - "already_configured": "Wykryto urz\u0105dzenie [%key:component::wled::title%]", - "connection_error": "Wykryto urz\u0105dzenie [%key:component::wled::title%]" - }, - "error": { - "connection_error": "Wykryto urz\u0105dzenie [%key:component::wled::title%]" - }, - "flow_title": "Wykryto urz\u0105dzenie [%key:component::wled::title%]", - "step": { - "user": { - "description": "Wykryto urz\u0105dzenie [%key:component::wled::title%]" - }, - "zeroconf_confirm": { - "description": "Wykryto urz\u0105dzenie [%key:component::wled::title%]", - "title": "Wykryto urz\u0105dzenie [%key:component::wled::title%]" - } - } - } -} \ No newline at end of file diff --git a/homeassistant/components/wled/translations/ar.json b/homeassistant/components/wled/translations/ar.json deleted file mode 100644 index 77eafbdb9bd..00000000000 --- a/homeassistant/components/wled/translations/ar.json +++ /dev/null @@ -1,21 +0,0 @@ -{ - "config": { - "abort": { - "already_configured": "Wykryto urz\u0105dzenie [%key:component::wled::title%]", - "connection_error": "Wykryto urz\u0105dzenie [%key:component::wled::title%]" - }, - "error": { - "connection_error": "Wykryto urz\u0105dzenie [%key:component::wled::title%]" - }, - "flow_title": "Wykryto urz\u0105dzenie [%key:component::wled::title%]", - "step": { - "user": { - "description": "Wykryto urz\u0105dzenie [%key:component::wled::title%]" - }, - "zeroconf_confirm": { - "description": "Wykryto urz\u0105dzenie [%key:component::wled::title%]", - "title": "Wykryto urz\u0105dzenie [%key:component::wled::title%]" - } - } - } -} \ No newline at end of file diff --git a/homeassistant/components/wled/translations/bs.json b/homeassistant/components/wled/translations/bs.json deleted file mode 100644 index 77eafbdb9bd..00000000000 --- a/homeassistant/components/wled/translations/bs.json +++ /dev/null @@ -1,21 +0,0 @@ -{ - "config": { - "abort": { - "already_configured": "Wykryto urz\u0105dzenie [%key:component::wled::title%]", - "connection_error": "Wykryto urz\u0105dzenie [%key:component::wled::title%]" - }, - "error": { - "connection_error": "Wykryto urz\u0105dzenie [%key:component::wled::title%]" - }, - "flow_title": "Wykryto urz\u0105dzenie [%key:component::wled::title%]", - "step": { - "user": { - "description": "Wykryto urz\u0105dzenie 
[%key:component::wled::title%]" - }, - "zeroconf_confirm": { - "description": "Wykryto urz\u0105dzenie [%key:component::wled::title%]", - "title": "Wykryto urz\u0105dzenie [%key:component::wled::title%]" - } - } - } -} \ No newline at end of file diff --git a/homeassistant/components/wled/translations/cs.json b/homeassistant/components/wled/translations/cs.json deleted file mode 100644 index 77eafbdb9bd..00000000000 --- a/homeassistant/components/wled/translations/cs.json +++ /dev/null @@ -1,21 +0,0 @@ -{ - "config": { - "abort": { - "already_configured": "Wykryto urz\u0105dzenie [%key:component::wled::title%]", - "connection_error": "Wykryto urz\u0105dzenie [%key:component::wled::title%]" - }, - "error": { - "connection_error": "Wykryto urz\u0105dzenie [%key:component::wled::title%]" - }, - "flow_title": "Wykryto urz\u0105dzenie [%key:component::wled::title%]", - "step": { - "user": { - "description": "Wykryto urz\u0105dzenie [%key:component::wled::title%]" - }, - "zeroconf_confirm": { - "description": "Wykryto urz\u0105dzenie [%key:component::wled::title%]", - "title": "Wykryto urz\u0105dzenie [%key:component::wled::title%]" - } - } - } -} \ No newline at end of file diff --git a/homeassistant/components/wled/translations/cy.json b/homeassistant/components/wled/translations/cy.json deleted file mode 100644 index 77eafbdb9bd..00000000000 --- a/homeassistant/components/wled/translations/cy.json +++ /dev/null @@ -1,21 +0,0 @@ -{ - "config": { - "abort": { - "already_configured": "Wykryto urz\u0105dzenie [%key:component::wled::title%]", - "connection_error": "Wykryto urz\u0105dzenie [%key:component::wled::title%]" - }, - "error": { - "connection_error": "Wykryto urz\u0105dzenie [%key:component::wled::title%]" - }, - "flow_title": "Wykryto urz\u0105dzenie [%key:component::wled::title%]", - "step": { - "user": { - "description": "Wykryto urz\u0105dzenie [%key:component::wled::title%]" - }, - "zeroconf_confirm": { - "description": "Wykryto urz\u0105dzenie [%key:component::wled::title%]", - "title": "Wykryto urz\u0105dzenie [%key:component::wled::title%]" - } - } - } -} \ No newline at end of file diff --git a/homeassistant/components/wled/translations/el.json b/homeassistant/components/wled/translations/el.json deleted file mode 100644 index 77eafbdb9bd..00000000000 --- a/homeassistant/components/wled/translations/el.json +++ /dev/null @@ -1,21 +0,0 @@ -{ - "config": { - "abort": { - "already_configured": "Wykryto urz\u0105dzenie [%key:component::wled::title%]", - "connection_error": "Wykryto urz\u0105dzenie [%key:component::wled::title%]" - }, - "error": { - "connection_error": "Wykryto urz\u0105dzenie [%key:component::wled::title%]" - }, - "flow_title": "Wykryto urz\u0105dzenie [%key:component::wled::title%]", - "step": { - "user": { - "description": "Wykryto urz\u0105dzenie [%key:component::wled::title%]" - }, - "zeroconf_confirm": { - "description": "Wykryto urz\u0105dzenie [%key:component::wled::title%]", - "title": "Wykryto urz\u0105dzenie [%key:component::wled::title%]" - } - } - } -} \ No newline at end of file diff --git a/homeassistant/components/wled/translations/eo.json b/homeassistant/components/wled/translations/eo.json deleted file mode 100644 index 77eafbdb9bd..00000000000 --- a/homeassistant/components/wled/translations/eo.json +++ /dev/null @@ -1,21 +0,0 @@ -{ - "config": { - "abort": { - "already_configured": "Wykryto urz\u0105dzenie [%key:component::wled::title%]", - "connection_error": "Wykryto urz\u0105dzenie [%key:component::wled::title%]" - }, - "error": { - 
"connection_error": "Wykryto urz\u0105dzenie [%key:component::wled::title%]" - }, - "flow_title": "Wykryto urz\u0105dzenie [%key:component::wled::title%]", - "step": { - "user": { - "description": "Wykryto urz\u0105dzenie [%key:component::wled::title%]" - }, - "zeroconf_confirm": { - "description": "Wykryto urz\u0105dzenie [%key:component::wled::title%]", - "title": "Wykryto urz\u0105dzenie [%key:component::wled::title%]" - } - } - } -} \ No newline at end of file diff --git a/homeassistant/components/wled/translations/et.json b/homeassistant/components/wled/translations/et.json deleted file mode 100644 index 77eafbdb9bd..00000000000 --- a/homeassistant/components/wled/translations/et.json +++ /dev/null @@ -1,21 +0,0 @@ -{ - "config": { - "abort": { - "already_configured": "Wykryto urz\u0105dzenie [%key:component::wled::title%]", - "connection_error": "Wykryto urz\u0105dzenie [%key:component::wled::title%]" - }, - "error": { - "connection_error": "Wykryto urz\u0105dzenie [%key:component::wled::title%]" - }, - "flow_title": "Wykryto urz\u0105dzenie [%key:component::wled::title%]", - "step": { - "user": { - "description": "Wykryto urz\u0105dzenie [%key:component::wled::title%]" - }, - "zeroconf_confirm": { - "description": "Wykryto urz\u0105dzenie [%key:component::wled::title%]", - "title": "Wykryto urz\u0105dzenie [%key:component::wled::title%]" - } - } - } -} \ No newline at end of file diff --git a/homeassistant/components/wled/translations/eu.json b/homeassistant/components/wled/translations/eu.json deleted file mode 100644 index 77eafbdb9bd..00000000000 --- a/homeassistant/components/wled/translations/eu.json +++ /dev/null @@ -1,21 +0,0 @@ -{ - "config": { - "abort": { - "already_configured": "Wykryto urz\u0105dzenie [%key:component::wled::title%]", - "connection_error": "Wykryto urz\u0105dzenie [%key:component::wled::title%]" - }, - "error": { - "connection_error": "Wykryto urz\u0105dzenie [%key:component::wled::title%]" - }, - "flow_title": "Wykryto urz\u0105dzenie [%key:component::wled::title%]", - "step": { - "user": { - "description": "Wykryto urz\u0105dzenie [%key:component::wled::title%]" - }, - "zeroconf_confirm": { - "description": "Wykryto urz\u0105dzenie [%key:component::wled::title%]", - "title": "Wykryto urz\u0105dzenie [%key:component::wled::title%]" - } - } - } -} \ No newline at end of file diff --git a/homeassistant/components/wled/translations/fa.json b/homeassistant/components/wled/translations/fa.json deleted file mode 100644 index 77eafbdb9bd..00000000000 --- a/homeassistant/components/wled/translations/fa.json +++ /dev/null @@ -1,21 +0,0 @@ -{ - "config": { - "abort": { - "already_configured": "Wykryto urz\u0105dzenie [%key:component::wled::title%]", - "connection_error": "Wykryto urz\u0105dzenie [%key:component::wled::title%]" - }, - "error": { - "connection_error": "Wykryto urz\u0105dzenie [%key:component::wled::title%]" - }, - "flow_title": "Wykryto urz\u0105dzenie [%key:component::wled::title%]", - "step": { - "user": { - "description": "Wykryto urz\u0105dzenie [%key:component::wled::title%]" - }, - "zeroconf_confirm": { - "description": "Wykryto urz\u0105dzenie [%key:component::wled::title%]", - "title": "Wykryto urz\u0105dzenie [%key:component::wled::title%]" - } - } - } -} \ No newline at end of file diff --git a/homeassistant/components/wled/translations/fi.json b/homeassistant/components/wled/translations/fi.json deleted file mode 100644 index 77eafbdb9bd..00000000000 --- a/homeassistant/components/wled/translations/fi.json +++ /dev/null @@ -1,21 +0,0 @@ 
-{ - "config": { - "abort": { - "already_configured": "Wykryto urz\u0105dzenie [%key:component::wled::title%]", - "connection_error": "Wykryto urz\u0105dzenie [%key:component::wled::title%]" - }, - "error": { - "connection_error": "Wykryto urz\u0105dzenie [%key:component::wled::title%]" - }, - "flow_title": "Wykryto urz\u0105dzenie [%key:component::wled::title%]", - "step": { - "user": { - "description": "Wykryto urz\u0105dzenie [%key:component::wled::title%]" - }, - "zeroconf_confirm": { - "description": "Wykryto urz\u0105dzenie [%key:component::wled::title%]", - "title": "Wykryto urz\u0105dzenie [%key:component::wled::title%]" - } - } - } -} \ No newline at end of file diff --git a/homeassistant/components/wled/translations/gsw.json b/homeassistant/components/wled/translations/gsw.json deleted file mode 100644 index 77eafbdb9bd..00000000000 --- a/homeassistant/components/wled/translations/gsw.json +++ /dev/null @@ -1,21 +0,0 @@ -{ - "config": { - "abort": { - "already_configured": "Wykryto urz\u0105dzenie [%key:component::wled::title%]", - "connection_error": "Wykryto urz\u0105dzenie [%key:component::wled::title%]" - }, - "error": { - "connection_error": "Wykryto urz\u0105dzenie [%key:component::wled::title%]" - }, - "flow_title": "Wykryto urz\u0105dzenie [%key:component::wled::title%]", - "step": { - "user": { - "description": "Wykryto urz\u0105dzenie [%key:component::wled::title%]" - }, - "zeroconf_confirm": { - "description": "Wykryto urz\u0105dzenie [%key:component::wled::title%]", - "title": "Wykryto urz\u0105dzenie [%key:component::wled::title%]" - } - } - } -} \ No newline at end of file diff --git a/homeassistant/components/wled/translations/he.json b/homeassistant/components/wled/translations/he.json deleted file mode 100644 index 77eafbdb9bd..00000000000 --- a/homeassistant/components/wled/translations/he.json +++ /dev/null @@ -1,21 +0,0 @@ -{ - "config": { - "abort": { - "already_configured": "Wykryto urz\u0105dzenie [%key:component::wled::title%]", - "connection_error": "Wykryto urz\u0105dzenie [%key:component::wled::title%]" - }, - "error": { - "connection_error": "Wykryto urz\u0105dzenie [%key:component::wled::title%]" - }, - "flow_title": "Wykryto urz\u0105dzenie [%key:component::wled::title%]", - "step": { - "user": { - "description": "Wykryto urz\u0105dzenie [%key:component::wled::title%]" - }, - "zeroconf_confirm": { - "description": "Wykryto urz\u0105dzenie [%key:component::wled::title%]", - "title": "Wykryto urz\u0105dzenie [%key:component::wled::title%]" - } - } - } -} \ No newline at end of file diff --git a/homeassistant/components/wled/translations/hi.json b/homeassistant/components/wled/translations/hi.json deleted file mode 100644 index 77eafbdb9bd..00000000000 --- a/homeassistant/components/wled/translations/hi.json +++ /dev/null @@ -1,21 +0,0 @@ -{ - "config": { - "abort": { - "already_configured": "Wykryto urz\u0105dzenie [%key:component::wled::title%]", - "connection_error": "Wykryto urz\u0105dzenie [%key:component::wled::title%]" - }, - "error": { - "connection_error": "Wykryto urz\u0105dzenie [%key:component::wled::title%]" - }, - "flow_title": "Wykryto urz\u0105dzenie [%key:component::wled::title%]", - "step": { - "user": { - "description": "Wykryto urz\u0105dzenie [%key:component::wled::title%]" - }, - "zeroconf_confirm": { - "description": "Wykryto urz\u0105dzenie [%key:component::wled::title%]", - "title": "Wykryto urz\u0105dzenie [%key:component::wled::title%]" - } - } - } -} \ No newline at end of file diff --git 
a/homeassistant/components/wled/translations/hr.json b/homeassistant/components/wled/translations/hr.json deleted file mode 100644 index 77eafbdb9bd..00000000000 --- a/homeassistant/components/wled/translations/hr.json +++ /dev/null @@ -1,21 +0,0 @@ -{ - "config": { - "abort": { - "already_configured": "Wykryto urz\u0105dzenie [%key:component::wled::title%]", - "connection_error": "Wykryto urz\u0105dzenie [%key:component::wled::title%]" - }, - "error": { - "connection_error": "Wykryto urz\u0105dzenie [%key:component::wled::title%]" - }, - "flow_title": "Wykryto urz\u0105dzenie [%key:component::wled::title%]", - "step": { - "user": { - "description": "Wykryto urz\u0105dzenie [%key:component::wled::title%]" - }, - "zeroconf_confirm": { - "description": "Wykryto urz\u0105dzenie [%key:component::wled::title%]", - "title": "Wykryto urz\u0105dzenie [%key:component::wled::title%]" - } - } - } -} \ No newline at end of file diff --git a/homeassistant/components/wled/translations/iba.json b/homeassistant/components/wled/translations/iba.json deleted file mode 100644 index a9107341e37..00000000000 --- a/homeassistant/components/wled/translations/iba.json +++ /dev/null @@ -1,22 +0,0 @@ -{ - "config": { - "abort": { - "already_configured": "Wykryto urz\u0105dzenie [%key:component::wled::title%]", - "connection_error": "Wykryto urz\u0105dzenie [%key:component::wled::title%]" - }, - "error": { - "connection_error": "Wykryto urz\u0105dzenie [%key:component::wled::title%]" - }, - "flow_title": "Wykryto urz\u0105dzenie [%key:component::wled::title%]", - "step": { - "user": { - "description": "Wykryto urz\u0105dzenie [%key:component::wled::title%]", - "title": "Wykryto urz\u0105dzenie [%key:component::wled::title%]" - }, - "zeroconf_confirm": { - "description": "Wykryto urz\u0105dzenie [%key:component::wled::title%]", - "title": "Wykryto urz\u0105dzenie [%key:component::wled::title%]" - } - } - } -} \ No newline at end of file diff --git a/homeassistant/components/wled/translations/id.json b/homeassistant/components/wled/translations/id.json deleted file mode 100644 index 77eafbdb9bd..00000000000 --- a/homeassistant/components/wled/translations/id.json +++ /dev/null @@ -1,21 +0,0 @@ -{ - "config": { - "abort": { - "already_configured": "Wykryto urz\u0105dzenie [%key:component::wled::title%]", - "connection_error": "Wykryto urz\u0105dzenie [%key:component::wled::title%]" - }, - "error": { - "connection_error": "Wykryto urz\u0105dzenie [%key:component::wled::title%]" - }, - "flow_title": "Wykryto urz\u0105dzenie [%key:component::wled::title%]", - "step": { - "user": { - "description": "Wykryto urz\u0105dzenie [%key:component::wled::title%]" - }, - "zeroconf_confirm": { - "description": "Wykryto urz\u0105dzenie [%key:component::wled::title%]", - "title": "Wykryto urz\u0105dzenie [%key:component::wled::title%]" - } - } - } -} \ No newline at end of file diff --git a/homeassistant/components/wled/translations/is.json b/homeassistant/components/wled/translations/is.json deleted file mode 100644 index 77eafbdb9bd..00000000000 --- a/homeassistant/components/wled/translations/is.json +++ /dev/null @@ -1,21 +0,0 @@ -{ - "config": { - "abort": { - "already_configured": "Wykryto urz\u0105dzenie [%key:component::wled::title%]", - "connection_error": "Wykryto urz\u0105dzenie [%key:component::wled::title%]" - }, - "error": { - "connection_error": "Wykryto urz\u0105dzenie [%key:component::wled::title%]" - }, - "flow_title": "Wykryto urz\u0105dzenie [%key:component::wled::title%]", - "step": { - "user": { - "description": 
"Wykryto urz\u0105dzenie [%key:component::wled::title%]" - }, - "zeroconf_confirm": { - "description": "Wykryto urz\u0105dzenie [%key:component::wled::title%]", - "title": "Wykryto urz\u0105dzenie [%key:component::wled::title%]" - } - } - } -} \ No newline at end of file diff --git a/homeassistant/components/wled/translations/ja.json b/homeassistant/components/wled/translations/ja.json deleted file mode 100644 index 77eafbdb9bd..00000000000 --- a/homeassistant/components/wled/translations/ja.json +++ /dev/null @@ -1,21 +0,0 @@ -{ - "config": { - "abort": { - "already_configured": "Wykryto urz\u0105dzenie [%key:component::wled::title%]", - "connection_error": "Wykryto urz\u0105dzenie [%key:component::wled::title%]" - }, - "error": { - "connection_error": "Wykryto urz\u0105dzenie [%key:component::wled::title%]" - }, - "flow_title": "Wykryto urz\u0105dzenie [%key:component::wled::title%]", - "step": { - "user": { - "description": "Wykryto urz\u0105dzenie [%key:component::wled::title%]" - }, - "zeroconf_confirm": { - "description": "Wykryto urz\u0105dzenie [%key:component::wled::title%]", - "title": "Wykryto urz\u0105dzenie [%key:component::wled::title%]" - } - } - } -} \ No newline at end of file diff --git a/homeassistant/components/wled/translations/lt.json b/homeassistant/components/wled/translations/lt.json deleted file mode 100644 index 77eafbdb9bd..00000000000 --- a/homeassistant/components/wled/translations/lt.json +++ /dev/null @@ -1,21 +0,0 @@ -{ - "config": { - "abort": { - "already_configured": "Wykryto urz\u0105dzenie [%key:component::wled::title%]", - "connection_error": "Wykryto urz\u0105dzenie [%key:component::wled::title%]" - }, - "error": { - "connection_error": "Wykryto urz\u0105dzenie [%key:component::wled::title%]" - }, - "flow_title": "Wykryto urz\u0105dzenie [%key:component::wled::title%]", - "step": { - "user": { - "description": "Wykryto urz\u0105dzenie [%key:component::wled::title%]" - }, - "zeroconf_confirm": { - "description": "Wykryto urz\u0105dzenie [%key:component::wled::title%]", - "title": "Wykryto urz\u0105dzenie [%key:component::wled::title%]" - } - } - } -} \ No newline at end of file diff --git a/homeassistant/components/wled/translations/lv.json b/homeassistant/components/wled/translations/lv.json deleted file mode 100644 index 77eafbdb9bd..00000000000 --- a/homeassistant/components/wled/translations/lv.json +++ /dev/null @@ -1,21 +0,0 @@ -{ - "config": { - "abort": { - "already_configured": "Wykryto urz\u0105dzenie [%key:component::wled::title%]", - "connection_error": "Wykryto urz\u0105dzenie [%key:component::wled::title%]" - }, - "error": { - "connection_error": "Wykryto urz\u0105dzenie [%key:component::wled::title%]" - }, - "flow_title": "Wykryto urz\u0105dzenie [%key:component::wled::title%]", - "step": { - "user": { - "description": "Wykryto urz\u0105dzenie [%key:component::wled::title%]" - }, - "zeroconf_confirm": { - "description": "Wykryto urz\u0105dzenie [%key:component::wled::title%]", - "title": "Wykryto urz\u0105dzenie [%key:component::wled::title%]" - } - } - } -} \ No newline at end of file diff --git a/homeassistant/components/wled/translations/nn.json b/homeassistant/components/wled/translations/nn.json deleted file mode 100644 index 77eafbdb9bd..00000000000 --- a/homeassistant/components/wled/translations/nn.json +++ /dev/null @@ -1,21 +0,0 @@ -{ - "config": { - "abort": { - "already_configured": "Wykryto urz\u0105dzenie [%key:component::wled::title%]", - "connection_error": "Wykryto urz\u0105dzenie [%key:component::wled::title%]" - }, - 
"error": { - "connection_error": "Wykryto urz\u0105dzenie [%key:component::wled::title%]" - }, - "flow_title": "Wykryto urz\u0105dzenie [%key:component::wled::title%]", - "step": { - "user": { - "description": "Wykryto urz\u0105dzenie [%key:component::wled::title%]" - }, - "zeroconf_confirm": { - "description": "Wykryto urz\u0105dzenie [%key:component::wled::title%]", - "title": "Wykryto urz\u0105dzenie [%key:component::wled::title%]" - } - } - } -} \ No newline at end of file diff --git a/homeassistant/components/wled/translations/no.json b/homeassistant/components/wled/translations/no.json index 9f9c2b40e6e..da372daad11 100644 --- a/homeassistant/components/wled/translations/no.json +++ b/homeassistant/components/wled/translations/no.json @@ -11,7 +11,7 @@ "step": { "user": { "data": { - "host": "Vert eller IP-adresse" + "host": "Vert " }, "description": "Sett opp WLED til \u00e5 integreres med Home Assistant." }, diff --git a/homeassistant/components/wled/translations/pt-BR.json b/homeassistant/components/wled/translations/pt-BR.json deleted file mode 100644 index 77eafbdb9bd..00000000000 --- a/homeassistant/components/wled/translations/pt-BR.json +++ /dev/null @@ -1,21 +0,0 @@ -{ - "config": { - "abort": { - "already_configured": "Wykryto urz\u0105dzenie [%key:component::wled::title%]", - "connection_error": "Wykryto urz\u0105dzenie [%key:component::wled::title%]" - }, - "error": { - "connection_error": "Wykryto urz\u0105dzenie [%key:component::wled::title%]" - }, - "flow_title": "Wykryto urz\u0105dzenie [%key:component::wled::title%]", - "step": { - "user": { - "description": "Wykryto urz\u0105dzenie [%key:component::wled::title%]" - }, - "zeroconf_confirm": { - "description": "Wykryto urz\u0105dzenie [%key:component::wled::title%]", - "title": "Wykryto urz\u0105dzenie [%key:component::wled::title%]" - } - } - } -} \ No newline at end of file diff --git a/homeassistant/components/wled/translations/ro.json b/homeassistant/components/wled/translations/ro.json deleted file mode 100644 index 77eafbdb9bd..00000000000 --- a/homeassistant/components/wled/translations/ro.json +++ /dev/null @@ -1,21 +0,0 @@ -{ - "config": { - "abort": { - "already_configured": "Wykryto urz\u0105dzenie [%key:component::wled::title%]", - "connection_error": "Wykryto urz\u0105dzenie [%key:component::wled::title%]" - }, - "error": { - "connection_error": "Wykryto urz\u0105dzenie [%key:component::wled::title%]" - }, - "flow_title": "Wykryto urz\u0105dzenie [%key:component::wled::title%]", - "step": { - "user": { - "description": "Wykryto urz\u0105dzenie [%key:component::wled::title%]" - }, - "zeroconf_confirm": { - "description": "Wykryto urz\u0105dzenie [%key:component::wled::title%]", - "title": "Wykryto urz\u0105dzenie [%key:component::wled::title%]" - } - } - } -} \ No newline at end of file diff --git a/homeassistant/components/wled/translations/sk.json b/homeassistant/components/wled/translations/sk.json deleted file mode 100644 index 77eafbdb9bd..00000000000 --- a/homeassistant/components/wled/translations/sk.json +++ /dev/null @@ -1,21 +0,0 @@ -{ - "config": { - "abort": { - "already_configured": "Wykryto urz\u0105dzenie [%key:component::wled::title%]", - "connection_error": "Wykryto urz\u0105dzenie [%key:component::wled::title%]" - }, - "error": { - "connection_error": "Wykryto urz\u0105dzenie [%key:component::wled::title%]" - }, - "flow_title": "Wykryto urz\u0105dzenie [%key:component::wled::title%]", - "step": { - "user": { - "description": "Wykryto urz\u0105dzenie [%key:component::wled::title%]" - }, - 
"zeroconf_confirm": { - "description": "Wykryto urz\u0105dzenie [%key:component::wled::title%]", - "title": "Wykryto urz\u0105dzenie [%key:component::wled::title%]" - } - } - } -} \ No newline at end of file diff --git a/homeassistant/components/wled/translations/sr-Latn.json b/homeassistant/components/wled/translations/sr-Latn.json deleted file mode 100644 index 77eafbdb9bd..00000000000 --- a/homeassistant/components/wled/translations/sr-Latn.json +++ /dev/null @@ -1,21 +0,0 @@ -{ - "config": { - "abort": { - "already_configured": "Wykryto urz\u0105dzenie [%key:component::wled::title%]", - "connection_error": "Wykryto urz\u0105dzenie [%key:component::wled::title%]" - }, - "error": { - "connection_error": "Wykryto urz\u0105dzenie [%key:component::wled::title%]" - }, - "flow_title": "Wykryto urz\u0105dzenie [%key:component::wled::title%]", - "step": { - "user": { - "description": "Wykryto urz\u0105dzenie [%key:component::wled::title%]" - }, - "zeroconf_confirm": { - "description": "Wykryto urz\u0105dzenie [%key:component::wled::title%]", - "title": "Wykryto urz\u0105dzenie [%key:component::wled::title%]" - } - } - } -} \ No newline at end of file diff --git a/homeassistant/components/wled/translations/sr.json b/homeassistant/components/wled/translations/sr.json deleted file mode 100644 index 77eafbdb9bd..00000000000 --- a/homeassistant/components/wled/translations/sr.json +++ /dev/null @@ -1,21 +0,0 @@ -{ - "config": { - "abort": { - "already_configured": "Wykryto urz\u0105dzenie [%key:component::wled::title%]", - "connection_error": "Wykryto urz\u0105dzenie [%key:component::wled::title%]" - }, - "error": { - "connection_error": "Wykryto urz\u0105dzenie [%key:component::wled::title%]" - }, - "flow_title": "Wykryto urz\u0105dzenie [%key:component::wled::title%]", - "step": { - "user": { - "description": "Wykryto urz\u0105dzenie [%key:component::wled::title%]" - }, - "zeroconf_confirm": { - "description": "Wykryto urz\u0105dzenie [%key:component::wled::title%]", - "title": "Wykryto urz\u0105dzenie [%key:component::wled::title%]" - } - } - } -} \ No newline at end of file diff --git a/homeassistant/components/wled/translations/ta.json b/homeassistant/components/wled/translations/ta.json deleted file mode 100644 index 77eafbdb9bd..00000000000 --- a/homeassistant/components/wled/translations/ta.json +++ /dev/null @@ -1,21 +0,0 @@ -{ - "config": { - "abort": { - "already_configured": "Wykryto urz\u0105dzenie [%key:component::wled::title%]", - "connection_error": "Wykryto urz\u0105dzenie [%key:component::wled::title%]" - }, - "error": { - "connection_error": "Wykryto urz\u0105dzenie [%key:component::wled::title%]" - }, - "flow_title": "Wykryto urz\u0105dzenie [%key:component::wled::title%]", - "step": { - "user": { - "description": "Wykryto urz\u0105dzenie [%key:component::wled::title%]" - }, - "zeroconf_confirm": { - "description": "Wykryto urz\u0105dzenie [%key:component::wled::title%]", - "title": "Wykryto urz\u0105dzenie [%key:component::wled::title%]" - } - } - } -} \ No newline at end of file diff --git a/homeassistant/components/wled/translations/te.json b/homeassistant/components/wled/translations/te.json deleted file mode 100644 index 77eafbdb9bd..00000000000 --- a/homeassistant/components/wled/translations/te.json +++ /dev/null @@ -1,21 +0,0 @@ -{ - "config": { - "abort": { - "already_configured": "Wykryto urz\u0105dzenie [%key:component::wled::title%]", - "connection_error": "Wykryto urz\u0105dzenie [%key:component::wled::title%]" - }, - "error": { - "connection_error": "Wykryto 
urz\u0105dzenie [%key:component::wled::title%]" - }, - "flow_title": "Wykryto urz\u0105dzenie [%key:component::wled::title%]", - "step": { - "user": { - "description": "Wykryto urz\u0105dzenie [%key:component::wled::title%]" - }, - "zeroconf_confirm": { - "description": "Wykryto urz\u0105dzenie [%key:component::wled::title%]", - "title": "Wykryto urz\u0105dzenie [%key:component::wled::title%]" - } - } - } -} \ No newline at end of file diff --git a/homeassistant/components/wled/translations/th.json b/homeassistant/components/wled/translations/th.json deleted file mode 100644 index 77eafbdb9bd..00000000000 --- a/homeassistant/components/wled/translations/th.json +++ /dev/null @@ -1,21 +0,0 @@ -{ - "config": { - "abort": { - "already_configured": "Wykryto urz\u0105dzenie [%key:component::wled::title%]", - "connection_error": "Wykryto urz\u0105dzenie [%key:component::wled::title%]" - }, - "error": { - "connection_error": "Wykryto urz\u0105dzenie [%key:component::wled::title%]" - }, - "flow_title": "Wykryto urz\u0105dzenie [%key:component::wled::title%]", - "step": { - "user": { - "description": "Wykryto urz\u0105dzenie [%key:component::wled::title%]" - }, - "zeroconf_confirm": { - "description": "Wykryto urz\u0105dzenie [%key:component::wled::title%]", - "title": "Wykryto urz\u0105dzenie [%key:component::wled::title%]" - } - } - } -} \ No newline at end of file diff --git a/homeassistant/components/wled/translations/tr.json b/homeassistant/components/wled/translations/tr.json deleted file mode 100644 index 77eafbdb9bd..00000000000 --- a/homeassistant/components/wled/translations/tr.json +++ /dev/null @@ -1,21 +0,0 @@ -{ - "config": { - "abort": { - "already_configured": "Wykryto urz\u0105dzenie [%key:component::wled::title%]", - "connection_error": "Wykryto urz\u0105dzenie [%key:component::wled::title%]" - }, - "error": { - "connection_error": "Wykryto urz\u0105dzenie [%key:component::wled::title%]" - }, - "flow_title": "Wykryto urz\u0105dzenie [%key:component::wled::title%]", - "step": { - "user": { - "description": "Wykryto urz\u0105dzenie [%key:component::wled::title%]" - }, - "zeroconf_confirm": { - "description": "Wykryto urz\u0105dzenie [%key:component::wled::title%]", - "title": "Wykryto urz\u0105dzenie [%key:component::wled::title%]" - } - } - } -} \ No newline at end of file diff --git a/homeassistant/components/wled/translations/uk.json b/homeassistant/components/wled/translations/uk.json deleted file mode 100644 index 77eafbdb9bd..00000000000 --- a/homeassistant/components/wled/translations/uk.json +++ /dev/null @@ -1,21 +0,0 @@ -{ - "config": { - "abort": { - "already_configured": "Wykryto urz\u0105dzenie [%key:component::wled::title%]", - "connection_error": "Wykryto urz\u0105dzenie [%key:component::wled::title%]" - }, - "error": { - "connection_error": "Wykryto urz\u0105dzenie [%key:component::wled::title%]" - }, - "flow_title": "Wykryto urz\u0105dzenie [%key:component::wled::title%]", - "step": { - "user": { - "description": "Wykryto urz\u0105dzenie [%key:component::wled::title%]" - }, - "zeroconf_confirm": { - "description": "Wykryto urz\u0105dzenie [%key:component::wled::title%]", - "title": "Wykryto urz\u0105dzenie [%key:component::wled::title%]" - } - } - } -} \ No newline at end of file diff --git a/homeassistant/components/wled/translations/ur.json b/homeassistant/components/wled/translations/ur.json deleted file mode 100644 index 77eafbdb9bd..00000000000 --- a/homeassistant/components/wled/translations/ur.json +++ /dev/null @@ -1,21 +0,0 @@ -{ - "config": { - "abort": { 
- "already_configured": "Wykryto urz\u0105dzenie [%key:component::wled::title%]", - "connection_error": "Wykryto urz\u0105dzenie [%key:component::wled::title%]" - }, - "error": { - "connection_error": "Wykryto urz\u0105dzenie [%key:component::wled::title%]" - }, - "flow_title": "Wykryto urz\u0105dzenie [%key:component::wled::title%]", - "step": { - "user": { - "description": "Wykryto urz\u0105dzenie [%key:component::wled::title%]" - }, - "zeroconf_confirm": { - "description": "Wykryto urz\u0105dzenie [%key:component::wled::title%]", - "title": "Wykryto urz\u0105dzenie [%key:component::wled::title%]" - } - } - } -} \ No newline at end of file diff --git a/homeassistant/components/wled/translations/vi.json b/homeassistant/components/wled/translations/vi.json deleted file mode 100644 index 77eafbdb9bd..00000000000 --- a/homeassistant/components/wled/translations/vi.json +++ /dev/null @@ -1,21 +0,0 @@ -{ - "config": { - "abort": { - "already_configured": "Wykryto urz\u0105dzenie [%key:component::wled::title%]", - "connection_error": "Wykryto urz\u0105dzenie [%key:component::wled::title%]" - }, - "error": { - "connection_error": "Wykryto urz\u0105dzenie [%key:component::wled::title%]" - }, - "flow_title": "Wykryto urz\u0105dzenie [%key:component::wled::title%]", - "step": { - "user": { - "description": "Wykryto urz\u0105dzenie [%key:component::wled::title%]" - }, - "zeroconf_confirm": { - "description": "Wykryto urz\u0105dzenie [%key:component::wled::title%]", - "title": "Wykryto urz\u0105dzenie [%key:component::wled::title%]" - } - } - } -} \ No newline at end of file diff --git a/homeassistant/components/wled/translations/zh-Hans.json b/homeassistant/components/wled/translations/zh-Hans.json deleted file mode 100644 index 77eafbdb9bd..00000000000 --- a/homeassistant/components/wled/translations/zh-Hans.json +++ /dev/null @@ -1,21 +0,0 @@ -{ - "config": { - "abort": { - "already_configured": "Wykryto urz\u0105dzenie [%key:component::wled::title%]", - "connection_error": "Wykryto urz\u0105dzenie [%key:component::wled::title%]" - }, - "error": { - "connection_error": "Wykryto urz\u0105dzenie [%key:component::wled::title%]" - }, - "flow_title": "Wykryto urz\u0105dzenie [%key:component::wled::title%]", - "step": { - "user": { - "description": "Wykryto urz\u0105dzenie [%key:component::wled::title%]" - }, - "zeroconf_confirm": { - "description": "Wykryto urz\u0105dzenie [%key:component::wled::title%]", - "title": "Wykryto urz\u0105dzenie [%key:component::wled::title%]" - } - } - } -} \ No newline at end of file diff --git a/homeassistant/components/xiaomi_aqara/__init__.py b/homeassistant/components/xiaomi_aqara/__init__.py index 450a6e4c862..d759785f49f 100644 --- a/homeassistant/components/xiaomi_aqara/__init__.py +++ b/homeassistant/components/xiaomi_aqara/__init__.py @@ -1,11 +1,12 @@ """Support for Xiaomi Gateways.""" +import asyncio from datetime import timedelta import logging import voluptuous as vol -from xiaomi_gateway import XiaomiGatewayDiscovery +from xiaomi_gateway import XiaomiGateway, XiaomiGatewayDiscovery -from homeassistant.components.discovery import SERVICE_XIAOMI_GW +from homeassistant import config_entries, core from homeassistant.const import ( ATTR_BATTERY_LEVEL, ATTR_VOLTAGE, @@ -15,29 +16,34 @@ from homeassistant.const import ( EVENT_HOMEASSISTANT_STOP, ) from homeassistant.core import callback -from homeassistant.helpers import discovery +from homeassistant.helpers import device_registry as dr import homeassistant.helpers.config_validation as cv +from 
homeassistant.helpers.device_registry import format_mac from homeassistant.helpers.entity import Entity from homeassistant.helpers.event import async_track_point_in_utc_time from homeassistant.util.dt import utcnow +from .const import ( + CONF_INTERFACE, + CONF_KEY, + CONF_PROTOCOL, + CONF_SID, + DEFAULT_DISCOVERY_RETRY, + DOMAIN, + GATEWAYS_KEY, + LISTENER_KEY, +) + _LOGGER = logging.getLogger(__name__) +GATEWAY_PLATFORMS = ["binary_sensor", "sensor", "switch", "light", "cover", "lock"] +GATEWAY_PLATFORMS_NO_KEY = ["binary_sensor", "sensor"] + ATTR_GW_MAC = "gw_mac" ATTR_RINGTONE_ID = "ringtone_id" ATTR_RINGTONE_VOL = "ringtone_vol" ATTR_DEVICE_ID = "device_id" -CONF_DISCOVERY_RETRY = "discovery_retry" -CONF_GATEWAYS = "gateways" -CONF_INTERFACE = "interface" -CONF_KEY = "key" -CONF_DISABLE = "disable" - -DOMAIN = "xiaomi_aqara" - -PY_XIAOMI_GATEWAY = "xiaomi_gw" - TIME_TILL_UNAVAILABLE = timedelta(minutes=150) SERVICE_PLAY_RINGTONE = "play_ringtone" @@ -45,10 +51,6 @@ SERVICE_STOP_RINGTONE = "stop_ringtone" SERVICE_ADD_DEVICE = "add_device" SERVICE_REMOVE_DEVICE = "remove_device" -GW_MAC = vol.All( - cv.string, lambda value: value.replace(":", "").lower(), vol.Length(min=12, max=12) -) - SERVICE_SCHEMA_PLAY_RINGTONE = vol.Schema( { vol.Required(ATTR_RINGTONE_ID): vol.All( @@ -65,102 +67,8 @@ SERVICE_SCHEMA_REMOVE_DEVICE = vol.Schema( ) -GATEWAY_CONFIG = vol.Schema( - { - vol.Optional(CONF_KEY): vol.All(cv.string, vol.Length(min=16, max=16)), - vol.Optional(CONF_HOST): cv.string, - vol.Optional(CONF_PORT, default=9898): cv.port, - vol.Optional(CONF_DISABLE, default=False): cv.boolean, - } -) - -GATEWAY_CONFIG_MAC_OPTIONAL = GATEWAY_CONFIG.extend({vol.Optional(CONF_MAC): GW_MAC}) - -GATEWAY_CONFIG_MAC_REQUIRED = GATEWAY_CONFIG.extend({vol.Required(CONF_MAC): GW_MAC}) - - -def _fix_conf_defaults(config): - """Update some configuration defaults.""" - config["sid"] = config.pop(CONF_MAC, None) - - if config.get(CONF_KEY) is None: - _LOGGER.warning( - "Key is not provided for gateway %s. Controlling the gateway " - "will not be possible", - config["sid"], - ) - - if config.get(CONF_HOST) is None: - config.pop(CONF_PORT) - - return config - - -CONFIG_SCHEMA = vol.Schema( - { - DOMAIN: vol.Schema( - { - vol.Optional(CONF_GATEWAYS, default={}): vol.All( - cv.ensure_list, - vol.Any( - vol.All([GATEWAY_CONFIG_MAC_OPTIONAL], vol.Length(max=1)), - vol.All([GATEWAY_CONFIG_MAC_REQUIRED], vol.Length(min=2)), - ), - [_fix_conf_defaults], - ), - vol.Optional(CONF_INTERFACE, default="any"): cv.string, - vol.Optional(CONF_DISCOVERY_RETRY, default=3): cv.positive_int, - } - ) - }, - extra=vol.ALLOW_EXTRA, -) - - def setup(hass, config): """Set up the Xiaomi component.""" - gateways = [] - interface = "any" - discovery_retry = 3 - if DOMAIN in config: - gateways = config[DOMAIN][CONF_GATEWAYS] - interface = config[DOMAIN][CONF_INTERFACE] - discovery_retry = config[DOMAIN][CONF_DISCOVERY_RETRY] - - async def xiaomi_gw_discovered(service, discovery_info): - """Perform action when Xiaomi Gateway device(s) has been found.""" - # We don't need to do anything here, the purpose of Home Assistant's - # discovery service is to just trigger loading of this - # component, and then its own discovery process kicks in. 
- - discovery.listen(hass, SERVICE_XIAOMI_GW, xiaomi_gw_discovered) - - xiaomi = hass.data[PY_XIAOMI_GATEWAY] = XiaomiGatewayDiscovery( - hass.add_job, gateways, interface - ) - - _LOGGER.debug("Expecting %s gateways", len(gateways)) - for k in range(discovery_retry): - _LOGGER.info("Discovering Xiaomi Gateways (Try %s)", k + 1) - xiaomi.discover_gateways() - if len(xiaomi.gateways) >= len(gateways): - break - - if not xiaomi.gateways: - _LOGGER.error("No gateway discovered") - return False - xiaomi.listen() - _LOGGER.debug("Gateways discovered. Listening for broadcasts") - - for component in ["binary_sensor", "sensor", "switch", "light", "cover", "lock"]: - discovery.load_platform(hass, component, DOMAIN, {}, config) - - def stop_xiaomi(event): - """Stop Xiaomi Socket.""" - _LOGGER.info("Shutting down Xiaomi Hub") - xiaomi.stop_listen() - - hass.bus.listen_once(EVENT_HOMEASSISTANT_STOP, stop_xiaomi) def play_ringtone_service(call): """Service to play ringtone through Gateway.""" @@ -196,13 +104,13 @@ def setup(hass, config): gateway = call.data.get(ATTR_GW_MAC) gateway.write_to_hub(gateway.sid, remove_device=device_id) - gateway_only_schema = _add_gateway_to_schema(xiaomi, vol.Schema({})) + gateway_only_schema = _add_gateway_to_schema(hass, vol.Schema({})) hass.services.register( DOMAIN, SERVICE_PLAY_RINGTONE, play_ringtone_service, - schema=_add_gateway_to_schema(xiaomi, SERVICE_SCHEMA_PLAY_RINGTONE), + schema=_add_gateway_to_schema(hass, SERVICE_SCHEMA_PLAY_RINGTONE), ) hass.services.register( @@ -217,21 +125,119 @@ def setup(hass, config): DOMAIN, SERVICE_REMOVE_DEVICE, remove_device_service, - schema=_add_gateway_to_schema(xiaomi, SERVICE_SCHEMA_REMOVE_DEVICE), + schema=_add_gateway_to_schema(hass, SERVICE_SCHEMA_REMOVE_DEVICE), ) return True +async def async_setup_entry( + hass: core.HomeAssistant, entry: config_entries.ConfigEntry +): + """Set up the xiaomi aqara components from a config entry.""" + hass.data.setdefault(DOMAIN, {}) + hass.data[DOMAIN].setdefault(GATEWAYS_KEY, {}) + + # Connect to Xiaomi Aqara Gateway + xiaomi_gateway = await hass.async_add_executor_job( + XiaomiGateway, + entry.data[CONF_HOST], + entry.data[CONF_PORT], + entry.data[CONF_SID], + entry.data[CONF_KEY], + DEFAULT_DISCOVERY_RETRY, + entry.data[CONF_INTERFACE], + entry.data[CONF_PROTOCOL], + ) + hass.data[DOMAIN][GATEWAYS_KEY][entry.entry_id] = xiaomi_gateway + + gateway_discovery = hass.data[DOMAIN].setdefault( + LISTENER_KEY, + XiaomiGatewayDiscovery(hass.add_job, [], entry.data[CONF_INTERFACE]), + ) + + if len(hass.data[DOMAIN][GATEWAYS_KEY]) == 1: + # start listening for local pushes (only once) + await hass.async_add_executor_job(gateway_discovery.listen) + + # register stop callback to shut down listening for local pushes + def stop_xiaomi(event): + """Stop Xiaomi Socket.""" + _LOGGER.debug("Shutting down Xiaomi Gateway Listener") + gateway_discovery.stop_listen() + + hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, stop_xiaomi) + + gateway_discovery.gateways[entry.data[CONF_HOST]] = xiaomi_gateway + _LOGGER.debug( + "Gateway with host '%s' connected, listening for broadcasts", + entry.data[CONF_HOST], + ) + + device_registry = await dr.async_get_registry(hass) + device_registry.async_get_or_create( + config_entry_id=entry.entry_id, + identifiers={(DOMAIN, entry.unique_id)}, + manufacturer="Xiaomi Aqara", + name=entry.title, + sw_version=entry.data[CONF_PROTOCOL], + ) + + if entry.data[CONF_KEY] is not None: + platforms = GATEWAY_PLATFORMS + else: + platforms = GATEWAY_PLATFORMS_NO_KEY + + for 
component in platforms: + hass.async_create_task( + hass.config_entries.async_forward_entry_setup(entry, component) + ) + + return True + + +async def async_unload_entry( + hass: core.HomeAssistant, entry: config_entries.ConfigEntry +): + """Unload a config entry.""" + if entry.data[CONF_KEY] is not None: + platforms = GATEWAY_PLATFORMS + else: + platforms = GATEWAY_PLATFORMS_NO_KEY + + unload_ok = all( + await asyncio.gather( + *[ + hass.config_entries.async_forward_entry_unload(entry, component) + for component in platforms + ] + ) + ) + if unload_ok: + hass.data[DOMAIN][GATEWAYS_KEY].pop(entry.entry_id) + + if len(hass.data[DOMAIN][GATEWAYS_KEY]) == 0: + # No gateways left, stop Xiaomi socket + hass.data[DOMAIN].pop(GATEWAYS_KEY) + _LOGGER.debug("Shutting down Xiaomi Gateway Listener") + gateway_discovery = hass.data[DOMAIN].pop(LISTENER_KEY) + await hass.async_add_executor_job(gateway_discovery.stop_listen) + + return unload_ok + + class XiaomiDevice(Entity): """Representation a base Xiaomi device.""" - def __init__(self, device, device_type, xiaomi_hub): + def __init__(self, device, device_type, xiaomi_hub, config_entry): """Initialize the Xiaomi device.""" self._state = None self._is_available = True self._sid = device["sid"] + self._model = device["model"] + self._protocol = device["proto"] self._name = f"{device_type}_{self._sid}" + self._device_name = f"{self._model}_{self._sid}" self._type = device_type self._write_to_hub = xiaomi_hub.write_to_hub self._get_from_hub = xiaomi_hub.get_from_hub @@ -248,6 +254,16 @@ class XiaomiDevice(Entity): else: self._unique_id = f"{self._type}{self._sid}" + self._gateway_id = config_entry.unique_id + if config_entry.data[CONF_MAC] == format_mac(self._sid): + # this entity belongs to the gateway itself + self._is_gateway = True + self._device_id = config_entry.unique_id + else: + # this entity is connected through zigbee + self._is_gateway = False + self._device_id = self._sid + def _add_push_data_job(self, *args): self.hass.add_job(self.push_data, *args) @@ -266,6 +282,32 @@ class XiaomiDevice(Entity): """Return a unique ID.""" return self._unique_id + @property + def device_id(self): + """Return the device id of the Xiaomi Aqara device.""" + return self._device_id + + @property + def device_info(self): + """Return the device info of the Xiaomi Aqara device.""" + if self._is_gateway: + device_info = { + "identifiers": {(DOMAIN, self._device_id)}, + "model": self._model, + } + else: + device_info = { + "connections": {(dr.CONNECTION_ZIGBEE, self._device_id)}, + "identifiers": {(DOMAIN, self._device_id)}, + "manufacturer": "Xiaomi Aqara", + "model": self._model, + "name": self._device_name, + "sw_version": self._protocol, + "via_device": (DOMAIN, self._gateway_id), + } + + return device_info + @property def available(self): """Return True if entity is available.""" @@ -334,24 +376,26 @@ class XiaomiDevice(Entity): raise NotImplementedError() -def _add_gateway_to_schema(xiaomi, schema): +def _add_gateway_to_schema(hass, schema): """Extend a voluptuous schema with a gateway validator.""" def gateway(sid): """Convert sid to a gateway.""" sid = str(sid).replace(":", "").lower() - for gateway in xiaomi.gateways.values(): + for gateway in hass.data[DOMAIN][GATEWAYS_KEY].values(): if gateway.sid == sid: return gateway raise vol.Invalid(f"Unknown gateway sid {sid}") - gateways = list(xiaomi.gateways.values()) kwargs = {} + xiaomi_data = hass.data.get(DOMAIN) + if xiaomi_data is not None: + gateways = list(xiaomi_data[GATEWAYS_KEY].values()) - # If the 
user has only 1 gateway, make it the default for services. - if len(gateways) == 1: - kwargs["default"] = gateways[0].sid + # If the user has only 1 gateway, make it the default for services. + if len(gateways) == 1: + kwargs["default"] = gateways[0].sid return schema.extend({vol.Required(ATTR_GW_MAC, **kwargs): gateway}) diff --git a/homeassistant/components/xiaomi_aqara/binary_sensor.py b/homeassistant/components/xiaomi_aqara/binary_sensor.py index 01caddb7eb5..44dc6706d57 100644 --- a/homeassistant/components/xiaomi_aqara/binary_sensor.py +++ b/homeassistant/components/xiaomi_aqara/binary_sensor.py @@ -5,7 +5,8 @@ from homeassistant.components.binary_sensor import BinarySensorEntity from homeassistant.core import callback from homeassistant.helpers.event import async_call_later -from . import PY_XIAOMI_GATEWAY, XiaomiDevice +from . import XiaomiDevice +from .const import DOMAIN, GATEWAYS_KEY _LOGGER = logging.getLogger(__name__) @@ -21,94 +22,115 @@ DENSITY = "density" ATTR_DENSITY = "Density" -def setup_platform(hass, config, add_entities, discovery_info=None): +async def async_setup_entry(hass, config_entry, async_add_entities): """Perform the setup for Xiaomi devices.""" - devices = [] - for (_, gateway) in hass.data[PY_XIAOMI_GATEWAY].gateways.items(): - for device in gateway.devices["binary_sensor"]: - model = device["model"] - if model in ["motion", "sensor_motion", "sensor_motion.aq2"]: - devices.append(XiaomiMotionSensor(device, hass, gateway)) - elif model in ["magnet", "sensor_magnet", "sensor_magnet.aq2"]: - devices.append(XiaomiDoorSensor(device, gateway)) - elif model == "sensor_wleak.aq1": - devices.append(XiaomiWaterLeakSensor(device, gateway)) - elif model in ["smoke", "sensor_smoke"]: - devices.append(XiaomiSmokeSensor(device, gateway)) - elif model in ["natgas", "sensor_natgas"]: - devices.append(XiaomiNatgasSensor(device, gateway)) - elif model in [ - "switch", - "sensor_switch", - "sensor_switch.aq2", - "sensor_switch.aq3", - "remote.b1acn01", - ]: - if "proto" not in device or int(device["proto"][0:1]) == 1: - data_key = "status" - else: - data_key = "button_0" - devices.append(XiaomiButton(device, "Switch", data_key, hass, gateway)) - elif model in [ - "86sw1", - "sensor_86sw1", - "sensor_86sw1.aq1", - "remote.b186acn01", - ]: - if "proto" not in device or int(device["proto"][0:1]) == 1: - data_key = "channel_0" - else: - data_key = "button_0" - devices.append( - XiaomiButton(device, "Wall Switch", data_key, hass, gateway) - ) - elif model in [ - "86sw2", - "sensor_86sw2", - "sensor_86sw2.aq1", - "remote.b286acn01", - ]: - if "proto" not in device or int(device["proto"][0:1]) == 1: - data_key_left = "channel_0" - data_key_right = "channel_1" - else: - data_key_left = "button_0" - data_key_right = "button_1" - devices.append( - XiaomiButton( - device, "Wall Switch (Left)", data_key_left, hass, gateway - ) - ) - devices.append( - XiaomiButton( - device, "Wall Switch (Right)", data_key_right, hass, gateway - ) - ) - devices.append( - XiaomiButton( - device, "Wall Switch (Both)", "dual_channel", hass, gateway - ) - ) - elif model in ["cube", "sensor_cube", "sensor_cube.aqgl01"]: - devices.append(XiaomiCube(device, hass, gateway)) - elif model in ["vibration", "vibration.aq1"]: - devices.append(XiaomiVibration(device, "Vibration", "status", gateway)) + entities = [] + gateway = hass.data[DOMAIN][GATEWAYS_KEY][config_entry.entry_id] + for entity in gateway.devices["binary_sensor"]: + model = entity["model"] + if model in ["motion", "sensor_motion", "sensor_motion.aq2"]: 
+ entities.append(XiaomiMotionSensor(entity, hass, gateway, config_entry)) + elif model in ["magnet", "sensor_magnet", "sensor_magnet.aq2"]: + entities.append(XiaomiDoorSensor(entity, gateway, config_entry)) + elif model == "sensor_wleak.aq1": + entities.append(XiaomiWaterLeakSensor(entity, gateway, config_entry)) + elif model in ["smoke", "sensor_smoke"]: + entities.append(XiaomiSmokeSensor(entity, gateway, config_entry)) + elif model in ["natgas", "sensor_natgas"]: + entities.append(XiaomiNatgasSensor(entity, gateway, config_entry)) + elif model in [ + "switch", + "sensor_switch", + "sensor_switch.aq2", + "sensor_switch.aq3", + "remote.b1acn01", + ]: + if "proto" not in entity or int(entity["proto"][0:1]) == 1: + data_key = "status" else: - _LOGGER.warning("Unmapped Device Model %s", model) + data_key = "button_0" + entities.append( + XiaomiButton(entity, "Switch", data_key, hass, gateway, config_entry) + ) + elif model in [ + "86sw1", + "sensor_86sw1", + "sensor_86sw1.aq1", + "remote.b186acn01", + ]: + if "proto" not in entity or int(entity["proto"][0:1]) == 1: + data_key = "channel_0" + else: + data_key = "button_0" + entities.append( + XiaomiButton( + entity, "Wall Switch", data_key, hass, gateway, config_entry + ) + ) + elif model in [ + "86sw2", + "sensor_86sw2", + "sensor_86sw2.aq1", + "remote.b286acn01", + ]: + if "proto" not in entity or int(entity["proto"][0:1]) == 1: + data_key_left = "channel_0" + data_key_right = "channel_1" + else: + data_key_left = "button_0" + data_key_right = "button_1" + entities.append( + XiaomiButton( + entity, + "Wall Switch (Left)", + data_key_left, + hass, + gateway, + config_entry, + ) + ) + entities.append( + XiaomiButton( + entity, + "Wall Switch (Right)", + data_key_right, + hass, + gateway, + config_entry, + ) + ) + entities.append( + XiaomiButton( + entity, + "Wall Switch (Both)", + "dual_channel", + hass, + gateway, + config_entry, + ) + ) + elif model in ["cube", "sensor_cube", "sensor_cube.aqgl01"]: + entities.append(XiaomiCube(entity, hass, gateway, config_entry)) + elif model in ["vibration", "vibration.aq1"]: + entities.append( + XiaomiVibration(entity, "Vibration", "status", gateway, config_entry) + ) + else: + _LOGGER.warning("Unmapped Device Model %s", model) - add_entities(devices) + async_add_entities(entities) class XiaomiBinarySensor(XiaomiDevice, BinarySensorEntity): """Representation of a base XiaomiBinarySensor.""" - def __init__(self, device, name, xiaomi_hub, data_key, device_class): + def __init__(self, device, name, xiaomi_hub, data_key, device_class, config_entry): """Initialize the XiaomiSmokeSensor.""" self._data_key = data_key self._device_class = device_class self._should_poll = False self._density = 0 - XiaomiDevice.__init__(self, device, name, xiaomi_hub) + super().__init__(device, name, xiaomi_hub, config_entry) @property def should_poll(self): @@ -134,11 +156,11 @@ class XiaomiBinarySensor(XiaomiDevice, BinarySensorEntity): class XiaomiNatgasSensor(XiaomiBinarySensor): """Representation of a XiaomiNatgasSensor.""" - def __init__(self, device, xiaomi_hub): + def __init__(self, device, xiaomi_hub, config_entry): """Initialize the XiaomiSmokeSensor.""" self._density = None - XiaomiBinarySensor.__init__( - self, device, "Natgas Sensor", xiaomi_hub, "alarm", "gas" + super().__init__( + device, "Natgas Sensor", xiaomi_hub, "alarm", "gas", config_entry ) @property @@ -172,7 +194,7 @@ class XiaomiNatgasSensor(XiaomiBinarySensor): class XiaomiMotionSensor(XiaomiBinarySensor): """Representation of a XiaomiMotionSensor.""" - 
def __init__(self, device, hass, xiaomi_hub): + def __init__(self, device, hass, xiaomi_hub, config_entry): """Initialize the XiaomiMotionSensor.""" self._hass = hass self._no_motion_since = 0 @@ -181,8 +203,8 @@ class XiaomiMotionSensor(XiaomiBinarySensor): data_key = "status" else: data_key = "motion_status" - XiaomiBinarySensor.__init__( - self, device, "Motion Sensor", xiaomi_hub, data_key, "motion" + super().__init__( + device, "Motion Sensor", xiaomi_hub, data_key, "motion", config_entry ) @property @@ -263,15 +285,15 @@ class XiaomiMotionSensor(XiaomiBinarySensor): class XiaomiDoorSensor(XiaomiBinarySensor): """Representation of a XiaomiDoorSensor.""" - def __init__(self, device, xiaomi_hub): + def __init__(self, device, xiaomi_hub, config_entry): """Initialize the XiaomiDoorSensor.""" self._open_since = 0 if "proto" not in device or int(device["proto"][0:1]) == 1: data_key = "status" else: data_key = "window_status" - XiaomiBinarySensor.__init__( - self, device, "Door Window Sensor", xiaomi_hub, data_key, "opening" + super().__init__( + device, "Door Window Sensor", xiaomi_hub, data_key, "opening", config_entry, ) @property @@ -309,14 +331,14 @@ class XiaomiDoorSensor(XiaomiBinarySensor): class XiaomiWaterLeakSensor(XiaomiBinarySensor): """Representation of a XiaomiWaterLeakSensor.""" - def __init__(self, device, xiaomi_hub): + def __init__(self, device, xiaomi_hub, config_entry): """Initialize the XiaomiWaterLeakSensor.""" if "proto" not in device or int(device["proto"][0:1]) == 1: data_key = "status" else: data_key = "wleak_status" - XiaomiBinarySensor.__init__( - self, device, "Water Leak Sensor", xiaomi_hub, data_key, "moisture" + super().__init__( + device, "Water Leak Sensor", xiaomi_hub, data_key, "moisture", config_entry, ) def parse_data(self, data, raw_data): @@ -343,11 +365,11 @@ class XiaomiWaterLeakSensor(XiaomiBinarySensor): class XiaomiSmokeSensor(XiaomiBinarySensor): """Representation of a XiaomiSmokeSensor.""" - def __init__(self, device, xiaomi_hub): + def __init__(self, device, xiaomi_hub, config_entry): """Initialize the XiaomiSmokeSensor.""" self._density = 0 - XiaomiBinarySensor.__init__( - self, device, "Smoke Sensor", xiaomi_hub, "alarm", "smoke" + super().__init__( + device, "Smoke Sensor", xiaomi_hub, "alarm", "smoke", config_entry ) @property @@ -380,10 +402,10 @@ class XiaomiSmokeSensor(XiaomiBinarySensor): class XiaomiVibration(XiaomiBinarySensor): """Representation of a Xiaomi Vibration Sensor.""" - def __init__(self, device, name, data_key, xiaomi_hub): + def __init__(self, device, name, data_key, xiaomi_hub, config_entry): """Initialize the XiaomiVibration.""" self._last_action = None - super().__init__(device, name, xiaomi_hub, data_key, None) + super().__init__(device, name, xiaomi_hub, data_key, None, config_entry) @property def device_state_attributes(self): @@ -414,11 +436,11 @@ class XiaomiVibration(XiaomiBinarySensor): class XiaomiButton(XiaomiBinarySensor): """Representation of a Xiaomi Button.""" - def __init__(self, device, name, data_key, hass, xiaomi_hub): + def __init__(self, device, name, data_key, hass, xiaomi_hub, config_entry): """Initialize the XiaomiButton.""" self._hass = hass self._last_action = None - XiaomiBinarySensor.__init__(self, device, name, xiaomi_hub, data_key, None) + super().__init__(device, name, xiaomi_hub, data_key, None, config_entry) @property def device_state_attributes(self): @@ -469,7 +491,7 @@ class XiaomiButton(XiaomiBinarySensor): class XiaomiCube(XiaomiBinarySensor): """Representation of a Xiaomi Cube.""" 
- def __init__(self, device, hass, xiaomi_hub): + def __init__(self, device, hass, xiaomi_hub, config_entry): """Initialize the Xiaomi Cube.""" self._hass = hass self._last_action = None @@ -478,7 +500,7 @@ class XiaomiCube(XiaomiBinarySensor): data_key = "status" else: data_key = "cube_status" - XiaomiBinarySensor.__init__(self, device, "Cube", xiaomi_hub, data_key, None) + super().__init__(device, "Cube", xiaomi_hub, data_key, None, config_entry) @property def device_state_attributes(self): diff --git a/homeassistant/components/xiaomi_aqara/config_flow.py b/homeassistant/components/xiaomi_aqara/config_flow.py new file mode 100644 index 00000000000..b9cfe58ac4b --- /dev/null +++ b/homeassistant/components/xiaomi_aqara/config_flow.py @@ -0,0 +1,183 @@ +"""Config flow to configure Xiaomi Aqara.""" +import logging +from socket import gaierror + +import voluptuous as vol +from xiaomi_gateway import XiaomiGatewayDiscovery + +from homeassistant import config_entries +from homeassistant.const import CONF_HOST, CONF_MAC, CONF_NAME, CONF_PORT +from homeassistant.helpers.device_registry import format_mac + +# pylint: disable=unused-import +from .const import ( + CONF_INTERFACE, + CONF_KEY, + CONF_PROTOCOL, + CONF_SID, + DOMAIN, + ZEROCONF_GATEWAY, +) + +_LOGGER = logging.getLogger(__name__) + +DEFAULT_GATEWAY_NAME = "Xiaomi Aqara Gateway" +DEFAULT_INTERFACE = "any" + + +GATEWAY_CONFIG = vol.Schema( + {vol.Optional(CONF_INTERFACE, default=DEFAULT_INTERFACE): str} +) +GATEWAY_SETTINGS = vol.Schema( + { + vol.Optional(CONF_KEY): vol.All(str, vol.Length(min=16, max=16)), + vol.Optional(CONF_NAME, default=DEFAULT_GATEWAY_NAME): str, + } +) + + +class XiaomiAqaraFlowHandler(config_entries.ConfigFlow, domain=DOMAIN): + """Handle a Xiaomi Aqara config flow.""" + + VERSION = 1 + CONNECTION_CLASS = config_entries.CONN_CLASS_LOCAL_PUSH + + def __init__(self): + """Initialize.""" + self.host = None + self.interface = DEFAULT_INTERFACE + self.gateways = None + self.selected_gateway = None + + async def async_step_user(self, user_input=None): + """Handle a flow initialized by the user.""" + errors = {} + if user_input is not None: + self.interface = user_input[CONF_INTERFACE] + + # Discover Xiaomi Aqara Gateways in the network to get required SIDs. 
+ xiaomi = XiaomiGatewayDiscovery(self.hass.add_job, [], self.interface) + try: + await self.hass.async_add_executor_job(xiaomi.discover_gateways) + except gaierror: + errors[CONF_INTERFACE] = "invalid_interface" + + if not errors: + self.gateways = xiaomi.gateways + + # if host is already known by zeroconf discovery + if self.host is not None: + self.selected_gateway = self.gateways.get(self.host) + if self.selected_gateway is not None: + return await self.async_step_settings() + + errors["base"] = "not_found_error" + else: + if len(self.gateways) == 1: + self.selected_gateway = list(self.gateways.values())[0] + return await self.async_step_settings() + if len(self.gateways) > 1: + return await self.async_step_select() + + errors["base"] = "discovery_error" + + return self.async_show_form( + step_id="user", data_schema=GATEWAY_CONFIG, errors=errors + ) + + async def async_step_select(self, user_input=None): + """Handle multiple aqara gateways found.""" + errors = {} + if user_input is not None: + ip_adress = user_input["select_ip"] + self.selected_gateway = self.gateways[ip_adress] + return await self.async_step_settings() + + select_schema = vol.Schema( + { + vol.Required("select_ip"): vol.In( + [gateway.ip_adress for gateway in self.gateways.values()] + ) + } + ) + + return self.async_show_form( + step_id="select", data_schema=select_schema, errors=errors + ) + + async def async_step_zeroconf(self, discovery_info): + """Handle zeroconf discovery.""" + name = discovery_info.get("name") + self.host = discovery_info.get("host") + mac_address = discovery_info.get("properties", {}).get("mac") + + if not name or not self.host or not mac_address: + return self.async_abort(reason="not_xiaomi_aqara") + + # Check if the discovered device is an xiaomi aqara gateway. + if not name.startswith(ZEROCONF_GATEWAY): + _LOGGER.debug( + "Xiaomi device '%s' discovered with host %s, not identified as xiaomi aqara gateway", + name, + self.host, + ) + return self.async_abort(reason="not_xiaomi_aqara") + + # format mac (include colons and make lowercase) + mac_address = format_mac(mac_address) + + unique_id = mac_address + await self.async_set_unique_id(unique_id) + self._abort_if_unique_id_configured({CONF_HOST: self.host}) + + # pylint: disable=no-member # https://github.com/PyCQA/pylint/issues/3167 + self.context.update({"title_placeholders": {"name": self.host}}) + + return await self.async_step_user() + + async def async_step_settings(self, user_input=None): + """Specify settings and connect aqara gateway.""" + errors = {} + if user_input is not None: + # get all required data + name = user_input[CONF_NAME] + key = user_input.get(CONF_KEY) + ip_adress = self.selected_gateway.ip_adress + port = self.selected_gateway.port + sid = self.selected_gateway.sid + protocol = self.selected_gateway.proto + + if key is not None: + # validate key by issuing stop ringtone playback command. 
+ self.selected_gateway.key = key + valid_key = self.selected_gateway.write_to_hub(sid, mid=10000) + else: + valid_key = True + + if valid_key: + # format_mac, for a gateway the sid equals the mac address + mac_address = format_mac(sid) + + # set unique_id + unique_id = mac_address + await self.async_set_unique_id(unique_id) + self._abort_if_unique_id_configured() + + return self.async_create_entry( + title=name, + data={ + CONF_HOST: ip_adress, + CONF_PORT: port, + CONF_MAC: mac_address, + CONF_INTERFACE: self.interface, + CONF_PROTOCOL: protocol, + CONF_KEY: key, + CONF_SID: sid, + }, + ) + + errors[CONF_KEY] = "invalid_key" + + return self.async_show_form( + step_id="settings", data_schema=GATEWAY_SETTINGS, errors=errors + ) diff --git a/homeassistant/components/xiaomi_aqara/const.py b/homeassistant/components/xiaomi_aqara/const.py new file mode 100644 index 00000000000..ab214cb13cc --- /dev/null +++ b/homeassistant/components/xiaomi_aqara/const.py @@ -0,0 +1,15 @@ +"""Constants of the Xiaomi Aqara component.""" + +DOMAIN = "xiaomi_aqara" + +GATEWAYS_KEY = "gateways" +LISTENER_KEY = "listener" + +ZEROCONF_GATEWAY = "lumi-gateway" + +CONF_INTERFACE = "interface" +CONF_PROTOCOL = "protocol" +CONF_KEY = "key" +CONF_SID = "sid" + +DEFAULT_DISCOVERY_RETRY = 5 diff --git a/homeassistant/components/xiaomi_aqara/cover.py b/homeassistant/components/xiaomi_aqara/cover.py index 52d2487e74f..fbe7ae334e6 100644 --- a/homeassistant/components/xiaomi_aqara/cover.py +++ b/homeassistant/components/xiaomi_aqara/cover.py @@ -3,7 +3,8 @@ import logging from homeassistant.components.cover import ATTR_POSITION, CoverEntity -from . import PY_XIAOMI_GATEWAY, XiaomiDevice +from . import XiaomiDevice +from .const import DOMAIN, GATEWAYS_KEY _LOGGER = logging.getLogger(__name__) @@ -13,29 +14,31 @@ DATA_KEY_PROTO_V1 = "status" DATA_KEY_PROTO_V2 = "curtain_status" -def setup_platform(hass, config, add_entities, discovery_info=None): +async def async_setup_entry(hass, config_entry, async_add_entities): """Perform the setup for Xiaomi devices.""" - devices = [] - for (_, gateway) in hass.data[PY_XIAOMI_GATEWAY].gateways.items(): - for device in gateway.devices["cover"]: - model = device["model"] - if model in ["curtain", "curtain.aq2", "curtain.hagl04"]: - if "proto" not in device or int(device["proto"][0:1]) == 1: - data_key = DATA_KEY_PROTO_V1 - else: - data_key = DATA_KEY_PROTO_V2 - devices.append(XiaomiGenericCover(device, "Curtain", data_key, gateway)) - add_entities(devices) + entities = [] + gateway = hass.data[DOMAIN][GATEWAYS_KEY][config_entry.entry_id] + for device in gateway.devices["cover"]: + model = device["model"] + if model in ["curtain", "curtain.aq2", "curtain.hagl04"]: + if "proto" not in device or int(device["proto"][0:1]) == 1: + data_key = DATA_KEY_PROTO_V1 + else: + data_key = DATA_KEY_PROTO_V2 + entities.append( + XiaomiGenericCover(device, "Curtain", data_key, gateway, config_entry) + ) + async_add_entities(entities) class XiaomiGenericCover(XiaomiDevice, CoverEntity): """Representation of a XiaomiGenericCover.""" - def __init__(self, device, name, data_key, xiaomi_hub): + def __init__(self, device, name, data_key, xiaomi_hub, config_entry): """Initialize the XiaomiGenericCover.""" self._data_key = data_key self._pos = 0 - XiaomiDevice.__init__(self, device, name, xiaomi_hub) + super().__init__(device, name, xiaomi_hub, config_entry) @property def current_cover_position(self): diff --git a/homeassistant/components/xiaomi_aqara/light.py b/homeassistant/components/xiaomi_aqara/light.py index 
f1cd17f9dee..494c9af920e 100644 --- a/homeassistant/components/xiaomi_aqara/light.py +++ b/homeassistant/components/xiaomi_aqara/light.py @@ -12,32 +12,35 @@ from homeassistant.components.light import ( ) import homeassistant.util.color as color_util -from . import PY_XIAOMI_GATEWAY, XiaomiDevice +from . import XiaomiDevice +from .const import DOMAIN, GATEWAYS_KEY _LOGGER = logging.getLogger(__name__) -def setup_platform(hass, config, add_entities, discovery_info=None): +async def async_setup_entry(hass, config_entry, async_add_entities): """Perform the setup for Xiaomi devices.""" - devices = [] - for (_, gateway) in hass.data[PY_XIAOMI_GATEWAY].gateways.items(): - for device in gateway.devices["light"]: - model = device["model"] - if model in ["gateway", "gateway.v3"]: - devices.append(XiaomiGatewayLight(device, "Gateway Light", gateway)) - add_entities(devices) + entities = [] + gateway = hass.data[DOMAIN][GATEWAYS_KEY][config_entry.entry_id] + for device in gateway.devices["light"]: + model = device["model"] + if model in ["gateway", "gateway.v3"]: + entities.append( + XiaomiGatewayLight(device, "Gateway Light", gateway, config_entry) + ) + async_add_entities(entities) class XiaomiGatewayLight(XiaomiDevice, LightEntity): """Representation of a XiaomiGatewayLight.""" - def __init__(self, device, name, xiaomi_hub): + def __init__(self, device, name, xiaomi_hub, config_entry): """Initialize the XiaomiGatewayLight.""" self._data_key = "rgb" self._hs = (0, 0) self._brightness = 100 - XiaomiDevice.__init__(self, device, name, xiaomi_hub) + super().__init__(device, name, xiaomi_hub, config_entry) @property def is_on(self): diff --git a/homeassistant/components/xiaomi_aqara/lock.py b/homeassistant/components/xiaomi_aqara/lock.py index c3835f83391..db858729995 100644 --- a/homeassistant/components/xiaomi_aqara/lock.py +++ b/homeassistant/components/xiaomi_aqara/lock.py @@ -6,7 +6,8 @@ from homeassistant.const import STATE_LOCKED, STATE_UNLOCKED from homeassistant.core import callback from homeassistant.helpers.event import async_call_later -from . import PY_XIAOMI_GATEWAY, XiaomiDevice +from . 
import XiaomiDevice +from .const import DOMAIN, GATEWAYS_KEY _LOGGER = logging.getLogger(__name__) @@ -20,27 +21,26 @@ ATTR_VERIFIED_WRONG_TIMES = "verified_wrong_times" UNLOCK_MAINTAIN_TIME = 5 -async def async_setup_platform(hass, config, async_add_entities, discovery_info=None): +async def async_setup_entry(hass, config_entry, async_add_entities): """Perform the setup for Xiaomi devices.""" - devices = [] - - for gateway in hass.data[PY_XIAOMI_GATEWAY].gateways.values(): - for device in gateway.devices["lock"]: - model = device["model"] - if model == "lock.aq1": - devices.append(XiaomiAqaraLock(device, "Lock", gateway)) - async_add_entities(devices) + entities = [] + gateway = hass.data[DOMAIN][GATEWAYS_KEY][config_entry.entry_id] + for device in gateway.devices["lock"]: + model = device["model"] + if model == "lock.aq1": + entities.append(XiaomiAqaraLock(device, "Lock", gateway, config_entry)) + async_add_entities(entities) class XiaomiAqaraLock(LockEntity, XiaomiDevice): """Representation of a XiaomiAqaraLock.""" - def __init__(self, device, name, xiaomi_hub): + def __init__(self, device, name, xiaomi_hub, config_entry): """Initialize the XiaomiAqaraLock.""" self._changed_by = 0 self._verified_wrong_times = 0 - super().__init__(device, name, xiaomi_hub) + super().__init__(device, name, xiaomi_hub, config_entry) @property def is_locked(self) -> bool: diff --git a/homeassistant/components/xiaomi_aqara/manifest.json b/homeassistant/components/xiaomi_aqara/manifest.json index e604b225fc4..cb6bb376e3b 100644 --- a/homeassistant/components/xiaomi_aqara/manifest.json +++ b/homeassistant/components/xiaomi_aqara/manifest.json @@ -1,8 +1,10 @@ { "domain": "xiaomi_aqara", "name": "Xiaomi Gateway (Aqara)", + "config_flow": true, "documentation": "https://www.home-assistant.io/integrations/xiaomi_aqara", "requirements": ["PyXiaomiGateway==0.12.4"], "after_dependencies": ["discovery"], - "codeowners": ["@danielhiversen", "@syssi"] + "codeowners": ["@danielhiversen", "@syssi"], + "zeroconf": ["_miio._udp.local."] } diff --git a/homeassistant/components/xiaomi_aqara/sensor.py b/homeassistant/components/xiaomi_aqara/sensor.py index d793f920349..fe1eb5a80fe 100644 --- a/homeassistant/components/xiaomi_aqara/sensor.py +++ b/homeassistant/components/xiaomi_aqara/sensor.py @@ -10,7 +10,8 @@ from homeassistant.const import ( UNIT_PERCENTAGE, ) -from . import PY_XIAOMI_GATEWAY, XiaomiDevice +from . 
import XiaomiDevice +from .const import DOMAIN, GATEWAYS_KEY _LOGGER = logging.getLogger(__name__) @@ -24,50 +25,70 @@ SENSOR_TYPES = { } -def setup_platform(hass, config, add_entities, discovery_info=None): +async def async_setup_entry(hass, config_entry, async_add_entities): """Perform the setup for Xiaomi devices.""" - devices = [] - for (_, gateway) in hass.data[PY_XIAOMI_GATEWAY].gateways.items(): - for device in gateway.devices["sensor"]: - if device["model"] == "sensor_ht": - devices.append( - XiaomiSensor(device, "Temperature", "temperature", gateway) + entities = [] + gateway = hass.data[DOMAIN][GATEWAYS_KEY][config_entry.entry_id] + for device in gateway.devices["sensor"]: + if device["model"] == "sensor_ht": + entities.append( + XiaomiSensor( + device, "Temperature", "temperature", gateway, config_entry ) - devices.append(XiaomiSensor(device, "Humidity", "humidity", gateway)) - elif device["model"] in ["weather", "weather.v1"]: - devices.append( - XiaomiSensor(device, "Temperature", "temperature", gateway) + ) + entities.append( + XiaomiSensor(device, "Humidity", "humidity", gateway, config_entry) + ) + elif device["model"] in ["weather", "weather.v1"]: + entities.append( + XiaomiSensor( + device, "Temperature", "temperature", gateway, config_entry ) - devices.append(XiaomiSensor(device, "Humidity", "humidity", gateway)) - devices.append(XiaomiSensor(device, "Pressure", "pressure", gateway)) - elif device["model"] == "sensor_motion.aq2": - devices.append(XiaomiSensor(device, "Illumination", "lux", gateway)) - elif device["model"] in ["gateway", "gateway.v3", "acpartner.v3"]: - devices.append( - XiaomiSensor(device, "Illumination", "illumination", gateway) + ) + entities.append( + XiaomiSensor(device, "Humidity", "humidity", gateway, config_entry) + ) + entities.append( + XiaomiSensor(device, "Pressure", "pressure", gateway, config_entry) + ) + elif device["model"] == "sensor_motion.aq2": + entities.append( + XiaomiSensor(device, "Illumination", "lux", gateway, config_entry) + ) + elif device["model"] in ["gateway", "gateway.v3", "acpartner.v3"]: + entities.append( + XiaomiSensor( + device, "Illumination", "illumination", gateway, config_entry ) - elif device["model"] in ["vibration"]: - devices.append( - XiaomiSensor(device, "Bed Activity", "bed_activity", gateway) + ) + elif device["model"] in ["vibration"]: + entities.append( + XiaomiSensor( + device, "Bed Activity", "bed_activity", gateway, config_entry ) - devices.append( - XiaomiSensor(device, "Tilt Angle", "final_tilt_angle", gateway) + ) + entities.append( + XiaomiSensor( + device, "Tilt Angle", "final_tilt_angle", gateway, config_entry ) - devices.append( - XiaomiSensor(device, "Coordination", "coordination", gateway) + ) + entities.append( + XiaomiSensor( + device, "Coordination", "coordination", gateway, config_entry ) - else: - _LOGGER.warning("Unmapped Device Model ") - add_entities(devices) + ) + else: + _LOGGER.warning("Unmapped Device Model") + async_add_entities(entities) class XiaomiSensor(XiaomiDevice): """Representation of a XiaomiSensor.""" - def __init__(self, device, name, data_key, xiaomi_hub): + def __init__(self, device, name, data_key, xiaomi_hub, config_entry): """Initialize the XiaomiSensor.""" self._data_key = data_key - XiaomiDevice.__init__(self, device, name, xiaomi_hub) + super().__init__(device, name, xiaomi_hub, config_entry) @property def icon(self): diff --git a/homeassistant/components/xiaomi_aqara/strings.json b/homeassistant/components/xiaomi_aqara/strings.json new file mode 100644 index 
00000000000..87e1d37cb93 --- /dev/null +++ b/homeassistant/components/xiaomi_aqara/strings.json @@ -0,0 +1,40 @@ +{ + "config": { + "flow_title": "Xiaomi Aqara Gateway: {name}", + "step": { + "user": { + "title": "Xiaomi Aqara Gateway", + "description": "Connect to your Xiaomi Aqara Gateway", + "data": { + "interface": "The network interface to use" + } + }, + "settings": { + "title": "Xiaomi Aqara Gateway, optional settings", + "description": "The key (password) can be retrieved using this tutorial: https://www.domoticz.com/wiki/Xiaomi_Gateway_(Aqara)#Adding_the_Xiaomi_Gateway_to_Domoticz. If the key is not provided only sensors will be accessible", + "data": { + "key": "The key of your gateway", + "name": "Name of the Gateway" + } + }, + "select": { + "title": "Select the Xiaomi Aqara Gateway that you wish to connect", + "description": "Run the setup again if you want to connect additional gateways", + "data": { + "select_ip": "Gateway IP" + } + } + }, + "error": { + "discovery_error": "Failed to discover a Xiaomi Aqara Gateway, try using the IP of the device running Home Assistant as interface", + "not_found_error": "Zeroconf discovered Gateway could not be located to get the necessary information, try using the IP of the device running Home Assistant as interface", + "invalid_interface": "Invalid network interface", + "invalid_key": "Invalid gateway key" + }, + "abort": { + "already_configured": "[%key:common::config_flow::abort::already_configured_device%]", + "already_in_progress": "Config flow for this gateway is already in progress", + "not_xiaomi_aqara": "Not a Xiaomi Aqara Gateway, discovered device did not match known gateways" + } + } +} diff --git a/homeassistant/components/xiaomi_aqara/switch.py b/homeassistant/components/xiaomi_aqara/switch.py index e711eab46fb..36dadefee1f 100644 --- a/homeassistant/components/xiaomi_aqara/switch.py +++ b/homeassistant/components/xiaomi_aqara/switch.py @@ -3,7 +3,8 @@ import logging from homeassistant.components.switch import SwitchEntity -from . import PY_XIAOMI_GATEWAY, XiaomiDevice +from .
import XiaomiDevice +from .const import DOMAIN, GATEWAYS_KEY _LOGGER = logging.getLogger(__name__) @@ -20,76 +21,108 @@ ENERGY_CONSUMED = "energy_consumed" IN_USE = "inuse" -def setup_platform(hass, config, add_entities, discovery_info=None): +async def async_setup_entry(hass, config_entry, async_add_entities): """Perform the setup for Xiaomi devices.""" - devices = [] - for (_, gateway) in hass.data[PY_XIAOMI_GATEWAY].gateways.items(): - for device in gateway.devices["switch"]: - model = device["model"] - if model == "plug": - if "proto" not in device or int(device["proto"][0:1]) == 1: - data_key = "status" - else: - data_key = "channel_0" - devices.append( - XiaomiGenericSwitch(device, "Plug", data_key, True, gateway) + entities = [] + gateway = hass.data[DOMAIN][GATEWAYS_KEY][config_entry.entry_id] + for device in gateway.devices["switch"]: + model = device["model"] + if model == "plug": + if "proto" not in device or int(device["proto"][0:1]) == 1: + data_key = "status" + else: + data_key = "channel_0" + entities.append( + XiaomiGenericSwitch( + device, "Plug", data_key, True, gateway, config_entry ) - elif model in ["ctrl_neutral1", "ctrl_neutral1.aq1"]: - devices.append( - XiaomiGenericSwitch( - device, "Wall Switch", "channel_0", False, gateway - ) + ) + elif model in ["ctrl_neutral1", "ctrl_neutral1.aq1"]: + entities.append( + XiaomiGenericSwitch( + device, "Wall Switch", "channel_0", False, gateway, config_entry ) - elif model in ["ctrl_ln1", "ctrl_ln1.aq1"]: - devices.append( - XiaomiGenericSwitch( - device, "Wall Switch LN", "channel_0", False, gateway - ) + ) + elif model in ["ctrl_ln1", "ctrl_ln1.aq1"]: + entities.append( + XiaomiGenericSwitch( + device, "Wall Switch LN", "channel_0", False, gateway, config_entry ) - elif model in ["ctrl_neutral2", "ctrl_neutral2.aq1"]: - devices.append( - XiaomiGenericSwitch( - device, "Wall Switch Left", "channel_0", False, gateway - ) + ) + elif model in ["ctrl_neutral2", "ctrl_neutral2.aq1"]: + entities.append( + XiaomiGenericSwitch( + device, + "Wall Switch Left", + "channel_0", + False, + gateway, + config_entry, ) - devices.append( - XiaomiGenericSwitch( - device, "Wall Switch Right", "channel_1", False, gateway - ) + ) + entities.append( + XiaomiGenericSwitch( + device, + "Wall Switch Right", + "channel_1", + False, + gateway, + config_entry, ) - elif model in ["ctrl_ln2", "ctrl_ln2.aq1"]: - devices.append( - XiaomiGenericSwitch( - device, "Wall Switch LN Left", "channel_0", False, gateway - ) + ) + elif model in ["ctrl_ln2", "ctrl_ln2.aq1"]: + entities.append( + XiaomiGenericSwitch( + device, + "Wall Switch LN Left", + "channel_0", + False, + gateway, + config_entry, ) - devices.append( - XiaomiGenericSwitch( - device, "Wall Switch LN Right", "channel_1", False, gateway - ) + ) + entities.append( + XiaomiGenericSwitch( + device, + "Wall Switch LN Right", + "channel_1", + False, + gateway, + config_entry, ) - elif model in ["86plug", "ctrl_86plug", "ctrl_86plug.aq1"]: - if "proto" not in device or int(device["proto"][0:1]) == 1: - data_key = "status" - else: - data_key = "channel_0" - devices.append( - XiaomiGenericSwitch(device, "Wall Plug", data_key, True, gateway) + ) + elif model in ["86plug", "ctrl_86plug", "ctrl_86plug.aq1"]: + if "proto" not in device or int(device["proto"][0:1]) == 1: + data_key = "status" + else: + data_key = "channel_0" + entities.append( + XiaomiGenericSwitch( + device, "Wall Plug", data_key, True, gateway, config_entry ) - add_entities(devices) + ) + async_add_entities(entities) class 
XiaomiGenericSwitch(XiaomiDevice, SwitchEntity): """Representation of a XiaomiPlug.""" - def __init__(self, device, name, data_key, supports_power_consumption, xiaomi_hub): + def __init__( + self, + device, + name, + data_key, + supports_power_consumption, + xiaomi_hub, + config_entry, + ): """Initialize the XiaomiPlug.""" self._data_key = data_key self._in_use = None self._load_power = None self._power_consumed = None self._supports_power_consumption = supports_power_consumption - XiaomiDevice.__init__(self, device, name, xiaomi_hub) + super().__init__(device, name, xiaomi_hub, config_entry) @property def icon(self): diff --git a/homeassistant/components/xiaomi_aqara/translations/ca.json b/homeassistant/components/xiaomi_aqara/translations/ca.json new file mode 100644 index 00000000000..c3422a85efb --- /dev/null +++ b/homeassistant/components/xiaomi_aqara/translations/ca.json @@ -0,0 +1,25 @@ +{ + "config": { + "abort": { + "already_configured": "El dispositiu ja est\u00e0 configurat" + }, + "error": { + "invalid_interface": "Interf\u00edcie de xarxa no v\u00e0lida", + "invalid_key": "Clau de la passarel\u00b7la no v\u00e0lida" + }, + "flow_title": "Passarel\u00b7la Xiaomi Aqara: {name}", + "step": { + "select": { + "data": { + "select_ip": "IP de la passarel\u00b7la" + } + }, + "settings": { + "data": { + "key": "Clau de la passarel\u00b7la", + "name": "Nom de la passarel\u00b7la" + } + } + } + } +} \ No newline at end of file diff --git a/homeassistant/components/xiaomi_aqara/translations/en.json b/homeassistant/components/xiaomi_aqara/translations/en.json new file mode 100644 index 00000000000..7b801e33089 --- /dev/null +++ b/homeassistant/components/xiaomi_aqara/translations/en.json @@ -0,0 +1,40 @@ +{ + "config": { + "abort": { + "already_configured": "Device is already configured", + "already_in_progress": "Config flow for this gateway is already in progress", + "not_xiaomi_aqara": "Not a Xiaomi Aqara Gateway, discovered device did not match known gateways" + }, + "error": { + "discovery_error": "Failed to discover a Xiaomi Aqara Gateway, try using the IP of the device running HomeAssistant as interface", + "invalid_interface": "Invalid network interface", + "invalid_key": "Invalid gateway key", + "not_found_error": "Zeroconf discovered Gateway could not be located to get the necessary information, try using the IP of the device running HomeAssistant as interface" + }, + "flow_title": "Xiaomi Aqara Gateway: {name}", + "step": { + "select": { + "data": { + "select_ip": "Gateway IP" + }, + "description": "Run the setup again if you want to connect aditional gateways", + "title": "Select the Xiaomi Aqara Gateway that you wish to connect" + }, + "settings": { + "data": { + "key": "The key of your gateway", + "name": "Name of the Gateway" + }, + "description": "The key (password) can be retrieved using this tutorial: https://www.domoticz.com/wiki/Xiaomi_Gateway_(Aqara)#Adding_the_Xiaomi_Gateway_to_Domoticz. 
If the key is not provided only sensors will be accessible", + "title": "Xiaomi Aqara Gateway, optional settings" + }, + "user": { + "data": { + "interface": "The network interface to use" + }, + "description": "Connect to your Xiaomi Aqara Gateway", + "title": "Xiaomi Aqara Gateway" + } + } + } +} \ No newline at end of file diff --git a/homeassistant/components/xiaomi_aqara/translations/es.json b/homeassistant/components/xiaomi_aqara/translations/es.json new file mode 100644 index 00000000000..9d388203bcb --- /dev/null +++ b/homeassistant/components/xiaomi_aqara/translations/es.json @@ -0,0 +1,40 @@ +{ + "config": { + "abort": { + "already_configured": "El dispositivo ya est\u00e1 configurado", + "already_in_progress": "El flujo de configuraci\u00f3n para este gateway ya est\u00e1 en marcha.", + "not_xiaomi_aqara": "No es un Xiaomi Aqara Gateway, el dispositivo descubierto no coincide con los gateways conocidos" + }, + "error": { + "discovery_error": "No se pudo descubrir un Xiaomi Aqara Gateway, intenta utilizar la IP del dispositivo que ejecuta HomeAssistant como interfaz", + "invalid_interface": "Interfaz de red inv\u00e1lida", + "invalid_key": "Clave del gateway inv\u00e1lida", + "not_found_error": "El Gateway descubierto por Zeroconf no puede localizarse para obtener toda la informaci\u00f3n necesaria, intenta usar la IP del dispositivo que ejecuta HomeAssistant como interfaz" + }, + "flow_title": "Xiaomi Aqara Gateway: {name}", + "step": { + "select": { + "data": { + "select_ip": "IP del gateway" + }, + "description": "Ejecuta la configuraci\u00f3n de nuevo si deseas conectar gateways adicionales", + "title": "Selecciona el Xiaomi Aqara Gateway que quieres conectar" + }, + "settings": { + "data": { + "key": "La clave de tu gateway", + "name": "Nombre del Gateway" + }, + "description": "La clave (contrase\u00f1a) se puede obtener con este tutorial: https://www.domoticz.com/wiki/Xiaomi_Gateway_(Aqara)#Adding_the_Xiaomi_Gateway_to_Domoticz. 
Si no se proporciona la clave solo se podr\u00e1 acceder a los sensores", + "title": "Xiaomi Aqara Gateway, configuraciones opcionales" + }, + "user": { + "data": { + "interface": "La interfaz de la red a usar" + }, + "description": "Conectar con tu Xiaomi Aqara Gateway", + "title": "Xiaomi Aqara Gateway" + } + } + } +} \ No newline at end of file diff --git a/homeassistant/components/xiaomi_aqara/translations/no.json b/homeassistant/components/xiaomi_aqara/translations/no.json new file mode 100644 index 00000000000..fd89ad26f93 --- /dev/null +++ b/homeassistant/components/xiaomi_aqara/translations/no.json @@ -0,0 +1,39 @@ +{ + "config": { + "abort": { + "already_in_progress": "Konfigurasjonsflyt for denne porten p\u00e5g\u00e5r allerede", + "not_xiaomi_aqara": "Ikke en Xiaomi Aqara Gateway, oppdaget enhet ikke samsvarer med kjente gatewayer" + }, + "error": { + "discovery_error": "Kunne ikke oppdage en Xiaomi Aqara Gateway, pr\u00f8v \u00e5 bruke IP-adressen til enheten som kj\u00f8rer HomeAssistant som grensesnitt", + "invalid_interface": "Ugyldig nettverksgrensesnitt", + "invalid_key": "Ugyldig gateway-n\u00f8kkel", + "not_found_error": "Zeroconf oppdaget Gateway kunne ikke v\u00e6re plassert for \u00e5 f\u00e5 den n\u00f8dvendige informasjonen, kan du pr\u00f8ve \u00e5 bruke IP-adressen til enheten som kj\u00f8rer HomeAssistant som grensesnitt" + }, + "flow_title": "", + "step": { + "select": { + "data": { + "select_ip": "" + }, + "description": "Kj\u00f8r oppsettet igjen hvis du vil koble til tilleggsportaler", + "title": "Velg Xiaomi Aqara Gateway som du \u00f8nsker \u00e5 koble til" + }, + "settings": { + "data": { + "key": "N\u00f8kkelen til gatewayen", + "name": "Navnet p\u00e5 gatewayen" + }, + "description": "N\u00f8kkelen (passordet) kan hentes ved hjelp av denne veiviseren: [https://www.domoticz.com/wiki/Xiaomi_Gateway_(Aqara)#Adding_the_Xiaomi_Gateway_to_Domoticz](https://www.domoticz.com/wiki/Xiaomi_Gateway_(Aqara)#Adding_the_Xiaomi_Gateway_to_Domoticz). Hvis n\u00f8kkelen ikke oppgis, vil bare sensorer bli tilgjengelige", + "title": "Xiaomi Aqara Gateway, valgfrie innstillinger" + }, + "user": { + "data": { + "interface": "Nettverksgrensesnittet som skal brukes" + }, + "description": "Koble til Xiaomi Aqara Gateway", + "title": "" + } + } + } +} \ No newline at end of file diff --git a/homeassistant/components/xiaomi_aqara/translations/ru.json b/homeassistant/components/xiaomi_aqara/translations/ru.json new file mode 100644 index 00000000000..5a088f0783c --- /dev/null +++ b/homeassistant/components/xiaomi_aqara/translations/ru.json @@ -0,0 +1,40 @@ +{ + "config": { + "abort": { + "already_configured": "\u041d\u0430\u0441\u0442\u0440\u043e\u0439\u043a\u0430 \u044d\u0442\u043e\u0433\u043e \u0443\u0441\u0442\u0440\u043e\u0439\u0441\u0442\u0432\u0430 \u0443\u0436\u0435 \u0432\u044b\u043f\u043e\u043b\u043d\u0435\u043d\u0430.", + "already_in_progress": "\u041d\u0430\u0441\u0442\u0440\u043e\u0439\u043a\u0430 \u044d\u0442\u043e\u0433\u043e \u0443\u0441\u0442\u0440\u043e\u0439\u0441\u0442\u0432\u0430 \u0443\u0436\u0435 \u0432\u044b\u043f\u043e\u043b\u043d\u044f\u0435\u0442\u0441\u044f.", + "not_xiaomi_aqara": "\u042d\u0442\u043e \u043d\u0435 \u0448\u043b\u044e\u0437 Xiaomi Aqara. \u0423\u0441\u0442\u0440\u043e\u0439\u0441\u0442\u0432\u043e \u043d\u0435 \u0441\u043e\u043e\u0442\u0432\u0435\u0442\u0441\u0442\u0432\u0443\u0435\u0442 \u0438\u0437\u0432\u0435\u0441\u0442\u043d\u044b\u043c \u0448\u043b\u044e\u0437\u0430\u043c." 
+ }, + "error": { + "discovery_error": "\u041d\u0435 \u0443\u0434\u0430\u043b\u043e\u0441\u044c \u043e\u0431\u043d\u0430\u0440\u0443\u0436\u0438\u0442\u044c \u0448\u043b\u044e\u0437 Xiaomi Aqara, \u043f\u043e\u043f\u0440\u043e\u0431\u0443\u0439\u0442\u0435 \u0438\u0441\u043f\u043e\u043b\u044c\u0437\u043e\u0432\u0430\u0442\u044c IP-\u0430\u0434\u0440\u0435\u0441 \u0443\u0441\u0442\u0440\u043e\u0439\u0441\u0442\u0432\u0430 \u0441 HomeAssistant \u0432 \u043a\u0430\u0447\u0435\u0441\u0442\u0432\u0435 \u0438\u043d\u0442\u0435\u0440\u0444\u0435\u0439\u0441\u0430.", + "invalid_interface": "\u041d\u0435\u0432\u0435\u0440\u043d\u044b\u0439 \u0441\u0435\u0442\u0435\u0432\u043e\u0439 \u0438\u043d\u0442\u0435\u0440\u0444\u0435\u0439\u0441.", + "invalid_key": "\u041d\u0435\u0432\u0435\u0440\u043d\u044b\u0439 \u043a\u043b\u044e\u0447 \u0448\u043b\u044e\u0437\u0430.", + "not_found_error": "\u041d\u0435 \u0443\u0434\u0430\u043b\u043e\u0441\u044c \u043f\u043e\u043b\u0443\u0447\u0438\u0442\u044c \u043d\u0435\u043e\u0431\u0445\u043e\u0434\u0438\u043c\u0443\u044e \u0438\u043d\u0444\u043e\u0440\u043c\u0430\u0446\u0438\u044e \u043e\u0442 \u043e\u0431\u043d\u0430\u0440\u0443\u0436\u0435\u043d\u043d\u043e\u0433\u043e \u0448\u043b\u044e\u0437\u0430, \u043f\u043e\u043f\u0440\u043e\u0431\u0443\u0439\u0442\u0435 \u0438\u0441\u043f\u043e\u043b\u044c\u0437\u043e\u0432\u0430\u0442\u044c IP-\u0430\u0434\u0440\u0435\u0441 \u0443\u0441\u0442\u0440\u043e\u0439\u0441\u0442\u0432\u0430 \u0441 HomeAssistant \u0432 \u043a\u0430\u0447\u0435\u0441\u0442\u0432\u0435 \u0438\u043d\u0442\u0435\u0440\u0444\u0435\u0439\u0441\u0430." + }, + "flow_title": "Xiaomi Aqara Gateway: {name}", + "step": { + "select": { + "data": { + "select_ip": "IP-\u0430\u0434\u0440\u0435\u0441 \u0448\u043b\u044e\u0437\u0430" + }, + "description": "\u0417\u0430\u043f\u0443\u0441\u0442\u0438\u0442\u0435 \u043d\u0430\u0441\u0442\u0440\u043e\u0439\u043a\u0443 \u0435\u0449\u0451 \u0440\u0430\u0437, \u0435\u0441\u043b\u0438 \u0412\u044b \u0445\u043e\u0442\u0438\u0442\u0435 \u0434\u043e\u0431\u0430\u0432\u0438\u0442\u044c \u0435\u0449\u0451 \u043e\u0434\u0438\u043d \u0448\u043b\u044e\u0437", + "title": "\u0412\u044b\u0431\u0435\u0440\u0438\u0442\u0435 \u0448\u043b\u044e\u0437 Xiaomi Aqara" + }, + "settings": { + "data": { + "key": "\u041a\u043b\u044e\u0447", + "name": "\u041d\u0430\u0437\u0432\u0430\u043d\u0438\u0435" + }, + "description": "\u041a\u043b\u044e\u0447 (\u043f\u0430\u0440\u043e\u043b\u044c) \u043c\u043e\u0436\u043d\u043e \u043f\u043e\u043b\u0443\u0447\u0438\u0442\u044c \u0441 \u043f\u043e\u043c\u043e\u0449\u044c\u044e \u044d\u0442\u043e\u0433\u043e \u0440\u0443\u043a\u043e\u0432\u043e\u0434\u0441\u0442\u0432\u0430: https://www.domoticz.com/wiki/Xiaomi_Gateway_(Aqara)#Adding_the_Xiaomi_Gateway_to_Domoticz. 
\u0415\u0441\u043b\u0438 \u043a\u043b\u044e\u0447 \u043d\u0435 \u0443\u043a\u0430\u0437\u0430\u043d, \u0431\u0443\u0434\u0443\u0442 \u0434\u043e\u0441\u0442\u0443\u043f\u043d\u044b \u0442\u043e\u043b\u044c\u043a\u043e \u0434\u0430\u0442\u0447\u0438\u043a\u0438.", + "title": "Xiaomi Aqara Gateway" + }, + "user": { + "data": { + "interface": "\u0421\u0435\u0442\u0435\u0432\u043e\u0439 \u0438\u043d\u0442\u0435\u0440\u0444\u0435\u0439\u0441" + }, + "description": "\u041d\u0430\u0441\u0442\u0440\u043e\u0439\u0442\u0435 Home Assistant \u0434\u043b\u044f \u0438\u043d\u0442\u0435\u0433\u0440\u0430\u0446\u0438\u0438 \u0441 Xiaomi Aqara Gateway.", + "title": "Xiaomi Aqara Gateway" + } + } + } +} \ No newline at end of file diff --git a/homeassistant/components/xiaomi_aqara/translations/zh-Hant.json b/homeassistant/components/xiaomi_aqara/translations/zh-Hant.json new file mode 100644 index 00000000000..99b677ddd7d --- /dev/null +++ b/homeassistant/components/xiaomi_aqara/translations/zh-Hant.json @@ -0,0 +1,40 @@ +{ + "config": { + "abort": { + "already_configured": "\u8a2d\u5099\u5df2\u7d93\u8a2d\u5b9a\u5b8c\u6210", + "already_in_progress": "\u7db2\u95dc\u8a2d\u5b9a\u5df2\u7d93\u9032\u884c\u4e2d", + "not_xiaomi_aqara": "\u4e26\u975e\u5c0f\u7c73 Aqara \u7db2\u95dc\uff0c\u6240\u63a2\u7d22\u4e4b\u8a2d\u5099\u8207\u5df2\u77e5\u7db2\u95dc\u4e0d\u7b26\u5408" + }, + "error": { + "discovery_error": "\u63a2\u7d22\u5c0f\u7c73 Aqara \u7db2\u95dc\u5931\u6557\uff0c\u8acb\u5617\u8a66\u4f7f\u7528\u57f7\u884c Home Assistant \u8a2d\u5099\u7684 IP \u4f5c\u70ba\u4ecb\u9762", + "invalid_interface": "\u7db2\u8def\u4ecb\u9762\u7121\u6548", + "invalid_key": "\u7db2\u95dc\u5bc6\u9470\u7121\u6548", + "not_found_error": "Zeroconf \u6240\u63a2\u7d22\u7684\u7db2\u95dc\u7121\u6cd5\u53d6\u5f97\u5fc5\u8981\u7684\u8cc7\u8a0a\uff0c\u8acb\u5617\u8a66\u4f7f\u7528\u57f7\u884c Home Assistant \u7684\u8a2d\u5099 IP \u4f5c\u70ba\u4ecb\u9762" + }, + "flow_title": "\u5c0f\u7c73 Aqara \u7db2\u95dc\uff1a{name}", + "step": { + "select": { + "data": { + "select_ip": "\u7db2\u95dc IP" + }, + "description": "\u5982\u679c\u9084\u6709\u5176\u4ed6\u7db2\u95dc\u9700\u8981\u9023\u7dda\uff0c\u8acb\u518d\u57f7\u884c\u4e00\u6b21\u8a2d\u5b9a", + "title": "\u9078\u64c7\u6240\u8981\u9023\u7dda\u7684\u5c0f\u7c73 Aqara \u7db2\u95dc" + }, + "settings": { + "data": { + "key": "\u7db2\u95dc\u5bc6\u9470", + "name": "\u7db2\u95dc\u540d\u7a31" + }, + "description": "\u5bc6\u9470\uff08\u5bc6\u78bc\uff09\u53d6\u5f97\u8acb\u53c3\u8003\u4e0b\u65b9\u6559\u5b78\uff1ahttps://www.domoticz.com/wiki/Xiaomi_Gateway_(Aqara)#Adding_the_Xiaomi_Gateway_to_Domoticz\u3002\u5047\u5982\u672a\u63d0\u4f9b\u5bc6\u9470\u3001\u5247\u50c5\u6703\u6536\u5230\u50b3\u611f\u5668\u8a2d\u5099\u7684\u8cc7\u8a0a\u3002\uff3c", + "title": "\u5c0f\u7c73 Aqara \u7db2\u95dc\u9078\u9805\u8a2d\u5b9a" + }, + "user": { + "data": { + "interface": "\u4f7f\u7528\u7684\u7db2\u8def\u4ecb\u9762" + }, + "description": "\u9023\u7dda\u81f3\u5c0f\u7c73 Aqara \u7db2\u95dc", + "title": "\u5c0f\u7c73 Aqara \u7db2\u95dc" + } + } + } +} \ No newline at end of file diff --git a/homeassistant/components/xiaomi_miio/const.py b/homeassistant/components/xiaomi_miio/const.py index 77f398aa3ad..1e8e7de9ef9 100644 --- a/homeassistant/components/xiaomi_miio/const.py +++ b/homeassistant/components/xiaomi_miio/const.py @@ -48,7 +48,9 @@ SERVICE_MOVE_REMOTE_CONTROL = "vacuum_remote_control_move" SERVICE_MOVE_REMOTE_CONTROL_STEP = "vacuum_remote_control_move_step" SERVICE_START_REMOTE_CONTROL = "vacuum_remote_control_start" 
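Taken together, the xiaomi_aqara platform hunks above (lock, sensor, switch) converge on the same config-entry shape: async_setup_entry looks up the one gateway that belongs to the config entry and hands both the gateway and the entry to each entity constructor. The condensed sketch below is illustrative only and not part of the patch; XiaomiExampleEntity is a placeholder for the real platform classes, while DOMAIN and GATEWAYS_KEY are the constants imported from .const in the hunks above.

# Illustrative sketch, not part of the patch: the shared shape of the
# async_setup_entry functions added above.
from .const import DOMAIN, GATEWAYS_KEY


class XiaomiExampleEntity:
    """Placeholder for the real classes (XiaomiAqaraLock, XiaomiSensor, ...)."""

    def __init__(self, device, name, xiaomi_hub, config_entry):
        """Store the arguments the real XiaomiDevice base class now expects."""
        self._device = device
        self._name = name
        self._gateway = xiaomi_hub
        self._config_entry = config_entry


async def async_setup_entry(hass, config_entry, async_add_entities):
    """Set up entities for the gateway that belongs to this config entry."""
    # Exactly one gateway object is stored per config entry id.
    gateway = hass.data[DOMAIN][GATEWAYS_KEY][config_entry.entry_id]
    entities = [
        XiaomiExampleEntity(device, "Lock", gateway, config_entry)
        for device in gateway.devices["lock"]
        if device["model"] == "lock.aq1"
    ]
    async_add_entities(entities)

The extra config_entry argument threaded into XiaomiDevice.__init__ presumably ties each entity to its own gateway entry, replacing the old global hass.data[PY_XIAOMI_GATEWAY].gateways lookup that iterated over every configured gateway.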
SERVICE_STOP_REMOTE_CONTROL = "vacuum_remote_control_stop" +SERVICE_CLEAN_SEGMENT = "vacuum_clean_segment" SERVICE_CLEAN_ZONE = "vacuum_clean_zone" +SERVICE_GOTO = "vacuum_goto" # AirQuality Model MODEL_AIRQUALITYMONITOR_V1 = "zhimi.airmonitor.v1" diff --git a/homeassistant/components/xiaomi_miio/manifest.json b/homeassistant/components/xiaomi_miio/manifest.json index e1ead8d966c..c9e948a1300 100644 --- a/homeassistant/components/xiaomi_miio/manifest.json +++ b/homeassistant/components/xiaomi_miio/manifest.json @@ -3,7 +3,7 @@ "name": "Xiaomi Miio", "config_flow": true, "documentation": "https://www.home-assistant.io/integrations/xiaomi_miio", - "requirements": ["construct==2.9.45", "python-miio==0.5.0.1"], + "requirements": ["construct==2.9.45", "python-miio==0.5.1"], "codeowners": ["@rytilahti", "@syssi"], "zeroconf": ["_miio._udp.local."] } diff --git a/homeassistant/components/xiaomi_miio/services.yaml b/homeassistant/components/xiaomi_miio/services.yaml index a92e46f11a1..c61b7f37f22 100644 --- a/homeassistant/components/xiaomi_miio/services.yaml +++ b/homeassistant/components/xiaomi_miio/services.yaml @@ -330,3 +330,26 @@ vacuum_clean_zone: repeats: description: Number of cleaning repeats for each zone between 1 and 3. example: "1" + +vacuum_goto: + description: Go to the specified coordinates. + fields: + entity_id: + description: Name of the vacuum entity. + example: "vacuum.xiaomi_vacuum_cleaner" + x_coord: + description: x-coordinate. + example: 27500 + y_coord: + description: y-coordinate. + example: 32000 + +vacuum_clean_segment: + description: Start cleaning of the specified segment(s). + fields: + entity_id: + description: Name of the vacuum entity. + example: "vacuum.xiaomi_vacuum_cleaner" + segments: + description: Segments. + example: "[1,2]" diff --git a/homeassistant/components/xiaomi_miio/translations/ca.json b/homeassistant/components/xiaomi_miio/translations/ca.json index 81dc08731c3..9bd8f91c947 100644 --- a/homeassistant/components/xiaomi_miio/translations/ca.json +++ b/homeassistant/components/xiaomi_miio/translations/ca.json @@ -8,6 +8,7 @@ "connect_error": "[%key::common::config_flow::error::cannot_connect%]", "no_device_selected": "No hi ha cap dispositiu seleccionat, selecciona'n un." }, + "flow_title": "Xiaomi Miio: {name}", "step": { "gateway": { "data": { @@ -15,7 +16,7 @@ "name": "Nom de la passarel\u00b7la", "token": "Token de l'API" }, - "description": "Necessitar\u00e0s el token de l'API, consulta les instruccions a https://www.home-assistant.io/integrations/vacuum.xiaomi_miio/#retrieving-the-access-token.", + "description": "Necessitar\u00e0s el token de l'API de 32 car\u00e0cters, consulta les instruccions a https://www.home-assistant.io/integrations/vacuum.xiaomi_miio/#retrieving-the-access-token. Tingues en compte que aquest token \u00e9s diferent a la clau utilitzada per la integraci\u00f3 Xiaomi Aqara.", "title": "Connexi\u00f3 amb la passarel\u00b7la de Xiaomi" }, "user": { diff --git a/homeassistant/components/xiaomi_miio/translations/de.json b/homeassistant/components/xiaomi_miio/translations/de.json index 1ce000e4674..6ec92566ade 100644 --- a/homeassistant/components/xiaomi_miio/translations/de.json +++ b/homeassistant/components/xiaomi_miio/translations/de.json @@ -15,7 +15,7 @@ "name": "Name des Gateways", "token": "API-Token" }, - "description": "Sie ben\u00f6tigen das API-Token. 
Anweisungen finden Sie unter https://www.home-assistant.io/integrations/vacuum.xiaomi_miio/#retrieving-the-access-token.", + "description": "Sie ben\u00f6tigen den 32 Zeichen langen API-Token. Anweisungen finden Sie unter https://www.home-assistant.io/integrations/vacuum.xiaomi_miio/#retrieving-the-access-token.", "title": "Stellen Sie eine Verbindung zu einem Xiaomi Gateway her" }, "user": { diff --git a/homeassistant/components/xiaomi_miio/translations/en.json b/homeassistant/components/xiaomi_miio/translations/en.json index dc271b1030b..ce57c18f523 100644 --- a/homeassistant/components/xiaomi_miio/translations/en.json +++ b/homeassistant/components/xiaomi_miio/translations/en.json @@ -1,6 +1,5 @@ { "config": { - "flow_title": "Xiaomi Miio: {name}", "abort": { "already_configured": "Device is already configured", "already_in_progress": "Config flow for this Xiaomi Miio device is already in progress." @@ -29,4 +28,4 @@ } } } -} +} \ No newline at end of file diff --git a/homeassistant/components/xiaomi_miio/translations/es.json b/homeassistant/components/xiaomi_miio/translations/es.json index 41428edd02f..e9d28698760 100644 --- a/homeassistant/components/xiaomi_miio/translations/es.json +++ b/homeassistant/components/xiaomi_miio/translations/es.json @@ -2,12 +2,13 @@ "config": { "abort": { "already_configured": "El dispositivo ya est\u00e1 configurado", - "already_in_progress": "El flujo de configuraci\u00f3n para este dispositivo Xiaomi Miio ya est\u00e1 en progreso." + "already_in_progress": "El flujo de configuraci\u00f3n para este dispositivo Xiaomi Miio ya est\u00e1 en marcha." }, "error": { "connect_error": "No se ha podido conectar", "no_device_selected": "No se ha seleccionado ning\u00fan dispositivo, por favor, seleccione un dispositivo." }, + "flow_title": "Xiaomi Miio: {name}", "step": { "gateway": { "data": { diff --git a/homeassistant/components/xiaomi_miio/translations/fr.json b/homeassistant/components/xiaomi_miio/translations/fr.json index c494456fc68..9cd89ee7f68 100644 --- a/homeassistant/components/xiaomi_miio/translations/fr.json +++ b/homeassistant/components/xiaomi_miio/translations/fr.json @@ -7,6 +7,7 @@ "connect_error": "Impossible de se connecter, veuillez r\u00e9essayer", "no_device_selected": "Aucun appareil s\u00e9lectionn\u00e9, veuillez s\u00e9lectionner un appareil." }, + "flow_title": "Xiaomi Miio: {name}", "step": { "gateway": { "data": { diff --git a/homeassistant/components/xiaomi_miio/translations/it.json b/homeassistant/components/xiaomi_miio/translations/it.json index dae9a8dbbc1..3514d759926 100644 --- a/homeassistant/components/xiaomi_miio/translations/it.json +++ b/homeassistant/components/xiaomi_miio/translations/it.json @@ -8,6 +8,7 @@ "connect_error": "Impossibile connettersi", "no_device_selected": "Nessun dispositivo selezionato, selezionare un dispositivo." }, + "flow_title": "Xiaomi Miio: {name}", "step": { "gateway": { "data": { @@ -15,7 +16,7 @@ "name": "Nome del Gateway", "token": "Token API" }, - "description": "Sar\u00e0 necessario il token API, consultare https://www.home-assistant.io/integrations/vacuum.xiaomi_miio/#retrieving-the-access-token per istruzioni.", + "description": "\u00c8 necessario il Token API a 32 caratteri, vedere https://www.home-assistant.io/integrations/vacuum.xiaomi_miio/#retrieving-the-access-token per le istruzioni. 
Si prega di notare che questo token \u00e8 diverso dalla chiave utilizzata dall'integrazione di Xiaomi Aqara.", "title": "Connessione a un Xiaomi Gateway " }, "user": { diff --git a/homeassistant/components/xiaomi_miio/translations/ko.json b/homeassistant/components/xiaomi_miio/translations/ko.json index 3a1e6574915..3357e5d91d3 100644 --- a/homeassistant/components/xiaomi_miio/translations/ko.json +++ b/homeassistant/components/xiaomi_miio/translations/ko.json @@ -8,6 +8,7 @@ "connect_error": "\uc5f0\uacb0\ud558\uc9c0 \ubabb\ud588\uc2b5\ub2c8\ub2e4", "no_device_selected": "\uc120\ud0dd\ub41c \uae30\uae30\uac00 \uc5c6\uc2b5\ub2c8\ub2e4. \uae30\uae30\ub97c \uc120\ud0dd\ud574\uc8fc\uc138\uc694." }, + "flow_title": "Xiaomi Miio: {name}", "step": { "gateway": { "data": { @@ -15,7 +16,7 @@ "name": "\uac8c\uc774\ud2b8\uc6e8\uc774 \uc774\ub984", "token": "API \ud1a0\ud070" }, - "description": "API \ud1a0\ud070\uc774 \ud544\uc694\ud569\ub2c8\ub2e4. [\uc548\ub0b4](https://www.home-assistant.io/integrations/vacuum.xiaomi_miio/#retrieving-the-access-token) \ub97c \ucc38\uc870\ud574\uc8fc\uc138\uc694.", + "description": "32 \ubb38\uc790\uc758 API \ud1a0\ud070\uc774 \ud544\uc694\ud569\ub2c8\ub2e4. [\uc548\ub0b4](https://www.home-assistant.io/integrations/vacuum.xiaomi_miio/#retrieving-the-access-token) \ub97c \ucc38\uc870\ud574\uc8fc\uc138\uc694. \uc774 \ud1a0\ud070\uc740 Xiaomi Aqara \ud1b5\ud569 \uad6c\uc131\uc694\uc18c\uc5d0\uc11c \uc0ac\uc6a9\ub418\ub294 \ud0a4\uc640 \ub2e4\ub985\ub2c8\ub2e4.", "title": "Xiaomi \uac8c\uc774\ud2b8\uc6e8\uc774\uc5d0 \uc5f0\uacb0\ud558\uae30" }, "user": { diff --git a/homeassistant/components/xiaomi_miio/translations/lb.json b/homeassistant/components/xiaomi_miio/translations/lb.json index 3a55f0d8a2d..4eba640db1b 100644 --- a/homeassistant/components/xiaomi_miio/translations/lb.json +++ b/homeassistant/components/xiaomi_miio/translations/lb.json @@ -8,6 +8,7 @@ "connect_error": "Feeler beim verbannen", "no_device_selected": "Keen Apparat ausgewielt, wiel een Apparat aus w.e.g." }, + "flow_title": "Xiaomi Miio: {name}", "step": { "gateway": { "data": { diff --git a/homeassistant/components/xiaomi_miio/translations/no.json b/homeassistant/components/xiaomi_miio/translations/no.json index a83cf030cc3..24e070323b3 100644 --- a/homeassistant/components/xiaomi_miio/translations/no.json +++ b/homeassistant/components/xiaomi_miio/translations/no.json @@ -7,6 +7,7 @@ "error": { "no_device_selected": "Ingen enhet valgt, vennligst velg en enhet." }, + "flow_title": "Xiaomi Miio: {navn}", "step": { "gateway": { "data": { @@ -14,7 +15,7 @@ "name": "Navnet p\u00e5 gatewayen", "token": "API-token" }, - "description": "Du trenger API-tilgangstoken, se [https://www.home-assistant.io/integrations/vacuum.xiaomi_miio/#retrieving-the-access-token](https://www.home-assistant.io/integrations/vacuum.xiaomi_miio/#retrieving-the-access-token) for instruksjoner.", + "description": "Du trenger API-tokenet p\u00e5 32 tegn, se https://www.home-assistant.io/integrations/vacuum.xiaomi_miio/#retrieving-the-access-token for instruksjoner. 
V\u00e6r oppmerksom p\u00e5 at dette tokenet er forskjellig fra n\u00f8kkelen som brukes av Xiaomi Aqara-integreringen.", "title": "Koble til en Xiaomi Gateway" }, "user": { diff --git a/homeassistant/components/xiaomi_miio/translations/pl.json b/homeassistant/components/xiaomi_miio/translations/pl.json index bd6dd1923d5..b4bd9a5546d 100644 --- a/homeassistant/components/xiaomi_miio/translations/pl.json +++ b/homeassistant/components/xiaomi_miio/translations/pl.json @@ -8,6 +8,7 @@ "connect_error": "Nie mo\u017cna nawi\u0105za\u0107 po\u0142\u0105czenia.", "no_device_selected": "Nie wybrano \u017cadnego urz\u0105dzenia, wybierz jedno urz\u0105dzenie." }, + "flow_title": "Xiaomi Miio: {name}", "step": { "gateway": { "data": { diff --git a/homeassistant/components/xiaomi_miio/translations/ru.json b/homeassistant/components/xiaomi_miio/translations/ru.json index edd4365f20b..b1b014b6de7 100644 --- a/homeassistant/components/xiaomi_miio/translations/ru.json +++ b/homeassistant/components/xiaomi_miio/translations/ru.json @@ -8,6 +8,7 @@ "connect_error": "\u041e\u0448\u0438\u0431\u043a\u0430 \u043f\u043e\u0434\u043a\u043b\u044e\u0447\u0435\u043d\u0438\u044f.", "no_device_selected": "\u0412\u044b\u0431\u0435\u0440\u0438\u0442\u0435 \u043e\u0434\u043d\u043e \u0438\u0437 \u0443\u0441\u0442\u0440\u043e\u0439\u0441\u0442\u0432." }, + "flow_title": "Xiaomi Miio: {name}", "step": { "gateway": { "data": { @@ -15,7 +16,7 @@ "name": "\u041d\u0430\u0437\u0432\u0430\u043d\u0438\u0435", "token": "\u0422\u043e\u043a\u0435\u043d API" }, - "description": "\u0414\u043b\u044f \u043f\u043e\u0434\u043a\u043b\u044e\u0447\u0435\u043d\u0438\u044f \u0442\u0440\u0435\u0431\u0443\u0435\u0442\u0441\u044f \u0442\u043e\u043a\u0435\u043d API. \u041e \u0442\u043e\u043c, \u043a\u0430\u043a \u043f\u043e\u043b\u0443\u0447\u0438\u0442\u044c \u0442\u043e\u043a\u0435\u043d, \u0412\u044b \u043c\u043e\u0436\u0435\u0442\u0435 \u0443\u0437\u043d\u0430\u0442\u044c \u0437\u0434\u0435\u0441\u044c: \nhttps://www.home-assistant.io/integrations/vacuum.xiaomi_miio/#retrieving-the-access-token.", + "description": "\u0414\u043b\u044f \u043f\u043e\u0434\u043a\u043b\u044e\u0447\u0435\u043d\u0438\u044f \u0442\u0440\u0435\u0431\u0443\u0435\u0442\u0441\u044f 32-\u0445 \u0437\u043d\u0430\u0447\u043d\u044b\u0439 \u0442\u043e\u043a\u0435\u043d API. 
\u041e \u0442\u043e\u043c, \u043a\u0430\u043a \u043f\u043e\u043b\u0443\u0447\u0438\u0442\u044c \u0442\u043e\u043a\u0435\u043d, \u0412\u044b \u043c\u043e\u0436\u0435\u0442\u0435 \u0443\u0437\u043d\u0430\u0442\u044c \u0437\u0434\u0435\u0441\u044c: \nhttps://www.home-assistant.io/integrations/vacuum.xiaomi_miio/#retrieving-the-access-token.\n\u041e\u0431\u0440\u0430\u0442\u0438\u0442\u0435 \u0432\u043d\u0438\u043c\u0430\u043d\u0438\u0435, \u0447\u0442\u043e \u044d\u0442\u043e\u0442 \u0442\u043e\u043a\u0435\u043d \u043e\u0442\u043b\u0438\u0447\u0430\u0435\u0442\u0441\u044f \u043e\u0442 \u043a\u043b\u044e\u0447\u0430, \u0438\u0441\u043f\u043e\u043b\u044c\u0437\u0443\u0435\u043c\u043e\u0433\u043e \u043f\u0440\u0438 \u0438\u043d\u0442\u0435\u0433\u0440\u0430\u0446\u0438\u0438 Xiaomi Aqara.", "title": "\u041f\u043e\u0434\u043a\u043b\u044e\u0447\u0435\u043d\u0438\u0435 \u043a \u0448\u043b\u044e\u0437\u0443 Xiaomi" }, "user": { diff --git a/homeassistant/components/xiaomi_miio/translations/zh-Hant.json b/homeassistant/components/xiaomi_miio/translations/zh-Hant.json index 83f8143b120..7b459c30803 100644 --- a/homeassistant/components/xiaomi_miio/translations/zh-Hant.json +++ b/homeassistant/components/xiaomi_miio/translations/zh-Hant.json @@ -8,6 +8,7 @@ "connect_error": "\u9023\u7dda\u5931\u6557", "no_device_selected": "\u672a\u9078\u64c7\u8a2d\u5099\uff0c\u8acb\u9078\u64c7\u4e00\u9805\u8a2d\u5099\u3002" }, + "flow_title": "Xiaomi Miio\uff1a{name}", "step": { "gateway": { "data": { @@ -15,7 +16,7 @@ "name": "\u7db2\u95dc\u540d\u7a31", "token": "API \u5bc6\u9470" }, - "description": "\u5c07\u9700\u8981\u8f38\u5165 API \u5bc6\u9470\uff0c\u8acb\u53c3\u95b1 https://www.home-assistant.io/integrations/vacuum.xiaomi_miio/#retrieving-the-access-token \u4ee5\u7372\u5f97\u7372\u53d6\u5bc6\u9470\u7684\u6559\u5b78\u3002", + "description": "\u5c07\u9700\u8981\u8f38\u5165 32 \u4f4d\u5b57\u5143 API \u5bc6\u9470\uff0c\u8acb\u53c3\u95b1 https://www.home-assistant.io/integrations/vacuum.xiaomi_miio/#retrieving-the-access-token \u4ee5\u7372\u5f97\u7372\u53d6\u5bc6\u9470\u7684\u6559\u5b78\u3002\u8acb\u6ce8\u610f\uff1a\u6b64\u5bc6\u9470\u8207 Xiaomi Aqara \u6574\u5408\u6240\u4f7f\u7528\u4e4b\u5bc6\u9470\u4e0d\u540c\u3002", "title": "\u9023\u7dda\u81f3\u5c0f\u7c73\u7db2\u95dc" }, "user": { diff --git a/homeassistant/components/xiaomi_miio/vacuum.py b/homeassistant/components/xiaomi_miio/vacuum.py index f37c22a38aa..106e8f9dfc4 100644 --- a/homeassistant/components/xiaomi_miio/vacuum.py +++ b/homeassistant/components/xiaomi_miio/vacuum.py @@ -1,5 +1,4 @@ """Support for the Xiaomi vacuum cleaner robot.""" -import asyncio from functools import partial import logging @@ -27,19 +26,14 @@ from homeassistant.components.vacuum import ( SUPPORT_STOP, StateVacuumEntity, ) -from homeassistant.const import ( - ATTR_ENTITY_ID, - CONF_HOST, - CONF_NAME, - CONF_TOKEN, - STATE_OFF, - STATE_ON, -) -import homeassistant.helpers.config_validation as cv +from homeassistant.const import CONF_HOST, CONF_NAME, CONF_TOKEN, STATE_OFF, STATE_ON +from homeassistant.helpers import config_validation as cv, entity_platform +from homeassistant.util.dt import as_utc from .const import ( - DOMAIN, + SERVICE_CLEAN_SEGMENT, SERVICE_CLEAN_ZONE, + SERVICE_GOTO, SERVICE_MOVE_REMOTE_CONTROL, SERVICE_MOVE_REMOTE_CONTROL_STEP, SERVICE_START_REMOTE_CONTROL, @@ -80,69 +74,7 @@ ATTR_RC_VELOCITY = "velocity" ATTR_STATUS = "status" ATTR_ZONE_ARRAY = "zone" ATTR_ZONE_REPEATER = "repeats" - -VACUUM_SERVICE_SCHEMA = vol.Schema({vol.Optional(ATTR_ENTITY_ID): 
cv.comp_entity_ids}) - -SERVICE_SCHEMA_REMOTE_CONTROL = VACUUM_SERVICE_SCHEMA.extend( - { - vol.Optional(ATTR_RC_VELOCITY): vol.All( - vol.Coerce(float), vol.Clamp(min=-0.29, max=0.29) - ), - vol.Optional(ATTR_RC_ROTATION): vol.All( - vol.Coerce(int), vol.Clamp(min=-179, max=179) - ), - vol.Optional(ATTR_RC_DURATION): cv.positive_int, - } -) - -SERVICE_SCHEMA_CLEAN_ZONE = VACUUM_SERVICE_SCHEMA.extend( - { - vol.Required(ATTR_ZONE_ARRAY): vol.All( - list, - [ - vol.ExactSequence( - [vol.Coerce(int), vol.Coerce(int), vol.Coerce(int), vol.Coerce(int)] - ) - ], - ), - vol.Required(ATTR_ZONE_REPEATER): vol.All( - vol.Coerce(int), vol.Clamp(min=1, max=3) - ), - } -) - -SERVICE_SCHEMA_CLEAN_ZONE = VACUUM_SERVICE_SCHEMA.extend( - { - vol.Required(ATTR_ZONE_ARRAY): vol.All( - list, - [ - vol.ExactSequence( - [vol.Coerce(int), vol.Coerce(int), vol.Coerce(int), vol.Coerce(int)] - ) - ], - ), - vol.Required(ATTR_ZONE_REPEATER): vol.All( - vol.Coerce(int), vol.Clamp(min=1, max=3) - ), - } -) - -SERVICE_TO_METHOD = { - SERVICE_START_REMOTE_CONTROL: {"method": "async_remote_control_start"}, - SERVICE_STOP_REMOTE_CONTROL: {"method": "async_remote_control_stop"}, - SERVICE_MOVE_REMOTE_CONTROL: { - "method": "async_remote_control_move", - "schema": SERVICE_SCHEMA_REMOTE_CONTROL, - }, - SERVICE_MOVE_REMOTE_CONTROL_STEP: { - "method": "async_remote_control_move_step", - "schema": SERVICE_SCHEMA_REMOTE_CONTROL, - }, - SERVICE_CLEAN_ZONE: { - "method": "async_clean_zone", - "schema": SERVICE_SCHEMA_CLEAN_ZONE, - }, -} +ATTR_TIMERS = "timers" SUPPORT_XIAOMI = ( SUPPORT_STATE @@ -194,39 +126,84 @@ async def async_setup_platform(hass, config, async_add_entities, discovery_info= async_add_entities([mirobo], update_before_add=True) - async def async_service_handler(service): - """Map services to methods on MiroboVacuum.""" - method = SERVICE_TO_METHOD.get(service.service) - params = { - key: value for key, value in service.data.items() if key != ATTR_ENTITY_ID - } - entity_ids = service.data.get(ATTR_ENTITY_ID) + platform = entity_platform.current_platform.get() - if entity_ids: - target_vacuums = [ - vac - for vac in hass.data[DATA_KEY].values() - if vac.entity_id in entity_ids - ] - else: - target_vacuums = hass.data[DATA_KEY].values() + platform.async_register_entity_service( + SERVICE_START_REMOTE_CONTROL, + {}, + MiroboVacuum.async_remote_control_start.__name__, + ) - update_tasks = [] - for vacuum in target_vacuums: - await getattr(vacuum, method["method"])(**params) + platform.async_register_entity_service( + SERVICE_STOP_REMOTE_CONTROL, + {}, + MiroboVacuum.async_remote_control_stop.__name__, + ) - for vacuum in target_vacuums: - update_coro = vacuum.async_update_ha_state(True) - update_tasks.append(update_coro) + platform.async_register_entity_service( + SERVICE_MOVE_REMOTE_CONTROL, + { + vol.Optional(ATTR_RC_VELOCITY): vol.All( + vol.Coerce(float), vol.Clamp(min=-0.29, max=0.29) + ), + vol.Optional(ATTR_RC_ROTATION): vol.All( + vol.Coerce(int), vol.Clamp(min=-179, max=179) + ), + vol.Optional(ATTR_RC_DURATION): cv.positive_int, + }, + MiroboVacuum.async_remote_control_move.__name__, + ) - if update_tasks: - await asyncio.wait(update_tasks) + platform.async_register_entity_service( + SERVICE_MOVE_REMOTE_CONTROL_STEP, + { + vol.Optional(ATTR_RC_VELOCITY): vol.All( + vol.Coerce(float), vol.Clamp(min=-0.29, max=0.29) + ), + vol.Optional(ATTR_RC_ROTATION): vol.All( + vol.Coerce(int), vol.Clamp(min=-179, max=179) + ), + vol.Optional(ATTR_RC_DURATION): cv.positive_int, + }, + 
MiroboVacuum.async_remote_control_move_step.__name__, + ) - for vacuum_service in SERVICE_TO_METHOD: - schema = SERVICE_TO_METHOD[vacuum_service].get("schema", VACUUM_SERVICE_SCHEMA) - hass.services.async_register( - DOMAIN, vacuum_service, async_service_handler, schema=schema - ) + platform.async_register_entity_service( + SERVICE_CLEAN_ZONE, + { + vol.Required(ATTR_ZONE_ARRAY): vol.All( + list, + [ + vol.ExactSequence( + [ + vol.Coerce(int), + vol.Coerce(int), + vol.Coerce(int), + vol.Coerce(int), + ] + ) + ], + ), + vol.Required(ATTR_ZONE_REPEATER): vol.All( + vol.Coerce(int), vol.Clamp(min=1, max=3) + ), + }, + MiroboVacuum.async_clean_zone.__name__, + ) + + platform.async_register_entity_service( + SERVICE_GOTO, + { + vol.Required("x_coord"): vol.Coerce(int), + vol.Required("y_coord"): vol.Coerce(int), + }, + MiroboVacuum.async_goto.__name__, + ) + platform.async_register_entity_service( + SERVICE_CLEAN_SEGMENT, + {vol.Required("segments"): vol.Any(vol.Coerce(int), [vol.Coerce(int)])}, + MiroboVacuum.async_clean_segment.__name__, + ) class MiroboVacuum(StateVacuumEntity): @@ -247,6 +224,8 @@ class MiroboVacuum(StateVacuumEntity): self._fan_speeds = None self._fan_speeds_reverse = None + self._timers = None + @property def name(self): """Return the name of the device.""" @@ -293,6 +272,18 @@ class MiroboVacuum(StateVacuumEntity): """Get the list of available fan speed steps of the vacuum cleaner.""" return list(self._fan_speeds) if self._fan_speeds else [] + @property + def timers(self): + """Get the list of added timers of the vacuum cleaner.""" + return [ + { + "enabled": timer.enabled, + "cron": timer.cron, + "next_schedule": as_utc(timer.next_schedule), + } + for timer in self._timers + ] + @property def device_state_attributes(self): """Return the specific state attributes of this vacuum cleaner.""" @@ -338,6 +329,9 @@ class MiroboVacuum(StateVacuumEntity): if self.vacuum_state.got_error: attrs[ATTR_ERROR] = self.vacuum_state.error + + if self.timers: + attrs[ATTR_TIMERS] = self.timers return attrs @property @@ -450,6 +444,26 @@ class MiroboVacuum(StateVacuumEntity): duration=duration, ) + async def async_goto(self, x_coord: int, y_coord: int): + """Go to the specified coordinates.""" + await self._try_command( + "Unable to send the vacuum cleaner to the specified coordinates: %s", + self._vacuum.goto, + x_coord=x_coord, + y_coord=y_coord, + ) + + async def async_clean_segment(self, segments): + """Clean the specified segment(s).""" + if isinstance(segments, int): + segments = [segments] + + await self._try_command( + "Unable to start cleaning of the specified segments: %s", + self._vacuum.segment_clean, + segments=segments, + ) + def update(self): """Fetch state from the device.""" try: @@ -464,6 +478,8 @@ class MiroboVacuum(StateVacuumEntity): self.last_clean = self._vacuum.last_clean_details() self.dnd_state = self._vacuum.dnd_status() + self._timers = self._vacuum.timer() + self._available = True except OSError as exc: _LOGGER.error("Got OSError while fetching the state: %s", exc) diff --git a/homeassistant/components/xs1/climate.py b/homeassistant/components/xs1/climate.py index c57c0857817..60b6ca3c7c9 100644 --- a/homeassistant/components/xs1/climate.py +++ b/homeassistant/components/xs1/climate.py @@ -119,5 +119,5 @@ class XS1ThermostatEntity(XS1DeviceEntity, ClimateEntity): async def async_update(self): """Also update the sensor when available.""" await super().async_update() - if self.sensor is None: + if self.sensor is not None: await
self.hass.async_add_executor_job(self.sensor.update) diff --git a/homeassistant/components/xs1/manifest.json b/homeassistant/components/xs1/manifest.json index e997953f7ac..164f571fade 100644 --- a/homeassistant/components/xs1/manifest.json +++ b/homeassistant/components/xs1/manifest.json @@ -2,6 +2,6 @@ "domain": "xs1", "name": "EZcontrol XS1", "documentation": "https://www.home-assistant.io/integrations/xs1", - "requirements": ["xs1-api-client==2.3.5"], + "requirements": ["xs1-api-client==3.0.0"], "codeowners": [] } diff --git a/homeassistant/components/yamaha/const.py b/homeassistant/components/yamaha/const.py index e2a0c5eceea..fea962938eb 100644 --- a/homeassistant/components/yamaha/const.py +++ b/homeassistant/components/yamaha/const.py @@ -1,3 +1,4 @@ """Constants for the Yamaha component.""" DOMAIN = "yamaha" SERVICE_ENABLE_OUTPUT = "enable_output" +SERVICE_SELECT_SCENE = "select_scene" diff --git a/homeassistant/components/yamaha/media_player.py b/homeassistant/components/yamaha/media_player.py index b26729c720e..196e6605eab 100644 --- a/homeassistant/components/yamaha/media_player.py +++ b/homeassistant/components/yamaha/media_player.py @@ -22,7 +22,6 @@ from homeassistant.components.media_player.const import ( SUPPORT_VOLUME_SET, ) from homeassistant.const import ( - ATTR_ENTITY_ID, CONF_HOST, CONF_NAME, STATE_IDLE, @@ -30,15 +29,17 @@ from homeassistant.const import ( STATE_ON, STATE_PLAYING, ) -import homeassistant.helpers.config_validation as cv +from homeassistant.helpers import config_validation as cv, entity_platform -from .const import DOMAIN, SERVICE_ENABLE_OUTPUT +from .const import SERVICE_ENABLE_OUTPUT, SERVICE_SELECT_SCENE _LOGGER = logging.getLogger(__name__) ATTR_ENABLED = "enabled" ATTR_PORT = "port" +ATTR_SCENE = "scene" + CONF_SOURCE_IGNORE = "source_ignore" CONF_SOURCE_NAMES = "source_names" CONF_ZONE_IGNORE = "zone_ignore" @@ -47,12 +48,6 @@ CONF_ZONE_NAMES = "zone_names" DATA_YAMAHA = "yamaha_known_receivers" DEFAULT_NAME = "Yamaha Receiver" -MEDIA_PLAYER_SCHEMA = vol.Schema({ATTR_ENTITY_ID: cv.comp_entity_ids}) - -ENABLE_OUTPUT_SCHEMA = MEDIA_PLAYER_SCHEMA.extend( - {vol.Required(ATTR_ENABLED): cv.boolean, vol.Required(ATTR_PORT): cv.string} -) - SUPPORT_YAMAHA = ( SUPPORT_VOLUME_SET | SUPPORT_VOLUME_MUTE @@ -79,78 +74,94 @@ PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( ) -def setup_platform(hass, config, add_entities, discovery_info=None): - """Set up the Yamaha platform.""" +class YamahaConfigInfo: + """Configuration Info for Yamaha Receivers.""" - # Keep track of configured receivers so that we don't end up - # discovering a receiver dynamically that we have static config - # for. Map each device from its zone_id to an instance since - # YamahaDevice is not hashable (thus not possible to add to a set). 
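The vacuum_goto and vacuum_clean_segment services declared in services.yaml above are registered as entity services on the Xiaomi Miio vacuum platform, so they are invoked like any other service call. The snippet below is a hedged usage sketch, not part of the patch; vacuum.xiaomi_vacuum_cleaner is the placeholder entity id from services.yaml, and the coroutine is assumed to run inside code that already holds a hass reference (for example a custom component).

# Illustrative usage sketch, not part of the patch.
async def tidy_kitchen(hass):
    """Send the vacuum to fixed map coordinates, then clean segments 1 and 2."""
    await hass.services.async_call(
        "xiaomi_miio",
        "vacuum_goto",
        {
            "entity_id": "vacuum.xiaomi_vacuum_cleaner",
            "x_coord": 27500,
            "y_coord": 32000,
        },
        blocking=True,
    )
    await hass.services.async_call(
        "xiaomi_miio",
        "vacuum_clean_segment",
        {"entity_id": "vacuum.xiaomi_vacuum_cleaner", "segments": [1, 2]},
        blocking=True,
    )

Because the segments field is validated with vol.Any(vol.Coerce(int), [vol.Coerce(int)]), a single segment number is accepted as well as a list, which is why async_clean_segment wraps a bare int into a list before calling segment_clean.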
- if hass.data.get(DATA_YAMAHA) is None: - hass.data[DATA_YAMAHA] = {} + def __init__(self, config: None, discovery_info: None): + """Initialize the Configuration Info for Yamaha Receiver.""" + self.name = config.get(CONF_NAME) + self.host = config.get(CONF_HOST) + self.ctrl_url = f"http://{self.host}:80/YamahaRemoteControl/ctrl" + self.source_ignore = config.get(CONF_SOURCE_IGNORE) + self.source_names = config.get(CONF_SOURCE_NAMES) + self.zone_ignore = config.get(CONF_ZONE_IGNORE) + self.zone_names = config.get(CONF_ZONE_NAMES) + self.from_discovery = False + if discovery_info is not None: + self.name = discovery_info.get("name") + self.model = discovery_info.get("model_name") + self.ctrl_url = discovery_info.get("control_url") + self.desc_url = discovery_info.get("description_url") + self.zone_ignore = [] + self.from_discovery = True - name = config.get(CONF_NAME) - host = config.get(CONF_HOST) - source_ignore = config.get(CONF_SOURCE_IGNORE) - source_names = config.get(CONF_SOURCE_NAMES) - zone_ignore = config.get(CONF_ZONE_IGNORE) - zone_names = config.get(CONF_ZONE_NAMES) - if discovery_info is not None: - name = discovery_info.get("name") - model = discovery_info.get("model_name") - ctrl_url = discovery_info.get("control_url") - desc_url = discovery_info.get("description_url") +def _discovery(config_info): + """Discover receivers from configuration in the network.""" + if config_info.from_discovery: receivers = rxv.RXV( - ctrl_url, model_name=model, friendly_name=name, unit_desc_url=desc_url + config_info.ctrl_url, + model_name=config_info.model, + friendly_name=config_info.name, + unit_desc_url=config_info.desc_url, ).zone_controllers() _LOGGER.debug("Receivers: %s", receivers) - # when we are dynamically discovered config is empty - zone_ignore = [] - elif host is None: + elif config_info.host is None: receivers = [] for recv in rxv.find(): receivers.extend(recv.zone_controllers()) else: - ctrl_url = f"http://{host}:80/YamahaRemoteControl/ctrl" - receivers = rxv.RXV(ctrl_url, name).zone_controllers() + receivers = rxv.RXV(config_info.ctrl_url, config_info.name).zone_controllers() - devices = [] + return receivers + + +async def async_setup_platform(hass, config, async_add_entities, discovery_info=None): + """Set up the Yamaha platform.""" + + # Keep track of configured receivers so that we don't end up + # discovering a receiver dynamically that we have static config + # for. Map each device from its zone_id . 
+ known_zones = hass.data.setdefault(DATA_YAMAHA, set()) + + # Get the Infos for configuration from config (YAML) or Discovery + config_info = YamahaConfigInfo(config=config, discovery_info=discovery_info) + # Async check if the Receivers are there in the network + receivers = await hass.async_add_executor_job(_discovery, config_info) + + entities = [] for receiver in receivers: - if receiver.zone in zone_ignore: + if receiver.zone in config_info.zone_ignore: continue - device = YamahaDevice(name, receiver, source_ignore, source_names, zone_names) + entity = YamahaDevice( + config_info.name, + receiver, + config_info.source_ignore, + config_info.source_names, + config_info.zone_names, + ) # Only add device if it's not already added - if device.zone_id not in hass.data[DATA_YAMAHA]: - hass.data[DATA_YAMAHA][device.zone_id] = device - devices.append(device) + if entity.zone_id not in known_zones: + known_zones.add(entity.zone_id) + entities.append(entity) else: - _LOGGER.debug("Ignoring duplicate receiver: %s", name) + _LOGGER.debug("Ignoring duplicate receiver: %s", config_info.name) - def service_handler(service): - """Handle for services.""" - entity_ids = service.data.get(ATTR_ENTITY_ID) + async_add_entities(entities) - devices = [ - device - for device in hass.data[DATA_YAMAHA].values() - if not entity_ids or device.entity_id in entity_ids - ] - - for device in devices: - port = service.data[ATTR_PORT] - enabled = service.data[ATTR_ENABLED] - - device.enable_output(port, enabled) - device.schedule_update_ha_state(True) - - hass.services.register( - DOMAIN, SERVICE_ENABLE_OUTPUT, service_handler, schema=ENABLE_OUTPUT_SCHEMA + # Register Service 'select_scene' + platform = entity_platform.current_platform.get() + platform.async_register_entity_service( + SERVICE_SELECT_SCENE, {vol.Required(ATTR_SCENE): cv.string}, "set_scene", + ) + # Register Service 'enable_output' + platform.async_register_entity_service( + SERVICE_ENABLE_OUTPUT, + {vol.Required(ATTR_ENABLED): cv.boolean, vol.Required(ATTR_PORT): cv.string}, + "enable_output", ) - - add_entities(devices) class YamahaDevice(MediaPlayerEntity): @@ -350,7 +361,6 @@ class YamahaDevice(MediaPlayerEntity): Yamaha to direct play certain kinds of media. media_type is treated as the input type that we are setting, and media id is specific to it. - For the NET RADIO mediatype the format for ``media_id`` is a "path" in your vtuner hierarchy. For instance: ``Bookmarks>Internet>Radio Paradise``. The separators are @@ -358,12 +368,10 @@ class YamahaDevice(MediaPlayerEntity): scenes. There is a looping construct built into the yamaha library to do this with a fallback timeout if the vtuner service is unresponsive. - NOTE: this might take a while, because the only API interface for setting the net radio station emulates button pressing and navigating through the net radio menu hierarchy. And each sub menu must be fetched by the receiver from the vtuner service. 
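Both the vacuum platform above and the Yamaha platform in this hunk replace their hand-written hass.services.register handlers with EntityPlatform.async_register_entity_service, which takes the service name, a voluptuous schema for the extra fields, and the name of the entity method to invoke; filtering the call down to the targeted entities is then handled by the platform helper itself. The sketch below shows both ends of that contract; it is illustrative only, and DemoEntity, set_label and the service name are made-up names rather than anything in Home Assistant.

# Illustrative sketch, not part of the patch: registering an entity service
# and the entity method that receives it. All names here are made up.
import voluptuous as vol

from homeassistant.helpers import config_validation as cv, entity_platform
from homeassistant.helpers.entity import Entity

SERVICE_SET_LABEL = "set_label"


class DemoEntity(Entity):
    """Minimal entity exposing a method an entity service can call."""

    def __init__(self):
        """Initialize the placeholder state."""
        self._label = None

    def set_label(self, label):
        """Handle the service call; keyword arguments match the schema keys."""
        self._label = label


async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
    """Add the entity, then register the service against this platform."""
    async_add_entities([DemoEntity()])

    platform = entity_platform.current_platform.get()
    platform.async_register_entity_service(
        SERVICE_SET_LABEL,
        {vol.Required("label"): cv.string},  # schema for the extra service fields
        "set_label",  # name of the entity method to run, not a callback
    )

This is why the hunks above pass MiroboVacuum.async_clean_zone.__name__ or the literal string "set_scene" as the last argument: the helper looks the method up on each targeted entity and calls it with the validated data.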
- """ if media_type == "NET RADIO": self.receiver.net_radio(media_id) @@ -372,6 +380,13 @@ class YamahaDevice(MediaPlayerEntity): """Enable or disable an output port..""" self.receiver.enable_output(port, enabled) + def set_scene(self, scene): + """Set the current scene.""" + try: + self.receiver.scene = scene + except AssertionError: + _LOGGER.warning("Scene '%s' does not exist!", scene) + def select_sound_mode(self, sound_mode): """Set Sound Mode for Receiver..""" self.receiver.surround_program = sound_mode diff --git a/homeassistant/components/yamaha/services.yaml b/homeassistant/components/yamaha/services.yaml index f96d3ea58ef..e4d85885d54 100644 --- a/homeassistant/components/yamaha/services.yaml +++ b/homeassistant/components/yamaha/services.yaml @@ -10,3 +10,12 @@ enable_output: enabled: description: Boolean indicating if port should be enabled or not. example: true +select_scene: + description: "Select a scene on the receiver" + fields: + entity_id: + description: Name(s) of entities to enable/disable port on. + example: "media_player.yamaha" + scene: + description: Name of the scene. Standard for RX-V437 is 'BD/DVD Movie Viewing', 'TV Viewing', 'NET Audio Listening' or 'Radio Listening' + example: "TV Viewing" diff --git a/homeassistant/components/yeelight/light.py b/homeassistant/components/yeelight/light.py index 244ccd5745d..0f622837bfb 100644 --- a/homeassistant/components/yeelight/light.py +++ b/homeassistant/components/yeelight/light.py @@ -127,25 +127,6 @@ YEELIGHT_COLOR_EFFECT_LIST = [ *YEELIGHT_MONO_EFFECT_LIST, ] -MODEL_TO_DEVICE_TYPE = { - "mono": BulbType.White, - "mono1": BulbType.White, - "color": BulbType.Color, - "color1": BulbType.Color, - "color2": BulbType.Color, - "strip1": BulbType.Color, - "bslamp1": BulbType.Color, - "bslamp2": BulbType.Color, - "RGBW": BulbType.Color, - "lamp1": BulbType.WhiteTemp, - "ceiling1": BulbType.WhiteTemp, - "ceiling2": BulbType.WhiteTemp, - "ceiling3": BulbType.WhiteTemp, - "ceiling4": BulbType.WhiteTempMood, - "ceiling10": BulbType.WhiteTempMood, - "ceiling13": BulbType.WhiteTemp, -} - EFFECTS_MAP = { EFFECT_DISCO: yee_transitions.disco, EFFECT_TEMP: yee_transitions.temp, @@ -274,10 +255,7 @@ def setup_platform(hass, config, add_entities, discovery_info=None): lights = [] - if device.model: - device_type = MODEL_TO_DEVICE_TYPE.get(device.model, None) - else: - device_type = device.type + device_type = device.type def _lights_setup_helper(klass): lights.append(klass(device, custom_effects=custom_effects)) diff --git a/homeassistant/components/zha/__init__.py b/homeassistant/components/zha/__init__.py index 8a23c6fc20d..d5f76fa5e23 100644 --- a/homeassistant/components/zha/__init__.py +++ b/homeassistant/components/zha/__init__.py @@ -51,9 +51,9 @@ CONFIG_SCHEMA = vol.Schema( { DOMAIN: vol.Schema( vol.All( - cv.deprecated(CONF_USB_PATH, invalidation_version="0.112"), - cv.deprecated(CONF_BAUDRATE, invalidation_version="0.112"), - cv.deprecated(CONF_RADIO_TYPE, invalidation_version="0.112"), + cv.deprecated(CONF_USB_PATH), + cv.deprecated(CONF_BAUDRATE), + cv.deprecated(CONF_RADIO_TYPE), ZHA_CONFIG_SCHEMA, ), ), diff --git a/homeassistant/components/zha/climate.py b/homeassistant/components/zha/climate.py index 3a0ff6455d2..7e2a0e147a7 100644 --- a/homeassistant/components/zha/climate.py +++ b/homeassistant/components/zha/climate.py @@ -200,6 +200,10 @@ class Thermostat(ZhaEntity, ClimateEntity): data[ATTR_OCCP_COOL_SETPT] = self._thrm.occupied_cooling_setpoint if self._thrm.occupied_heating_setpoint is not None: 
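On the calling side, the new select_scene service registered above works just like the vacuum services: a single scene field is routed to YamahaDevice.set_scene. A short hedged example, reusing the placeholder entity id and scene name from services.yaml above:

# Illustrative only: invoking the new yamaha.select_scene entity service.
async def evening_tv(hass):
    """Switch the receiver to its 'TV Viewing' scene."""
    await hass.services.async_call(
        "yamaha",
        "select_scene",
        {"entity_id": "media_player.yamaha", "scene": "TV Viewing"},
        blocking=True,
    )

If the receiver does not know the scene name, set_scene logs a warning instead of raising, because rxv reports an unknown scene by raising AssertionError.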
data[ATTR_OCCP_HEAT_SETPT] = self._thrm.occupied_heating_setpoint + if self._thrm.pi_heating_demand is not None: + data[ATTR_PI_HEATING_DEMAND] = self._thrm.pi_heating_demand + if self._thrm.pi_cooling_demand is not None: + data[ATTR_PI_COOLING_DEMAND] = self._thrm.pi_cooling_demand unoccupied_cooling_setpoint = self._thrm.unoccupied_cooling_setpoint if unoccupied_cooling_setpoint is not None: @@ -583,3 +587,13 @@ class ZenWithinThermostat(Thermostat): if self.hvac_mode != HVAC_MODE_OFF: return CURRENT_HVAC_IDLE return CURRENT_HVAC_OFF + + +@STRICT_MATCH( + channel_names=CHANNEL_THERMOSTAT, + aux_channels=CHANNEL_FAN, + manufacturers="Centralite", + models="3157100", +) +class CentralitePearl(ZenWithinThermostat): + """Centralite Pearl Thermostat implementation.""" diff --git a/homeassistant/components/zha/core/channels/__init__.py b/homeassistant/components/zha/core/channels/__init__.py index 18eb2a6c1cc..f0ef6f382c4 100644 --- a/homeassistant/components/zha/core/channels/__init__.py +++ b/homeassistant/components/zha/core/channels/__init__.py @@ -3,6 +3,8 @@ import asyncio import logging from typing import Any, Dict, List, Optional, Tuple, Union +import zigpy.zcl.clusters.closures + from homeassistant.core import callback from homeassistant.helpers.dispatcher import async_dispatcher_send @@ -280,9 +282,10 @@ class ChannelPool: # incorrectly. if ( hasattr(cluster, "ep_attribute") + and cluster_id == zigpy.zcl.clusters.closures.DoorLock.cluster_id and cluster.ep_attribute == "multistate_input" ): - channel_class = base.ZigbeeChannel + channel_class = general.MultistateInput # end of ugly hack channel = channel_class(cluster, self) if channel.name == const.CHANNEL_POWER_CONFIGURATION: diff --git a/homeassistant/components/zha/core/channels/homeautomation.py b/homeassistant/components/zha/core/channels/homeautomation.py index 2601cf47573..d95180ce780 100644 --- a/homeassistant/components/zha/core/channels/homeautomation.py +++ b/homeassistant/components/zha/core/channels/homeautomation.py @@ -75,7 +75,7 @@ class ElectricalMeasurementChannel(ZigbeeChannel): async def async_initialize(self, from_cache): """Initialize channel.""" - await self.fetch_config(from_cache) + await self.fetch_config(True) await super().async_initialize(from_cache) async def fetch_config(self, from_cache): diff --git a/homeassistant/components/zha/core/channels/smartenergy.py b/homeassistant/components/zha/core/channels/smartenergy.py index b6a631308ee..7b12411b84f 100644 --- a/homeassistant/components/zha/core/channels/smartenergy.py +++ b/homeassistant/components/zha/core/channels/smartenergy.py @@ -3,7 +3,7 @@ import logging import zigpy.zcl.clusters.smartenergy as smartenergy -from homeassistant.const import TIME_HOURS, TIME_SECONDS +from homeassistant.const import LENGTH_FEET, TIME_HOURS, TIME_SECONDS from homeassistant.core import callback from .. 
import registries, typing as zha_typing @@ -62,7 +62,7 @@ class Metering(ZigbeeChannel): unit_of_measure_map = { 0x00: "kW", 0x01: f"m³/{TIME_HOURS}", - 0x02: f"ft³/{TIME_HOURS}", + 0x02: f"{LENGTH_FEET}³/{TIME_HOURS}", 0x03: f"ccf/{TIME_HOURS}", 0x04: f"US gal/{TIME_HOURS}", 0x05: f"IMP gal/{TIME_HOURS}", @@ -80,8 +80,8 @@ class Metering(ZigbeeChannel): ) -> None: """Initialize Metering.""" super().__init__(cluster, ch_pool) - self._divisor = None - self._multiplier = None + self._divisor = 1 + self._multiplier = 1 self._unit_enum = None self._format_spec = None @@ -114,14 +114,8 @@ class Metering(ZigbeeChannel): from_cache=from_cache, ) - self._divisor = results.get("divisor", 1) - if self._divisor == 0: - self._divisor = 1 - - self._multiplier = results.get("multiplier", 1) - if self._multiplier == 0: - self._multiplier = 1 - + self._divisor = results.get("divisor", self._divisor) + self._multiplier = results.get("multiplier", self._multiplier) self._unit_enum = results.get("unit_of_measure", 0x7F) # default to unknown fmting = results.get( diff --git a/homeassistant/components/zha/core/const.py b/homeassistant/components/zha/core/const.py index 8a99a8a1b11..cb6a698d72f 100644 --- a/homeassistant/components/zha/core/const.py +++ b/homeassistant/components/zha/core/const.py @@ -170,20 +170,23 @@ class RadioType(enum.Enum): """Possible options for radio type.""" ezsp = ( - "ESZP: HUSBZB-1, Elelabs, Telegesis, Silabs EmberZNet protocol", + "EZSP = Silicon Labs EmberZNet protocol: Elelabs, HUSBZB-1, Telegesis", bellows.zigbee.application.ControllerApplication, ) deconz = ( - "Conbee, Conbee II, RaspBee radios from dresden elektronik", + "deCONZ = dresden elektronik deCONZ protocol: ConBee I/II, RaspBee I/II", zigpy_deconz.zigbee.application.ControllerApplication, ) ti_cc = ( - "TI_CC: CC2531, CC2530, CC2652R, CC1352 etc, Texas Instruments ZNP protocol", + "TI_CC = Texas Instruments Z-Stack ZNP protocol: CC253x, CC26x2, CC13x2", zigpy_cc.zigbee.application.ControllerApplication, ) - zigate = "ZiGate Radio", zigpy_zigate.zigbee.application.ControllerApplication + zigate = ( + "ZiGate = ZiGate Zigbee radios: PiZiGate, ZiGate USB-TTL, ZiGate WiFi", + zigpy_zigate.zigbee.application.ControllerApplication, + ) xbee = ( - "Digi XBee S2C, XBee 3 radios", + "XBee = Digi XBee Zigbee radios: Digi XBee Series 2, 2C, 3", zigpy_xbee.zigbee.application.ControllerApplication, ) diff --git a/homeassistant/components/zha/core/device.py b/homeassistant/components/zha/core/device.py index fcbf518a9db..53b1dcc163b 100644 --- a/homeassistant/components/zha/core/device.py +++ b/homeassistant/components/zha/core/device.py @@ -66,8 +66,8 @@ from .const import ( from .helpers import LogMixin _LOGGER = logging.getLogger(__name__) -_CONSIDER_UNAVAILABLE_MAINS = 60 * 60 * 2 # 2 hours -_CONSIDER_UNAVAILABLE_BATTERY = 60 * 60 * 6 # 6 hours +CONSIDER_UNAVAILABLE_MAINS = 60 * 60 * 2 # 2 hours +CONSIDER_UNAVAILABLE_BATTERY = 60 * 60 * 6 # 6 hours _UPDATE_ALIVE_INTERVAL = (60, 90) _CHECKIN_GRACE_PERIODS = 2 @@ -96,11 +96,6 @@ class ZHADevice(LogMixin): self._available_signal = f"{self.name}_{self.ieee}_{SIGNAL_AVAILABLE}" self._checkins_missed_count = 0 self.unsubs = [] - self.unsubs.append( - async_dispatcher_connect( - self.hass, self._available_signal, self.async_initialize - ) - ) self.quirk_applied = isinstance(self._zigpy_device, zigpy.quirks.CustomDevice) self.quirk_class = ( f"{self._zigpy_device.__class__.__module__}." 
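For context on the Metering channel change above: _divisor and _multiplier now start at 1 instead of None, so a raw attribute report can be scaled safely even before fetch_config has returned real values. A minimal sketch of that scaling follows, with simplified names that are not the integration's actual API.

def scale_metering_value(raw: int, multiplier: int = 1, divisor: int = 1) -> float:
    """Scale a raw Metering report; the new defaults of 1 leave the value unchanged."""
    return raw * multiplier / divisor

# Example: a meter that reports thousandths of a unit (multiplier=1, divisor=1000)
print(scale_metering_value(1234, 1, 1000))  # 1.234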
@@ -108,9 +103,9 @@ class ZHADevice(LogMixin): ) if self.is_mains_powered: - self._consider_unavailable_time = _CONSIDER_UNAVAILABLE_MAINS + self._consider_unavailable_time = CONSIDER_UNAVAILABLE_MAINS else: - self._consider_unavailable_time = _CONSIDER_UNAVAILABLE_BATTERY + self._consider_unavailable_time = CONSIDER_UNAVAILABLE_BATTERY keep_alive_interval = random.randint(*_UPDATE_ALIVE_INTERVAL) self.unsubs.append( async_track_time_interval( @@ -263,9 +258,14 @@ class ZHADevice(LogMixin): @property def available(self): - """Return True if sensor is available.""" + """Return True if device is available.""" return self._available + @available.setter + def available(self, new_availability: bool) -> None: + """Set device availability.""" + self._available = new_availability + @property def zigbee_signature(self) -> Dict[str, Any]: """Get zigbee signature for this device.""" @@ -274,10 +274,6 @@ class ZHADevice(LogMixin): ATTR_ENDPOINTS: self._channels.zigbee_signature, } - def set_available(self, available): - """Set availability from restore and prevent signals.""" - self._available = available - @classmethod def new( cls, @@ -342,13 +338,20 @@ class ZHADevice(LogMixin): if res is not None: self._checkins_missed_count = 0 - def update_available(self, available): - """Set sensor availability.""" - if self._available != available and available: - # Update the state the first time the device comes online - async_dispatcher_send(self.hass, self._available_signal, False) - async_dispatcher_send(self.hass, f"{self._available_signal}_entity", available) - self._available = available + def update_available(self, available: bool) -> None: + """Update device availability and signal entities.""" + availability_changed = self.available ^ available + self.available = available + if availability_changed and available: + # reinit channels then signal entities + self.hass.async_create_task(self._async_became_available()) + return + async_dispatcher_send(self.hass, f"{self._available_signal}_entity") + + async def _async_became_available(self) -> None: + """Update device availability and signal entities.""" + await self.async_initialize(False) + async_dispatcher_send(self.hass, f"{self._available_signal}_entity") @property def device_info(self): diff --git a/homeassistant/components/zha/core/gateway.py b/homeassistant/components/zha/core/gateway.py index 08f412dfcd8..ef39c408ec5 100644 --- a/homeassistant/components/zha/core/gateway.py +++ b/homeassistant/components/zha/core/gateway.py @@ -2,6 +2,7 @@ import asyncio import collections +from datetime import timedelta import itertools import logging import os @@ -74,7 +75,12 @@ from .const import ( ZHA_GW_MSG_RAW_INIT, RadioType, ) -from .device import DeviceStatus, ZHADevice +from .device import ( + CONSIDER_UNAVAILABLE_BATTERY, + CONSIDER_UNAVAILABLE_MAINS, + DeviceStatus, + ZHADevice, +) from .group import GroupMember, ZHAGroup from .patches import apply_application_controller_patch from .registries import GROUP_ENTITY_DOMAINS @@ -161,11 +167,26 @@ class ZHAGateway: @callback def async_load_devices(self) -> None: """Restore ZHA devices from zigpy application state.""" - zigpy_devices = self.application_controller.devices.values() - for zigpy_device in zigpy_devices: + for zigpy_device in self.application_controller.devices.values(): zha_device = self._async_get_or_create_device(zigpy_device, restored=True) if zha_device.nwk == 0x0000: self.coordinator_zha_device = zha_device + zha_dev_entry = self.zha_storage.devices.get(str(zigpy_device.ieee)) + delta_msg 
= "not known" + if zha_dev_entry and zha_dev_entry.last_seen is not None: + delta = round(time.time() - zha_dev_entry.last_seen) + if zha_device.is_mains_powered: + zha_device.available = delta < CONSIDER_UNAVAILABLE_MAINS + else: + zha_device.available = delta < CONSIDER_UNAVAILABLE_BATTERY + delta_msg = f"{str(timedelta(seconds=delta))} ago" + _LOGGER.debug( + "[%s](%s) restored as '%s', last seen: %s", + zha_device.nwk, + zha_device.name, + "available" if zha_device.available else "unavailable", + delta_msg, + ) @callback def async_load_groups(self) -> None: @@ -485,10 +506,12 @@ class ZHAGateway: self, sender, profile, cluster, src_ep, dst_ep, message ): """Handle tasks when a device becomes available.""" - self.async_update_device(sender) + self.async_update_device(sender, available=True) @callback - def async_update_device(self, sender: zigpy_dev.Device, available: bool = True): + def async_update_device( + self, sender: zigpy_dev.Device, available: bool = True + ) -> None: """Update device that has just become available.""" if sender.ieee in self.devices: device = self.devices[sender.ieee] @@ -543,9 +566,9 @@ class ZHAGateway: ) async def _async_device_joined(self, zha_device: zha_typing.ZhaDeviceType) -> None: + zha_device.available = True await zha_device.async_configure() - # will cause async_init to fire so don't explicitly call it - zha_device.update_available(True) + await zha_device.async_initialize(from_cache=False) async_dispatcher_send(self._hass, SIGNAL_ADD_ENTITIES) async def _async_device_rejoined(self, zha_device): @@ -556,7 +579,8 @@ class ZHAGateway: ) # we don't have to do this on a nwk swap but we don't have a way to tell currently await zha_device.async_configure() - # will cause async_init to fire so don't explicitly call it + # force async_initialize() to fire so don't explicitly call it + zha_device.available = False zha_device.update_available(True) async def async_create_zigpy_group( diff --git a/homeassistant/components/zha/core/patches.py b/homeassistant/components/zha/core/patches.py index 3d8c84e9bf3..633152e253c 100644 --- a/homeassistant/components/zha/core/patches.py +++ b/homeassistant/components/zha/core/patches.py @@ -7,8 +7,7 @@ def apply_application_controller_patch(zha_gateway): def handle_message(sender, profile, cluster, src_ep, dst_ep, message): """Handle message from a device.""" if ( - not sender.initializing - and sender.ieee in zha_gateway.devices + sender.ieee in zha_gateway.devices and not zha_gateway.devices[sender.ieee].available ): zha_gateway.async_device_became_available( diff --git a/homeassistant/components/zha/core/store.py b/homeassistant/components/zha/core/store.py index 0171ded67fe..896402703da 100644 --- a/homeassistant/components/zha/core/store.py +++ b/homeassistant/components/zha/core/store.py @@ -1,7 +1,9 @@ """Data storage helper for ZHA.""" # pylint: disable=unused-import from collections import OrderedDict +import datetime import logging +import time from typing import MutableMapping, cast import attr @@ -19,6 +21,7 @@ DATA_REGISTRY = "zha_storage" STORAGE_KEY = "zha.storage" STORAGE_VERSION = 1 SAVE_DELAY = 10 +TOMBSTONE_LIFETIME = datetime.timedelta(days=60).total_seconds() @attr.s(slots=True, frozen=True) @@ -99,7 +102,7 @@ class ZhaStorage: devices[device["ieee"]] = ZhaDeviceEntry( name=device["name"], ieee=device["ieee"], - last_seen=device["last_seen"] if "last_seen" in device else None, + last_seen=device.get("last_seen"), ) self.devices = devices @@ -121,6 +124,7 @@ class ZhaStorage: data["devices"] = [ 
{"name": entry.name, "ieee": entry.ieee, "last_seen": entry.last_seen} for entry in self.devices.values() + if entry.last_seen and (time.time() - entry.last_seen) < TOMBSTONE_LIFETIME ] return data diff --git a/homeassistant/components/zha/core/typing.py b/homeassistant/components/zha/core/typing.py index a4619d0596e..bce4a058ac6 100644 --- a/homeassistant/components/zha/core/typing.py +++ b/homeassistant/components/zha/core/typing.py @@ -30,6 +30,7 @@ if TYPE_CHECKING: import homeassistant.components.zha.core.channels.base as base_channels import homeassistant.components.zha.core.device import homeassistant.components.zha.core.gateway + import homeassistant.components.zha.core.group import homeassistant.components.zha.entity import homeassistant.components.zha.core.channels diff --git a/homeassistant/components/zha/entity.py b/homeassistant/components/zha/entity.py index 8629fc50075..a997d59197d 100644 --- a/homeassistant/components/zha/entity.py +++ b/homeassistant/components/zha/entity.py @@ -2,7 +2,6 @@ import asyncio import logging -import time from typing import Any, Awaitable, Dict, List, Optional from homeassistant.core import CALLBACK_TYPE, State, callback @@ -33,7 +32,6 @@ from .core.typing import CALLABLE_T, ChannelType, ZhaDeviceType _LOGGER = logging.getLogger(__name__) ENTITY_SUFFIX = "entity_suffix" -RESTART_GRACE_PERIOD = 7200 # 2 hours class BaseZhaEntity(LogMixin, entity.Entity): @@ -48,7 +46,6 @@ class BaseZhaEntity(LogMixin, entity.Entity): self._state: Any = None self._device_state_attributes: Dict[str, Any] = {} self._zha_device: ZhaDeviceType = zha_device - self._available: bool = False self._unsubs: List[CALLABLE_T] = [] self.remove_future: Awaitable[None] = None @@ -96,15 +93,9 @@ class BaseZhaEntity(LogMixin, entity.Entity): "via_device": (DOMAIN, self.hass.data[DATA_ZHA][DATA_ZHA_BRIDGE_ID]), } - @property - def available(self) -> bool: - """Return entity availability.""" - return self._available - @callback - def async_set_available(self, available: bool) -> None: - """Set entity availability.""" - self._available = available + def async_state_changed(self) -> None: + """Entity state changed.""" self.async_write_ha_state() @callback @@ -163,9 +154,13 @@ class ZhaEntity(BaseZhaEntity, RestoreEntity): for channel in channels: self.cluster_channels[channel.name] = channel + @property + def available(self) -> bool: + """Return entity availability.""" + return self._zha_device.available + async def async_added_to_hass(self) -> None: """Run when about to be added to hass.""" - await super().async_added_to_hass() self.remove_future = asyncio.Future() await self.async_accept_signal( None, @@ -173,11 +168,17 @@ class ZhaEntity(BaseZhaEntity, RestoreEntity): self.async_remove, signal_override=True, ) - await self.async_check_recently_seen() + + if not self.zha_device.is_mains_powered: + # mains powered devices will get real time state + last_state = await self.async_get_last_state() + if last_state: + self.async_restore_last_state(last_state) + await self.async_accept_signal( None, f"{self.zha_device.available_signal}_entity", - self.async_set_available, + self.async_state_changed, signal_override=True, ) self._zha_device.gateway.register_entity_reference( @@ -199,20 +200,6 @@ class ZhaEntity(BaseZhaEntity, RestoreEntity): def async_restore_last_state(self, last_state) -> None: """Restore previous state.""" - async def async_check_recently_seen(self) -> None: - """Check if the device was seen within the last 2 hours.""" - last_state = await self.async_get_last_state() - 
if ( - last_state - and self._zha_device.last_seen - and (time.time() - self._zha_device.last_seen < RESTART_GRACE_PERIOD) - ): - self.async_set_available(True) - if not self.zha_device.is_mains_powered: - # mains powered devices will get real time state - self.async_restore_last_state(last_state) - self._zha_device.set_available(True) - async def async_update(self) -> None: """Retrieve latest state.""" for channel in self.cluster_channels.values(): @@ -228,6 +215,7 @@ class ZhaGroupEntity(BaseZhaEntity): ) -> None: """Initialize a light group.""" super().__init__(unique_id, zha_device, **kwargs) + self._available = False self._name = ( f"{zha_device.gateway.groups.get(group_id).name}_zha_group_0x{group_id:04x}" ) @@ -235,6 +223,11 @@ class ZhaGroupEntity(BaseZhaEntity): self._entity_ids: List[str] = entity_ids self._async_unsub_state_changed: Optional[CALLBACK_TYPE] = None + @property + def available(self) -> bool: + """Return entity availability.""" + return self._available + async def async_added_to_hass(self) -> None: """Register callbacks.""" await super().async_added_to_hass() diff --git a/homeassistant/components/zha/manifest.json b/homeassistant/components/zha/manifest.json index 421162eb9dd..c9b25b58e25 100644 --- a/homeassistant/components/zha/manifest.json +++ b/homeassistant/components/zha/manifest.json @@ -4,12 +4,12 @@ "config_flow": true, "documentation": "https://www.home-assistant.io/integrations/zha", "requirements": [ - "bellows==0.16.2", + "bellows==0.17.0", "pyserial==3.4", - "zha-quirks==0.0.40", + "zha-quirks==0.0.41", "zigpy-cc==0.4.4", "zigpy-deconz==0.9.2", - "zigpy==0.20.4", + "zigpy==0.21.0", "zigpy-xbee==0.12.1", "zigpy-zigate==0.6.1" ], diff --git a/homeassistant/components/zha/sensor.py b/homeassistant/components/zha/sensor.py index 4544780b4f8..86969c5fe96 100644 --- a/homeassistant/components/zha/sensor.py +++ b/homeassistant/components/zha/sensor.py @@ -25,11 +25,9 @@ from homeassistant.util.temperature import fahrenheit_to_celsius from .core import discovery from .core.const import ( - CHANNEL_ANALOG_INPUT, CHANNEL_ELECTRICAL_MEASUREMENT, CHANNEL_HUMIDITY, CHANNEL_ILLUMINANCE, - CHANNEL_MULTISTATE_INPUT, CHANNEL_POWER_CONFIGURATION, CHANNEL_PRESSURE, CHANNEL_SMARTENERGY_METERING, @@ -153,13 +151,6 @@ class Sensor(ZhaEntity): return round(float(value * self._multiplier) / self._divisor) -@STRICT_MATCH(channel_names=CHANNEL_ANALOG_INPUT) -class AnalogInput(Sensor): - """Sensor that displays analog input values.""" - - SENSOR_ATTR = "present_value" - - @STRICT_MATCH(channel_names=CHANNEL_POWER_CONFIGURATION) class Battery(Sensor): """Battery sensor of power configuration cluster.""" @@ -220,18 +211,6 @@ class ElectricalMeasurement(Sensor): return round(value) -@STRICT_MATCH(channel_names=CHANNEL_MULTISTATE_INPUT) -class Text(Sensor): - """Sensor that displays string values.""" - - _device_class = None - _unit = None - - def formatter(self, value) -> str: - """Return string value.""" - return value - - @STRICT_MATCH(generic_ids=CHANNEL_ST_HUMIDITY_CLUSTER) @STRICT_MATCH(channel_names=CHANNEL_HUMIDITY) class Humidity(Sensor): diff --git a/homeassistant/components/zone/__init__.py b/homeassistant/components/zone/__init__.py index 74c145e19d9..aad8eb51dd2 100644 --- a/homeassistant/components/zone/__init__.py +++ b/homeassistant/components/zone/__init__.py @@ -7,7 +7,6 @@ import voluptuous as vol from homeassistant import config_entries from homeassistant.const import ( ATTR_EDITABLE, - ATTR_HIDDEN, ATTR_LATITUDE, ATTR_LONGITUDE, CONF_ICON, @@ -332,7 +331,6 @@ 
class Zone(entity.Entity): def _generate_attrs(self) -> None: """Generate new attrs based on config.""" self._attrs = { - ATTR_HIDDEN: True, ATTR_LATITUDE: self._config[CONF_LATITUDE], ATTR_LONGITUDE: self._config[CONF_LONGITUDE], ATTR_RADIUS: self._config[CONF_RADIUS], diff --git a/homeassistant/config_entries.py b/homeassistant/config_entries.py index 8dc88aa4da9..68442689d3b 100644 --- a/homeassistant/config_entries.py +++ b/homeassistant/config_entries.py @@ -21,6 +21,8 @@ _LOGGER = logging.getLogger(__name__) _UNDEF: dict = {} SOURCE_DISCOVERY = "discovery" +SOURCE_HASSIO = "hassio" +SOURCE_HOMEKIT = "homekit" SOURCE_IMPORT = "import" SOURCE_INTEGRATION_DISCOVERY = "integration_discovery" SOURCE_SSDP = "ssdp" @@ -62,6 +64,7 @@ ENTRY_STATE_FAILED_UNLOAD = "failed_unload" UNRECOVERABLE_STATES = (ENTRY_STATE_MIGRATION_ERROR, ENTRY_STATE_FAILED_UNLOAD) +DEFAULT_DISCOVERY_UNIQUE_ID = "default_discovery_unique_id" DISCOVERY_NOTIFICATION_ID = "config_entry_discovery" DISCOVERY_SOURCES = ( SOURCE_SSDP, @@ -466,6 +469,10 @@ class ConfigEntriesFlowManager(data_entry_flow.FlowManager): ): self.async_abort(progress_flow["flow_id"]) + # Reset unique ID when the default discovery ID has been used + if flow.unique_id == DEFAULT_DISCOVERY_UNIQUE_ID: + await flow.async_set_unique_id(None) + # Find existing entry. for check_entry in self.config_entries.async_entries(result["handler"]): if check_entry.unique_id == flow.unique_id: @@ -857,12 +864,16 @@ class ConfigFlow(data_entry_flow.FlowHandler): raise data_entry_flow.AbortFlow("already_configured") async def async_set_unique_id( - self, unique_id: str, *, raise_on_progress: bool = True + self, unique_id: Optional[str] = None, *, raise_on_progress: bool = True ) -> Optional[ConfigEntry]: """Set a unique ID for the config flow. Returns optionally existing config entry with same ID. """ + if unique_id is None: + self.context["unique_id"] = None # pylint: disable=no-member + return None + if raise_on_progress: for progress in self._async_in_progress(): if progress["context"].get("unique_id") == unique_id: @@ -870,6 +881,13 @@ class ConfigFlow(data_entry_flow.FlowHandler): self.context["unique_id"] = unique_id # pylint: disable=no-member + # Abort discoveries done using the default discovery unique id + assert self.hass is not None + if unique_id != DEFAULT_DISCOVERY_UNIQUE_ID: + for progress in self._async_in_progress(): + if progress["context"].get("unique_id") == DEFAULT_DISCOVERY_UNIQUE_ID: + self.hass.config_entries.flow.async_abort(progress["flow_id"]) + for entry in self._async_current_entries(): if entry.unique_id == unique_id: return entry @@ -911,6 +929,49 @@ class ConfigFlow(data_entry_flow.FlowHandler): """Rediscover a config entry by it's unique_id.""" return self.async_abort(reason="not_implemented") + async def async_step_user( + self, user_input: Optional[Dict[str, Any]] = None + ) -> Dict[str, Any]: + """Handle a flow initiated by the user.""" + return self.async_abort(reason="not_implemented") + + async def _async_handle_discovery_without_unique_id(self) -> None: + """Mark this flow discovered, without a unique identifier. + + If a flow initiated by discovery, doesn't have a unique ID, this can + be used alternatively. It will ensure only 1 flow is started and only + when the handler has no existing config entries. + + It ensures that the discovery can be ignored by the user. 
+ """ + if self.unique_id is not None: + return + + # Abort if the handler has config entries already + if self._async_current_entries(): + raise data_entry_flow.AbortFlow("already_configured") + + # Use an special unique id to differentiate + await self.async_set_unique_id(DEFAULT_DISCOVERY_UNIQUE_ID) + self._abort_if_unique_id_configured() + + # Abort if any other flow for this handler is already in progress + assert self.hass is not None + if self._async_in_progress(): + raise data_entry_flow.AbortFlow("already_in_progress") + + async def async_step_discovery( + self, discovery_info: Dict[str, Any] + ) -> Dict[str, Any]: + """Handle a flow initialized by discovery.""" + await self._async_handle_discovery_without_unique_id() + return await self.async_step_user() + + async_step_hassio = async_step_discovery + async_step_homekit = async_step_discovery + async_step_ssdp = async_step_discovery + async_step_zeroconf = async_step_discovery + class OptionsFlowManager(data_entry_flow.FlowManager): """Flow to set options for a configuration entry.""" @@ -947,7 +1008,8 @@ class OptionsFlowManager(data_entry_flow.FlowManager): entry = self.hass.config_entries.async_get_entry(flow.handler) if entry is None: raise UnknownEntry(flow.handler) - self.hass.config_entries.async_update_entry(entry, options=result["data"]) + if result["data"] is not None: + self.hass.config_entries.async_update_entry(entry, options=result["data"]) result["result"] = True return result diff --git a/homeassistant/const.py b/homeassistant/const.py index d73e7139ff6..f72c0011b17 100644 --- a/homeassistant/const.py +++ b/homeassistant/const.py @@ -1,7 +1,7 @@ """Constants used by Home Assistant components.""" MAJOR_VERSION = 0 -MINOR_VERSION = 111 -PATCH_VERSION = "4" +MINOR_VERSION = 112 +PATCH_VERSION = "0" __short_version__ = f"{MAJOR_VERSION}.{MINOR_VERSION}" __version__ = f"{__short_version__}.{PATCH_VERSION}" REQUIRED_PYTHON_VER = (3, 7, 0) @@ -46,6 +46,7 @@ CONF_BINARY_SENSORS = "binary_sensors" CONF_BLACKLIST = "blacklist" CONF_BRIGHTNESS = "brightness" CONF_BROADCAST_ADDRESS = "broadcast_address" +CONF_BROADCAST_PORT = "broadcast_port" CONF_CLIENT_ID = "client_id" CONF_CLIENT_SECRET = "client_secret" CONF_CODE = "code" @@ -190,7 +191,6 @@ EVENT_HOMEASSISTANT_STOP = "homeassistant_stop" EVENT_HOMEASSISTANT_FINAL_WRITE = "homeassistant_final_write" EVENT_LOGBOOK_ENTRY = "logbook_entry" EVENT_PLATFORM_DISCOVERED = "platform_discovered" -EVENT_SCRIPT_STARTED = "script_started" EVENT_SERVICE_REGISTERED = "service_registered" EVENT_SERVICE_REMOVED = "service_removed" EVENT_STATE_CHANGED = "state_changed" diff --git a/homeassistant/core.py b/homeassistant/core.py index 2350c9a8102..f8f4e7c0d02 100644 --- a/homeassistant/core.py +++ b/homeassistant/core.py @@ -79,6 +79,7 @@ from homeassistant.util.unit_system import IMPERIAL_SYSTEM, METRIC_SYSTEM, UnitS # Typing imports that create a circular dependency if TYPE_CHECKING: + from homeassistant.auth import AuthManager from homeassistant.config_entries import ConfigEntries from homeassistant.components.http import HomeAssistantHTTP @@ -174,6 +175,7 @@ class CoreState(enum.Enum): class HomeAssistant: """Root object of the Home Assistant home automation.""" + auth: "AuthManager" http: "HomeAssistantHTTP" = None # type: ignore config_entries: "ConfigEntries" = None # type: ignore @@ -737,14 +739,12 @@ class State: last_changed: Optional[datetime.datetime] = None, last_updated: Optional[datetime.datetime] = None, context: Optional[Context] = None, - # Temp, because database can still 
store invalid entity IDs - # Remove with 1.0 or in 2020. - temp_invalid_id_bypass: Optional[bool] = False, + validate_entity_id: Optional[bool] = True, ) -> None: """Initialize a new state.""" state = str(state) - if not valid_entity_id(entity_id) and not temp_invalid_id_bypass: + if validate_entity_id and not valid_entity_id(entity_id): raise InvalidEntityFormatError( f"Invalid entity id encountered: {entity_id}. " "Format should be ." diff --git a/homeassistant/exceptions.py b/homeassistant/exceptions.py index 745d80d386b..d085c1a9021 100644 --- a/homeassistant/exceptions.py +++ b/homeassistant/exceptions.py @@ -1,5 +1,5 @@ """The exceptions used by Home Assistant.""" -from typing import TYPE_CHECKING, Optional, Tuple +from typing import TYPE_CHECKING, Optional import jinja2 @@ -49,7 +49,7 @@ class Unauthorized(HomeAssistantError): entity_id: Optional[str] = None, config_entry_id: Optional[str] = None, perm_category: Optional[str] = None, - permission: Optional[Tuple[str]] = None, + permission: Optional[str] = None, ) -> None: """Unauthorized error.""" super().__init__(self.__class__.__name__) diff --git a/homeassistant/generated/config_flows.py b/homeassistant/generated/config_flows.py index 33059ee0d68..977be4bae87 100644 --- a/homeassistant/generated/config_flows.py +++ b/homeassistant/generated/config_flows.py @@ -15,8 +15,11 @@ FLOWS = [ "almond", "ambiclimate", "ambient_station", + "arcam_fmj", "atag", "august", + "avri", + "awair", "axis", "blebox", "blink", @@ -29,6 +32,7 @@ FLOWS = [ "coronavirus", "daikin", "deconz", + "denonavr", "devolo_home_control", "dialogflow", "directv", @@ -68,6 +72,7 @@ FLOWS = [ "huawei_lte", "hue", "hunterdouglas_powerview", + "hvv_departures", "iaqualink", "icloud", "ifttt", @@ -91,6 +96,7 @@ FLOWS = [ "melcloud", "met", "meteo_france", + "metoffice", "mikrotik", "mill", "minecraft_server", @@ -116,6 +122,7 @@ FLOWS = [ "plaato", "plex", "plugwise", + "plum_lightpad", "point", "powerwall", "ps4", @@ -130,8 +137,10 @@ FLOWS = [ "sentry", "shopping_list", "simplisafe", + "smappee", "smartthings", "smhi", + "sms", "solaredge", "solarlog", "soma", @@ -139,13 +148,16 @@ FLOWS = [ "sonarr", "songpal", "sonos", + "speedtestdotnet", "spotify", + "squeezebox", "starline", "synology_dsm", "tado", "tellduslive", "tesla", "tibber", + "tile", "toon", "totalconnect", "tplink", @@ -167,6 +179,7 @@ FLOWS = [ "wiffi", "withings", "wled", + "xiaomi_aqara", "xiaomi_miio", "zerproc", "zha", diff --git a/homeassistant/generated/ssdp.py b/homeassistant/generated/ssdp.py index 490ffdffeb1..1cbade276fe 100644 --- a/homeassistant/generated/ssdp.py +++ b/homeassistant/generated/ssdp.py @@ -6,11 +6,55 @@ To update, run python3 -m script.hassfest # fmt: off SSDP = { + "arcam_fmj": [ + { + "deviceType": "urn:schemas-upnp-org:device:MediaRenderer:1", + "manufacturer": "ARCAM" + } + ], "deconz": [ { "manufacturer": "Royal Philips Electronics" } ], + "denonavr": [ + { + "deviceType": "urn:schemas-upnp-org:device:MediaRenderer:1", + "manufacturer": "Denon" + }, + { + "deviceType": "urn:schemas-upnp-org:device:MediaRenderer:1", + "manufacturer": "DENON" + }, + { + "deviceType": "urn:schemas-upnp-org:device:MediaRenderer:1", + "manufacturer": "Marantz" + }, + { + "deviceType": "urn:schemas-upnp-org:device:MediaServer:1", + "manufacturer": "Denon" + }, + { + "deviceType": "urn:schemas-upnp-org:device:MediaServer:1", + "manufacturer": "DENON" + }, + { + "deviceType": "urn:schemas-upnp-org:device:MediaServer:1", + "manufacturer": "Marantz" + }, + { + "deviceType": 
"urn:schemas-denon-com:device:AiosDevice:1", + "manufacturer": "Denon" + }, + { + "deviceType": "urn:schemas-denon-com:device:AiosDevice:1", + "manufacturer": "DENON" + }, + { + "deviceType": "urn:schemas-denon-com:device:AiosDevice:1", + "manufacturer": "Marantz" + } + ], "directv": [ { "deviceType": "urn:schemas-upnp-org:device:MediaServer:1", diff --git a/homeassistant/generated/zeroconf.py b/homeassistant/generated/zeroconf.py index ead2c0fa42d..a4bd268199f 100644 --- a/homeassistant/generated/zeroconf.py +++ b/homeassistant/generated/zeroconf.py @@ -38,8 +38,12 @@ ZEROCONF = { "ipp" ], "_miio._udp.local.": [ + "xiaomi_aqara", "xiaomi_miio" ], + "_nut._tcp.local.": [ + "nut" + ], "_printer._tcp.local.": [ "brother" ], @@ -57,8 +61,10 @@ ZEROCONF = { HOMEKIT = { "819LMB": "myq", "AC02": "tado", + "Abode": "abode", "BSB002": "hue", "Healty Home Coach": "netatmo", + "Iota": "abode", "LIFX": "lifx", "Netatmo Relay": "netatmo", "PowerView": "hunterdouglas_powerview", diff --git a/homeassistant/helpers/condition.py b/homeassistant/helpers/condition.py index 535de0304a0..5c7313f6716 100644 --- a/homeassistant/helpers/condition.py +++ b/homeassistant/helpers/condition.py @@ -5,7 +5,7 @@ from datetime import datetime, timedelta import functools as ft import logging import sys -from typing import Callable, Container, Optional, Set, Union, cast +from typing import Callable, Container, List, Optional, Set, Union, cast from homeassistant.components import zone as zone_cmp from homeassistant.components.device_automation import ( @@ -238,7 +238,7 @@ def async_numeric_state_from_config( """Wrap action method with state based condition.""" if config_validation: config = cv.NUMERIC_STATE_CONDITION_SCHEMA(config) - entity_id = config.get(CONF_ENTITY_ID) + entity_ids = config.get(CONF_ENTITY_ID, []) below = config.get(CONF_BELOW) above = config.get(CONF_ABOVE) value_template = config.get(CONF_VALUE_TEMPLATE) @@ -250,8 +250,11 @@ def async_numeric_state_from_config( if value_template is not None: value_template.hass = hass - return async_numeric_state( - hass, entity_id, below, above, value_template, variables + return all( + async_numeric_state( + hass, entity_id, below, above, value_template, variables + ) + for entity_id in entity_ids ) return if_numeric_state @@ -260,7 +263,7 @@ def async_numeric_state_from_config( def state( hass: HomeAssistant, entity: Union[None, str, State], - req_state: str, + req_state: Union[str, List[str]], for_period: Optional[timedelta] = None, ) -> bool: """Test if state matches requirements. 
@@ -274,7 +277,10 @@ def state( return False assert isinstance(entity, State) - is_state = entity.state == req_state + if isinstance(req_state, str): + req_state = [req_state] + + is_state = entity.state in req_state if for_period is None or not is_state: return is_state @@ -288,13 +294,18 @@ def state_from_config( """Wrap action method with state based condition.""" if config_validation: config = cv.STATE_CONDITION_SCHEMA(config) - entity_id = config.get(CONF_ENTITY_ID) - req_state = cast(str, config.get(CONF_STATE)) + entity_ids = config.get(CONF_ENTITY_ID, []) + req_states: Union[str, List[str]] = config.get(CONF_STATE, []) for_period = config.get("for") + if not isinstance(req_states, list): + req_states = [req_states] + def if_state(hass: HomeAssistant, variables: TemplateVarsType = None) -> bool: """Test if condition.""" - return state(hass, entity_id, req_state, for_period) + return all( + state(hass, entity_id, req_states, for_period) for entity_id in entity_ids + ) return if_state @@ -506,12 +517,18 @@ def zone_from_config( """Wrap action method with zone based condition.""" if config_validation: config = cv.ZONE_CONDITION_SCHEMA(config) - entity_id = config.get(CONF_ENTITY_ID) - zone_entity_id = config.get(CONF_ZONE) + entity_ids = config.get(CONF_ENTITY_ID, []) + zone_entity_ids = config.get(CONF_ZONE, []) def if_in_zone(hass: HomeAssistant, variables: TemplateVarsType = None) -> bool: """Test if condition.""" - return zone(hass, zone_entity_id, entity_id) + return all( + any( + zone(hass, zone_entity_id, entity_id) + for zone_entity_id in zone_entity_ids + ) + for entity_id in entity_ids + ) return if_in_zone @@ -556,7 +573,7 @@ async def async_validate_condition_config( @callback def async_extract_entities(config: ConfigType) -> Set[str]: """Extract entities from a condition.""" - referenced = set() + referenced: Set[str] = set() to_process = deque([config]) while to_process: @@ -567,10 +584,13 @@ def async_extract_entities(config: ConfigType) -> Set[str]: to_process.extend(config["conditions"]) continue - entity_id = config.get(CONF_ENTITY_ID) + entity_ids = config.get(CONF_ENTITY_ID) - if entity_id is not None: - referenced.add(entity_id) + if isinstance(entity_ids, str): + entity_ids = [entity_ids] + + if entity_ids is not None: + referenced.update(entity_ids) return referenced diff --git a/homeassistant/helpers/config_entry_flow.py b/homeassistant/helpers/config_entry_flow.py index 81881d943cd..d349820978e 100644 --- a/homeassistant/helpers/config_entry_flow.py +++ b/homeassistant/helpers/config_entry_flow.py @@ -1,5 +1,5 @@ """Helpers for data entry flows for config entries.""" -from typing import Awaitable, Callable, Union +from typing import Any, Awaitable, Callable, Dict, Optional, Union from homeassistant import config_entries @@ -28,7 +28,9 @@ class DiscoveryFlowHandler(config_entries.ConfigFlow): self._discovery_function = discovery_function self.CONNECTION_CLASS = connection_class # pylint: disable=invalid-name - async def async_step_user(self, user_input=None): + async def async_step_user( + self, user_input: Optional[Dict[str, Any]] = None + ) -> Dict[str, Any]: """Handle a flow initialized by the user.""" if self._async_current_entries(): return self.async_abort(reason="single_instance_allowed") @@ -37,7 +39,9 @@ class DiscoveryFlowHandler(config_entries.ConfigFlow): return await self.async_step_confirm() - async def async_step_confirm(self, user_input=None): + async def async_step_confirm( + self, user_input: Optional[Dict[str, Any]] = None + ) -> Dict[str, 
Any]: """Confirm setup.""" if user_input is None: return self.async_show_form(step_id="confirm") @@ -48,7 +52,7 @@ class DiscoveryFlowHandler(config_entries.ConfigFlow): has_devices = in_progress if not has_devices: - has_devices = await self.hass.async_add_job( + has_devices = await self.hass.async_add_job( # type: ignore self._discovery_function, self.hass ) @@ -56,12 +60,18 @@ class DiscoveryFlowHandler(config_entries.ConfigFlow): return self.async_abort(reason="no_devices_found") # Cancel the discovered one. + assert self.hass is not None for flow in in_progress: self.hass.config_entries.flow.async_abort(flow["flow_id"]) + if self._async_current_entries(): + return self.async_abort(reason="single_instance_allowed") + return self.async_create_entry(title=self._title, data={}) - async def async_step_discovery(self, discovery_info): + async def async_step_discovery( + self, discovery_info: Dict[str, Any] + ) -> Dict[str, Any]: """Handle a flow initialized by discovery.""" if self._async_in_progress() or self._async_current_entries(): return self.async_abort(reason="single_instance_allowed") @@ -74,11 +84,17 @@ class DiscoveryFlowHandler(config_entries.ConfigFlow): async_step_ssdp = async_step_discovery async_step_homekit = async_step_discovery - async def async_step_import(self, _): + async def async_step_import(self, _: Optional[Dict[str, Any]]) -> Dict[str, Any]: """Handle a flow initialized by import.""" - if self._async_in_progress() or self._async_current_entries(): + if self._async_current_entries(): return self.async_abort(reason="single_instance_allowed") + # Cancel other flows. + assert self.hass is not None + in_progress = self._async_in_progress() + for flow in in_progress: + self.hass.config_entries.flow.async_abort(flow["flow_id"]) + return self.async_create_entry(title=self._title, data={}) @@ -117,7 +133,9 @@ class WebhookFlowHandler(config_entries.ConfigFlow): self._description_placeholder = description_placeholder self._allow_multiple = allow_multiple - async def async_step_user(self, user_input=None): + async def async_step_user( + self, user_input: Optional[Dict[str, Any]] = None + ) -> Dict[str, Any]: """Handle a user initiated set up flow to create a webhook.""" if not self._allow_multiple and self._async_current_entries(): return self.async_abort(reason="one_instance_allowed") @@ -125,6 +143,7 @@ class WebhookFlowHandler(config_entries.ConfigFlow): if user_input is None: return self.async_show_form(step_id="user") + assert self.hass is not None webhook_id = self.hass.components.webhook.async_generate_id() if ( diff --git a/homeassistant/helpers/config_entry_oauth2_flow.py b/homeassistant/helpers/config_entry_oauth2_flow.py index 712ea9f105c..acaa0e52ab1 100644 --- a/homeassistant/helpers/config_entry_oauth2_flow.py +++ b/homeassistant/helpers/config_entry_oauth2_flow.py @@ -120,10 +120,16 @@ class LocalOAuth2Implementation(AbstractOAuth2Implementation): """Return the redirect uri.""" return f"{get_url(self.hass)}{AUTH_CALLBACK_PATH}" + @property + def extra_authorize_data(self) -> dict: + """Extra data that needs to be appended to the authorize url.""" + return {} + async def async_generate_authorize_url(self, flow_id: str) -> str: """Generate a url for the user to authorize.""" return str( - URL(self.authorize_url).with_query( + URL(self.authorize_url) + .with_query( { "response_type": "code", "client_id": self.client_id, @@ -131,6 +137,7 @@ class LocalOAuth2Implementation(AbstractOAuth2Implementation): "state": _encode_jwt(self.hass, {"flow_id": flow_id}), } ) + 
.update_query(self.extra_authorize_data) ) async def async_resolve_external_data(self, external_data: Any) -> dict: @@ -226,7 +233,9 @@ class AbstractOAuth2FlowHandler(config_entries.ConfigFlow, metaclass=ABCMeta): ), ) - async def async_step_auth(self, user_input: Optional[dict] = None) -> dict: + async def async_step_auth( + self, user_input: Optional[Dict[str, Any]] = None + ) -> Dict[str, Any]: """Create an entry for auth.""" # Flow has been triggered by external data if user_input: @@ -243,7 +252,9 @@ class AbstractOAuth2FlowHandler(config_entries.ConfigFlow, metaclass=ABCMeta): return self.async_external_step(step_id="auth", url=url) - async def async_step_creation(self, user_input: Optional[dict] = None) -> dict: + async def async_step_creation( + self, user_input: Optional[Dict[str, Any]] = None + ) -> Dict[str, Any]: """Create config entry from external data.""" token = await self.flow_impl.async_resolve_external_data(self.external_data) token["expires_at"] = time.time() + token["expires_in"] @@ -261,7 +272,9 @@ class AbstractOAuth2FlowHandler(config_entries.ConfigFlow, metaclass=ABCMeta): """ return self.async_create_entry(title=self.flow_impl.name, data=data) - async def async_step_discovery(self, user_input: Optional[dict] = None) -> dict: + async def async_step_discovery( + self, discovery_info: Dict[str, Any] + ) -> Dict[str, Any]: """Handle a flow initialized by discovery.""" await self.async_set_unique_id(self.DOMAIN) diff --git a/homeassistant/helpers/config_validation.py b/homeassistant/helpers/config_validation.py index c24adc76597..24ba0d3c0f0 100644 --- a/homeassistant/helpers/config_validation.py +++ b/homeassistant/helpers/config_validation.py @@ -659,7 +659,7 @@ def deprecated( warning = ( "The '{key}' option is deprecated," " please replace it with '{replacement_key}'." - " This option will become invalid in version" + " This option {invalidation_status} invalid in version" " {invalidation_version}" ) elif replacement_key: @@ -671,7 +671,7 @@ def deprecated( warning = ( "The '{key}' option is deprecated," " please remove it from your configuration." 
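The new extra_authorize_data hook above lets an OAuth2 implementation append query parameters (for example a scope) to the authorize URL without overriding async_generate_authorize_url. A hedged sketch of how an integration might use it; the class name and scope value are invented for illustration.

from homeassistant.helpers.config_entry_oauth2_flow import LocalOAuth2Implementation

class ExampleOAuth2Implementation(LocalOAuth2Implementation):
    """Example subclass that requests an extra scope during authorization."""

    @property
    def extra_authorize_data(self) -> dict:
        """Extra data appended to the authorize URL by the base class."""
        return {"scope": "read write"}  # placeholder scope string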
- " This option will become invalid in version" + " This option {invalidation_status} invalid in version" " {invalidation_version}" ) else: @@ -690,6 +690,7 @@ def deprecated( warning.format( key=key, replacement_key=replacement_key, + invalidation_status="became", invalidation_version=invalidation_version, ) ) @@ -702,6 +703,7 @@ def deprecated( warning, key=key, replacement_key=replacement_key, + invalidation_status="will become", invalidation_version=invalidation_version, ) @@ -842,7 +844,7 @@ NUMERIC_STATE_CONDITION_SCHEMA = vol.All( vol.Schema( { vol.Required(CONF_CONDITION): "numeric_state", - vol.Required(CONF_ENTITY_ID): entity_id, + vol.Required(CONF_ENTITY_ID): entity_ids, CONF_BELOW: vol.Coerce(float), CONF_ABOVE: vol.Coerce(float), vol.Optional(CONF_VALUE_TEMPLATE): template, @@ -855,8 +857,8 @@ STATE_CONDITION_SCHEMA = vol.All( vol.Schema( { vol.Required(CONF_CONDITION): "state", - vol.Required(CONF_ENTITY_ID): entity_id, - vol.Required(CONF_STATE): str, + vol.Required(CONF_ENTITY_ID): entity_ids, + vol.Required(CONF_STATE): vol.Any(str, [str]), vol.Optional(CONF_FOR): vol.All(time_period, positive_timedelta), # To support use_trigger_value in automation # Deprecated 2016/04/25 @@ -903,8 +905,8 @@ TIME_CONDITION_SCHEMA = vol.All( ZONE_CONDITION_SCHEMA = vol.Schema( { vol.Required(CONF_CONDITION): "zone", - vol.Required(CONF_ENTITY_ID): entity_id, - "zone": entity_id, + vol.Required(CONF_ENTITY_ID): entity_ids, + "zone": entity_ids, # To support use_trigger_value in automation # Deprecated 2016/04/25 vol.Optional("event"): vol.Any("enter", "leave"), diff --git a/homeassistant/helpers/discovery.py b/homeassistant/helpers/discovery.py index 11663672bb2..e86638ac02a 100644 --- a/homeassistant/helpers/discovery.py +++ b/homeassistant/helpers/discovery.py @@ -9,9 +9,8 @@ from typing import Any, Callable, Collection, Dict, Optional, Union from homeassistant import core, setup from homeassistant.const import ATTR_DISCOVERED, ATTR_SERVICE, EVENT_PLATFORM_DISCOVERED -from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType -from homeassistant.loader import DEPENDENCY_BLACKLIST, bind_hass +from homeassistant.loader import bind_hass from homeassistant.util.async_ import run_callback_threadsafe EVENT_LOAD_PLATFORM = "load_platform.{}" @@ -79,9 +78,6 @@ async def async_discover( hass_config: ConfigType, ) -> None: """Fire discovery event. 
Can ensure a component is loaded.""" - if component in DEPENDENCY_BLACKLIST: - raise HomeAssistantError(f"Cannot discover the {component} component.") - if component is not None and component not in hass.config.components: await setup.async_setup_component(hass, component, hass_config) @@ -181,9 +177,6 @@ async def async_load_platform( """ assert hass_config, "You need to pass in the real hass config" - if component in DEPENDENCY_BLACKLIST: - raise HomeAssistantError(f"Cannot discover the {component} component.") - setup_success = True if component not in hass.config.components: diff --git a/homeassistant/helpers/entity.py b/homeassistant/helpers/entity.py index b5d36f6a2f5..6a14c6e1ef7 100644 --- a/homeassistant/helpers/entity.py +++ b/homeassistant/helpers/entity.py @@ -5,7 +5,7 @@ from datetime import datetime, timedelta import functools as ft import logging from timeit import default_timer as timer -from typing import Any, Dict, Iterable, List, Optional, Union +from typing import Any, Awaitable, Dict, Iterable, List, Optional, Union from homeassistant.config import DATA_CUSTOMIZE from homeassistant.const import ( @@ -13,7 +13,6 @@ from homeassistant.const import ( ATTR_DEVICE_CLASS, ATTR_ENTITY_PICTURE, ATTR_FRIENDLY_NAME, - ATTR_HIDDEN, ATTR_ICON, ATTR_SUPPORTED_FEATURES, ATTR_UNIT_OF_MEASUREMENT, @@ -32,11 +31,10 @@ from homeassistant.helpers.entity_registry import ( EVENT_ENTITY_REGISTRY_UPDATED, RegistryEntry, ) +from homeassistant.helpers.event import Event from homeassistant.util import dt as dt_util, ensure_unique_string, slugify from homeassistant.util.async_ import run_callback_threadsafe -# mypy: allow-untyped-defs, no-check-untyped-defs - _LOGGER = logging.getLogger(__name__) SLOW_UPDATE_WARNING = 10 @@ -199,11 +197,6 @@ class Entity(ABC): """Return the entity picture to use in the frontend, if any.""" return None - @property - def hidden(self) -> bool: - """Return True if the entity should be hidden from UIs.""" - return False - @property def available(self) -> bool: """Return True if entity is available.""" @@ -258,7 +251,7 @@ class Entity(ABC): self._context = context self._context_set = dt_util.utcnow() - async def async_update_ha_state(self, force_refresh=False): + async def async_update_ha_state(self, force_refresh: bool = False) -> None: """Update Home Assistant with current state of entity. If force_refresh == True will update entity before setting state. @@ -294,14 +287,15 @@ class Entity(ABC): f"No entity id specified for entity {self.name}" ) - self._async_write_ha_state() # type: ignore + self._async_write_ha_state() @callback - def _async_write_ha_state(self): + def _async_write_ha_state(self) -> None: """Write the state to the state machine.""" if self.registry_entry and self.registry_entry.disabled_by: if not self._disabled_reported: self._disabled_reported = True + assert self.platform is not None _LOGGER.warning( "Entity %s is incorrectly being triggered for updates while it is disabled. 
This is a bug in the %s integration.", self.entity_id, @@ -317,9 +311,8 @@ class Entity(ABC): if not self.available: state = STATE_UNAVAILABLE else: - state = self.state - - state = STATE_UNKNOWN if state is None else str(state) + sstate = self.state + state = STATE_UNKNOWN if sstate is None else str(sstate) attr.update(self.state_attributes or {}) attr.update(self.device_state_attributes or {}) @@ -341,10 +334,6 @@ class Entity(ABC): if entity_picture is not None: attr[ATTR_ENTITY_PICTURE] = entity_picture - hidden = self.hidden - if hidden: - attr[ATTR_HIDDEN] = hidden - assumed_state = self.assumed_state if assumed_state: attr[ATTR_ASSUMED_STATE] = assumed_state @@ -383,6 +372,7 @@ class Entity(ABC): ) # Overwrite properties that have been set in the config file. + assert self.hass is not None if DATA_CUSTOMIZE in self.hass.data: attr.update(self.hass.data[DATA_CUSTOMIZE].get(self.entity_id)) @@ -403,7 +393,7 @@ class Entity(ABC): pass if ( - self._context is not None + self._context_set is not None and dt_util.utcnow() - self._context_set > self.context_recent_time ): self._context = None @@ -413,7 +403,7 @@ class Entity(ABC): self.entity_id, state, attr, self.force_update, self._context ) - def schedule_update_ha_state(self, force_refresh=False): + def schedule_update_ha_state(self, force_refresh: bool = False) -> None: """Schedule an update ha state change task. Scheduling the update avoids executor deadlocks. @@ -423,10 +413,11 @@ class Entity(ABC): If state is changed more than once before the ha state change task has been executed, the intermediate state transitions will be missed. """ - self.hass.add_job(self.async_update_ha_state(force_refresh)) + assert self.hass is not None + self.hass.add_job(self.async_update_ha_state(force_refresh)) # type: ignore @callback - def async_schedule_update_ha_state(self, force_refresh=False): + def async_schedule_update_ha_state(self, force_refresh: bool = False) -> None: """Schedule an update ha state change task. This method must be run in the event loop. @@ -438,11 +429,12 @@ class Entity(ABC): been executed, the intermediate state transitions will be missed. """ if force_refresh: + assert self.hass is not None self.hass.async_create_task(self.async_update_ha_state(force_refresh)) else: self.async_write_ha_state() - async def async_device_update(self, warning=True): + async def async_device_update(self, warning: bool = True) -> None: """Process 'update' or 'async_update' from entity. This method is a coroutine. @@ -455,6 +447,7 @@ class Entity(ABC): if self.parallel_updates: await self.parallel_updates.acquire() + assert self.hass is not None if warning: update_warn = self.hass.loop.call_later( SLOW_UPDATE_WARNING, @@ -467,9 +460,11 @@ class Entity(ABC): try: # pylint: disable=no-member if hasattr(self, "async_update"): - await self.async_update() + await self.async_update() # type: ignore elif hasattr(self, "update"): - await self.hass.async_add_executor_job(self.update) + await self.hass.async_add_executor_job( + self.update # type: ignore + ) finally: self._update_staged = False if warning: @@ -534,7 +529,7 @@ class Entity(ABC): Not to be extended by integrations. 
""" - async def _async_registry_updated(self, event): + async def _async_registry_updated(self, event: Event) -> None: """Handle entity registry update.""" data = event.data if data["action"] == "remove" and data["entity_id"] == self.entity_id: @@ -547,24 +542,28 @@ class Entity(ABC): ): return + assert self.hass is not None ent_reg = await self.hass.helpers.entity_registry.async_get_registry() old = self.registry_entry self.registry_entry = ent_reg.async_get(data["entity_id"]) + assert self.registry_entry is not None if self.registry_entry.disabled_by is not None: await self.async_remove() return + assert old is not None if self.registry_entry.entity_id == old.entity_id: self.async_write_ha_state() return await self.async_remove() + assert self.platform is not None self.entity_id = self.registry_entry.entity_id await self.platform.async_add_entities([self]) - def __eq__(self, other): + def __eq__(self, other: Any) -> bool: """Return the comparison.""" if not isinstance(other, self.__class__): return False @@ -587,8 +586,7 @@ class Entity(ABC): """Return the representation.""" return f"" - # call an requests - async def async_request_call(self, coro): + async def async_request_call(self, coro: Awaitable) -> None: """Process request batched.""" if self.parallel_updates: await self.parallel_updates.acquire() @@ -617,16 +615,18 @@ class ToggleEntity(Entity): """Turn the entity on.""" raise NotImplementedError() - async def async_turn_on(self, **kwargs): + async def async_turn_on(self, **kwargs: Any) -> None: """Turn the entity on.""" + assert self.hass is not None await self.hass.async_add_executor_job(ft.partial(self.turn_on, **kwargs)) def turn_off(self, **kwargs: Any) -> None: """Turn the entity off.""" raise NotImplementedError() - async def async_turn_off(self, **kwargs): + async def async_turn_off(self, **kwargs: Any) -> None: """Turn the entity off.""" + assert self.hass is not None await self.hass.async_add_executor_job(ft.partial(self.turn_off, **kwargs)) def toggle(self, **kwargs: Any) -> None: @@ -636,7 +636,7 @@ class ToggleEntity(Entity): else: self.turn_on(**kwargs) - async def async_toggle(self, **kwargs): + async def async_toggle(self, **kwargs: Any) -> None: """Toggle the entity.""" if self.is_on: await self.async_turn_off(**kwargs) diff --git a/homeassistant/helpers/entity_platform.py b/homeassistant/helpers/entity_platform.py index 5eb5b213732..717917f816d 100644 --- a/homeassistant/helpers/entity_platform.py +++ b/homeassistant/helpers/entity_platform.py @@ -319,6 +319,8 @@ class EntityPlatform: await entity.async_device_update(warning=False) except Exception: # pylint: disable=broad-except self.logger.exception("%s: Error on device update!", self.platform_name) + entity.hass = None + entity.platform = None return suggested_object_id = None @@ -391,6 +393,8 @@ class EntityPlatform: or entity.name or f'"{self.platform_name} {entity.unique_id}"', ) + entity.hass = None + entity.platform = None return # We won't generate an entity ID if the platform has already set one @@ -416,6 +420,8 @@ class EntityPlatform: # Make sure it is valid in case an entity set the value themselves if not valid_entity_id(entity.entity_id): + entity.hass = None + entity.platform = None raise HomeAssistantError(f"Invalid entity id: {entity.entity_id}") already_exists = entity.entity_id in self.entities @@ -431,6 +437,8 @@ class EntityPlatform: if entity.unique_id is not None: msg += f". 
Platform {self.platform_name} does not generate unique IDs" self.logger.error(msg) + entity.hass = None + entity.platform = None return entity_id = entity.entity_id @@ -542,7 +550,7 @@ class EntityPlatform: for entity in self.entities.values(): if not entity.should_poll: continue - tasks.append(entity.async_update_ha_state(True)) # type: ignore + tasks.append(entity.async_update_ha_state(True)) if tasks: await asyncio.gather(*tasks) diff --git a/homeassistant/helpers/entityfilter.py b/homeassistant/helpers/entityfilter.py index f8dd83ccfcc..dfcbbeb4cd0 100644 --- a/homeassistant/helpers/entityfilter.py +++ b/homeassistant/helpers/entityfilter.py @@ -1,16 +1,23 @@ """Helper class to implement include/exclude of entities and domains.""" -from typing import Callable, Dict, List +import fnmatch +import re +from typing import Callable, Dict, List, Pattern import voluptuous as vol +from homeassistant.const import CONF_DOMAINS, CONF_ENTITIES, CONF_EXCLUDE, CONF_INCLUDE from homeassistant.core import split_entity_id from homeassistant.helpers import config_validation as cv CONF_INCLUDE_DOMAINS = "include_domains" +CONF_INCLUDE_ENTITY_GLOBS = "include_entity_globs" CONF_INCLUDE_ENTITIES = "include_entities" CONF_EXCLUDE_DOMAINS = "exclude_domains" +CONF_EXCLUDE_ENTITY_GLOBS = "exclude_entity_globs" CONF_EXCLUDE_ENTITIES = "exclude_entities" +CONF_ENTITY_GLOBS = "entity_globs" + def convert_filter(config: Dict[str, List[str]]) -> Callable[[str], bool]: """Convert the filter schema into a filter.""" @@ -19,6 +26,8 @@ def convert_filter(config: Dict[str, List[str]]) -> Callable[[str], bool]: config[CONF_INCLUDE_ENTITIES], config[CONF_EXCLUDE_DOMAINS], config[CONF_EXCLUDE_ENTITIES], + config[CONF_INCLUDE_ENTITY_GLOBS], + config[CONF_EXCLUDE_ENTITY_GLOBS], ) setattr(filt, "config", config) setattr(filt, "empty_filter", sum(len(val) for val in config.values()) == 0) @@ -30,10 +39,16 @@ BASE_FILTER_SCHEMA = vol.Schema( vol.Optional(CONF_EXCLUDE_DOMAINS, default=[]): vol.All( cv.ensure_list, [cv.string] ), + vol.Optional(CONF_EXCLUDE_ENTITY_GLOBS, default=[]): vol.All( + cv.ensure_list, [cv.string] + ), vol.Optional(CONF_EXCLUDE_ENTITIES, default=[]): cv.entity_ids, vol.Optional(CONF_INCLUDE_DOMAINS, default=[]): vol.All( cv.ensure_list, [cv.string] ), + vol.Optional(CONF_INCLUDE_ENTITY_GLOBS, default=[]): vol.All( + cv.ensure_list, [cv.string] + ), vol.Optional(CONF_INCLUDE_ENTITIES, default=[]): cv.entity_ids, } ) @@ -41,20 +56,104 @@ BASE_FILTER_SCHEMA = vol.Schema( FILTER_SCHEMA = vol.All(BASE_FILTER_SCHEMA, convert_filter) +def convert_include_exclude_filter( + config: Dict[str, Dict[str, List[str]]] +) -> Callable[[str], bool]: + """Convert the include exclude filter schema into a filter.""" + include = config[CONF_INCLUDE] + exclude = config[CONF_EXCLUDE] + filt = convert_filter( + { + CONF_INCLUDE_DOMAINS: include[CONF_DOMAINS], + CONF_INCLUDE_ENTITY_GLOBS: include[CONF_ENTITY_GLOBS], + CONF_INCLUDE_ENTITIES: include[CONF_ENTITIES], + CONF_EXCLUDE_DOMAINS: exclude[CONF_DOMAINS], + CONF_EXCLUDE_ENTITY_GLOBS: exclude[CONF_ENTITY_GLOBS], + CONF_EXCLUDE_ENTITIES: exclude[CONF_ENTITIES], + } + ) + setattr(filt, "config", config) + return filt + + +INCLUDE_EXCLUDE_FILTER_SCHEMA_INNER = vol.Schema( + { + vol.Optional(CONF_DOMAINS, default=[]): vol.All(cv.ensure_list, [cv.string]), + vol.Optional(CONF_ENTITY_GLOBS, default=[]): vol.All( + cv.ensure_list, [cv.string] + ), + vol.Optional(CONF_ENTITIES, default=[]): cv.entity_ids, + } +) + +INCLUDE_EXCLUDE_BASE_FILTER_SCHEMA = vol.Schema( + { + vol.Optional( 
+ CONF_INCLUDE, default=INCLUDE_EXCLUDE_FILTER_SCHEMA_INNER({}) + ): INCLUDE_EXCLUDE_FILTER_SCHEMA_INNER, + vol.Optional( + CONF_EXCLUDE, default=INCLUDE_EXCLUDE_FILTER_SCHEMA_INNER({}) + ): INCLUDE_EXCLUDE_FILTER_SCHEMA_INNER, + } +) + +INCLUDE_EXCLUDE_FILTER_SCHEMA = vol.All( + INCLUDE_EXCLUDE_BASE_FILTER_SCHEMA, convert_include_exclude_filter +) + + +def _glob_to_re(glob: str) -> Pattern: + """Translate and compile glob string into pattern.""" + return re.compile(fnmatch.translate(glob)) + + +def _test_against_patterns(patterns: List[Pattern], entity_id: str) -> bool: + """Test entity against list of patterns, true if any match.""" + for pattern in patterns: + if pattern.match(entity_id): + return True + + return False + + +# It's safe since we don't modify it. And None causes typing warnings +# pylint: disable=dangerous-default-value def generate_filter( include_domains: List[str], include_entities: List[str], exclude_domains: List[str], exclude_entities: List[str], + include_entity_globs: List[str] = [], + exclude_entity_globs: List[str] = [], ) -> Callable[[str], bool]: """Return a function that will filter entities based on the args.""" include_d = set(include_domains) include_e = set(include_entities) exclude_d = set(exclude_domains) exclude_e = set(exclude_entities) + include_eg_set = set(include_entity_globs) + exclude_eg_set = set(exclude_entity_globs) + include_eg = list(map(_glob_to_re, include_eg_set)) + exclude_eg = list(map(_glob_to_re, exclude_eg_set)) - have_exclude = bool(exclude_e or exclude_d) - have_include = bool(include_e or include_d) + have_exclude = bool(exclude_e or exclude_d or exclude_eg) + have_include = bool(include_e or include_d or include_eg) + + def entity_included(domain: str, entity_id: str) -> bool: + """Return true if entity matches inclusion filters.""" + return ( + entity_id in include_e + or domain in include_d + or bool(include_eg and _test_against_patterns(include_eg, entity_id)) + ) + + def entity_excluded(domain: str, entity_id: str) -> bool: + """Return true if entity matches exclusion filters.""" + return ( + entity_id in exclude_e + or domain in exclude_d + or bool(exclude_eg and _test_against_patterns(exclude_eg, entity_id)) + ) # Case 1 - no includes or excludes - pass all entities if not have_include and not have_exclude: @@ -66,7 +165,7 @@ def generate_filter( def entity_filter_2(entity_id: str) -> bool: """Return filter function for case 2.""" domain = split_entity_id(entity_id)[0] - return entity_id in include_e or domain in include_d + return entity_included(domain, entity_id) return entity_filter_2 @@ -76,36 +175,50 @@ def generate_filter( def entity_filter_3(entity_id: str) -> bool: """Return filter function for case 3.""" domain = split_entity_id(entity_id)[0] - return entity_id not in exclude_e and domain not in exclude_d + return not entity_excluded(domain, entity_id) return entity_filter_3 # Case 4 - both includes and excludes specified - # Case 4a - include domain specified + # Case 4a - include domain or glob specified # - if domain is included, pass if entity not excluded - # - if domain is not included, pass if entity is included - # note: if both include and exclude domains specified, - # the exclude domains are ignored - if include_d: + # - if glob is included, pass if entity and domain not excluded + # - if domain and glob are not included, pass if entity is included + # note: if both include domain matches then exclude domains ignored. 
+ # If glob matches then exclude domains and glob checked + if include_d or include_eg: def entity_filter_4a(entity_id: str) -> bool: """Return filter function for case 4a.""" domain = split_entity_id(entity_id)[0] if domain in include_d: - return entity_id not in exclude_e + return not ( + entity_id in exclude_e + or bool( + exclude_eg and _test_against_patterns(exclude_eg, entity_id) + ) + ) + if _test_against_patterns(include_eg, entity_id): + return not entity_excluded(domain, entity_id) return entity_id in include_e return entity_filter_4a - # Case 4b - exclude domain specified - # - if domain is excluded, pass if entity is included - # - if domain is not excluded, pass if entity not excluded - if exclude_d: + # Case 4b - exclude domain or glob specified, include has no domain or glob + # In this one case the traditional include logic is inverted. Even though an + # include is specified since its only a list of entity IDs its used only to + # expose specific entities excluded by domain or glob. Any entities not + # excluded are then presumed included. Logic is as follows + # - if domain or glob is excluded, pass if entity is included + # - if domain is not excluded, pass if entity not excluded by ID + if exclude_d or exclude_eg: def entity_filter_4b(entity_id: str) -> bool: """Return filter function for case 4b.""" domain = split_entity_id(entity_id)[0] - if domain in exclude_d: + if domain in exclude_d or ( + exclude_eg and _test_against_patterns(exclude_eg, entity_id) + ): return entity_id in include_e return entity_id not in exclude_e @@ -113,8 +226,4 @@ def generate_filter( # Case 4c - neither include or exclude domain specified # - Only pass if entity is included. Ignore entity excludes. - def entity_filter_4c(entity_id: str) -> bool: - """Return filter function for case 4c.""" - return entity_id in include_e - - return entity_filter_4c + return lambda entity_id: entity_id in include_e diff --git a/homeassistant/helpers/integration_platform.py b/homeassistant/helpers/integration_platform.py index 01567c72c7b..93f3f3f7427 100644 --- a/homeassistant/helpers/integration_platform.py +++ b/homeassistant/helpers/integration_platform.py @@ -4,7 +4,7 @@ import logging from typing import Any, Awaitable, Callable from homeassistant.core import Event, HomeAssistant -from homeassistant.loader import IntegrationNotFound, async_get_integration, bind_hass +from homeassistant.loader import async_get_integration, bind_hass from homeassistant.setup import ATTR_COMPONENT, EVENT_COMPONENT_LOADED _LOGGER = logging.getLogger(__name__) @@ -21,10 +21,20 @@ async def async_process_integration_platforms( async def _process(component_name: str) -> None: """Process the intents of a component.""" + if "." 
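[editor's note] The new include_entity_globs/exclude_entity_globs options above compile each glob with fnmatch.translate and fold the result into the existing include/exclude precedence. A minimal usage sketch of generate_filter with globs follows; the entity IDs and glob patterns are invented for illustration, only the helper itself comes from the patch.

from homeassistant.helpers.entityfilter import generate_filter

entities_filter = generate_filter(
    include_domains=["light"],
    include_entities=["sensor.outdoor_temperature"],
    exclude_domains=[],
    exclude_entities=["light.garage"],
    include_entity_globs=["binary_sensor.*_occupancy"],
    exclude_entity_globs=["light.debug_*"],
)

assert entities_filter("light.kitchen")                     # included by domain
assert not entities_filter("light.garage")                  # excluded by entity ID
assert not entities_filter("light.debug_strip")             # domain included, but glob-excluded
assert entities_filter("binary_sensor.hallway_occupancy")   # included by glob
assert not entities_filter("binary_sensor.hallway_motion")  # matches no include rule

[end editor's note]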
in component_name: + return + + integration = await async_get_integration(hass, component_name) + try: - integration = await async_get_integration(hass, component_name) platform = integration.get_platform(platform_name) - except (IntegrationNotFound, ImportError): + except ImportError as err: + if f"{component_name}.{platform_name}" not in str(err): + _LOGGER.exception( + "Unexpected error importing %s/%s.py", + component_name, + platform_name, + ) return try: diff --git a/homeassistant/helpers/service.py b/homeassistant/helpers/service.py index af4bdb50fa4..2c4f02990bf 100644 --- a/homeassistant/helpers/service.py +++ b/homeassistant/helpers/service.py @@ -505,7 +505,7 @@ def async_register_admin_service( """Register a service that requires admin access.""" @wraps(service_func) - async def admin_handler(call): + async def admin_handler(call: ha.ServiceCall) -> None: if call.context.user_id: user = await hass.auth.async_get_user(call.context.user_id) if user is None: diff --git a/homeassistant/helpers/system_info.py b/homeassistant/helpers/system_info.py index a857858de1b..855b6153ba0 100644 --- a/homeassistant/helpers/system_info.py +++ b/homeassistant/helpers/system_info.py @@ -51,7 +51,7 @@ async def async_get_system_info(hass: HomeAssistantType) -> Dict: info_object["docker_version"] = info.get("docker") if info.get("hassos") is not None: - info_object["installation_type"] = "Home Assistant" + info_object["installation_type"] = "Home Assistant OS" else: info_object["installation_type"] = "Home Assistant Supervised" diff --git a/homeassistant/helpers/translation.py b/homeassistant/helpers/translation.py index d0fac953ac1..217f16d1841 100644 --- a/homeassistant/helpers/translation.py +++ b/homeassistant/helpers/translation.py @@ -20,12 +20,6 @@ _LOGGER = logging.getLogger(__name__) TRANSLATION_LOAD_LOCK = "translation_load_lock" TRANSLATION_FLATTEN_CACHE = "translation_flatten_cache" -MOVED_TRANSLATIONS_DIRECTORY_MSG = ( - "%s: the '.translations' directory has been moved, the new name is 'translations', " - "starting with Home Assistant 0.111 your translations will no longer " - "load if you do not move/rename this " -) - def recursive_flatten(prefix: Any, data: Dict) -> Dict[str, Any]: """Return a flattened representation of dict data.""" @@ -71,13 +65,8 @@ def component_translation_path( else: filename = f"{language}.json" - translation_legacy_path = integration.file_path / ".translations" translation_path = integration.file_path / "translations" - if translation_legacy_path.is_dir() and not translation_path.is_dir(): - _LOGGER.warning(MOVED_TRANSLATIONS_DIRECTORY_MSG, domain) - return str(translation_legacy_path / filename) - return str(translation_path / filename) diff --git a/homeassistant/loader.py b/homeassistant/loader.py index ed5545b3c28..315165bf27f 100644 --- a/homeassistant/loader.py +++ b/homeassistant/loader.py @@ -31,8 +31,6 @@ if TYPE_CHECKING: CALLABLE_T = TypeVar("CALLABLE_T", bound=Callable) # pylint: disable=invalid-name -DEPENDENCY_BLACKLIST = {"config"} - _LOGGER = logging.getLogger(__name__) DATA_COMPONENTS = "components" @@ -205,6 +203,14 @@ class Integration: self.file_path = file_path self.manifest = manifest manifest["is_built_in"] = self.is_built_in + + if self.dependencies: + self._all_dependencies_resolved: Optional[bool] = None + self._all_dependencies: Optional[Set[str]] = None + else: + self._all_dependencies_resolved = True + self._all_dependencies = set() + _LOGGER.info("Loaded %s from %s", self.domain, pkg_path) @property @@ -257,6 +263,49 @@ class 
Integration: """Test if package is a built-in integration.""" return self.pkg_path.startswith(PACKAGE_BUILTIN) + @property + def all_dependencies(self) -> Set[str]: + """Return all dependencies including sub-dependencies.""" + if self._all_dependencies is None: + raise RuntimeError("Dependencies not resolved!") + + return self._all_dependencies + + @property + def all_dependencies_resolved(self) -> bool: + """Return if all dependencies have been resolved.""" + return self._all_dependencies_resolved is not None + + async def resolve_dependencies(self) -> bool: + """Resolve all dependencies.""" + if self._all_dependencies_resolved is not None: + return self._all_dependencies_resolved + + try: + dependencies = await _async_component_dependencies( + self.hass, self.domain, self, set(), set() + ) + dependencies.discard(self.domain) + self._all_dependencies = dependencies + self._all_dependencies_resolved = True + except IntegrationNotFound as err: + _LOGGER.error( + "Unable to resolve dependencies for %s: we are unable to resolve (sub)dependency %s", + self.domain, + err.domain, + ) + self._all_dependencies_resolved = False + except CircularDependency as err: + _LOGGER.error( + "Unable to resolve dependencies for %s: it contains a circular dependency: %s -> %s", + self.domain, + err.from_domain, + err.to_domain, + ) + self._all_dependencies_resolved = False + + return self._all_dependencies_resolved + def get_component(self) -> ModuleType: """Return the component.""" cache = self.hass.data.setdefault(DATA_COMPONENTS, {}) @@ -490,23 +539,18 @@ def bind_hass(func: CALLABLE_T) -> CALLABLE_T: return func -async def async_component_dependencies(hass: "HomeAssistant", domain: str) -> Set[str]: - """Return all dependencies and subdependencies of components. - - Raises CircularDependency if a circular dependency is found. - """ - return await _async_component_dependencies(hass, domain, set(), set()) - - async def _async_component_dependencies( - hass: "HomeAssistant", domain: str, loaded: Set[str], loading: Set[str] + hass: "HomeAssistant", + start_domain: str, + integration: Integration, + loaded: Set[str], + loading: Set[str], ) -> Set[str]: """Recursive function to get component dependencies. Async friendly. 
""" - integration = await async_get_integration(hass, domain) - + domain = integration.domain loading.add(domain) for dependency_domain in integration.dependencies: @@ -518,11 +562,19 @@ async def _async_component_dependencies( if dependency_domain in loading: raise CircularDependency(domain, dependency_domain) - dep_loaded = await _async_component_dependencies( - hass, dependency_domain, loaded, loading - ) + loaded.add(dependency_domain) - loaded.update(dep_loaded) + dep_integration = await async_get_integration(hass, dependency_domain) + + if start_domain in dep_integration.after_dependencies: + raise CircularDependency(start_domain, dependency_domain) + + if dep_integration.dependencies: + dep_loaded = await _async_component_dependencies( + hass, start_domain, dep_integration, loaded, loading + ) + + loaded.update(dep_loaded) loaded.add(domain) loading.remove(domain) diff --git a/homeassistant/package_constraints.txt b/homeassistant/package_constraints.txt index 73c62bfab2c..8045e7b808b 100644 --- a/homeassistant/package_constraints.txt +++ b/homeassistant/package_constraints.txt @@ -11,16 +11,17 @@ ciso8601==2.1.3 cryptography==2.9.2 defusedxml==0.6.0 distro==1.5.0 -hass-nabucasa==0.34.6 -home-assistant-frontend==20200603.3 -importlib-metadata==1.6.0 +emoji==0.5.4 +hass-nabucasa==0.34.7 +home-assistant-frontend==20200701.0 +importlib-metadata==1.6.0;python_version<'3.8' jinja2>=2.11.1 -netdisco==2.7.0 +netdisco==2.7.1 pip>=8.0.3 python-slugify==4.0.0 pytz>=2020.1 pyyaml==5.3.1 -requests==2.23.0 +requests==2.24.0 ruamel.yaml==0.15.100 sqlalchemy==1.3.17 voluptuous-serialize==2.3.0 diff --git a/homeassistant/scripts/benchmark/__init__.py b/homeassistant/scripts/benchmark/__init__.py index 69de7970745..eaba6f52c02 100644 --- a/homeassistant/scripts/benchmark/__init__.py +++ b/homeassistant/scripts/benchmark/__init__.py @@ -1,8 +1,10 @@ """Script to run benchmarks.""" import argparse import asyncio +import collections from contextlib import suppress from datetime import datetime +import json import logging from timeit import default_timer as timer from typing import Callable, Dict, TypeVar @@ -10,6 +12,8 @@ from typing import Callable, Dict, TypeVar from homeassistant import core from homeassistant.components.websocket_api.const import JSON_DUMP from homeassistant.const import ATTR_NOW, EVENT_STATE_CHANGED, EVENT_TIME_CHANGED +from homeassistant.helpers.entityfilter import convert_include_exclude_filter +from homeassistant.helpers.json import JSONEncoder from homeassistant.util import dt as dt_util # mypy: allow-untyped-calls, allow-untyped-defs, no-check-untyped-defs @@ -169,21 +173,90 @@ async def _logbook_filtering(hass, last_changed, last_updated): "last_changed": last_changed, } - event = core.Event( - EVENT_STATE_CHANGED, - {"entity_id": entity_id, "old_state": old_state, "new_state": new_state}, + event = _create_state_changed_event_from_old_new( + entity_id, dt_util.utcnow(), old_state, new_state + ) + + entity_attr_cache = logbook.EntityAttributeCache(hass) + + entities_filter = convert_include_exclude_filter( + logbook.INCLUDE_EXCLUDE_BASE_FILTER_SCHEMA({}) ) def yield_events(event): - # pylint: disable=protected-access - entities_filter = logbook._generate_filter_from_config({}) for _ in range(10 ** 5): - if logbook._keep_event(hass, event, entities_filter): + # pylint: disable=protected-access + if logbook._keep_event(hass, event, entities_filter, entity_attr_cache): yield event start = timer() - list(logbook.humanify(hass, yield_events(event))) + list(logbook.humanify(hass, 
yield_events(event), entity_attr_cache)) + + return timer() - start + + +@benchmark +async def filtering_entity_id(hass): + """Run a 100k state changes through entity filter.""" + config = { + "include": { + "domains": [ + "automation", + "script", + "group", + "media_player", + "custom_component", + ], + "entity_globs": [ + "binary_sensor.*_contact", + "binary_sensor.*_occupancy", + "binary_sensor.*_detected", + "binary_sensor.*_active", + "input_*", + "device_tracker.*_phone", + "switch.*_light", + "binary_sensor.*_charging", + "binary_sensor.*_lock", + "binary_sensor.*_connected", + ], + "entities": [ + "test.entity_1", + "test.entity_2", + "binary_sensor.garage_door_open", + "test.entity_3", + "test.entity_4", + ], + }, + "exclude": { + "domains": ["input_number"], + "entity_globs": ["media_player.google_*", "group.all_*"], + "entities": [], + }, + } + + entity_ids = [ + "automation.home_arrival", + "script.shut_off_house", + "binary_sensor.garage_door_open", + "binary_sensor.front_door_lock", + "binary_sensor.kitchen_motion_sensor_occupancy", + "switch.desk_lamp", + "light.dining_room", + "input_boolean.guest_staying_over", + "person.eleanor_fant", + "alert.issue_at_home", + "calendar.eleanor_fant_s_calendar", + "sun.sun", + ] + + entities_filter = convert_include_exclude_filter(config) + size = len(entity_ids) + + start = timer() + + for i in range(10 ** 5): + entities_filter(entity_ids[i % size]) return timer() - start @@ -208,3 +281,48 @@ async def json_serialize_states(hass): start = timer() JSON_DUMP(states) return timer() - start + + +def _create_state_changed_event_from_old_new( + entity_id, event_time_fired, old_state, new_state +): + """Create a state changed event from a old and new state.""" + attributes = {} + if new_state is not None: + attributes = new_state.get("attributes") + attributes_json = json.dumps(attributes, cls=JSONEncoder) + if attributes_json == "null": + attributes_json = "{}" + row = collections.namedtuple( + "Row", + [ + "event_type" + "event_data" + "time_fired" + "context_id" + "context_user_id" + "state" + "entity_id" + "domain" + "attributes" + "state_id", + "old_state_id", + ], + ) + + row.event_type = EVENT_STATE_CHANGED + row.event_data = "{}" + row.attributes = attributes_json + row.time_fired = event_time_fired + row.state = new_state and new_state.get("state") + row.entity_id = entity_id + row.domain = entity_id and core.split_entity_id(entity_id)[0] + row.context_id = None + row.context_user_id = None + row.old_state_id = old_state and 1 + row.state_id = new_state and 1 + + # pylint: disable=import-outside-toplevel + from homeassistant.components import logbook + + return logbook.LazyEventPartialState(row) diff --git a/homeassistant/setup.py b/homeassistant/setup.py index 67d9200df61..dedcc27e44e 100644 --- a/homeassistant/setup.py +++ b/homeassistant/setup.py @@ -3,7 +3,7 @@ import asyncio import logging.handlers from timeit import default_timer as timer from types import ModuleType -from typing import Awaitable, Callable, List, Optional +from typing import Awaitable, Callable, Optional, Set from homeassistant import config as conf_util, core, loader, requirements from homeassistant.config import async_notify_setup_error @@ -16,14 +16,26 @@ _LOGGER = logging.getLogger(__name__) ATTR_COMPONENT = "component" +DATA_SETUP_DONE = "setup_done" DATA_SETUP_STARTED = "setup_started" DATA_SETUP = "setup_tasks" DATA_DEPS_REQS = "deps_reqs_processed" SLOW_SETUP_WARNING = 10 -# Since a pip install can run, we wait -# 30 minutes to timeout 
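[editor's note] The filtering_entity_id benchmark above builds its filter from the nested include/exclude schema via convert_include_exclude_filter. The same pair can be used directly outside the benchmark; a small sketch with invented config values, validating through the base schema first so missing keys receive their defaults.

from homeassistant.helpers.entityfilter import (
    INCLUDE_EXCLUDE_BASE_FILTER_SCHEMA,
    convert_include_exclude_filter,
)

config = INCLUDE_EXCLUDE_BASE_FILTER_SCHEMA(
    {
        "include": {"domains": ["light"], "entity_globs": ["binary_sensor.*_motion"]},
        "exclude": {"entities": ["light.driveway"]},
    }
)
entities_filter = convert_include_exclude_filter(config)

assert entities_filter("light.kitchen")
assert not entities_filter("light.driveway")
assert entities_filter("binary_sensor.porch_motion")
assert not entities_filter("switch.heater")  # not included by any rule

[end editor's note]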
-SLOW_SETUP_MAX_WAIT = 1800 + +# Since its possible for databases to be +# upwards of 36GiB (or larger) in the wild +# we wait up to 3 hours for startup +SLOW_SETUP_MAX_WAIT = 10800 + + +@core.callback +def async_set_domains_to_be_loaded(hass: core.HomeAssistant, domains: Set[str]) -> None: + """Set domains that are going to be loaded from the config. + + This will allow us to properly handle after_dependencies. + """ + hass.data[DATA_SETUP_DONE] = {domain: asyncio.Event() for domain in domains} def setup_component(hass: core.HomeAssistant, domain: str, config: ConfigType) -> bool: @@ -52,37 +64,43 @@ async def async_setup_component( _async_setup_component(hass, domain, config) ) - return await task # type: ignore + try: + return await task # type: ignore + finally: + if domain in hass.data.get(DATA_SETUP_DONE, {}): + hass.data[DATA_SETUP_DONE].pop(domain).set() async def _async_process_dependencies( - hass: core.HomeAssistant, config: ConfigType, name: str, dependencies: List[str] + hass: core.HomeAssistant, config: ConfigType, integration: loader.Integration ) -> bool: """Ensure all dependencies are set up.""" - blacklisted = [dep for dep in dependencies if dep in loader.DEPENDENCY_BLACKLIST] + tasks = { + dep: hass.loop.create_task(async_setup_component(hass, dep, config)) + for dep in integration.dependencies + } - if blacklisted and name not in ("default_config", "safe_mode"): - _LOGGER.error( - "Unable to set up dependencies of %s: " - "found blacklisted dependencies: %s", - name, - ", ".join(blacklisted), - ) - return False - - tasks = [async_setup_component(hass, dep, config) for dep in dependencies] + to_be_loaded = hass.data.get(DATA_SETUP_DONE, {}) + for dep in integration.after_dependencies: + if dep in to_be_loaded and dep not in hass.config.components: + tasks[dep] = hass.loop.create_task(to_be_loaded[dep].wait()) if not tasks: return True - results = await asyncio.gather(*tasks) + _LOGGER.debug("Dependency %s will wait for %s", integration.domain, list(tasks)) + results = await asyncio.gather(*tasks.values()) - failed = [dependencies[idx] for idx, res in enumerate(results) if not res] + failed = [ + domain + for idx, domain in enumerate(integration.dependencies) + if not results[idx] + ] if failed: _LOGGER.error( "Unable to set up dependencies of %s. 
Setup failed for dependencies: %s", - name, + integration.domain, ", ".join(failed), ) @@ -110,22 +128,7 @@ async def _async_setup_component( return False # Validate all dependencies exist and there are no circular dependencies - try: - await loader.async_component_dependencies(hass, domain) - except loader.IntegrationNotFound as err: - _LOGGER.error( - "Not setting up %s because we are unable to resolve (sub)dependency %s", - domain, - err.domain, - ) - return False - except loader.CircularDependency as err: - _LOGGER.error( - "Not setting up %s because it contains a circular dependency: %s -> %s", - domain, - err.from_domain, - err.to_domain, - ) + if not await integration.resolve_dependencies(): return False # Process requirements as soon as possible, so we can import the component @@ -312,9 +315,7 @@ async def async_process_deps_reqs( elif integration.domain in processed: return - if integration.dependencies and not await _async_process_dependencies( - hass, config, integration.domain, integration.dependencies - ): + if not await _async_process_dependencies(hass, config, integration): raise HomeAssistantError("Could not set up all dependencies.") if not hass.config.skip_pip and integration.requirements: diff --git a/homeassistant/util/package.py b/homeassistant/util/package.py index 9a5ae82d4a2..a665fd78914 100644 --- a/homeassistant/util/package.py +++ b/homeassistant/util/package.py @@ -8,9 +8,19 @@ import sys from typing import Optional from urllib.parse import urlparse -from importlib_metadata import PackageNotFoundError, version import pkg_resources +if sys.version_info[:2] >= (3, 8): + from importlib.metadata import ( # pylint: disable=no-name-in-module,import-error + PackageNotFoundError, + version, + ) +else: + from importlib_metadata import ( # pylint: disable=import-error + PackageNotFoundError, + version, + ) + _LOGGER = logging.getLogger(__name__) diff --git a/requirements_all.txt b/requirements_all.txt index a0adb50f656..3dce8223d59 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -6,7 +6,7 @@ attrs==19.3.0 bcrypt==3.1.7 certifi>=2020.4.5.1 ciso8601==2.1.3 -importlib-metadata==1.6.0 +importlib-metadata==1.6.0;python_version<'3.8' jinja2>=2.11.1 PyJWT==1.7.1 cryptography==2.9.2 @@ -14,7 +14,7 @@ pip>=8.0.3 python-slugify==4.0.0 pytz>=2020.1 pyyaml==5.3.1 -requests==2.23.0 +requests==2.24.0 ruamel.yaml==0.15.100 voluptuous==0.11.7 voluptuous-serialize==2.3.0 @@ -44,7 +44,7 @@ Mastodon.py==1.5.1 OPi.GPIO==0.4.0 # homeassistant.components.plugwise -Plugwise_Smile==0.2.13 +Plugwise_Smile==1.1.0 # homeassistant.components.essent PyEssent==0.13 @@ -85,7 +85,7 @@ PyTransportNSW==0.1.1 PyTurboJPEG==1.4.0 # homeassistant.components.vicare -PyViCare==0.1.10 +PyViCare==0.2.0 # homeassistant.components.xiaomi_aqara PyXiaomiGateway==0.12.4 @@ -131,6 +131,9 @@ adafruit-circuitpython-mcp230xx==2.2.2 # homeassistant.components.androidtv adb-shell==0.1.3 +# homeassistant.components.alarmdecoder +adext==0.3 + # homeassistant.components.adguard adguardhome==0.4.2 @@ -178,7 +181,7 @@ aioftp==0.12.0 aioguardian==0.2.3 # homeassistant.components.harmony -aioharmony==0.1.13 +aioharmony==0.2.5 # homeassistant.components.homekit_controller aiohomekit[IP]==0.2.38 @@ -197,7 +200,7 @@ aioimaplib==0.7.15 aiokafka==0.5.1 # homeassistant.components.kef -aiokef==0.2.10 +aiokef==0.2.12 # homeassistant.components.lifx aiolifx==0.6.7 @@ -235,9 +238,6 @@ airly==0.0.2 # homeassistant.components.aladdin_connect aladdin_connect==0.3 -# homeassistant.components.alarmdecoder -alarmdecoder==1.13.2 - 
# homeassistant.components.alpha_vantage alpha_vantage==2.2.0 @@ -248,7 +248,7 @@ ambiclimate==0.2.1 amcrest==1.7.0 # homeassistant.components.androidtv -androidtv==0.0.41 +androidtv==0.0.43 # homeassistant.components.anel_pwrctrl anel_pwrctrl-homeassistant==0.0.1.dev2 @@ -272,7 +272,7 @@ aprslib==0.6.46 aqualogic==1.0 # homeassistant.components.arcam_fmj -arcam-fmj==0.4.6 +arcam-fmj==0.5.1 # homeassistant.components.arris_tg2492lg arris-tg2492lg==1.0.0 @@ -306,7 +306,7 @@ avea==1.4 avri-api==0.1.7 # homeassistant.components.axis -axis==31 +axis==33 # homeassistant.components.azure_event_hub azure-eventhub==5.1.0 @@ -336,10 +336,10 @@ beautifulsoup4==4.9.0 beewi_smartclim==0.0.7 # homeassistant.components.zha -bellows==0.16.2 +bellows==0.17.0 # homeassistant.components.bmw_connected_drive -bimmer_connected==0.7.5 +bimmer_connected==0.7.7 # homeassistant.components.bizkaibus bizkaibus==0.1.1 @@ -456,6 +456,9 @@ datadog==0.15.0 # homeassistant.components.metoffice datapoint==0.9.5 +# homeassistant.components.debugpy +debugpy==1.0.0b11 + # homeassistant.components.decora # decora==0.6 @@ -472,7 +475,7 @@ defusedxml==0.6.0 deluge-client==1.7.1 # homeassistant.components.denonavr -denonavr==0.8.1 +denonavr==0.9.3 # homeassistant.components.devolo_home_control devolo-home-control-api==0.11.0 @@ -505,7 +508,7 @@ dsmr_parser==0.18 dweepy==0.3.0 # homeassistant.components.dynalite -dynalite_devices==0.1.40 +dynalite_devices==0.1.41 # homeassistant.components.rainforest_eagle eagle200_reader==0.2.4 @@ -528,6 +531,9 @@ eliqonline==1.2.2 # homeassistant.components.elkm1 elkm1-lib==0.7.18 +# homeassistant.components.mobile_app +emoji==0.5.4 + # homeassistant.components.emulated_roku emulated_roku==0.2.1 @@ -538,7 +544,7 @@ enocean==0.50 enturclient==0.2.1 # homeassistant.components.environment_canada -env_canada==0.0.38 +env_canada==0.0.39 # homeassistant.components.envirophat # envirophat==0.0.6 @@ -634,6 +640,7 @@ georss_ign_sismologia_client==0.2 # homeassistant.components.qld_bushfire georss_qld_bushfire_alert_client==0.3 +# homeassistant.components.denonavr # homeassistant.components.huawei_lte # homeassistant.components.kef # homeassistant.components.minecraft_server @@ -686,7 +693,7 @@ greenwavereality==0.5.1 griddypower==0.1.0 # homeassistant.components.growatt_server -growattServer==0.0.1 +growattServer==0.0.4 # homeassistant.components.gstreamer gstreamer-player==1.1.2 @@ -704,10 +711,7 @@ habitipy==0.2.0 hangups==0.4.9 # homeassistant.components.cloud -hass-nabucasa==0.34.6 - -# homeassistant.components.mqtt -hbmqtt==0.9.5 +hass-nabucasa==0.34.7 # homeassistant.components.jewish_calendar hdate==0.9.5 @@ -734,7 +738,7 @@ hole==0.5.1 holidays==0.10.2 # homeassistant.components.frontend -home-assistant-frontend==20200603.3 +home-assistant-frontend==20200701.0 # homeassistant.components.zwave homeassistant-pyozw==0.1.10 @@ -743,7 +747,7 @@ homeassistant-pyozw==0.1.10 homeconnect==0.5 # homeassistant.components.homematicip_cloud -homematicip==0.10.17 +homematicip==0.10.18 # homeassistant.components.horizon horimote==0.4.1 @@ -756,7 +760,7 @@ httplib2==0.10.3 huawei-lte-api==1.4.12 # homeassistant.components.hydrawise -hydrawiser==0.1.1 +hydrawiser==0.2 # homeassistant.components.bh1750 # homeassistant.components.bme280 @@ -779,11 +783,14 @@ ibmiotf==0.3.4 iglo==1.2.7 # homeassistant.components.ihc -ihcsdk==2.6.0 +ihcsdk==2.7.0 # homeassistant.components.incomfort incomfort-client==0.4.0 +# homeassistant.components.influxdb +influxdb-client==1.6.0 + # homeassistant.components.influxdb 
influxdb==5.2.3 @@ -942,11 +949,11 @@ ndms2_client==0.0.11 nessclient==0.9.15 # homeassistant.components.netdata -netdata==0.1.2 +netdata==0.2.0 # homeassistant.components.discovery # homeassistant.components.ssdp -netdisco==2.7.0 +netdisco==2.7.1 # homeassistant.components.neurio_energy neurio==0.3.1 @@ -963,6 +970,9 @@ niko-home-control==0.2.1 # homeassistant.components.nilu niluclient==0.1.2 +# homeassistant.components.notify_events +notify-events==1.0.4 + # homeassistant.components.nederlandse_spoorwegen nsapi==3.0.4 @@ -979,7 +989,7 @@ numato-gpio==0.7.1 # homeassistant.components.opencv # homeassistant.components.tensorflow # homeassistant.components.trend -numpy==1.18.4 +numpy==1.19.0 # homeassistant.components.oasa_telematics oasatelematics==0.3 @@ -1084,7 +1094,7 @@ plexapi==4.0.0 plexauth==0.0.5 # homeassistant.components.plex -plexwebsocket==0.0.10 +plexwebsocket==0.0.11 # homeassistant.components.plum_lightpad plumlightpad==0.0.11 @@ -1121,7 +1131,7 @@ proxmoxer==1.1.0 psutil==5.7.0 # homeassistant.components.ptvsd -ptvsd==4.2.8 +ptvsd==4.3.2 # homeassistant.components.wink pubnubsub-handler==1.0.8 @@ -1197,7 +1207,7 @@ py_nextbusnext==0.1.4 pyads==3.0.7 # homeassistant.components.hisense_aehw4a1 -pyaehw4a1==0.3.4 +pyaehw4a1==0.3.5 # homeassistant.components.aftership pyaftership==0.1.2 @@ -1256,6 +1266,9 @@ pycomfoconnect==0.3 # homeassistant.components.coolmaster pycoolmasternet==0.0.4 +# homeassistant.components.avri +pycountry==19.8.18 + # homeassistant.components.microsoft pycsspeechtts==1.0.3 @@ -1263,7 +1276,7 @@ pycsspeechtts==1.0.3 # pycups==1.9.73 # homeassistant.components.daikin -pydaikin==2.1.2 +pydaikin==2.2.0 # homeassistant.components.danfoss_air pydanfossair==0.1.0 @@ -1310,9 +1323,6 @@ pyeverlights==0.1.0 # homeassistant.components.ezviz pyezviz==0.1.5 -# homeassistant.components.fortigate -pyfgt==0.5.1 - # homeassistant.components.fido pyfido==2.1.1 @@ -1350,6 +1360,9 @@ pygatt[GATTTOOL]==4.0.5 # homeassistant.components.gtfs pygtfs==0.1.5 +# homeassistant.components.hvv_departures +pygti==0.6.0 + # homeassistant.components.version pyhaversion==3.3.0 @@ -1375,10 +1388,10 @@ pyialarm==0.3 pyicloud==0.9.7 # homeassistant.components.insteon -pyinsteon==1.0.4 +pyinsteon==1.0.5 # homeassistant.components.intesishome -pyintesishome==1.7.4 +pyintesishome==1.7.5 # homeassistant.components.ipma pyipma==2.0.5 @@ -1468,7 +1481,7 @@ pymsteams==0.1.12 pymusiccast==0.1.6 # homeassistant.components.myq -pymyq==2.0.4 +pymyq==2.0.5 # homeassistant.components.mysensors pymysensors==0.18.0 @@ -1495,7 +1508,7 @@ pynut2==2.1.2 pynws==0.10.4 # homeassistant.components.nx584 -pynx584==0.4 +pynx584==0.5 # homeassistant.components.nzbget pynzbgetapi==0.2.0 @@ -1545,7 +1558,7 @@ pypjlink2==1.2.1 pypoint==1.1.2 # homeassistant.components.ps4 -pyps4-2ndscreen==1.0.7 +pyps4-2ndscreen==1.1.0 # homeassistant.components.qvr_pro pyqvrpro==0.52 @@ -1596,6 +1609,9 @@ pysignalclirestapi==0.3.4 # homeassistant.components.sma pysma==0.3.5 +# homeassistant.components.smappee +pysmappee==0.1.0 + # homeassistant.components.smartthings pysmartapp==0.3.2 @@ -1621,7 +1637,7 @@ pysonos==0.0.31 pyspcwebgw==0.4.0 # homeassistant.components.squeezebox -pysqueezebox==0.2.1 +pysqueezebox==0.2.4 # homeassistant.components.stiebel_eltron pystiebeleltron==0.0.1.dev2 @@ -1657,7 +1673,7 @@ python-clementine-remote==1.0.1 python-digitalocean==1.13.2 # homeassistant.components.ecobee -python-ecobee-api==0.2.5 +python-ecobee-api==0.2.7 # homeassistant.components.eq3btsmart # python-eq3bt==0.1.11 @@ -1672,7 +1688,7 
@@ python-family-hub-local==0.0.2 python-forecastio==1.4.0 # homeassistant.components.sms -# python-gammu==2.12 +# python-gammu==3.0 # homeassistant.components.gc100 python-gc100==1.0.3a @@ -1696,7 +1712,7 @@ python-juicenet==1.0.1 # python-lirc==1.2.3 # homeassistant.components.xiaomi_miio -python-miio==0.5.0.1 +python-miio==0.5.1 # homeassistant.components.mpd python-mpd2==1.0.0 @@ -1726,7 +1742,7 @@ python-sochain-api==0.0.2 python-songpal==0.12 # homeassistant.components.synology_dsm -python-synology==0.8.1 +python-synology==0.8.2 # homeassistant.components.tado python-tado==0.8.1 @@ -1753,7 +1769,7 @@ python-whois==0.7.2 python-wink==1.10.5 # homeassistant.components.awair -python_awair==0.0.4 +python_awair==0.1.1 # homeassistant.components.swiss_public_transport python_opendata_transport==0.2.1 @@ -1762,7 +1778,7 @@ python_opendata_transport==0.2.1 pythonegardia==1.0.40 # homeassistant.components.tile -pytile==3.0.1 +pytile==3.0.6 # homeassistant.components.touchline pytouchline==0.7 @@ -1790,7 +1806,7 @@ pyuptimerobot==0.0.5 # pyuserinput==0.1.11 # homeassistant.components.vera -pyvera==0.3.7 +pyvera==0.3.9 # homeassistant.components.versasense pyversasense==0.0.6 @@ -1871,7 +1887,7 @@ rjpl==0.3.5 rocketchat-API==0.6.1 # homeassistant.components.roku -rokuecp==0.4.2 +rokuecp==0.5.0 # homeassistant.components.roomba roombapy==1.6.1 @@ -1951,9 +1967,6 @@ sleepyq==0.7 # homeassistant.components.xmpp slixmpp==1.5.1 -# homeassistant.components.smappee -smappy==0.2.16 - # homeassistant.components.smarthab smarthab==0.20 @@ -1981,7 +1994,7 @@ solaredge-local==0.2.0 solaredge==0.0.2 # homeassistant.components.solax -solax==0.2.2 +solax==0.2.3 # homeassistant.components.honeywell somecomfort==0.5.2 @@ -2078,10 +2091,10 @@ temperusb==1.5.3 # tensorflow==1.13.2 # homeassistant.components.powerwall -tesla-powerwall==0.2.10 +tesla-powerwall==0.2.11 # homeassistant.components.tesla -teslajsonpy==0.8.1 +teslajsonpy==0.9.0 # homeassistant.components.thermoworks_smoke thermoworks_smoke==0.1.8 @@ -2099,7 +2112,7 @@ tmb==0.0.4 todoist-python==8.0.0 # homeassistant.components.toon -toonapilib==3.2.4 +toonapi==0.1.0 # homeassistant.components.totalconnect total_connect_client==0.55 @@ -2192,7 +2205,7 @@ wiffi==1.0.0 wirelesstagpy==0.4.0 # homeassistant.components.withings -withings-api==2.1.3 +withings-api==2.1.6 # homeassistant.components.wled wled==0.4.3 @@ -2218,7 +2231,7 @@ xknx==0.11.3 xmltodict==0.12.0 # homeassistant.components.xs1 -xs1-api-client==2.3.5 +xs1-api-client==3.0.0 # homeassistant.components.yandex_transport ya_ma==0.3.8 @@ -2233,7 +2246,7 @@ yeelight==0.5.2 yeelightsunflower==0.0.10 # homeassistant.components.media_extractor -youtube_dl==2020.05.29 +youtube_dl==2020.06.16.1 # homeassistant.components.zengge zengge==0.2 @@ -2242,7 +2255,7 @@ zengge==0.2 zeroconf==0.27.1 # homeassistant.components.zha -zha-quirks==0.0.40 +zha-quirks==0.0.41 # homeassistant.components.zhong_hong zhong_hong_hvac==1.0.9 @@ -2263,7 +2276,7 @@ zigpy-xbee==0.12.1 zigpy-zigate==0.6.1 # homeassistant.components.zha -zigpy==0.20.4 +zigpy==0.21.0 # homeassistant.components.zoneminder zm-py==0.4.0 diff --git a/requirements_test.txt b/requirements_test.txt index 60d085752ed..7adc9c936e7 100644 --- a/requirements_test.txt +++ b/requirements_test.txt @@ -7,13 +7,13 @@ asynctest==0.13.0 codecov==2.1.0 coverage==5.1 mock-open==1.4.0 -mypy==0.770 -pre-commit==2.4.0 +mypy==0.780 +pre-commit==2.5.1 pylint==2.4.4 astroid==2.3.3 pylint-strict-informational==0.1 pytest-aiohttp==0.3.0 -pytest-cov==2.8.1 +pytest-cov==2.10.0 
pytest-sugar==0.9.3 pytest-timeout==1.3.4 pytest==5.4.3 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index c932bc872d1..6a755a2db01 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -7,7 +7,7 @@ HAP-python==2.9.1 # homeassistant.components.plugwise -Plugwise_Smile==0.2.13 +Plugwise_Smile==1.1.0 # homeassistant.components.flick_electric PyFlick==0.0.2 @@ -29,6 +29,9 @@ PyTransportNSW==0.1.1 # homeassistant.components.homekit PyTurboJPEG==1.4.0 +# homeassistant.components.xiaomi_aqara +PyXiaomiGateway==0.12.4 + # homeassistant.components.remember_the_milk RtmAPI==0.7.2 @@ -85,7 +88,7 @@ aiofreepybox==0.0.8 aioguardian==0.2.3 # homeassistant.components.harmony -aioharmony==0.1.13 +aioharmony==0.2.5 # homeassistant.components.homekit_controller aiohomekit[IP]==0.2.38 @@ -97,6 +100,9 @@ aiohttp_cors==0.7.0 # homeassistant.components.hue aiohue==2.1.0 +# homeassistant.components.apache_kafka +aiokafka==0.5.1 + # homeassistant.components.notion aionotion==1.1.0 @@ -125,7 +131,7 @@ airly==0.0.2 ambiclimate==0.2.1 # homeassistant.components.androidtv -androidtv==0.0.41 +androidtv==0.0.43 # homeassistant.components.apns apns2==0.3.0 @@ -137,7 +143,7 @@ apprise==0.8.5 aprslib==0.6.46 # homeassistant.components.arcam_fmj -arcam-fmj==0.4.6 +arcam-fmj==0.5.1 # homeassistant.components.dlna_dmr # homeassistant.components.upnp @@ -146,18 +152,27 @@ async-upnp-client==0.14.13 # homeassistant.components.stream av==8.0.2 +# homeassistant.components.avri +avri-api==0.1.7 + # homeassistant.components.axis -axis==31 +axis==33 + +# homeassistant.components.azure_event_hub +azure-eventhub==5.1.0 # homeassistant.components.homekit base36==0.1.1 # homeassistant.components.zha -bellows==0.16.2 +bellows==0.17.0 # homeassistant.components.blebox blebox_uniapi==1.3.2 +# homeassistant.components.blink +blinkpy==0.15.0 + # homeassistant.components.bom bomradarloop==0.1.4 @@ -199,6 +214,12 @@ coronavirus==1.1.1 # homeassistant.components.datadog datadog==0.15.0 +# homeassistant.components.metoffice +datapoint==0.9.5 + +# homeassistant.components.debugpy +debugpy==1.0.0b11 + # homeassistant.components.ihc # homeassistant.components.namecheapdns # homeassistant.components.ohmconnect @@ -206,7 +227,7 @@ datadog==0.15.0 defusedxml==0.6.0 # homeassistant.components.denonavr -denonavr==0.8.1 +denonavr==0.9.3 # homeassistant.components.devolo_home_control devolo-home-control-api==0.11.0 @@ -224,7 +245,7 @@ doorbirdpy==2.0.8 dsmr_parser==0.18 # homeassistant.components.dynalite -dynalite_devices==0.1.40 +dynalite_devices==0.1.41 # homeassistant.components.ee_brightbox eebrightbox==0.0.4 @@ -235,6 +256,9 @@ elgato==0.2.0 # homeassistant.components.elkm1 elkm1-lib==0.7.18 +# homeassistant.components.mobile_app +emoji==0.5.4 + # homeassistant.components.emulated_roku emulated_roku==0.2.1 @@ -272,6 +296,7 @@ georss_ign_sismologia_client==0.2 # homeassistant.components.qld_bushfire georss_qld_bushfire_alert_client==0.3 +# homeassistant.components.denonavr # homeassistant.components.huawei_lte # homeassistant.components.kef # homeassistant.components.minecraft_server @@ -303,10 +328,7 @@ ha-ffmpeg==2.0 hangups==0.4.9 # homeassistant.components.cloud -hass-nabucasa==0.34.6 - -# homeassistant.components.mqtt -hbmqtt==0.9.5 +hass-nabucasa==0.34.7 # homeassistant.components.jewish_calendar hdate==0.9.5 @@ -321,7 +343,7 @@ hole==0.5.1 holidays==0.10.2 # homeassistant.components.frontend -home-assistant-frontend==20200603.3 +home-assistant-frontend==20200701.0 # homeassistant.components.zwave 
homeassistant-pyozw==0.1.10 @@ -330,7 +352,7 @@ homeassistant-pyozw==0.1.10 homeconnect==0.5 # homeassistant.components.homematicip_cloud -homematicip==0.10.17 +homematicip==0.10.18 # homeassistant.components.google # homeassistant.components.remember_the_milk @@ -342,6 +364,9 @@ huawei-lte-api==1.4.12 # homeassistant.components.iaqualink iaqualink==0.3.4 +# homeassistant.components.influxdb +influxdb-client==1.6.0 + # homeassistant.components.influxdb influxdb==5.2.3 @@ -399,7 +424,7 @@ nessclient==0.9.15 # homeassistant.components.discovery # homeassistant.components.ssdp -netdisco==2.7.0 +netdisco==2.7.1 # homeassistant.components.nexia nexia==0.9.3 @@ -417,7 +442,7 @@ numato-gpio==0.7.1 # homeassistant.components.opencv # homeassistant.components.tensorflow # homeassistant.components.trend -numpy==1.18.4 +numpy==1.19.0 # homeassistant.components.google oauth2client==4.0.0 @@ -462,7 +487,7 @@ plexapi==4.0.0 plexauth==0.0.5 # homeassistant.components.plex -plexwebsocket==0.0.10 +plexwebsocket==0.0.11 # homeassistant.components.mhz19 # homeassistant.components.serial_pm @@ -478,7 +503,7 @@ prayer_times_calculator==0.0.3 prometheus_client==0.7.1 # homeassistant.components.ptvsd -ptvsd==4.2.8 +ptvsd==4.3.2 # homeassistant.components.androidtv pure-python-adb==0.2.2.dev0 @@ -515,7 +540,7 @@ pyTibber==0.14.0 py_nextbusnext==0.1.4 # homeassistant.components.hisense_aehw4a1 -pyaehw4a1==0.3.4 +pyaehw4a1==0.3.5 # homeassistant.components.airvisual pyairvisual==4.4.0 @@ -544,8 +569,11 @@ pychromecast==6.0.0 # homeassistant.components.coolmaster pycoolmasternet==0.0.4 +# homeassistant.components.avri +pycountry==19.8.18 + # homeassistant.components.daikin -pydaikin==2.1.2 +pydaikin==2.2.0 # homeassistant.components.deconz pydeconz==71 @@ -578,6 +606,9 @@ pyfttt==0.3 # homeassistant.components.skybeacon pygatt[GATTTOOL]==4.0.5 +# homeassistant.components.hvv_departures +pygti==0.6.0 + # homeassistant.components.version pyhaversion==3.3.0 @@ -639,7 +670,7 @@ pymodbus==2.3.0 pymonoprice==0.3 # homeassistant.components.myq -pymyq==2.0.4 +pymyq==2.0.5 # homeassistant.components.nut pynut2==2.1.2 @@ -648,7 +679,7 @@ pynut2==2.1.2 pynws==0.10.4 # homeassistant.components.nx584 -pynx584==0.4 +pynx584==0.5 # homeassistant.components.openuv pyopenuv==1.0.9 @@ -668,7 +699,7 @@ pyotp==2.3.0 pypoint==1.1.2 # homeassistant.components.ps4 -pyps4-2ndscreen==1.0.7 +pyps4-2ndscreen==1.1.0 # homeassistant.components.qwikswitch pyqwikswitch==0.93 @@ -683,6 +714,9 @@ pysignalclirestapi==0.3.4 # homeassistant.components.sma pysma==0.3.5 +# homeassistant.components.smappee +pysmappee==0.1.0 + # homeassistant.components.smartthings pysmartapp==0.3.2 @@ -698,8 +732,11 @@ pysonos==0.0.31 # homeassistant.components.spc pyspcwebgw==0.4.0 +# homeassistant.components.squeezebox +pysqueezebox==0.2.4 + # homeassistant.components.ecobee -python-ecobee-api==0.2.5 +python-ecobee-api==0.2.7 # homeassistant.components.darksky python-forecastio==1.4.0 @@ -711,7 +748,7 @@ python-izone==1.1.2 python-juicenet==1.0.1 # homeassistant.components.xiaomi_miio -python-miio==0.5.0.1 +python-miio==0.5.1 # homeassistant.components.nest python-nest==4.1.0 @@ -723,7 +760,7 @@ python-openzwave-mqtt==1.0.2 python-songpal==0.12 # homeassistant.components.synology_dsm -python-synology==0.8.1 +python-synology==0.8.2 # homeassistant.components.tado python-tado==0.8.1 @@ -735,7 +772,10 @@ python-twitch-client==0.6.0 python-velbus==2.0.43 # homeassistant.components.awair -python_awair==0.0.4 +python_awair==0.1.1 + +# homeassistant.components.tile 
+pytile==3.0.6 # homeassistant.components.traccar pytraccar==0.9.0 @@ -744,7 +784,7 @@ pytraccar==0.9.0 pytradfri[async]==6.4.0 # homeassistant.components.vera -pyvera==0.3.7 +pyvera==0.3.9 # homeassistant.components.vesync pyvesync==1.1.0 @@ -774,7 +814,7 @@ rflink==0.0.52 ring_doorbell==0.6.0 # homeassistant.components.roku -rokuecp==0.4.2 +rokuecp==0.5.0 # homeassistant.components.roomba roombapy==1.6.1 @@ -818,6 +858,9 @@ sonarr==0.2.2 # homeassistant.components.marytts speak2mary==1.4.0 +# homeassistant.components.speedtestdotnet +speedtest-cli==2.1.2 + # homeassistant.components.spotify spotipy==2.12.0 @@ -844,13 +887,13 @@ sunwatcher==0.2.1 tellduslive==0.10.11 # homeassistant.components.powerwall -tesla-powerwall==0.2.10 +tesla-powerwall==0.2.11 # homeassistant.components.tesla -teslajsonpy==0.8.1 +teslajsonpy==0.9.0 # homeassistant.components.toon -toonapilib==3.2.4 +toonapi==0.1.0 # homeassistant.components.totalconnect total_connect_client==0.55 @@ -898,7 +941,7 @@ watchdog==0.8.3 wiffi==1.0.0 # homeassistant.components.withings -withings-api==2.1.3 +withings-api==2.1.6 # homeassistant.components.wled wled==0.4.3 @@ -918,7 +961,7 @@ ya_ma==0.3.8 zeroconf==0.27.1 # homeassistant.components.zha -zha-quirks==0.0.40 +zha-quirks==0.0.41 # homeassistant.components.zha zigpy-cc==0.4.4 @@ -933,4 +976,4 @@ zigpy-xbee==0.12.1 zigpy-zigate==0.6.1 # homeassistant.components.zha -zigpy==0.20.4 +zigpy==0.21.0 diff --git a/script/hassfest/config_flow.py b/script/hassfest/config_flow.py index 44f2b2d59ae..1d69504ff8a 100644 --- a/script/hassfest/config_flow.py +++ b/script/hassfest/config_flow.py @@ -2,8 +2,6 @@ import json from typing import Dict -from homeassistant.requirements import DISCOVERY_INTEGRATIONS - from .model import Config, Integration BASE = """ @@ -17,7 +15,7 @@ To update, run python3 -m script.hassfest FLOWS = {} """.strip() -UNIQUE_ID_IGNORE = {"esphome", "fritzbox", "heos", "huawei_lte"} +UNIQUE_ID_IGNORE = {"huawei_lte", "mqtt", "adguard"} def validate_integration(config: Config, integration: Integration): @@ -25,26 +23,46 @@ def validate_integration(config: Config, integration: Integration): config_flow_file = integration.path / "config_flow.py" if not config_flow_file.is_file(): - integration.add_error( - "config_flow", "Config flows need to be defined in the file config_flow.py" - ) + if integration.get("config_flow"): + integration.add_error( + "config_flow", + "Config flows need to be defined in the file config_flow.py", + ) + if integration.get("homekit"): + integration.add_error( + "config_flow", + "HomeKit information in a manifest requires a config flow to exist", + ) + if integration.get("ssdp"): + integration.add_error( + "config_flow", + "SSDP information in a manifest requires a config flow to exist", + ) + if integration.get("zeroconf"): + integration.add_error( + "config_flow", + "Zeroconf information in a manifest requires a config flow to exist", + ) return - needs_unique_id = integration.domain not in UNIQUE_ID_IGNORE and any( - bool(integration.manifest.get(key)) - for keys in DISCOVERY_INTEGRATIONS.values() - for key in keys + config_flow = config_flow_file.read_text() + + needs_unique_id = integration.domain not in UNIQUE_ID_IGNORE and ( + "async_step_discovery" in config_flow + or "async_step_hassio" in config_flow + or "async_step_homekit" in config_flow + or "async_step_ssdp" in config_flow + or "async_step_zeroconf" in config_flow ) if not needs_unique_id: return - config_flow = config_flow_file.read_text() - has_unique_id = ( 
"self.async_set_unique_id" in config_flow - or "config_entry_flow.register_discovery_flow" in config_flow - or "config_entry_oauth2_flow.AbstractOAuth2FlowHandler" in config_flow + or "self._async_handle_discovery_without_unique_id" in config_flow + or "register_discovery_flow" in config_flow + or "AbstractOAuth2FlowHandler" in config_flow ) if has_unique_id: @@ -70,9 +88,12 @@ def generate_and_validate(integrations: Dict[str, Integration], config: Config): if not integration.manifest: continue - config_flow = integration.manifest.get("config_flow") - - if not config_flow: + if not ( + integration.manifest.get("config_flow") + or integration.manifest.get("homekit") + or integration.manifest.get("ssdp") + or integration.manifest.get("zeroconf") + ): continue validate_integration(config, integration) diff --git a/script/hassfest/dependencies.py b/script/hassfest/dependencies.py index ba9e971d02e..2335270dc4a 100644 --- a/script/hassfest/dependencies.py +++ b/script/hassfest/dependencies.py @@ -103,6 +103,7 @@ ALLOWED_USED_COMPONENTS = { "input_number", "input_select", "input_text", + "onboarding", "persistent_notification", "person", "script", @@ -253,7 +254,14 @@ def validate(integrations: Dict[str, Integration], config): continue # check that all referenced dependencies exist + after_deps = integration.manifest.get("after_dependencies", []) for dep in integration.manifest.get("dependencies", []): + if dep in after_deps: + integration.add_error( + "dependencies", + f"Dependency {dep} is both in dependencies and after_dependencies", + ) + if dep not in integrations: integration.add_error( "dependencies", f"Dependency {dep} does not exist" diff --git a/script/hassfest/ssdp.py b/script/hassfest/ssdp.py index 05a9dee332d..c9b3b893118 100644 --- a/script/hassfest/ssdp.py +++ b/script/hassfest/ssdp.py @@ -38,22 +38,6 @@ def generate_and_validate(integrations: Dict[str, Integration]): if not ssdp: continue - try: - with open(str(integration.path / "config_flow.py")) as fp: - content = fp.read() - if ( - " async_step_ssdp" not in content - and "AbstractOAuth2FlowHandler" not in content - and "register_discovery_flow" not in content - ): - integration.add_error("ssdp", "Config flow has no async_step_ssdp") - continue - except FileNotFoundError: - integration.add_error( - "ssdp", "SSDP info in a manifest requires a config flow to exist" - ) - continue - for matcher in ssdp: data[domain].append(sort_dict(matcher)) diff --git a/script/hassfest/translations.py b/script/hassfest/translations.py index 416bfbdb47e..801a5112118 100644 --- a/script/hassfest/translations.py +++ b/script/hassfest/translations.py @@ -32,7 +32,7 @@ REMOVED_TITLE_MSG = ( MOVED_TRANSLATIONS_DIRECTORY_MSG = ( "The '.translations' directory has been moved, the new name is 'translations', " - "starting with Home Assistant 0.111 your translations will no longer " + "starting with Home Assistant 0.112 your translations will no longer " "load if you do not move/rename this " ) @@ -47,7 +47,7 @@ def check_translations_directory_name(integration: Integration) -> None: return if legacy_translations.is_dir(): - integration.add_warning("translations", MOVED_TRANSLATIONS_DIRECTORY_MSG) + integration.add_error("translations", MOVED_TRANSLATIONS_DIRECTORY_MSG) def find_references(strings, prefix, found): diff --git a/script/hassfest/zeroconf.py b/script/hassfest/zeroconf.py index 5ff102ea480..d6b39bd0d27 100644 --- a/script/hassfest/zeroconf.py +++ b/script/hassfest/zeroconf.py @@ -34,42 +34,7 @@ def generate_and_validate(integrations: 
Dict[str, Integration]): homekit = integration.manifest.get("homekit", {}) homekit_models = homekit.get("models", []) - if not service_types and not homekit_models: - continue - - try: - with open(str(integration.path / "config_flow.py")) as fp: - content = fp.read() - uses_discovery_flow = "register_discovery_flow" in content - uses_oauth2_flow = "AbstractOAuth2FlowHandler" in content - - if ( - service_types - and not uses_discovery_flow - and not uses_oauth2_flow - and " async_step_zeroconf" not in content - ): - integration.add_error( - "zeroconf", "Config flow has no async_step_zeroconf" - ) - continue - - if ( - homekit_models - and not uses_discovery_flow - and not uses_oauth2_flow - and " async_step_homekit" not in content - ): - integration.add_error( - "zeroconf", "Config flow has no async_step_homekit" - ) - continue - - except FileNotFoundError: - integration.add_error( - "zeroconf", - "Zeroconf info in a manifest requires a config flow to exist", - ) + if not (service_types or homekit_models): continue for service_type in service_types: diff --git a/script/setup b/script/setup index a8c9d628115..0076d70a7f0 100755 --- a/script/setup +++ b/script/setup @@ -8,4 +8,4 @@ cd "$(dirname "$0")/.." script/bootstrap pre-commit install -pip3 install -e . +pip install -e . diff --git a/script/translations/const.py b/script/translations/const.py index d282c9c2915..7c50b7db5e3 100644 --- a/script/translations/const.py +++ b/script/translations/const.py @@ -3,6 +3,6 @@ import pathlib CORE_PROJECT_ID = "130246255a974bd3b5e8a1.51616605" FRONTEND_PROJECT_ID = "3420425759f6d6d241f598.13594006" -CLI_2_DOCKER_IMAGE = "v2.3.0" +CLI_2_DOCKER_IMAGE = "2.5.1" INTEGRATIONS_DIR = pathlib.Path("homeassistant/components") FRONTEND_DIR = pathlib.Path("../frontend") diff --git a/script/translations/download.py b/script/translations/download.py index 8f17e057080..7fc4c3365cc 100755 --- a/script/translations/download.py +++ b/script/translations/download.py @@ -7,7 +7,7 @@ import re import subprocess from typing import Dict, List, Union -from .const import CLI_2_DOCKER_IMAGE, CORE_PROJECT_ID +from .const import CLI_2_DOCKER_IMAGE, CORE_PROJECT_ID, INTEGRATIONS_DIR from .error import ExitApp from .util import get_lokalise_token @@ -74,23 +74,13 @@ def get_component_path(lang, component): def get_platform_path(lang, component, platform): """Get the platform translation path.""" - if os.path.isdir(os.path.join("homeassistant", "components", component, platform)): - return os.path.join( - "homeassistant", - "components", - component, - platform, - "translations", - f"{lang}.json", - ) - else: - return os.path.join( - "homeassistant", - "components", - component, - "translations", - f"{platform}.{lang}.json", - ) + return os.path.join( + "homeassistant", + "components", + component, + "translations", + f"{platform}.{lang}.json", + ) def get_component_translations(translations): @@ -111,9 +101,12 @@ def save_language_translations(lang, translations): os.makedirs(os.path.dirname(path), exist_ok=True) save_json(path, base_translations) - for platform, platform_translations in component_translations.get( - "platform", {} - ).items(): + if "platform" not in component_translations: + continue + + for platform, platform_translations in component_translations[ + "platform" + ].items(): path = get_platform_path(lang, component, platform) os.makedirs(os.path.dirname(path), exist_ok=True) save_json(path, platform_translations) @@ -127,12 +120,20 @@ def write_integration_translations(): save_language_translations(lang, 
translations) +def delete_old_translations(): + """Delete old translations.""" + for fil in INTEGRATIONS_DIR.glob("*/translations/*"): + fil.unlink() + + def run(): """Run the script.""" DOWNLOAD_DIR.mkdir(parents=True, exist_ok=True) run_download_docker() + delete_old_translations() + write_integration_translations() return 0 diff --git a/setup.py b/setup.py index 1473fd1f5f9..4e46f632170 100755 --- a/setup.py +++ b/setup.py @@ -39,7 +39,7 @@ REQUIRES = [ "bcrypt==3.1.7", "certifi>=2020.4.5.1", "ciso8601==2.1.3", - "importlib-metadata==1.6.0", + "importlib-metadata==1.6.0;python_version<'3.8'", "jinja2>=2.11.1", "PyJWT==1.7.1", # PyJWT has loose dependency. We want the latest one. @@ -48,7 +48,7 @@ REQUIRES = [ "python-slugify==4.0.0", "pytz>=2020.1", "pyyaml==5.3.1", - "requests==2.23.0", + "requests==2.24.0", "ruamel.yaml==0.15.100", "voluptuous==0.11.7", "voluptuous-serialize==2.3.0", diff --git a/tests/common.py b/tests/common.py index 2136de3584f..dfe832cc4ce 100644 --- a/tests/common.py +++ b/tests/common.py @@ -23,7 +23,7 @@ from homeassistant.auth import ( providers as auth_providers, ) from homeassistant.auth.permissions import system_policies -from homeassistant.components import mqtt, recorder +from homeassistant.components import recorder from homeassistant.components.device_automation import ( # noqa: F401 _async_get_device_automation_capabilities as async_get_device_automation_capabilities, _async_get_device_automations as async_get_device_automations, @@ -53,13 +53,13 @@ from homeassistant.helpers import ( storage, ) from homeassistant.helpers.json import JSONEncoder -from homeassistant.setup import async_setup_component, setup_component +from homeassistant.setup import setup_component from homeassistant.util.async_ import run_callback_threadsafe import homeassistant.util.dt as date_util from homeassistant.util.unit_system import METRIC_SYSTEM import homeassistant.util.yaml.loader as yaml_loader -from tests.async_mock import AsyncMock, MagicMock, Mock, patch +from tests.async_mock import AsyncMock, Mock, patch _LOGGER = logging.getLogger(__name__) INSTANCES = [] @@ -324,36 +324,6 @@ def mock_state_change_event(hass, new_state, old_state=None): hass.bus.fire(EVENT_STATE_CHANGED, event_data, context=new_state.context) -async def async_mock_mqtt_component(hass, config=None): - """Mock the MQTT component.""" - if config is None: - config = {mqtt.CONF_BROKER: "mock-broker"} - - @ha.callback - def _async_fire_mqtt_message(topic, payload, qos, retain): - async_fire_mqtt_message(hass, topic, payload, qos, retain) - - with patch("paho.mqtt.client.Client") as mock_client: - mock_client = mock_client.return_value - mock_client.connect.return_value = 0 - mock_client.subscribe.return_value = (0, 0) - mock_client.unsubscribe.return_value = (0, 0) - mock_client.publish.side_effect = _async_fire_mqtt_message - - result = await async_setup_component(hass, mqtt.DOMAIN, {mqtt.DOMAIN: config}) - assert result - await hass.async_block_till_done() - - hass.data["mqtt"] = MagicMock( - spec_set=hass.data["mqtt"], wraps=hass.data["mqtt"] - ) - - return hass.data["mqtt"] - - -mock_mqtt_component = threadsafe_coroutine_factory(async_mock_mqtt_component) - - @ha.callback def mock_component(hass, component): """Mock a component is setup.""" @@ -991,6 +961,8 @@ def mock_integration(hass, module): hass.data.setdefault(loader.DATA_INTEGRATIONS, {})[module.DOMAIN] = integration hass.data.setdefault(loader.DATA_COMPONENTS, {})[module.DOMAIN] = module + return integration + def mock_entity_platform(hass, 
platform_path, module): """Mock a entity platform. diff --git a/tests/components/alarm_control_panel/common.py b/tests/components/alarm_control_panel/common.py index ce0bde0517c..fa50a1aab41 100644 --- a/tests/components/alarm_control_panel/common.py +++ b/tests/components/alarm_control_panel/common.py @@ -15,7 +15,6 @@ from homeassistant.const import ( SERVICE_ALARM_DISARM, SERVICE_ALARM_TRIGGER, ) -from homeassistant.loader import bind_hass async def async_alarm_disarm(hass, code=None, entity_id=ENTITY_MATCH_ALL): @@ -29,18 +28,6 @@ async def async_alarm_disarm(hass, code=None, entity_id=ENTITY_MATCH_ALL): await hass.services.async_call(DOMAIN, SERVICE_ALARM_DISARM, data, blocking=True) -@bind_hass -def alarm_disarm(hass, code=None, entity_id=ENTITY_MATCH_ALL): - """Send the alarm the command for disarm.""" - data = {} - if code: - data[ATTR_CODE] = code - if entity_id: - data[ATTR_ENTITY_ID] = entity_id - - hass.services.call(DOMAIN, SERVICE_ALARM_DISARM, data) - - async def async_alarm_arm_home(hass, code=None, entity_id=ENTITY_MATCH_ALL): """Send the alarm the command for disarm.""" data = {} @@ -52,18 +39,6 @@ async def async_alarm_arm_home(hass, code=None, entity_id=ENTITY_MATCH_ALL): await hass.services.async_call(DOMAIN, SERVICE_ALARM_ARM_HOME, data, blocking=True) -@bind_hass -def alarm_arm_home(hass, code=None, entity_id=ENTITY_MATCH_ALL): - """Send the alarm the command for arm home.""" - data = {} - if code: - data[ATTR_CODE] = code - if entity_id: - data[ATTR_ENTITY_ID] = entity_id - - hass.services.call(DOMAIN, SERVICE_ALARM_ARM_HOME, data) - - async def async_alarm_arm_away(hass, code=None, entity_id=ENTITY_MATCH_ALL): """Send the alarm the command for disarm.""" data = {} @@ -75,18 +50,6 @@ async def async_alarm_arm_away(hass, code=None, entity_id=ENTITY_MATCH_ALL): await hass.services.async_call(DOMAIN, SERVICE_ALARM_ARM_AWAY, data, blocking=True) -@bind_hass -def alarm_arm_away(hass, code=None, entity_id=ENTITY_MATCH_ALL): - """Send the alarm the command for arm away.""" - data = {} - if code: - data[ATTR_CODE] = code - if entity_id: - data[ATTR_ENTITY_ID] = entity_id - - hass.services.call(DOMAIN, SERVICE_ALARM_ARM_AWAY, data) - - async def async_alarm_arm_night(hass, code=None, entity_id=ENTITY_MATCH_ALL): """Send the alarm the command for disarm.""" data = {} @@ -98,18 +61,6 @@ async def async_alarm_arm_night(hass, code=None, entity_id=ENTITY_MATCH_ALL): await hass.services.async_call(DOMAIN, SERVICE_ALARM_ARM_NIGHT, data, blocking=True) -@bind_hass -def alarm_arm_night(hass, code=None, entity_id=ENTITY_MATCH_ALL): - """Send the alarm the command for arm night.""" - data = {} - if code: - data[ATTR_CODE] = code - if entity_id: - data[ATTR_ENTITY_ID] = entity_id - - hass.services.call(DOMAIN, SERVICE_ALARM_ARM_NIGHT, data) - - async def async_alarm_trigger(hass, code=None, entity_id=ENTITY_MATCH_ALL): """Send the alarm the command for disarm.""" data = {} @@ -121,18 +72,6 @@ async def async_alarm_trigger(hass, code=None, entity_id=ENTITY_MATCH_ALL): await hass.services.async_call(DOMAIN, SERVICE_ALARM_TRIGGER, data, blocking=True) -@bind_hass -def alarm_trigger(hass, code=None, entity_id=ENTITY_MATCH_ALL): - """Send the alarm the command for trigger.""" - data = {} - if code: - data[ATTR_CODE] = code - if entity_id: - data[ATTR_ENTITY_ID] = entity_id - - hass.services.call(DOMAIN, SERVICE_ALARM_TRIGGER, data) - - async def async_alarm_arm_custom_bypass(hass, code=None, entity_id=ENTITY_MATCH_ALL): """Send the alarm the command for disarm.""" data = {} @@ -144,15 +83,3 
@@ async def async_alarm_arm_custom_bypass(hass, code=None, entity_id=ENTITY_MATCH_ await hass.services.async_call( DOMAIN, SERVICE_ALARM_ARM_CUSTOM_BYPASS, data, blocking=True ) - - -@bind_hass -def alarm_arm_custom_bypass(hass, code=None, entity_id=ENTITY_MATCH_ALL): - """Send the alarm the command for arm custom bypass.""" - data = {} - if code: - data[ATTR_CODE] = code - if entity_id: - data[ATTR_ENTITY_ID] = entity_id - - hass.services.call(DOMAIN, SERVICE_ALARM_ARM_CUSTOM_BYPASS, data) diff --git a/tests/components/alert/test_init.py b/tests/components/alert/test_init.py index 96c36a87edc..60c2bd6b809 100644 --- a/tests/components/alert/test_init.py +++ b/tests/components/alert/test_init.py @@ -125,10 +125,7 @@ class TestAlert(unittest.TestCase): """Set up things to be run when tests are started.""" self.hass = get_test_home_assistant() self._setup_notify() - - def tearDown(self): - """Stop everything that was started.""" - self.hass.stop() + self.addCleanup(self.hass.stop) def _setup_notify(self): events = [] @@ -205,21 +202,6 @@ class TestAlert(unittest.TestCase): self.hass.block_till_done() assert STATE_ON == self.hass.states.get(ENTITY_ID).state - def test_hidden(self): - """Test entity hiding.""" - assert setup_component(self.hass, alert.DOMAIN, TEST_CONFIG) - hidden = self.hass.states.get(ENTITY_ID).attributes.get("hidden") - assert hidden - - self.hass.states.set("sensor.test", STATE_ON) - self.hass.block_till_done() - hidden = self.hass.states.get(ENTITY_ID).attributes.get("hidden") - assert not hidden - - turn_off(self.hass, ENTITY_ID) - hidden = self.hass.states.get(ENTITY_ID).attributes.get("hidden") - assert not hidden - def test_notification_no_done_message(self): """Test notifications.""" events = [] @@ -362,8 +344,6 @@ class TestAlert(unittest.TestCase): self.hass.add_job(entity.begin_alerting) self.hass.block_till_done() - assert entity.hidden is True - def test_done_message_state_tracker_reset_on_cancel(self): """Test that the done message is reset when canceled.""" entity = alert.Alert(self.hass, *TEST_NOACK) diff --git a/tests/components/alexa/test_flash_briefings.py b/tests/components/alexa/test_flash_briefings.py index 14dbe7336fb..7ab75d8c037 100644 --- a/tests/components/alexa/test_flash_briefings.py +++ b/tests/components/alexa/test_flash_briefings.py @@ -6,7 +6,7 @@ import pytest from homeassistant.components import alexa from homeassistant.components.alexa import const -from homeassistant.const import HTTP_NOT_FOUND +from homeassistant.const import HTTP_NOT_FOUND, HTTP_UNAUTHORIZED from homeassistant.core import callback from homeassistant.setup import async_setup_component @@ -39,6 +39,7 @@ def alexa_client(loop, hass, hass_client): "homeassistant": {}, "alexa": { "flash_briefings": { + "password": "pass/abc", "weather": [ { "title": "Weekly forecast", @@ -63,8 +64,11 @@ def alexa_client(loop, hass, hass_client): return loop.run_until_complete(hass_client()) -def _flash_briefing_req(client, briefing_id): - return client.get(f"/api/alexa/flash_briefings/{briefing_id}") +def _flash_briefing_req(client, briefing_id, password="pass%2Fabc"): + if password is None: + return client.get(f"/api/alexa/flash_briefings/{briefing_id}") + + return client.get(f"/api/alexa/flash_briefings/{briefing_id}?password={password}") async def test_flash_briefing_invalid_id(alexa_client): @@ -75,6 +79,30 @@ async def test_flash_briefing_invalid_id(alexa_client): assert text == "" +async def test_flash_briefing_no_password(alexa_client): + """Test for no Flash Briefing password.""" + 
req = await _flash_briefing_req(alexa_client, "weather", password=None) + assert req.status == HTTP_UNAUTHORIZED + text = await req.text() + assert text == "" + + +async def test_flash_briefing_invalid_password(alexa_client): + """Test an invalid Flash Briefing password.""" + req = await _flash_briefing_req(alexa_client, "weather", password="wrongpass") + assert req.status == HTTP_UNAUTHORIZED + text = await req.text() + assert text == "" + + +async def test_flash_briefing_request_for_password(alexa_client): + """Test for "password" Flash Briefing.""" + req = await _flash_briefing_req(alexa_client, "password") + assert req.status == HTTP_NOT_FOUND + text = await req.text() + assert text == "" + + async def test_flash_briefing_date_from_str(alexa_client): """Test the response has a valid date parsed from string.""" req = await _flash_briefing_req(alexa_client, "weather") diff --git a/tests/components/alexa/test_init.py b/tests/components/alexa/test_init.py index 212b48cb436..605ca96f190 100644 --- a/tests/components/alexa/test_init.py +++ b/tests/components/alexa/test_init.py @@ -1,24 +1,28 @@ """Tests for alexa.""" from homeassistant.components import logbook from homeassistant.components.alexa.const import EVENT_ALEXA_SMART_HOME -import homeassistant.core as ha from homeassistant.setup import async_setup_component +from tests.components.logbook.test_init import MockLazyEventPartialState + async def test_humanify_alexa_event(hass): """Test humanifying Alexa event.""" + hass.config.components.add("recorder") await async_setup_component(hass, "alexa", {}) + await async_setup_component(hass, "logbook", {}) hass.states.async_set("light.kitchen", "on", {"friendly_name": "Kitchen Light"}) + entity_attr_cache = logbook.EntityAttributeCache(hass) results = list( logbook.humanify( hass, [ - ha.Event( + MockLazyEventPartialState( EVENT_ALEXA_SMART_HOME, {"request": {"namespace": "Alexa.Discovery", "name": "Discover"}}, ), - ha.Event( + MockLazyEventPartialState( EVENT_ALEXA_SMART_HOME, { "request": { @@ -28,7 +32,7 @@ async def test_humanify_alexa_event(hass): } }, ), - ha.Event( + MockLazyEventPartialState( EVENT_ALEXA_SMART_HOME, { "request": { @@ -39,6 +43,7 @@ async def test_humanify_alexa_event(hass): }, ), ], + entity_attr_cache, ) ) diff --git a/tests/components/almond/test_config_flow.py b/tests/components/almond/test_config_flow.py index 959846bd017..a6785d2eff0 100644 --- a/tests/components/almond/test_config_flow.py +++ b/tests/components/almond/test_config_flow.py @@ -82,7 +82,7 @@ async def test_abort_if_existing_entry(hass): assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT assert result["reason"] == "already_setup" - result = await flow.async_step_hassio() + result = await flow.async_step_hassio({}) assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT assert result["reason"] == "already_setup" diff --git a/tests/components/androidtv/test_media_player.py b/tests/components/androidtv/test_media_player.py index 85e4a75acd0..d1723c2d6fa 100644 --- a/tests/components/androidtv/test_media_player.py +++ b/tests/components/androidtv/test_media_player.py @@ -31,9 +31,9 @@ from homeassistant.const import ( CONF_NAME, CONF_PLATFORM, SERVICE_VOLUME_SET, - STATE_IDLE, STATE_OFF, STATE_PLAYING, + STATE_STANDBY, STATE_UNAVAILABLE, ) from homeassistant.setup import async_setup_component @@ -150,7 +150,7 @@ async def _test_reconnect(hass, caplog, config): # state will be the last known state state = hass.states.get(entity_id) if patch_key == "server": - assert state.state == STATE_IDLE 
+ assert state.state == STATE_STANDBY else: assert state.state == STATE_OFF @@ -159,7 +159,7 @@ async def _test_reconnect(hass, caplog, config): await hass.helpers.entity_component.async_update_entity(entity_id) state = hass.states.get(entity_id) assert state is not None - assert state.state == STATE_IDLE + assert state.state == STATE_STANDBY if patch_key == "python": assert ( @@ -879,7 +879,7 @@ async def test_update_lock_not_acquired(hass): await hass.helpers.entity_component.async_update_entity(entity_id) state = hass.states.get(entity_id) assert state is not None - assert state.state == STATE_IDLE + assert state.state == STATE_STANDBY async def test_download(hass): diff --git a/tests/components/apache_kafka/__init__.py b/tests/components/apache_kafka/__init__.py new file mode 100644 index 00000000000..c166dee76ad --- /dev/null +++ b/tests/components/apache_kafka/__init__.py @@ -0,0 +1 @@ +"""Tests for apache_kafka component.""" diff --git a/tests/components/apache_kafka/test_init.py b/tests/components/apache_kafka/test_init.py new file mode 100644 index 00000000000..45791da082e --- /dev/null +++ b/tests/components/apache_kafka/test_init.py @@ -0,0 +1,181 @@ +"""The tests for the Apache Kafka component.""" +from collections import namedtuple + +import pytest + +import homeassistant.components.apache_kafka as apache_kafka +from homeassistant.const import STATE_ON +from homeassistant.setup import async_setup_component + +from tests.async_mock import patch + +APACHE_KAFKA_PATH = "homeassistant.components.apache_kafka" +PRODUCER_PATH = f"{APACHE_KAFKA_PATH}.AIOKafkaProducer" +MIN_CONFIG = { + "ip_address": "localhost", + "port": 8080, + "topic": "topic", +} +FilterTest = namedtuple("FilterTest", "id should_pass") +MockKafkaClient = namedtuple("MockKafkaClient", "init start send_and_wait") + + +@pytest.fixture(name="mock_client") +def mock_client_fixture(): + """Mock the apache kafka client.""" + with patch(f"{PRODUCER_PATH}.start") as start, patch( + f"{PRODUCER_PATH}.send_and_wait" + ) as send_and_wait, patch(f"{PRODUCER_PATH}.__init__", return_value=None) as init: + yield MockKafkaClient(init, start, send_and_wait) + + +@pytest.fixture(autouse=True, scope="module") +def mock_client_stop(): + """Mock client stop at module scope for teardown.""" + with patch(f"{PRODUCER_PATH}.stop") as stop: + yield stop + + +async def test_minimal_config(hass, mock_client): + """Test the minimal config and defaults of component.""" + config = {apache_kafka.DOMAIN: MIN_CONFIG} + assert await async_setup_component(hass, apache_kafka.DOMAIN, config) + await hass.async_block_till_done() + assert mock_client.start.called_once + + +async def test_full_config(hass, mock_client): + """Test the full config of component.""" + config = { + apache_kafka.DOMAIN: { + "filter": { + "include_domains": ["light"], + "include_entity_globs": ["sensor.included_*"], + "include_entities": ["binary_sensor.included"], + "exclude_domains": ["light"], + "exclude_entity_globs": ["sensor.excluded_*"], + "exclude_entities": ["binary_sensor.excluded"], + }, + } + } + config[apache_kafka.DOMAIN].update(MIN_CONFIG) + + assert await async_setup_component(hass, apache_kafka.DOMAIN, config) + await hass.async_block_till_done() + assert mock_client.start.called_once + + +async def _setup(hass, filter_config): + """Shared set up for filtering tests.""" + config = {apache_kafka.DOMAIN: {"filter": filter_config}} + config[apache_kafka.DOMAIN].update(MIN_CONFIG) + + assert await async_setup_component(hass, apache_kafka.DOMAIN, config) + await 
hass.async_block_till_done() + + +async def _run_filter_tests(hass, tests, mock_client): + """Run a series of filter tests on apache kafka.""" + for test in tests: + hass.states.async_set(test.id, STATE_ON) + await hass.async_block_till_done() + + if test.should_pass: + mock_client.send_and_wait.assert_called_once() + mock_client.send_and_wait.reset_mock() + else: + mock_client.send_and_wait.assert_not_called() + + +async def test_allowlist(hass, mock_client): + """Test an allowlist only config.""" + await _setup( + hass, + { + "include_domains": ["light"], + "include_entity_globs": ["sensor.included_*"], + "include_entities": ["binary_sensor.included"], + }, + ) + + tests = [ + FilterTest("climate.excluded", False), + FilterTest("light.included", True), + FilterTest("sensor.excluded_test", False), + FilterTest("sensor.included_test", True), + FilterTest("binary_sensor.included", True), + FilterTest("binary_sensor.excluded", False), + ] + + await _run_filter_tests(hass, tests, mock_client) + + +async def test_denylist(hass, mock_client): + """Test a denylist only config.""" + await _setup( + hass, + { + "exclude_domains": ["climate"], + "exclude_entity_globs": ["sensor.excluded_*"], + "exclude_entities": ["binary_sensor.excluded"], + }, + ) + + tests = [ + FilterTest("climate.excluded", False), + FilterTest("light.included", True), + FilterTest("sensor.excluded_test", False), + FilterTest("sensor.included_test", True), + FilterTest("binary_sensor.included", True), + FilterTest("binary_sensor.excluded", False), + ] + + await _run_filter_tests(hass, tests, mock_client) + + +async def test_filtered_allowlist(hass, mock_client): + """Test an allowlist config with a filtering denylist.""" + await _setup( + hass, + { + "include_domains": ["light"], + "include_entity_globs": ["*.included_*"], + "exclude_domains": ["climate"], + "exclude_entity_globs": ["*.excluded_*"], + "exclude_entities": ["light.excluded"], + }, + ) + + tests = [ + FilterTest("light.included", True), + FilterTest("light.excluded_test", False), + FilterTest("light.excluded", False), + FilterTest("sensor.included_test", True), + FilterTest("climate.included_test", False), + ] + + await _run_filter_tests(hass, tests, mock_client) + + +async def test_filtered_denylist(hass, mock_client): + """Test a denylist config with a filtering allowlist.""" + await _setup( + hass, + { + "include_entities": ["climate.included", "sensor.excluded_test"], + "exclude_domains": ["climate"], + "exclude_entity_globs": ["*.excluded_*"], + "exclude_entities": ["light.excluded"], + }, + ) + + tests = [ + FilterTest("climate.excluded", False), + FilterTest("climate.included", True), + FilterTest("switch.excluded_test", False), + FilterTest("sensor.excluded_test", True), + FilterTest("light.excluded", False), + FilterTest("light.included", True), + ] + + await _run_filter_tests(hass, tests, mock_client) diff --git a/tests/components/apns/test_notify.py b/tests/components/apns/test_notify.py index 5c69e19435e..6c27299de5c 100644 --- a/tests/components/apns/test_notify.py +++ b/tests/components/apns/test_notify.py @@ -30,8 +30,9 @@ class TestApns(unittest.TestCase): def setUp(self): # pylint: disable=invalid-name """Set up things to be run when tests are started.""" self.hass = get_test_home_assistant() + self.addCleanup(self.tear_down_cleanup) - def tearDown(self): # pylint: disable=invalid-name + def tear_down_cleanup(self): """Stop everything that was started.""" self.hass.stop() diff --git a/tests/components/arcam_fmj/conftest.py 
b/tests/components/arcam_fmj/conftest.py index 386cdf9a2b0..dfdc9e434f2 100644 --- a/tests/components/arcam_fmj/conftest.py +++ b/tests/components/arcam_fmj/conftest.py @@ -3,30 +3,24 @@ from arcam.fmj.client import Client from arcam.fmj.state import State import pytest -from homeassistant.components.arcam_fmj import DEVICE_SCHEMA -from homeassistant.components.arcam_fmj.const import DOMAIN +from homeassistant.components.arcam_fmj.const import DEFAULT_NAME from homeassistant.components.arcam_fmj.media_player import ArcamFmj from homeassistant.const import CONF_HOST, CONF_PORT -from homeassistant.setup import async_setup_component from tests.async_mock import Mock, patch +from tests.common import MockConfigEntry MOCK_HOST = "127.0.0.1" -MOCK_PORT = 1234 +MOCK_PORT = 50000 MOCK_TURN_ON = { "service": "switch.turn_on", "data": {"entity_id": "switch.test"}, } -MOCK_NAME = "dummy" -MOCK_UUID = "1234" -MOCK_ENTITY_ID = "media_player.arcam_fmj_127_0_0_1_1234_1" -MOCK_CONFIG = DEVICE_SCHEMA({CONF_HOST: MOCK_HOST, CONF_PORT: MOCK_PORT}) - - -@pytest.fixture(name="config") -def config_fixture(): - """Create hass config fixture.""" - return {DOMAIN: [{CONF_HOST: MOCK_HOST, CONF_PORT: MOCK_PORT}]} +MOCK_ENTITY_ID = "media_player.arcam_fmj_127_0_0_1_zone_1" +MOCK_UUID = "456789abcdef" +MOCK_UDN = f"uuid:01234567-89ab-cdef-0123-{MOCK_UUID}" +MOCK_NAME = f"{DEFAULT_NAME} ({MOCK_HOST})" +MOCK_CONFIG_ENTRY = {CONF_HOST: MOCK_HOST, CONF_PORT: MOCK_PORT} @pytest.fixture(name="client") @@ -75,7 +69,7 @@ def state_fixture(state_1): @pytest.fixture(name="player") def player_fixture(hass, state): """Get standard player.""" - player = ArcamFmj(state, MOCK_UUID, MOCK_NAME, None) + player = ArcamFmj(MOCK_NAME, state, MOCK_UUID) player.entity_id = MOCK_ENTITY_ID player.hass = hass player.async_write_ha_state = Mock() @@ -83,8 +77,12 @@ def player_fixture(hass, state): @pytest.fixture(name="player_setup") -async def player_setup_fixture(hass, config, state_1, state_2, client): +async def player_setup_fixture(hass, state_1, state_2, client): """Get standard player.""" + config_entry = MockConfigEntry( + domain="arcam_fmj", data=MOCK_CONFIG_ENTRY, title=MOCK_NAME + ) + config_entry.add_to_hass(hass) def state_mock(cli, zone): if zone == 1: @@ -95,6 +93,6 @@ async def player_setup_fixture(hass, config, state_1, state_2, client): with patch("homeassistant.components.arcam_fmj.Client", return_value=client), patch( "homeassistant.components.arcam_fmj.media_player.State", side_effect=state_mock ), patch("homeassistant.components.arcam_fmj._run_client", return_value=None): - assert await async_setup_component(hass, "arcam_fmj", config) + assert await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() yield MOCK_ENTITY_ID diff --git a/tests/components/arcam_fmj/test_config_flow.py b/tests/components/arcam_fmj/test_config_flow.py index 6df280fa92e..9475c2f110c 100644 --- a/tests/components/arcam_fmj/test_config_flow.py +++ b/tests/components/arcam_fmj/test_config_flow.py @@ -1,37 +1,182 @@ """Tests for the Arcam FMJ config flow module.""" +from arcam.fmj.client import ConnectionFailed import pytest from homeassistant import data_entry_flow -from homeassistant.components.arcam_fmj.config_flow import ArcamFmjFlowHandler -from homeassistant.components.arcam_fmj.const import DOMAIN +from homeassistant.components import ssdp +from homeassistant.components.arcam_fmj.config_flow import get_entry_client +from homeassistant.components.arcam_fmj.const import DOMAIN, DOMAIN_DATA_ENTRIES +from 
homeassistant.config_entries import SOURCE_SSDP, SOURCE_USER +from homeassistant.const import CONF_HOST, CONF_PORT, CONF_SOURCE -from .conftest import MOCK_CONFIG, MOCK_NAME +from .conftest import ( + MOCK_CONFIG_ENTRY, + MOCK_HOST, + MOCK_NAME, + MOCK_PORT, + MOCK_UDN, + MOCK_UUID, +) +from tests.async_mock import AsyncMock, patch from tests.common import MockConfigEntry +MOCK_UPNP_DEVICE = f""" + + + {MOCK_UDN} + + +""" -@pytest.fixture(name="config_entry") -def config_entry_fixture(): - """Create a mock Arcam config entry.""" - return MockConfigEntry(domain=DOMAIN, data=MOCK_CONFIG, title=MOCK_NAME) +MOCK_UPNP_LOCATION = f"http://{MOCK_HOST}:8080/dd.xml" + +MOCK_DISCOVER = { + ssdp.ATTR_UPNP_MANUFACTURER: "ARCAM", + ssdp.ATTR_UPNP_MODEL_NAME: " ", + ssdp.ATTR_UPNP_MODEL_NUMBER: "AVR450, AVR750", + ssdp.ATTR_UPNP_FRIENDLY_NAME: f"Arcam media client {MOCK_UUID}", + ssdp.ATTR_UPNP_SERIAL: "12343", + ssdp.ATTR_SSDP_LOCATION: f"http://{MOCK_HOST}:8080/dd.xml", + ssdp.ATTR_UPNP_UDN: MOCK_UDN, + ssdp.ATTR_UPNP_DEVICE_TYPE: "urn:schemas-upnp-org:device:MediaRenderer:1", +} -async def test_single_import_only(hass, config_entry): - """Test form is shown when host not provided.""" - config_entry.add_to_hass(hass) - flow = ArcamFmjFlowHandler() - flow.hass = hass - result = await flow.async_step_import(MOCK_CONFIG) - assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT - assert result["reason"] == "already_setup" +@pytest.fixture(name="dummy_client", autouse=True) +def dummy_client_fixture(hass): + """Mock out the real client.""" + with patch("homeassistant.components.arcam_fmj.config_flow.Client") as client: + client.return_value.start.side_effect = AsyncMock(return_value=None) + client.return_value.stop.side_effect = AsyncMock(return_value=None) + yield client.return_value -async def test_import(hass): - """Test form is shown when host not provided.""" - flow = ArcamFmjFlowHandler() - flow.hass = hass - result = await flow.async_step_import(MOCK_CONFIG) +async def test_ssdp(hass, dummy_client): + """Test a ssdp import flow.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={CONF_SOURCE: SOURCE_SSDP}, data=MOCK_DISCOVER, + ) + assert result["type"] == data_entry_flow.RESULT_TYPE_FORM + assert result["step_id"] == "confirm" + + result = await hass.config_entries.flow.async_configure(result["flow_id"], {}) assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY - assert result["title"] == "Arcam FMJ" - assert result["data"] == MOCK_CONFIG + assert result["title"] == f"Arcam FMJ ({MOCK_HOST})" + assert result["data"] == MOCK_CONFIG_ENTRY + + +async def test_ssdp_abort(hass): + """Test a ssdp import flow.""" + entry = MockConfigEntry( + domain=DOMAIN, data=MOCK_CONFIG_ENTRY, title=MOCK_NAME, unique_id=MOCK_UUID + ) + entry.add_to_hass(hass) + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={CONF_SOURCE: SOURCE_SSDP}, data=MOCK_DISCOVER, + ) + assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT + assert result["reason"] == "already_configured" + + +async def test_ssdp_unable_to_connect(hass, dummy_client): + """Test a ssdp import flow.""" + dummy_client.start.side_effect = AsyncMock(side_effect=ConnectionFailed) + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={CONF_SOURCE: SOURCE_SSDP}, data=MOCK_DISCOVER, + ) + assert result["type"] == data_entry_flow.RESULT_TYPE_FORM + assert result["step_id"] == "confirm" + + result = await hass.config_entries.flow.async_configure(result["flow_id"], {}) + assert 
result["type"] == data_entry_flow.RESULT_TYPE_ABORT + assert result["reason"] == "unable_to_connect" + + +async def test_ssdp_update(hass): + """Test a ssdp import flow.""" + entry = MockConfigEntry( + domain=DOMAIN, + data={CONF_HOST: "old_host", CONF_PORT: MOCK_PORT}, + title=MOCK_NAME, + unique_id=MOCK_UUID, + ) + entry.add_to_hass(hass) + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={CONF_SOURCE: SOURCE_SSDP}, data=MOCK_DISCOVER, + ) + assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT + assert result["reason"] == "already_configured" + + assert entry.data[CONF_HOST] == MOCK_HOST + + +async def test_user(hass, aioclient_mock): + """Test a manual user configuration flow.""" + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={CONF_SOURCE: SOURCE_USER}, data=None, + ) + + assert result["type"] == data_entry_flow.RESULT_TYPE_FORM + assert result["step_id"] == "user" + + user_input = { + CONF_HOST: MOCK_HOST, + CONF_PORT: MOCK_PORT, + } + + aioclient_mock.get(MOCK_UPNP_LOCATION, text=MOCK_UPNP_DEVICE) + result = await hass.config_entries.flow.async_configure( + result["flow_id"], user_input + ) + assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY + assert result["title"] == f"Arcam FMJ ({MOCK_HOST})" + assert result["data"] == MOCK_CONFIG_ENTRY + assert result["result"].unique_id == MOCK_UUID + + +async def test_invalid_ssdp(hass, aioclient_mock): + """Test a a config flow where ssdp fails.""" + user_input = { + CONF_HOST: MOCK_HOST, + CONF_PORT: MOCK_PORT, + } + + aioclient_mock.get(MOCK_UPNP_LOCATION, text="") + result = await hass.config_entries.flow.async_init( + DOMAIN, context={CONF_SOURCE: SOURCE_USER}, data=user_input, + ) + assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY + assert result["title"] == f"Arcam FMJ ({MOCK_HOST})" + assert result["data"] == MOCK_CONFIG_ENTRY + assert result["result"].unique_id is None + + +async def test_user_wrong(hass, aioclient_mock): + """Test a manual user configuration flow with no ssdp response.""" + user_input = { + CONF_HOST: MOCK_HOST, + CONF_PORT: MOCK_PORT, + } + + aioclient_mock.get(MOCK_UPNP_LOCATION, status=404) + result = await hass.config_entries.flow.async_init( + DOMAIN, context={CONF_SOURCE: SOURCE_USER}, data=user_input, + ) + assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY + assert result["title"] == f"Arcam FMJ ({MOCK_HOST})" + assert result["result"].unique_id is None + + +async def test_get_entry_client(hass): + """Test helper for configuration.""" + entry = MockConfigEntry( + domain=DOMAIN, data=MOCK_CONFIG_ENTRY, title=MOCK_NAME, unique_id=MOCK_UUID + ) + hass.data[DOMAIN_DATA_ENTRIES] = {entry.entry_id: "dummy"} + assert get_entry_client(hass, entry) == "dummy" diff --git a/tests/components/arcam_fmj/test_media_player.py b/tests/components/arcam_fmj/test_media_player.py index 3d88f337e93..d6c219a6d96 100644 --- a/tests/components/arcam_fmj/test_media_player.py +++ b/tests/components/arcam_fmj/test_media_player.py @@ -4,10 +4,14 @@ from math import isclose from arcam.fmj import DecodeMode2CH, DecodeModeMCH, IncomingAudioFormat, SourceCodes import pytest -from homeassistant.components.media_player.const import MEDIA_TYPE_MUSIC -from homeassistant.core import HomeAssistant +from homeassistant.components.media_player.const import ( + ATTR_INPUT_SOURCE, + MEDIA_TYPE_MUSIC, + SERVICE_SELECT_SOURCE, +) +from homeassistant.const import ATTR_ENTITY_ID -from .conftest import MOCK_ENTITY_ID, MOCK_HOST, MOCK_NAME, MOCK_PORT, 
MOCK_UUID +from .conftest import MOCK_HOST, MOCK_NAME, MOCK_PORT, MOCK_UUID from tests.async_mock import ANY, MagicMock, Mock, PropertyMock, patch @@ -27,8 +31,9 @@ async def test_properties(player, state): """Test standard properties.""" assert player.unique_id == f"{MOCK_UUID}-1" assert player.device_info == { - "identifiers": {("arcam_fmj", MOCK_HOST, MOCK_PORT)}, - "model": "FMJ", + "name": f"Arcam FMJ ({MOCK_HOST})", + "identifiers": {("arcam_fmj", MOCK_UUID), ("arcam_fmj", MOCK_HOST, MOCK_PORT)}, + "model": "Arcam FMJ AVR", "manufacturer": "Arcam", } assert not player.should_poll @@ -55,12 +60,12 @@ async def test_powered_on(player, state): async def test_supported_features(player, state): - """Test support when turn on service exist.""" + """Test supported features.""" data = await update(player) assert data.attributes["supported_features"] == 69004 -async def test_turn_on_without_service(player, state): +async def test_turn_on(player, state): """Test turn on service.""" state.get_power.return_value = None await player.async_turn_on() @@ -71,29 +76,6 @@ async def test_turn_on_without_service(player, state): state.set_power.assert_called_with(True) -async def test_turn_on_with_service(hass, state): - """Test support when turn on service exist.""" - from homeassistant.components.arcam_fmj.media_player import ArcamFmj - - player = ArcamFmj(state, MOCK_UUID, "dummy", MOCK_TURN_ON) - player.hass = Mock(HomeAssistant) - player.entity_id = MOCK_ENTITY_ID - with patch( - "homeassistant.components.arcam_fmj.media_player.async_call_from_config" - ) as async_call_from_config: - - state.get_power.return_value = None - await player.async_turn_on() - state.set_power.assert_not_called() - async_call_from_config.assert_called_with( - player.hass, - MOCK_TURN_ON, - variables=None, - blocking=True, - validate_config=False, - ) - - async def test_turn_off(player, state): """Test command to turn off.""" await player.async_turn_off() @@ -110,7 +92,7 @@ async def test_mute_volume(player, state, mute): async def test_name(player): """Test name.""" - assert player.name == MOCK_NAME + assert player.name == f"{MOCK_NAME} - Zone: 1" async def test_update(player, state): @@ -138,9 +120,15 @@ async def test_2ch(player, state, fmt, result): "source, value", [("PVR", SourceCodes.PVR), ("BD", SourceCodes.BD), ("INVALID", None)], ) -async def test_select_source(player, state, source, value): +async def test_select_source(hass, player_setup, state, source, value): """Test selection of source.""" - await player.async_select_source(source) + await hass.services.async_call( + "media_player", + SERVICE_SELECT_SOURCE, + service_data={ATTR_ENTITY_ID: player_setup, ATTR_INPUT_SOURCE: source}, + blocking=True, + ) + if value: state.set_source.assert_called_with(value) else: diff --git a/tests/components/aurora/test_binary_sensor.py b/tests/components/aurora/test_binary_sensor.py index f90c1e2bcca..ea385d5697f 100644 --- a/tests/components/aurora/test_binary_sensor.py +++ b/tests/components/aurora/test_binary_sensor.py @@ -20,8 +20,9 @@ class TestAuroraSensorSetUp(unittest.TestCase): self.hass.config.latitude = self.lat self.hass.config.longitude = self.lon self.entities = [] + self.addCleanup(self.tear_down_cleanup) - def tearDown(self): # pylint: disable=invalid-name + def tear_down_cleanup(self): """Stop everything that was started.""" self.hass.stop() diff --git a/tests/components/automation/test_init.py b/tests/components/automation/test_init.py index d17e55691bc..9af8a6591d9 100644 --- 
a/tests/components/automation/test_init.py +++ b/tests/components/automation/test_init.py @@ -17,7 +17,7 @@ from homeassistant.const import ( STATE_OFF, STATE_ON, ) -from homeassistant.core import Context, CoreState, Event, State +from homeassistant.core import Context, CoreState, State from homeassistant.exceptions import HomeAssistantError, Unauthorized from homeassistant.setup import async_setup_component import homeassistant.util.dt as dt_util @@ -30,6 +30,7 @@ from tests.common import ( mock_restore_cache, ) from tests.components.automation import common +from tests.components.logbook.test_init import MockLazyEventPartialState @pytest.fixture @@ -1038,21 +1039,25 @@ async def test_extraction_functions(hass): async def test_logbook_humanify_automation_triggered_event(hass): """Test humanifying Automation Trigger event.""" + hass.config.components.add("recorder") await async_setup_component(hass, automation.DOMAIN, {}) + await async_setup_component(hass, "logbook", {}) + entity_attr_cache = logbook.EntityAttributeCache(hass) event1, event2 = list( logbook.humanify( hass, [ - Event( + MockLazyEventPartialState( EVENT_AUTOMATION_TRIGGERED, {ATTR_ENTITY_ID: "automation.hello", ATTR_NAME: "Hello Automation"}, ), - Event( + MockLazyEventPartialState( EVENT_AUTOMATION_TRIGGERED, {ATTR_ENTITY_ID: "automation.bye", ATTR_NAME: "Bye Automation"}, ), ], + entity_attr_cache, ) ) diff --git a/tests/components/automation/test_mqtt.py b/tests/components/automation/test_mqtt.py index 0a07c5aac48..f0dd76ff1b4 100644 --- a/tests/components/automation/test_mqtt.py +++ b/tests/components/automation/test_mqtt.py @@ -6,12 +6,7 @@ import pytest import homeassistant.components.automation as automation from homeassistant.setup import async_setup_component -from tests.common import ( - async_fire_mqtt_message, - async_mock_mqtt_component, - async_mock_service, - mock_component, -) +from tests.common import async_fire_mqtt_message, async_mock_service, mock_component from tests.components.automation import common @@ -22,10 +17,9 @@ def calls(hass): @pytest.fixture(autouse=True) -def setup_comp(hass): +def setup_comp(hass, mqtt_mock): """Initialize components.""" mock_component(hass, "group") - hass.loop.run_until_complete(async_mock_mqtt_component(hass)) async def test_if_fires_on_topic_match(hass, calls): @@ -104,10 +98,8 @@ async def test_if_not_fires_on_topic_but_no_payload_match(hass, calls): assert len(calls) == 0 -async def test_encoding_default(hass, calls): +async def test_encoding_default(hass, calls, mqtt_mock): """Test default encoding.""" - mock_mqtt = await async_mock_mqtt_component(hass) - assert await async_setup_component( hass, automation.DOMAIN, @@ -119,15 +111,13 @@ async def test_encoding_default(hass, calls): }, ) - mock_mqtt.async_subscribe.assert_called_once_with( + mqtt_mock.async_subscribe.assert_called_once_with( "test-topic", mock.ANY, 0, "utf-8" ) -async def test_encoding_custom(hass, calls): +async def test_encoding_custom(hass, calls, mqtt_mock): """Test default encoding.""" - mock_mqtt = await async_mock_mqtt_component(hass) - assert await async_setup_component( hass, automation.DOMAIN, @@ -139,4 +129,4 @@ async def test_encoding_custom(hass, calls): }, ) - mock_mqtt.async_subscribe.assert_called_once_with("test-topic", mock.ANY, 0, None) + mqtt_mock.async_subscribe.assert_called_once_with("test-topic", mock.ANY, 0, None) diff --git a/tests/components/avri/__init__.py b/tests/components/avri/__init__.py new file mode 100644 index 00000000000..c5212855038 --- /dev/null +++ 
b/tests/components/avri/__init__.py @@ -0,0 +1 @@ +"""Tests for the Avri integration.""" diff --git a/tests/components/avri/test_config_flow.py b/tests/components/avri/test_config_flow.py new file mode 100644 index 00000000000..291f7669ebd --- /dev/null +++ b/tests/components/avri/test_config_flow.py @@ -0,0 +1,80 @@ +"""Test the Avri config flow.""" +from asynctest import patch + +from homeassistant import config_entries, setup +from homeassistant.components.avri.const import DOMAIN + + +async def test_form(hass): + """Test we get the form.""" + await setup.async_setup_component(hass, "avri", {}) + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + assert result["type"] == "form" + assert result["errors"] == {} + + with patch( + "homeassistant.components.avri.async_setup_entry", return_value=True, + ) as mock_setup_entry: + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + "zip_code": "1234AB", + "house_number": 42, + "house_number_extension": "", + "country_code": "NL", + }, + ) + + assert result2["type"] == "create_entry" + assert result2["title"] == "1234AB 42" + assert result2["data"] == { + "id": "1234AB 42", + "zip_code": "1234AB", + "house_number": 42, + "house_number_extension": "", + "country_code": "NL", + } + await hass.async_block_till_done() + assert len(mock_setup_entry.mock_calls) == 1 + + +async def test_form_invalid_house_number(hass): + """Test we handle invalid house number.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + "zip_code": "1234AB", + "house_number": -1, + "house_number_extension": "", + "country_code": "NL", + }, + ) + + assert result2["type"] == "form" + assert result2["errors"] == {"house_number": "invalid_house_number"} + + +async def test_form_invalid_country_code(hass): + """Test we handle invalid county code.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + "zip_code": "1234AB", + "house_number": 42, + "house_number_extension": "", + "country_code": "foo", + }, + ) + + assert result2["type"] == "form" + assert result2["errors"] == {"country_code": "invalid_country_code"} diff --git a/tests/components/awair/const.py b/tests/components/awair/const.py new file mode 100644 index 00000000000..94c07e9e9fd --- /dev/null +++ b/tests/components/awair/const.py @@ -0,0 +1,20 @@ +"""Constants used in Awair tests.""" + +import json + +from homeassistant.const import CONF_ACCESS_TOKEN + +from tests.common import load_fixture + +AWAIR_UUID = "awair_24947" +CONFIG = {CONF_ACCESS_TOKEN: "12345"} +UNIQUE_ID = "foo@bar.com" +DEVICES_FIXTURE = json.loads(load_fixture("awair/devices.json")) +GEN1_DATA_FIXTURE = json.loads(load_fixture("awair/awair.json")) +GEN2_DATA_FIXTURE = json.loads(load_fixture("awair/awair-r2.json")) +GLOW_DATA_FIXTURE = json.loads(load_fixture("awair/glow.json")) +MINT_DATA_FIXTURE = json.loads(load_fixture("awair/mint.json")) +NO_DEVICES_FIXTURE = json.loads(load_fixture("awair/no_devices.json")) +OFFLINE_FIXTURE = json.loads(load_fixture("awair/awair-offline.json")) +OMNI_DATA_FIXTURE = json.loads(load_fixture("awair/omni.json")) +USER_FIXTURE = json.loads(load_fixture("awair/user.json")) diff --git 
a/tests/components/awair/test_config_flow.py b/tests/components/awair/test_config_flow.py new file mode 100644 index 00000000000..bbd37bda075 --- /dev/null +++ b/tests/components/awair/test_config_flow.py @@ -0,0 +1,190 @@ +"""Define tests for the Awair config flow.""" + +from asynctest import patch +from python_awair.exceptions import AuthError, AwairError + +from homeassistant import data_entry_flow +from homeassistant.components.awair.const import DOMAIN +from homeassistant.config_entries import SOURCE_IMPORT, SOURCE_USER +from homeassistant.const import CONF_ACCESS_TOKEN + +from .const import CONFIG, DEVICES_FIXTURE, NO_DEVICES_FIXTURE, UNIQUE_ID, USER_FIXTURE + +from tests.common import MockConfigEntry + + +async def test_show_form(hass): + """Test that the form is served with no input.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + + assert result["type"] == data_entry_flow.RESULT_TYPE_FORM + assert result["step_id"] == SOURCE_USER + + +async def test_invalid_access_token(hass): + """Test that errors are shown when the access token is invalid.""" + + with patch("python_awair.AwairClient.query", side_effect=AuthError()): + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER}, data=CONFIG + ) + + assert result["errors"] == {CONF_ACCESS_TOKEN: "auth"} + + +async def test_unexpected_api_error(hass): + """Test that we abort on generic errors.""" + + with patch("python_awair.AwairClient.query", side_effect=AwairError()): + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER}, data=CONFIG + ) + + assert result["type"] == "abort" + assert result["reason"] == "unknown" + + +async def test_duplicate_error(hass): + """Test that errors are shown when adding a duplicate config.""" + + with patch( + "python_awair.AwairClient.query", side_effect=[USER_FIXTURE, DEVICES_FIXTURE] + ), patch( + "homeassistant.components.awair.sensor.async_setup_entry", return_value=True, + ): + MockConfigEntry(domain=DOMAIN, unique_id=UNIQUE_ID, data=CONFIG).add_to_hass( + hass + ) + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER}, data=CONFIG + ) + + assert result["type"] == "abort" + assert result["reason"] == "already_configured" + + +async def test_no_devices_error(hass): + """Test that errors are shown when the API returns no devices.""" + + with patch( + "python_awair.AwairClient.query", side_effect=[USER_FIXTURE, NO_DEVICES_FIXTURE] + ): + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER}, data=CONFIG + ) + + assert result["type"] == "abort" + assert result["reason"] == "no_devices" + + +async def test_import(hass): + """Test config.yaml import.""" + + with patch( + "python_awair.AwairClient.query", side_effect=[USER_FIXTURE, DEVICES_FIXTURE] + ), patch( + "homeassistant.components.awair.sensor.async_setup_entry", return_value=True, + ): + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_IMPORT}, + data={CONF_ACCESS_TOKEN: CONFIG[CONF_ACCESS_TOKEN]}, + ) + + assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY + assert result["title"] == "foo@bar.com (32406)" + assert result["data"][CONF_ACCESS_TOKEN] == CONFIG[CONF_ACCESS_TOKEN] + assert result["result"].unique_id == UNIQUE_ID + + +async def test_import_aborts_on_api_error(hass): + """Test config.yaml imports on api error.""" + + with patch("python_awair.AwairClient.query", 
side_effect=AwairError()): + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_IMPORT}, + data={CONF_ACCESS_TOKEN: CONFIG[CONF_ACCESS_TOKEN]}, + ) + + assert result["type"] == "abort" + assert result["reason"] == "unknown" + + +async def test_import_aborts_if_configured(hass): + """Test config import doesn't re-import unnecessarily.""" + + with patch( + "python_awair.AwairClient.query", side_effect=[USER_FIXTURE, DEVICES_FIXTURE] + ), patch( + "homeassistant.components.awair.sensor.async_setup_entry", return_value=True, + ): + MockConfigEntry(domain=DOMAIN, unique_id=UNIQUE_ID, data=CONFIG).add_to_hass( + hass + ) + + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_IMPORT}, + data={CONF_ACCESS_TOKEN: CONFIG[CONF_ACCESS_TOKEN]}, + ) + + assert result["type"] == "abort" + assert result["reason"] == "already_setup" + + +async def test_reauth(hass): + """Test reauth flow.""" + with patch( + "python_awair.AwairClient.query", side_effect=[USER_FIXTURE, DEVICES_FIXTURE] + ), patch( + "homeassistant.components.awair.sensor.async_setup_entry", return_value=True, + ): + mock_config = MockConfigEntry(domain=DOMAIN, unique_id=UNIQUE_ID, data=CONFIG) + mock_config.add_to_hass(hass) + hass.config_entries.async_update_entry( + mock_config, data={**CONFIG, CONF_ACCESS_TOKEN: "blah"} + ) + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": "reauth", "unique_id": UNIQUE_ID}, data=CONFIG, + ) + + assert result["type"] == "abort" + assert result["reason"] == "reauth_successful" + + with patch("python_awair.AwairClient.query", side_effect=AuthError()): + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": "reauth", "unique_id": UNIQUE_ID}, data=CONFIG, + ) + + assert result["errors"] == {CONF_ACCESS_TOKEN: "auth"} + + with patch("python_awair.AwairClient.query", side_effect=AwairError()): + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": "reauth", "unique_id": UNIQUE_ID}, data=CONFIG, + ) + + assert result["type"] == "abort" + assert result["reason"] == "unknown" + + +async def test_create_entry(hass): + """Test overall flow.""" + + with patch( + "python_awair.AwairClient.query", side_effect=[USER_FIXTURE, DEVICES_FIXTURE] + ), patch( + "homeassistant.components.awair.sensor.async_setup_entry", return_value=True, + ): + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER}, data=CONFIG + ) + + assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY + assert result["title"] == "foo@bar.com (32406)" + assert result["data"][CONF_ACCESS_TOKEN] == CONFIG[CONF_ACCESS_TOKEN] + assert result["result"].unique_id == UNIQUE_ID diff --git a/tests/components/awair/test_sensor.py b/tests/components/awair/test_sensor.py index d1a3b933d05..00c469e3747 100644 --- a/tests/components/awair/test_sensor.py +++ b/tests/components/awair/test_sensor.py @@ -1,312 +1,342 @@ """Tests for the Awair sensor platform.""" -from contextlib import contextmanager -from datetime import timedelta -import json -import logging - -from homeassistant.components.awair.sensor import ( - ATTR_LAST_API_UPDATE, - ATTR_TIMESTAMP, - DEVICE_CLASS_CARBON_DIOXIDE, - DEVICE_CLASS_PM2_5, - DEVICE_CLASS_SCORE, - DEVICE_CLASS_VOLATILE_ORGANIC_COMPOUNDS, +from homeassistant.components.awair.const import ( + API_CO2, + API_HUMID, + API_LUX, + API_PM10, + API_PM25, + API_SCORE, + API_SPL_A, + API_TEMP, + API_VOC, + ATTR_UNIQUE_ID, + DOMAIN, + 
SENSOR_TYPES, ) -from homeassistant.components.sensor import DOMAIN as SENSOR_DOMAIN from homeassistant.const import ( + ATTR_ICON, + ATTR_UNIT_OF_MEASUREMENT, CONCENTRATION_MICROGRAMS_PER_CUBIC_METER, CONCENTRATION_PARTS_PER_BILLION, CONCENTRATION_PARTS_PER_MILLION, - DEVICE_CLASS_HUMIDITY, - DEVICE_CLASS_TEMPERATURE, STATE_UNAVAILABLE, TEMP_CELSIUS, UNIT_PERCENTAGE, ) -from homeassistant.setup import async_setup_component -from homeassistant.util.dt import parse_datetime, utcnow + +from .const import ( + AWAIR_UUID, + CONFIG, + DEVICES_FIXTURE, + GEN1_DATA_FIXTURE, + GEN2_DATA_FIXTURE, + GLOW_DATA_FIXTURE, + MINT_DATA_FIXTURE, + OFFLINE_FIXTURE, + OMNI_DATA_FIXTURE, + UNIQUE_ID, + USER_FIXTURE, +) from tests.async_mock import patch -from tests.common import async_fire_time_changed, load_fixture - -DISCOVERY_CONFIG = {"sensor": {"platform": "awair", "access_token": "qwerty"}} - -MANUAL_CONFIG = { - "sensor": { - "platform": "awair", - "access_token": "qwerty", - "devices": [{"uuid": "awair_foo"}], - } -} - -_LOGGER = logging.getLogger(__name__) - -NOW = utcnow() -AIR_DATA_FIXTURE = json.loads(load_fixture("awair_air_data_latest.json")) -AIR_DATA_FIXTURE[0][ATTR_TIMESTAMP] = str(NOW) -AIR_DATA_FIXTURE_UPDATED = json.loads( - load_fixture("awair_air_data_latest_updated.json") -) -AIR_DATA_FIXTURE_UPDATED[0][ATTR_TIMESTAMP] = str(NOW + timedelta(minutes=5)) -AIR_DATA_FIXTURE_EMPTY = [] +from tests.common import MockConfigEntry -@contextmanager -def alter_time(retval): - """Manage multiple time mocks.""" - patch_one = patch("homeassistant.util.dt.utcnow", return_value=retval) - patch_two = patch("homeassistant.util.utcnow", return_value=retval) - patch_three = patch( - "homeassistant.components.awair.sensor.dt.utcnow", return_value=retval - ) +async def setup_awair(hass, fixtures): + """Add Awair devices to hass, using specified fixtures for data.""" - with patch_one, patch_two, patch_three: - yield - - -async def setup_awair(hass, config=None, data_fixture=AIR_DATA_FIXTURE): - """Load the Awair platform.""" - devices_json = json.loads(load_fixture("awair_devices.json")) - devices_mock = devices_json - devices_patch = patch("python_awair.AwairClient.devices", return_value=devices_mock) - air_data_mock = data_fixture - air_data_patch = patch( - "python_awair.AwairClient.air_data_latest", return_value=air_data_mock - ) - - if config is None: - config = DISCOVERY_CONFIG - - with devices_patch, air_data_patch, alter_time(NOW): - assert await async_setup_component(hass, SENSOR_DOMAIN, config) + entry = MockConfigEntry(domain=DOMAIN, unique_id=UNIQUE_ID, data=CONFIG) + with patch("python_awair.AwairClient.query", side_effect=fixtures): + entry.add_to_hass(hass) + await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() -async def test_platform_manually_configured(hass): - """Test that we can manually configure devices.""" - await setup_awair(hass, MANUAL_CONFIG) +def assert_expected_properties( + hass, registry, name, unique_id, state_value, attributes +): + """Assert expected properties from a dict.""" - assert len(hass.states.async_all()) == 6 - - # Ensure that we loaded the device with uuid 'awair_foo', not the - # 'awair_12345' device that we stub out for API device discovery - entity = hass.data[SENSOR_DOMAIN].get_entity("sensor.awair_co2") - assert entity.unique_id == "awair_foo_CO2" + entry = registry.async_get(name) + assert entry.unique_id == unique_id + state = hass.states.get(name) + assert state + assert state.state == state_value + for attr, value in 
attributes.items(): + assert state.attributes.get(attr) == value -async def test_platform_automatically_configured(hass): - """Test that we can discover devices from the API.""" - await setup_awair(hass) +async def test_awair_gen1_sensors(hass): + """Test expected sensors on a 1st gen Awair.""" - assert len(hass.states.async_all()) == 6 + fixtures = [USER_FIXTURE, DEVICES_FIXTURE, GEN1_DATA_FIXTURE] + await setup_awair(hass, fixtures) + registry = await hass.helpers.entity_registry.async_get_registry() - # Ensure that we loaded the device with uuid 'awair_12345', which is - # the device that we stub out for API device discovery - entity = hass.data[SENSOR_DOMAIN].get_entity("sensor.awair_co2") - assert entity.unique_id == "awair_12345_CO2" - - -async def test_bad_platform_setup(hass): - """Tests that we throw correct exceptions when setting up Awair.""" - from python_awair import AwairClient - - auth_patch = patch( - "python_awair.AwairClient.devices", side_effect=AwairClient.AuthError - ) - rate_patch = patch( - "python_awair.AwairClient.devices", side_effect=AwairClient.RatelimitError - ) - generic_patch = patch( - "python_awair.AwairClient.devices", side_effect=AwairClient.GenericError + assert_expected_properties( + hass, + registry, + "sensor.living_room_awair_score", + f"{AWAIR_UUID}_{SENSOR_TYPES[API_SCORE][ATTR_UNIQUE_ID]}", + "88", + {ATTR_ICON: "mdi:blur"}, ) - with auth_patch: - assert await async_setup_component(hass, SENSOR_DOMAIN, DISCOVERY_CONFIG) - assert not hass.states.async_all() + assert_expected_properties( + hass, + registry, + "sensor.living_room_temperature", + f"{AWAIR_UUID}_{SENSOR_TYPES[API_TEMP][ATTR_UNIQUE_ID]}", + "21.8", + {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS, "awair_index": 1.0}, + ) - with rate_patch: - assert await async_setup_component(hass, SENSOR_DOMAIN, DISCOVERY_CONFIG) - assert not hass.states.async_all() + assert_expected_properties( + hass, + registry, + "sensor.living_room_humidity", + f"{AWAIR_UUID}_{SENSOR_TYPES[API_HUMID][ATTR_UNIQUE_ID]}", + "41.59", + {ATTR_UNIT_OF_MEASUREMENT: UNIT_PERCENTAGE, "awair_index": 0.0}, + ) - with generic_patch: - assert await async_setup_component(hass, SENSOR_DOMAIN, DISCOVERY_CONFIG) - assert not hass.states.async_all() + assert_expected_properties( + hass, + registry, + "sensor.living_room_carbon_dioxide", + f"{AWAIR_UUID}_{SENSOR_TYPES[API_CO2][ATTR_UNIQUE_ID]}", + "654.0", + { + ATTR_ICON: "mdi:cloud", + ATTR_UNIT_OF_MEASUREMENT: CONCENTRATION_PARTS_PER_MILLION, + "awair_index": 0.0, + }, + ) + + assert_expected_properties( + hass, + registry, + "sensor.living_room_volatile_organic_compounds", + f"{AWAIR_UUID}_{SENSOR_TYPES[API_VOC][ATTR_UNIQUE_ID]}", + "366", + { + ATTR_ICON: "mdi:cloud", + ATTR_UNIT_OF_MEASUREMENT: CONCENTRATION_PARTS_PER_BILLION, + "awair_index": 1.0, + }, + ) + + assert_expected_properties( + hass, + registry, + "sensor.living_room_pm2_5", + # gen1 unique_id should be awair_12345-DUST, which matches old integration behavior + f"{AWAIR_UUID}_DUST", + "14.3", + { + ATTR_ICON: "mdi:blur", + ATTR_UNIT_OF_MEASUREMENT: CONCENTRATION_MICROGRAMS_PER_CUBIC_METER, + "awair_index": 1.0, + }, + ) + + assert_expected_properties( + hass, + registry, + "sensor.living_room_pm10", + f"{AWAIR_UUID}_{SENSOR_TYPES[API_PM10][ATTR_UNIQUE_ID]}", + "14.3", + { + ATTR_ICON: "mdi:blur", + ATTR_UNIT_OF_MEASUREMENT: CONCENTRATION_MICROGRAMS_PER_CUBIC_METER, + "awair_index": 1.0, + }, + ) + + # We should not have a dust sensor; it's aliased as pm2.5 + # and pm10 sensors. 
+ assert hass.states.get("sensor.living_room_dust") is None + + # We should not have sound or lux sensors. + assert hass.states.get("sensor.living_room_sound_level") is None + assert hass.states.get("sensor.living_room_illuminance") is None -async def test_awair_setup_no_data(hass): - """Ensure that we do not crash during setup when no data is returned.""" - await setup_awair(hass, data_fixture=AIR_DATA_FIXTURE_EMPTY) - assert not hass.states.async_all() +async def test_awair_gen2_sensors(hass): + """Test expected sensors on a 2nd gen Awair.""" + + fixtures = [USER_FIXTURE, DEVICES_FIXTURE, GEN2_DATA_FIXTURE] + await setup_awair(hass, fixtures) + registry = await hass.helpers.entity_registry.async_get_registry() + + assert_expected_properties( + hass, + registry, + "sensor.living_room_awair_score", + f"{AWAIR_UUID}_{SENSOR_TYPES[API_SCORE][ATTR_UNIQUE_ID]}", + "97", + {ATTR_ICON: "mdi:blur"}, + ) + + assert_expected_properties( + hass, + registry, + "sensor.living_room_pm2_5", + f"{AWAIR_UUID}_{SENSOR_TYPES[API_PM25][ATTR_UNIQUE_ID]}", + "2.0", + { + ATTR_ICON: "mdi:blur", + ATTR_UNIT_OF_MEASUREMENT: CONCENTRATION_MICROGRAMS_PER_CUBIC_METER, + "awair_index": 0.0, + }, + ) + + # The Awair 2nd gen reports specifically a pm2.5 sensor, + # and so we don't alias anything. Make sure we didn't do that. + assert hass.states.get("sensor.living_room_pm10") is None -async def test_awair_misc_attributes(hass): - """Test that desired attributes are set.""" - await setup_awair(hass) +async def test_awair_mint_sensors(hass): + """Test expected sensors on an Awair mint.""" - attributes = hass.states.get("sensor.awair_co2").attributes - assert attributes[ATTR_LAST_API_UPDATE] == parse_datetime( - AIR_DATA_FIXTURE[0][ATTR_TIMESTAMP] + fixtures = [USER_FIXTURE, DEVICES_FIXTURE, MINT_DATA_FIXTURE] + await setup_awair(hass, fixtures) + registry = await hass.helpers.entity_registry.async_get_registry() + + assert_expected_properties( + hass, + registry, + "sensor.living_room_awair_score", + f"{AWAIR_UUID}_{SENSOR_TYPES[API_SCORE][ATTR_UNIQUE_ID]}", + "98", + {ATTR_ICON: "mdi:blur"}, + ) + + assert_expected_properties( + hass, + registry, + "sensor.living_room_pm2_5", + f"{AWAIR_UUID}_{SENSOR_TYPES[API_PM25][ATTR_UNIQUE_ID]}", + "1.0", + { + ATTR_ICON: "mdi:blur", + ATTR_UNIT_OF_MEASUREMENT: CONCENTRATION_MICROGRAMS_PER_CUBIC_METER, + "awair_index": 0.0, + }, + ) + + assert_expected_properties( + hass, + registry, + "sensor.living_room_illuminance", + f"{AWAIR_UUID}_{SENSOR_TYPES[API_LUX][ATTR_UNIQUE_ID]}", + "441.7", + {ATTR_UNIT_OF_MEASUREMENT: "lx"}, + ) + + # The Mint does not have a CO2 sensor. 
+ assert hass.states.get("sensor.living_room_carbon_dioxide") is None + + +async def test_awair_glow_sensors(hass): + """Test expected sensors on an Awair glow.""" + + fixtures = [USER_FIXTURE, DEVICES_FIXTURE, GLOW_DATA_FIXTURE] + await setup_awair(hass, fixtures) + registry = await hass.helpers.entity_registry.async_get_registry() + + assert_expected_properties( + hass, + registry, + "sensor.living_room_awair_score", + f"{AWAIR_UUID}_{SENSOR_TYPES[API_SCORE][ATTR_UNIQUE_ID]}", + "93", + {ATTR_ICON: "mdi:blur"}, + ) + + # The glow does not have a particle sensor + assert hass.states.get("sensor.living_room_pm2_5") is None + + +async def test_awair_omni_sensors(hass): + """Test expected sensors on an Awair omni.""" + + fixtures = [USER_FIXTURE, DEVICES_FIXTURE, OMNI_DATA_FIXTURE] + await setup_awair(hass, fixtures) + registry = await hass.helpers.entity_registry.async_get_registry() + + assert_expected_properties( + hass, + registry, + "sensor.living_room_awair_score", + f"{AWAIR_UUID}_{SENSOR_TYPES[API_SCORE][ATTR_UNIQUE_ID]}", + "99", + {ATTR_ICON: "mdi:blur"}, + ) + + assert_expected_properties( + hass, + registry, + "sensor.living_room_sound_level", + f"{AWAIR_UUID}_{SENSOR_TYPES[API_SPL_A][ATTR_UNIQUE_ID]}", + "47.0", + {ATTR_ICON: "mdi:ear-hearing", ATTR_UNIT_OF_MEASUREMENT: "dBa"}, + ) + + assert_expected_properties( + hass, + registry, + "sensor.living_room_illuminance", + f"{AWAIR_UUID}_{SENSOR_TYPES[API_LUX][ATTR_UNIQUE_ID]}", + "804.9", + {ATTR_UNIT_OF_MEASUREMENT: "lx"}, ) -async def test_awair_score(hass): - """Test that we create a sensor for the 'Awair score'.""" - await setup_awair(hass) +async def test_awair_offline(hass): + """Test expected behavior when an Awair is offline.""" - sensor = hass.states.get("sensor.awair_score") - assert sensor.state == "78" - assert sensor.attributes["device_class"] == DEVICE_CLASS_SCORE - assert sensor.attributes["unit_of_measurement"] == UNIT_PERCENTAGE + fixtures = [USER_FIXTURE, DEVICES_FIXTURE, OFFLINE_FIXTURE] + await setup_awair(hass, fixtures) + + # The expected behavior is that we won't have any sensors + # if the device is not online when we set it up. python_awair + # does not make any assumptions about what sensors a device + # might have - they are created dynamically. + + # We check for the absence of the "awair score", which every + # device *should* have if it's online. If we don't see it, + # then we probably didn't set anything up. Which is correct, + # in this case. 
+ assert hass.states.get("sensor.living_room_awair_score") is None -async def test_awair_temp(hass): - """Test that we create a temperature sensor.""" - await setup_awair(hass) +async def test_awair_unavailable(hass): + """Test expected behavior when an Awair becomes offline later.""" - sensor = hass.states.get("sensor.awair_temperature") - assert sensor.state == "22.4" - assert sensor.attributes["device_class"] == DEVICE_CLASS_TEMPERATURE - assert sensor.attributes["unit_of_measurement"] == TEMP_CELSIUS + fixtures = [USER_FIXTURE, DEVICES_FIXTURE, GEN1_DATA_FIXTURE] + await setup_awair(hass, fixtures) + registry = await hass.helpers.entity_registry.async_get_registry() - -async def test_awair_humid(hass): - """Test that we create a humidity sensor.""" - await setup_awair(hass) - - sensor = hass.states.get("sensor.awair_humidity") - assert sensor.state == "32.7" - assert sensor.attributes["device_class"] == DEVICE_CLASS_HUMIDITY - assert sensor.attributes["unit_of_measurement"] == UNIT_PERCENTAGE - - -async def test_awair_co2(hass): - """Test that we create a CO2 sensor.""" - await setup_awair(hass) - - sensor = hass.states.get("sensor.awair_co2") - assert sensor.state == "612" - assert sensor.attributes["device_class"] == DEVICE_CLASS_CARBON_DIOXIDE - assert sensor.attributes["unit_of_measurement"] == CONCENTRATION_PARTS_PER_MILLION - - -async def test_awair_voc(hass): - """Test that we create a CO2 sensor.""" - await setup_awair(hass) - - sensor = hass.states.get("sensor.awair_voc") - assert sensor.state == "1012" - assert sensor.attributes["device_class"] == DEVICE_CLASS_VOLATILE_ORGANIC_COMPOUNDS - assert sensor.attributes["unit_of_measurement"] == CONCENTRATION_PARTS_PER_BILLION - - -async def test_awair_dust(hass): - """Test that we create a pm25 sensor.""" - await setup_awair(hass) - - # The Awair Gen1 that we mock actually returns 'DUST', but that - # is mapped to pm25 internally so that it shows up in Homekit - sensor = hass.states.get("sensor.awair_pm2_5") - assert sensor.state == "6.2" - assert sensor.attributes["device_class"] == DEVICE_CLASS_PM2_5 - assert ( - sensor.attributes["unit_of_measurement"] - == CONCENTRATION_MICROGRAMS_PER_CUBIC_METER + assert_expected_properties( + hass, + registry, + "sensor.living_room_awair_score", + f"{AWAIR_UUID}_{SENSOR_TYPES[API_SCORE][ATTR_UNIQUE_ID]}", + "88", + {ATTR_ICON: "mdi:blur"}, ) - -async def test_awair_unsupported_sensors(hass): - """Ensure we don't create sensors the stubbed device doesn't support.""" - await setup_awair(hass) - - # Our tests mock an Awair Gen 1 device, which should never return - # PM10 sensor readings. Assert that we didn't create a pm10 sensor, - # which could happen if someone were ever to refactor incorrectly. 
- assert hass.states.get("sensor.awair_pm10") is None - - -async def test_availability(hass): - """Ensure that we mark the component available/unavailable correctly.""" - await setup_awair(hass) - - assert hass.states.get("sensor.awair_score").state == "78" - - future = NOW + timedelta(minutes=30) - data_patch = patch( - "python_awair.AwairClient.air_data_latest", return_value=AIR_DATA_FIXTURE, - ) - - with data_patch, alter_time(future): - async_fire_time_changed(hass, future) - await hass.async_block_till_done() - - assert hass.states.get("sensor.awair_score").state == STATE_UNAVAILABLE - - future = NOW + timedelta(hours=1) - fixture = AIR_DATA_FIXTURE_UPDATED - fixture[0][ATTR_TIMESTAMP] = str(future) - data_patch = patch("python_awair.AwairClient.air_data_latest", return_value=fixture) - - with data_patch, alter_time(future): - async_fire_time_changed(hass, future) - await hass.async_block_till_done() - - assert hass.states.get("sensor.awair_score").state == "79" - - future = NOW + timedelta(minutes=90) - fixture = AIR_DATA_FIXTURE_EMPTY - data_patch = patch("python_awair.AwairClient.air_data_latest", return_value=fixture) - - with data_patch, alter_time(future): - async_fire_time_changed(hass, future) - await hass.async_block_till_done() - - assert hass.states.get("sensor.awair_score").state == STATE_UNAVAILABLE - - -async def test_async_update(hass): - """Ensure we can update sensors.""" - await setup_awair(hass) - - future = NOW + timedelta(minutes=10) - data_patch = patch( - "python_awair.AwairClient.air_data_latest", - return_value=AIR_DATA_FIXTURE_UPDATED, - ) - - with data_patch, alter_time(future): - async_fire_time_changed(hass, future) - await hass.async_block_till_done() - - score_sensor = hass.states.get("sensor.awair_score") - assert score_sensor.state == "79" - - assert hass.states.get("sensor.awair_temperature").state == "23.4" - assert hass.states.get("sensor.awair_humidity").state == "33.7" - assert hass.states.get("sensor.awair_co2").state == "613" - assert hass.states.get("sensor.awair_voc").state == "1013" - assert hass.states.get("sensor.awair_pm2_5").state == "7.2" - - -async def test_throttle_async_update(hass): - """Ensure we throttle updates.""" - await setup_awair(hass) - - future = NOW + timedelta(minutes=1) - data_patch = patch( - "python_awair.AwairClient.air_data_latest", - return_value=AIR_DATA_FIXTURE_UPDATED, - ) - - with data_patch, alter_time(future): - async_fire_time_changed(hass, future) - await hass.async_block_till_done() - - assert hass.states.get("sensor.awair_score").state == "78" - - future = NOW + timedelta(minutes=15) - with data_patch, alter_time(future): - async_fire_time_changed(hass, future) - await hass.async_block_till_done() - - assert hass.states.get("sensor.awair_score").state == "79" + with patch("python_awair.AwairClient.query", side_effect=OFFLINE_FIXTURE): + await hass.helpers.entity_component.async_update_entity( + "sensor.living_room_awair_score" + ) + assert_expected_properties( + hass, + registry, + "sensor.living_room_awair_score", + f"{AWAIR_UUID}_{SENSOR_TYPES[API_SCORE][ATTR_UNIQUE_ID]}", + STATE_UNAVAILABLE, + {ATTR_ICON: "mdi:blur"}, + ) diff --git a/tests/components/axis/test_camera.py b/tests/components/axis/test_camera.py index 6db8de0a0a8..af276fe6fe5 100644 --- a/tests/components/axis/test_camera.py +++ b/tests/components/axis/test_camera.py @@ -2,7 +2,6 @@ from homeassistant.components import camera from homeassistant.components.axis.const import ( - CONF_CAMERA, CONF_STREAM_PROFILE, DOMAIN as AXIS_DOMAIN, ) @@ 
-70,7 +69,7 @@ async def test_camera_with_stream_profile(hass): async def test_camera_disabled(hass): """Test that Axis camera platform is loaded properly but does not create camera entity.""" - with patch.dict(ENTRY_OPTIONS, {CONF_CAMERA: False}): + with patch("axis.vapix.Params.image_format", new=None): await setup_axis_integration(hass) assert len(hass.states.async_entity_ids(CAMERA_DOMAIN)) == 0 diff --git a/tests/components/axis/test_config_flow.py b/tests/components/axis/test_config_flow.py index 941961f623a..aa7d9db9027 100644 --- a/tests/components/axis/test_config_flow.py +++ b/tests/components/axis/test_config_flow.py @@ -2,7 +2,6 @@ from homeassistant import data_entry_flow from homeassistant.components.axis import config_flow from homeassistant.components.axis.const import ( - CONF_CAMERA, CONF_EVENTS, CONF_MODEL, CONF_STREAM_PROFILE, @@ -352,7 +351,6 @@ async def test_option_flow(hass): assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["data"] == { - CONF_CAMERA: True, CONF_EVENTS: True, CONF_STREAM_PROFILE: "profile_1", } diff --git a/tests/components/axis/test_device.py b/tests/components/axis/test_device.py index e4b0a960979..4350764c486 100644 --- a/tests/components/axis/test_device.py +++ b/tests/components/axis/test_device.py @@ -7,6 +7,7 @@ import axis as axislib from axis.api_discovery import URL as API_DISCOVERY_URL from axis.basic_device_info import URL as BASIC_DEVICE_INFO_URL from axis.event_stream import OPERATION_INITIALIZED +from axis.light_control import URL as LIGHT_CONTROL_URL from axis.mqtt import URL_CLIENT as MQTT_CLIENT_URL from axis.param_cgi import ( BRAND as BRAND_URL, @@ -22,7 +23,6 @@ import pytest from homeassistant import config_entries from homeassistant.components import axis from homeassistant.components.axis.const import ( - CONF_CAMERA, CONF_EVENTS, CONF_MODEL, DOMAIN as AXIS_DOMAIN, @@ -38,17 +38,13 @@ from homeassistant.const import ( ) from tests.async_mock import Mock, patch -from tests.common import ( - MockConfigEntry, - async_fire_mqtt_message, - async_mock_mqtt_component, -) +from tests.common import MockConfigEntry, async_fire_mqtt_message MAC = "00408C12345" MODEL = "model" NAME = "name" -ENTRY_OPTIONS = {CONF_CAMERA: True, CONF_EVENTS: True} +ENTRY_OPTIONS = {CONF_EVENTS: True} ENTRY_CONFIG = { CONF_HOST: "1.2.3.4", @@ -83,7 +79,6 @@ API_DISCOVERY_PORT_MANAGEMENT = { "name": "IO Port Management", } - BASIC_DEVICE_INFO_RESPONSE = { "apiVersion": "1.1", "data": { @@ -96,6 +91,27 @@ BASIC_DEVICE_INFO_RESPONSE = { }, } +LIGHT_CONTROL_RESPONSE = { + "apiVersion": "1.1", + "method": "getLightInformation", + "data": { + "items": [ + { + "lightID": "led0", + "lightType": "IR", + "enabled": True, + "synchronizeDayNightMode": True, + "lightState": False, + "automaticIntensityMode": False, + "automaticAngleOfIlluminationMode": False, + "nrOfLEDs": 1, + "error": False, + "errorInfo": "", + } + ] + }, +} + MQTT_CLIENT_RESPONSE = { "apiVersion": "1.0", "context": "some context", @@ -168,6 +184,8 @@ def vapix_session_request(session, url, **kwargs): return json.dumps(API_DISCOVERY_RESPONSE) if BASIC_DEVICE_INFO_URL in url: return json.dumps(BASIC_DEVICE_INFO_RESPONSE) + if LIGHT_CONTROL_URL in url: + return json.dumps(LIGHT_CONTROL_RESPONSE) if MQTT_CLIENT_URL in url: return json.dumps(MQTT_CLIENT_RESPONSE) if PORT_MANAGEMENT_URL in url: @@ -218,10 +236,11 @@ async def test_device_setup(hass): entry = device.config_entry - assert len(forward_entry_setup.mock_calls) == 3 + assert len(forward_entry_setup.mock_calls) == 
4 assert forward_entry_setup.mock_calls[0][1] == (entry, "binary_sensor") assert forward_entry_setup.mock_calls[1][1] == (entry, "camera") - assert forward_entry_setup.mock_calls[2][1] == (entry, "switch") + assert forward_entry_setup.mock_calls[2][1] == (entry, "light") + assert forward_entry_setup.mock_calls[3][1] == (entry, "switch") assert device.host == ENTRY_CONFIG[CONF_HOST] assert device.model == ENTRY_CONFIG[CONF_MODEL] @@ -243,17 +262,15 @@ async def test_device_info(hass): assert device.api.vapix.serial_number == "00408C12345" -async def test_device_support_mqtt(hass): +async def test_device_support_mqtt(hass, mqtt_mock): """Successful setup.""" api_discovery = deepcopy(API_DISCOVERY_RESPONSE) api_discovery["data"]["apiList"].append(API_DISCOVERY_MQTT) - mock_mqtt = await async_mock_mqtt_component(hass) - with patch.dict(API_DISCOVERY_RESPONSE, api_discovery): await setup_axis_integration(hass) - mock_mqtt.async_subscribe.assert_called_with(f"{MAC}/#", mock.ANY, 0, "utf-8") + mqtt_mock.async_subscribe.assert_called_with(f"{MAC}/#", mock.ANY, 0, "utf-8") topic = f"{MAC}/event/tns:onvif/Device/tns:axis/Sensor/PIR/$source/sensor/0" message = b'{"timestamp": 1590258472044, "topic": "onvif:Device/axis:Sensor/PIR", "message": {"source": {"sensor": "0"}, "key": {}, "data": {"state": "1"}}}' diff --git a/tests/components/axis/test_light.py b/tests/components/axis/test_light.py new file mode 100644 index 00000000000..98613451b0d --- /dev/null +++ b/tests/components/axis/test_light.py @@ -0,0 +1,150 @@ +"""Axis light platform tests.""" + +from copy import deepcopy + +from homeassistant.components.axis.const import DOMAIN as AXIS_DOMAIN +from homeassistant.components.light import ATTR_BRIGHTNESS, DOMAIN as LIGHT_DOMAIN +from homeassistant.setup import async_setup_component + +from .test_device import API_DISCOVERY_RESPONSE, NAME, setup_axis_integration + +from tests.async_mock import patch + +API_DISCOVERY_LIGHT_CONTROL = { + "id": "light-control", + "version": "1.1", + "name": "Light Control", +} + +EVENT_ON = { + "operation": "Initialized", + "topic": "tns1:Device/tnsaxis:Light/Status", + "source": "id", + "source_idx": "0", + "type": "state", + "value": "ON", +} + +EVENT_OFF = { + "operation": "Initialized", + "topic": "tns1:Device/tnsaxis:Light/Status", + "source": "id", + "source_idx": "0", + "type": "state", + "value": "OFF", +} + + +async def test_platform_manually_configured(hass): + """Test that nothing happens when platform is manually configured.""" + assert await async_setup_component( + hass, LIGHT_DOMAIN, {LIGHT_DOMAIN: {"platform": AXIS_DOMAIN}} + ) + + assert AXIS_DOMAIN not in hass.data + + +async def test_no_lights(hass): + """Test that no light events in Axis results in no light entities.""" + await setup_axis_integration(hass) + + assert not hass.states.async_entity_ids(LIGHT_DOMAIN) + + +async def test_lights(hass): + """Test that lights are loaded properly.""" + api_discovery = deepcopy(API_DISCOVERY_RESPONSE) + api_discovery["data"]["apiList"].append(API_DISCOVERY_LIGHT_CONTROL) + + with patch.dict(API_DISCOVERY_RESPONSE, api_discovery): + device = await setup_axis_integration(hass) + + # Add light + with patch( + "axis.light_control.LightControl.get_current_intensity", + return_value={"data": {"intensity": 100}}, + ), patch( + "axis.light_control.LightControl.get_valid_intensity", + return_value={"data": {"ranges": [{"high": 150}]}}, + ): + device.api.event.process_event(EVENT_ON) + await hass.async_block_till_done() + + assert 
len(hass.states.async_entity_ids(LIGHT_DOMAIN)) == 1
+
+    light_0 = hass.states.get(f"light.{NAME}_ir_light_0")
+    assert light_0.state == "on"
+    assert light_0.name == f"{NAME} IR Light 0"
+
+    # Turn on, set brightness, light already on
+    with patch(
+        "axis.light_control.LightControl.activate_light"
+    ) as mock_activate, patch(
+        "axis.light_control.LightControl.set_manual_intensity"
+    ) as mock_set_intensity, patch(
+        "axis.light_control.LightControl.get_current_intensity",
+        return_value={"data": {"intensity": 100}},
+    ):
+        await hass.services.async_call(
+            LIGHT_DOMAIN,
+            "turn_on",
+            {"entity_id": f"light.{NAME}_ir_light_0", ATTR_BRIGHTNESS: 50},
+            blocking=True,
+        )
+        mock_activate.assert_not_called()
+        mock_set_intensity.assert_called_once_with("led0", 29)
+
+    # Turn off
+    with patch(
+        "axis.light_control.LightControl.deactivate_light"
+    ) as mock_deactivate, patch(
+        "axis.light_control.LightControl.get_current_intensity",
+        return_value={"data": {"intensity": 100}},
+    ):
+        await hass.services.async_call(
+            LIGHT_DOMAIN,
+            "turn_off",
+            {"entity_id": f"light.{NAME}_ir_light_0"},
+            blocking=True,
+        )
+        mock_deactivate.assert_called_once()
+
+    # Event turn off light
+    device.api.event.process_event(EVENT_OFF)
+    await hass.async_block_till_done()
+
+    light_0 = hass.states.get(f"light.{NAME}_ir_light_0")
+    assert light_0.state == "off"
+
+    # Turn on, set brightness
+    with patch(
+        "axis.light_control.LightControl.activate_light"
+    ) as mock_activate, patch(
+        "axis.light_control.LightControl.set_manual_intensity"
+    ) as mock_set_intensity, patch(
+        "axis.light_control.LightControl.get_current_intensity",
+        return_value={"data": {"intensity": 100}},
+    ):
+        await hass.services.async_call(
+            LIGHT_DOMAIN,
+            "turn_on",
+            {"entity_id": f"light.{NAME}_ir_light_0"},
+            blocking=True,
+        )
+        mock_activate.assert_called_once()
+        mock_set_intensity.assert_not_called()
+
+    # Turn off, light already off
+    with patch(
+        "axis.light_control.LightControl.deactivate_light"
+    ) as mock_deactivate, patch(
+        "axis.light_control.LightControl.get_current_intensity",
+        return_value={"data": {"intensity": 100}},
+    ):
+        await hass.services.async_call(
+            LIGHT_DOMAIN,
+            "turn_off",
+            {"entity_id": f"light.{NAME}_ir_light_0"},
+            blocking=True,
+        )
+        mock_deactivate.assert_not_called()
diff --git a/tests/components/azure_event_hub/__init__.py b/tests/components/azure_event_hub/__init__.py
new file mode 100644
index 00000000000..9ed51480e83
--- /dev/null
+++ b/tests/components/azure_event_hub/__init__.py
@@ -0,0 +1 @@
+"""Tests for azure_event_hub component."""
diff --git a/tests/components/azure_event_hub/test_init.py b/tests/components/azure_event_hub/test_init.py
new file mode 100644
index 00000000000..bec710c5f3c
--- /dev/null
+++ b/tests/components/azure_event_hub/test_init.py
@@ -0,0 +1,211 @@
+"""The tests for the Azure Event Hub component."""
+from collections import namedtuple
+
+import pytest
+
+import homeassistant.components.azure_event_hub as azure_event_hub
+from homeassistant.const import STATE_ON
+from homeassistant.setup import async_setup_component
+
+from tests.async_mock import MagicMock, patch
+
+AZURE_EVENT_HUB_PATH = "homeassistant.components.azure_event_hub"
+PRODUCER_PATH = f"{AZURE_EVENT_HUB_PATH}.EventHubProducerClient"
+MIN_CONFIG = {
+    "event_hub_namespace": "namespace",
+    "event_hub_instance_name": "name",
+    "event_hub_sas_policy": "policy",
+    "event_hub_sas_key": "key",
+}
+FilterTest = namedtuple("FilterTest", "id should_pass")
+
+
+@pytest.fixture(autouse=True, name="mock_client",
scope="module") +def mock_client_fixture(): + """Mock the azure event hub producer client.""" + with patch(f"{PRODUCER_PATH}.send_batch") as mock_send_batch, patch( + f"{PRODUCER_PATH}.close" + ) as mock_close, patch(f"{PRODUCER_PATH}.__init__", return_value=None) as mock_init: + yield ( + mock_init, + mock_send_batch, + mock_close, + ) + + +@pytest.fixture(autouse=True, name="mock_batch") +def mock_batch_fixture(): + """Mock batch creator and return mocked batch object.""" + mock_batch = MagicMock() + with patch(f"{PRODUCER_PATH}.create_batch", return_value=mock_batch): + yield mock_batch + + +@pytest.fixture(autouse=True, name="mock_policy") +def mock_policy_fixture(): + """Mock azure shared key credential.""" + with patch(f"{AZURE_EVENT_HUB_PATH}.EventHubSharedKeyCredential") as policy: + yield policy + + +@pytest.fixture(autouse=True, name="mock_event_data") +def mock_event_data_fixture(): + """Mock the azure event data component.""" + with patch(f"{AZURE_EVENT_HUB_PATH}.EventData") as event_data: + yield event_data + + +@pytest.fixture(autouse=True, name="mock_call_later") +def mock_call_later_fixture(): + """Mock async_call_later to allow queue processing on demand.""" + with patch(f"{AZURE_EVENT_HUB_PATH}.async_call_later") as mock_call_later: + yield mock_call_later + + +async def test_minimal_config(hass): + """Test the minimal config and defaults of component.""" + config = {azure_event_hub.DOMAIN: MIN_CONFIG} + assert await async_setup_component(hass, azure_event_hub.DOMAIN, config) + + +async def test_full_config(hass): + """Test the full config of component.""" + config = { + azure_event_hub.DOMAIN: { + "send_interval": 10, + "max_delay": 10, + "filter": { + "include_domains": ["light"], + "include_entity_globs": ["sensor.included_*"], + "include_entities": ["binary_sensor.included"], + "exclude_domains": ["light"], + "exclude_entity_globs": ["sensor.excluded_*"], + "exclude_entities": ["binary_sensor.excluded"], + }, + } + } + config[azure_event_hub.DOMAIN].update(MIN_CONFIG) + assert await async_setup_component(hass, azure_event_hub.DOMAIN, config) + + +async def _setup(hass, mock_call_later, filter_config): + """Shared set up for filtering tests.""" + config = {azure_event_hub.DOMAIN: {"filter": filter_config}} + config[azure_event_hub.DOMAIN].update(MIN_CONFIG) + + assert await async_setup_component(hass, azure_event_hub.DOMAIN, config) + await hass.async_block_till_done() + mock_call_later.assert_called_once() + return mock_call_later.call_args[0][2] + + +async def _run_filter_tests(hass, tests, process_queue, mock_batch): + """Run a series of filter tests on azure event hub.""" + for test in tests: + hass.states.async_set(test.id, STATE_ON) + await hass.async_block_till_done() + await process_queue(None) + + if test.should_pass: + mock_batch.add.assert_called_once() + mock_batch.add.reset_mock() + else: + mock_batch.add.assert_not_called() + + +async def test_allowlist(hass, mock_batch, mock_call_later): + """Test an allowlist only config.""" + process_queue = await _setup( + hass, + mock_call_later, + { + "include_domains": ["light"], + "include_entity_globs": ["sensor.included_*"], + "include_entities": ["binary_sensor.included"], + }, + ) + + tests = [ + FilterTest("climate.excluded", False), + FilterTest("light.included", True), + FilterTest("sensor.excluded_test", False), + FilterTest("sensor.included_test", True), + FilterTest("binary_sensor.included", True), + FilterTest("binary_sensor.excluded", False), + ] + + await _run_filter_tests(hass, tests, process_queue, 
mock_batch) + + +async def test_denylist(hass, mock_batch, mock_call_later): + """Test a denylist only config.""" + process_queue = await _setup( + hass, + mock_call_later, + { + "exclude_domains": ["climate"], + "exclude_entity_globs": ["sensor.excluded_*"], + "exclude_entities": ["binary_sensor.excluded"], + }, + ) + + tests = [ + FilterTest("climate.excluded", False), + FilterTest("light.included", True), + FilterTest("sensor.excluded_test", False), + FilterTest("sensor.included_test", True), + FilterTest("binary_sensor.included", True), + FilterTest("binary_sensor.excluded", False), + ] + + await _run_filter_tests(hass, tests, process_queue, mock_batch) + + +async def test_filtered_allowlist(hass, mock_batch, mock_call_later): + """Test an allowlist config with a filtering denylist.""" + process_queue = await _setup( + hass, + mock_call_later, + { + "include_domains": ["light"], + "include_entity_globs": ["*.included_*"], + "exclude_domains": ["climate"], + "exclude_entity_globs": ["*.excluded_*"], + "exclude_entities": ["light.excluded"], + }, + ) + + tests = [ + FilterTest("light.included", True), + FilterTest("light.excluded_test", False), + FilterTest("light.excluded", False), + FilterTest("sensor.included_test", True), + FilterTest("climate.included_test", False), + ] + + await _run_filter_tests(hass, tests, process_queue, mock_batch) + + +async def test_filtered_denylist(hass, mock_batch, mock_call_later): + """Test a denylist config with a filtering allowlist.""" + process_queue = await _setup( + hass, + mock_call_later, + { + "include_entities": ["climate.included", "sensor.excluded_test"], + "exclude_domains": ["climate"], + "exclude_entity_globs": ["*.excluded_*"], + "exclude_entities": ["light.excluded"], + }, + ) + + tests = [ + FilterTest("climate.excluded", False), + FilterTest("climate.included", True), + FilterTest("switch.excluded_test", False), + FilterTest("sensor.excluded_test", True), + FilterTest("light.excluded", False), + FilterTest("light.included", True), + ] + + await _run_filter_tests(hass, tests, process_queue, mock_batch) diff --git a/tests/components/blackbird/test_media_player.py b/tests/components/blackbird/test_media_player.py index b090368a4ce..316ed681fa0 100644 --- a/tests/components/blackbird/test_media_player.py +++ b/tests/components/blackbird/test_media_player.py @@ -203,8 +203,9 @@ class TestBlackbirdMediaPlayer(unittest.TestCase): self.media_player = self.hass.data[DATA_BLACKBIRD]["/dev/ttyUSB0-3"] self.media_player.hass = self.hass self.media_player.entity_id = "media_player.zone_3" + self.addCleanup(self.tear_down_cleanup) - def tearDown(self): + def tear_down_cleanup(self): """Tear down the test case.""" self.hass.stop() diff --git a/tests/components/blink/__init__.py b/tests/components/blink/__init__.py new file mode 100644 index 00000000000..d7d04d83549 --- /dev/null +++ b/tests/components/blink/__init__.py @@ -0,0 +1 @@ +"""Tests for the Blink component.""" diff --git a/tests/components/blink/test_config_flow.py b/tests/components/blink/test_config_flow.py new file mode 100644 index 00000000000..e6315aac972 --- /dev/null +++ b/tests/components/blink/test_config_flow.py @@ -0,0 +1,204 @@ +"""Test the Blink config flow.""" +from homeassistant import config_entries, data_entry_flow, setup +from homeassistant.components.blink import DOMAIN + +from tests.async_mock import Mock, patch +from tests.common import MockConfigEntry + + +async def test_form(hass): + """Test we get the form.""" + await setup.async_setup_component(hass, 
"persistent_notification", {}) + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + assert result["type"] == "form" + assert result["errors"] == {} + + with patch( + "homeassistant.components.blink.config_flow.Blink", + return_value=Mock( + get_auth_token=Mock(return_value=True), + key_required=False, + login_response={}, + ), + ), patch( + "homeassistant.components.blink.async_setup", return_value=True + ) as mock_setup, patch( + "homeassistant.components.blink.async_setup_entry", return_value=True, + ) as mock_setup_entry: + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], {"username": "blink@example.com", "password": "example"}, + ) + + assert result2["type"] == "create_entry" + assert result2["title"] == "blink" + assert result2["result"].unique_id == "blink@example.com" + assert result2["data"] == { + "username": "blink@example.com", + "password": "example", + "login_response": {}, + } + await hass.async_block_till_done() + assert len(mock_setup.mock_calls) == 1 + assert len(mock_setup_entry.mock_calls) == 1 + + +async def test_import(hass): + """Test we import the config.""" + with patch( + "homeassistant.components.blink.config_flow.Blink", + return_value=Mock( + get_auth_token=Mock(return_value=True), + key_required=False, + login_response={}, + ), + ), patch( + "homeassistant.components.blink.async_setup_entry", return_value=True, + ) as mock_setup_entry: + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": config_entries.SOURCE_IMPORT}, + data={ + "username": "blink@example.com", + "password": "example", + "scan_interval": 10, + }, + ) + + assert result["type"] == "create_entry" + assert result["title"] == "blink" + assert result["result"].unique_id == "blink@example.com" + assert result["data"] == { + "username": "blink@example.com", + "password": "example", + "scan_interval": 10, + "login_response": {}, + } + await hass.async_block_till_done() + assert len(mock_setup_entry.mock_calls) == 1 + + +async def test_form_2fa(hass): + """Test we get the 2fa form.""" + await setup.async_setup_component(hass, "persistent_notification", {}) + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + + mock_blink = Mock( + get_auth_token=Mock(return_value=True), + key_required=True, + login_response={}, + login_handler=Mock(send_auth_key=Mock(return_value=True)), + ) + + with patch( + "homeassistant.components.blink.config_flow.Blink", return_value=mock_blink + ), patch( + "homeassistant.components.blink.async_setup", return_value=True + ) as mock_setup: + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], {"username": "blink@example.com", "password": "example"} + ) + + assert result2["type"] == "form" + assert result2["step_id"] == "2fa" + + mock_blink.key_required = False + with patch( + "homeassistant.components.blink.config_flow.Blink", return_value=mock_blink + ), patch( + "homeassistant.components.blink.async_setup", return_value=True + ) as mock_setup, patch( + "homeassistant.components.blink.async_setup_entry", return_value=True + ) as mock_setup_entry: + result3 = await hass.config_entries.flow.async_configure( + result2["flow_id"], {"pin": "1234"} + ) + + assert result3["type"] == "create_entry" + assert result3["title"] == "blink" + assert result3["result"].unique_id == "blink@example.com" + await hass.async_block_till_done() + assert len(mock_setup.mock_calls) == 1 + 
assert len(mock_setup_entry.mock_calls) == 1 + + +async def test_form_invalid_auth(hass): + """Test we handle invalid auth.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + + with patch( + "homeassistant.components.blink.config_flow.Blink.get_auth_token", + return_value=None, + ): + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], {"username": "blink@example.com", "password": "example"} + ) + + assert result2["type"] == "form" + assert result2["errors"] == {"base": "invalid_auth"} + + +async def test_form_unknown_error(hass): + """Test we handle unknown error at startup.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + + with patch( + "homeassistant.components.blink.config_flow.Blink.get_auth_token", + return_value=None, + ), patch( + "homeassistant.components.blink.config_flow.validate_input", + side_effect=KeyError, + ): + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], {"username": "blink@example.com", "password": "example"} + ) + + assert result2["type"] == "form" + assert result2["errors"] == {"base": "unknown"} + + +async def test_options_flow(hass): + """Test config flow options.""" + config_entry = MockConfigEntry( + domain=DOMAIN, + data={ + "username": "blink@example.com", + "password": "example", + "login_response": {}, + }, + options={}, + entry_id=1, + ) + config_entry.add_to_hass(hass) + + mock_blink = Mock( + login_handler=True, + setup_params=Mock(return_value=True), + setup_post_verify=Mock(return_value=True), + ) + + with patch("homeassistant.components.blink.Blink", return_value=mock_blink): + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + result = await hass.config_entries.options.async_init( + config_entry.entry_id, context={"show_advanced_options": False} + ) + + assert result["type"] == data_entry_flow.RESULT_TYPE_FORM + assert result["step_id"] == "simple_options" + + result = await hass.config_entries.options.async_configure( + result["flow_id"], user_input={"scan_interval": 5}, + ) + + assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY + assert result["data"] == {"scan_interval": 5} + assert mock_blink.refresh_rate == 5 diff --git a/tests/components/bom/test_sensor.py b/tests/components/bom/test_sensor.py index 8c647bbf6cf..49cdb3e6ee3 100644 --- a/tests/components/bom/test_sensor.py +++ b/tests/components/bom/test_sensor.py @@ -60,10 +60,7 @@ class TestBOMWeatherSensor(unittest.TestCase): """Set up things to be run when tests are started.""" self.hass = get_test_home_assistant() self.config = VALID_CONFIG - - def tearDown(self): - """Stop everything that was started.""" - self.hass.stop() + self.addCleanup(self.hass.stop) @patch("requests.get", side_effect=mocked_requests) def test_setup(self, mock_get): diff --git a/tests/components/canary/test_init.py b/tests/components/canary/test_init.py index a3f6fbd7e2d..0cfbfd56de6 100644 --- a/tests/components/canary/test_init.py +++ b/tests/components/canary/test_init.py @@ -43,8 +43,9 @@ class TestCanary(unittest.TestCase): def setUp(self): """Initialize values for this test case class.""" self.hass = get_test_home_assistant() + self.addCleanup(self.tear_down_cleanup) - def tearDown(self): # pylint: disable=invalid-name + def tear_down_cleanup(self): """Stop everything that was started.""" self.hass.stop() diff --git 
a/tests/components/canary/test_sensor.py b/tests/components/canary/test_sensor.py index 5a4a82ccc5a..d7d16fa5f88 100644 --- a/tests/components/canary/test_sensor.py +++ b/tests/components/canary/test_sensor.py @@ -34,10 +34,7 @@ class TestCanarySensorSetup(unittest.TestCase): """Initialize values for this testcase class.""" self.hass = get_test_home_assistant() self.config = copy.deepcopy(VALID_CONFIG) - - def tearDown(self): - """Stop everything that was started.""" - self.hass.stop() + self.addCleanup(self.hass.stop) def test_setup_sensors(self): """Test the sensor setup.""" diff --git a/tests/components/cert_expiry/helpers.py b/tests/components/cert_expiry/helpers.py new file mode 100644 index 00000000000..222f07b7803 --- /dev/null +++ b/tests/components/cert_expiry/helpers.py @@ -0,0 +1,15 @@ +"""Helpers for Cert Expiry tests.""" +from datetime import datetime, timedelta + +from homeassistant.util import dt + + +def static_datetime(): + """Build a datetime object for testing in the correct timezone.""" + return dt.as_utc(datetime(2020, 6, 12, 8, 0, 0)) + + +def future_timestamp(days): + """Create timestamp object for requested days in future.""" + delta = timedelta(days=days, minutes=1) + return static_datetime() + delta diff --git a/tests/components/cert_expiry/test_config_flow.py b/tests/components/cert_expiry/test_config_flow.py index 9618525ef32..e5d90e12d13 100644 --- a/tests/components/cert_expiry/test_config_flow.py +++ b/tests/components/cert_expiry/test_config_flow.py @@ -7,6 +7,7 @@ from homeassistant.components.cert_expiry.const import DEFAULT_PORT, DOMAIN from homeassistant.const import CONF_HOST, CONF_NAME, CONF_PORT from .const import HOST, PORT +from .helpers import future_timestamp from tests.async_mock import patch from tests.common import MockConfigEntry @@ -21,7 +22,7 @@ async def test_user(hass): assert result["step_id"] == "user" with patch( - "homeassistant.components.cert_expiry.config_flow.get_cert_time_to_expiry" + "homeassistant.components.cert_expiry.config_flow.get_cert_expiry_timestamp" ): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={CONF_HOST: HOST, CONF_PORT: PORT} @@ -65,12 +66,15 @@ async def test_user_with_bad_cert(hass): async def test_import_host_only(hass): """Test import with host only.""" with patch( - "homeassistant.components.cert_expiry.config_flow.get_cert_time_to_expiry", - return_value=1, + "homeassistant.components.cert_expiry.config_flow.get_cert_expiry_timestamp" + ), patch( + "homeassistant.components.cert_expiry.get_cert_expiry_timestamp", + return_value=future_timestamp(1), ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "import"}, data={CONF_HOST: HOST} ) + await hass.async_block_till_done() assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["title"] == HOST @@ -78,21 +82,21 @@ async def test_import_host_only(hass): assert result["data"][CONF_PORT] == DEFAULT_PORT assert result["result"].unique_id == f"{HOST}:{DEFAULT_PORT}" - with patch("homeassistant.components.cert_expiry.sensor.async_setup_entry"): - await hass.async_block_till_done() - async def test_import_host_and_port(hass): """Test import with host and port.""" with patch( - "homeassistant.components.cert_expiry.config_flow.get_cert_time_to_expiry", - return_value=1, + "homeassistant.components.cert_expiry.config_flow.get_cert_expiry_timestamp" + ), patch( + "homeassistant.components.cert_expiry.get_cert_expiry_timestamp", + return_value=future_timestamp(1), ): result = 
await hass.config_entries.flow.async_init( DOMAIN, context={"source": "import"}, data={CONF_HOST: HOST, CONF_PORT: PORT}, ) + await hass.async_block_till_done() assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["title"] == HOST @@ -100,18 +104,19 @@ async def test_import_host_and_port(hass): assert result["data"][CONF_PORT] == PORT assert result["result"].unique_id == f"{HOST}:{PORT}" - with patch("homeassistant.components.cert_expiry.sensor.async_setup_entry"): - await hass.async_block_till_done() - async def test_import_non_default_port(hass): """Test import with host and non-default port.""" with patch( - "homeassistant.components.cert_expiry.config_flow.get_cert_time_to_expiry" + "homeassistant.components.cert_expiry.config_flow.get_cert_expiry_timestamp" + ), patch( + "homeassistant.components.cert_expiry.get_cert_expiry_timestamp", + return_value=future_timestamp(1), ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "import"}, data={CONF_HOST: HOST, CONF_PORT: 888} ) + await hass.async_block_till_done() assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["title"] == f"{HOST}:888" @@ -119,21 +124,21 @@ async def test_import_non_default_port(hass): assert result["data"][CONF_PORT] == 888 assert result["result"].unique_id == f"{HOST}:888" - with patch("homeassistant.components.cert_expiry.sensor.async_setup_entry"): - await hass.async_block_till_done() - async def test_import_with_name(hass): """Test import with name (deprecated).""" with patch( - "homeassistant.components.cert_expiry.config_flow.get_cert_time_to_expiry", - return_value=1, + "homeassistant.components.cert_expiry.config_flow.get_cert_expiry_timestamp" + ), patch( + "homeassistant.components.cert_expiry.get_cert_expiry_timestamp", + return_value=future_timestamp(1), ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "import"}, data={CONF_NAME: "legacy", CONF_HOST: HOST, CONF_PORT: PORT}, ) + await hass.async_block_till_done() assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["title"] == HOST @@ -141,9 +146,6 @@ async def test_import_with_name(hass): assert result["data"][CONF_PORT] == PORT assert result["result"].unique_id == f"{HOST}:{PORT}" - with patch("homeassistant.components.cert_expiry.sensor.async_setup_entry"): - await hass.async_block_till_done() - async def test_bad_import(hass): """Test import step.""" @@ -162,7 +164,7 @@ async def test_bad_import(hass): async def test_abort_if_already_setup(hass): """Test we abort if the cert is already setup.""" MockConfigEntry( - domain="cert_expiry", + domain=DOMAIN, data={CONF_HOST: HOST, CONF_PORT: PORT}, unique_id=f"{HOST}:{PORT}", ).add_to_hass(hass) diff --git a/tests/components/cert_expiry/test_init.py b/tests/components/cert_expiry/test_init.py index 3a2aeb84734..2f5e4ce9a1c 100644 --- a/tests/components/cert_expiry/test_init.py +++ b/tests/components/cert_expiry/test_init.py @@ -9,6 +9,7 @@ from homeassistant.setup import async_setup_component import homeassistant.util.dt as dt_util from .const import HOST, PORT +from .helpers import future_timestamp, static_datetime from tests.async_mock import patch from tests.common import MockConfigEntry, async_fire_time_changed @@ -30,11 +31,10 @@ async def test_setup_with_config(hass): async_fire_time_changed(hass, next_update) with patch( - "homeassistant.components.cert_expiry.config_flow.get_cert_time_to_expiry", - return_value=100, + 
"homeassistant.components.cert_expiry.config_flow.get_cert_expiry_timestamp" ), patch( - "homeassistant.components.cert_expiry.sensor.get_cert_time_to_expiry", - return_value=100, + "homeassistant.components.cert_expiry.get_cert_expiry_timestamp", + return_value=future_timestamp(1), ): await hass.async_block_till_done() @@ -52,8 +52,8 @@ async def test_update_unique_id(hass): assert not entry.unique_id with patch( - "homeassistant.components.cert_expiry.sensor.get_cert_time_to_expiry", - return_value=100, + "homeassistant.components.cert_expiry.get_cert_expiry_timestamp", + return_value=future_timestamp(1), ): assert await async_setup_component(hass, DOMAIN, {}) is True await hass.async_block_till_done() @@ -62,7 +62,8 @@ async def test_update_unique_id(hass): assert entry.unique_id == f"{HOST}:{PORT}" -async def test_unload_config_entry(hass): +@patch("homeassistant.util.dt.utcnow", return_value=static_datetime()) +async def test_unload_config_entry(mock_now, hass): """Test unloading a config entry.""" entry = MockConfigEntry( domain=DOMAIN, @@ -76,8 +77,8 @@ async def test_unload_config_entry(hass): assert entry is config_entries[0] with patch( - "homeassistant.components.cert_expiry.sensor.get_cert_time_to_expiry", - return_value=100, + "homeassistant.components.cert_expiry.get_cert_expiry_timestamp", + return_value=future_timestamp(100), ): assert await async_setup_component(hass, DOMAIN, {}) is True await hass.async_block_till_done() diff --git a/tests/components/cert_expiry/test_sensors.py b/tests/components/cert_expiry/test_sensors.py index 9fcd1ac3efe..7896da9e74b 100644 --- a/tests/components/cert_expiry/test_sensors.py +++ b/tests/components/cert_expiry/test_sensors.py @@ -3,26 +3,32 @@ from datetime import timedelta import socket import ssl -from homeassistant.const import CONF_HOST, CONF_PORT, STATE_UNAVAILABLE +from homeassistant.components.cert_expiry.const import DOMAIN +from homeassistant.config_entries import ENTRY_STATE_SETUP_RETRY +from homeassistant.const import CONF_HOST, CONF_PORT, STATE_UNAVAILABLE, STATE_UNKNOWN import homeassistant.util.dt as dt_util from .const import HOST, PORT +from .helpers import future_timestamp, static_datetime from tests.async_mock import patch from tests.common import MockConfigEntry, async_fire_time_changed -async def test_async_setup_entry(hass): +@patch("homeassistant.util.dt.utcnow", return_value=static_datetime()) +async def test_async_setup_entry(mock_now, hass): """Test async_setup_entry.""" entry = MockConfigEntry( - domain="cert_expiry", + domain=DOMAIN, data={CONF_HOST: HOST, CONF_PORT: PORT}, unique_id=f"{HOST}:{PORT}", ) + timestamp = future_timestamp(100) + with patch( - "homeassistant.components.cert_expiry.sensor.get_cert_time_to_expiry", - return_value=100, + "homeassistant.components.cert_expiry.get_cert_expiry_timestamp", + return_value=timestamp, ): entry.add_to_hass(hass) assert await hass.config_entries.async_setup(entry.entry_id) @@ -35,11 +41,18 @@ async def test_async_setup_entry(hass): assert state.attributes.get("error") == "None" assert state.attributes.get("is_valid") + state = hass.states.get("sensor.cert_expiry_timestamp_example_com") + assert state is not None + assert state.state != STATE_UNAVAILABLE + assert state.state == timestamp.isoformat() + assert state.attributes.get("error") == "None" + assert state.attributes.get("is_valid") + async def test_async_setup_entry_bad_cert(hass): """Test async_setup_entry with a bad/expired cert.""" entry = MockConfigEntry( - domain="cert_expiry", + domain=DOMAIN, 
data={CONF_HOST: HOST, CONF_PORT: PORT}, unique_id=f"{HOST}:{PORT}", ) @@ -63,7 +76,7 @@ async def test_async_setup_entry_bad_cert(hass): async def test_async_setup_entry_host_unavailable(hass): """Test async_setup_entry when host is unavailable.""" entry = MockConfigEntry( - domain="cert_expiry", + domain=DOMAIN, data={CONF_HOST: HOST, CONF_PORT: PORT}, unique_id=f"{HOST}:{PORT}", ) @@ -73,11 +86,10 @@ async def test_async_setup_entry_host_unavailable(hass): side_effect=socket.gaierror, ): entry.add_to_hass(hass) - assert await hass.config_entries.async_setup(entry.entry_id) + assert await hass.config_entries.async_setup(entry.entry_id) is False await hass.async_block_till_done() - state = hass.states.get("sensor.cert_expiry_example_com") - assert state is None + assert entry.state == ENTRY_STATE_SETUP_RETRY next_update = dt_util.utcnow() + timedelta(seconds=45) async_fire_time_changed(hass, next_update) @@ -94,14 +106,19 @@ async def test_async_setup_entry_host_unavailable(hass): async def test_update_sensor(hass): """Test async_update for sensor.""" entry = MockConfigEntry( - domain="cert_expiry", + domain=DOMAIN, data={CONF_HOST: HOST, CONF_PORT: PORT}, unique_id=f"{HOST}:{PORT}", ) - with patch( - "homeassistant.components.cert_expiry.sensor.get_cert_time_to_expiry", - return_value=100, + starting_time = static_datetime() + timestamp = future_timestamp(100) + + with patch("homeassistant.util.dt.utcnow", return_value=starting_time), patch( + "homeassistant.helpers.update_coordinator.utcnow", return_value=starting_time + ), patch( + "homeassistant.components.cert_expiry.get_cert_expiry_timestamp", + return_value=timestamp, ): entry.add_to_hass(hass) assert await hass.config_entries.async_setup(entry.entry_id) @@ -114,13 +131,22 @@ async def test_update_sensor(hass): assert state.attributes.get("error") == "None" assert state.attributes.get("is_valid") - next_update = dt_util.utcnow() + timedelta(hours=12) - async_fire_time_changed(hass, next_update) + state = hass.states.get("sensor.cert_expiry_timestamp_example_com") + assert state is not None + assert state.state != STATE_UNAVAILABLE + assert state.state == timestamp.isoformat() + assert state.attributes.get("error") == "None" + assert state.attributes.get("is_valid") - with patch( - "homeassistant.components.cert_expiry.sensor.get_cert_time_to_expiry", - return_value=99, + next_update = starting_time + timedelta(hours=24) + + with patch("homeassistant.util.dt.utcnow", return_value=next_update), patch( + "homeassistant.helpers.update_coordinator.utcnow", return_value=next_update + ), patch( + "homeassistant.components.cert_expiry.get_cert_expiry_timestamp", + return_value=timestamp, ): + async_fire_time_changed(hass, next_update) await hass.async_block_till_done() state = hass.states.get("sensor.cert_expiry_example_com") @@ -130,18 +156,30 @@ async def test_update_sensor(hass): assert state.attributes.get("error") == "None" assert state.attributes.get("is_valid") + state = hass.states.get("sensor.cert_expiry_timestamp_example_com") + assert state is not None + assert state.state != STATE_UNAVAILABLE + assert state.state == timestamp.isoformat() + assert state.attributes.get("error") == "None" + assert state.attributes.get("is_valid") + async def test_update_sensor_network_errors(hass): """Test async_update for sensor.""" entry = MockConfigEntry( - domain="cert_expiry", + domain=DOMAIN, data={CONF_HOST: HOST, CONF_PORT: PORT}, unique_id=f"{HOST}:{PORT}", ) - with patch( - 
"homeassistant.components.cert_expiry.sensor.get_cert_time_to_expiry", - return_value=100, + starting_time = static_datetime() + timestamp = future_timestamp(100) + + with patch("homeassistant.util.dt.utcnow", return_value=starting_time), patch( + "homeassistant.helpers.update_coordinator.utcnow", return_value=starting_time + ), patch( + "homeassistant.components.cert_expiry.get_cert_expiry_timestamp", + return_value=timestamp, ): entry.add_to_hass(hass) assert await hass.config_entries.async_setup(entry.entry_id) @@ -154,41 +192,54 @@ async def test_update_sensor_network_errors(hass): assert state.attributes.get("error") == "None" assert state.attributes.get("is_valid") - next_update = dt_util.utcnow() + timedelta(hours=12) - async_fire_time_changed(hass, next_update) + state = hass.states.get("sensor.cert_expiry_timestamp_example_com") + assert state is not None + assert state.state != STATE_UNAVAILABLE + assert state.state == timestamp.isoformat() + assert state.attributes.get("error") == "None" + assert state.attributes.get("is_valid") - with patch( + next_update = starting_time + timedelta(hours=24) + + with patch("homeassistant.util.dt.utcnow", return_value=next_update), patch( + "homeassistant.helpers.update_coordinator.utcnow", return_value=next_update + ), patch( "homeassistant.components.cert_expiry.helper.get_cert", side_effect=socket.gaierror, ): + async_fire_time_changed(hass, next_update) await hass.async_block_till_done() + next_update = starting_time + timedelta(hours=48) + state = hass.states.get("sensor.cert_expiry_example_com") assert state.state == STATE_UNAVAILABLE - next_update = dt_util.utcnow() + timedelta(hours=12) - async_fire_time_changed(hass, next_update) - - with patch( - "homeassistant.components.cert_expiry.sensor.get_cert_time_to_expiry", - return_value=99, + with patch("homeassistant.util.dt.utcnow", return_value=next_update), patch( + "homeassistant.helpers.update_coordinator.utcnow", return_value=next_update + ), patch( + "homeassistant.components.cert_expiry.get_cert_expiry_timestamp", + return_value=timestamp, ): + async_fire_time_changed(hass, next_update) await hass.async_block_till_done() state = hass.states.get("sensor.cert_expiry_example_com") assert state is not None assert state.state != STATE_UNAVAILABLE - assert state.state == "99" + assert state.state == "98" assert state.attributes.get("error") == "None" assert state.attributes.get("is_valid") - next_update = dt_util.utcnow() + timedelta(hours=12) - async_fire_time_changed(hass, next_update) + next_update = starting_time + timedelta(hours=72) - with patch( + with patch("homeassistant.util.dt.utcnow", return_value=next_update), patch( + "homeassistant.helpers.update_coordinator.utcnow", return_value=next_update + ), patch( "homeassistant.components.cert_expiry.helper.get_cert", side_effect=ssl.SSLError("something bad"), ): + async_fire_time_changed(hass, next_update) await hass.async_block_till_done() state = hass.states.get("sensor.cert_expiry_example_com") @@ -198,12 +249,20 @@ async def test_update_sensor_network_errors(hass): assert state.attributes.get("error") == "something bad" assert not state.attributes.get("is_valid") - next_update = dt_util.utcnow() + timedelta(hours=12) - async_fire_time_changed(hass, next_update) + state = hass.states.get("sensor.cert_expiry_timestamp_example_com") + assert state is not None + assert state.state == STATE_UNKNOWN + assert state.attributes.get("error") == "something bad" + assert not state.attributes.get("is_valid") - with patch( + next_update = 
starting_time + timedelta(hours=96) + + with patch("homeassistant.util.dt.utcnow", return_value=next_update), patch( + "homeassistant.helpers.update_coordinator.utcnow", return_value=next_update + ), patch( "homeassistant.components.cert_expiry.helper.get_cert", side_effect=Exception() ): + async_fire_time_changed(hass, next_update) await hass.async_block_till_done() state = hass.states.get("sensor.cert_expiry_example_com") diff --git a/tests/components/cloud/test_http_api.py b/tests/components/cloud/test_http_api.py index df506d2d8fc..13c4649f39e 100644 --- a/tests/components/cloud/test_http_api.py +++ b/tests/components/cloud/test_http_api.py @@ -2,6 +2,7 @@ import asyncio from ipaddress import ip_network +import aiohttp from hass_nabucasa import thingtalk from hass_nabucasa.auth import Unauthenticated, UnknownError from hass_nabucasa.const import STATE_CONNECTED @@ -24,15 +25,15 @@ from tests.components.google_assistant import MockConfig SUBSCRIPTION_INFO_URL = "https://api-test.hass.io/subscription_info" -@pytest.fixture() -def mock_auth(): +@pytest.fixture(name="mock_auth") +def mock_auth_fixture(): """Mock check token.""" with patch("hass_nabucasa.auth.CognitoAuth.async_check_token"): yield -@pytest.fixture() -def mock_cloud_login(hass, setup_api): +@pytest.fixture(name="mock_cloud_login") +def mock_cloud_login_fixture(hass, setup_api): """Mock cloud is logged in.""" hass.data[DOMAIN].id_token = jwt.encode( { @@ -44,8 +45,8 @@ def mock_cloud_login(hass, setup_api): ) -@pytest.fixture(autouse=True) -def setup_api(hass, aioclient_mock): +@pytest.fixture(autouse=True, name="setup_api") +def setup_api_fixture(hass, aioclient_mock): """Initialize HTTP API.""" hass.loop.run_until_complete( mock_cloud( @@ -67,15 +68,15 @@ def setup_api(hass, aioclient_mock): return mock_cloud_prefs(hass) -@pytest.fixture -def cloud_client(hass, hass_client): +@pytest.fixture(name="cloud_client") +def cloud_client_fixture(hass, hass_client): """Fixture that can fetch from the cloud client.""" with patch("hass_nabucasa.Cloud.write_user_info"): yield hass.loop.run_until_complete(hass_client()) -@pytest.fixture -def mock_cognito(): +@pytest.fixture(name="mock_cognito") +def mock_cognito_fixture(): """Mock warrant.""" with patch("hass_nabucasa.auth.CognitoAuth._cognito") as mock_cog: yield mock_cog() @@ -279,6 +280,17 @@ async def test_forgot_password_view_unknown_error(mock_cognito, cloud_client): assert req.status == 502 +async def test_forgot_password_view_aiohttp_error(mock_cognito, cloud_client): + """Test unknown error while logging out.""" + mock_cognito.initiate_forgot_password.side_effect = aiohttp.ClientResponseError( + Mock(), Mock() + ) + req = await cloud_client.post( + "/api/cloud/forgot_password", json={"email": "hello@bla.com"} + ) + assert req.status == 500 + + async def test_resend_confirm_view(mock_cognito, cloud_client): """Test logging out.""" req = await cloud_client.post( @@ -350,14 +362,18 @@ async def test_websocket_status( }, "alexa_entities": { "include_domains": [], + "include_entity_globs": [], "include_entities": ["light.kitchen", "switch.ac"], "exclude_domains": [], + "exclude_entity_globs": [], "exclude_entities": [], }, "google_entities": { "include_domains": ["light"], + "include_entity_globs": [], "include_entities": [], "exclude_domains": [], + "exclude_entity_globs": [], "exclude_entities": [], }, "remote_domain": None, @@ -582,6 +598,7 @@ async def test_enabling_remote_trusted_networks_local4( hass, hass_ws_client, setup_api, mock_cloud_login ): """Test we cannot enable 
remote UI when trusted networks active.""" + # pylint: disable=protected-access hass.auth._providers[ ("trusted_networks", None) ] = tn_auth.TrustedNetworksAuthProvider( @@ -614,6 +631,7 @@ async def test_enabling_remote_trusted_networks_local6( hass, hass_ws_client, setup_api, mock_cloud_login ): """Test we cannot enable remote UI when trusted networks active.""" + # pylint: disable=protected-access hass.auth._providers[ ("trusted_networks", None) ] = tn_auth.TrustedNetworksAuthProvider( @@ -646,6 +664,7 @@ async def test_enabling_remote_trusted_networks_other( hass, hass_ws_client, setup_api, mock_cloud_login ): """Test we can enable remote UI when trusted networks active.""" + # pylint: disable=protected-access hass.auth._providers[ ("trusted_networks", None) ] = tn_auth.TrustedNetworksAuthProvider( diff --git a/tests/components/coinmarketcap/test_sensor.py b/tests/components/coinmarketcap/test_sensor.py index f53a92a3d86..4d1fb31da95 100644 --- a/tests/components/coinmarketcap/test_sensor.py +++ b/tests/components/coinmarketcap/test_sensor.py @@ -23,10 +23,7 @@ class TestCoinMarketCapSensor(unittest.TestCase): """Set up things to be run when tests are started.""" self.hass = get_test_home_assistant() self.config = VALID_CONFIG - - def tearDown(self): - """Stop everything that was started.""" - self.hass.stop() + self.addCleanup(self.hass.stop) @patch( "coinmarketcap.Market.ticker", diff --git a/tests/components/command_line/test_binary_sensor.py b/tests/components/command_line/test_binary_sensor.py index 33c28b7d65a..90871faaf78 100644 --- a/tests/components/command_line/test_binary_sensor.py +++ b/tests/components/command_line/test_binary_sensor.py @@ -14,10 +14,7 @@ class TestCommandSensorBinarySensor(unittest.TestCase): def setUp(self): """Set up things to be run when tests are started.""" self.hass = get_test_home_assistant() - - def tearDown(self): - """Stop everything that was started.""" - self.hass.stop() + self.addCleanup(self.hass.stop) def test_setup(self): """Test sensor setup.""" diff --git a/tests/components/command_line/test_notify.py b/tests/components/command_line/test_notify.py index f20011d4482..ecdb5af91da 100644 --- a/tests/components/command_line/test_notify.py +++ b/tests/components/command_line/test_notify.py @@ -16,8 +16,9 @@ class TestCommandLine(unittest.TestCase): def setUp(self): # pylint: disable=invalid-name """Set up things to be run when tests are started.""" self.hass = get_test_home_assistant() + self.addCleanup(self.tear_down_cleanup) - def tearDown(self): # pylint: disable=invalid-name + def tear_down_cleanup(self): """Stop down everything that was started.""" self.hass.stop() diff --git a/tests/components/command_line/test_sensor.py b/tests/components/command_line/test_sensor.py index d48f5789fb3..9d7e46002f6 100644 --- a/tests/components/command_line/test_sensor.py +++ b/tests/components/command_line/test_sensor.py @@ -14,10 +14,7 @@ class TestCommandSensorSensor(unittest.TestCase): def setUp(self): """Set up things to be run when tests are started.""" self.hass = get_test_home_assistant() - - def tearDown(self): - """Stop everything that was started.""" - self.hass.stop() + self.addCleanup(self.hass.stop) def update_side_effect(self, data): """Side effect function for mocking CommandSensorData.update().""" diff --git a/tests/components/command_line/test_switch.py b/tests/components/command_line/test_switch.py index ab5d0044f73..5c4a1aa336f 100644 --- a/tests/components/command_line/test_switch.py +++ b/tests/components/command_line/test_switch.py 
@@ -20,10 +20,7 @@ class TestCommandSwitch(unittest.TestCase): def setUp(self): """Set up things to be run when tests are started.""" self.hass = get_test_home_assistant() - - def tearDown(self): - """Stop everything that was started.""" - self.hass.stop() + self.addCleanup(self.hass.stop) def test_state_none(self): """Test with none state.""" diff --git a/tests/components/configurator/test_init.py b/tests/components/configurator/test_init.py index b572609c5a2..9d696ab5f86 100644 --- a/tests/components/configurator/test_init.py +++ b/tests/components/configurator/test_init.py @@ -15,11 +15,7 @@ class TestConfigurator(unittest.TestCase): def setUp(self): """Set up things to be run when tests are started.""" self.hass = get_test_home_assistant() - - # pylint: disable=invalid-name - def tearDown(self): - """Stop everything that was started.""" - self.hass.stop() + self.addCleanup(self.hass.stop) def test_request_least_info(self): """Test request config with least amount of data.""" diff --git a/tests/components/coronavirus/conftest.py b/tests/components/coronavirus/conftest.py index 6e49d2aa164..bbe5a463802 100644 --- a/tests/components/coronavirus/conftest.py +++ b/tests/components/coronavirus/conftest.py @@ -13,6 +13,13 @@ def mock_cases(): return_value=[ Mock(country="Netherlands", confirmed=10, recovered=8, deaths=1, current=1), Mock(country="Germany", confirmed=1, recovered=0, deaths=0, current=0), + Mock( + country="Sweden", + confirmed=None, + recovered=None, + deaths=None, + current=None, + ), ], ) as mock_get_cases: yield mock_get_cases diff --git a/tests/components/daikin/test_config_flow.py b/tests/components/daikin/test_config_flow.py index f8d13bdd355..c315bcc32a8 100644 --- a/tests/components/daikin/test_config_flow.py +++ b/tests/components/daikin/test_config_flow.py @@ -47,7 +47,7 @@ def mock_daikin_discovery(): with patch("homeassistant.components.daikin.config_flow.Discovery") as Discovery: Discovery().poll.return_value = { "127.0.01": {"mac": "AABBCCDDEEFF", "id": "test"} - } + }.values() yield Discovery diff --git a/tests/components/darksky/test_sensor.py b/tests/components/darksky/test_sensor.py index 9a48e4f1cce..be1e9849452 100644 --- a/tests/components/darksky/test_sensor.py +++ b/tests/components/darksky/test_sensor.py @@ -105,8 +105,9 @@ class TestDarkSkySetup(unittest.TestCase): self.lat = self.hass.config.latitude = 37.8267 self.lon = self.hass.config.longitude = -122.423 self.entities = [] + self.addCleanup(self.tear_down_cleanup) - def tearDown(self): # pylint: disable=invalid-name + def tear_down_cleanup(self): """Stop everything that was started.""" self.hass.stop() diff --git a/tests/components/darksky/test_weather.py b/tests/components/darksky/test_weather.py index 9f43534d7cd..1a2a2e156d9 100644 --- a/tests/components/darksky/test_weather.py +++ b/tests/components/darksky/test_weather.py @@ -23,8 +23,9 @@ class TestDarkSky(unittest.TestCase): self.hass.config.units = METRIC_SYSTEM self.lat = self.hass.config.latitude = 37.8267 self.lon = self.hass.config.longitude = -122.423 + self.addCleanup(self.tear_down_cleanup) - def tearDown(self): + def tear_down_cleanup(self): """Stop down everything that was started.""" self.hass.stop() diff --git a/tests/components/datadog/test_init.py b/tests/components/datadog/test_init.py index 71b2af33fd3..9ba64bb43ff 100644 --- a/tests/components/datadog/test_init.py +++ b/tests/components/datadog/test_init.py @@ -21,8 +21,9 @@ class TestDatadog(unittest.TestCase): def setUp(self): # pylint: disable=invalid-name """Set up 
things to be run when tests are started.""" self.hass = get_test_home_assistant() + self.addCleanup(self.tear_down_cleanup) - def tearDown(self): # pylint: disable=invalid-name + def tear_down_cleanup(self): """Stop everything that was started.""" self.hass.stop() diff --git a/tests/components/debugpy/__init__.py b/tests/components/debugpy/__init__.py new file mode 100644 index 00000000000..4b816141921 --- /dev/null +++ b/tests/components/debugpy/__init__.py @@ -0,0 +1 @@ +"""Tests for the Remote Python Debugger integration.""" diff --git a/tests/components/debugpy/test_init.py b/tests/components/debugpy/test_init.py new file mode 100644 index 00000000000..1de8da9ac9a --- /dev/null +++ b/tests/components/debugpy/test_init.py @@ -0,0 +1,61 @@ +"""Tests for the Remote Python Debugger integration.""" +import pytest + +from homeassistant.components.debugpy import ( + CONF_HOST, + CONF_PORT, + CONF_START, + CONF_WAIT, + DOMAIN, + SERVICE_START, +) +from homeassistant.core import HomeAssistant +from homeassistant.setup import async_setup_component + +from tests.async_mock import patch + + +@pytest.fixture +def mock_debugpy(): + """Mock debugpy lib.""" + with patch("homeassistant.components.debugpy.debugpy") as mocked_debugpy: + yield mocked_debugpy + + +async def test_default(hass: HomeAssistant, mock_debugpy) -> None: + """Test if the default settings work.""" + assert await async_setup_component(hass, DOMAIN, {DOMAIN: {}}) + + mock_debugpy.listen.assert_called_once_with(("0.0.0.0", 5678)) + mock_debugpy.wait_for_client.assert_not_called() + assert len(mock_debugpy.method_calls) == 1 + + +async def test_wait_on_startup(hass: HomeAssistant, mock_debugpy) -> None: + """Test if the waiting for client is called.""" + assert await async_setup_component(hass, DOMAIN, {DOMAIN: {CONF_WAIT: True}}) + + mock_debugpy.listen.assert_called_once_with(("0.0.0.0", 5678)) + mock_debugpy.wait_for_client.assert_called_once() + assert len(mock_debugpy.method_calls) == 2 + + +async def test_on_demand(hass: HomeAssistant, mock_debugpy) -> None: + """Test on-demand debugging using a service call.""" + assert await async_setup_component( + hass, + DOMAIN, + {DOMAIN: {CONF_START: False, CONF_HOST: "127.0.0.1", CONF_PORT: 80}}, + ) + + mock_debugpy.listen.assert_not_called() + mock_debugpy.wait_for_client.assert_not_called() + assert len(mock_debugpy.method_calls) == 0 + + await hass.services.async_call( + DOMAIN, SERVICE_START, blocking=True, + ) + + mock_debugpy.listen.assert_called_once_with(("127.0.0.1", 80)) + mock_debugpy.wait_for_client.assert_not_called() + assert len(mock_debugpy.method_calls) == 1 diff --git a/tests/components/demo/test_geo_location.py b/tests/components/demo/test_geo_location.py index 0ba3a35b891..dd4c2022c5e 100644 --- a/tests/components/demo/test_geo_location.py +++ b/tests/components/demo/test_geo_location.py @@ -26,10 +26,7 @@ class TestDemoPlatform(unittest.TestCase): def setUp(self): """Initialize values for this testcase class.""" self.hass = get_test_home_assistant() - - def tearDown(self): - """Stop everything that was started.""" - self.hass.stop() + self.addCleanup(self.hass.stop) def test_setup_platform(self): """Test setup of demo platform via configuration.""" diff --git a/tests/components/demo/test_humidifier.py b/tests/components/demo/test_humidifier.py new file mode 100644 index 00000000000..ba2bd60f8f2 --- /dev/null +++ b/tests/components/demo/test_humidifier.py @@ -0,0 +1,166 @@ +"""The tests for the demo humidifier component.""" + +import pytest +import voluptuous as vol + 
+from homeassistant.components.humidifier.const import ( + ATTR_HUMIDITY, + ATTR_MAX_HUMIDITY, + ATTR_MIN_HUMIDITY, + ATTR_MODE, + DOMAIN, + MODE_AWAY, + MODE_ECO, + SERVICE_SET_HUMIDITY, + SERVICE_SET_MODE, +) +from homeassistant.const import ( + ATTR_ENTITY_ID, + SERVICE_TOGGLE, + SERVICE_TURN_OFF, + SERVICE_TURN_ON, + STATE_OFF, + STATE_ON, +) +from homeassistant.setup import async_setup_component + +ENTITY_DEHUMIDIFIER = "humidifier.dehumidifier" +ENTITY_HYGROSTAT = "humidifier.hygrostat" +ENTITY_HUMIDIFIER = "humidifier.humidifier" + + +@pytest.fixture(autouse=True) +async def setup_demo_humidifier(hass): + """Initialize setup demo humidifier.""" + assert await async_setup_component( + hass, DOMAIN, {"humidifier": {"platform": "demo"}} + ) + await hass.async_block_till_done() + + +def test_setup_params(hass): + """Test the initial parameters.""" + state = hass.states.get(ENTITY_DEHUMIDIFIER) + assert state.state == STATE_ON + assert state.attributes.get(ATTR_HUMIDITY) == 54 + + +def test_default_setup_params(hass): + """Test the setup with default parameters.""" + state = hass.states.get(ENTITY_DEHUMIDIFIER) + assert state.attributes.get(ATTR_MIN_HUMIDITY) == 0 + assert state.attributes.get(ATTR_MAX_HUMIDITY) == 100 + + +async def test_set_target_humidity_bad_attr(hass): + """Test setting the target humidity without required attribute.""" + state = hass.states.get(ENTITY_DEHUMIDIFIER) + assert state.attributes.get(ATTR_HUMIDITY) == 54 + + with pytest.raises(vol.Invalid): + await hass.services.async_call( + DOMAIN, + SERVICE_SET_HUMIDITY, + {ATTR_HUMIDITY: None, ATTR_ENTITY_ID: ENTITY_DEHUMIDIFIER}, + blocking=True, + ) + await hass.async_block_till_done() + + state = hass.states.get(ENTITY_DEHUMIDIFIER) + assert state.attributes.get(ATTR_HUMIDITY) == 54 + + +async def test_set_target_humidity(hass): + """Test the setting of the target humidity.""" + state = hass.states.get(ENTITY_DEHUMIDIFIER) + assert state.attributes.get(ATTR_HUMIDITY) == 54 + + await hass.services.async_call( + DOMAIN, + SERVICE_SET_HUMIDITY, + {ATTR_HUMIDITY: 64, ATTR_ENTITY_ID: ENTITY_DEHUMIDIFIER}, + blocking=True, + ) + await hass.async_block_till_done() + + state = hass.states.get(ENTITY_DEHUMIDIFIER) + assert state.attributes.get(ATTR_HUMIDITY) == 64 + + +async def test_set_hold_mode_away(hass): + """Test setting the hold mode away.""" + await hass.services.async_call( + DOMAIN, + SERVICE_SET_MODE, + {ATTR_MODE: MODE_AWAY, ATTR_ENTITY_ID: ENTITY_HYGROSTAT}, + blocking=True, + ) + await hass.async_block_till_done() + + state = hass.states.get(ENTITY_HYGROSTAT) + assert state.attributes.get(ATTR_MODE) == MODE_AWAY + + +async def test_set_hold_mode_eco(hass): + """Test setting the hold mode eco.""" + await hass.services.async_call( + DOMAIN, + SERVICE_SET_MODE, + {ATTR_MODE: MODE_ECO, ATTR_ENTITY_ID: ENTITY_HYGROSTAT}, + blocking=True, + ) + await hass.async_block_till_done() + + state = hass.states.get(ENTITY_HYGROSTAT) + assert state.attributes.get(ATTR_MODE) == MODE_ECO + + +async def test_turn_on(hass): + """Test turn on device.""" + await hass.services.async_call( + DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: ENTITY_DEHUMIDIFIER}, blocking=True + ) + state = hass.states.get(ENTITY_DEHUMIDIFIER) + assert state.state == STATE_OFF + + await hass.services.async_call( + DOMAIN, SERVICE_TURN_ON, {ATTR_ENTITY_ID: ENTITY_DEHUMIDIFIER}, blocking=True + ) + state = hass.states.get(ENTITY_DEHUMIDIFIER) + assert state.state == STATE_ON + + +async def test_turn_off(hass): + """Test turn off device.""" + await 
hass.services.async_call( + DOMAIN, SERVICE_TURN_ON, {ATTR_ENTITY_ID: ENTITY_DEHUMIDIFIER}, blocking=True + ) + state = hass.states.get(ENTITY_DEHUMIDIFIER) + assert state.state == STATE_ON + + await hass.services.async_call( + DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: ENTITY_DEHUMIDIFIER}, blocking=True + ) + state = hass.states.get(ENTITY_DEHUMIDIFIER) + assert state.state == STATE_OFF + + +async def test_toggle(hass): + """Test toggle device.""" + await hass.services.async_call( + DOMAIN, SERVICE_TURN_ON, {ATTR_ENTITY_ID: ENTITY_DEHUMIDIFIER}, blocking=True + ) + state = hass.states.get(ENTITY_DEHUMIDIFIER) + assert state.state == STATE_ON + + await hass.services.async_call( + DOMAIN, SERVICE_TOGGLE, {ATTR_ENTITY_ID: ENTITY_DEHUMIDIFIER}, blocking=True + ) + state = hass.states.get(ENTITY_DEHUMIDIFIER) + assert state.state == STATE_OFF + + await hass.services.async_call( + DOMAIN, SERVICE_TOGGLE, {ATTR_ENTITY_ID: ENTITY_DEHUMIDIFIER}, blocking=True + ) + state = hass.states.get(ENTITY_DEHUMIDIFIER) + assert state.state == STATE_ON diff --git a/tests/components/demo/test_notify.py b/tests/components/demo/test_notify.py index 7c7b7fa0aa1..07b6d968c84 100644 --- a/tests/components/demo/test_notify.py +++ b/tests/components/demo/test_notify.py @@ -32,8 +32,9 @@ class TestNotifyDemo(unittest.TestCase): self.events.append(event) self.hass.bus.listen(demo.EVENT_NOTIFY, record_event) + self.addCleanup(self.tear_down_cleanup) - def tearDown(self): # pylint: disable=invalid-name + def tear_down_cleanup(self): """Stop down everything that was started.""" self.hass.stop() diff --git a/tests/components/demo/test_remote.py b/tests/components/demo/test_remote.py index 7ea31fbeb69..fa87213768d 100644 --- a/tests/components/demo/test_remote.py +++ b/tests/components/demo/test_remote.py @@ -24,8 +24,9 @@ class TestDemoRemote(unittest.TestCase): ) self.hass.block_till_done() - # pylint: disable=invalid-name - def tearDown(self): + self.addCleanup(self.tear_down_cleanup) + + def tear_down_cleanup(self): """Stop down everything that was started.""" self.hass.stop() diff --git a/tests/components/denonavr/test_config_flow.py b/tests/components/denonavr/test_config_flow.py new file mode 100644 index 00000000000..d7ab51ff029 --- /dev/null +++ b/tests/components/denonavr/test_config_flow.py @@ -0,0 +1,561 @@ +"""Test the DenonAVR config flow.""" +import pytest + +from homeassistant import config_entries, data_entry_flow +from homeassistant.components import ssdp +from homeassistant.components.denonavr.config_flow import ( + CONF_MANUFACTURER, + CONF_MODEL, + CONF_SERIAL_NUMBER, + CONF_SHOW_ALL_SOURCES, + CONF_TYPE, + CONF_ZONE2, + CONF_ZONE3, + DOMAIN, +) +from homeassistant.const import CONF_HOST, CONF_MAC + +from tests.async_mock import patch +from tests.common import MockConfigEntry + +TEST_HOST = "1.2.3.4" +TEST_MAC = "ab:cd:ef:gh" +TEST_HOST2 = "5.6.7.8" +TEST_NAME = "Test_Receiver" +TEST_MODEL = "model5" +TEST_RECEIVER_TYPE = "avr-x" +TEST_SERIALNUMBER = "123456789" +TEST_MANUFACTURER = "Denon" +TEST_SSDP_LOCATION = f"http://{TEST_HOST}/" +TEST_UNIQUE_ID = f"{TEST_MODEL}-{TEST_SERIALNUMBER}" +TEST_DISCOVER_1_RECEIVER = [{CONF_HOST: TEST_HOST}] +TEST_DISCOVER_2_RECEIVER = [{CONF_HOST: TEST_HOST}, {CONF_HOST: TEST_HOST2}] + + +@pytest.fixture(name="denonavr_connect", autouse=True) +def denonavr_connect_fixture(): + """Mock denonavr connection and entry setup.""" + with patch( + "homeassistant.components.denonavr.receiver.denonavr.DenonAVR._update_input_func_list", + return_value=True, + ), patch( + 
"homeassistant.components.denonavr.receiver.denonavr.DenonAVR._get_receiver_name", + return_value=TEST_NAME, + ), patch( + "homeassistant.components.denonavr.receiver.denonavr.DenonAVR._get_support_sound_mode", + return_value=True, + ), patch( + "homeassistant.components.denonavr.receiver.denonavr.DenonAVR._update_avr_2016", + return_value=True, + ), patch( + "homeassistant.components.denonavr.receiver.denonavr.DenonAVR._update_avr", + return_value=True, + ), patch( + "homeassistant.components.denonavr.receiver.denonavr.DenonAVR.get_device_info", + return_value=True, + ), patch( + "homeassistant.components.denonavr.receiver.denonavr.DenonAVR.name", TEST_NAME, + ), patch( + "homeassistant.components.denonavr.receiver.denonavr.DenonAVR.model_name", + TEST_MODEL, + ), patch( + "homeassistant.components.denonavr.receiver.denonavr.DenonAVR.serial_number", + TEST_SERIALNUMBER, + ), patch( + "homeassistant.components.denonavr.receiver.denonavr.DenonAVR.manufacturer", + TEST_MANUFACTURER, + ), patch( + "homeassistant.components.denonavr.receiver.denonavr.DenonAVR.receiver_type", + TEST_RECEIVER_TYPE, + ), patch( + "homeassistant.components.denonavr.config_flow.get_mac_address", + return_value=TEST_MAC, + ), patch( + "homeassistant.components.denonavr.async_setup_entry", return_value=True + ): + yield + + +async def test_config_flow_manual_host_success(hass): + """ + Successful flow manually initialized by the user. + + Host specified. + """ + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + + assert result["type"] == "form" + assert result["step_id"] == "user" + assert result["errors"] == {} + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], {CONF_HOST: TEST_HOST}, + ) + + assert result["type"] == "create_entry" + assert result["title"] == TEST_NAME + assert result["data"] == { + CONF_HOST: TEST_HOST, + CONF_MAC: TEST_MAC, + CONF_MODEL: TEST_MODEL, + CONF_TYPE: TEST_RECEIVER_TYPE, + CONF_MANUFACTURER: TEST_MANUFACTURER, + CONF_SERIAL_NUMBER: TEST_SERIALNUMBER, + } + + +async def test_config_flow_manual_discover_1_success(hass): + """ + Successful flow manually initialized by the user. + + Without the host specified and 1 receiver discovered. + """ + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + + assert result["type"] == "form" + assert result["step_id"] == "user" + assert result["errors"] == {} + + with patch( + "homeassistant.components.denonavr.config_flow.denonavr.ssdp.identify_denonavr_receivers", + return_value=TEST_DISCOVER_1_RECEIVER, + ): + result = await hass.config_entries.flow.async_configure(result["flow_id"], {},) + + assert result["type"] == "create_entry" + assert result["title"] == TEST_NAME + assert result["data"] == { + CONF_HOST: TEST_HOST, + CONF_MAC: TEST_MAC, + CONF_MODEL: TEST_MODEL, + CONF_TYPE: TEST_RECEIVER_TYPE, + CONF_MANUFACTURER: TEST_MANUFACTURER, + CONF_SERIAL_NUMBER: TEST_SERIALNUMBER, + } + + +async def test_config_flow_manual_discover_2_success(hass): + """ + Successful flow manually initialized by the user. + + Without the host specified and 2 receiver discovered. 
+ """ + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + + assert result["type"] == "form" + assert result["step_id"] == "user" + assert result["errors"] == {} + + with patch( + "homeassistant.components.denonavr.config_flow.denonavr.ssdp.identify_denonavr_receivers", + return_value=TEST_DISCOVER_2_RECEIVER, + ): + result = await hass.config_entries.flow.async_configure(result["flow_id"], {},) + + assert result["type"] == "form" + assert result["step_id"] == "select" + assert result["errors"] == {} + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], {"select_host": TEST_HOST2}, + ) + + assert result["type"] == "create_entry" + assert result["title"] == TEST_NAME + assert result["data"] == { + CONF_HOST: TEST_HOST2, + CONF_MAC: TEST_MAC, + CONF_MODEL: TEST_MODEL, + CONF_TYPE: TEST_RECEIVER_TYPE, + CONF_MANUFACTURER: TEST_MANUFACTURER, + CONF_SERIAL_NUMBER: TEST_SERIALNUMBER, + } + + +async def test_config_flow_manual_discover_error(hass): + """ + Failed flow manually initialized by the user. + + Without the host specified and no receiver discovered. + """ + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + + assert result["type"] == "form" + assert result["step_id"] == "user" + assert result["errors"] == {} + + with patch( + "homeassistant.components.denonavr.config_flow.denonavr.ssdp.identify_denonavr_receivers", + return_value=[], + ): + result = await hass.config_entries.flow.async_configure(result["flow_id"], {},) + + assert result["type"] == "form" + assert result["step_id"] == "user" + assert result["errors"] == {"base": "discovery_error"} + + +async def test_config_flow_manual_host_no_serial(hass): + """ + Successful flow manually initialized by the user. + + Host specified and an error getting the serial number. + """ + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + + assert result["type"] == "form" + assert result["step_id"] == "user" + assert result["errors"] == {} + + with patch( + "homeassistant.components.denonavr.receiver.denonavr.DenonAVR.serial_number", + None, + ): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], {CONF_HOST: TEST_HOST}, + ) + + assert result["type"] == "create_entry" + assert result["title"] == TEST_NAME + assert result["data"] == { + CONF_HOST: TEST_HOST, + CONF_MAC: TEST_MAC, + CONF_MODEL: TEST_MODEL, + CONF_TYPE: TEST_RECEIVER_TYPE, + CONF_MANUFACTURER: TEST_MANUFACTURER, + CONF_SERIAL_NUMBER: None, + } + + +async def test_config_flow_manual_host_no_mac(hass): + """ + Successful flow manually initialized by the user. + + Host specified and an error getting the mac address. 
+ """ + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + + assert result["type"] == "form" + assert result["step_id"] == "user" + assert result["errors"] == {} + + with patch( + "homeassistant.components.denonavr.config_flow.get_mac_address", + return_value=None, + ): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], {CONF_HOST: TEST_HOST}, + ) + + assert result["type"] == "create_entry" + assert result["title"] == TEST_NAME + assert result["data"] == { + CONF_HOST: TEST_HOST, + CONF_MAC: None, + CONF_MODEL: TEST_MODEL, + CONF_TYPE: TEST_RECEIVER_TYPE, + CONF_MANUFACTURER: TEST_MANUFACTURER, + CONF_SERIAL_NUMBER: TEST_SERIALNUMBER, + } + + +async def test_config_flow_manual_host_no_serial_no_mac(hass): + """ + Successful flow manually initialized by the user. + + Host specified and an error getting the serial number and mac address. + """ + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + + assert result["type"] == "form" + assert result["step_id"] == "user" + assert result["errors"] == {} + + with patch( + "homeassistant.components.denonavr.receiver.denonavr.DenonAVR.serial_number", + None, + ), patch( + "homeassistant.components.denonavr.config_flow.get_mac_address", + return_value=None, + ): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], {CONF_HOST: TEST_HOST}, + ) + + assert result["type"] == "create_entry" + assert result["title"] == TEST_NAME + assert result["data"] == { + CONF_HOST: TEST_HOST, + CONF_MAC: None, + CONF_MODEL: TEST_MODEL, + CONF_TYPE: TEST_RECEIVER_TYPE, + CONF_MANUFACTURER: TEST_MANUFACTURER, + CONF_SERIAL_NUMBER: None, + } + + +async def test_config_flow_manual_host_no_serial_no_mac_exception(hass): + """ + Successful flow manually initialized by the user. + + Host specified and an error getting the serial number and exception getting mac address. + """ + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + + assert result["type"] == "form" + assert result["step_id"] == "user" + assert result["errors"] == {} + + with patch( + "homeassistant.components.denonavr.receiver.denonavr.DenonAVR.serial_number", + None, + ), patch( + "homeassistant.components.denonavr.config_flow.get_mac_address", + side_effect=OSError, + ): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], {CONF_HOST: TEST_HOST}, + ) + + assert result["type"] == "create_entry" + assert result["title"] == TEST_NAME + assert result["data"] == { + CONF_HOST: TEST_HOST, + CONF_MAC: None, + CONF_MODEL: TEST_MODEL, + CONF_TYPE: TEST_RECEIVER_TYPE, + CONF_MANUFACTURER: TEST_MANUFACTURER, + CONF_SERIAL_NUMBER: None, + } + + +async def test_config_flow_manual_host_connection_error(hass): + """ + Failed flow manually initialized by the user. + + Host specified and a connection error. 
+ """ + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + + assert result["type"] == "form" + assert result["step_id"] == "user" + assert result["errors"] == {} + + with patch( + "homeassistant.components.denonavr.receiver.denonavr.DenonAVR.get_device_info", + side_effect=ConnectionError, + ), patch( + "homeassistant.components.denonavr.receiver.denonavr.DenonAVR.receiver_type", + None, + ): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], {CONF_HOST: TEST_HOST}, + ) + + assert result["type"] == "abort" + assert result["reason"] == "connection_error" + + +async def test_config_flow_manual_host_no_device_info(hass): + """ + Failed flow manually initialized by the user. + + Host specified and no device info (due to receiver power off). + """ + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + + assert result["type"] == "form" + assert result["step_id"] == "user" + assert result["errors"] == {} + + with patch( + "homeassistant.components.denonavr.receiver.denonavr.DenonAVR.receiver_type", + None, + ): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], {CONF_HOST: TEST_HOST}, + ) + + assert result["type"] == "abort" + assert result["reason"] == "connection_error" + + +async def test_config_flow_ssdp(hass): + """Successful flow initialized by ssdp discovery.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": config_entries.SOURCE_SSDP}, + data={ + ssdp.ATTR_UPNP_MANUFACTURER: TEST_MANUFACTURER, + ssdp.ATTR_UPNP_MODEL_NAME: TEST_MODEL, + ssdp.ATTR_UPNP_SERIAL: TEST_SERIALNUMBER, + ssdp.ATTR_SSDP_LOCATION: TEST_SSDP_LOCATION, + }, + ) + + assert result["type"] == "form" + assert result["step_id"] == "confirm" + + result = await hass.config_entries.flow.async_configure(result["flow_id"], {},) + + assert result["type"] == "create_entry" + assert result["title"] == TEST_NAME + assert result["data"] == { + CONF_HOST: TEST_HOST, + CONF_MAC: TEST_MAC, + CONF_MODEL: TEST_MODEL, + CONF_TYPE: TEST_RECEIVER_TYPE, + CONF_MANUFACTURER: TEST_MANUFACTURER, + CONF_SERIAL_NUMBER: TEST_SERIALNUMBER, + } + + +async def test_config_flow_ssdp_not_denon(hass): + """ + Failed flow initialized by ssdp discovery. + + Not supported manufacturer. + """ + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": config_entries.SOURCE_SSDP}, + data={ + ssdp.ATTR_UPNP_MANUFACTURER: "NotSupported", + ssdp.ATTR_UPNP_MODEL_NAME: TEST_MODEL, + ssdp.ATTR_UPNP_SERIAL: TEST_SERIALNUMBER, + ssdp.ATTR_SSDP_LOCATION: TEST_SSDP_LOCATION, + }, + ) + + assert result["type"] == "abort" + assert result["reason"] == "not_denonavr_manufacturer" + + +async def test_config_flow_ssdp_missing_info(hass): + """ + Failed flow initialized by ssdp discovery. + + Missing information. 
+ """ + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": config_entries.SOURCE_SSDP}, + data={ + ssdp.ATTR_UPNP_MANUFACTURER: TEST_MANUFACTURER, + ssdp.ATTR_SSDP_LOCATION: TEST_SSDP_LOCATION, + }, + ) + + assert result["type"] == "abort" + assert result["reason"] == "not_denonavr_missing" + + +async def test_options_flow(hass): + """Test specifying non default settings using options flow.""" + config_entry = MockConfigEntry( + domain=DOMAIN, + unique_id=TEST_UNIQUE_ID, + data={ + CONF_HOST: TEST_HOST, + CONF_MAC: TEST_MAC, + CONF_MODEL: TEST_MODEL, + CONF_TYPE: TEST_RECEIVER_TYPE, + CONF_MANUFACTURER: TEST_MANUFACTURER, + CONF_SERIAL_NUMBER: TEST_SERIALNUMBER, + }, + title=TEST_NAME, + ) + config_entry.add_to_hass(hass) + + assert await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + result = await hass.config_entries.options.async_init(config_entry.entry_id) + + assert result["type"] == data_entry_flow.RESULT_TYPE_FORM + assert result["step_id"] == "init" + + result = await hass.config_entries.options.async_configure( + result["flow_id"], + user_input={CONF_SHOW_ALL_SOURCES: True, CONF_ZONE2: True, CONF_ZONE3: True}, + ) + + assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY + assert config_entry.options == { + CONF_SHOW_ALL_SOURCES: True, + CONF_ZONE2: True, + CONF_ZONE3: True, + } + + +async def test_config_flow_manual_host_no_serial_double_config(hass): + """ + Failed flow manually initialized by the user twice. + + Host specified and an error getting the serial number. + """ + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + + assert result["type"] == "form" + assert result["step_id"] == "user" + assert result["errors"] == {} + + with patch( + "homeassistant.components.denonavr.receiver.denonavr.DenonAVR.serial_number", + None, + ): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], {CONF_HOST: TEST_HOST}, + ) + + assert result["type"] == "create_entry" + assert result["title"] == TEST_NAME + assert result["data"] == { + CONF_HOST: TEST_HOST, + CONF_MAC: TEST_MAC, + CONF_MODEL: TEST_MODEL, + CONF_TYPE: TEST_RECEIVER_TYPE, + CONF_MANUFACTURER: TEST_MANUFACTURER, + CONF_SERIAL_NUMBER: None, + } + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + + assert result["type"] == "form" + assert result["step_id"] == "user" + assert result["errors"] == {} + + with patch( + "homeassistant.components.denonavr.receiver.denonavr.DenonAVR.serial_number", + None, + ): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], {CONF_HOST: TEST_HOST}, + ) + + assert result["type"] == "abort" + assert result["reason"] == "already_configured" diff --git a/tests/components/denonavr/test_media_player.py b/tests/components/denonavr/test_media_player.py index 1547391a339..980ad758c80 100644 --- a/tests/components/denonavr/test_media_player.py +++ b/tests/components/denonavr/test_media_player.py @@ -1,57 +1,92 @@ """The tests for the denonavr media player platform.""" +from unittest.mock import patch + import pytest from homeassistant.components import media_player -from homeassistant.components.denonavr import ATTR_COMMAND, DOMAIN, SERVICE_GET_COMMAND -from homeassistant.const import ATTR_ENTITY_ID, CONF_HOST, CONF_NAME, CONF_PLATFORM -from homeassistant.setup import async_setup_component +from homeassistant.components.denonavr import 
ATTR_COMMAND, SERVICE_GET_COMMAND +from homeassistant.components.denonavr.config_flow import ( + CONF_MANUFACTURER, + CONF_MODEL, + CONF_SERIAL_NUMBER, + CONF_TYPE, + DOMAIN, +) +from homeassistant.const import ATTR_ENTITY_ID, CONF_HOST, CONF_MAC -from tests.async_mock import patch +from tests.common import MockConfigEntry -NAME = "fake" -ENTITY_ID = f"{media_player.DOMAIN}.{NAME}" +TEST_HOST = "1.2.3.4" +TEST_MAC = "ab:cd:ef:gh" +TEST_NAME = "Test_Receiver" +TEST_MODEL = "model5" +TEST_SERIALNUMBER = "123456789" +TEST_MANUFACTURER = "Denon" +TEST_RECEIVER_TYPE = "avr-x" +TEST_ZONE = "Main" +TEST_UNIQUE_ID = f"{TEST_MODEL}-{TEST_SERIALNUMBER}" +TEST_TIMEOUT = 2 +TEST_SHOW_ALL_SOURCES = False +TEST_ZONE2 = False +TEST_ZONE3 = False +ENTITY_ID = f"{media_player.DOMAIN}.{TEST_NAME}" @pytest.fixture(name="client") def client_fixture(): """Patch of client library for tests.""" with patch( - "homeassistant.components.denonavr.media_player.denonavr.DenonAVR", - autospec=True, + "homeassistant.components.denonavr.receiver.denonavr.DenonAVR", autospec=True, ) as mock_client_class, patch( - "homeassistant.components.denonavr.media_player.denonavr.discover" + "homeassistant.components.denonavr.receiver.denonavr.discover" ): - mock_client_class.return_value.name = NAME + mock_client_class.return_value.name = TEST_NAME + mock_client_class.return_value.model_name = TEST_MODEL + mock_client_class.return_value.serial_number = TEST_SERIALNUMBER + mock_client_class.return_value.manufacturer = TEST_MANUFACTURER + mock_client_class.return_value.receiver_type = TEST_RECEIVER_TYPE + mock_client_class.return_value.zone = TEST_ZONE + mock_client_class.return_value.input_func_list = [] + mock_client_class.return_value.sound_mode_list = [] mock_client_class.return_value.zones = {"Main": mock_client_class.return_value} yield mock_client_class.return_value async def setup_denonavr(hass): - """Initialize webostv and media_player for tests.""" - assert await async_setup_component( - hass, - media_player.DOMAIN, - { - media_player.DOMAIN: { - CONF_PLATFORM: "denonavr", - CONF_HOST: "fake", - CONF_NAME: NAME, - } - }, + """Initialize media_player for tests.""" + entry_data = { + CONF_HOST: TEST_HOST, + CONF_MAC: TEST_MAC, + CONF_MODEL: TEST_MODEL, + CONF_TYPE: TEST_RECEIVER_TYPE, + CONF_MANUFACTURER: TEST_MANUFACTURER, + CONF_SERIAL_NUMBER: TEST_SERIALNUMBER, + } + + mock_entry = MockConfigEntry( + domain=DOMAIN, unique_id=TEST_UNIQUE_ID, data=entry_data, ) + + mock_entry.add_to_hass(hass) + + await hass.config_entries.async_setup(mock_entry.entry_id) await hass.async_block_till_done() + state = hass.states.get(ENTITY_ID) + + assert state + assert state.name == TEST_NAME + async def test_get_command(hass, client): """Test generic command functionality.""" - await setup_denonavr(hass) + await setup_denonavr(hass) data = { ATTR_ENTITY_ID: ENTITY_ID, - ATTR_COMMAND: "test", + ATTR_COMMAND: "test_command", } await hass.services.async_call(DOMAIN, SERVICE_GET_COMMAND, data) await hass.async_block_till_done() - client.send_get_command.assert_called_with("test") + client.send_get_command.assert_called_with("test_command") diff --git a/tests/components/dte_energy_bridge/test_sensor.py b/tests/components/dte_energy_bridge/test_sensor.py index 34f0a0a28c3..38d712468c0 100644 --- a/tests/components/dte_energy_bridge/test_sensor.py +++ b/tests/components/dte_energy_bridge/test_sensor.py @@ -17,10 +17,7 @@ class TestDteEnergyBridgeSetup(unittest.TestCase): def setUp(self): """Initialize values for this testcase class.""" self.hass = get_test_home_assistant() - 
def tearDown(self): - """Stop everything that was started.""" - self.hass.stop() + self.addCleanup(self.hass.stop) def test_setup_with_config(self): """Test the platform setup with configuration.""" diff --git a/tests/components/dyson/common.py b/tests/components/dyson/common.py index b78e7d58283..f1dabe5203d 100644 --- a/tests/components/dyson/common.py +++ b/tests/components/dyson/common.py @@ -23,3 +23,4 @@ def load_mock_device(device): device.state.oscillation_angle_low = "000" device.state.oscillation_angle_high = "000" device.state.filter_life = "000" + device.state.heat_target = 200 diff --git a/tests/components/dyson/test_climate.py b/tests/components/dyson/test_climate.py index af17d1f0ab4..cca589875aa 100644 --- a/tests/components/dyson/test_climate.py +++ b/tests/components/dyson/test_climate.py @@ -1,19 +1,52 @@ """Test the Dyson fan component.""" +import json import unittest -from unittest import mock -from libpurecool.const import FocusMode, HeatMode, HeatState, HeatTarget +from libpurecool.const import ( + FanPower, + FanSpeed, + FanState, + FocusMode, + HeatMode, + HeatState, + HeatTarget, +) +from libpurecool.dyson_pure_hotcool import DysonPureHotCool from libpurecool.dyson_pure_hotcool_link import DysonPureHotCoolLink from libpurecool.dyson_pure_state import DysonPureHotCoolState +from libpurecool.dyson_pure_state_v2 import DysonPureHotCoolV2State from homeassistant.components import dyson as dyson_parent +from homeassistant.components.climate import ( + DOMAIN, + SERVICE_SET_FAN_MODE, + SERVICE_SET_HVAC_MODE, + SERVICE_SET_TEMPERATURE, +) +from homeassistant.components.climate.const import ( + ATTR_CURRENT_HUMIDITY, + ATTR_FAN_MODE, + ATTR_HVAC_ACTION, + ATTR_HVAC_MODE, + CURRENT_HVAC_COOL, + CURRENT_HVAC_HEAT, + CURRENT_HVAC_IDLE, + FAN_AUTO, + FAN_HIGH, + FAN_LOW, + FAN_MEDIUM, + FAN_OFF, + HVAC_MODE_COOL, + HVAC_MODE_HEAT, + HVAC_MODE_OFF, +) from homeassistant.components.dyson import climate as dyson -from homeassistant.const import ATTR_TEMPERATURE, TEMP_CELSIUS +from homeassistant.const import ATTR_ENTITY_ID, ATTR_TEMPERATURE, TEMP_CELSIUS from homeassistant.setup import async_setup_component from .common import load_mock_device -from tests.async_mock import patch +from tests.async_mock import MagicMock, Mock, patch from tests.common import get_test_home_assistant @@ -22,7 +55,6 @@ class MockDysonState(DysonPureHotCoolState): def __init__(self): """Create new Mock Dyson State.""" - pass def _get_config(): @@ -40,9 +72,22 @@ def _get_config(): } +def _get_dyson_purehotcool_device(): + """Return a valid device as provided by the Dyson web services.""" + device = Mock(spec=DysonPureHotCool) + load_mock_device(device) + device.name = "Living room" + device.state.heat_target = "0000" + device.state.heat_mode = HeatMode.HEAT_OFF.value + device.state.fan_power = FanPower.POWER_OFF.value + device.environmental_state.humidity = 42 + device.environmental_state.temperature = 298 + return device + + def _get_device_with_no_state(): """Return a device with no state.""" - device = mock.Mock(spec=DysonPureHotCoolLink) + device = Mock(spec=DysonPureHotCoolLink) load_mock_device(device) device.state = None device.environmental_state = None @@ -51,14 +96,14 @@ def _get_device_with_no_state(): def _get_device_off(): """Return a device with state off.""" - device = mock.Mock(spec=DysonPureHotCoolLink) + device = Mock(spec=DysonPureHotCoolLink) load_mock_device(device) return device def _get_device_focus(): """Return a device with fan state of focus mode.""" - device = 
mock.Mock(spec=DysonPureHotCoolLink) + device = Mock(spec=DysonPureHotCoolLink) load_mock_device(device) device.state.focus_mode = FocusMode.FOCUS_ON.value return device @@ -66,7 +111,7 @@ def _get_device_focus(): def _get_device_diffuse(): """Return a device with fan state of diffuse mode.""" - device = mock.Mock(spec=DysonPureHotCoolLink) + device = Mock(spec=DysonPureHotCoolLink) load_mock_device(device) device.state.focus_mode = FocusMode.FOCUS_OFF.value return device @@ -74,7 +119,7 @@ def _get_device_diffuse(): def _get_device_cool(): """Return a device with state of cooling.""" - device = mock.Mock(spec=DysonPureHotCoolLink) + device = Mock(spec=DysonPureHotCoolLink) load_mock_device(device) device.state.focus_mode = FocusMode.FOCUS_OFF.value device.state.heat_target = HeatTarget.celsius(12) @@ -85,7 +130,7 @@ def _get_device_cool(): def _get_device_heat_off(): """Return a device with state of heat reached target.""" - device = mock.Mock(spec=DysonPureHotCoolLink) + device = Mock(spec=DysonPureHotCoolLink) load_mock_device(device) device.state.heat_mode = HeatMode.HEAT_ON.value device.state.heat_state = HeatState.HEAT_STATE_OFF.value @@ -94,7 +139,7 @@ def _get_device_heat_off(): def _get_device_heat_on(): """Return a device with state of heating.""" - device = mock.Mock(spec=DysonPureHotCoolLink) + device = Mock(spec=DysonPureHotCoolLink) load_mock_device(device) device.serial = "YY-YYYYY-YY" device.state.heat_target = HeatTarget.celsius(23) @@ -111,15 +156,16 @@ class DysonTest(unittest.TestCase): def setUp(self): # pylint: disable=invalid-name """Set up things to be run when tests are started.""" self.hass = get_test_home_assistant() + self.addCleanup(self.tear_down_cleanup) - def tearDown(self): # pylint: disable=invalid-name + def tear_down_cleanup(self): """Stop everything that was started.""" self.hass.stop() def test_setup_component_without_devices(self): """Test setup component with no devices.""" self.hass.data[dyson.DYSON_DEVICES] = [] - add_devices = mock.MagicMock() + add_devices = MagicMock() dyson.setup_platform(self.hass, None, add_devices) add_devices.assert_not_called() @@ -131,18 +177,10 @@ class DysonTest(unittest.TestCase): _get_device_heat_on(), ] self.hass.data[dyson.DYSON_DEVICES] = devices - add_devices = mock.MagicMock() + add_devices = MagicMock() dyson.setup_platform(self.hass, None, add_devices, discovery_info={}) assert add_devices.called - def test_setup_component_with_invalid_devices(self): - """Test setup component with invalid devices.""" - devices = [None, "foo_bar"] - self.hass.data[dyson.DYSON_DEVICES] = devices - add_devices = mock.MagicMock() - dyson.setup_platform(self.hass, None, add_devices, discovery_info={}) - add_devices.assert_called_with([]) - def test_setup_component(self): """Test setup component with devices.""" device_fan = _get_device_heat_on() @@ -159,7 +197,7 @@ class DysonTest(unittest.TestCase): """Test set climate temperature.""" device = _get_device_heat_on() device.temp_unit = TEMP_CELSIUS - entity = dyson.DysonPureHotCoolLinkDevice(device) + entity = dyson.DysonPureHotCoolLinkEntity(device) assert not entity.should_poll # Without target temp. 
@@ -194,8 +232,8 @@ class DysonTest(unittest.TestCase): """Test set climate temperature when heating is off.""" device = _get_device_cool() device.temp_unit = TEMP_CELSIUS - entity = dyson.DysonPureHotCoolLinkDevice(device) - entity.schedule_update_ha_state = mock.Mock() + entity = dyson.DysonPureHotCoolLinkEntity(device) + entity.schedule_update_ha_state = Mock() kwargs = {ATTR_TEMPERATURE: 23} entity.set_temperature(**kwargs) @@ -207,7 +245,7 @@ class DysonTest(unittest.TestCase): def test_dyson_set_fan_mode(self): """Test set fan mode.""" device = _get_device_heat_on() - entity = dyson.DysonPureHotCoolLinkDevice(device) + entity = dyson.DysonPureHotCoolLinkEntity(device) assert not entity.should_poll entity.set_fan_mode(dyson.FAN_FOCUS) @@ -221,7 +259,7 @@ class DysonTest(unittest.TestCase): def test_dyson_fan_modes(self): """Test get fan list.""" device = _get_device_heat_on() - entity = dyson.DysonPureHotCoolLinkDevice(device) + entity = dyson.DysonPureHotCoolLinkEntity(device) assert len(entity.fan_modes) == 2 assert dyson.FAN_FOCUS in entity.fan_modes assert dyson.FAN_DIFFUSE in entity.fan_modes @@ -229,19 +267,19 @@ class DysonTest(unittest.TestCase): def test_dyson_fan_mode_focus(self): """Test fan focus mode.""" device = _get_device_focus() - entity = dyson.DysonPureHotCoolLinkDevice(device) + entity = dyson.DysonPureHotCoolLinkEntity(device) assert entity.fan_mode == dyson.FAN_FOCUS def test_dyson_fan_mode_diffuse(self): """Test fan diffuse mode.""" device = _get_device_diffuse() - entity = dyson.DysonPureHotCoolLinkDevice(device) + entity = dyson.DysonPureHotCoolLinkEntity(device) assert entity.fan_mode == dyson.FAN_DIFFUSE def test_dyson_set_hvac_mode(self): """Test set operation mode.""" device = _get_device_heat_on() - entity = dyson.DysonPureHotCoolLinkDevice(device) + entity = dyson.DysonPureHotCoolLinkEntity(device) assert not entity.should_poll entity.set_hvac_mode(dyson.HVAC_MODE_HEAT) @@ -255,7 +293,7 @@ class DysonTest(unittest.TestCase): def test_dyson_operation_list(self): """Test get operation list.""" device = _get_device_heat_on() - entity = dyson.DysonPureHotCoolLinkDevice(device) + entity = dyson.DysonPureHotCoolLinkEntity(device) assert len(entity.hvac_modes) == 2 assert dyson.HVAC_MODE_HEAT in entity.hvac_modes assert dyson.HVAC_MODE_COOL in entity.hvac_modes @@ -263,7 +301,7 @@ class DysonTest(unittest.TestCase): def test_dyson_heat_off(self): """Test turn off heat.""" device = _get_device_heat_off() - entity = dyson.DysonPureHotCoolLinkDevice(device) + entity = dyson.DysonPureHotCoolLinkEntity(device) entity.set_hvac_mode(dyson.HVAC_MODE_COOL) set_config = device.set_configuration set_config.assert_called_with(heat_mode=HeatMode.HEAT_OFF) @@ -271,7 +309,7 @@ class DysonTest(unittest.TestCase): def test_dyson_heat_on(self): """Test turn on heat.""" device = _get_device_heat_on() - entity = dyson.DysonPureHotCoolLinkDevice(device) + entity = dyson.DysonPureHotCoolLinkEntity(device) entity.set_hvac_mode(dyson.HVAC_MODE_HEAT) set_config = device.set_configuration set_config.assert_called_with(heat_mode=HeatMode.HEAT_ON) @@ -279,34 +317,34 @@ class DysonTest(unittest.TestCase): def test_dyson_heat_value_on(self): """Test get heat value on.""" device = _get_device_heat_on() - entity = dyson.DysonPureHotCoolLinkDevice(device) + entity = dyson.DysonPureHotCoolLinkEntity(device) assert entity.hvac_mode == dyson.HVAC_MODE_HEAT def test_dyson_heat_value_off(self): """Test get heat value off.""" device = _get_device_cool() - entity = 
dyson.DysonPureHotCoolLinkDevice(device) + entity = dyson.DysonPureHotCoolLinkEntity(device) assert entity.hvac_mode == dyson.HVAC_MODE_COOL def test_dyson_heat_value_idle(self): """Test get heat value idle.""" device = _get_device_heat_off() - entity = dyson.DysonPureHotCoolLinkDevice(device) + entity = dyson.DysonPureHotCoolLinkEntity(device) assert entity.hvac_mode == dyson.HVAC_MODE_HEAT assert entity.hvac_action == dyson.CURRENT_HVAC_IDLE def test_on_message(self): """Test when message is received.""" device = _get_device_heat_on() - entity = dyson.DysonPureHotCoolLinkDevice(device) - entity.schedule_update_ha_state = mock.Mock() + entity = dyson.DysonPureHotCoolLinkEntity(device) + entity.schedule_update_ha_state = Mock() entity.on_message(MockDysonState()) entity.schedule_update_ha_state.assert_called_with() def test_general_properties(self): """Test properties of entity.""" device = _get_device_with_no_state() - entity = dyson.DysonPureHotCoolLinkDevice(device) + entity = dyson.DysonPureHotCoolLinkEntity(device) assert entity.should_poll is False assert entity.supported_features == dyson.SUPPORT_FLAGS assert entity.temperature_unit == TEMP_CELSIUS @@ -314,41 +352,41 @@ class DysonTest(unittest.TestCase): def test_property_current_humidity(self): """Test properties of current humidity.""" device = _get_device_heat_on() - entity = dyson.DysonPureHotCoolLinkDevice(device) + entity = dyson.DysonPureHotCoolLinkEntity(device) assert entity.current_humidity == 53 def test_property_current_humidity_with_invalid_env_state(self): """Test properties of current humidity with invalid env state.""" device = _get_device_off() device.environmental_state.humidity = 0 - entity = dyson.DysonPureHotCoolLinkDevice(device) + entity = dyson.DysonPureHotCoolLinkEntity(device) assert entity.current_humidity is None def test_property_current_humidity_without_env_state(self): """Test properties of current humidity without env state.""" device = _get_device_with_no_state() - entity = dyson.DysonPureHotCoolLinkDevice(device) + entity = dyson.DysonPureHotCoolLinkEntity(device) assert entity.current_humidity is None def test_property_current_temperature(self): """Test properties of current temperature.""" device = _get_device_heat_on() - entity = dyson.DysonPureHotCoolLinkDevice(device) + entity = dyson.DysonPureHotCoolLinkEntity(device) # Result should be in celsius, hence then subtraction of 273. 
assert entity.current_temperature == 289 - 273 def test_property_target_temperature(self): """Test properties of target temperature.""" device = _get_device_heat_on() - entity = dyson.DysonPureHotCoolLinkDevice(device) + entity = dyson.DysonPureHotCoolLinkEntity(device) assert entity.target_temperature == 23 @patch( - "libpurecool.dyson.DysonAccount.devices", + "homeassistant.components.dyson.DysonAccount.devices", return_value=[_get_device_heat_on(), _get_device_cool()], ) -@patch("libpurecool.dyson.DysonAccount.login", return_value=True) +@patch("homeassistant.components.dyson.DysonAccount.login", return_value=True) async def test_setup_component_with_parent_discovery( mocked_login, mocked_devices, hass ): @@ -356,4 +394,328 @@ async def test_setup_component_with_parent_discovery( await async_setup_component(hass, dyson_parent.DOMAIN, _get_config()) await hass.async_block_till_done() - assert len(hass.data[dyson.DYSON_DEVICES]) == 2 + entity_ids = hass.states.async_entity_ids("climate") + assert len(entity_ids) == 2 + + +@patch("homeassistant.components.dyson.DysonAccount.login", return_value=True) +@patch( + "homeassistant.components.dyson.DysonAccount.devices", + return_value=[_get_dyson_purehotcool_device()], +) +async def test_purehotcool_component_setup_only_once(devices, login, hass): + """Test if entities are created only once.""" + config = _get_config() + await async_setup_component(hass, dyson_parent.DOMAIN, config) + await hass.async_block_till_done() + + entity_ids = hass.states.async_entity_ids("climate") + assert len(entity_ids) == 1 + state = hass.states.get(entity_ids[0]) + assert state.name == "Living room" + + +@patch("homeassistant.components.dyson.DysonAccount.login", return_value=True) +@patch( + "homeassistant.components.dyson.DysonAccount.devices", + return_value=[_get_device_off()], +) +async def test_purehotcoollink_component_setup_only_once(devices, login, hass): + """Test if entities are created only once.""" + config = _get_config() + await async_setup_component(hass, dyson_parent.DOMAIN, config) + await hass.async_block_till_done() + + entity_ids = hass.states.async_entity_ids("climate") + assert len(entity_ids) == 1 + state = hass.states.get(entity_ids[0]) + assert state.name == "Temp Name" + + +@patch("homeassistant.components.dyson.DysonAccount.login", return_value=True) +@patch( + "homeassistant.components.dyson.DysonAccount.devices", + return_value=[_get_dyson_purehotcool_device()], +) +async def test_purehotcool_update_state(devices, login, hass): + """Test state update.""" + device = devices.return_value[0] + await async_setup_component(hass, dyson_parent.DOMAIN, _get_config()) + await hass.async_block_till_done() + event = { + "msg": "CURRENT-STATE", + "product-state": { + "fpwr": "ON", + "fdir": "OFF", + "auto": "OFF", + "oscs": "ON", + "oson": "ON", + "nmod": "OFF", + "rhtm": "ON", + "fnst": "FAN", + "ercd": "11E1", + "wacd": "NONE", + "nmdv": "0004", + "fnsp": "0002", + "bril": "0002", + "corf": "ON", + "cflr": "0085", + "hflr": "0095", + "sltm": "OFF", + "osal": "0045", + "osau": "0095", + "ancp": "CUST", + "tilt": "OK", + "hsta": "HEAT", + "hmax": "2986", + "hmod": "HEAT", + }, + } + device.state = DysonPureHotCoolV2State(json.dumps(event)) + + for call in device.add_message_listener.call_args_list: + callback = call[0][0] + if type(callback.__self__) == dyson.DysonPureHotCoolEntity: + callback(device.state) + + await hass.async_block_till_done() + state = hass.states.get("climate.living_room") + attributes = state.attributes + + assert 
attributes[ATTR_TEMPERATURE] == 25 + assert attributes[ATTR_HVAC_ACTION] == CURRENT_HVAC_HEAT + + +@patch("homeassistant.components.dyson.DysonAccount.login", return_value=True) +@patch( + "homeassistant.components.dyson.DysonAccount.devices", + return_value=[_get_dyson_purehotcool_device()], +) +async def test_purehotcool_empty_env_attributes(devices, login, hass): + """Test empty environmental state update.""" + device = devices.return_value[0] + device.environmental_state.temperature = None + device.environmental_state.humidity = None + await async_setup_component(hass, dyson_parent.DOMAIN, _get_config()) + await hass.async_block_till_done() + + state = hass.states.get("climate.living_room") + attributes = state.attributes + + assert ATTR_CURRENT_HUMIDITY not in attributes + + +@patch("homeassistant.components.dyson.DysonAccount.login", return_value=True) +@patch( + "homeassistant.components.dyson.DysonAccount.devices", + return_value=[_get_dyson_purehotcool_device()], +) +async def test_purehotcool_fan_state_off(devices, login, hass): + """Test device fan state off.""" + device = devices.return_value[0] + device.state.fan_state = FanState.FAN_OFF.value + await async_setup_component(hass, dyson_parent.DOMAIN, _get_config()) + await hass.async_block_till_done() + + state = hass.states.get("climate.living_room") + attributes = state.attributes + + assert attributes[ATTR_FAN_MODE] == FAN_OFF + + +@patch("homeassistant.components.dyson.DysonAccount.login", return_value=True) +@patch( + "homeassistant.components.dyson.DysonAccount.devices", + return_value=[_get_dyson_purehotcool_device()], +) +async def test_purehotcool_hvac_action_cool(devices, login, hass): + """Test device HVAC action cool.""" + device = devices.return_value[0] + device.state.fan_power = FanPower.POWER_ON.value + await async_setup_component(hass, dyson_parent.DOMAIN, _get_config()) + await hass.async_block_till_done() + + state = hass.states.get("climate.living_room") + attributes = state.attributes + + assert attributes[ATTR_HVAC_ACTION] == CURRENT_HVAC_COOL + + +@patch("homeassistant.components.dyson.DysonAccount.login", return_value=True) +@patch( + "homeassistant.components.dyson.DysonAccount.devices", + return_value=[_get_dyson_purehotcool_device()], +) +async def test_purehotcool_hvac_action_idle(devices, login, hass): + """Test device HVAC action idle.""" + device = devices.return_value[0] + device.state.fan_power = FanPower.POWER_ON.value + device.state.heat_mode = HeatMode.HEAT_ON.value + await async_setup_component(hass, dyson_parent.DOMAIN, _get_config()) + await hass.async_block_till_done() + + state = hass.states.get("climate.living_room") + attributes = state.attributes + + assert attributes[ATTR_HVAC_ACTION] == CURRENT_HVAC_IDLE + + +@patch("homeassistant.components.dyson.DysonAccount.login", return_value=True) +@patch( + "homeassistant.components.dyson.DysonAccount.devices", + return_value=[_get_dyson_purehotcool_device()], +) +async def test_purehotcool_set_temperature(devices, login, hass): + """Test set temperature.""" + device = devices.return_value[0] + await async_setup_component(hass, dyson_parent.DOMAIN, _get_config()) + await hass.async_block_till_done() + state = hass.states.get("climate.living_room") + attributes = state.attributes + min_temp = attributes["min_temp"] + max_temp = attributes["max_temp"] + + await hass.services.async_call( + DOMAIN, + SERVICE_SET_TEMPERATURE, + {ATTR_ENTITY_ID: "climate.bed_room", ATTR_TEMPERATURE: 23}, + True, + ) + device.set_heat_target.assert_not_called() + + 
await hass.services.async_call( + DOMAIN, + SERVICE_SET_TEMPERATURE, + {ATTR_ENTITY_ID: "climate.living_room", ATTR_TEMPERATURE: 23}, + True, + ) + assert device.set_heat_target.call_count == 1 + device.set_heat_target.assert_called_with("2960") + + await hass.services.async_call( + DOMAIN, + SERVICE_SET_TEMPERATURE, + {ATTR_ENTITY_ID: "climate.living_room", ATTR_TEMPERATURE: min_temp - 1}, + True, + ) + assert device.set_heat_target.call_count == 2 + device.set_heat_target.assert_called_with(HeatTarget.celsius(min_temp)) + + await hass.services.async_call( + DOMAIN, + SERVICE_SET_TEMPERATURE, + {ATTR_ENTITY_ID: "climate.living_room", ATTR_TEMPERATURE: max_temp + 1}, + True, + ) + assert device.set_heat_target.call_count == 3 + device.set_heat_target.assert_called_with(HeatTarget.celsius(max_temp)) + + +@patch("homeassistant.components.dyson.DysonAccount.login", return_value=True) +@patch( + "homeassistant.components.dyson.DysonAccount.devices", + return_value=[_get_dyson_purehotcool_device()], +) +async def test_purehotcool_set_fan_mode(devices, login, hass): + """Test set fan mode.""" + device = devices.return_value[0] + await async_setup_component(hass, dyson_parent.DOMAIN, _get_config()) + await hass.async_block_till_done() + + await hass.services.async_call( + DOMAIN, + SERVICE_SET_FAN_MODE, + {ATTR_ENTITY_ID: "climate.bed_room", ATTR_FAN_MODE: FAN_OFF}, + True, + ) + device.turn_off.assert_not_called() + + await hass.services.async_call( + DOMAIN, + SERVICE_SET_FAN_MODE, + {ATTR_ENTITY_ID: "climate.living_room", ATTR_FAN_MODE: FAN_OFF}, + True, + ) + assert device.turn_off.call_count == 1 + + await hass.services.async_call( + DOMAIN, + SERVICE_SET_FAN_MODE, + {ATTR_ENTITY_ID: "climate.living_room", ATTR_FAN_MODE: FAN_LOW}, + True, + ) + assert device.set_fan_speed.call_count == 1 + device.set_fan_speed.assert_called_with(FanSpeed.FAN_SPEED_4) + + await hass.services.async_call( + DOMAIN, + SERVICE_SET_FAN_MODE, + {ATTR_ENTITY_ID: "climate.living_room", ATTR_FAN_MODE: FAN_MEDIUM}, + True, + ) + assert device.set_fan_speed.call_count == 2 + device.set_fan_speed.assert_called_with(FanSpeed.FAN_SPEED_7) + + await hass.services.async_call( + DOMAIN, + SERVICE_SET_FAN_MODE, + {ATTR_ENTITY_ID: "climate.living_room", ATTR_FAN_MODE: FAN_HIGH}, + True, + ) + assert device.set_fan_speed.call_count == 3 + device.set_fan_speed.assert_called_with(FanSpeed.FAN_SPEED_10) + + await hass.services.async_call( + DOMAIN, + SERVICE_SET_FAN_MODE, + {ATTR_ENTITY_ID: "climate.living_room", ATTR_FAN_MODE: FAN_AUTO}, + True, + ) + assert device.set_fan_speed.call_count == 4 + device.set_fan_speed.assert_called_with(FanSpeed.FAN_SPEED_AUTO) + + +@patch("homeassistant.components.dyson.DysonAccount.login", return_value=True) +@patch( + "homeassistant.components.dyson.DysonAccount.devices", + return_value=[_get_dyson_purehotcool_device()], +) +async def test_purehotcool_set_hvac_mode(devices, login, hass): + """Test set HVAC mode.""" + device = devices.return_value[0] + await async_setup_component(hass, dyson_parent.DOMAIN, _get_config()) + await hass.async_block_till_done() + + await hass.services.async_call( + DOMAIN, + SERVICE_SET_HVAC_MODE, + {ATTR_ENTITY_ID: "climate.bed_room", ATTR_HVAC_MODE: HVAC_MODE_OFF}, + True, + ) + device.turn_off.assert_not_called() + + await hass.services.async_call( + DOMAIN, + SERVICE_SET_HVAC_MODE, + {ATTR_ENTITY_ID: "climate.living_room", ATTR_HVAC_MODE: HVAC_MODE_OFF}, + True, + ) + assert device.turn_off.call_count == 1 + + await hass.services.async_call( + DOMAIN, + 
SERVICE_SET_HVAC_MODE, + {ATTR_ENTITY_ID: "climate.living_room", ATTR_HVAC_MODE: HVAC_MODE_HEAT}, + True, + ) + assert device.turn_on.call_count == 1 + assert device.enable_heat_mode.call_count == 1 + + await hass.services.async_call( + DOMAIN, + SERVICE_SET_HVAC_MODE, + {ATTR_ENTITY_ID: "climate.living_room", ATTR_HVAC_MODE: HVAC_MODE_COOL}, + True, + ) + assert device.turn_on.call_count == 2 + assert device.disable_heat_mode.call_count == 1 diff --git a/tests/components/dyson/test_fan.py b/tests/components/dyson/test_fan.py index d4db6051960..807cf3565ed 100644 --- a/tests/components/dyson/test_fan.py +++ b/tests/components/dyson/test_fan.py @@ -136,8 +136,9 @@ class DysonSetupTest(unittest.TestCase): def setUp(self): # pylint: disable=invalid-name """Set up things to be run when tests are started.""" self.hass = get_test_home_assistant() + self.addCleanup(self.tear_down_cleanup) - def tearDown(self): # pylint: disable=invalid-name + def tear_down_cleanup(self): """Stop everything that was started.""" self.hass.stop() @@ -173,8 +174,9 @@ class DysonTest(unittest.TestCase): def setUp(self): # pylint: disable=invalid-name """Set up things to be run when tests are started.""" self.hass = get_test_home_assistant() + self.addCleanup(self.tear_down_cleanup) - def tearDown(self): # pylint: disable=invalid-name + def tear_down_cleanup(self): """Stop everything that was started.""" self.hass.stop() diff --git a/tests/components/dyson/test_init.py b/tests/components/dyson/test_init.py index 37cc69b6765..d2c36beb7d5 100644 --- a/tests/components/dyson/test_init.py +++ b/tests/components/dyson/test_init.py @@ -41,8 +41,9 @@ class DysonTest(unittest.TestCase): def setUp(self): # pylint: disable=invalid-name """Set up things to be run when tests are started.""" self.hass = get_test_home_assistant() + self.addCleanup(self.tear_down_cleanup) - def tearDown(self): # pylint: disable=invalid-name + def tear_down_cleanup(self): """Stop everything that was started.""" self.hass.stop() diff --git a/tests/components/dyson/test_sensor.py b/tests/components/dyson/test_sensor.py index 92bd3bba9aa..d15826863bb 100644 --- a/tests/components/dyson/test_sensor.py +++ b/tests/components/dyson/test_sensor.py @@ -84,8 +84,9 @@ class DysonTest(unittest.TestCase): def setUp(self): # pylint: disable=invalid-name """Set up things to be run when tests are started.""" self.hass = get_test_home_assistant() + self.addCleanup(self.tear_down_cleanup) - def tearDown(self): # pylint: disable=invalid-name + def tear_down_cleanup(self): """Stop everything that was started.""" self.hass.stop() diff --git a/tests/components/dyson/test_vacuum.py b/tests/components/dyson/test_vacuum.py index fc801cbe649..0ff19665807 100644 --- a/tests/components/dyson/test_vacuum.py +++ b/tests/components/dyson/test_vacuum.py @@ -70,8 +70,9 @@ class DysonTest(unittest.TestCase): def setUp(self): # pylint: disable=invalid-name """Set up things to be run when tests are started.""" self.hass = get_test_home_assistant() + self.addCleanup(self.tear_down_cleanup) - def tearDown(self): # pylint: disable=invalid-name + def tear_down_cleanup(self): """Stop everything that was started.""" self.hass.stop() diff --git a/tests/components/efergy/test_sensor.py b/tests/components/efergy/test_sensor.py index 252669233e5..7d1d928cef8 100644 --- a/tests/components/efergy/test_sensor.py +++ b/tests/components/efergy/test_sensor.py @@ -75,8 +75,9 @@ class TestEfergySensor(unittest.TestCase): """Initialize values for this test case class.""" self.hass = 
get_test_home_assistant() self.config = ONE_SENSOR_CONFIG + self.addCleanup(self.tear_down_cleanup) - def tearDown(self): # pylint: disable=invalid-name + def tear_down_cleanup(self): """Stop everything that was started.""" self.hass.stop() diff --git a/tests/components/fail2ban/test_sensor.py b/tests/components/fail2ban/test_sensor.py index b164cc93f2e..fa78c9b1bbf 100644 --- a/tests/components/fail2ban/test_sensor.py +++ b/tests/components/fail2ban/test_sensor.py @@ -66,10 +66,7 @@ class TestBanSensor(unittest.TestCase): def setUp(self): """Set up things to be run when tests are started.""" self.hass = get_test_home_assistant() - - def tearDown(self): - """Stop everything that was started.""" - self.hass.stop() + self.addCleanup(self.hass.stop) @patch("os.path.isfile", Mock(return_value=True)) def test_setup(self): diff --git a/tests/components/fan/test_init.py b/tests/components/fan/test_init.py index f27a3ff8ab6..134a5976baa 100644 --- a/tests/components/fan/test_init.py +++ b/tests/components/fan/test_init.py @@ -21,8 +21,9 @@ class TestFanEntity(unittest.TestCase): def setUp(self): """Set up test data.""" self.fan = BaseFan() + self.addCleanup(self.tear_down_cleanup) - def tearDown(self): + def tear_down_cleanup(self): """Tear down unit test data.""" self.fan = None diff --git a/tests/components/feedreader/test_init.py b/tests/components/feedreader/test_init.py index 823bdf6eb63..1983cb5ab8d 100644 --- a/tests/components/feedreader/test_init.py +++ b/tests/components/feedreader/test_init.py @@ -42,10 +42,7 @@ class TestFeedreaderComponent(unittest.TestCase): data_file = self.hass.config.path(f"{feedreader.DOMAIN}.pickle") if exists(data_file): remove(data_file) - - def tearDown(self): - """Stop everything that was started.""" - self.hass.stop() + self.addCleanup(self.hass.stop) def test_setup_one_feed(self): """Test the general setup of this component.""" diff --git a/tests/components/file/test_notify.py b/tests/components/file/test_notify.py index e4ae125949a..d7f380cf6da 100644 --- a/tests/components/file/test_notify.py +++ b/tests/components/file/test_notify.py @@ -17,8 +17,9 @@ class TestNotifyFile(unittest.TestCase): def setUp(self): # pylint: disable=invalid-name """Set up things to be run when tests are started.""" self.hass = get_test_home_assistant() + self.addCleanup(self.tear_down_cleanup) - def tearDown(self): # pylint: disable=invalid-name + def tear_down_cleanup(self): """Stop down everything that was started.""" self.hass.stop() diff --git a/tests/components/forked_daapd/test_config_flow.py b/tests/components/forked_daapd/test_config_flow.py index 3dc62bae8bd..17b30121aaf 100644 --- a/tests/components/forked_daapd/test_config_flow.py +++ b/tests/components/forked_daapd/test_config_flow.py @@ -158,6 +158,17 @@ async def test_config_flow_zeroconf_invalid(hass): ) # doesn't create the entry, tries to show form but gets abort assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT assert result["reason"] == "not_forked_daapd" + # test with svn mtd-version from Firefly + discovery_info = { + "host": "127.0.0.1", + "port": 23, + "properties": {"mtd-version": "svn-1676", "Machine Name": "firefly"}, + } + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_ZEROCONF}, data=discovery_info + ) # doesn't create the entry, tries to show form but gets abort + assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT + assert result["reason"] == "not_forked_daapd" async def test_config_flow_zeroconf_valid(hass): diff --git 
a/tests/components/generic/test_camera.py b/tests/components/generic/test_camera.py index a983efa115c..fffa5db6be5 100644 --- a/tests/components/generic/test_camera.py +++ b/tests/components/generic/test_camera.py @@ -1,10 +1,12 @@ """The tests for generic camera component.""" import asyncio -from unittest import mock +from homeassistant.components.websocket_api.const import TYPE_RESULT from homeassistant.const import HTTP_INTERNAL_SERVER_ERROR, HTTP_NOT_FOUND from homeassistant.setup import async_setup_component +from tests.async_mock import patch + async def test_fetching_url(aioclient_mock, hass, hass_client): """Test that it fetches the given url.""" @@ -119,7 +121,7 @@ async def test_limit_refetch(aioclient_mock, hass, hass_client): hass.states.async_set("sensor.temp", "5") - with mock.patch("async_timeout.timeout", side_effect=asyncio.TimeoutError()): + with patch("async_timeout.timeout", side_effect=asyncio.TimeoutError()): resp = await client.get("/api/camera_proxy/camera.config_test") assert aioclient_mock.call_count == 0 assert resp.status == HTTP_INTERNAL_SERVER_ERROR @@ -156,6 +158,104 @@ async def test_limit_refetch(aioclient_mock, hass, hass_client): assert body == "hello planet" +async def test_stream_source(aioclient_mock, hass, hass_client, hass_ws_client): + """Test that the stream source is rendered.""" + assert await async_setup_component( + hass, + "camera", + { + "camera": { + "name": "config_test", + "platform": "generic", + "still_image_url": "https://example.com", + "stream_source": 'http://example.com/{{ states.sensor.temp.state + "a" }}', + "limit_refetch_to_url_change": True, + } + }, + ) + await hass.async_block_till_done() + + hass.states.async_set("sensor.temp", "5") + + with patch( + "homeassistant.components.camera.request_stream", + return_value="http://home.assistant/playlist.m3u8", + ) as mock_request_stream: + # Request playlist through WebSocket + client = await hass_ws_client(hass) + + await client.send_json( + {"id": 1, "type": "camera/stream", "entity_id": "camera.config_test"} + ) + msg = await client.receive_json() + + # Assert WebSocket response + assert mock_request_stream.call_count == 1 + assert mock_request_stream.call_args[0][1] == "http://example.com/5a" + assert msg["id"] == 1 + assert msg["type"] == TYPE_RESULT + assert msg["success"] + assert msg["result"]["url"][-13:] == "playlist.m3u8" + + # Cause a template render error + hass.states.async_remove("sensor.temp") + + await client.send_json( + {"id": 2, "type": "camera/stream", "entity_id": "camera.config_test"} + ) + msg = await client.receive_json() + + # Assert that no new call to the stream request should have been made + assert mock_request_stream.call_count == 1 + # Assert the websocket error message + assert msg["id"] == 2 + assert msg["type"] == TYPE_RESULT + assert msg["success"] is False + assert msg["error"] == { + "code": "start_stream_failed", + "message": "camera.config_test does not support play stream service", + } + + +async def test_no_stream_source(aioclient_mock, hass, hass_client, hass_ws_client): + """Test a stream request without stream source option set.""" + assert await async_setup_component( + hass, + "camera", + { + "camera": { + "name": "config_test", + "platform": "generic", + "still_image_url": "https://example.com", + "limit_refetch_to_url_change": True, + } + }, + ) + await hass.async_block_till_done() + + with patch( + "homeassistant.components.camera.request_stream", + return_value="http://home.assistant/playlist.m3u8", + ) as mock_request_stream: + # 
Request playlist through WebSocket + client = await hass_ws_client(hass) + + await client.send_json( + {"id": 3, "type": "camera/stream", "entity_id": "camera.config_test"} + ) + msg = await client.receive_json() + + # Assert the websocket error message + assert mock_request_stream.call_count == 0 + assert msg["id"] == 3 + assert msg["type"] == TYPE_RESULT + assert msg["success"] is False + assert msg["error"] == { + "code": "start_stream_failed", + "message": "camera.config_test does not support play stream service", + } + + async def test_camera_content_type(aioclient_mock, hass, hass_client): """Test generic camera with custom content_type.""" svg_image = "" diff --git a/tests/components/geo_rss_events/test_sensor.py b/tests/components/geo_rss_events/test_sensor.py index 9f7cdd3faab..61e4ab1e7d1 100644 --- a/tests/components/geo_rss_events/test_sensor.py +++ b/tests/components/geo_rss_events/test_sensor.py @@ -42,10 +42,7 @@ class TestGeoRssServiceUpdater(unittest.TestCase): """Initialize values for this testcase class.""" self.hass = get_test_home_assistant() # self.config = VALID_CONFIG_WITHOUT_CATEGORIES - - def tearDown(self): - """Stop everything that was started.""" - self.hass.stop() + self.addCleanup(self.hass.stop) @staticmethod def _generate_mock_feed_entry( diff --git a/tests/components/google_assistant/test_trait.py b/tests/components/google_assistant/test_trait.py index 3dca89b8193..cf8fde8af7b 100644 --- a/tests/components/google_assistant/test_trait.py +++ b/tests/components/google_assistant/test_trait.py @@ -873,7 +873,11 @@ async def test_arm_disarm_arm_away(hass): State( "alarm_control_panel.alarm", STATE_ALARM_ARMED_AWAY, - {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, + { + alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True, + ATTR_SUPPORTED_FEATURES: alarm_control_panel.const.SUPPORT_ALARM_ARM_HOME + | alarm_control_panel.const.SUPPORT_ALARM_ARM_AWAY, + }, ), PIN_CONFIG, ) @@ -892,25 +896,6 @@ async def test_arm_disarm_arm_away(hass): {"level_synonym": ["armed away", "away"], "lang": "en"} ], }, - { - "level_name": "armed_night", - "level_values": [ - {"level_synonym": ["armed night", "night"], "lang": "en"} - ], - }, - { - "level_name": "armed_custom_bypass", - "level_values": [ - { - "level_synonym": ["armed custom bypass", "custom"], - "lang": "en", - } - ], - }, - { - "level_name": "triggered", - "level_values": [{"level_synonym": ["triggered"], "lang": "en"}], - }, ], "ordered": False, } @@ -1031,6 +1016,11 @@ async def test_arm_disarm_arm_away(hass): ) assert len(calls) == 2 + with pytest.raises(error.SmartHomeError) as err: + await trt.execute( + trait.COMMAND_ARMDISARM, PIN_DATA, {"arm": True}, {}, + ) + async def test_arm_disarm_disarm(hass): """Test ArmDisarm trait Disarming support for alarm_control_panel domain.""" @@ -1043,31 +1033,17 @@ async def test_arm_disarm_disarm(hass): State( "alarm_control_panel.alarm", STATE_ALARM_DISARMED, - {alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True}, + { + alarm_control_panel.ATTR_CODE_ARM_REQUIRED: True, + ATTR_SUPPORTED_FEATURES: alarm_control_panel.const.SUPPORT_ALARM_TRIGGER + | alarm_control_panel.const.SUPPORT_ALARM_ARM_CUSTOM_BYPASS, + }, ), PIN_CONFIG, ) assert trt.sync_attributes() == { "availableArmLevels": { "levels": [ - { - "level_name": "armed_home", - "level_values": [ - {"level_synonym": ["armed home", "home"], "lang": "en"} - ], - }, - { - "level_name": "armed_away", - "level_values": [ - {"level_synonym": ["armed away", "away"], "lang": "en"} - ], - }, - { - "level_name": "armed_night", - 
"level_values": [ - {"level_synonym": ["armed night", "night"], "lang": "en"} - ], - }, { "level_name": "armed_custom_bypass", "level_values": [ diff --git a/tests/components/google_pubsub/test_init.py b/tests/components/google_pubsub/test_init.py new file mode 100644 index 00000000000..aebcfa06b13 --- /dev/null +++ b/tests/components/google_pubsub/test_init.py @@ -0,0 +1,262 @@ +"""The tests for the Google Pub/Sub component.""" +from collections import namedtuple +from datetime import datetime + +import pytest + +import homeassistant.components.google_pubsub as google_pubsub +from homeassistant.components.google_pubsub import DateTimeJSONEncoder as victim +from homeassistant.const import EVENT_STATE_CHANGED +from homeassistant.core import split_entity_id +from homeassistant.setup import async_setup_component + +import tests.async_mock as mock + +GOOGLE_PUBSUB_PATH = "homeassistant.components.google_pubsub" + + +async def test_datetime(): + """Test datetime encoding.""" + time = datetime(2019, 1, 13, 12, 30, 5) + assert victim().encode(time) == '"2019-01-13T12:30:05"' + + +async def test_no_datetime(): + """Test integer encoding.""" + assert victim().encode(42) == "42" + + +async def test_nested(): + """Test dictionary encoding.""" + assert victim().encode({"foo": "bar"}) == '{"foo": "bar"}' + + +@pytest.fixture(autouse=True, name="mock_client") +def mock_client_fixture(): + """Mock the pubsub client.""" + with mock.patch(f"{GOOGLE_PUBSUB_PATH}.pubsub_v1") as client: + client.PublisherClient = mock.MagicMock() + setattr( + client.PublisherClient, + "from_service_account_json", + mock.MagicMock(return_value=mock.MagicMock()), + ) + yield client + + +@pytest.fixture(autouse=True, name="mock_os") +def mock_os_fixture(): + """Mock the OS cli.""" + with mock.patch(f"{GOOGLE_PUBSUB_PATH}.os") as os_cli: + os_cli.path = mock.MagicMock() + setattr(os_cli.path, "join", mock.MagicMock(return_value="path")) + yield os_cli + + +@pytest.fixture(autouse=True) +def mock_bus_and_json(hass, monkeypatch): + """Mock the event bus listener and os component.""" + hass.bus.listen = mock.MagicMock() + monkeypatch.setattr( + f"{GOOGLE_PUBSUB_PATH}.json.dumps", mock.Mock(return_value=mock.MagicMock()) + ) + + +async def test_minimal_config(hass, mock_client): + """Test the minimal config and defaults of component.""" + config = { + google_pubsub.DOMAIN: { + "project_id": "proj", + "topic_name": "topic", + "credentials_json": "creds", + "filter": {}, + } + } + assert await async_setup_component(hass, google_pubsub.DOMAIN, config) + await hass.async_block_till_done() + assert hass.bus.listen.called + assert EVENT_STATE_CHANGED == hass.bus.listen.call_args_list[0][0][0] + assert mock_client.PublisherClient.from_service_account_json.call_count == 1 + assert ( + mock_client.PublisherClient.from_service_account_json.call_args[0][0] == "path" + ) + + +async def test_full_config(hass, mock_client): + """Test the full config of the component.""" + config = { + google_pubsub.DOMAIN: { + "project_id": "proj", + "topic_name": "topic", + "credentials_json": "creds", + "filter": { + "include_domains": ["light"], + "include_entity_globs": ["sensor.included_*"], + "include_entities": ["binary_sensor.included"], + "exclude_domains": ["light"], + "exclude_entity_globs": ["sensor.excluded_*"], + "exclude_entities": ["binary_sensor.excluded"], + }, + } + } + assert await async_setup_component(hass, google_pubsub.DOMAIN, config) + await hass.async_block_till_done() + assert hass.bus.listen.called + assert EVENT_STATE_CHANGED == 
hass.bus.listen.call_args_list[0][0][0] + assert mock_client.PublisherClient.from_service_account_json.call_count == 1 + assert ( + mock_client.PublisherClient.from_service_account_json.call_args[0][0] == "path" + ) + + +FilterTest = namedtuple("FilterTest", "id should_pass") + + +def make_event(entity_id): + """Make a mock event for test.""" + domain = split_entity_id(entity_id)[0] + state = mock.MagicMock( + state="not blank", + domain=domain, + entity_id=entity_id, + object_id="entity", + attributes={}, + ) + return mock.MagicMock(data={"new_state": state}, time_fired=12345) + + +async def _setup(hass, filter_config): + """Shared set up for filtering tests.""" + config = { + google_pubsub.DOMAIN: { + "project_id": "proj", + "topic_name": "topic", + "credentials_json": "creds", + "filter": filter_config, + } + } + assert await async_setup_component(hass, google_pubsub.DOMAIN, config) + await hass.async_block_till_done() + return hass.bus.listen.call_args_list[0][0][1] + + +async def test_allowlist(hass, mock_client): + """Test an allowlist only config.""" + handler_method = await _setup( + hass, + { + "include_domains": ["light"], + "include_entity_globs": ["sensor.included_*"], + "include_entities": ["binary_sensor.included"], + }, + ) + publish_client = mock_client.PublisherClient.from_service_account_json("path") + + tests = [ + FilterTest("climate.excluded", False), + FilterTest("light.included", True), + FilterTest("sensor.excluded_test", False), + FilterTest("sensor.included_test", True), + FilterTest("binary_sensor.included", True), + FilterTest("binary_sensor.excluded", False), + ] + + for test in tests: + event = make_event(test.id) + handler_method(event) + + was_called = publish_client.publish.call_count == 1 + assert test.should_pass == was_called + publish_client.publish.reset_mock() + + +async def test_denylist(hass, mock_client): + """Test a denylist only config.""" + handler_method = await _setup( + hass, + { + "exclude_domains": ["climate"], + "exclude_entity_globs": ["sensor.excluded_*"], + "exclude_entities": ["binary_sensor.excluded"], + }, + ) + publish_client = mock_client.PublisherClient.from_service_account_json("path") + + tests = [ + FilterTest("climate.excluded", False), + FilterTest("light.included", True), + FilterTest("sensor.excluded_test", False), + FilterTest("sensor.included_test", True), + FilterTest("binary_sensor.included", True), + FilterTest("binary_sensor.excluded", False), + ] + + for test in tests: + event = make_event(test.id) + handler_method(event) + + was_called = publish_client.publish.call_count == 1 + assert test.should_pass == was_called + publish_client.publish.reset_mock() + + +async def test_filtered_allowlist(hass, mock_client): + """Test an allowlist config with a filtering denylist.""" + handler_method = await _setup( + hass, + { + "include_domains": ["light"], + "include_entity_globs": ["*.included_*"], + "exclude_domains": ["climate"], + "exclude_entity_globs": ["*.excluded_*"], + "exclude_entities": ["light.excluded"], + }, + ) + publish_client = mock_client.PublisherClient.from_service_account_json("path") + + tests = [ + FilterTest("light.included", True), + FilterTest("light.excluded_test", False), + FilterTest("light.excluded", False), + FilterTest("sensor.included_test", True), + FilterTest("climate.included_test", False), + ] + + for test in tests: + event = make_event(test.id) + handler_method(event) + + was_called = publish_client.publish.call_count == 1 + assert test.should_pass == was_called + 
publish_client.publish.reset_mock() + + +async def test_filtered_denylist(hass, mock_client): + """Test a denylist config with a filtering allowlist.""" + handler_method = await _setup( + hass, + { + "include_entities": ["climate.included", "sensor.excluded_test"], + "exclude_domains": ["climate"], + "exclude_entity_globs": ["*.excluded_*"], + "exclude_entities": ["light.excluded"], + }, + ) + publish_client = mock_client.PublisherClient.from_service_account_json("path") + + tests = [ + FilterTest("climate.excluded", False), + FilterTest("climate.included", True), + FilterTest("switch.excluded_test", False), + FilterTest("sensor.excluded_test", True), + FilterTest("light.excluded", False), + FilterTest("light.included", True), + ] + + for test in tests: + event = make_event(test.id) + handler_method(event) + + was_called = publish_client.publish.call_count == 1 + assert test.should_pass == was_called + publish_client.publish.reset_mock() diff --git a/tests/components/google_pubsub/test_pubsub.py b/tests/components/google_pubsub/test_pubsub.py deleted file mode 100644 index 77ca4ed8bd7..00000000000 --- a/tests/components/google_pubsub/test_pubsub.py +++ /dev/null @@ -1,21 +0,0 @@ -"""The tests for the Google Pub/Sub component.""" -from datetime import datetime - -from homeassistant.components.google_pubsub import DateTimeJSONEncoder as victim - - -class TestDateTimeJSONEncoder: - """Bundle for DateTimeJSONEncoder tests.""" - - def test_datetime(self): - """Test datetime encoding.""" - time = datetime(2019, 1, 13, 12, 30, 5) - assert victim().encode(time) == '"2019-01-13T12:30:05"' - - def test_no_datetime(self): - """Test integer encoding.""" - assert victim().encode(42) == "42" - - def test_nested(self): - """Test dictionary encoding.""" - assert victim().encode({"foo": "bar"}) == '{"foo": "bar"}' diff --git a/tests/components/google_wifi/test_sensor.py b/tests/components/google_wifi/test_sensor.py index 69db8de184b..ff3ec0429fa 100644 --- a/tests/components/google_wifi/test_sensor.py +++ b/tests/components/google_wifi/test_sensor.py @@ -40,10 +40,7 @@ class TestGoogleWifiSetup(unittest.TestCase): def setUp(self): """Set up things to be run when tests are started.""" self.hass = get_test_home_assistant() - - def tearDown(self): - """Stop everything that was started.""" - self.hass.stop() + self.addCleanup(self.hass.stop) @requests_mock.Mocker() def test_setup_minimum(self, mock_req): @@ -92,10 +89,7 @@ class TestGoogleWifiSensor(unittest.TestCase): self.hass = get_test_home_assistant() with requests_mock.Mocker() as mock_req: self.setup_api(MOCK_DATA, mock_req) - - def tearDown(self): - """Stop everything that was started.""" - self.hass.stop() + self.addCleanup(self.hass.stop) def setup_api(self, data, mock_req): """Set up API with fake data.""" diff --git a/tests/components/group/test_cover.py b/tests/components/group/test_cover.py index 1adad3e3d85..98460762389 100644 --- a/tests/components/group/test_cover.py +++ b/tests/components/group/test_cover.py @@ -86,6 +86,12 @@ async def test_attributes(hass, setup_comp): state = hass.states.get(COVER_GROUP) assert state.state == STATE_CLOSED assert state.attributes[ATTR_FRIENDLY_NAME] == DEFAULT_NAME + assert state.attributes[ATTR_ENTITY_ID] == [ + DEMO_COVER, + DEMO_COVER_POS, + DEMO_COVER_TILT, + DEMO_TILT, + ] assert ATTR_ASSUMED_STATE not in state.attributes assert state.attributes[ATTR_SUPPORTED_FEATURES] == 0 assert ATTR_CURRENT_POSITION not in state.attributes diff --git a/tests/components/group/test_init.py 
b/tests/components/group/test_init.py index c4d98ad37cc..ff5f3a30f75 100644 --- a/tests/components/group/test_init.py +++ b/tests/components/group/test_init.py @@ -28,11 +28,7 @@ class TestComponentsGroup(unittest.TestCase): def setUp(self): """Set up things to be run when tests are started.""" self.hass = get_test_home_assistant() - - # pylint: disable=invalid-name - def tearDown(self): - """Stop everything that was started.""" - self.hass.stop() + self.addCleanup(self.hass.stop) def test_setup_group_with_mixed_groupable_states(self): """Try to set up a group with mixed groupable states.""" diff --git a/tests/components/group/test_light.py b/tests/components/group/test_light.py index 70dab4472ed..2a2e21f77c5 100644 --- a/tests/components/group/test_light.py +++ b/tests/components/group/test_light.py @@ -34,17 +34,25 @@ from tests.async_mock import MagicMock async def test_default_state(hass): """Test light group default state.""" + hass.states.async_set("light.kitchen", "on") await async_setup_component( hass, LIGHT_DOMAIN, - {LIGHT_DOMAIN: {"platform": DOMAIN, "entities": [], "name": "Bedroom Group"}}, + { + LIGHT_DOMAIN: { + "platform": DOMAIN, + "entities": ["light.kitchen", "light.bedroom"], + "name": "Bedroom Group", + } + }, ) await hass.async_block_till_done() state = hass.states.get("light.bedroom_group") assert state is not None - assert state.state == STATE_UNAVAILABLE + assert state.state == STATE_ON assert state.attributes[ATTR_SUPPORTED_FEATURES] == 0 + assert state.attributes.get(ATTR_ENTITY_ID) == ["light.kitchen", "light.bedroom"] assert state.attributes.get(ATTR_BRIGHTNESS) is None assert state.attributes.get(ATTR_HS_COLOR) is None assert state.attributes.get(ATTR_COLOR_TEMP) is None @@ -562,14 +570,14 @@ async def test_invalid_service_calls(hass): await grouped_light.async_turn_on(brightness=150, four_oh_four="404") data = {ATTR_ENTITY_ID: ["light.test1", "light.test2"], ATTR_BRIGHTNESS: 150} mock_call.assert_called_once_with( - LIGHT_DOMAIN, SERVICE_TURN_ON, data, blocking=True + LIGHT_DOMAIN, SERVICE_TURN_ON, data, blocking=True, context=None ) mock_call.reset_mock() await grouped_light.async_turn_off(transition=4, four_oh_four="404") data = {ATTR_ENTITY_ID: ["light.test1", "light.test2"], ATTR_TRANSITION: 4} mock_call.assert_called_once_with( - LIGHT_DOMAIN, SERVICE_TURN_OFF, data, blocking=True + LIGHT_DOMAIN, SERVICE_TURN_OFF, data, blocking=True, context=None ) mock_call.reset_mock() @@ -588,5 +596,5 @@ async def test_invalid_service_calls(hass): data.pop(ATTR_RGB_COLOR) data.pop(ATTR_XY_COLOR) mock_call.assert_called_once_with( - LIGHT_DOMAIN, SERVICE_TURN_ON, data, blocking=True + LIGHT_DOMAIN, SERVICE_TURN_ON, data, blocking=True, context=None ) diff --git a/tests/components/group/test_notify.py b/tests/components/group/test_notify.py index 0925b318c9e..b120cf2cea4 100644 --- a/tests/components/group/test_notify.py +++ b/tests/components/group/test_notify.py @@ -63,8 +63,9 @@ class TestNotifyGroup(unittest.TestCase): ).result() assert self.service is not None + self.addCleanup(self.tear_down_cleanup) - def tearDown(self): # pylint: disable=invalid-name + def tear_down_cleanup(self): """Stop everything that was started.""" self.hass.stop() diff --git a/tests/components/harmony/test_config_flow.py b/tests/components/harmony/test_config_flow.py index 079923330e2..812e3414ea9 100644 --- a/tests/components/harmony/test_config_flow.py +++ b/tests/components/harmony/test_config_flow.py @@ -66,6 +66,7 @@ async def test_form_import(hass): "name": "friend", "activity": 
"Watch TV", "delay_secs": 0.9, + "activity_notify": True, "unique_id": "555234534543", }, ) @@ -78,6 +79,7 @@ async def test_form_import(hass): "name": "friend", "activity": "Watch TV", "delay_secs": 0.9, + "activity_notify": True, } # It is not possible to import options at this time # so they end up in the config entry data and are @@ -148,6 +150,7 @@ async def test_form_cannot_connect(hass): "name": "friend", "activity": "Watch TV", "delay_secs": 0.2, + "activity_notify": True, }, ) diff --git a/tests/components/hddtemp/test_sensor.py b/tests/components/hddtemp/test_sensor.py index 84315afb476..4062d737ea2 100644 --- a/tests/components/hddtemp/test_sensor.py +++ b/tests/components/hddtemp/test_sensor.py @@ -87,10 +87,7 @@ class TestHDDTempSensor(unittest.TestCase): "model": "WDC WD15EARS-00Z5B1", }, } - - def tearDown(self): - """Stop everything that was started.""" - self.hass.stop() + self.addCleanup(self.hass.stop) @patch("telnetlib.Telnet", new=TelnetMock) def test_hddtemp_min_config(self): diff --git a/tests/components/heos/test_media_player.py b/tests/components/heos/test_media_player.py index 31aced7f807..835f34e5efc 100644 --- a/tests/components/heos/test_media_player.py +++ b/tests/components/heos/test_media_player.py @@ -614,6 +614,29 @@ async def test_play_media_url(hass, config_entry, config, controller, caplog): assert "Unable to play media: Failure (1)" in caplog.text +async def test_play_media_music(hass, config_entry, config, controller, caplog): + """Test the play media service with type music.""" + await setup_platform(hass, config_entry, config) + player = controller.players[1] + url = "http://news/podcast.mp3" + # First pass completes successfully, second pass raises command error + for _ in range(2): + await hass.services.async_call( + MEDIA_PLAYER_DOMAIN, + SERVICE_PLAY_MEDIA, + { + ATTR_ENTITY_ID: "media_player.test_player", + ATTR_MEDIA_CONTENT_TYPE: MEDIA_TYPE_MUSIC, + ATTR_MEDIA_CONTENT_ID: url, + }, + blocking=True, + ) + player.play_url.assert_called_once_with(url) + player.play_url.reset_mock() + player.play_url.side_effect = CommandFailedError(None, "Failure", 1) + assert "Unable to play media: Failure (1)" in caplog.text + + async def test_play_media_quick_select( hass, config_entry, config, controller, caplog, quick_selects ): diff --git a/tests/components/history/test_init.py b/tests/components/history/test_init.py index de449b02c74..34b22481400 100644 --- a/tests/components/history/test_init.py +++ b/tests/components/history/test_init.py @@ -1,5 +1,6 @@ """The tests the History component.""" # pylint: disable=protected-access,invalid-name +from copy import copy from datetime import timedelta import json import unittest @@ -26,8 +27,9 @@ class TestComponentHistory(unittest.TestCase): def setUp(self): # pylint: disable=invalid-name """Set up things to be run when tests are started.""" self.hass = get_test_home_assistant() + self.addCleanup(self.tear_down_cleanup) - def tearDown(self): # pylint: disable=invalid-name + def tear_down_cleanup(self): """Stop everything that was started.""" self.hass.stop() @@ -187,6 +189,39 @@ class TestComponentHistory(unittest.TestCase): assert states == hist[entity_id] + def test_ensure_state_can_be_copied(self): + """Ensure a state can pass though copy(). + + The filter integration uses copy() on states + from history. 
+ """ + self.init_recorder() + entity_id = "sensor.test" + + def set_state(state): + """Set the state.""" + self.hass.states.set(entity_id, state) + wait_recording_done(self.hass) + return self.hass.states.get(entity_id) + + start = dt_util.utcnow() - timedelta(minutes=2) + point = start + timedelta(minutes=1) + + with patch( + "homeassistant.components.recorder.dt_util.utcnow", return_value=start + ): + set_state("1") + + with patch( + "homeassistant.components.recorder.dt_util.utcnow", return_value=point + ): + set_state("2") + + hist = history.get_last_state_changes(self.hass, 2, entity_id) + + assert copy(hist[entity_id][0]) == hist[entity_id][0] + assert copy(hist[entity_id][1]) == hist[entity_id][1] + def test_get_significant_states(self): """Test that only significant states are returned. @@ -695,12 +730,6 @@ class TestComponentHistory(unittest.TestCase): ): # This state will be skipped only different in time set_state(mp, "YouTube", attributes={"media_title": str(sentinel.mt3)}) - # This state will be skipped as it hidden - set_state( - mp3, - "Apple TV", - attributes={"media_title": str(sentinel.mt2), "hidden": True}, - ) # This state will be skipped because domain blacklisted set_state(zone, "zoning") set_state(script_nc, "off") @@ -727,8 +756,6 @@ class TestComponentHistory(unittest.TestCase): states[therm].append( set_state(therm, 21, attributes={"current_temperature": 20}) ) - # state will be skipped since entity is hidden - set_state(therm, 22, attributes={"current_temperature": 21, "hidden": True}) return zero, four, states @@ -767,6 +794,16 @@ async def test_fetch_period_api_with_minimal_response(hass, hass_client): assert response.status == 200 +async def test_fetch_period_api_with_no_timestamp(hass, hass_client): + """Test the fetch period view for history with no timestamp.""" + await hass.async_add_executor_job(init_recorder_component, hass) + await async_setup_component(hass, "history", {}) + await hass.async_add_job(hass.data[recorder.DATA_INSTANCE].block_till_done) + client = await hass_client() + response = await client.get("/api/history/period") + assert response.status == 200 + + async def test_fetch_period_api_with_include_order(hass, hass_client): """Test the fetch period view for history.""" await hass.async_add_executor_job(init_recorder_component, hass) diff --git a/tests/components/history_stats/test_sensor.py b/tests/components/history_stats/test_sensor.py index 900af4988e2..25cca7615a7 100644 --- a/tests/components/history_stats/test_sensor.py +++ b/tests/components/history_stats/test_sensor.py @@ -23,10 +23,7 @@ class TestHistoryStatsSensor(unittest.TestCase): def setUp(self): """Set up things to be run when tests are started.""" self.hass = get_test_home_assistant() - - def tearDown(self): - """Stop everything that was started.""" - self.hass.stop() + self.addCleanup(self.hass.stop) def test_setup(self): """Test the history statistics sensor setup.""" diff --git a/tests/components/home_connect/test_config_flow.py b/tests/components/home_connect/test_config_flow.py index d6d936fe16e..57c6fb2af7f 100644 --- a/tests/components/home_connect/test_config_flow.py +++ b/tests/components/home_connect/test_config_flow.py @@ -8,6 +8,8 @@ from homeassistant.components.home_connect.const import ( from homeassistant.const import CONF_CLIENT_ID, CONF_CLIENT_SECRET from homeassistant.helpers import config_entry_oauth2_flow +from tests.async_mock import patch + CLIENT_ID = "1234" CLIENT_SECRET = "5678" @@ -53,6 +55,11 @@ async def test_full_flow(hass, aiohttp_client, 
aioclient_mock): }, ) - result = await hass.config_entries.flow.async_configure(result["flow_id"]) + with patch( + "homeassistant.components.home_connect.async_setup_entry", return_value=True + ) as mock_setup_entry: + result = await hass.config_entries.flow.async_configure(result["flow_id"]) + await hass.async_block_till_done() assert len(hass.config_entries.async_entries(DOMAIN)) == 1 + assert len(mock_setup_entry.mock_calls) == 1 diff --git a/tests/components/homeassistant/test_init.py b/tests/components/homeassistant/test_init.py index b9309d70d63..3419aea06af 100644 --- a/tests/components/homeassistant/test_init.py +++ b/tests/components/homeassistant/test_init.py @@ -120,11 +120,7 @@ class TestComponentsCore(unittest.TestCase): self.hass.states.set("light.Bowl", STATE_ON) self.hass.states.set("light.Ceiling", STATE_OFF) - - # pylint: disable=invalid-name - def tearDown(self): - """Stop everything that was started.""" - self.hass.stop() + self.addCleanup(self.hass.stop) def test_is_on(self): """Test is_on method.""" diff --git a/tests/components/homekit/test_homekit.py b/tests/components/homekit/test_homekit.py index ca31b4501b9..1903e3eca8f 100644 --- a/tests/components/homekit/test_homekit.py +++ b/tests/components/homekit/test_homekit.py @@ -66,20 +66,20 @@ from tests.components.homekit.common import patch_debounce IP_ADDRESS = "127.0.0.1" -@pytest.fixture -def device_reg(hass): +@pytest.fixture(name="device_reg") +def device_reg_fixture(hass): """Return an empty, loaded, registry.""" return mock_device_registry(hass) -@pytest.fixture -def entity_reg(hass): +@pytest.fixture(name="entity_reg") +def entity_reg_fixture(hass): """Return an empty, loaded, registry.""" return mock_registry(hass) -@pytest.fixture(scope="module") -def debounce_patcher(): +@pytest.fixture(name="debounce_patcher", scope="module") +def debounce_patcher_fixture(): """Patch debounce method.""" patcher = patch_debounce() yield patcher.start() @@ -88,7 +88,6 @@ def debounce_patcher(): async def test_setup_min(hass): """Test async_setup with min config options.""" - entry = MockConfigEntry( domain=DOMAIN, data={CONF_NAME: BRIDGE_NAME, CONF_PORT: DEFAULT_PORT}, @@ -413,6 +412,47 @@ async def test_homekit_entity_filter(hass): assert mock_get_acc.called is False +async def test_homekit_entity_glob_filter(hass): + """Test the entity filter.""" + entry = await async_init_integration(hass) + + entity_filter = generate_filter( + ["cover"], ["demo.test"], [], [], ["*.included_*"], ["*.excluded_*"] + ) + homekit = HomeKit( + hass, + None, + None, + None, + entity_filter, + {}, + DEFAULT_SAFE_MODE, + advertise_ip=None, + entry_id=entry.entry_id, + ) + homekit.bridge = Mock() + homekit.bridge.accessories = {} + + with patch(f"{PATH_HOMEKIT}.get_accessory") as mock_get_acc: + mock_get_acc.return_value = None + + homekit.add_bridge_accessory(State("cover.test", "open")) + assert mock_get_acc.called is True + mock_get_acc.reset_mock() + + homekit.add_bridge_accessory(State("demo.test", "on")) + assert mock_get_acc.called is True + mock_get_acc.reset_mock() + + homekit.add_bridge_accessory(State("cover.excluded_test", "open")) + assert mock_get_acc.called is False + mock_get_acc.reset_mock() + + homekit.add_bridge_accessory(State("light.included_test", "light")) + assert mock_get_acc.called is True + mock_get_acc.reset_mock() + + async def test_homekit_start(hass, hk_driver, device_reg, debounce_patcher): """Test HomeKit start method.""" entry = await async_init_integration(hass) @@ -432,6 +472,7 @@ async def 
test_homekit_start(hass, hk_driver, device_reg, debounce_patcher): homekit.bridge = Mock() homekit.bridge.accessories = [] homekit.driver = hk_driver + # pylint: disable=protected-access homekit._filter = Mock(return_value=True) connection = (device_registry.CONNECTION_NETWORK_MAC, "AA:BB:CC:DD:EE:FF") @@ -587,7 +628,6 @@ async def test_homekit_stop(hass): async def test_homekit_reset_accessories(hass): """Test adding too many accessories to HomeKit.""" - entry = MockConfigEntry( domain=DOMAIN, data={CONF_NAME: "mock_name", CONF_PORT: 12345} ) @@ -629,7 +669,7 @@ async def test_homekit_reset_accessories(hass): ) await hass.async_block_till_done() - assert 2 == hk_driver_config_changed.call_count + assert hk_driver_config_changed.call_count == 2 assert mock_add_accessory.called homekit.status = STATUS_READY @@ -686,6 +726,7 @@ async def test_homekit_finds_linked_batteries( entry_id=entry.entry_id, ) homekit.driver = hk_driver + # pylint: disable=protected-access homekit._filter = Mock(return_value=True) homekit.bridge = HomeBridge(hass, hk_driver, "mock_bridge") @@ -818,7 +859,6 @@ async def test_setup_imported(hass): async def test_yaml_updates_update_config_entry_for_name(hass): """Test async_setup with imported config.""" - entry = MockConfigEntry( domain=DOMAIN, source=SOURCE_IMPORT, @@ -858,7 +898,6 @@ async def test_yaml_updates_update_config_entry_for_name(hass): async def test_raise_config_entry_not_ready(hass): """Test async_setup when the port is not available.""" - entry = MockConfigEntry( domain=DOMAIN, data={CONF_NAME: BRIDGE_NAME, CONF_PORT: DEFAULT_PORT}, @@ -918,6 +957,7 @@ async def test_homekit_ignored_missing_devices( entry_id=entry.entry_id, ) homekit.driver = hk_driver + # pylint: disable=protected-access homekit._filter = Mock(return_value=True) homekit.bridge = HomeBridge(hass, hk_driver, "mock_bridge") @@ -997,6 +1037,7 @@ async def test_homekit_finds_linked_motion_sensors( entry_id=entry.entry_id, ) homekit.driver = hk_driver + # pylint: disable=protected-access homekit._filter = Mock(return_value=True) homekit.bridge = HomeBridge(hass, hk_driver, "mock_bridge") diff --git a/tests/components/homekit/test_init.py b/tests/components/homekit/test_init.py index 6d01413da8f..1fad563445b 100644 --- a/tests/components/homekit/test_init.py +++ b/tests/components/homekit/test_init.py @@ -1,5 +1,4 @@ """Test HomeKit initialization.""" -from homeassistant import core as ha from homeassistant.components import logbook from homeassistant.components.homekit.const import ( ATTR_DISPLAY_NAME, @@ -11,18 +10,22 @@ from homeassistant.const import ATTR_ENTITY_ID, ATTR_SERVICE from homeassistant.setup import async_setup_component from tests.async_mock import patch +from tests.components.logbook.test_init import MockLazyEventPartialState async def test_humanify_homekit_changed_event(hass, hk_driver): """Test humanifying HomeKit changed event.""" + hass.config.components.add("recorder") with patch("homeassistant.components.homekit.HomeKit"): assert await async_setup_component(hass, "homekit", {"homekit": {}}) + assert await async_setup_component(hass, "logbook", {}) + entity_attr_cache = logbook.EntityAttributeCache(hass) event1, event2 = list( logbook.humanify( hass, [ - ha.Event( + MockLazyEventPartialState( EVENT_HOMEKIT_CHANGED, { ATTR_ENTITY_ID: "lock.front_door", @@ -30,7 +33,7 @@ async def test_humanify_homekit_changed_event(hass, hk_driver): ATTR_SERVICE: "lock", }, ), - ha.Event( + MockLazyEventPartialState( EVENT_HOMEKIT_CHANGED, { ATTR_ENTITY_ID: "cover.window", @@ -40,6 +43,7 
@@ async def test_humanify_homekit_changed_event(hass, hk_driver): }, ), ], + entity_attr_cache, ) ) diff --git a/tests/components/homematic/test_notify.py b/tests/components/homematic/test_notify.py index 411be41eb39..e1bd2f4f229 100644 --- a/tests/components/homematic/test_notify.py +++ b/tests/components/homematic/test_notify.py @@ -14,8 +14,9 @@ class TestHomematicNotify(unittest.TestCase): def setUp(self): # pylint: disable=invalid-name """Set up things to be run when tests are started.""" self.hass = get_test_home_assistant() + self.addCleanup(self.tear_down_cleanup) - def tearDown(self): # pylint: disable=invalid-name + def tear_down_cleanup(self): """Stop down everything that was started.""" self.hass.stop() diff --git a/tests/components/homematicip_cloud/test_binary_sensor.py b/tests/components/homematicip_cloud/test_binary_sensor.py index 43b88976043..7fe9a7327ea 100644 --- a/tests/components/homematicip_cloud/test_binary_sensor.py +++ b/tests/components/homematicip_cloud/test_binary_sensor.py @@ -128,6 +128,36 @@ async def test_hmip_shutter_contact(hass, default_mock_hap_factory): assert ha_state.attributes[ATTR_SABOTAGE] +async def test_hmip_shutter_contact_optical(hass, default_mock_hap_factory): + """Test HomematicipShutterContact.""" + entity_id = "binary_sensor.sitzplatzture" + entity_name = "Sitzplatzt\u00fcre" + device_model = "HmIP-SWDO-PL" + mock_hap = await default_mock_hap_factory.async_get_mock_hap( + test_devices=[entity_name] + ) + + ha_state, hmip_device = get_and_check_entity_basics( + hass, mock_hap, entity_id, entity_name, device_model + ) + + assert ha_state.state == STATE_OFF + await async_manipulate_test_data(hass, hmip_device, "windowState", WindowState.OPEN) + ha_state = hass.states.get(entity_id) + assert ha_state.state == STATE_ON + + await async_manipulate_test_data(hass, hmip_device, "windowState", None) + ha_state = hass.states.get(entity_id) + assert ha_state.state == STATE_OFF + + # test common attributes + assert ha_state.attributes[ATTR_RSSI_DEVICE] == -72 + assert not ha_state.attributes.get(ATTR_SABOTAGE) + await async_manipulate_test_data(hass, hmip_device, "sabotage", True) + ha_state = hass.states.get(entity_id) + assert ha_state.attributes[ATTR_SABOTAGE] + + async def test_hmip_motion_detector(hass, default_mock_hap_factory): """Test HomematicipMotionDetector.""" entity_id = "binary_sensor.bewegungsmelder_fur_55er_rahmen_innen" diff --git a/tests/components/homematicip_cloud/test_cover.py b/tests/components/homematicip_cloud/test_cover.py index 7da1a94bdd7..7ef0e3d6703 100644 --- a/tests/components/homematicip_cloud/test_cover.py +++ b/tests/components/homematicip_cloud/test_cover.py @@ -207,6 +207,53 @@ async def test_hmip_garage_door_tormatic(hass, default_mock_hap_factory): assert hmip_device.mock_calls[-1][1] == (DoorCommand.STOP,) +async def test_hmip_garage_door_hoermann(hass, default_mock_hap_factory): + """Test HomematicipCoverShutte.""" + entity_id = "cover.garage_door" + entity_name = "Garage door" + device_model = "HmIP-MOD-HO" + mock_hap = await default_mock_hap_factory.async_get_mock_hap( + test_devices=[entity_name] + ) + + ha_state, hmip_device = get_and_check_entity_basics( + hass, mock_hap, entity_id, entity_name, device_model + ) + + assert ha_state.state == "closed" + assert ha_state.attributes["current_position"] == 0 + service_call_counter = len(hmip_device.mock_calls) + + await hass.services.async_call( + "cover", "open_cover", {"entity_id": entity_id}, blocking=True + ) + assert len(hmip_device.mock_calls) == 
service_call_counter + 1 + assert hmip_device.mock_calls[-1][0] == "send_door_command" + assert hmip_device.mock_calls[-1][1] == (DoorCommand.OPEN,) + await async_manipulate_test_data(hass, hmip_device, "doorState", DoorState.OPEN) + ha_state = hass.states.get(entity_id) + assert ha_state.state == STATE_OPEN + assert ha_state.attributes[ATTR_CURRENT_POSITION] == 100 + + await hass.services.async_call( + "cover", "close_cover", {"entity_id": entity_id}, blocking=True + ) + assert len(hmip_device.mock_calls) == service_call_counter + 3 + assert hmip_device.mock_calls[-1][0] == "send_door_command" + assert hmip_device.mock_calls[-1][1] == (DoorCommand.CLOSE,) + await async_manipulate_test_data(hass, hmip_device, "doorState", DoorState.CLOSED) + ha_state = hass.states.get(entity_id) + assert ha_state.state == STATE_CLOSED + assert ha_state.attributes[ATTR_CURRENT_POSITION] == 0 + + await hass.services.async_call( + "cover", "stop_cover", {"entity_id": entity_id}, blocking=True + ) + assert len(hmip_device.mock_calls) == service_call_counter + 5 + assert hmip_device.mock_calls[-1][0] == "send_door_command" + assert hmip_device.mock_calls[-1][1] == (DoorCommand.STOP,) + + async def test_hmip_cover_shutter_group(hass, default_mock_hap_factory): """Test HomematicipCoverShutteGroup.""" entity_id = "cover.rollos_shuttergroup" diff --git a/tests/components/homematicip_cloud/test_device.py b/tests/components/homematicip_cloud/test_device.py index 8a8d52d167a..8ea44795c96 100644 --- a/tests/components/homematicip_cloud/test_device.py +++ b/tests/components/homematicip_cloud/test_device.py @@ -22,7 +22,7 @@ async def test_hmip_load_all_supported_devices(hass, default_mock_hap_factory): test_devices=None, test_groups=None ) - assert len(mock_hap.hmip_device_by_entity_id) == 183 + assert len(mock_hap.hmip_device_by_entity_id) == 186 async def test_hmip_remove_device(hass, default_mock_hap_factory): diff --git a/tests/components/humidifier/__init__.py b/tests/components/humidifier/__init__.py new file mode 100644 index 00000000000..1ef3f5b7216 --- /dev/null +++ b/tests/components/humidifier/__init__.py @@ -0,0 +1 @@ +"""The tests for humidifier component.""" diff --git a/tests/components/humidifier/test_device_action.py b/tests/components/humidifier/test_device_action.py new file mode 100644 index 00000000000..91b7819e18b --- /dev/null +++ b/tests/components/humidifier/test_device_action.py @@ -0,0 +1,358 @@ +"""The tests for Humidifier device actions.""" +import pytest +import voluptuous_serialize + +import homeassistant.components.automation as automation +from homeassistant.components.humidifier import DOMAIN, const, device_action +from homeassistant.const import STATE_ON +from homeassistant.helpers import config_validation as cv, device_registry +from homeassistant.setup import async_setup_component + +from tests.common import ( + MockConfigEntry, + assert_lists_same, + async_get_device_automations, + async_mock_service, + mock_device_registry, + mock_registry, +) + + +@pytest.fixture +def device_reg(hass): + """Return an empty, loaded, registry.""" + return mock_device_registry(hass) + + +@pytest.fixture +def entity_reg(hass): + """Return an empty, loaded, registry.""" + return mock_registry(hass) + + +async def test_get_actions(hass, device_reg, entity_reg): + """Test we get the expected actions from a humidifier.""" + config_entry = MockConfigEntry(domain="test", data={}) + config_entry.add_to_hass(hass) + device_entry = device_reg.async_get_or_create( + config_entry_id=config_entry.entry_id, + 
connections={(device_registry.CONNECTION_NETWORK_MAC, "12:34:56:AB:CD:EF")}, + ) + entity_reg.async_get_or_create(DOMAIN, "test", "5678", device_id=device_entry.id) + hass.states.async_set("humidifier.test_5678", STATE_ON, {}) + hass.states.async_set( + "humidifier.test_5678", "attributes", {"supported_features": 1} + ) + expected_actions = [ + { + "domain": DOMAIN, + "type": "turn_on", + "device_id": device_entry.id, + "entity_id": "humidifier.test_5678", + }, + { + "domain": DOMAIN, + "type": "turn_off", + "device_id": device_entry.id, + "entity_id": "humidifier.test_5678", + }, + { + "domain": DOMAIN, + "type": "toggle", + "device_id": device_entry.id, + "entity_id": "humidifier.test_5678", + }, + { + "domain": DOMAIN, + "type": "set_humidity", + "device_id": device_entry.id, + "entity_id": "humidifier.test_5678", + }, + { + "domain": DOMAIN, + "type": "set_mode", + "device_id": device_entry.id, + "entity_id": "humidifier.test_5678", + }, + ] + actions = await async_get_device_automations(hass, "action", device_entry.id) + assert_lists_same(actions, expected_actions) + + +async def test_get_action_no_modes(hass, device_reg, entity_reg): + """Test we get the expected actions from a humidifier.""" + config_entry = MockConfigEntry(domain="test", data={}) + config_entry.add_to_hass(hass) + device_entry = device_reg.async_get_or_create( + config_entry_id=config_entry.entry_id, + connections={(device_registry.CONNECTION_NETWORK_MAC, "12:34:56:AB:CD:EF")}, + ) + entity_reg.async_get_or_create(DOMAIN, "test", "5678", device_id=device_entry.id) + hass.states.async_set("humidifier.test_5678", STATE_ON, {}) + hass.states.async_set( + "humidifier.test_5678", "attributes", {"supported_features": 0} + ) + expected_actions = [ + { + "domain": DOMAIN, + "type": "turn_on", + "device_id": device_entry.id, + "entity_id": "humidifier.test_5678", + }, + { + "domain": DOMAIN, + "type": "turn_off", + "device_id": device_entry.id, + "entity_id": "humidifier.test_5678", + }, + { + "domain": DOMAIN, + "type": "toggle", + "device_id": device_entry.id, + "entity_id": "humidifier.test_5678", + }, + { + "domain": DOMAIN, + "type": "set_humidity", + "device_id": device_entry.id, + "entity_id": "humidifier.test_5678", + }, + ] + actions = await async_get_device_automations(hass, "action", device_entry.id) + assert_lists_same(actions, expected_actions) + + +async def test_get_action_no_state(hass, device_reg, entity_reg): + """Test we get the expected actions from a humidifier.""" + config_entry = MockConfigEntry(domain="test", data={}) + config_entry.add_to_hass(hass) + device_entry = device_reg.async_get_or_create( + config_entry_id=config_entry.entry_id, + connections={(device_registry.CONNECTION_NETWORK_MAC, "12:34:56:AB:CD:EF")}, + ) + entity_reg.async_get_or_create(DOMAIN, "test", "5678", device_id=device_entry.id) + expected_actions = [ + { + "domain": DOMAIN, + "type": "turn_on", + "device_id": device_entry.id, + "entity_id": "humidifier.test_5678", + }, + { + "domain": DOMAIN, + "type": "turn_off", + "device_id": device_entry.id, + "entity_id": "humidifier.test_5678", + }, + { + "domain": DOMAIN, + "type": "toggle", + "device_id": device_entry.id, + "entity_id": "humidifier.test_5678", + }, + { + "domain": DOMAIN, + "type": "set_humidity", + "device_id": device_entry.id, + "entity_id": "humidifier.test_5678", + }, + ] + actions = await async_get_device_automations(hass, "action", device_entry.id) + assert_lists_same(actions, expected_actions) + + +async def test_action(hass): + """Test for actions.""" + 
hass.states.async_set( + "humidifier.entity", + STATE_ON, + {const.ATTR_AVAILABLE_MODES: [const.MODE_HOME, const.MODE_AWAY]}, + ) + + assert await async_setup_component( + hass, + automation.DOMAIN, + { + automation.DOMAIN: [ + { + "trigger": { + "platform": "event", + "event_type": "test_event_turn_off", + }, + "action": { + "domain": DOMAIN, + "device_id": "abcdefgh", + "entity_id": "humidifier.entity", + "type": "turn_off", + }, + }, + { + "trigger": { + "platform": "event", + "event_type": "test_event_turn_on", + }, + "action": { + "domain": DOMAIN, + "device_id": "abcdefgh", + "entity_id": "humidifier.entity", + "type": "turn_on", + }, + }, + { + "trigger": {"platform": "event", "event_type": "test_event_toggle"}, + "action": { + "domain": DOMAIN, + "device_id": "abcdefgh", + "entity_id": "humidifier.entity", + "type": "toggle", + }, + }, + { + "trigger": { + "platform": "event", + "event_type": "test_event_set_humidity", + }, + "action": { + "domain": DOMAIN, + "device_id": "abcdefgh", + "entity_id": "humidifier.entity", + "type": "set_humidity", + "humidity": 35, + }, + }, + { + "trigger": { + "platform": "event", + "event_type": "test_event_set_mode", + }, + "action": { + "domain": DOMAIN, + "device_id": "abcdefgh", + "entity_id": "humidifier.entity", + "type": "set_mode", + "mode": const.MODE_AWAY, + }, + }, + ] + }, + ) + + set_humidity_calls = async_mock_service(hass, "humidifier", "set_humidity") + set_mode_calls = async_mock_service(hass, "humidifier", "set_mode") + turn_on_calls = async_mock_service(hass, "humidifier", "turn_on") + turn_off_calls = async_mock_service(hass, "humidifier", "turn_off") + toggle_calls = async_mock_service(hass, "humidifier", "toggle") + + assert len(set_humidity_calls) == 0 + assert len(set_mode_calls) == 0 + assert len(turn_on_calls) == 0 + assert len(turn_off_calls) == 0 + assert len(toggle_calls) == 0 + + hass.bus.async_fire("test_event_set_humidity") + await hass.async_block_till_done() + assert len(set_humidity_calls) == 1 + assert len(set_mode_calls) == 0 + assert len(turn_on_calls) == 0 + assert len(turn_off_calls) == 0 + assert len(toggle_calls) == 0 + + hass.bus.async_fire("test_event_set_mode") + await hass.async_block_till_done() + assert len(set_humidity_calls) == 1 + assert len(set_mode_calls) == 1 + assert len(turn_on_calls) == 0 + assert len(turn_off_calls) == 0 + assert len(toggle_calls) == 0 + + hass.bus.async_fire("test_event_turn_off") + await hass.async_block_till_done() + assert len(set_humidity_calls) == 1 + assert len(set_mode_calls) == 1 + assert len(turn_on_calls) == 0 + assert len(turn_off_calls) == 1 + assert len(toggle_calls) == 0 + + hass.bus.async_fire("test_event_turn_on") + await hass.async_block_till_done() + assert len(set_humidity_calls) == 1 + assert len(set_mode_calls) == 1 + assert len(turn_on_calls) == 1 + assert len(turn_off_calls) == 1 + assert len(toggle_calls) == 0 + + hass.bus.async_fire("test_event_toggle") + await hass.async_block_till_done() + assert len(set_humidity_calls) == 1 + assert len(set_mode_calls) == 1 + assert len(turn_on_calls) == 1 + assert len(turn_off_calls) == 1 + assert len(toggle_calls) == 1 + + +async def test_capabilities(hass): + """Test getting capabilities.""" + # Test capabililities without state + capabilities = await device_action.async_get_action_capabilities( + hass, + { + "domain": DOMAIN, + "device_id": "abcdefgh", + "entity_id": "humidifier.entity", + "type": "set_mode", + }, + ) + + assert capabilities and "extra_fields" in capabilities + + assert 
voluptuous_serialize.convert( + capabilities["extra_fields"], custom_serializer=cv.custom_serializer + ) == [{"name": "mode", "options": [], "required": True, "type": "select"}] + + # Set state + hass.states.async_set( + "humidifier.entity", + STATE_ON, + {const.ATTR_AVAILABLE_MODES: [const.MODE_HOME, const.MODE_AWAY]}, + ) + + # Set humidity + capabilities = await device_action.async_get_action_capabilities( + hass, + { + "domain": DOMAIN, + "device_id": "abcdefgh", + "entity_id": "humidifier.entity", + "type": "set_humidity", + }, + ) + + assert capabilities and "extra_fields" in capabilities + + assert voluptuous_serialize.convert( + capabilities["extra_fields"], custom_serializer=cv.custom_serializer + ) == [{"name": "humidity", "required": True, "type": "integer"}] + + # Set mode + capabilities = await device_action.async_get_action_capabilities( + hass, + { + "domain": DOMAIN, + "device_id": "abcdefgh", + "entity_id": "humidifier.entity", + "type": "set_mode", + }, + ) + + assert capabilities and "extra_fields" in capabilities + + assert voluptuous_serialize.convert( + capabilities["extra_fields"], custom_serializer=cv.custom_serializer + ) == [ + { + "name": "mode", + "options": [("home", "home"), ("away", "away")], + "required": True, + "type": "select", + } + ] diff --git a/tests/components/humidifier/test_init.py b/tests/components/humidifier/test_init.py new file mode 100644 index 00000000000..22af30d484a --- /dev/null +++ b/tests/components/humidifier/test_init.py @@ -0,0 +1,35 @@ +"""The tests for the humidifier component.""" +from unittest.mock import MagicMock + +from homeassistant.components.humidifier import HumidifierEntity + + +class MockHumidifierEntity(HumidifierEntity): + """Mock Humidifier device to use in tests.""" + + @property + def supported_features(self) -> int: + """Return the list of supported features.""" + return 0 + + +async def test_sync_turn_on(hass): + """Test if async turn_on calls sync turn_on.""" + humidifier = MockHumidifierEntity() + humidifier.hass = hass + + humidifier.turn_on = MagicMock() + await humidifier.async_turn_on() + + assert humidifier.turn_on.called + + +async def test_sync_turn_off(hass): + """Test if async turn_off calls sync turn_off.""" + humidifier = MockHumidifierEntity() + humidifier.hass = hass + + humidifier.turn_off = MagicMock() + await humidifier.async_turn_off() + + assert humidifier.turn_off.called diff --git a/tests/components/humidifier/test_reproduce_state.py b/tests/components/humidifier/test_reproduce_state.py new file mode 100644 index 00000000000..8c1f69353a0 --- /dev/null +++ b/tests/components/humidifier/test_reproduce_state.py @@ -0,0 +1,237 @@ +"""The tests for reproduction of state.""" + +import pytest + +from homeassistant.components.humidifier.const import ( + ATTR_HUMIDITY, + ATTR_MODE, + DOMAIN, + MODE_AWAY, + MODE_ECO, + MODE_NORMAL, + SERVICE_SET_HUMIDITY, + SERVICE_SET_MODE, +) +from homeassistant.components.humidifier.reproduce_state import async_reproduce_states +from homeassistant.const import SERVICE_TURN_OFF, SERVICE_TURN_ON, STATE_OFF, STATE_ON +from homeassistant.core import Context, State + +from tests.common import async_mock_service + +ENTITY_1 = "humidifier.test1" +ENTITY_2 = "humidifier.test2" + + +async def test_reproducing_on_off_states(hass, caplog): + """Test reproducing humidifier states.""" + hass.states.async_set(ENTITY_1, "off", {ATTR_MODE: MODE_NORMAL, ATTR_HUMIDITY: 45}) + hass.states.async_set(ENTITY_2, "on", {ATTR_MODE: MODE_NORMAL, ATTR_HUMIDITY: 45}) + + turn_on_calls = 
async_mock_service(hass, DOMAIN, SERVICE_TURN_ON) + turn_off_calls = async_mock_service(hass, DOMAIN, SERVICE_TURN_OFF) + mode_calls = async_mock_service(hass, DOMAIN, SERVICE_SET_MODE) + humidity_calls = async_mock_service(hass, DOMAIN, SERVICE_SET_HUMIDITY) + + # These calls should do nothing as entities already in desired state + await hass.helpers.state.async_reproduce_state( + [ + State(ENTITY_1, "off", {ATTR_MODE: MODE_NORMAL, ATTR_HUMIDITY: 45}), + State(ENTITY_2, "on", {ATTR_MODE: MODE_NORMAL, ATTR_HUMIDITY: 45}), + ], + ) + + assert len(turn_on_calls) == 0 + assert len(turn_off_calls) == 0 + assert len(mode_calls) == 0 + assert len(humidity_calls) == 0 + + # Test invalid state is handled + await hass.helpers.state.async_reproduce_state([State(ENTITY_1, "not_supported")]) + + assert "not_supported" in caplog.text + assert len(turn_on_calls) == 0 + assert len(turn_off_calls) == 0 + assert len(mode_calls) == 0 + assert len(humidity_calls) == 0 + + # Make sure correct services are called + await hass.helpers.state.async_reproduce_state( + [ + State(ENTITY_2, "off"), + State(ENTITY_1, "on", {}), + # Should not raise + State("humidifier.non_existing", "on"), + ] + ) + + assert len(turn_on_calls) == 1 + assert turn_on_calls[0].domain == "humidifier" + assert turn_on_calls[0].data == {"entity_id": ENTITY_1} + + assert len(turn_off_calls) == 1 + assert turn_off_calls[0].domain == "humidifier" + assert turn_off_calls[0].data == {"entity_id": ENTITY_2} + + # Make sure we didn't call services for missing attributes + assert len(mode_calls) == 0 + assert len(humidity_calls) == 0 + + +async def test_multiple_attrs(hass): + """Test turn on with multiple attributes.""" + hass.states.async_set(ENTITY_1, STATE_OFF, {}) + + turn_on_calls = async_mock_service(hass, DOMAIN, SERVICE_TURN_ON) + turn_off_calls = async_mock_service(hass, DOMAIN, SERVICE_TURN_OFF) + mode_calls = async_mock_service(hass, DOMAIN, SERVICE_SET_MODE) + humidity_calls = async_mock_service(hass, DOMAIN, SERVICE_SET_HUMIDITY) + + await async_reproduce_states( + hass, [State(ENTITY_1, STATE_ON, {ATTR_MODE: MODE_NORMAL, ATTR_HUMIDITY: 45})] + ) + + await hass.async_block_till_done() + + assert len(turn_on_calls) == 1 + assert turn_on_calls[0].data == {"entity_id": ENTITY_1} + assert len(turn_off_calls) == 0 + assert len(mode_calls) == 1 + assert mode_calls[0].data == {"entity_id": ENTITY_1, "mode": "normal"} + assert len(humidity_calls) == 1 + assert humidity_calls[0].data == {"entity_id": ENTITY_1, "humidity": 45} + + +async def test_turn_off_multiple_attrs(hass): + """Test set mode and humidity for off state.""" + hass.states.async_set(ENTITY_1, STATE_ON, {}) + + turn_on_calls = async_mock_service(hass, DOMAIN, SERVICE_TURN_ON) + turn_off_calls = async_mock_service(hass, DOMAIN, SERVICE_TURN_OFF) + mode_calls = async_mock_service(hass, DOMAIN, SERVICE_SET_MODE) + humidity_calls = async_mock_service(hass, DOMAIN, SERVICE_SET_HUMIDITY) + + await async_reproduce_states( + hass, [State(ENTITY_1, STATE_OFF, {ATTR_MODE: MODE_NORMAL, ATTR_HUMIDITY: 45})] + ) + + await hass.async_block_till_done() + + assert len(turn_on_calls) == 0 + assert len(turn_off_calls) == 1 + assert turn_off_calls[0].data == {"entity_id": ENTITY_1} + assert len(mode_calls) == 0 + assert len(humidity_calls) == 0 + + +async def test_multiple_modes(hass): + """Test that multiple states gets calls.""" + hass.states.async_set(ENTITY_1, STATE_OFF, {}) + hass.states.async_set(ENTITY_2, STATE_OFF, {}) + + turn_on_calls = async_mock_service(hass, DOMAIN, SERVICE_TURN_ON) + 
turn_off_calls = async_mock_service(hass, DOMAIN, SERVICE_TURN_OFF) + mode_calls = async_mock_service(hass, DOMAIN, SERVICE_SET_MODE) + humidity_calls = async_mock_service(hass, DOMAIN, SERVICE_SET_HUMIDITY) + + await async_reproduce_states( + hass, + [ + State(ENTITY_1, STATE_ON, {ATTR_MODE: MODE_ECO, ATTR_HUMIDITY: 40}), + State(ENTITY_2, STATE_ON, {ATTR_MODE: MODE_NORMAL, ATTR_HUMIDITY: 50}), + ], + ) + + await hass.async_block_till_done() + + assert len(turn_on_calls) == 2 + assert len(turn_off_calls) == 0 + assert len(mode_calls) == 2 + # order is not guaranteed + assert any( + call.data == {"entity_id": ENTITY_1, "mode": MODE_ECO} for call in mode_calls + ) + assert any( + call.data == {"entity_id": ENTITY_2, "mode": MODE_NORMAL} for call in mode_calls + ) + assert len(humidity_calls) == 2 + # order is not guaranteed + assert any( + call.data == {"entity_id": ENTITY_1, "humidity": 40} for call in humidity_calls + ) + assert any( + call.data == {"entity_id": ENTITY_2, "humidity": 50} for call in humidity_calls + ) + + +async def test_state_with_none(hass): + """Test that none is not a humidifier state.""" + hass.states.async_set(ENTITY_1, STATE_OFF, {}) + + turn_on_calls = async_mock_service(hass, DOMAIN, SERVICE_TURN_ON) + turn_off_calls = async_mock_service(hass, DOMAIN, SERVICE_TURN_OFF) + mode_calls = async_mock_service(hass, DOMAIN, SERVICE_SET_MODE) + humidity_calls = async_mock_service(hass, DOMAIN, SERVICE_SET_HUMIDITY) + + await async_reproduce_states(hass, [State(ENTITY_1, None)]) + + await hass.async_block_till_done() + + assert len(turn_on_calls) == 0 + assert len(turn_off_calls) == 0 + assert len(mode_calls) == 0 + assert len(humidity_calls) == 0 + + +async def test_state_with_context(hass): + """Test that context is forwarded.""" + hass.states.async_set(ENTITY_1, STATE_OFF, {}) + + turn_on_calls = async_mock_service(hass, DOMAIN, SERVICE_TURN_ON) + turn_off_calls = async_mock_service(hass, DOMAIN, SERVICE_TURN_OFF) + mode_calls = async_mock_service(hass, DOMAIN, SERVICE_SET_MODE) + humidity_calls = async_mock_service(hass, DOMAIN, SERVICE_SET_HUMIDITY) + + context = Context() + + await async_reproduce_states( + hass, + [State(ENTITY_1, STATE_ON, {ATTR_MODE: MODE_AWAY, ATTR_HUMIDITY: 45})], + context=context, + ) + + await hass.async_block_till_done() + + assert len(turn_on_calls) == 1 + assert turn_on_calls[0].data == {"entity_id": ENTITY_1} + assert turn_on_calls[0].context == context + assert len(turn_off_calls) == 0 + assert len(mode_calls) == 1 + assert mode_calls[0].data == {"entity_id": ENTITY_1, "mode": "away"} + assert mode_calls[0].context == context + assert len(humidity_calls) == 1 + assert humidity_calls[0].data == {"entity_id": ENTITY_1, "humidity": 45} + assert humidity_calls[0].context == context + + +@pytest.mark.parametrize( + "service,attribute", + [(SERVICE_SET_MODE, ATTR_MODE), (SERVICE_SET_HUMIDITY, ATTR_HUMIDITY)], +) +async def test_attribute(hass, service, attribute): + """Test that service call is made for each attribute.""" + hass.states.async_set(ENTITY_1, STATE_ON, {}) + + turn_on_calls = async_mock_service(hass, DOMAIN, SERVICE_TURN_ON) + turn_off_calls = async_mock_service(hass, DOMAIN, SERVICE_TURN_OFF) + calls_1 = async_mock_service(hass, DOMAIN, service) + + value = "dummy" + + await async_reproduce_states(hass, [State(ENTITY_1, STATE_ON, {attribute: value})]) + + await hass.async_block_till_done() + + assert len(turn_on_calls) == 0 + assert len(turn_off_calls) == 0 + assert len(calls_1) == 1 + assert calls_1[0].data == {"entity_id": 
ENTITY_1, attribute: value} diff --git a/tests/components/hvv_departures/__init__.py b/tests/components/hvv_departures/__init__.py new file mode 100644 index 00000000000..bc238f43f5e --- /dev/null +++ b/tests/components/hvv_departures/__init__.py @@ -0,0 +1 @@ +"""Tests for the HVV Departures integration.""" diff --git a/tests/components/hvv_departures/test_config_flow.py b/tests/components/hvv_departures/test_config_flow.py new file mode 100644 index 00000000000..3f9098abfc8 --- /dev/null +++ b/tests/components/hvv_departures/test_config_flow.py @@ -0,0 +1,344 @@ +"""Test the HVV Departures config flow.""" +import json + +from pygti.exceptions import CannotConnect, InvalidAuth + +from homeassistant import data_entry_flow +from homeassistant.components.hvv_departures.const import ( + CONF_FILTER, + CONF_REAL_TIME, + CONF_STATION, + DOMAIN, +) +from homeassistant.config_entries import CONN_CLASS_CLOUD_POLL, SOURCE_USER +from homeassistant.const import CONF_HOST, CONF_OFFSET, CONF_PASSWORD, CONF_USERNAME + +from tests.async_mock import patch +from tests.common import MockConfigEntry, load_fixture + +FIXTURE_INIT = json.loads(load_fixture("hvv_departures/init.json")) +FIXTURE_CHECK_NAME = json.loads(load_fixture("hvv_departures/check_name.json")) +FIXTURE_STATION_INFORMATION = json.loads( + load_fixture("hvv_departures/station_information.json") +) +FIXTURE_CONFIG_ENTRY = json.loads(load_fixture("hvv_departures/config_entry.json")) +FIXTURE_OPTIONS = json.loads(load_fixture("hvv_departures/options.json")) +FIXTURE_DEPARTURE_LIST = json.loads(load_fixture("hvv_departures/departure_list.json")) + + +async def test_user_flow(hass): + """Test that config flow works.""" + + with patch( + "homeassistant.components.hvv_departures.hub.GTI.init", + return_value=FIXTURE_INIT, + ), patch("pygti.gti.GTI.checkName", return_value=FIXTURE_CHECK_NAME,), patch( + "pygti.gti.GTI.stationInformation", return_value=FIXTURE_STATION_INFORMATION, + ), patch( + "homeassistant.components.hvv_departures.async_setup", return_value=True + ), patch( + "homeassistant.components.hvv_departures.async_setup_entry", return_value=True, + ): + + # step: user + + result_user = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_USER}, + data={ + CONF_HOST: "api-test.geofox.de", + CONF_USERNAME: "test-username", + CONF_PASSWORD: "test-password", + }, + ) + + assert result_user["step_id"] == "station" + + # step: station + result_station = await hass.config_entries.flow.async_configure( + result_user["flow_id"], {CONF_STATION: "Wartenau"}, + ) + + assert result_station["step_id"] == "station_select" + + # step: station_select + result_station_select = await hass.config_entries.flow.async_configure( + result_user["flow_id"], {CONF_STATION: "Wartenau"}, + ) + + assert result_station_select["type"] == "create_entry" + assert result_station_select["title"] == "Wartenau" + assert result_station_select["data"] == { + CONF_HOST: "api-test.geofox.de", + CONF_USERNAME: "test-username", + CONF_PASSWORD: "test-password", + CONF_STATION: { + "name": "Wartenau", + "city": "Hamburg", + "combinedName": "Wartenau", + "id": "Master:10901", + "type": "STATION", + "coordinate": {"x": 10.035515, "y": 53.56478}, + "serviceTypes": ["bus", "u"], + "hasStationInformation": True, + }, + } + + +async def test_user_flow_no_results(hass): + """Test that config flow works when there are no results.""" + + with patch( + "homeassistant.components.hvv_departures.hub.GTI.init", + return_value=FIXTURE_INIT, + ), patch( + 
"pygti.gti.GTI.checkName", return_value={"returnCode": "OK", "results": []}, + ), patch( + "homeassistant.components.hvv_departures.async_setup", return_value=True + ), patch( + "homeassistant.components.hvv_departures.async_setup_entry", return_value=True, + ): + + # step: user + + result_user = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_USER}, + data={ + CONF_HOST: "api-test.geofox.de", + CONF_USERNAME: "test-username", + CONF_PASSWORD: "test-password", + }, + ) + + assert result_user["step_id"] == "station" + + # step: station + result_station = await hass.config_entries.flow.async_configure( + result_user["flow_id"], {CONF_STATION: "non_existing_station"}, + ) + + assert result_station["step_id"] == "station" + assert result_station["errors"]["base"] == "no_results" + + +async def test_user_flow_invalid_auth(hass): + """Test that config flow handles invalid auth.""" + + with patch( + "homeassistant.components.hvv_departures.hub.GTI.init", + side_effect=InvalidAuth( + "ERROR_TEXT", + "Bei der Verarbeitung der Anfrage ist ein technisches Problem aufgetreten.", + "Authentication failed!", + ), + ): + + # step: user + result_user = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_USER}, + data={ + CONF_HOST: "api-test.geofox.de", + CONF_USERNAME: "test-username", + CONF_PASSWORD: "test-password", + }, + ) + + assert result_user["type"] == "form" + assert result_user["errors"] == {"base": "invalid_auth"} + + +async def test_user_flow_cannot_connect(hass): + """Test that config flow handles connection errors.""" + + with patch( + "homeassistant.components.hvv_departures.hub.GTI.init", + side_effect=CannotConnect(), + ): + + # step: user + result_user = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_USER}, + data={ + CONF_HOST: "api-test.geofox.de", + CONF_USERNAME: "test-username", + CONF_PASSWORD: "test-password", + }, + ) + + assert result_user["type"] == "form" + assert result_user["errors"] == {"base": "cannot_connect"} + + +async def test_user_flow_station(hass): + """Test that config flow handles empty data on step station.""" + + with patch( + "homeassistant.components.hvv_departures.hub.GTI.init", return_value=True, + ), patch( + "pygti.gti.GTI.checkName", return_value={"returnCode": "OK", "results": []}, + ): + + # step: user + + result_user = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_USER}, + data={ + CONF_HOST: "api-test.geofox.de", + CONF_USERNAME: "test-username", + CONF_PASSWORD: "test-password", + }, + ) + + assert result_user["step_id"] == "station" + + # step: station + result_station = await hass.config_entries.flow.async_configure( + result_user["flow_id"], None, + ) + assert result_station["type"] == "form" + assert result_station["step_id"] == "station" + + +async def test_user_flow_station_select(hass): + """Test that config flow handles empty data on step station_select.""" + + with patch( + "homeassistant.components.hvv_departures.hub.GTI.init", return_value=True, + ), patch( + "pygti.gti.GTI.checkName", return_value=FIXTURE_CHECK_NAME, + ): + result_user = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_USER}, + data={ + CONF_HOST: "api-test.geofox.de", + CONF_USERNAME: "test-username", + CONF_PASSWORD: "test-password", + }, + ) + + result_station = await hass.config_entries.flow.async_configure( + result_user["flow_id"], {CONF_STATION: "Wartenau"}, + ) + + # step: station_select + 
result_station_select = await hass.config_entries.flow.async_configure( + result_station["flow_id"], None, + ) + + assert result_station_select["type"] == "form" + assert result_station_select["step_id"] == "station_select" + + +async def test_options_flow(hass): + """Test that options flow works.""" + + config_entry = MockConfigEntry( + version=1, + domain=DOMAIN, + title="Wartenau", + data=FIXTURE_CONFIG_ENTRY, + source="user", + connection_class=CONN_CLASS_CLOUD_POLL, + system_options={"disable_new_entities": False}, + options=FIXTURE_OPTIONS, + unique_id="1234", + ) + config_entry.add_to_hass(hass) + + with patch( + "homeassistant.components.hvv_departures.hub.GTI.init", return_value=True, + ), patch( + "pygti.gti.GTI.departureList", return_value=FIXTURE_DEPARTURE_LIST, + ): + assert await hass.config_entries.async_setup(config_entry.entry_id) + + result = await hass.config_entries.options.async_init(config_entry.entry_id) + + assert result["type"] == data_entry_flow.RESULT_TYPE_FORM + assert result["step_id"] == "init" + + result = await hass.config_entries.options.async_configure( + result["flow_id"], + user_input={CONF_FILTER: ["0"], CONF_OFFSET: 15, CONF_REAL_TIME: False}, + ) + + assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY + assert config_entry.options == { + CONF_FILTER: [ + { + "serviceID": "HHA-U:U1_HHA-U", + "stationIDs": ["Master:10902"], + "label": "Fuhlsbüttel Nord / Ochsenzoll / Norderstedt Mitte / Kellinghusenstraße / Ohlsdorf / Garstedt", + "serviceName": "U1", + } + ], + CONF_OFFSET: 15, + CONF_REAL_TIME: False, + } + + +async def test_options_flow_invalid_auth(hass): + """Test that options flow handles invalid auth.""" + + config_entry = MockConfigEntry( + version=1, + domain=DOMAIN, + title="Wartenau", + data=FIXTURE_CONFIG_ENTRY, + source="user", + connection_class=CONN_CLASS_CLOUD_POLL, + system_options={"disable_new_entities": False}, + options=FIXTURE_OPTIONS, + unique_id="1234", + ) + config_entry.add_to_hass(hass) + + with patch( + "homeassistant.components.hvv_departures.hub.GTI.init", + side_effect=InvalidAuth( + "ERROR_TEXT", + "Bei der Verarbeitung der Anfrage ist ein technisches Problem aufgetreten.", + "Authentication failed!", + ), + ): + assert await hass.config_entries.async_setup(config_entry.entry_id) + result = await hass.config_entries.options.async_init(config_entry.entry_id) + + assert result["type"] == data_entry_flow.RESULT_TYPE_FORM + assert result["step_id"] == "init" + + assert result["errors"] == {"base": "invalid_auth"} + + +async def test_options_flow_cannot_connect(hass): + """Test that options flow handles connection errors.""" + + config_entry = MockConfigEntry( + version=1, + domain=DOMAIN, + title="Wartenau", + data=FIXTURE_CONFIG_ENTRY, + source="user", + connection_class=CONN_CLASS_CLOUD_POLL, + system_options={"disable_new_entities": False}, + options=FIXTURE_OPTIONS, + unique_id="1234", + ) + config_entry.add_to_hass(hass) + + with patch( + "pygti.gti.GTI.departureList", side_effect=CannotConnect(), + ): + assert await hass.config_entries.async_setup(config_entry.entry_id) + + result = await hass.config_entries.options.async_init(config_entry.entry_id) + + assert result["type"] == data_entry_flow.RESULT_TYPE_FORM + assert result["step_id"] == "init" + + assert result["errors"] == {"base": "cannot_connect"} diff --git a/tests/components/imap_email_content/test_sensor.py b/tests/components/imap_email_content/test_sensor.py index ee39bac51ef..3a0c006d15f 100644 --- a/tests/components/imap_email_content/test_sensor.py +++ 
b/tests/components/imap_email_content/test_sensor.py @@ -37,10 +37,7 @@ class EmailContentSensor(unittest.TestCase): def setUp(self): """Set up things to be run when tests are started.""" self.hass = get_test_home_assistant() - - def tearDown(self): - """Stop everything that was started.""" - self.hass.stop() + self.addCleanup(self.hass.stop) def test_allowed_sender(self): """Test emails from allowed sender.""" diff --git a/tests/components/influxdb/test_init.py b/tests/components/influxdb/test_init.py index bdfd41eae83..f9514f7ebff 100644 --- a/tests/components/influxdb/test_init.py +++ b/tests/components/influxdb/test_init.py @@ -1,7 +1,7 @@ """The tests for the InfluxDB component.""" import datetime -import unittest -from unittest import mock + +import pytest import homeassistant.components.influxdb as influxdb from homeassistant.const import ( @@ -11,748 +11,1103 @@ from homeassistant.const import ( STATE_STANDBY, UNIT_PERCENTAGE, ) -from homeassistant.setup import setup_component +from homeassistant.setup import async_setup_component -from tests.common import get_test_home_assistant +from tests.async_mock import MagicMock, Mock, call, patch + +BASE_V1_CONFIG = {} +BASE_V2_CONFIG = { + "api_version": influxdb.API_VERSION_2, + "organization": "org", + "token": "token", +} -@mock.patch("homeassistant.components.influxdb.InfluxDBClient") -@mock.patch( - "homeassistant.components.influxdb.InfluxThread.batch_timeout", - mock.Mock(return_value=0), -) -class TestInfluxDB(unittest.TestCase): - """Test the InfluxDB component.""" +@pytest.fixture(autouse=True) +def mock_batch_timeout(hass, monkeypatch): + """Mock the event bus listener and the batch timeout for tests.""" + hass.bus.listen = MagicMock() + monkeypatch.setattr( + "homeassistant.components.influxdb.InfluxThread.batch_timeout", + Mock(return_value=0), + ) - def setUp(self): - """Set up things to be run when tests are started.""" - self.hass = get_test_home_assistant() - self.handler_method = None - self.hass.bus.listen = mock.Mock() - def tearDown(self): - """Clear data.""" - self.hass.stop() +@pytest.fixture(name="mock_client") +def mock_client_fixture(request): + """Patch the InfluxDBClient object with mock for version under test.""" + if request.param == influxdb.API_VERSION_2: + client_target = "homeassistant.components.influxdb.InfluxDBClientV2" + else: + client_target = "homeassistant.components.influxdb.InfluxDBClient" - def test_setup_config_full(self, mock_client): - """Test the setup with full configuration.""" - config = { - "influxdb": { - "host": "host", - "port": 123, - "database": "db", + with patch(client_target) as client: + yield client + + +@pytest.fixture(name="get_mock_call") +def get_mock_call_fixture(request): + """Get version specific lambda to make write API call mock.""" + if request.param == influxdb.API_VERSION_2: + return lambda body: call(bucket=influxdb.DEFAULT_BUCKET, record=body) + # pylint: disable=unnecessary-lambda + return lambda body: call(body) + + +def _get_write_api_mock_v1(mock_influx_client): + """Return the write api mock for the V1 client.""" + return mock_influx_client.return_value.write_points + + +def _get_write_api_mock_v2(mock_influx_client): + """Return the write api mock for the V2 client.""" + return mock_influx_client.return_value.write_api.return_value.write + + +@pytest.mark.parametrize( + "mock_client, config_ext, get_write_api", + [ + ( + influxdb.DEFAULT_API_VERSION, + { + "api_version": influxdb.DEFAULT_API_VERSION, "username": "user", "password": "password", - 
"max_retries": 4, - "ssl": "False", "verify_ssl": "False", - } + }, + _get_write_api_mock_v1, + ), + ( + influxdb.API_VERSION_2, + { + "api_version": influxdb.API_VERSION_2, + "token": "token", + "organization": "organization", + "bucket": "bucket", + }, + _get_write_api_mock_v2, + ), + ], + indirect=["mock_client"], +) +async def test_setup_config_full(hass, mock_client, config_ext, get_write_api): + """Test the setup with full configuration.""" + config = { + "influxdb": { + "host": "host", + "port": 123, + "database": "db", + "max_retries": 4, + "ssl": "False", } - assert setup_component(self.hass, influxdb.DOMAIN, config) - assert self.hass.bus.listen.called - assert EVENT_STATE_CHANGED == self.hass.bus.listen.call_args_list[0][0][0] - assert mock_client.return_value.write_points.call_count == 1 + } + config["influxdb"].update(config_ext) - def test_setup_config_defaults(self, mock_client): - """Test the setup with default configuration.""" - config = {"influxdb": {"host": "host", "username": "user", "password": "pass"}} - assert setup_component(self.hass, influxdb.DOMAIN, config) - assert self.hass.bus.listen.called - assert EVENT_STATE_CHANGED == self.hass.bus.listen.call_args_list[0][0][0] + assert await async_setup_component(hass, influxdb.DOMAIN, config) + await hass.async_block_till_done() + assert hass.bus.listen.called + assert EVENT_STATE_CHANGED == hass.bus.listen.call_args_list[0][0][0] + assert get_write_api(mock_client).call_count == 1 - def test_setup_minimal_config(self, mock_client): - """Test the setup with minimal configuration.""" - config = {"influxdb": {}} - assert setup_component(self.hass, influxdb.DOMAIN, config) +@pytest.mark.parametrize( + "mock_client, config_ext, get_write_api", + [ + (influxdb.DEFAULT_API_VERSION, BASE_V1_CONFIG, _get_write_api_mock_v1), + (influxdb.API_VERSION_2, BASE_V2_CONFIG, _get_write_api_mock_v2), + ], + indirect=["mock_client"], +) +async def test_setup_minimal_config(hass, mock_client, config_ext, get_write_api): + """Test the setup with minimal configuration and defaults.""" + config = {"influxdb": {}} + config["influxdb"].update(config_ext) - def test_setup_missing_password(self, mock_client): - """Test the setup with existing username and missing password.""" - config = {"influxdb": {"username": "user"}} + assert await async_setup_component(hass, influxdb.DOMAIN, config) + await hass.async_block_till_done() + assert hass.bus.listen.called + assert EVENT_STATE_CHANGED == hass.bus.listen.call_args_list[0][0][0] + assert get_write_api(mock_client).call_count == 1 - assert not setup_component(self.hass, influxdb.DOMAIN, config) - def _setup(self, mock_client, **kwargs): - """Set up the client.""" - config = { - "influxdb": { - "host": "host", +@pytest.mark.parametrize( + "mock_client, config_ext, get_write_api", + [ + (influxdb.DEFAULT_API_VERSION, {"username": "user"}, _get_write_api_mock_v1), + (influxdb.DEFAULT_API_VERSION, {"token": "token"}, _get_write_api_mock_v1), + ( + influxdb.API_VERSION_2, + {"api_version": influxdb.API_VERSION_2, "organization": "organization"}, + _get_write_api_mock_v2, + ), + ( + influxdb.API_VERSION_2, + { + "api_version": influxdb.API_VERSION_2, + "token": "token", + "organization": "organization", "username": "user", "password": "pass", - "exclude": { - "entities": ["fake.blacklisted"], - "domains": ["another_fake"], - }, - } + }, + _get_write_api_mock_v2, + ), + ], + indirect=["mock_client"], +) +async def test_invalid_config(hass, mock_client, config_ext, get_write_api): + """Test the setup with 
invalid config or config options specified for wrong version.""" + config = {"influxdb": {}} + config["influxdb"].update(config_ext) + + assert not await async_setup_component(hass, influxdb.DOMAIN, config) + + +async def _setup(hass, mock_influx_client, config_ext, get_write_api): + """Prepare client for next test and return event handler method.""" + config = { + "influxdb": { + "host": "host", + "exclude": {"entities": ["fake.blacklisted"], "domains": ["another_fake"]}, } - config["influxdb"].update(kwargs) - assert setup_component(self.hass, influxdb.DOMAIN, config) - self.handler_method = self.hass.bus.listen.call_args_list[0][0][1] - mock_client.return_value.write_points.reset_mock() + } + config["influxdb"].update(config_ext) + assert await async_setup_component(hass, influxdb.DOMAIN, config) + await hass.async_block_till_done() + # A call is made to the write API during setup to test the connection. + # Therefore we reset the write API mock here before the test begins. + get_write_api(mock_influx_client).reset_mock() + return hass.bus.listen.call_args_list[0][0][1] - def test_event_listener(self, mock_client): - """Test the event listener.""" - self._setup(mock_client) - # map of HA State to valid influxdb [state, value] fields - valid = { - "1": [None, 1], - "1.0": [None, 1.0], - STATE_ON: [STATE_ON, 1], - STATE_OFF: [STATE_OFF, 0], - STATE_STANDBY: [STATE_STANDBY, None], - "foo": ["foo", None], +@pytest.mark.parametrize( + "mock_client, config_ext, get_write_api, get_mock_call", + [ + ( + influxdb.DEFAULT_API_VERSION, + BASE_V1_CONFIG, + _get_write_api_mock_v1, + influxdb.DEFAULT_API_VERSION, + ), + ( + influxdb.API_VERSION_2, + BASE_V2_CONFIG, + _get_write_api_mock_v2, + influxdb.API_VERSION_2, + ), + ], + indirect=["mock_client", "get_mock_call"], +) +async def test_event_listener( + hass, mock_client, config_ext, get_write_api, get_mock_call +): + """Test the event listener.""" + handler_method = await _setup(hass, mock_client, config_ext, get_write_api) + + # map of HA State to valid influxdb [state, value] fields + valid = { + "1": [None, 1], + "1.0": [None, 1.0], + STATE_ON: [STATE_ON, 1], + STATE_OFF: [STATE_OFF, 0], + STATE_STANDBY: [STATE_STANDBY, None], + "foo": ["foo", None], + } + for in_, out in valid.items(): + attrs = { + "unit_of_measurement": "foobars", + "longitude": "1.1", + "latitude": "2.2", + "battery_level": f"99{UNIT_PERCENTAGE}", + "temperature": "20c", + "last_seen": "Last seen 23 minutes ago", + "updated_at": datetime.datetime(2017, 1, 1, 0, 0), + "multi_periods": "0.120.240.2023873", } - for in_, out in valid.items(): - attrs = { - "unit_of_measurement": "foobars", - "longitude": "1.1", - "latitude": "2.2", - "battery_level": f"99{UNIT_PERCENTAGE}", - "temperature": "20c", - "last_seen": "Last seen 23 minutes ago", - "updated_at": datetime.datetime(2017, 1, 1, 0, 0), - "multi_periods": "0.120.240.2023873", - } - state = mock.MagicMock( - state=in_, - domain="fake", - entity_id="fake.entity-id", - object_id="entity", - attributes=attrs, - ) - event = mock.MagicMock(data={"new_state": state}, time_fired=12345) - body = [ - { - "measurement": "foobars", - "tags": {"domain": "fake", "entity_id": "entity"}, - "time": 12345, - "fields": { - "longitude": 1.1, - "latitude": 2.2, - "battery_level_str": f"99{UNIT_PERCENTAGE}", - "battery_level": 99.0, - "temperature_str": "20c", - "temperature": 20.0, - "last_seen_str": "Last seen 23 minutes ago", - "last_seen": 23.0, - "updated_at_str": "2017-01-01 00:00:00", - "updated_at": 20170101000000, - 
"multi_periods_str": "0.120.240.2023873", - }, - } - ] - if out[0] is not None: - body[0]["fields"]["state"] = out[0] - if out[1] is not None: - body[0]["fields"]["value"] = out[1] - - self.handler_method(event) - self.hass.data[influxdb.DOMAIN].block_till_done() - - assert mock_client.return_value.write_points.call_count == 1 - assert mock_client.return_value.write_points.call_args == mock.call(body) - mock_client.return_value.write_points.reset_mock() - - def test_event_listener_no_units(self, mock_client): - """Test the event listener for missing units.""" - self._setup(mock_client) - - for unit in (None, ""): - if unit: - attrs = {"unit_of_measurement": unit} - else: - attrs = {} - state = mock.MagicMock( - state=1, - domain="fake", - entity_id="fake.entity-id", - object_id="entity", - attributes=attrs, - ) - event = mock.MagicMock(data={"new_state": state}, time_fired=12345) - body = [ - { - "measurement": "fake.entity-id", - "tags": {"domain": "fake", "entity_id": "entity"}, - "time": 12345, - "fields": {"value": 1}, - } - ] - self.handler_method(event) - self.hass.data[influxdb.DOMAIN].block_till_done() - assert mock_client.return_value.write_points.call_count == 1 - assert mock_client.return_value.write_points.call_args == mock.call(body) - mock_client.return_value.write_points.reset_mock() - - def test_event_listener_inf(self, mock_client): - """Test the event listener for missing units.""" - self._setup(mock_client) - - attrs = {"bignumstring": "9" * 999, "nonumstring": "nan"} - state = mock.MagicMock( - state=8, + state = MagicMock( + state=in_, domain="fake", entity_id="fake.entity-id", object_id="entity", attributes=attrs, ) - event = mock.MagicMock(data={"new_state": state}, time_fired=12345) + event = MagicMock(data={"new_state": state}, time_fired=12345) + body = [ + { + "measurement": "foobars", + "tags": {"domain": "fake", "entity_id": "entity"}, + "time": 12345, + "fields": { + "longitude": 1.1, + "latitude": 2.2, + "battery_level_str": f"99{UNIT_PERCENTAGE}", + "battery_level": 99.0, + "temperature_str": "20c", + "temperature": 20.0, + "last_seen_str": "Last seen 23 minutes ago", + "last_seen": 23.0, + "updated_at_str": "2017-01-01 00:00:00", + "updated_at": 20170101000000, + "multi_periods_str": "0.120.240.2023873", + }, + } + ] + if out[0] is not None: + body[0]["fields"]["state"] = out[0] + if out[1] is not None: + body[0]["fields"]["value"] = out[1] + + handler_method(event) + hass.data[influxdb.DOMAIN].block_till_done() + + write_api = get_write_api(mock_client) + assert write_api.call_count == 1 + assert write_api.call_args == get_mock_call(body) + write_api.reset_mock() + + +@pytest.mark.parametrize( + "mock_client, config_ext, get_write_api, get_mock_call", + [ + ( + influxdb.DEFAULT_API_VERSION, + BASE_V1_CONFIG, + _get_write_api_mock_v1, + influxdb.DEFAULT_API_VERSION, + ), + ( + influxdb.API_VERSION_2, + BASE_V2_CONFIG, + _get_write_api_mock_v2, + influxdb.API_VERSION_2, + ), + ], + indirect=["mock_client", "get_mock_call"], +) +async def test_event_listener_no_units( + hass, mock_client, config_ext, get_write_api, get_mock_call +): + """Test the event listener for missing units.""" + handler_method = await _setup(hass, mock_client, config_ext, get_write_api) + + for unit in (None, ""): + if unit: + attrs = {"unit_of_measurement": unit} + else: + attrs = {} + state = MagicMock( + state=1, + domain="fake", + entity_id="fake.entity-id", + object_id="entity", + attributes=attrs, + ) + event = MagicMock(data={"new_state": state}, time_fired=12345) body = [ { 
"measurement": "fake.entity-id", "tags": {"domain": "fake", "entity_id": "entity"}, "time": 12345, - "fields": {"value": 8}, + "fields": {"value": 1}, } ] - self.handler_method(event) - self.hass.data[influxdb.DOMAIN].block_till_done() - assert mock_client.return_value.write_points.call_count == 1 - assert mock_client.return_value.write_points.call_args == mock.call(body) - mock_client.return_value.write_points.reset_mock() + handler_method(event) + hass.data[influxdb.DOMAIN].block_till_done() - def test_event_listener_states(self, mock_client): - """Test the event listener against ignored states.""" - self._setup(mock_client) + write_api = get_write_api(mock_client) + assert write_api.call_count == 1 + assert write_api.call_args == get_mock_call(body) + write_api.reset_mock() - for state_state in (1, "unknown", "", "unavailable"): - state = mock.MagicMock( - state=state_state, - domain="fake", - entity_id="fake.entity-id", - object_id="entity", - attributes={}, - ) - event = mock.MagicMock(data={"new_state": state}, time_fired=12345) - body = [ - { - "measurement": "fake.entity-id", - "tags": {"domain": "fake", "entity_id": "entity"}, - "time": 12345, - "fields": {"value": 1}, - } - ] - self.handler_method(event) - self.hass.data[influxdb.DOMAIN].block_till_done() - if state_state == 1: - assert mock_client.return_value.write_points.call_count == 1 - assert mock_client.return_value.write_points.call_args == mock.call( - body - ) - else: - assert not mock_client.return_value.write_points.called - mock_client.return_value.write_points.reset_mock() - def test_event_listener_blacklist(self, mock_client): - """Test the event listener against a blacklist.""" - self._setup(mock_client) +@pytest.mark.parametrize( + "mock_client, config_ext, get_write_api, get_mock_call", + [ + ( + influxdb.DEFAULT_API_VERSION, + BASE_V1_CONFIG, + _get_write_api_mock_v1, + influxdb.DEFAULT_API_VERSION, + ), + ( + influxdb.API_VERSION_2, + BASE_V2_CONFIG, + _get_write_api_mock_v2, + influxdb.API_VERSION_2, + ), + ], + indirect=["mock_client", "get_mock_call"], +) +async def test_event_listener_inf( + hass, mock_client, config_ext, get_write_api, get_mock_call +): + """Test the event listener with large or invalid numbers.""" + handler_method = await _setup(hass, mock_client, config_ext, get_write_api) - for entity_id in ("ok", "blacklisted"): - state = mock.MagicMock( - state=1, - domain="fake", - entity_id=f"fake.{entity_id}", - object_id=entity_id, - attributes={}, - ) - event = mock.MagicMock(data={"new_state": state}, time_fired=12345) - body = [ - { - "measurement": f"fake.{entity_id}", - "tags": {"domain": "fake", "entity_id": entity_id}, - "time": 12345, - "fields": {"value": 1}, - } - ] - self.handler_method(event) - self.hass.data[influxdb.DOMAIN].block_till_done() - if entity_id == "ok": - assert mock_client.return_value.write_points.call_count == 1 - assert mock_client.return_value.write_points.call_args == mock.call( - body - ) - else: - assert not mock_client.return_value.write_points.called - mock_client.return_value.write_points.reset_mock() - - def test_event_listener_blacklist_domain(self, mock_client): - """Test the event listener against a blacklist.""" - self._setup(mock_client) - - for domain in ("ok", "another_fake"): - state = mock.MagicMock( - state=1, - domain=domain, - entity_id=f"{domain}.something", - object_id="something", - attributes={}, - ) - event = mock.MagicMock(data={"new_state": state}, time_fired=12345) - body = [ - { - "measurement": f"{domain}.something", - "tags": {"domain": 
domain, "entity_id": "something"}, - "time": 12345, - "fields": {"value": 1}, - } - ] - self.handler_method(event) - self.hass.data[influxdb.DOMAIN].block_till_done() - if domain == "ok": - assert mock_client.return_value.write_points.call_count == 1 - assert mock_client.return_value.write_points.call_args == mock.call( - body - ) - else: - assert not mock_client.return_value.write_points.called - mock_client.return_value.write_points.reset_mock() - - def test_event_listener_whitelist(self, mock_client): - """Test the event listener against a whitelist.""" - config = { - "influxdb": { - "host": "host", - "username": "user", - "password": "pass", - "include": {"entities": ["fake.included"]}, - } + attrs = {"bignumstring": "9" * 999, "nonumstring": "nan"} + state = MagicMock( + state=8, + domain="fake", + entity_id="fake.entity-id", + object_id="entity", + attributes=attrs, + ) + event = MagicMock(data={"new_state": state}, time_fired=12345) + body = [ + { + "measurement": "fake.entity-id", + "tags": {"domain": "fake", "entity_id": "entity"}, + "time": 12345, + "fields": {"value": 8}, } - assert setup_component(self.hass, influxdb.DOMAIN, config) - self.handler_method = self.hass.bus.listen.call_args_list[0][0][1] - mock_client.return_value.write_points.reset_mock() + ] + handler_method(event) + hass.data[influxdb.DOMAIN].block_till_done() - for entity_id in ("included", "default"): - state = mock.MagicMock( - state=1, - domain="fake", - entity_id=f"fake.{entity_id}", - object_id=entity_id, - attributes={}, - ) - event = mock.MagicMock(data={"new_state": state}, time_fired=12345) - body = [ - { - "measurement": f"fake.{entity_id}", - "tags": {"domain": "fake", "entity_id": entity_id}, - "time": 12345, - "fields": {"value": 1}, - } - ] - self.handler_method(event) - self.hass.data[influxdb.DOMAIN].block_till_done() - if entity_id == "included": - assert mock_client.return_value.write_points.call_count == 1 - assert mock_client.return_value.write_points.call_args == mock.call( - body - ) - else: - assert not mock_client.return_value.write_points.called - mock_client.return_value.write_points.reset_mock() + write_api = get_write_api(mock_client) + assert write_api.call_count == 1 + assert write_api.call_args == get_mock_call(body) - def test_event_listener_whitelist_domain(self, mock_client): - """Test the event listener against a whitelist.""" - config = { - "influxdb": { - "host": "host", - "username": "user", - "password": "pass", - "include": {"domains": ["fake"]}, + +@pytest.mark.parametrize( + "mock_client, config_ext, get_write_api, get_mock_call", + [ + ( + influxdb.DEFAULT_API_VERSION, + BASE_V1_CONFIG, + _get_write_api_mock_v1, + influxdb.DEFAULT_API_VERSION, + ), + ( + influxdb.API_VERSION_2, + BASE_V2_CONFIG, + _get_write_api_mock_v2, + influxdb.API_VERSION_2, + ), + ], + indirect=["mock_client", "get_mock_call"], +) +async def test_event_listener_states( + hass, mock_client, config_ext, get_write_api, get_mock_call +): + """Test the event listener against ignored states.""" + handler_method = await _setup(hass, mock_client, config_ext, get_write_api) + + for state_state in (1, "unknown", "", "unavailable"): + state = MagicMock( + state=state_state, + domain="fake", + entity_id="fake.entity-id", + object_id="entity", + attributes={}, + ) + event = MagicMock(data={"new_state": state}, time_fired=12345) + body = [ + { + "measurement": "fake.entity-id", + "tags": {"domain": "fake", "entity_id": "entity"}, + "time": 12345, + "fields": {"value": 1}, } - } - assert 
setup_component(self.hass, influxdb.DOMAIN, config) - self.handler_method = self.hass.bus.listen.call_args_list[0][0][1] - mock_client.return_value.write_points.reset_mock() + ] + handler_method(event) + hass.data[influxdb.DOMAIN].block_till_done() - for domain in ("fake", "another_fake"): - state = mock.MagicMock( - state=1, - domain=domain, - entity_id=f"{domain}.something", - object_id="something", - attributes={}, - ) - event = mock.MagicMock(data={"new_state": state}, time_fired=12345) - body = [ - { - "measurement": f"{domain}.something", - "tags": {"domain": domain, "entity_id": "something"}, - "time": 12345, - "fields": {"value": 1}, - } - ] - self.handler_method(event) - self.hass.data[influxdb.DOMAIN].block_till_done() - if domain == "fake": - assert mock_client.return_value.write_points.call_count == 1 - assert mock_client.return_value.write_points.call_args == mock.call( - body - ) - else: - assert not mock_client.return_value.write_points.called - mock_client.return_value.write_points.reset_mock() + write_api = get_write_api(mock_client) + if state_state == 1: + assert write_api.call_count == 1 + assert write_api.call_args == get_mock_call(body) + else: + assert not write_api.called + write_api.reset_mock() - def test_event_listener_whitelist_domain_and_entities(self, mock_client): - """Test the event listener against a whitelist.""" - config = { - "influxdb": { - "host": "host", - "username": "user", - "password": "pass", - "include": {"domains": ["fake"], "entities": ["other.one"]}, + +@pytest.mark.parametrize( + "mock_client, config_ext, get_write_api, get_mock_call", + [ + ( + influxdb.DEFAULT_API_VERSION, + BASE_V1_CONFIG, + _get_write_api_mock_v1, + influxdb.DEFAULT_API_VERSION, + ), + ( + influxdb.API_VERSION_2, + BASE_V2_CONFIG, + _get_write_api_mock_v2, + influxdb.API_VERSION_2, + ), + ], + indirect=["mock_client", "get_mock_call"], +) +async def test_event_listener_blacklist( + hass, mock_client, config_ext, get_write_api, get_mock_call +): + """Test the event listener against a blacklist.""" + handler_method = await _setup(hass, mock_client, config_ext, get_write_api) + + for entity_id in ("ok", "blacklisted"): + state = MagicMock( + state=1, + domain="fake", + entity_id=f"fake.{entity_id}", + object_id=entity_id, + attributes={}, + ) + event = MagicMock(data={"new_state": state}, time_fired=12345) + body = [ + { + "measurement": f"fake.{entity_id}", + "tags": {"domain": "fake", "entity_id": entity_id}, + "time": 12345, + "fields": {"value": 1}, } - } - assert setup_component(self.hass, influxdb.DOMAIN, config) - self.handler_method = self.hass.bus.listen.call_args_list[0][0][1] - mock_client.return_value.write_points.reset_mock() + ] + handler_method(event) + hass.data[influxdb.DOMAIN].block_till_done() - for domain in ("fake", "another_fake"): - state = mock.MagicMock( - state=1, - domain=domain, - entity_id=f"{domain}.something", - object_id="something", - attributes={}, - ) - event = mock.MagicMock(data={"new_state": state}, time_fired=12345) - body = [ - { - "measurement": f"{domain}.something", - "tags": {"domain": domain, "entity_id": "something"}, - "time": 12345, - "fields": {"value": 1}, - } - ] - self.handler_method(event) - self.hass.data[influxdb.DOMAIN].block_till_done() - if domain == "fake": - assert mock_client.return_value.write_points.call_count == 1 - assert mock_client.return_value.write_points.call_args == mock.call( - body - ) - else: - assert not mock_client.return_value.write_points.called - mock_client.return_value.write_points.reset_mock() 
+ write_api = get_write_api(mock_client) + if entity_id == "ok": + assert write_api.call_count == 1 + assert write_api.call_args == get_mock_call(body) + else: + assert not write_api.called + write_api.reset_mock() - for entity_id in ("one", "two"): - state = mock.MagicMock( - state=1, - domain="other", - entity_id=f"other.{entity_id}", - object_id=entity_id, - attributes={}, - ) - event = mock.MagicMock(data={"new_state": state}, time_fired=12345) - body = [ - { - "measurement": f"other.{entity_id}", - "tags": {"domain": "other", "entity_id": entity_id}, - "time": 12345, - "fields": {"value": 1}, - } - ] - self.handler_method(event) - self.hass.data[influxdb.DOMAIN].block_till_done() - if entity_id == "one": - assert mock_client.return_value.write_points.call_count == 1 - assert mock_client.return_value.write_points.call_args == mock.call( - body - ) - else: - assert not mock_client.return_value.write_points.called - mock_client.return_value.write_points.reset_mock() - def test_event_listener_invalid_type(self, mock_client): - """Test the event listener when an attribute has an invalid type.""" - self._setup(mock_client) +@pytest.mark.parametrize( + "mock_client, config_ext, get_write_api, get_mock_call", + [ + ( + influxdb.DEFAULT_API_VERSION, + BASE_V1_CONFIG, + _get_write_api_mock_v1, + influxdb.DEFAULT_API_VERSION, + ), + ( + influxdb.API_VERSION_2, + BASE_V2_CONFIG, + _get_write_api_mock_v2, + influxdb.API_VERSION_2, + ), + ], + indirect=["mock_client", "get_mock_call"], +) +async def test_event_listener_blacklist_domain( + hass, mock_client, config_ext, get_write_api, get_mock_call +): + """Test the event listener against a domain blacklist.""" + handler_method = await _setup(hass, mock_client, config_ext, get_write_api) - # map of HA State to valid influxdb [state, value] fields - valid = { - "1": [None, 1], - "1.0": [None, 1.0], - STATE_ON: [STATE_ON, 1], - STATE_OFF: [STATE_OFF, 0], - STATE_STANDBY: [STATE_STANDBY, None], - "foo": ["foo", None], - } - for in_, out in valid.items(): - attrs = { - "unit_of_measurement": "foobars", - "longitude": "1.1", - "latitude": "2.2", - "invalid_attribute": ["value1", "value2"], + for domain in ("ok", "another_fake"): + state = MagicMock( + state=1, + domain=domain, + entity_id=f"{domain}.something", + object_id="something", + attributes={}, + ) + event = MagicMock(data={"new_state": state}, time_fired=12345) + body = [ + { + "measurement": f"{domain}.something", + "tags": {"domain": domain, "entity_id": "something"}, + "time": 12345, + "fields": {"value": 1}, } - state = mock.MagicMock( - state=in_, - domain="fake", - entity_id="fake.entity-id", - object_id="entity", - attributes=attrs, - ) - event = mock.MagicMock(data={"new_state": state}, time_fired=12345) - body = [ - { - "measurement": "foobars", - "tags": {"domain": "fake", "entity_id": "entity"}, - "time": 12345, - "fields": { - "longitude": 1.1, - "latitude": 2.2, - "invalid_attribute_str": "['value1', 'value2']", - }, - } - ] - if out[0] is not None: - body[0]["fields"]["state"] = out[0] - if out[1] is not None: - body[0]["fields"]["value"] = out[1] + ] + handler_method(event) + hass.data[influxdb.DOMAIN].block_till_done() - self.handler_method(event) - self.hass.data[influxdb.DOMAIN].block_till_done() - assert mock_client.return_value.write_points.call_count == 1 - assert mock_client.return_value.write_points.call_args == mock.call(body) - mock_client.return_value.write_points.reset_mock() + write_api = get_write_api(mock_client) + if domain == "ok": + assert write_api.call_count == 
1 + assert write_api.call_args == get_mock_call(body) + else: + assert not write_api.called + write_api.reset_mock() - def test_event_listener_default_measurement(self, mock_client): - """Test the event listener with a default measurement.""" - config = { - "influxdb": { - "host": "host", - "username": "user", - "password": "pass", - "default_measurement": "state", - "exclude": {"entities": ["fake.blacklisted"]}, + +@pytest.mark.parametrize( + "mock_client, config_ext, get_write_api, get_mock_call", + [ + ( + influxdb.DEFAULT_API_VERSION, + BASE_V1_CONFIG, + _get_write_api_mock_v1, + influxdb.DEFAULT_API_VERSION, + ), + ( + influxdb.API_VERSION_2, + BASE_V2_CONFIG, + _get_write_api_mock_v2, + influxdb.API_VERSION_2, + ), + ], + indirect=["mock_client", "get_mock_call"], +) +async def test_event_listener_whitelist( + hass, mock_client, config_ext, get_write_api, get_mock_call +): + """Test the event listener against a whitelist.""" + config = {"include": {"entities": ["fake.included"]}} + config.update(config_ext) + handler_method = await _setup(hass, mock_client, config, get_write_api) + + for entity_id in ("included", "default"): + state = MagicMock( + state=1, + domain="fake", + entity_id=f"fake.{entity_id}", + object_id=entity_id, + attributes={}, + ) + event = MagicMock(data={"new_state": state}, time_fired=12345) + body = [ + { + "measurement": f"fake.{entity_id}", + "tags": {"domain": "fake", "entity_id": entity_id}, + "time": 12345, + "fields": {"value": 1}, } - } - assert setup_component(self.hass, influxdb.DOMAIN, config) - self.handler_method = self.hass.bus.listen.call_args_list[0][0][1] - mock_client.return_value.write_points.reset_mock() + ] + handler_method(event) + hass.data[influxdb.DOMAIN].block_till_done() - for entity_id in ("ok", "blacklisted"): - state = mock.MagicMock( - state=1, - domain="fake", - entity_id=f"fake.{entity_id}", - object_id=entity_id, - attributes={}, - ) - event = mock.MagicMock(data={"new_state": state}, time_fired=12345) - body = [ - { - "measurement": "state", - "tags": {"domain": "fake", "entity_id": entity_id}, - "time": 12345, - "fields": {"value": 1}, - } - ] - self.handler_method(event) - self.hass.data[influxdb.DOMAIN].block_till_done() - if entity_id == "ok": - assert mock_client.return_value.write_points.call_count == 1 - assert mock_client.return_value.write_points.call_args == mock.call( - body - ) - else: - assert not mock_client.return_value.write_points.called - mock_client.return_value.write_points.reset_mock() + write_api = get_write_api(mock_client) + if entity_id == "included": + assert write_api.call_count == 1 + assert write_api.call_args == get_mock_call(body) + else: + assert not write_api.called + write_api.reset_mock() - def test_event_listener_unit_of_measurement_field(self, mock_client): - """Test the event listener for unit of measurement field.""" - config = { - "influxdb": { - "host": "host", - "username": "user", - "password": "pass", - "override_measurement": "state", + +@pytest.mark.parametrize( + "mock_client, config_ext, get_write_api, get_mock_call", + [ + ( + influxdb.DEFAULT_API_VERSION, + BASE_V1_CONFIG, + _get_write_api_mock_v1, + influxdb.DEFAULT_API_VERSION, + ), + ( + influxdb.API_VERSION_2, + BASE_V2_CONFIG, + _get_write_api_mock_v2, + influxdb.API_VERSION_2, + ), + ], + indirect=["mock_client", "get_mock_call"], +) +async def test_event_listener_whitelist_domain( + hass, mock_client, config_ext, get_write_api, get_mock_call +): + """Test the event listener against a domain whitelist.""" + config = 
{"include": {"domains": ["fake"]}} + config.update(config_ext) + handler_method = await _setup(hass, mock_client, config, get_write_api) + + for domain in ("fake", "another_fake"): + state = MagicMock( + state=1, + domain=domain, + entity_id=f"{domain}.something", + object_id="something", + attributes={}, + ) + event = MagicMock(data={"new_state": state}, time_fired=12345) + body = [ + { + "measurement": f"{domain}.something", + "tags": {"domain": domain, "entity_id": "something"}, + "time": 12345, + "fields": {"value": 1}, } - } - assert setup_component(self.hass, influxdb.DOMAIN, config) - self.handler_method = self.hass.bus.listen.call_args_list[0][0][1] - mock_client.return_value.write_points.reset_mock() + ] + handler_method(event) + hass.data[influxdb.DOMAIN].block_till_done() - attrs = {"unit_of_measurement": "foobars"} - state = mock.MagicMock( - state="foo", + write_api = get_write_api(mock_client) + if domain == "fake": + assert write_api.call_count == 1 + assert write_api.call_args == get_mock_call(body) + else: + assert not write_api.called + write_api.reset_mock() + + +@pytest.mark.parametrize( + "mock_client, config_ext, get_write_api, get_mock_call", + [ + ( + influxdb.DEFAULT_API_VERSION, + BASE_V1_CONFIG, + _get_write_api_mock_v1, + influxdb.DEFAULT_API_VERSION, + ), + ( + influxdb.API_VERSION_2, + BASE_V2_CONFIG, + _get_write_api_mock_v2, + influxdb.API_VERSION_2, + ), + ], + indirect=["mock_client", "get_mock_call"], +) +async def test_event_listener_whitelist_domain_and_entities( + hass, mock_client, config_ext, get_write_api, get_mock_call +): + """Test the event listener against a domain and entity whitelist.""" + config = {"include": {"domains": ["fake"], "entities": ["other.one"]}} + config.update(config_ext) + handler_method = await _setup(hass, mock_client, config, get_write_api) + + for domain in ("fake", "another_fake"): + state = MagicMock( + state=1, + domain=domain, + entity_id=f"{domain}.something", + object_id="something", + attributes={}, + ) + event = MagicMock(data={"new_state": state}, time_fired=12345) + body = [ + { + "measurement": f"{domain}.something", + "tags": {"domain": domain, "entity_id": "something"}, + "time": 12345, + "fields": {"value": 1}, + } + ] + handler_method(event) + hass.data[influxdb.DOMAIN].block_till_done() + + write_api = get_write_api(mock_client) + if domain == "fake": + assert write_api.call_count == 1 + assert write_api.call_args == get_mock_call(body) + else: + assert not write_api.called + write_api.reset_mock() + + for entity_id in ("one", "two"): + state = MagicMock( + state=1, + domain="other", + entity_id=f"other.{entity_id}", + object_id=entity_id, + attributes={}, + ) + event = MagicMock(data={"new_state": state}, time_fired=12345) + body = [ + { + "measurement": f"other.{entity_id}", + "tags": {"domain": "other", "entity_id": entity_id}, + "time": 12345, + "fields": {"value": 1}, + } + ] + handler_method(event) + hass.data[influxdb.DOMAIN].block_till_done() + + write_api = get_write_api(mock_client) + if entity_id == "one": + assert write_api.call_count == 1 + assert write_api.call_args == get_mock_call(body) + else: + assert not write_api.called + write_api.reset_mock() + + +@pytest.mark.parametrize( + "mock_client, config_ext, get_write_api, get_mock_call", + [ + ( + influxdb.DEFAULT_API_VERSION, + BASE_V1_CONFIG, + _get_write_api_mock_v1, + influxdb.DEFAULT_API_VERSION, + ), + ( + influxdb.API_VERSION_2, + BASE_V2_CONFIG, + _get_write_api_mock_v2, + influxdb.API_VERSION_2, + ), + ], + indirect=["mock_client", 
"get_mock_call"], +) +async def test_event_listener_invalid_type( + hass, mock_client, config_ext, get_write_api, get_mock_call +): + """Test the event listener when an attribute has an invalid type.""" + handler_method = await _setup(hass, mock_client, config_ext, get_write_api) + + # map of HA State to valid influxdb [state, value] fields + valid = { + "1": [None, 1], + "1.0": [None, 1.0], + STATE_ON: [STATE_ON, 1], + STATE_OFF: [STATE_OFF, 0], + STATE_STANDBY: [STATE_STANDBY, None], + "foo": ["foo", None], + } + for in_, out in valid.items(): + attrs = { + "unit_of_measurement": "foobars", + "longitude": "1.1", + "latitude": "2.2", + "invalid_attribute": ["value1", "value2"], + } + state = MagicMock( + state=in_, domain="fake", entity_id="fake.entity-id", object_id="entity", attributes=attrs, ) - event = mock.MagicMock(data={"new_state": state}, time_fired=12345) + event = MagicMock(data={"new_state": state}, time_fired=12345) body = [ { - "measurement": "state", + "measurement": "foobars", "tags": {"domain": "fake", "entity_id": "entity"}, "time": 12345, - "fields": {"state": "foo", "unit_of_measurement_str": "foobars"}, + "fields": { + "longitude": 1.1, + "latitude": 2.2, + "invalid_attribute_str": "['value1', 'value2']", + }, } ] - self.handler_method(event) - self.hass.data[influxdb.DOMAIN].block_till_done() - assert mock_client.return_value.write_points.call_count == 1 - assert mock_client.return_value.write_points.call_args == mock.call(body) - mock_client.return_value.write_points.reset_mock() + if out[0] is not None: + body[0]["fields"]["state"] = out[0] + if out[1] is not None: + body[0]["fields"]["value"] = out[1] - def test_event_listener_tags_attributes(self, mock_client): - """Test the event listener when some attributes should be tags.""" - config = { - "influxdb": { - "host": "host", - "username": "user", - "password": "pass", - "tags_attributes": ["friendly_fake"], - } + handler_method(event) + hass.data[influxdb.DOMAIN].block_till_done() + + write_api = get_write_api(mock_client) + assert write_api.call_count == 1 + assert write_api.call_args == get_mock_call(body) + write_api.reset_mock() + + +@pytest.mark.parametrize( + "mock_client, config_ext, get_write_api, get_mock_call", + [ + ( + influxdb.DEFAULT_API_VERSION, + BASE_V1_CONFIG, + _get_write_api_mock_v1, + influxdb.DEFAULT_API_VERSION, + ), + ( + influxdb.API_VERSION_2, + BASE_V2_CONFIG, + _get_write_api_mock_v2, + influxdb.API_VERSION_2, + ), + ], + indirect=["mock_client", "get_mock_call"], +) +async def test_event_listener_default_measurement( + hass, mock_client, config_ext, get_write_api, get_mock_call +): + """Test the event listener with a default measurement.""" + config = {"default_measurement": "state"} + config.update(config_ext) + handler_method = await _setup(hass, mock_client, config, get_write_api) + + state = MagicMock( + state=1, domain="fake", entity_id="fake.ok", object_id="ok", attributes={}, + ) + event = MagicMock(data={"new_state": state}, time_fired=12345) + body = [ + { + "measurement": "state", + "tags": {"domain": "fake", "entity_id": "ok"}, + "time": 12345, + "fields": {"value": 1}, } - assert setup_component(self.hass, influxdb.DOMAIN, config) - self.handler_method = self.hass.bus.listen.call_args_list[0][0][1] - mock_client.return_value.write_points.reset_mock() + ] + handler_method(event) + hass.data[influxdb.DOMAIN].block_till_done() - attrs = {"friendly_fake": "tag_str", "field_fake": "field_str"} - state = mock.MagicMock( + write_api = get_write_api(mock_client) + assert 
write_api.call_count == 1 + assert write_api.call_args == get_mock_call(body) + + +@pytest.mark.parametrize( + "mock_client, config_ext, get_write_api, get_mock_call", + [ + ( + influxdb.DEFAULT_API_VERSION, + BASE_V1_CONFIG, + _get_write_api_mock_v1, + influxdb.DEFAULT_API_VERSION, + ), + ( + influxdb.API_VERSION_2, + BASE_V2_CONFIG, + _get_write_api_mock_v2, + influxdb.API_VERSION_2, + ), + ], + indirect=["mock_client", "get_mock_call"], +) +async def test_event_listener_unit_of_measurement_field( + hass, mock_client, config_ext, get_write_api, get_mock_call +): + """Test the event listener for unit of measurement field.""" + config = {"override_measurement": "state"} + config.update(config_ext) + handler_method = await _setup(hass, mock_client, config, get_write_api) + + attrs = {"unit_of_measurement": "foobars"} + state = MagicMock( + state="foo", + domain="fake", + entity_id="fake.entity-id", + object_id="entity", + attributes=attrs, + ) + event = MagicMock(data={"new_state": state}, time_fired=12345) + body = [ + { + "measurement": "state", + "tags": {"domain": "fake", "entity_id": "entity"}, + "time": 12345, + "fields": {"state": "foo", "unit_of_measurement_str": "foobars"}, + } + ] + handler_method(event) + hass.data[influxdb.DOMAIN].block_till_done() + + write_api = get_write_api(mock_client) + assert write_api.call_count == 1 + assert write_api.call_args == get_mock_call(body) + + +@pytest.mark.parametrize( + "mock_client, config_ext, get_write_api, get_mock_call", + [ + ( + influxdb.DEFAULT_API_VERSION, + BASE_V1_CONFIG, + _get_write_api_mock_v1, + influxdb.DEFAULT_API_VERSION, + ), + ( + influxdb.API_VERSION_2, + BASE_V2_CONFIG, + _get_write_api_mock_v2, + influxdb.API_VERSION_2, + ), + ], + indirect=["mock_client", "get_mock_call"], +) +async def test_event_listener_tags_attributes( + hass, mock_client, config_ext, get_write_api, get_mock_call +): + """Test the event listener when some attributes should be tags.""" + config = {"tags_attributes": ["friendly_fake"]} + config.update(config_ext) + handler_method = await _setup(hass, mock_client, config, get_write_api) + + attrs = {"friendly_fake": "tag_str", "field_fake": "field_str"} + state = MagicMock( + state=1, + domain="fake", + entity_id="fake.something", + object_id="something", + attributes=attrs, + ) + event = MagicMock(data={"new_state": state}, time_fired=12345) + body = [ + { + "measurement": "fake.something", + "tags": { + "domain": "fake", + "entity_id": "something", + "friendly_fake": "tag_str", + }, + "time": 12345, + "fields": {"value": 1, "field_fake_str": "field_str"}, + } + ] + handler_method(event) + hass.data[influxdb.DOMAIN].block_till_done() + + write_api = get_write_api(mock_client) + assert write_api.call_count == 1 + assert write_api.call_args == get_mock_call(body) + + +@pytest.mark.parametrize( + "mock_client, config_ext, get_write_api, get_mock_call", + [ + ( + influxdb.DEFAULT_API_VERSION, + BASE_V1_CONFIG, + _get_write_api_mock_v1, + influxdb.DEFAULT_API_VERSION, + ), + ( + influxdb.API_VERSION_2, + BASE_V2_CONFIG, + _get_write_api_mock_v2, + influxdb.API_VERSION_2, + ), + ], + indirect=["mock_client", "get_mock_call"], +) +async def test_event_listener_component_override_measurement( + hass, mock_client, config_ext, get_write_api, get_mock_call +): + """Test the event listener with overridden measurements.""" + config = { + "component_config": { + "sensor.fake_humidity": {"override_measurement": "humidity"} + }, + "component_config_glob": { + "binary_sensor.*motion": {"override_measurement": 
"motion"} + }, + "component_config_domain": {"climate": {"override_measurement": "hvac"}}, + } + config.update(config_ext) + handler_method = await _setup(hass, mock_client, config, get_write_api) + + test_components = [ + {"domain": "sensor", "id": "fake_humidity", "res": "humidity"}, + {"domain": "binary_sensor", "id": "fake_motion", "res": "motion"}, + {"domain": "climate", "id": "fake_thermostat", "res": "hvac"}, + {"domain": "other", "id": "just_fake", "res": "other.just_fake"}, + ] + for comp in test_components: + state = MagicMock( state=1, - domain="fake", - entity_id="fake.something", - object_id="something", - attributes=attrs, + domain=comp["domain"], + entity_id=f"{comp['domain']}.{comp['id']}", + object_id=comp["id"], + attributes={}, ) - event = mock.MagicMock(data={"new_state": state}, time_fired=12345) + event = MagicMock(data={"new_state": state}, time_fired=12345) body = [ { - "measurement": "fake.something", - "tags": { - "domain": "fake", - "entity_id": "something", - "friendly_fake": "tag_str", - }, + "measurement": comp["res"], + "tags": {"domain": comp["domain"], "entity_id": comp["id"]}, "time": 12345, - "fields": {"value": 1, "field_fake_str": "field_str"}, + "fields": {"value": 1}, } ] - self.handler_method(event) - self.hass.data[influxdb.DOMAIN].block_till_done() - assert mock_client.return_value.write_points.call_count == 1 - assert mock_client.return_value.write_points.call_args == mock.call(body) - mock_client.return_value.write_points.reset_mock() + handler_method(event) + hass.data[influxdb.DOMAIN].block_till_done() - def test_event_listener_component_override_measurement(self, mock_client): - """Test the event listener with overridden measurements.""" - config = { - "influxdb": { - "host": "host", - "username": "user", - "password": "pass", - "component_config": { - "sensor.fake_humidity": {"override_measurement": "humidity"} - }, - "component_config_glob": { - "binary_sensor.*motion": {"override_measurement": "motion"} - }, - "component_config_domain": { - "climate": {"override_measurement": "hvac"} - }, - } - } - assert setup_component(self.hass, influxdb.DOMAIN, config) - self.handler_method = self.hass.bus.listen.call_args_list[0][0][1] - mock_client.return_value.write_points.reset_mock() + write_api = get_write_api(mock_client) + assert write_api.call_count == 1 + assert write_api.call_args == get_mock_call(body) + write_api.reset_mock() - test_components = [ - {"domain": "sensor", "id": "fake_humidity", "res": "humidity"}, - {"domain": "binary_sensor", "id": "fake_motion", "res": "motion"}, - {"domain": "climate", "id": "fake_thermostat", "res": "hvac"}, - {"domain": "other", "id": "just_fake", "res": "other.just_fake"}, - ] - for comp in test_components: - state = mock.MagicMock( - state=1, - domain=comp["domain"], - entity_id=f"{comp['domain']}.{comp['id']}", - object_id=comp["id"], - attributes={}, - ) - event = mock.MagicMock(data={"new_state": state}, time_fired=12345) - body = [ - { - "measurement": comp["res"], - "tags": {"domain": comp["domain"], "entity_id": comp["id"]}, - "time": 12345, - "fields": {"value": 1}, - } - ] - self.handler_method(event) - self.hass.data[influxdb.DOMAIN].block_till_done() - assert mock_client.return_value.write_points.call_count == 1 - assert mock_client.return_value.write_points.call_args == mock.call(body) - mock_client.return_value.write_points.reset_mock() - def test_scheduled_write(self, mock_client): - """Test the event listener to retry after write failures.""" - config = { - "influxdb": { - "host": 
"host", - "username": "user", - "password": "pass", - "max_retries": 1, - } - } - assert setup_component(self.hass, influxdb.DOMAIN, config) - self.handler_method = self.hass.bus.listen.call_args_list[0][0][1] - mock_client.return_value.write_points.reset_mock() +@pytest.mark.parametrize( + "mock_client, config_ext, get_write_api, get_mock_call", + [ + ( + influxdb.DEFAULT_API_VERSION, + BASE_V1_CONFIG, + _get_write_api_mock_v1, + influxdb.DEFAULT_API_VERSION, + ), + ( + influxdb.API_VERSION_2, + BASE_V2_CONFIG, + _get_write_api_mock_v2, + influxdb.API_VERSION_2, + ), + ], + indirect=["mock_client", "get_mock_call"], +) +async def test_event_listener_scheduled_write( + hass, mock_client, config_ext, get_write_api, get_mock_call +): + """Test the event listener retries after a write failure.""" + config = {"max_retries": 1} + config.update(config_ext) + handler_method = await _setup(hass, mock_client, config, get_write_api) - state = mock.MagicMock( - state=1, - domain="fake", - entity_id="entity.id", - object_id="entity", - attributes={}, - ) - event = mock.MagicMock(data={"new_state": state}, time_fired=12345) - mock_client.return_value.write_points.side_effect = IOError("foo") + state = MagicMock( + state=1, + domain="fake", + entity_id="entity.id", + object_id="entity", + attributes={}, + ) + event = MagicMock(data={"new_state": state}, time_fired=12345) + write_api = get_write_api(mock_client) + write_api.side_effect = IOError("foo") - # Write fails - with mock.patch.object(influxdb.time, "sleep") as mock_sleep: - self.handler_method(event) - self.hass.data[influxdb.DOMAIN].block_till_done() - assert mock_sleep.called - json_data = mock_client.return_value.write_points.call_args[0][0] - assert mock_client.return_value.write_points.call_count == 2 - mock_client.return_value.write_points.assert_called_with(json_data) + # Write fails + with patch.object(influxdb.time, "sleep") as mock_sleep: + handler_method(event) + hass.data[influxdb.DOMAIN].block_till_done() + assert mock_sleep.called + assert write_api.call_count == 2 - # Write works again - mock_client.return_value.write_points.side_effect = None - with mock.patch.object(influxdb.time, "sleep") as mock_sleep: - self.handler_method(event) - self.hass.data[influxdb.DOMAIN].block_till_done() - assert not mock_sleep.called - assert mock_client.return_value.write_points.call_count == 3 + # Write works again + write_api.side_effect = None + with patch.object(influxdb.time, "sleep") as mock_sleep: + handler_method(event) + hass.data[influxdb.DOMAIN].block_till_done() + assert not mock_sleep.called + assert write_api.call_count == 3 - def test_queue_backlog_full(self, mock_client): - """Test the event listener to drop old events.""" - self._setup(mock_client) - state = mock.MagicMock( - state=1, - domain="fake", - entity_id="entity.id", - object_id="entity", - attributes={}, - ) - event = mock.MagicMock(data={"new_state": state}, time_fired=12345) +@pytest.mark.parametrize( + "mock_client, config_ext, get_write_api, get_mock_call", + [ + ( + influxdb.DEFAULT_API_VERSION, + BASE_V1_CONFIG, + _get_write_api_mock_v1, + influxdb.DEFAULT_API_VERSION, + ), + ( + influxdb.API_VERSION_2, + BASE_V2_CONFIG, + _get_write_api_mock_v2, + influxdb.API_VERSION_2, + ), + ], + indirect=["mock_client", "get_mock_call"], +) +async def test_event_listener_backlog_full( + hass, mock_client, config_ext, get_write_api, get_mock_call +): + """Test the event listener drops old events when backlog gets full.""" + handler_method = await _setup(hass, mock_client, 
config_ext, get_write_api) - monotonic_time = 0 + state = MagicMock( + state=1, + domain="fake", + entity_id="entity.id", + object_id="entity", + attributes={}, + ) + event = MagicMock(data={"new_state": state}, time_fired=12345) - def fast_monotonic(): - """Monotonic time that ticks fast enough to cause a timeout.""" - nonlocal monotonic_time - monotonic_time += 60 - return monotonic_time + monotonic_time = 0 - with mock.patch( - "homeassistant.components.influxdb.time.monotonic", new=fast_monotonic - ): - self.handler_method(event) - self.hass.data[influxdb.DOMAIN].block_till_done() + def fast_monotonic(): + """Monotonic time that ticks fast enough to cause a timeout.""" + nonlocal monotonic_time + monotonic_time += 60 + return monotonic_time - assert mock_client.return_value.write_points.call_count == 0 + with patch("homeassistant.components.influxdb.time.monotonic", new=fast_monotonic): + handler_method(event) + hass.data[influxdb.DOMAIN].block_till_done() - mock_client.return_value.write_points.reset_mock() + assert get_write_api(mock_client).call_count == 0 diff --git a/tests/components/ipp/test_config_flow.py b/tests/components/ipp/test_config_flow.py index a468115f239..a3f253d3c81 100644 --- a/tests/components/ipp/test_config_flow.py +++ b/tests/components/ipp/test_config_flow.py @@ -264,10 +264,10 @@ async def test_zeroconf_with_uuid_device_exists_abort( assert result["reason"] == "already_configured" -async def test_zeroconf_empty_unique_id_required_abort( +async def test_zeroconf_empty_unique_id( hass: HomeAssistant, aioclient_mock: AiohttpClientMocker ) -> None: - """Test we abort zeroconf flow if printer lacks (empty) unique identification.""" + """Test zeroconf flow if printer lacks (empty) unique identification.""" mock_connection(aioclient_mock, no_unique_id=True) discovery_info = { @@ -278,14 +278,13 @@ async def test_zeroconf_empty_unique_id_required_abort( DOMAIN, context={"source": SOURCE_ZEROCONF}, data=discovery_info, ) - assert result["type"] == RESULT_TYPE_ABORT - assert result["reason"] == "unique_id_required" + assert result["type"] == RESULT_TYPE_FORM -async def test_zeroconf_unique_id_required_abort( +async def test_zeroconf_no_unique_id( hass: HomeAssistant, aioclient_mock: AiohttpClientMocker ) -> None: - """Test we abort zeroconf flow if printer lacks unique identification.""" + """Test zeroconf flow if printer lacks unique identification.""" mock_connection(aioclient_mock, no_unique_id=True) discovery_info = MOCK_ZEROCONF_IPP_SERVICE_INFO.copy() @@ -293,8 +292,7 @@ async def test_zeroconf_unique_id_required_abort( DOMAIN, context={"source": SOURCE_ZEROCONF}, data=discovery_info, ) - assert result["type"] == RESULT_TYPE_ABORT - assert result["reason"] == "unique_id_required" + assert result["type"] == RESULT_TYPE_FORM async def test_full_user_flow_implementation( diff --git a/tests/components/kira/test_init.py b/tests/components/kira/test_init.py index 8656ac23264..b57d8c97617 100644 --- a/tests/components/kira/test_init.py +++ b/tests/components/kira/test_init.py @@ -45,8 +45,9 @@ class TestKiraSetup(unittest.TestCase): self._module_patcher.start() self.work_dir = tempfile.mkdtemp() + self.addCleanup(self.tear_down_cleanup) - def tearDown(self): + def tear_down_cleanup(self): """Stop everything that was started.""" self.hass.stop() self._module_patcher.stop() diff --git a/tests/components/kira/test_remote.py b/tests/components/kira/test_remote.py index b1ac7ea12fc..c946823474c 100644 --- a/tests/components/kira/test_remote.py +++ 
b/tests/components/kira/test_remote.py @@ -30,10 +30,7 @@ class TestKiraSensor(unittest.TestCase): self.mock_kira = MagicMock() self.hass.data[kira.DOMAIN] = {kira.CONF_REMOTE: {}} self.hass.data[kira.DOMAIN][kira.CONF_REMOTE]["kira"] = self.mock_kira - - def tearDown(self): - """Stop everything that was started.""" - self.hass.stop() + self.addCleanup(self.hass.stop) def test_service_call(self): """Test Kira's ability to send commands.""" diff --git a/tests/components/kira/test_sensor.py b/tests/components/kira/test_sensor.py index 2fae36dc670..21572a8735b 100644 --- a/tests/components/kira/test_sensor.py +++ b/tests/components/kira/test_sensor.py @@ -28,10 +28,7 @@ class TestKiraSensor(unittest.TestCase): mock_kira = MagicMock() self.hass.data[kira.DOMAIN] = {kira.CONF_SENSOR: {}} self.hass.data[kira.DOMAIN][kira.CONF_SENSOR]["kira"] = mock_kira - - def tearDown(self): - """Stop everything that was started.""" - self.hass.stop() + self.addCleanup(self.hass.stop) # pylint: disable=protected-access def test_kira_sensor_callback(self): diff --git a/tests/components/konnected/test_config_flow.py b/tests/components/konnected/test_config_flow.py index a0d870b37ff..dbca89efe30 100644 --- a/tests/components/konnected/test_config_flow.py +++ b/tests/components/konnected/test_config_flow.py @@ -69,7 +69,9 @@ async def test_pro_flow_works(hass, mock_panel): assert result["type"] == "form" assert result["step_id"] == "user" + # pro uses chipId instead of MAC as unique id mock_panel.get_status.return_value = { + "chipId": "1234567", "mac": "11:22:33:44:55:66", "model": "Konnected Pro", } @@ -80,7 +82,7 @@ async def test_pro_flow_works(hass, mock_panel): assert result["step_id"] == "confirm" assert result["description_placeholders"] == { "model": "Konnected Alarm Panel Pro", - "id": "112233445566", + "id": "1234567", "host": "1.2.3.4", "port": 1234, } @@ -192,8 +194,9 @@ async def test_import_no_host_user_finish(hass, mock_panel): async def test_import_ssdp_host_user_finish(hass, mock_panel): - """Test importing a panel with no host info which ssdp discovers.""" + """Test importing a pro panel with no host info which ssdp discovers.""" mock_panel.get_status.return_value = { + "chipId": "somechipid", "mac": "11:22:33:44:55:66", "model": "Konnected Pro", } @@ -224,12 +227,12 @@ async def test_import_ssdp_host_user_finish(hass, mock_panel): "out1": "Disabled", }, }, - "id": "112233445566", + "id": "somechipid", }, ) assert result["type"] == "form" assert result["step_id"] == "import_confirm" - assert result["description_placeholders"]["id"] == "112233445566" + assert result["description_placeholders"]["id"] == "somechipid" # discover the panel via ssdp ssdp_result = await hass.config_entries.flow.async_init( @@ -251,7 +254,7 @@ async def test_import_ssdp_host_user_finish(hass, mock_panel): assert result["step_id"] == "confirm" assert result["description_placeholders"] == { "model": "Konnected Alarm Panel Pro", - "id": "112233445566", + "id": "somechipid", "host": "0.0.0.0", "port": 1234, } diff --git a/tests/components/light/test_init.py b/tests/components/light/test_init.py index 2fa22cd81dd..b46be5c926c 100644 --- a/tests/components/light/test_init.py +++ b/tests/components/light/test_init.py @@ -32,9 +32,9 @@ class TestLight(unittest.TestCase): def setUp(self): """Set up things to be run when tests are started.""" self.hass = get_test_home_assistant() + self.addCleanup(self.tear_down_cleanup) - # pylint: disable=invalid-name - def tearDown(self): + def tear_down_cleanup(self): """Stop everything that 
was started.""" self.hass.stop() diff --git a/tests/components/logbook/test_init.py b/tests/components/logbook/test_init.py index 660d3182f42..03ef09b438d 100644 --- a/tests/components/logbook/test_init.py +++ b/tests/components/logbook/test_init.py @@ -1,7 +1,9 @@ """The tests for the logbook component.""" # pylint: disable=protected-access,invalid-name +import collections from datetime import datetime, timedelta from functools import partial +import json import logging import unittest @@ -10,24 +12,32 @@ import voluptuous as vol from homeassistant.components import logbook, recorder, sun from homeassistant.components.alexa.smart_home import EVENT_ALEXA_SMART_HOME +from homeassistant.components.automation import EVENT_AUTOMATION_TRIGGERED +from homeassistant.components.recorder.models import process_timestamp_to_utc_isoformat +from homeassistant.components.script import EVENT_SCRIPT_STARTED from homeassistant.const import ( ATTR_ENTITY_ID, - ATTR_HIDDEN, ATTR_NAME, + CONF_DOMAINS, + CONF_ENTITIES, EVENT_HOMEASSISTANT_START, EVENT_HOMEASSISTANT_STOP, - EVENT_SCRIPT_STARTED, EVENT_STATE_CHANGED, STATE_NOT_HOME, STATE_OFF, STATE_ON, ) import homeassistant.core as ha +from homeassistant.helpers.entityfilter import ( + CONF_ENTITY_GLOBS, + convert_include_exclude_filter, +) +from homeassistant.helpers.json import JSONEncoder from homeassistant.setup import async_setup_component, setup_component import homeassistant.util.dt as dt_util -from tests.async_mock import patch -from tests.common import get_test_home_assistant, init_recorder_component +from tests.async_mock import Mock, patch +from tests.common import get_test_home_assistant, init_recorder_component, mock_platform from tests.components.recorder.common import trigger_db_commit _LOGGER = logging.getLogger(__name__) @@ -44,10 +54,7 @@ class TestComponentLogbook(unittest.TestCase): init_recorder_component(self.hass) # Force an in memory DB with patch("homeassistant.components.http.start_http_server_and_save_config"): assert setup_component(self.hass, logbook.DOMAIN, self.EMPTY_CONFIG) - - def tearDown(self): - """Stop everything that was started.""" - self.hass.stop() + self.addCleanup(self.hass.stop) def test_service_call_create_logbook_entry(self): """Test if service call create log book entry.""" @@ -123,12 +130,15 @@ class TestComponentLogbook(unittest.TestCase): pointA = dt_util.utcnow().replace(minute=2) pointB = pointA.replace(minute=5) pointC = pointA + timedelta(minutes=logbook.GROUP_BY_MINUTES) + entity_attr_cache = logbook.EntityAttributeCache(self.hass) eventA = self.create_state_changed_event(pointA, entity_id, 10) eventB = self.create_state_changed_event(pointB, entity_id, 20) eventC = self.create_state_changed_event(pointC, entity_id, 30) - entries = list(logbook.humanify(self.hass, (eventA, eventB, eventC))) + entries = list( + logbook.humanify(self.hass, (eventA, eventB, eventC), entity_attr_cache) + ) assert len(entries) == 2 self.assert_entry( @@ -143,12 +153,17 @@ class TestComponentLogbook(unittest.TestCase): """Test remove continuous sensor events from logbook.""" entity_id = "sensor.bla" pointA = dt_util.utcnow() + entity_attr_cache = logbook.EntityAttributeCache(self.hass) attributes = {"unit_of_measurement": "foo"} eventA = self.create_state_changed_event(pointA, entity_id, 10, attributes) - entries = list(logbook.humanify(self.hass, (eventA,))) - - assert len(entries) == 0 + entities_filter = convert_include_exclude_filter( + logbook.CONFIG_SCHEMA({logbook.DOMAIN: {}})[logbook.DOMAIN] + ) + assert ( + 
logbook._keep_event(self.hass, eventA, entities_filter, entity_attr_cache) + is False + ) def test_exclude_new_entities(self): """Test if events are excluded on first update.""" @@ -156,18 +171,30 @@ class TestComponentLogbook(unittest.TestCase): entity_id2 = "sensor.blu" pointA = dt_util.utcnow() pointB = pointA + timedelta(minutes=logbook.GROUP_BY_MINUTES) + entity_attr_cache = logbook.EntityAttributeCache(self.hass) - eventA = self.create_state_changed_event(pointA, entity_id, 10) + state_on = ha.State( + entity_id, "on", {"brightness": 200}, pointA, pointA + ).as_dict() + + eventA = self.create_state_changed_event_from_old_new( + entity_id, pointA, None, state_on + ) eventB = self.create_state_changed_event(pointB, entity_id2, 20) - eventA.data["old_state"] = None - entities_filter = logbook._generate_filter_from_config({}) + entities_filter = convert_include_exclude_filter( + logbook.CONFIG_SCHEMA({logbook.DOMAIN: {}})[logbook.DOMAIN] + ) events = [ e - for e in (ha.Event(EVENT_HOMEASSISTANT_STOP), eventA, eventB) - if logbook._keep_event(self.hass, e, entities_filter) + for e in ( + MockLazyEventPartialState(EVENT_HOMEASSISTANT_STOP), + eventA, + eventB, + ) + if logbook._keep_event(self.hass, e, entities_filter, entity_attr_cache) ] - entries = list(logbook.humanify(self.hass, events)) + entries = list(logbook.humanify(self.hass, events, entity_attr_cache)) assert len(entries) == 2 self.assert_entry( @@ -183,46 +210,29 @@ class TestComponentLogbook(unittest.TestCase): entity_id2 = "sensor.blu" pointA = dt_util.utcnow() pointB = pointA + timedelta(minutes=logbook.GROUP_BY_MINUTES) + entity_attr_cache = logbook.EntityAttributeCache(self.hass) - eventA = self.create_state_changed_event(pointA, entity_id, 10) - eventB = self.create_state_changed_event(pointB, entity_id2, 20) - eventA.data["new_state"] = None - - entities_filter = logbook._generate_filter_from_config({}) - events = [ - e - for e in (ha.Event(EVENT_HOMEASSISTANT_STOP), eventA, eventB) - if logbook._keep_event(self.hass, e, entities_filter) - ] - entries = list(logbook.humanify(self.hass, events)) - - assert len(entries) == 2 - self.assert_entry( - entries[0], name="Home Assistant", message="stopped", domain=ha.DOMAIN - ) - self.assert_entry( - entries[1], pointB, "blu", domain="sensor", entity_id=entity_id2 - ) - - def test_exclude_events_hidden(self): - """Test if events are excluded if entity is hidden.""" - entity_id = "sensor.bla" - entity_id2 = "sensor.blu" - pointA = dt_util.utcnow() - pointB = pointA + timedelta(minutes=logbook.GROUP_BY_MINUTES) - - eventA = self.create_state_changed_event( - pointA, entity_id, 10, {ATTR_HIDDEN: "true"} + state_on = ha.State( + entity_id, "on", {"brightness": 200}, pointA, pointA + ).as_dict() + eventA = self.create_state_changed_event_from_old_new( + None, pointA, state_on, None, ) eventB = self.create_state_changed_event(pointB, entity_id2, 20) - entities_filter = logbook._generate_filter_from_config({}) + entities_filter = convert_include_exclude_filter( + logbook.CONFIG_SCHEMA({logbook.DOMAIN: {}})[logbook.DOMAIN] + ) events = [ e - for e in (ha.Event(EVENT_HOMEASSISTANT_STOP), eventA, eventB) - if logbook._keep_event(self.hass, e, entities_filter) + for e in ( + MockLazyEventPartialState(EVENT_HOMEASSISTANT_STOP), + eventA, + eventB, + ) + if logbook._keep_event(self.hass, e, entities_filter, entity_attr_cache) ] - entries = list(logbook.humanify(self.hass, events)) + entries = list(logbook.humanify(self.hass, events, entity_attr_cache)) assert len(entries) == 2 
self.assert_entry( @@ -238,6 +248,7 @@ class TestComponentLogbook(unittest.TestCase): entity_id2 = "sensor.blu" pointA = dt_util.utcnow() pointB = pointA + timedelta(minutes=logbook.GROUP_BY_MINUTES) + entity_attr_cache = logbook.EntityAttributeCache(self.hass) eventA = self.create_state_changed_event(pointA, entity_id, 10) eventB = self.create_state_changed_event(pointB, entity_id2, 20) @@ -245,18 +256,20 @@ class TestComponentLogbook(unittest.TestCase): config = logbook.CONFIG_SCHEMA( { ha.DOMAIN: {}, - logbook.DOMAIN: { - logbook.CONF_EXCLUDE: {logbook.CONF_ENTITIES: [entity_id]} - }, + logbook.DOMAIN: {logbook.CONF_EXCLUDE: {CONF_ENTITIES: [entity_id]}}, } ) - entities_filter = logbook._generate_filter_from_config(config[logbook.DOMAIN]) + entities_filter = convert_include_exclude_filter(config[logbook.DOMAIN]) events = [ e - for e in (ha.Event(EVENT_HOMEASSISTANT_STOP), eventA, eventB) - if logbook._keep_event(self.hass, e, entities_filter) + for e in ( + MockLazyEventPartialState(EVENT_HOMEASSISTANT_STOP), + eventA, + eventB, + ) + if logbook._keep_event(self.hass, e, entities_filter, entity_attr_cache) ] - entries = list(logbook.humanify(self.hass, events)) + entries = list(logbook.humanify(self.hass, events, entity_attr_cache)) assert len(entries) == 2 self.assert_entry( @@ -272,6 +285,7 @@ class TestComponentLogbook(unittest.TestCase): entity_id2 = "sensor.blu" pointA = dt_util.utcnow() pointB = pointA + timedelta(minutes=logbook.GROUP_BY_MINUTES) + entity_attr_cache = logbook.EntityAttributeCache(self.hass) eventA = self.create_state_changed_event(pointA, entity_id, 10) eventB = self.create_state_changed_event(pointB, entity_id2, 20) @@ -280,22 +294,22 @@ class TestComponentLogbook(unittest.TestCase): { ha.DOMAIN: {}, logbook.DOMAIN: { - logbook.CONF_EXCLUDE: {logbook.CONF_DOMAINS: ["switch", "alexa"]} + logbook.CONF_EXCLUDE: {CONF_DOMAINS: ["switch", "alexa"]} }, } ) - entities_filter = logbook._generate_filter_from_config(config[logbook.DOMAIN]) + entities_filter = convert_include_exclude_filter(config[logbook.DOMAIN]) events = [ e for e in ( - ha.Event(EVENT_HOMEASSISTANT_START), - ha.Event(EVENT_ALEXA_SMART_HOME), + MockLazyEventPartialState(EVENT_HOMEASSISTANT_START), + MockLazyEventPartialState(EVENT_ALEXA_SMART_HOME), eventA, eventB, ) - if logbook._keep_event(self.hass, e, entities_filter) + if logbook._keep_event(self.hass, e, entities_filter, entity_attr_cache) ] - entries = list(logbook.humanify(self.hass, events)) + entries = list(logbook.humanify(self.hass, events, entity_attr_cache)) assert len(entries) == 2 self.assert_entry( @@ -305,44 +319,52 @@ class TestComponentLogbook(unittest.TestCase): entries[1], pointB, "blu", domain="sensor", entity_id=entity_id2 ) - def test_exclude_script_events(self): - """Test if script start can be excluded by entity_id.""" - name = "My Script Rule" - domain = "script" - entity_id = "script.my_script" - entity_id2 = "script.my_script_2" + def test_exclude_events_domain_glob(self): + """Test if events are filtered if domain or glob is excluded in config.""" + entity_id = "switch.bla" entity_id2 = "sensor.blu" + entity_id3 = "sensor.excluded" + pointA = dt_util.utcnow() + pointB = pointA + timedelta(minutes=logbook.GROUP_BY_MINUTES) + pointC = pointB + timedelta(minutes=logbook.GROUP_BY_MINUTES) + entity_attr_cache = logbook.EntityAttributeCache(self.hass) - eventA = ha.Event( - logbook.EVENT_SCRIPT_STARTED, - {logbook.ATTR_NAME: name, logbook.ATTR_ENTITY_ID: entity_id}, - ) - eventB = ha.Event( - logbook.EVENT_SCRIPT_STARTED, - 
{logbook.ATTR_NAME: name, logbook.ATTR_ENTITY_ID: entity_id2}, - ) + eventA = self.create_state_changed_event(pointA, entity_id, 10) + eventB = self.create_state_changed_event(pointB, entity_id2, 20) + eventC = self.create_state_changed_event(pointC, entity_id3, 30) config = logbook.CONFIG_SCHEMA( { ha.DOMAIN: {}, logbook.DOMAIN: { - logbook.CONF_EXCLUDE: {logbook.CONF_ENTITIES: [entity_id]} + logbook.CONF_EXCLUDE: { + CONF_DOMAINS: ["switch", "alexa"], + CONF_ENTITY_GLOBS: "*.excluded", + } }, } ) - entities_filter = logbook._generate_filter_from_config(config[logbook.DOMAIN]) + entities_filter = convert_include_exclude_filter(config[logbook.DOMAIN]) events = [ e - for e in (ha.Event(EVENT_HOMEASSISTANT_STOP), eventA, eventB) - if logbook._keep_event(self.hass, e, entities_filter) + for e in ( + MockLazyEventPartialState(EVENT_HOMEASSISTANT_START), + MockLazyEventPartialState(EVENT_ALEXA_SMART_HOME), + eventA, + eventB, + eventC, + ) + if logbook._keep_event(self.hass, e, entities_filter, entity_attr_cache) ] - entries = list(logbook.humanify(self.hass, events)) + entries = list(logbook.humanify(self.hass, events, entity_attr_cache)) assert len(entries) == 2 self.assert_entry( - entries[0], name="Home Assistant", message="stopped", domain=ha.DOMAIN + entries[0], name="Home Assistant", message="started", domain=ha.DOMAIN + ) + self.assert_entry( + entries[1], pointB, "blu", domain="sensor", entity_id=entity_id2 ) - self.assert_entry(entries[1], name=name, domain=domain, entity_id=entity_id2) def test_include_events_entity(self): """Test if events are filtered if entity is included in config.""" @@ -350,6 +372,7 @@ class TestComponentLogbook(unittest.TestCase): entity_id2 = "sensor.blu" pointA = dt_util.utcnow() pointB = pointA + timedelta(minutes=logbook.GROUP_BY_MINUTES) + entity_attr_cache = logbook.EntityAttributeCache(self.hass) eventA = self.create_state_changed_event(pointA, entity_id, 10) eventB = self.create_state_changed_event(pointB, entity_id2, 20) @@ -358,17 +381,24 @@ class TestComponentLogbook(unittest.TestCase): { ha.DOMAIN: {}, logbook.DOMAIN: { - logbook.CONF_INCLUDE: {logbook.CONF_ENTITIES: [entity_id2]} + logbook.CONF_INCLUDE: { + CONF_DOMAINS: ["homeassistant"], + CONF_ENTITIES: [entity_id2], + } }, } ) - entities_filter = logbook._generate_filter_from_config(config[logbook.DOMAIN]) + entities_filter = convert_include_exclude_filter(config[logbook.DOMAIN]) events = [ e - for e in (ha.Event(EVENT_HOMEASSISTANT_STOP), eventA, eventB) - if logbook._keep_event(self.hass, e, entities_filter) + for e in ( + MockLazyEventPartialState(EVENT_HOMEASSISTANT_STOP), + eventA, + eventB, + ) + if logbook._keep_event(self.hass, e, entities_filter, entity_attr_cache) ] - entries = list(logbook.humanify(self.hass, events)) + entries = list(logbook.humanify(self.hass, events, entity_attr_cache)) assert len(entries) == 2 self.assert_entry( @@ -385,8 +415,9 @@ class TestComponentLogbook(unittest.TestCase): entity_id2 = "sensor.blu" pointA = dt_util.utcnow() pointB = pointA + timedelta(minutes=logbook.GROUP_BY_MINUTES) + entity_attr_cache = logbook.EntityAttributeCache(self.hass) - event_alexa = ha.Event( + event_alexa = MockLazyEventPartialState( EVENT_ALEXA_SMART_HOME, {"request": {"namespace": "Alexa.Discovery", "name": "Discover"}}, ) @@ -398,17 +429,24 @@ class TestComponentLogbook(unittest.TestCase): { ha.DOMAIN: {}, logbook.DOMAIN: { - logbook.CONF_INCLUDE: {logbook.CONF_DOMAINS: ["sensor", "alexa"]} + logbook.CONF_INCLUDE: { + CONF_DOMAINS: ["homeassistant", "sensor", "alexa"] + } }, 
} ) - entities_filter = logbook._generate_filter_from_config(config[logbook.DOMAIN]) + entities_filter = convert_include_exclude_filter(config[logbook.DOMAIN]) events = [ e - for e in (ha.Event(EVENT_HOMEASSISTANT_START), event_alexa, eventA, eventB,) - if logbook._keep_event(self.hass, e, entities_filter) + for e in ( + MockLazyEventPartialState(EVENT_HOMEASSISTANT_START), + event_alexa, + eventA, + eventB, + ) + if logbook._keep_event(self.hass, e, entities_filter, entity_attr_cache) ] - entries = list(logbook.humanify(self.hass, events)) + entries = list(logbook.humanify(self.hass, events, entity_attr_cache)) assert len(entries) == 3 self.assert_entry( @@ -419,6 +457,63 @@ class TestComponentLogbook(unittest.TestCase): entries[2], pointB, "blu", domain="sensor", entity_id=entity_id2 ) + def test_include_events_domain_glob(self): + """Test if events are filtered if domain or glob is included in config.""" + assert setup_component(self.hass, "alexa", {}) + entity_id = "switch.bla" + entity_id2 = "sensor.blu" + entity_id3 = "switch.included" + pointA = dt_util.utcnow() + pointB = pointA + timedelta(minutes=logbook.GROUP_BY_MINUTES) + pointC = pointB + timedelta(minutes=logbook.GROUP_BY_MINUTES) + entity_attr_cache = logbook.EntityAttributeCache(self.hass) + + event_alexa = MockLazyEventPartialState( + EVENT_ALEXA_SMART_HOME, + {"request": {"namespace": "Alexa.Discovery", "name": "Discover"}}, + ) + + eventA = self.create_state_changed_event(pointA, entity_id, 10) + eventB = self.create_state_changed_event(pointB, entity_id2, 20) + eventC = self.create_state_changed_event(pointC, entity_id3, 30) + + config = logbook.CONFIG_SCHEMA( + { + ha.DOMAIN: {}, + logbook.DOMAIN: { + logbook.CONF_INCLUDE: { + CONF_DOMAINS: ["homeassistant", "sensor", "alexa"], + CONF_ENTITY_GLOBS: ["*.included"], + } + }, + } + ) + entities_filter = convert_include_exclude_filter(config[logbook.DOMAIN]) + events = [ + e + for e in ( + MockLazyEventPartialState(EVENT_HOMEASSISTANT_START), + event_alexa, + eventA, + eventB, + eventC, + ) + if logbook._keep_event(self.hass, e, entities_filter, entity_attr_cache) + ] + entries = list(logbook.humanify(self.hass, events, entity_attr_cache)) + + assert len(entries) == 4 + self.assert_entry( + entries[0], name="Home Assistant", message="started", domain=ha.DOMAIN + ) + self.assert_entry(entries[1], name="Amazon Alexa", domain="alexa") + self.assert_entry( + entries[2], pointB, "blu", domain="sensor", entity_id=entity_id2 + ) + self.assert_entry( + entries[3], pointC, "included", domain="switch", entity_id=entity_id3 + ) + def test_include_exclude_events(self): """Test if events are filtered if include and exclude is configured.""" entity_id = "switch.bla" @@ -426,6 +521,7 @@ class TestComponentLogbook(unittest.TestCase): entity_id3 = "sensor.bli" pointA = dt_util.utcnow() pointB = pointA + timedelta(minutes=logbook.GROUP_BY_MINUTES) + entity_attr_cache = logbook.EntityAttributeCache(self.hass) eventA1 = self.create_state_changed_event(pointA, entity_id, 10) eventA2 = self.create_state_changed_event(pointA, entity_id2, 10) @@ -438,30 +534,30 @@ class TestComponentLogbook(unittest.TestCase): ha.DOMAIN: {}, logbook.DOMAIN: { logbook.CONF_INCLUDE: { - logbook.CONF_DOMAINS: ["sensor"], - logbook.CONF_ENTITIES: ["switch.bla"], + CONF_DOMAINS: ["sensor", "homeassistant"], + CONF_ENTITIES: ["switch.bla"], }, logbook.CONF_EXCLUDE: { - logbook.CONF_DOMAINS: ["switch"], - logbook.CONF_ENTITIES: ["sensor.bli"], + CONF_DOMAINS: ["switch"], + CONF_ENTITIES: ["sensor.bli"], }, }, } ) - 
entities_filter = logbook._generate_filter_from_config(config[logbook.DOMAIN]) + entities_filter = convert_include_exclude_filter(config[logbook.DOMAIN]) events = [ e for e in ( - ha.Event(EVENT_HOMEASSISTANT_START), + MockLazyEventPartialState(EVENT_HOMEASSISTANT_START), eventA1, eventA2, eventA3, eventB1, eventB2, ) - if logbook._keep_event(self.hass, e, entities_filter) + if logbook._keep_event(self.hass, e, entities_filter, entity_attr_cache) ] - entries = list(logbook.humanify(self.hass, events)) + entries = list(logbook.humanify(self.hass, events, entity_attr_cache)) assert len(entries) == 5 self.assert_entry( @@ -480,26 +576,81 @@ class TestComponentLogbook(unittest.TestCase): entries[4], pointB, "blu", domain="sensor", entity_id=entity_id2 ) - def test_exclude_auto_groups(self): - """Test if events of automatically generated groups are filtered.""" + def test_include_exclude_events_with_glob_filters(self): + """Test if events are filtered if include and exclude is configured.""" entity_id = "switch.bla" - entity_id2 = "group.switches" + entity_id2 = "sensor.blu" + entity_id3 = "sensor.bli" + entity_id4 = "light.included" + entity_id5 = "switch.included" + entity_id6 = "sensor.excluded" pointA = dt_util.utcnow() + pointB = pointA + timedelta(minutes=logbook.GROUP_BY_MINUTES) + pointC = pointB + timedelta(minutes=logbook.GROUP_BY_MINUTES) + entity_attr_cache = logbook.EntityAttributeCache(self.hass) - eventA = self.create_state_changed_event(pointA, entity_id, 10) - eventB = self.create_state_changed_event(pointA, entity_id2, 20, {"auto": True}) + eventA1 = self.create_state_changed_event(pointA, entity_id, 10) + eventA2 = self.create_state_changed_event(pointA, entity_id2, 10) + eventA3 = self.create_state_changed_event(pointA, entity_id3, 10) + eventB1 = self.create_state_changed_event(pointB, entity_id, 20) + eventB2 = self.create_state_changed_event(pointB, entity_id2, 20) + eventC1 = self.create_state_changed_event(pointC, entity_id4, 30) + eventC2 = self.create_state_changed_event(pointC, entity_id5, 30) + eventC3 = self.create_state_changed_event(pointC, entity_id6, 30) - entities_filter = logbook._generate_filter_from_config({}) + config = logbook.CONFIG_SCHEMA( + { + ha.DOMAIN: {}, + logbook.DOMAIN: { + logbook.CONF_INCLUDE: { + CONF_DOMAINS: ["sensor", "homeassistant"], + CONF_ENTITIES: ["switch.bla"], + CONF_ENTITY_GLOBS: ["*.included"], + }, + logbook.CONF_EXCLUDE: { + CONF_DOMAINS: ["switch"], + CONF_ENTITY_GLOBS: ["*.excluded"], + CONF_ENTITIES: ["sensor.bli"], + }, + }, + } + ) + entities_filter = convert_include_exclude_filter(config[logbook.DOMAIN]) events = [ e - for e in (eventA, eventB) - if logbook._keep_event(self.hass, e, entities_filter) + for e in ( + MockLazyEventPartialState(EVENT_HOMEASSISTANT_START), + eventA1, + eventA2, + eventA3, + eventB1, + eventB2, + eventC1, + eventC2, + eventC3, + ) + if logbook._keep_event(self.hass, e, entities_filter, entity_attr_cache) ] - entries = list(logbook.humanify(self.hass, events)) + entries = list(logbook.humanify(self.hass, events, entity_attr_cache)) - assert len(entries) == 1 + assert len(entries) == 6 self.assert_entry( - entries[0], pointA, "bla", domain="switch", entity_id=entity_id + entries[0], name="Home Assistant", message="started", domain=ha.DOMAIN + ) + self.assert_entry( + entries[1], pointA, "bla", domain="switch", entity_id=entity_id + ) + self.assert_entry( + entries[2], pointA, "blu", domain="sensor", entity_id=entity_id2 + ) + self.assert_entry( + entries[3], pointB, "bla", domain="switch", 
entity_id=entity_id + ) + self.assert_entry( + entries[4], pointB, "blu", domain="sensor", entity_id=entity_id2 + ) + self.assert_entry( + entries[5], pointC, "included", domain="light", entity_id=entity_id4 ) def test_exclude_attribute_changes(self): @@ -507,6 +658,7 @@ class TestComponentLogbook(unittest.TestCase): pointA = dt_util.utcnow() pointB = pointA + timedelta(minutes=1) pointC = pointB + timedelta(minutes=1) + entity_attr_cache = logbook.EntityAttributeCache(self.hass) state_off = ha.State("light.kitchen", "off", {}, pointA, pointA).as_dict() state_100 = ha.State( @@ -516,32 +668,22 @@ class TestComponentLogbook(unittest.TestCase): "light.kitchen", "on", {"brightness": 200}, pointB, pointC ).as_dict() - eventA = ha.Event( - EVENT_STATE_CHANGED, - { - "entity_id": "light.kitchen", - "old_state": state_off, - "new_state": state_100, - }, - time_fired=pointB, + eventA = self.create_state_changed_event_from_old_new( + "light.kitchen", pointB, state_off, state_100 ) - eventB = ha.Event( - EVENT_STATE_CHANGED, - { - "entity_id": "light.kitchen", - "old_state": state_100, - "new_state": state_200, - }, - time_fired=pointC, + eventB = self.create_state_changed_event_from_old_new( + "light.kitchen", pointC, state_100, state_200 ) - entities_filter = logbook._generate_filter_from_config({}) + entities_filter = convert_include_exclude_filter( + logbook.CONFIG_SCHEMA({logbook.DOMAIN: {}})[logbook.DOMAIN] + ) events = [ e for e in (eventA, eventB) - if logbook._keep_event(self.hass, e, entities_filter) + if logbook._keep_event(self.hass, e, entities_filter, entity_attr_cache) ] - entries = list(logbook.humanify(self.hass, events)) + entries = list(logbook.humanify(self.hass, events, entity_attr_cache)) assert len(entries) == 1 self.assert_entry( @@ -553,14 +695,16 @@ class TestComponentLogbook(unittest.TestCase): Events that are occurring in the same minute. """ + entity_attr_cache = logbook.EntityAttributeCache(self.hass) entries = list( logbook.humanify( self.hass, ( - ha.Event(EVENT_HOMEASSISTANT_STOP), - ha.Event(EVENT_HOMEASSISTANT_START), + MockLazyEventPartialState(EVENT_HOMEASSISTANT_STOP), + MockLazyEventPartialState(EVENT_HOMEASSISTANT_START), ), - ) + entity_attr_cache, + ), ) assert len(entries) == 1 @@ -572,14 +716,16 @@ class TestComponentLogbook(unittest.TestCase): """Test if HA start is not filtered or converted into a restart.""" entity_id = "switch.bla" pointA = dt_util.utcnow() + entity_attr_cache = logbook.EntityAttributeCache(self.hass) entries = list( logbook.humanify( self.hass, ( - ha.Event(EVENT_HOMEASSISTANT_START), + MockLazyEventPartialState(EVENT_HOMEASSISTANT_START), self.create_state_changed_event(pointA, entity_id, 10), ), + entity_attr_cache, ) ) @@ -591,545 +737,623 @@ class TestComponentLogbook(unittest.TestCase): entries[1], pointA, "bla", domain="switch", entity_id=entity_id ) - def test_entry_message_from_state_device(self): + def test_entry_message_from_event_device(self): """Test if logbook message is correctly created for switches. Especially test if the special handling for turn on/off events is done. 
""" pointA = dt_util.utcnow() - + entity_attr_cache = logbook.EntityAttributeCache(self.hass) # message for a device state change eventA = self.create_state_changed_event(pointA, "switch.bla", 10) - to_state = ha.State.from_dict(eventA.data.get("new_state")) - message = logbook._entry_message_from_state(to_state.domain, to_state) + message = logbook._entry_message_from_event( + self.hass, eventA.entity_id, eventA.domain, eventA, entity_attr_cache + ) assert message == "changed to 10" # message for a switch turned on eventA = self.create_state_changed_event(pointA, "switch.bla", STATE_ON) - to_state = ha.State.from_dict(eventA.data.get("new_state")) - message = logbook._entry_message_from_state(to_state.domain, to_state) + message = logbook._entry_message_from_event( + self.hass, eventA.entity_id, eventA.domain, eventA, entity_attr_cache + ) assert message == "turned on" # message for a switch turned off eventA = self.create_state_changed_event(pointA, "switch.bla", STATE_OFF) - to_state = ha.State.from_dict(eventA.data.get("new_state")) - message = logbook._entry_message_from_state(to_state.domain, to_state) + message = logbook._entry_message_from_event( + self.hass, eventA.entity_id, eventA.domain, eventA, entity_attr_cache + ) assert message == "turned off" - def test_entry_message_from_state_device_tracker(self): + def test_entry_message_from_event_device_tracker(self): """Test if logbook message is correctly created for device tracker.""" pointA = dt_util.utcnow() + entity_attr_cache = logbook.EntityAttributeCache(self.hass) # message for a device tracker "not home" state eventA = self.create_state_changed_event( pointA, "device_tracker.john", STATE_NOT_HOME ) - to_state = ha.State.from_dict(eventA.data.get("new_state")) - message = logbook._entry_message_from_state(to_state.domain, to_state) + message = logbook._entry_message_from_event( + self.hass, eventA.entity_id, eventA.domain, eventA, entity_attr_cache + ) assert message == "is away" # message for a device tracker "home" state eventA = self.create_state_changed_event(pointA, "device_tracker.john", "work") - to_state = ha.State.from_dict(eventA.data.get("new_state")) - message = logbook._entry_message_from_state(to_state.domain, to_state) + message = logbook._entry_message_from_event( + self.hass, eventA.entity_id, eventA.domain, eventA, entity_attr_cache + ) assert message == "is at work" - def test_entry_message_from_state_person(self): + def test_entry_message_from_event_person(self): """Test if logbook message is correctly created for a person.""" pointA = dt_util.utcnow() + entity_attr_cache = logbook.EntityAttributeCache(self.hass) # message for a device tracker "not home" state eventA = self.create_state_changed_event(pointA, "person.john", STATE_NOT_HOME) - to_state = ha.State.from_dict(eventA.data.get("new_state")) - message = logbook._entry_message_from_state(to_state.domain, to_state) + message = logbook._entry_message_from_event( + self.hass, eventA.entity_id, eventA.domain, eventA, entity_attr_cache + ) assert message == "is away" # message for a device tracker "home" state eventA = self.create_state_changed_event(pointA, "person.john", "work") - to_state = ha.State.from_dict(eventA.data.get("new_state")) - message = logbook._entry_message_from_state(to_state.domain, to_state) + message = logbook._entry_message_from_event( + self.hass, eventA.entity_id, eventA.domain, eventA, entity_attr_cache + ) assert message == "is at work" - def test_entry_message_from_state_sun(self): + def 
test_entry_message_from_event_sun(self): """Test if logbook message is correctly created for sun.""" pointA = dt_util.utcnow() + entity_attr_cache = logbook.EntityAttributeCache(self.hass) # message for a sun rise eventA = self.create_state_changed_event( pointA, "sun.sun", sun.STATE_ABOVE_HORIZON ) - to_state = ha.State.from_dict(eventA.data.get("new_state")) - message = logbook._entry_message_from_state(to_state.domain, to_state) + message = logbook._entry_message_from_event( + self.hass, eventA.entity_id, eventA.domain, eventA, entity_attr_cache + ) assert message == "has risen" # message for a sun set eventA = self.create_state_changed_event( pointA, "sun.sun", sun.STATE_BELOW_HORIZON ) - to_state = ha.State.from_dict(eventA.data.get("new_state")) - message = logbook._entry_message_from_state(to_state.domain, to_state) + message = logbook._entry_message_from_event( + self.hass, eventA.entity_id, eventA.domain, eventA, entity_attr_cache + ) assert message == "has set" - def test_entry_message_from_state_binary_sensor_battery(self): + def test_entry_message_from_event_binary_sensor_battery(self): """Test if logbook message is correctly created for a binary_sensor.""" pointA = dt_util.utcnow() attributes = {"device_class": "battery"} + entity_attr_cache = logbook.EntityAttributeCache(self.hass) # message for a binary_sensor battery "low" state eventA = self.create_state_changed_event( pointA, "binary_sensor.battery", STATE_ON, attributes ) - to_state = ha.State.from_dict(eventA.data.get("new_state")) - message = logbook._entry_message_from_state(to_state.domain, to_state) + message = logbook._entry_message_from_event( + self.hass, eventA.entity_id, eventA.domain, eventA, entity_attr_cache + ) assert message == "is low" # message for a binary_sensor battery "normal" state eventA = self.create_state_changed_event( pointA, "binary_sensor.battery", STATE_OFF, attributes ) - to_state = ha.State.from_dict(eventA.data.get("new_state")) - message = logbook._entry_message_from_state(to_state.domain, to_state) + message = logbook._entry_message_from_event( + self.hass, eventA.entity_id, eventA.domain, eventA, entity_attr_cache + ) assert message == "is normal" - def test_entry_message_from_state_binary_sensor_connectivity(self): + def test_entry_message_from_event_binary_sensor_connectivity(self): """Test if logbook message is correctly created for a binary_sensor.""" pointA = dt_util.utcnow() attributes = {"device_class": "connectivity"} + entity_attr_cache = logbook.EntityAttributeCache(self.hass) # message for a binary_sensor connectivity "connected" state eventA = self.create_state_changed_event( pointA, "binary_sensor.connectivity", STATE_ON, attributes ) - to_state = ha.State.from_dict(eventA.data.get("new_state")) - message = logbook._entry_message_from_state(to_state.domain, to_state) + message = logbook._entry_message_from_event( + self.hass, eventA.entity_id, eventA.domain, eventA, entity_attr_cache + ) assert message == "is connected" # message for a binary_sensor connectivity "disconnected" state eventA = self.create_state_changed_event( pointA, "binary_sensor.connectivity", STATE_OFF, attributes ) - to_state = ha.State.from_dict(eventA.data.get("new_state")) - message = logbook._entry_message_from_state(to_state.domain, to_state) + message = logbook._entry_message_from_event( + self.hass, eventA.entity_id, eventA.domain, eventA, entity_attr_cache + ) assert message == "is disconnected" - def test_entry_message_from_state_binary_sensor_door(self): + def 
test_entry_message_from_event_binary_sensor_door(self): """Test if logbook message is correctly created for a binary_sensor.""" pointA = dt_util.utcnow() attributes = {"device_class": "door"} + entity_attr_cache = logbook.EntityAttributeCache(self.hass) # message for a binary_sensor door "open" state eventA = self.create_state_changed_event( pointA, "binary_sensor.door", STATE_ON, attributes ) - to_state = ha.State.from_dict(eventA.data.get("new_state")) - message = logbook._entry_message_from_state(to_state.domain, to_state) + message = logbook._entry_message_from_event( + self.hass, eventA.entity_id, eventA.domain, eventA, entity_attr_cache + ) assert message == "is opened" # message for a binary_sensor door "closed" state eventA = self.create_state_changed_event( pointA, "binary_sensor.door", STATE_OFF, attributes ) - to_state = ha.State.from_dict(eventA.data.get("new_state")) - message = logbook._entry_message_from_state(to_state.domain, to_state) + message = logbook._entry_message_from_event( + self.hass, eventA.entity_id, eventA.domain, eventA, entity_attr_cache + ) assert message == "is closed" - def test_entry_message_from_state_binary_sensor_garage_door(self): + def test_entry_message_from_event_binary_sensor_garage_door(self): """Test if logbook message is correctly created for a binary_sensor.""" pointA = dt_util.utcnow() attributes = {"device_class": "garage_door"} + entity_attr_cache = logbook.EntityAttributeCache(self.hass) # message for a binary_sensor garage_door "open" state eventA = self.create_state_changed_event( pointA, "binary_sensor.garage_door", STATE_ON, attributes ) - to_state = ha.State.from_dict(eventA.data.get("new_state")) - message = logbook._entry_message_from_state(to_state.domain, to_state) + message = logbook._entry_message_from_event( + self.hass, eventA.entity_id, eventA.domain, eventA, entity_attr_cache + ) assert message == "is opened" # message for a binary_sensor garage_door "closed" state eventA = self.create_state_changed_event( pointA, "binary_sensor.garage_door", STATE_OFF, attributes ) - to_state = ha.State.from_dict(eventA.data.get("new_state")) - message = logbook._entry_message_from_state(to_state.domain, to_state) + message = logbook._entry_message_from_event( + self.hass, eventA.entity_id, eventA.domain, eventA, entity_attr_cache + ) assert message == "is closed" - def test_entry_message_from_state_binary_sensor_opening(self): + def test_entry_message_from_event_binary_sensor_opening(self): """Test if logbook message is correctly created for a binary_sensor.""" pointA = dt_util.utcnow() attributes = {"device_class": "opening"} + entity_attr_cache = logbook.EntityAttributeCache(self.hass) # message for a binary_sensor opening "open" state eventA = self.create_state_changed_event( pointA, "binary_sensor.opening", STATE_ON, attributes ) - to_state = ha.State.from_dict(eventA.data.get("new_state")) - message = logbook._entry_message_from_state(to_state.domain, to_state) + message = logbook._entry_message_from_event( + self.hass, eventA.entity_id, eventA.domain, eventA, entity_attr_cache + ) assert message == "is opened" # message for a binary_sensor opening "closed" state eventA = self.create_state_changed_event( pointA, "binary_sensor.opening", STATE_OFF, attributes ) - to_state = ha.State.from_dict(eventA.data.get("new_state")) - message = logbook._entry_message_from_state(to_state.domain, to_state) + message = logbook._entry_message_from_event( + self.hass, eventA.entity_id, eventA.domain, eventA, entity_attr_cache + ) assert message == "is 
closed" - def test_entry_message_from_state_binary_sensor_window(self): + def test_entry_message_from_event_binary_sensor_window(self): """Test if logbook message is correctly created for a binary_sensor.""" pointA = dt_util.utcnow() attributes = {"device_class": "window"} + entity_attr_cache = logbook.EntityAttributeCache(self.hass) # message for a binary_sensor window "open" state eventA = self.create_state_changed_event( pointA, "binary_sensor.window", STATE_ON, attributes ) - to_state = ha.State.from_dict(eventA.data.get("new_state")) - message = logbook._entry_message_from_state(to_state.domain, to_state) + message = logbook._entry_message_from_event( + self.hass, eventA.entity_id, eventA.domain, eventA, entity_attr_cache + ) assert message == "is opened" # message for a binary_sensor window "closed" state eventA = self.create_state_changed_event( pointA, "binary_sensor.window", STATE_OFF, attributes ) - to_state = ha.State.from_dict(eventA.data.get("new_state")) - message = logbook._entry_message_from_state(to_state.domain, to_state) + message = logbook._entry_message_from_event( + self.hass, eventA.entity_id, eventA.domain, eventA, entity_attr_cache + ) assert message == "is closed" - def test_entry_message_from_state_binary_sensor_lock(self): + def test_entry_message_from_event_binary_sensor_lock(self): """Test if logbook message is correctly created for a binary_sensor.""" pointA = dt_util.utcnow() attributes = {"device_class": "lock"} + entity_attr_cache = logbook.EntityAttributeCache(self.hass) # message for a binary_sensor lock "unlocked" state eventA = self.create_state_changed_event( pointA, "binary_sensor.lock", STATE_ON, attributes ) - to_state = ha.State.from_dict(eventA.data.get("new_state")) - message = logbook._entry_message_from_state(to_state.domain, to_state) + message = logbook._entry_message_from_event( + self.hass, eventA.entity_id, eventA.domain, eventA, entity_attr_cache + ) assert message == "is unlocked" # message for a binary_sensor lock "locked" state eventA = self.create_state_changed_event( pointA, "binary_sensor.lock", STATE_OFF, attributes ) - to_state = ha.State.from_dict(eventA.data.get("new_state")) - message = logbook._entry_message_from_state(to_state.domain, to_state) + message = logbook._entry_message_from_event( + self.hass, eventA.entity_id, eventA.domain, eventA, entity_attr_cache + ) assert message == "is locked" - def test_entry_message_from_state_binary_sensor_plug(self): + def test_entry_message_from_event_binary_sensor_plug(self): """Test if logbook message is correctly created for a binary_sensor.""" pointA = dt_util.utcnow() attributes = {"device_class": "plug"} + entity_attr_cache = logbook.EntityAttributeCache(self.hass) # message for a binary_sensor plug "unpluged" state eventA = self.create_state_changed_event( pointA, "binary_sensor.plug", STATE_ON, attributes ) - to_state = ha.State.from_dict(eventA.data.get("new_state")) - message = logbook._entry_message_from_state(to_state.domain, to_state) + message = logbook._entry_message_from_event( + self.hass, eventA.entity_id, eventA.domain, eventA, entity_attr_cache + ) assert message == "is plugged in" # message for a binary_sensor plug "pluged" state eventA = self.create_state_changed_event( pointA, "binary_sensor.plug", STATE_OFF, attributes ) - to_state = ha.State.from_dict(eventA.data.get("new_state")) - message = logbook._entry_message_from_state(to_state.domain, to_state) + message = logbook._entry_message_from_event( + self.hass, eventA.entity_id, eventA.domain, eventA, 
entity_attr_cache + ) assert message == "is unplugged" - def test_entry_message_from_state_binary_sensor_presence(self): + def test_entry_message_from_event_binary_sensor_presence(self): """Test if logbook message is correctly created for a binary_sensor.""" pointA = dt_util.utcnow() attributes = {"device_class": "presence"} + entity_attr_cache = logbook.EntityAttributeCache(self.hass) # message for a binary_sensor presence "home" state eventA = self.create_state_changed_event( pointA, "binary_sensor.presence", STATE_ON, attributes ) - to_state = ha.State.from_dict(eventA.data.get("new_state")) - message = logbook._entry_message_from_state(to_state.domain, to_state) + message = logbook._entry_message_from_event( + self.hass, eventA.entity_id, eventA.domain, eventA, entity_attr_cache + ) assert message == "is at home" # message for a binary_sensor presence "away" state eventA = self.create_state_changed_event( pointA, "binary_sensor.presence", STATE_OFF, attributes ) - to_state = ha.State.from_dict(eventA.data.get("new_state")) - message = logbook._entry_message_from_state(to_state.domain, to_state) + message = logbook._entry_message_from_event( + self.hass, eventA.entity_id, eventA.domain, eventA, entity_attr_cache + ) assert message == "is away" - def test_entry_message_from_state_binary_sensor_safety(self): + def test_entry_message_from_event_binary_sensor_safety(self): """Test if logbook message is correctly created for a binary_sensor.""" pointA = dt_util.utcnow() attributes = {"device_class": "safety"} + entity_attr_cache = logbook.EntityAttributeCache(self.hass) # message for a binary_sensor safety "unsafe" state eventA = self.create_state_changed_event( pointA, "binary_sensor.safety", STATE_ON, attributes ) - to_state = ha.State.from_dict(eventA.data.get("new_state")) - message = logbook._entry_message_from_state(to_state.domain, to_state) + message = logbook._entry_message_from_event( + self.hass, eventA.entity_id, eventA.domain, eventA, entity_attr_cache + ) assert message == "is unsafe" # message for a binary_sensor safety "safe" state eventA = self.create_state_changed_event( pointA, "binary_sensor.safety", STATE_OFF, attributes ) - to_state = ha.State.from_dict(eventA.data.get("new_state")) - message = logbook._entry_message_from_state(to_state.domain, to_state) + message = logbook._entry_message_from_event( + self.hass, eventA.entity_id, eventA.domain, eventA, entity_attr_cache + ) assert message == "is safe" - def test_entry_message_from_state_binary_sensor_cold(self): + def test_entry_message_from_event_binary_sensor_cold(self): """Test if logbook message is correctly created for a binary_sensor.""" pointA = dt_util.utcnow() attributes = {"device_class": "cold"} + entity_attr_cache = logbook.EntityAttributeCache(self.hass) # message for a binary_sensor cold "detected" state eventA = self.create_state_changed_event( pointA, "binary_sensor.cold", STATE_ON, attributes ) - to_state = ha.State.from_dict(eventA.data.get("new_state")) - message = logbook._entry_message_from_state(to_state.domain, to_state) + message = logbook._entry_message_from_event( + self.hass, eventA.entity_id, eventA.domain, eventA, entity_attr_cache + ) assert message == "detected cold" # message for a binary_sensori cold "cleared" state eventA = self.create_state_changed_event( pointA, "binary_sensor.cold", STATE_OFF, attributes ) - to_state = ha.State.from_dict(eventA.data.get("new_state")) - message = logbook._entry_message_from_state(to_state.domain, to_state) + message = 
logbook._entry_message_from_event( + self.hass, eventA.entity_id, eventA.domain, eventA, entity_attr_cache + ) assert message == "cleared (no cold detected)" - def test_entry_message_from_state_binary_sensor_gas(self): + def test_entry_message_from_event_binary_sensor_gas(self): """Test if logbook message is correctly created for a binary_sensor.""" pointA = dt_util.utcnow() attributes = {"device_class": "gas"} + entity_attr_cache = logbook.EntityAttributeCache(self.hass) # message for a binary_sensor gas "detected" state eventA = self.create_state_changed_event( pointA, "binary_sensor.gas", STATE_ON, attributes ) - to_state = ha.State.from_dict(eventA.data.get("new_state")) - message = logbook._entry_message_from_state(to_state.domain, to_state) + message = logbook._entry_message_from_event( + self.hass, eventA.entity_id, eventA.domain, eventA, entity_attr_cache + ) assert message == "detected gas" # message for a binary_sensori gas "cleared" state eventA = self.create_state_changed_event( pointA, "binary_sensor.gas", STATE_OFF, attributes ) - to_state = ha.State.from_dict(eventA.data.get("new_state")) - message = logbook._entry_message_from_state(to_state.domain, to_state) + message = logbook._entry_message_from_event( + self.hass, eventA.entity_id, eventA.domain, eventA, entity_attr_cache + ) assert message == "cleared (no gas detected)" - def test_entry_message_from_state_binary_sensor_heat(self): + def test_entry_message_from_event_binary_sensor_heat(self): """Test if logbook message is correctly created for a binary_sensor.""" pointA = dt_util.utcnow() attributes = {"device_class": "heat"} + entity_attr_cache = logbook.EntityAttributeCache(self.hass) # message for a binary_sensor heat "detected" state eventA = self.create_state_changed_event( pointA, "binary_sensor.heat", STATE_ON, attributes ) - to_state = ha.State.from_dict(eventA.data.get("new_state")) - message = logbook._entry_message_from_state(to_state.domain, to_state) + message = logbook._entry_message_from_event( + self.hass, eventA.entity_id, eventA.domain, eventA, entity_attr_cache + ) assert message == "detected heat" # message for a binary_sensori heat "cleared" state eventA = self.create_state_changed_event( pointA, "binary_sensor.heat", STATE_OFF, attributes ) - to_state = ha.State.from_dict(eventA.data.get("new_state")) - message = logbook._entry_message_from_state(to_state.domain, to_state) + message = logbook._entry_message_from_event( + self.hass, eventA.entity_id, eventA.domain, eventA, entity_attr_cache + ) assert message == "cleared (no heat detected)" - def test_entry_message_from_state_binary_sensor_light(self): + def test_entry_message_from_event_binary_sensor_light(self): """Test if logbook message is correctly created for a binary_sensor.""" pointA = dt_util.utcnow() attributes = {"device_class": "light"} + entity_attr_cache = logbook.EntityAttributeCache(self.hass) # message for a binary_sensor light "detected" state eventA = self.create_state_changed_event( pointA, "binary_sensor.light", STATE_ON, attributes ) - to_state = ha.State.from_dict(eventA.data.get("new_state")) - message = logbook._entry_message_from_state(to_state.domain, to_state) + message = logbook._entry_message_from_event( + self.hass, eventA.entity_id, eventA.domain, eventA, entity_attr_cache + ) assert message == "detected light" # message for a binary_sensori light "cleared" state eventA = self.create_state_changed_event( pointA, "binary_sensor.light", STATE_OFF, attributes ) - to_state = 
ha.State.from_dict(eventA.data.get("new_state")) - message = logbook._entry_message_from_state(to_state.domain, to_state) + message = logbook._entry_message_from_event( + self.hass, eventA.entity_id, eventA.domain, eventA, entity_attr_cache + ) assert message == "cleared (no light detected)" - def test_entry_message_from_state_binary_sensor_moisture(self): + def test_entry_message_from_event_binary_sensor_moisture(self): """Test if logbook message is correctly created for a binary_sensor.""" pointA = dt_util.utcnow() attributes = {"device_class": "moisture"} + entity_attr_cache = logbook.EntityAttributeCache(self.hass) # message for a binary_sensor moisture "detected" state eventA = self.create_state_changed_event( pointA, "binary_sensor.moisture", STATE_ON, attributes ) - to_state = ha.State.from_dict(eventA.data.get("new_state")) - message = logbook._entry_message_from_state(to_state.domain, to_state) + message = logbook._entry_message_from_event( + self.hass, eventA.entity_id, eventA.domain, eventA, entity_attr_cache + ) assert message == "detected moisture" # message for a binary_sensori moisture "cleared" state eventA = self.create_state_changed_event( pointA, "binary_sensor.moisture", STATE_OFF, attributes ) - to_state = ha.State.from_dict(eventA.data.get("new_state")) - message = logbook._entry_message_from_state(to_state.domain, to_state) + message = logbook._entry_message_from_event( + self.hass, eventA.entity_id, eventA.domain, eventA, entity_attr_cache + ) assert message == "cleared (no moisture detected)" - def test_entry_message_from_state_binary_sensor_motion(self): + def test_entry_message_from_event_binary_sensor_motion(self): """Test if logbook message is correctly created for a binary_sensor.""" pointA = dt_util.utcnow() attributes = {"device_class": "motion"} + entity_attr_cache = logbook.EntityAttributeCache(self.hass) # message for a binary_sensor motion "detected" state eventA = self.create_state_changed_event( pointA, "binary_sensor.motion", STATE_ON, attributes ) - to_state = ha.State.from_dict(eventA.data.get("new_state")) - message = logbook._entry_message_from_state(to_state.domain, to_state) + message = logbook._entry_message_from_event( + self.hass, eventA.entity_id, eventA.domain, eventA, entity_attr_cache + ) assert message == "detected motion" # message for a binary_sensori motion "cleared" state eventA = self.create_state_changed_event( pointA, "binary_sensor.motion", STATE_OFF, attributes ) - to_state = ha.State.from_dict(eventA.data.get("new_state")) - message = logbook._entry_message_from_state(to_state.domain, to_state) + message = logbook._entry_message_from_event( + self.hass, eventA.entity_id, eventA.domain, eventA, entity_attr_cache + ) assert message == "cleared (no motion detected)" - def test_entry_message_from_state_binary_sensor_occupancy(self): + def test_entry_message_from_event_binary_sensor_occupancy(self): """Test if logbook message is correctly created for a binary_sensor.""" pointA = dt_util.utcnow() attributes = {"device_class": "occupancy"} + entity_attr_cache = logbook.EntityAttributeCache(self.hass) # message for a binary_sensor occupancy "detected" state eventA = self.create_state_changed_event( pointA, "binary_sensor.occupancy", STATE_ON, attributes ) - to_state = ha.State.from_dict(eventA.data.get("new_state")) - message = logbook._entry_message_from_state(to_state.domain, to_state) + message = logbook._entry_message_from_event( + self.hass, eventA.entity_id, eventA.domain, eventA, entity_attr_cache + ) assert message == "detected 
occupancy" # message for a binary_sensori occupancy "cleared" state eventA = self.create_state_changed_event( pointA, "binary_sensor.occupancy", STATE_OFF, attributes ) - to_state = ha.State.from_dict(eventA.data.get("new_state")) - message = logbook._entry_message_from_state(to_state.domain, to_state) + message = logbook._entry_message_from_event( + self.hass, eventA.entity_id, eventA.domain, eventA, entity_attr_cache + ) assert message == "cleared (no occupancy detected)" - def test_entry_message_from_state_binary_sensor_power(self): + def test_entry_message_from_event_binary_sensor_power(self): """Test if logbook message is correctly created for a binary_sensor.""" pointA = dt_util.utcnow() attributes = {"device_class": "power"} + entity_attr_cache = logbook.EntityAttributeCache(self.hass) # message for a binary_sensor power "detected" state eventA = self.create_state_changed_event( pointA, "binary_sensor.power", STATE_ON, attributes ) - to_state = ha.State.from_dict(eventA.data.get("new_state")) - message = logbook._entry_message_from_state(to_state.domain, to_state) + message = logbook._entry_message_from_event( + self.hass, eventA.entity_id, eventA.domain, eventA, entity_attr_cache + ) assert message == "detected power" # message for a binary_sensori power "cleared" state eventA = self.create_state_changed_event( pointA, "binary_sensor.power", STATE_OFF, attributes ) - to_state = ha.State.from_dict(eventA.data.get("new_state")) - message = logbook._entry_message_from_state(to_state.domain, to_state) + message = logbook._entry_message_from_event( + self.hass, eventA.entity_id, eventA.domain, eventA, entity_attr_cache + ) assert message == "cleared (no power detected)" - def test_entry_message_from_state_binary_sensor_problem(self): + def test_entry_message_from_event_binary_sensor_problem(self): """Test if logbook message is correctly created for a binary_sensor.""" pointA = dt_util.utcnow() attributes = {"device_class": "problem"} + entity_attr_cache = logbook.EntityAttributeCache(self.hass) # message for a binary_sensor problem "detected" state eventA = self.create_state_changed_event( pointA, "binary_sensor.problem", STATE_ON, attributes ) - to_state = ha.State.from_dict(eventA.data.get("new_state")) - message = logbook._entry_message_from_state(to_state.domain, to_state) + message = logbook._entry_message_from_event( + self.hass, eventA.entity_id, eventA.domain, eventA, entity_attr_cache + ) assert message == "detected problem" # message for a binary_sensori problem "cleared" state eventA = self.create_state_changed_event( pointA, "binary_sensor.problem", STATE_OFF, attributes ) - to_state = ha.State.from_dict(eventA.data.get("new_state")) - message = logbook._entry_message_from_state(to_state.domain, to_state) + message = logbook._entry_message_from_event( + self.hass, eventA.entity_id, eventA.domain, eventA, entity_attr_cache + ) assert message == "cleared (no problem detected)" - def test_entry_message_from_state_binary_sensor_smoke(self): + def test_entry_message_from_event_binary_sensor_smoke(self): """Test if logbook message is correctly created for a binary_sensor.""" pointA = dt_util.utcnow() attributes = {"device_class": "smoke"} + entity_attr_cache = logbook.EntityAttributeCache(self.hass) # message for a binary_sensor smoke "detected" state eventA = self.create_state_changed_event( pointA, "binary_sensor.smoke", STATE_ON, attributes ) - to_state = ha.State.from_dict(eventA.data.get("new_state")) - message = logbook._entry_message_from_state(to_state.domain, to_state) + 
message = logbook._entry_message_from_event( + self.hass, eventA.entity_id, eventA.domain, eventA, entity_attr_cache + ) assert message == "detected smoke" # message for a binary_sensori smoke "cleared" state eventA = self.create_state_changed_event( pointA, "binary_sensor.smoke", STATE_OFF, attributes ) - to_state = ha.State.from_dict(eventA.data.get("new_state")) - message = logbook._entry_message_from_state(to_state.domain, to_state) + message = logbook._entry_message_from_event( + self.hass, eventA.entity_id, eventA.domain, eventA, entity_attr_cache + ) assert message == "cleared (no smoke detected)" - def test_entry_message_from_state_binary_sensor_sound(self): + def test_entry_message_from_event_binary_sensor_sound(self): """Test if logbook message is correctly created for a binary_sensor.""" pointA = dt_util.utcnow() attributes = {"device_class": "sound"} + entity_attr_cache = logbook.EntityAttributeCache(self.hass) # message for a binary_sensor sound "detected" state eventA = self.create_state_changed_event( pointA, "binary_sensor.sound", STATE_ON, attributes ) - to_state = ha.State.from_dict(eventA.data.get("new_state")) - message = logbook._entry_message_from_state(to_state.domain, to_state) + message = logbook._entry_message_from_event( + self.hass, eventA.entity_id, eventA.domain, eventA, entity_attr_cache + ) assert message == "detected sound" # message for a binary_sensori sound "cleared" state eventA = self.create_state_changed_event( pointA, "binary_sensor.sound", STATE_OFF, attributes ) - to_state = ha.State.from_dict(eventA.data.get("new_state")) - message = logbook._entry_message_from_state(to_state.domain, to_state) + message = logbook._entry_message_from_event( + self.hass, eventA.entity_id, eventA.domain, eventA, entity_attr_cache + ) assert message == "cleared (no sound detected)" - def test_entry_message_from_state_binary_sensor_vibration(self): + def test_entry_message_from_event_binary_sensor_vibration(self): """Test if logbook message is correctly created for a binary_sensor.""" pointA = dt_util.utcnow() attributes = {"device_class": "vibration"} + entity_attr_cache = logbook.EntityAttributeCache(self.hass) # message for a binary_sensor vibration "detected" state eventA = self.create_state_changed_event( pointA, "binary_sensor.vibration", STATE_ON, attributes ) - to_state = ha.State.from_dict(eventA.data.get("new_state")) - message = logbook._entry_message_from_state(to_state.domain, to_state) + message = logbook._entry_message_from_event( + self.hass, eventA.entity_id, eventA.domain, eventA, entity_attr_cache + ) assert message == "detected vibration" # message for a binary_sensori vibration "cleared" state eventA = self.create_state_changed_event( pointA, "binary_sensor.vibration", STATE_OFF, attributes ) - to_state = ha.State.from_dict(eventA.data.get("new_state")) - message = logbook._entry_message_from_state(to_state.domain, to_state) + message = logbook._entry_message_from_event( + self.hass, eventA.entity_id, eventA.domain, eventA, entity_attr_cache + ) assert message == "cleared (no vibration detected)" def test_process_custom_logbook_entries(self): @@ -1137,12 +1361,13 @@ class TestComponentLogbook(unittest.TestCase): name = "Nice name" message = "has a custom entry" entity_id = "sun.sun" + entity_attr_cache = logbook.EntityAttributeCache(self.hass) entries = list( logbook.humanify( self.hass, ( - ha.Event( + MockLazyEventPartialState( logbook.EVENT_LOGBOOK_ENTRY, { logbook.ATTR_NAME: name, @@ -1151,6 +1376,7 @@ class 
TestComponentLogbook(unittest.TestCase): }, ), ), + entity_attr_cache, ) ) @@ -1159,12 +1385,13 @@ class TestComponentLogbook(unittest.TestCase): entries[0], name=name, message=message, domain="sun", entity_id=entity_id ) + # pylint: disable=no-self-use def assert_entry( self, entry, when=None, name=None, message=None, domain=None, entity_id=None ): """Assert an entry is what is expected.""" if when: - assert when == entry["when"] + assert when.isoformat() == entry["when"] if name: assert name == entry["name"] @@ -1195,12 +1422,49 @@ class TestComponentLogbook(unittest.TestCase): entity_id, state, attributes, last_changed, last_updated ).as_dict() - return ha.Event( - EVENT_STATE_CHANGED, - {"entity_id": entity_id, "old_state": old_state, "new_state": new_state}, - time_fired=event_time_fired, + return self.create_state_changed_event_from_old_new( + entity_id, event_time_fired, old_state, new_state ) + # pylint: disable=no-self-use + def create_state_changed_event_from_old_new( + self, entity_id, event_time_fired, old_state, new_state + ): + """Create a state changed event from an old and new state.""" + attributes = {} + if new_state is not None: + attributes = new_state.get("attributes") + attributes_json = json.dumps(attributes, cls=JSONEncoder) + row = collections.namedtuple( + "Row", + [ + "event_type", + "event_data", + "time_fired", + "context_id", + "context_user_id", + "state", + "entity_id", + "domain", + "attributes", + "state_id", + "old_state_id", + ], + ) + + row.event_type = EVENT_STATE_CHANGED + row.event_data = "{}" + row.attributes = attributes_json + row.time_fired = event_time_fired + row.state = new_state and new_state.get("state") + row.entity_id = entity_id + row.domain = entity_id and ha.split_entity_id(entity_id)[0] + row.context_id = None + row.context_user_id = None + row.old_state_id = old_state and 1 + row.state_id = new_state and 1 + return logbook.LazyEventPartialState(row) + async def test_logbook_view(hass, hass_client): """Test the logbook view.""" @@ -1237,36 +1501,36 @@ async def test_logbook_view_period_entity(hass, hass_client): # Test today entries without filters response = await client.get(f"/api/logbook/{start_date.isoformat()}") assert response.status == 200 - json = await response.json() - assert len(json) == 2 - assert json[0]["entity_id"] == entity_id_test - assert json[1]["entity_id"] == entity_id_second + response_json = await response.json() + assert len(response_json) == 2 + assert response_json[0]["entity_id"] == entity_id_test + assert response_json[1]["entity_id"] == entity_id_second # Test today entries with filter by period response = await client.get(f"/api/logbook/{start_date.isoformat()}?period=1") assert response.status == 200 - json = await response.json() - assert len(json) == 2 - assert json[0]["entity_id"] == entity_id_test - assert json[1]["entity_id"] == entity_id_second + response_json = await response.json() + assert len(response_json) == 2 + assert response_json[0]["entity_id"] == entity_id_test + assert response_json[1]["entity_id"] == entity_id_second # Test today entries with filter by entity_id response = await client.get( f"/api/logbook/{start_date.isoformat()}?entity=switch.test" ) assert response.status == 200 - json = await response.json() - assert len(json) == 1 - assert json[0]["entity_id"] == entity_id_test + response_json = await response.json() + assert len(response_json) == 1 + assert response_json[0]["entity_id"] == entity_id_test # Test entries for 3 days with filter by entity_id response = await client.get(
f"/api/logbook/{start_date.isoformat()}?period=3&entity=switch.test" ) assert response.status == 200 - json = await response.json() - assert len(json) == 1 - assert json[0]["entity_id"] == entity_id_test + response_json = await response.json() + assert len(response_json) == 1 + assert response_json[0]["entity_id"] == entity_id_test # Tomorrow time 00:00:00 start = (dt_util.utcnow() + timedelta(days=1)).date() @@ -1275,59 +1539,46 @@ async def test_logbook_view_period_entity(hass, hass_client): # Test tomorrow entries without filters response = await client.get(f"/api/logbook/{start_date.isoformat()}") assert response.status == 200 - json = await response.json() - assert len(json) == 0 + response_json = await response.json() + assert len(response_json) == 0 # Test tomorrow entries with filter by entity_id response = await client.get( f"/api/logbook/{start_date.isoformat()}?entity=switch.test" ) assert response.status == 200 - json = await response.json() - assert len(json) == 0 + response_json = await response.json() + assert len(response_json) == 0 # Test entries from tomorrow to 3 days ago with filter by entity_id response = await client.get( f"/api/logbook/{start_date.isoformat()}?period=3&entity=switch.test" ) assert response.status == 200 - json = await response.json() - assert len(json) == 1 - assert json[0]["entity_id"] == entity_id_test - - -async def test_humanify_script_started_event(hass): - """Test humanifying Script Run event.""" - event1, event2 = list( - logbook.humanify( - hass, - [ - ha.Event( - EVENT_SCRIPT_STARTED, - {ATTR_ENTITY_ID: "script.hello", ATTR_NAME: "Hello Script"}, - ), - ha.Event( - EVENT_SCRIPT_STARTED, - {ATTR_ENTITY_ID: "script.bye", ATTR_NAME: "Bye Script"}, - ), - ], - ) - ) - - assert event1["name"] == "Hello Script" - assert event1["domain"] == "script" - assert event1["message"] == "started" - assert event1["entity_id"] == "script.hello" - - assert event2["name"] == "Bye Script" - assert event2["domain"] == "script" - assert event2["message"] == "started" - assert event2["entity_id"] == "script.bye" + response_json = await response.json() + assert len(response_json) == 1 + assert response_json[0]["entity_id"] == entity_id_test async def test_logbook_describe_event(hass, hass_client): """Test teaching logbook about a new event.""" await hass.async_add_executor_job(init_recorder_component, hass) + + def _describe(event): + """Describe an event.""" + return {"name": "Test Name", "message": "tested a message"} + + hass.config.components.add("fake_integration") + mock_platform( + hass, + "fake_integration.logbook", + Mock( + async_describe_events=lambda hass, async_describe_event: async_describe_event( + "test_domain", "some_event", _describe + ) + ), + ) + assert await async_setup_component(hass, "logbook", {}) with patch( "homeassistant.util.dt.utcnow", @@ -1339,12 +1590,6 @@ async def test_logbook_describe_event(hass, hass_client): hass.data[recorder.DATA_INSTANCE].block_till_done ) - def _describe(event): - """Describe an event.""" - return {"name": "Test Name", "message": "tested a message"} - - hass.components.logbook.async_describe_event("test_domain", "some_event", _describe) - client = await hass_client() response = await client.get("/api/logbook") results = await response.json() @@ -1362,6 +1607,26 @@ async def test_exclude_described_event(hass, hass_client): entity_id2 = "automation.included_rule" entity_id3 = "sensor.excluded_domain" + def _describe(event): + """Describe an event.""" + return { + "name": "Test Name", + "message": "tested a 
message", + "entity_id": event.data.get(ATTR_ENTITY_ID), + } + + def async_describe_events(hass, async_describe_event): + """Mock to describe events.""" + async_describe_event("automation", "some_automation_event", _describe) + async_describe_event("sensor", "some_event", _describe) + + hass.config.components.add("fake_integration") + mock_platform( + hass, + "fake_integration.logbook", + Mock(async_describe_events=async_describe_events), + ) + await hass.async_add_executor_job(init_recorder_component, hass) assert await async_setup_component( hass, @@ -1369,8 +1634,8 @@ async def test_exclude_described_event(hass, hass_client): { logbook.DOMAIN: { logbook.CONF_EXCLUDE: { - logbook.CONF_DOMAINS: ["sensor"], - logbook.CONF_ENTITIES: [entity_id], + CONF_DOMAINS: ["sensor"], + CONF_ENTITIES: [entity_id], } } }, @@ -1396,19 +1661,6 @@ async def test_exclude_described_event(hass, hass_client): hass.data[recorder.DATA_INSTANCE].block_till_done ) - def _describe(event): - """Describe an event.""" - return { - "name": "Test Name", - "message": "tested a message", - "entity_id": event.data.get(ATTR_ENTITY_ID), - } - - hass.components.logbook.async_describe_event( - "automation", "some_automation_event", _describe - ) - hass.components.logbook.async_describe_event("sensor", "some_event", _describe) - client = await hass_client() response = await client.get("/api/logbook") results = await response.json() @@ -1418,3 +1670,156 @@ async def test_exclude_described_event(hass, hass_client): assert event["message"] == "tested a message" assert event["domain"] == "automation" assert event["entity_id"] == "automation.included_rule" + + +async def test_logbook_view_end_time_entity(hass, hass_client): + """Test the logbook view with end_time and entity.""" + await hass.async_add_executor_job(init_recorder_component, hass) + await async_setup_component(hass, "logbook", {}) + await hass.async_add_job(hass.data[recorder.DATA_INSTANCE].block_till_done) + + entity_id_test = "switch.test" + hass.states.async_set(entity_id_test, STATE_OFF) + hass.states.async_set(entity_id_test, STATE_ON) + entity_id_second = "switch.second" + hass.states.async_set(entity_id_second, STATE_OFF) + hass.states.async_set(entity_id_second, STATE_ON) + await hass.async_add_job(partial(trigger_db_commit, hass)) + await hass.async_block_till_done() + await hass.async_add_job(hass.data[recorder.DATA_INSTANCE].block_till_done) + + client = await hass_client() + + # Today time 00:00:00 + start = dt_util.utcnow().date() + start_date = datetime(start.year, start.month, start.day) + + # Test today entries with filter by end_time + end_time = start + timedelta(hours=24) + response = await client.get( + f"/api/logbook/{start_date.isoformat()}?end_time={end_time}" + ) + assert response.status == 200 + response_json = await response.json() + assert len(response_json) == 2 + assert response_json[0]["entity_id"] == entity_id_test + assert response_json[1]["entity_id"] == entity_id_second + + # Test entries for 3 days with filter by entity_id + end_time = start + timedelta(hours=72) + response = await client.get( + f"/api/logbook/{start_date.isoformat()}?end_time={end_time}&entity=switch.test" + ) + assert response.status == 200 + response_json = await response.json() + assert len(response_json) == 1 + assert response_json[0]["entity_id"] == entity_id_test + + # Tomorrow time 00:00:00 + start = dt_util.utcnow() + start_date = datetime(start.year, start.month, start.day) + + # Test entries from today to 3 days with filter by entity_id + end_time = 
start_date + timedelta(hours=72) + response = await client.get( + f"/api/logbook/{start_date.isoformat()}?end_time={end_time}&entity=switch.test" + ) + assert response.status == 200 + response_json = await response.json() + assert len(response_json) == 1 + assert response_json[0]["entity_id"] == entity_id_test + + +async def test_logbook_entity_filter_with_automations(hass, hass_client): + """Test the logbook view with end_time and entity with automations and scripts.""" + await hass.async_add_executor_job(init_recorder_component, hass) + await async_setup_component(hass, "logbook", {}) + await async_setup_component(hass, "automation", {}) + await async_setup_component(hass, "script", {}) + + await hass.async_add_job(hass.data[recorder.DATA_INSTANCE].block_till_done) + + entity_id_test = "alarm_control_panel.area_001" + hass.states.async_set(entity_id_test, STATE_OFF) + hass.states.async_set(entity_id_test, STATE_ON) + entity_id_second = "alarm_control_panel.area_002" + hass.states.async_set(entity_id_second, STATE_OFF) + hass.states.async_set(entity_id_second, STATE_ON) + + hass.bus.async_fire( + EVENT_AUTOMATION_TRIGGERED, + {ATTR_NAME: "Mock automation", ATTR_ENTITY_ID: "automation.mock_automation"}, + ) + hass.bus.async_fire( + EVENT_SCRIPT_STARTED, + {ATTR_NAME: "Mock script", ATTR_ENTITY_ID: "script.mock_script"}, + ) + hass.bus.async_fire(EVENT_HOMEASSISTANT_START) + + await hass.async_add_job(partial(trigger_db_commit, hass)) + await hass.async_block_till_done() + await hass.async_add_job(hass.data[recorder.DATA_INSTANCE].block_till_done) + + client = await hass_client() + + # Today time 00:00:00 + start = dt_util.utcnow().date() + start_date = datetime(start.year, start.month, start.day) + + # Test today entries with filter by end_time + end_time = start + timedelta(hours=24) + response = await client.get( + f"/api/logbook/{start_date.isoformat()}?end_time={end_time}" + ) + assert response.status == 200 + json_dict = await response.json() + + assert len(json_dict) == 5 + assert json_dict[0]["entity_id"] == entity_id_test + assert json_dict[1]["entity_id"] == entity_id_second + assert json_dict[2]["entity_id"] == "automation.mock_automation" + assert json_dict[3]["entity_id"] == "script.mock_script" + assert json_dict[4]["domain"] == "homeassistant" + + # Test entries for 3 days with filter by entity_id + end_time = start + timedelta(hours=72) + response = await client.get( + f"/api/logbook/{start_date.isoformat()}?end_time={end_time}&entity=alarm_control_panel.area_001" + ) + assert response.status == 200 + json_dict = await response.json() + assert len(json_dict) == 1 + assert json_dict[0]["entity_id"] == entity_id_test + + # Tomorrow time 00:00:00 + start = dt_util.utcnow() + start_date = datetime(start.year, start.month, start.day) + + # Test entries from today to 3 days with filter by entity_id + end_time = start_date + timedelta(hours=72) + response = await client.get( + f"/api/logbook/{start_date.isoformat()}?end_time={end_time}&entity=alarm_control_panel.area_002" + ) + assert response.status == 200 + json_dict = await response.json() + assert len(json_dict) == 1 + assert json_dict[0]["entity_id"] == entity_id_second + + +class MockLazyEventPartialState(ha.Event): + """Minimal mock of a Lazy event.""" + + @property + def time_fired_minute(self): + """Minute the event was fired.""" + return self.time_fired.minute + + @property + def context_user_id(self): + """Context user id of event.""" + return self.context.user_id + + @property + def time_fired_isoformat(self): + """Time 
event was fired in utc isoformat.""" + return process_timestamp_to_utc_isoformat(self.time_fired) diff --git a/tests/components/logentries/test_init.py b/tests/components/logentries/test_init.py index f850a7dd62b..2d7b7ee6c25 100644 --- a/tests/components/logentries/test_init.py +++ b/tests/components/logentries/test_init.py @@ -16,8 +16,9 @@ class TestLogentries(unittest.TestCase): def setUp(self): # pylint: disable=invalid-name """Set up things to be run when tests are started.""" self.hass = get_test_home_assistant() + self.addCleanup(self.tear_down_cleanup) - def tearDown(self): # pylint: disable=invalid-name + def tear_down_cleanup(self): """Stop everything that was started.""" self.hass.stop() diff --git a/tests/components/london_air/test_sensor.py b/tests/components/london_air/test_sensor.py index f596750ea7d..066fc357a50 100644 --- a/tests/components/london_air/test_sensor.py +++ b/tests/components/london_air/test_sensor.py @@ -18,10 +18,7 @@ class TestLondonAirSensor(unittest.TestCase): """Initialize values for this testcase class.""" self.hass = get_test_home_assistant() self.config = VALID_CONFIG - - def tearDown(self): - """Stop everything that was started.""" - self.hass.stop() + self.addCleanup(self.hass.stop) @requests_mock.Mocker() def test_setup(self, mock_req): diff --git a/tests/components/manual_mqtt/test_alarm_control_panel.py b/tests/components/manual_mqtt/test_alarm_control_panel.py index a23382d9f78..aa318d61b84 100644 --- a/tests/components/manual_mqtt/test_alarm_control_panel.py +++ b/tests/components/manual_mqtt/test_alarm_control_panel.py @@ -1,6 +1,5 @@ """The tests for the manual_mqtt Alarm Control Panel component.""" from datetime import timedelta -import unittest from homeassistant.components import alarm_control_panel from homeassistant.const import ( @@ -11,1771 +10,1698 @@ from homeassistant.const import ( STATE_ALARM_PENDING, STATE_ALARM_TRIGGERED, ) -from homeassistant.setup import setup_component +from homeassistant.setup import async_setup_component import homeassistant.util.dt as dt_util -from tests.async_mock import Mock, patch +from tests.async_mock import patch from tests.common import ( assert_setup_component, - fire_mqtt_message, - fire_time_changed, - get_test_home_assistant, - mock_mqtt_component, + async_fire_mqtt_message, + async_fire_time_changed, ) from tests.components.alarm_control_panel import common CODE = "HELLO_CODE" -class TestAlarmControlPanelManualMqtt(unittest.TestCase): - """Test the manual_mqtt alarm module.""" - - def setUp(self): # pylint: disable=invalid-name - """Set up things to be run when tests are started.""" - self.hass = get_test_home_assistant() - self.hass.config_entries._async_schedule_save = Mock() - self.mock_publish = mock_mqtt_component(self.hass) - - def tearDown(self): # pylint: disable=invalid-name - """Stop down everything that was started.""" - self.hass.stop() - - def test_fail_setup_without_state_topic(self): - """Test for failing with no state topic.""" - with assert_setup_component(0) as config: - assert setup_component( - self.hass, - alarm_control_panel.DOMAIN, - { - alarm_control_panel.DOMAIN: { - "platform": "mqtt_alarm", - "command_topic": "alarm/command", - } - }, - ) - assert not config[alarm_control_panel.DOMAIN] - - def test_fail_setup_without_command_topic(self): - """Test failing with no command topic.""" - with assert_setup_component(0): - assert setup_component( - self.hass, - alarm_control_panel.DOMAIN, - { - alarm_control_panel.DOMAIN: { - "platform": "mqtt_alarm", - "state_topic": 
"alarm/state", - } - }, - ) - - def test_arm_home_no_pending(self): - """Test arm home method.""" - assert setup_component( - self.hass, +async def test_fail_setup_without_state_topic(hass, mqtt_mock): + """Test for failing with no state topic.""" + with assert_setup_component(0) as config: + assert await async_setup_component( + hass, alarm_control_panel.DOMAIN, { - "alarm_control_panel": { - "platform": "manual_mqtt", - "name": "test", - "code": CODE, - "pending_time": 0, - "disarm_after_trigger": False, + alarm_control_panel.DOMAIN: { + "platform": "mqtt_alarm", "command_topic": "alarm/command", + } + }, + ) + assert not config[alarm_control_panel.DOMAIN] + + +async def test_fail_setup_without_command_topic(hass, mqtt_mock): + """Test failing with no command topic.""" + with assert_setup_component(0): + assert await async_setup_component( + hass, + alarm_control_panel.DOMAIN, + { + alarm_control_panel.DOMAIN: { + "platform": "mqtt_alarm", "state_topic": "alarm/state", } }, ) - self.hass.block_till_done() - - entity_id = "alarm_control_panel.test" - - assert STATE_ALARM_DISARMED == self.hass.states.get(entity_id).state - - common.alarm_arm_home(self.hass, CODE) - self.hass.block_till_done() - - assert STATE_ALARM_ARMED_HOME == self.hass.states.get(entity_id).state - - def test_arm_home_no_pending_when_code_not_req(self): - """Test arm home method.""" - assert setup_component( - self.hass, - alarm_control_panel.DOMAIN, - { - "alarm_control_panel": { - "platform": "manual_mqtt", - "name": "test", - "code": CODE, - "code_arm_required": False, - "pending_time": 0, - "disarm_after_trigger": False, - "command_topic": "alarm/command", - "state_topic": "alarm/state", - } - }, - ) - self.hass.block_till_done() - - entity_id = "alarm_control_panel.test" - - assert STATE_ALARM_DISARMED == self.hass.states.get(entity_id).state - - common.alarm_arm_home(self.hass, 0) - self.hass.block_till_done() - - assert STATE_ALARM_ARMED_HOME == self.hass.states.get(entity_id).state - - def test_arm_home_with_pending(self): - """Test arm home method.""" - assert setup_component( - self.hass, - alarm_control_panel.DOMAIN, - { - "alarm_control_panel": { - "platform": "manual_mqtt", - "name": "test", - "code": CODE, - "pending_time": 1, - "disarm_after_trigger": False, - "command_topic": "alarm/command", - "state_topic": "alarm/state", - } - }, - ) - self.hass.block_till_done() - - entity_id = "alarm_control_panel.test" - - assert STATE_ALARM_DISARMED == self.hass.states.get(entity_id).state - - common.alarm_arm_home(self.hass, CODE, entity_id) - self.hass.block_till_done() - - assert STATE_ALARM_PENDING == self.hass.states.get(entity_id).state - - state = self.hass.states.get(entity_id) - assert state.attributes["post_pending_state"] == STATE_ALARM_ARMED_HOME - - future = dt_util.utcnow() + timedelta(seconds=1) - with patch( - ( - "homeassistant.components.manual_mqtt.alarm_control_panel." 
- "dt_util.utcnow" - ), - return_value=future, - ): - fire_time_changed(self.hass, future) - self.hass.block_till_done() - - assert STATE_ALARM_ARMED_HOME == self.hass.states.get(entity_id).state - - def test_arm_home_with_invalid_code(self): - """Attempt to arm home without a valid code.""" - assert setup_component( - self.hass, - alarm_control_panel.DOMAIN, - { - "alarm_control_panel": { - "platform": "manual_mqtt", - "name": "test", - "code": CODE, - "pending_time": 1, - "disarm_after_trigger": False, - "command_topic": "alarm/command", - "state_topic": "alarm/state", - } - }, - ) - self.hass.block_till_done() - - entity_id = "alarm_control_panel.test" - - assert STATE_ALARM_DISARMED == self.hass.states.get(entity_id).state - - common.alarm_arm_home(self.hass, f"{CODE}2") - self.hass.block_till_done() - - assert STATE_ALARM_DISARMED == self.hass.states.get(entity_id).state - - def test_arm_away_no_pending(self): - """Test arm home method.""" - assert setup_component( - self.hass, - alarm_control_panel.DOMAIN, - { - "alarm_control_panel": { - "platform": "manual_mqtt", - "name": "test", - "code": CODE, - "pending_time": 0, - "disarm_after_trigger": False, - "command_topic": "alarm/command", - "state_topic": "alarm/state", - } - }, - ) - self.hass.block_till_done() - - entity_id = "alarm_control_panel.test" - - assert STATE_ALARM_DISARMED == self.hass.states.get(entity_id).state - - common.alarm_arm_away(self.hass, CODE, entity_id) - self.hass.block_till_done() - - assert STATE_ALARM_ARMED_AWAY == self.hass.states.get(entity_id).state - - def test_arm_away_no_pending_when_code_not_req(self): - """Test arm home method.""" - assert setup_component( - self.hass, - alarm_control_panel.DOMAIN, - { - "alarm_control_panel": { - "platform": "manual_mqtt", - "name": "test", - "code_arm_required": False, - "code": CODE, - "pending_time": 0, - "disarm_after_trigger": False, - "command_topic": "alarm/command", - "state_topic": "alarm/state", - } - }, - ) - self.hass.block_till_done() - - entity_id = "alarm_control_panel.test" - - assert STATE_ALARM_DISARMED == self.hass.states.get(entity_id).state - - common.alarm_arm_away(self.hass, 0, entity_id) - self.hass.block_till_done() - - assert STATE_ALARM_ARMED_AWAY == self.hass.states.get(entity_id).state - - def test_arm_home_with_template_code(self): - """Attempt to arm with a template-based code.""" - assert setup_component( - self.hass, - alarm_control_panel.DOMAIN, - { - "alarm_control_panel": { - "platform": "manual_mqtt", - "name": "test", - "code_template": '{{ "abc" }}', - "pending_time": 0, - "disarm_after_trigger": False, - "command_topic": "alarm/command", - "state_topic": "alarm/state", - } - }, - ) - self.hass.block_till_done() - - entity_id = "alarm_control_panel.test" - - assert STATE_ALARM_DISARMED == self.hass.states.get(entity_id).state - - common.alarm_arm_home(self.hass, "abc") - self.hass.block_till_done() - - state = self.hass.states.get(entity_id) - assert STATE_ALARM_ARMED_HOME == state.state - - def test_arm_away_with_pending(self): - """Test arm home method.""" - assert setup_component( - self.hass, - alarm_control_panel.DOMAIN, - { - "alarm_control_panel": { - "platform": "manual_mqtt", - "name": "test", - "code": CODE, - "pending_time": 1, - "disarm_after_trigger": False, - "command_topic": "alarm/command", - "state_topic": "alarm/state", - } - }, - ) - self.hass.block_till_done() - - entity_id = "alarm_control_panel.test" - - assert STATE_ALARM_DISARMED == self.hass.states.get(entity_id).state - - 
common.alarm_arm_away(self.hass, CODE) - self.hass.block_till_done() - - assert STATE_ALARM_PENDING == self.hass.states.get(entity_id).state - - state = self.hass.states.get(entity_id) - assert state.attributes["post_pending_state"] == STATE_ALARM_ARMED_AWAY - - future = dt_util.utcnow() + timedelta(seconds=1) - with patch( - ( - "homeassistant.components.manual_mqtt.alarm_control_panel." - "dt_util.utcnow" - ), - return_value=future, - ): - fire_time_changed(self.hass, future) - self.hass.block_till_done() - - assert STATE_ALARM_ARMED_AWAY == self.hass.states.get(entity_id).state - - def test_arm_away_with_invalid_code(self): - """Attempt to arm away without a valid code.""" - assert setup_component( - self.hass, - alarm_control_panel.DOMAIN, - { - "alarm_control_panel": { - "platform": "manual_mqtt", - "name": "test", - "code": CODE, - "pending_time": 1, - "disarm_after_trigger": False, - "command_topic": "alarm/command", - "state_topic": "alarm/state", - } - }, - ) - self.hass.block_till_done() - - entity_id = "alarm_control_panel.test" - - assert STATE_ALARM_DISARMED == self.hass.states.get(entity_id).state - - common.alarm_arm_away(self.hass, f"{CODE}2") - self.hass.block_till_done() - - assert STATE_ALARM_DISARMED == self.hass.states.get(entity_id).state - - def test_arm_night_no_pending(self): - """Test arm night method.""" - assert setup_component( - self.hass, - alarm_control_panel.DOMAIN, - { - "alarm_control_panel": { - "platform": "manual_mqtt", - "name": "test", - "code": CODE, - "pending_time": 0, - "disarm_after_trigger": False, - "command_topic": "alarm/command", - "state_topic": "alarm/state", - } - }, - ) - self.hass.block_till_done() - - entity_id = "alarm_control_panel.test" - - assert STATE_ALARM_DISARMED == self.hass.states.get(entity_id).state - - common.alarm_arm_night(self.hass, CODE, entity_id) - self.hass.block_till_done() - - assert STATE_ALARM_ARMED_NIGHT == self.hass.states.get(entity_id).state - - def test_arm_night_no_pending_when_code_not_req(self): - """Test arm night method.""" - assert setup_component( - self.hass, - alarm_control_panel.DOMAIN, - { - "alarm_control_panel": { - "platform": "manual_mqtt", - "name": "test", - "code_arm_required": False, - "code": CODE, - "pending_time": 0, - "disarm_after_trigger": False, - "command_topic": "alarm/command", - "state_topic": "alarm/state", - } - }, - ) - self.hass.block_till_done() - - entity_id = "alarm_control_panel.test" - - assert STATE_ALARM_DISARMED == self.hass.states.get(entity_id).state - - common.alarm_arm_night(self.hass, 0, entity_id) - self.hass.block_till_done() - - assert STATE_ALARM_ARMED_NIGHT == self.hass.states.get(entity_id).state - - def test_arm_night_with_pending(self): - """Test arm night method.""" - assert setup_component( - self.hass, - alarm_control_panel.DOMAIN, - { - "alarm_control_panel": { - "platform": "manual_mqtt", - "name": "test", - "code": CODE, - "pending_time": 1, - "disarm_after_trigger": False, - "command_topic": "alarm/command", - "state_topic": "alarm/state", - } - }, - ) - self.hass.block_till_done() - - entity_id = "alarm_control_panel.test" - - assert STATE_ALARM_DISARMED == self.hass.states.get(entity_id).state - - common.alarm_arm_night(self.hass, CODE) - self.hass.block_till_done() - - assert STATE_ALARM_PENDING == self.hass.states.get(entity_id).state - - state = self.hass.states.get(entity_id) - assert state.attributes["post_pending_state"] == STATE_ALARM_ARMED_NIGHT - - future = dt_util.utcnow() + timedelta(seconds=1) - with patch( - ( - 
"homeassistant.components.manual_mqtt.alarm_control_panel." - "dt_util.utcnow" - ), - return_value=future, - ): - fire_time_changed(self.hass, future) - self.hass.block_till_done() - - assert STATE_ALARM_ARMED_NIGHT == self.hass.states.get(entity_id).state - - # Do not go to the pending state when updating to the same state - common.alarm_arm_night(self.hass, CODE, entity_id) - self.hass.block_till_done() - - assert STATE_ALARM_ARMED_NIGHT == self.hass.states.get(entity_id).state - - def test_arm_night_with_invalid_code(self): - """Attempt to arm night without a valid code.""" - assert setup_component( - self.hass, - alarm_control_panel.DOMAIN, - { - "alarm_control_panel": { - "platform": "manual_mqtt", - "name": "test", - "code": CODE, - "pending_time": 1, - "disarm_after_trigger": False, - "command_topic": "alarm/command", - "state_topic": "alarm/state", - } - }, - ) - self.hass.block_till_done() - - entity_id = "alarm_control_panel.test" - - assert STATE_ALARM_DISARMED == self.hass.states.get(entity_id).state - - common.alarm_arm_night(self.hass, f"{CODE}2") - self.hass.block_till_done() - - assert STATE_ALARM_DISARMED == self.hass.states.get(entity_id).state - - def test_trigger_no_pending(self): - """Test triggering when no pending submitted method.""" - assert setup_component( - self.hass, - alarm_control_panel.DOMAIN, - { - "alarm_control_panel": { - "platform": "manual_mqtt", - "name": "test", - "trigger_time": 1, - "disarm_after_trigger": False, - "command_topic": "alarm/command", - "state_topic": "alarm/state", - } - }, - ) - self.hass.block_till_done() - - entity_id = "alarm_control_panel.test" - - assert STATE_ALARM_DISARMED == self.hass.states.get(entity_id).state - - common.alarm_trigger(self.hass, entity_id=entity_id) - self.hass.block_till_done() - - assert STATE_ALARM_PENDING == self.hass.states.get(entity_id).state - - future = dt_util.utcnow() + timedelta(seconds=60) - with patch( - ( - "homeassistant.components.manual_mqtt.alarm_control_panel." 
- "dt_util.utcnow" - ), - return_value=future, - ): - fire_time_changed(self.hass, future) - self.hass.block_till_done() - - assert STATE_ALARM_TRIGGERED == self.hass.states.get(entity_id).state - - def test_trigger_with_delay(self): - """Test trigger method and switch from pending to triggered.""" - assert setup_component( - self.hass, - alarm_control_panel.DOMAIN, - { - "alarm_control_panel": { - "platform": "manual_mqtt", - "name": "test", - "code": CODE, - "delay_time": 1, - "pending_time": 0, - "disarm_after_trigger": False, - "command_topic": "alarm/command", - "state_topic": "alarm/state", - } - }, - ) - self.hass.block_till_done() - - entity_id = "alarm_control_panel.test" - - assert STATE_ALARM_DISARMED == self.hass.states.get(entity_id).state - - common.alarm_arm_away(self.hass, CODE) - self.hass.block_till_done() - - assert STATE_ALARM_ARMED_AWAY == self.hass.states.get(entity_id).state - - common.alarm_trigger(self.hass, entity_id=entity_id) - self.hass.block_till_done() - - state = self.hass.states.get(entity_id) + + +async def test_arm_home_no_pending(hass, mqtt_mock): + """Test arm home method.""" + assert await async_setup_component( + hass, + alarm_control_panel.DOMAIN, + { + "alarm_control_panel": { + "platform": "manual_mqtt", + "name": "test", + "code": CODE, + "pending_time": 0, + "disarm_after_trigger": False, + "command_topic": "alarm/command", + "state_topic": "alarm/state", + } + }, + ) + await hass.async_block_till_done() + + entity_id = "alarm_control_panel.test" + + assert STATE_ALARM_DISARMED == hass.states.get(entity_id).state + + await common.async_alarm_arm_home(hass, CODE) + await hass.async_block_till_done() + + assert STATE_ALARM_ARMED_HOME == hass.states.get(entity_id).state + + +async def test_arm_home_no_pending_when_code_not_req(hass, mqtt_mock): + """Test arm home method.""" + assert await async_setup_component( + hass, + alarm_control_panel.DOMAIN, + { + "alarm_control_panel": { + "platform": "manual_mqtt", + "name": "test", + "code": CODE, + "code_arm_required": False, + "pending_time": 0, + "disarm_after_trigger": False, + "command_topic": "alarm/command", + "state_topic": "alarm/state", + } + }, + ) + await hass.async_block_till_done() + + entity_id = "alarm_control_panel.test" + + assert STATE_ALARM_DISARMED == hass.states.get(entity_id).state + + await common.async_alarm_arm_home(hass, 0) + await hass.async_block_till_done() + + assert STATE_ALARM_ARMED_HOME == hass.states.get(entity_id).state + + +async def test_arm_home_with_pending(hass, mqtt_mock): + """Test arm home method.""" + assert await async_setup_component( + hass, + alarm_control_panel.DOMAIN, + { + "alarm_control_panel": { + "platform": "manual_mqtt", + "name": "test", + "code": CODE, + "pending_time": 1, + "disarm_after_trigger": False, + "command_topic": "alarm/command", + "state_topic": "alarm/state", + } + }, + ) + await hass.async_block_till_done() + + entity_id = "alarm_control_panel.test" + + assert STATE_ALARM_DISARMED == hass.states.get(entity_id).state + + await common.async_alarm_arm_home(hass, CODE, entity_id) + await hass.async_block_till_done() + + assert STATE_ALARM_PENDING == hass.states.get(entity_id).state + + state = hass.states.get(entity_id) + assert state.attributes["post_pending_state"] == STATE_ALARM_ARMED_HOME + + future = dt_util.utcnow() + timedelta(seconds=1) + with patch( + ("homeassistant.components.manual_mqtt.alarm_control_panel." 
"dt_util.utcnow"), + return_value=future, + ): + async_fire_time_changed(hass, future) + await hass.async_block_till_done() + + assert STATE_ALARM_ARMED_HOME == hass.states.get(entity_id).state + + +async def test_arm_home_with_invalid_code(hass, mqtt_mock): + """Attempt to arm home without a valid code.""" + assert await async_setup_component( + hass, + alarm_control_panel.DOMAIN, + { + "alarm_control_panel": { + "platform": "manual_mqtt", + "name": "test", + "code": CODE, + "pending_time": 1, + "disarm_after_trigger": False, + "command_topic": "alarm/command", + "state_topic": "alarm/state", + } + }, + ) + await hass.async_block_till_done() + + entity_id = "alarm_control_panel.test" + + assert STATE_ALARM_DISARMED == hass.states.get(entity_id).state + + await common.async_alarm_arm_home(hass, f"{CODE}2") + await hass.async_block_till_done() + + assert STATE_ALARM_DISARMED == hass.states.get(entity_id).state + + +async def test_arm_away_no_pending(hass, mqtt_mock): + """Test arm home method.""" + assert await async_setup_component( + hass, + alarm_control_panel.DOMAIN, + { + "alarm_control_panel": { + "platform": "manual_mqtt", + "name": "test", + "code": CODE, + "pending_time": 0, + "disarm_after_trigger": False, + "command_topic": "alarm/command", + "state_topic": "alarm/state", + } + }, + ) + await hass.async_block_till_done() + + entity_id = "alarm_control_panel.test" + + assert STATE_ALARM_DISARMED == hass.states.get(entity_id).state + + await common.async_alarm_arm_away(hass, CODE, entity_id) + await hass.async_block_till_done() + + assert STATE_ALARM_ARMED_AWAY == hass.states.get(entity_id).state + + +async def test_arm_away_no_pending_when_code_not_req(hass, mqtt_mock): + """Test arm home method.""" + assert await async_setup_component( + hass, + alarm_control_panel.DOMAIN, + { + "alarm_control_panel": { + "platform": "manual_mqtt", + "name": "test", + "code_arm_required": False, + "code": CODE, + "pending_time": 0, + "disarm_after_trigger": False, + "command_topic": "alarm/command", + "state_topic": "alarm/state", + } + }, + ) + await hass.async_block_till_done() + + entity_id = "alarm_control_panel.test" + + assert STATE_ALARM_DISARMED == hass.states.get(entity_id).state + + await common.async_alarm_arm_away(hass, 0, entity_id) + await hass.async_block_till_done() + + assert STATE_ALARM_ARMED_AWAY == hass.states.get(entity_id).state + + +async def test_arm_home_with_template_code(hass, mqtt_mock): + """Attempt to arm with a template-based code.""" + assert await async_setup_component( + hass, + alarm_control_panel.DOMAIN, + { + "alarm_control_panel": { + "platform": "manual_mqtt", + "name": "test", + "code_template": '{{ "abc" }}', + "pending_time": 0, + "disarm_after_trigger": False, + "command_topic": "alarm/command", + "state_topic": "alarm/state", + } + }, + ) + await hass.async_block_till_done() + + entity_id = "alarm_control_panel.test" + + assert STATE_ALARM_DISARMED == hass.states.get(entity_id).state + + await common.async_alarm_arm_home(hass, "abc") + await hass.async_block_till_done() + + state = hass.states.get(entity_id) + assert STATE_ALARM_ARMED_HOME == state.state + + +async def test_arm_away_with_pending(hass, mqtt_mock): + """Test arm home method.""" + assert await async_setup_component( + hass, + alarm_control_panel.DOMAIN, + { + "alarm_control_panel": { + "platform": "manual_mqtt", + "name": "test", + "code": CODE, + "pending_time": 1, + "disarm_after_trigger": False, + "command_topic": "alarm/command", + "state_topic": "alarm/state", + } + }, + ) + await 
hass.async_block_till_done() + + entity_id = "alarm_control_panel.test" + + assert STATE_ALARM_DISARMED == hass.states.get(entity_id).state + + await common.async_alarm_arm_away(hass, CODE) + await hass.async_block_till_done() + + assert STATE_ALARM_PENDING == hass.states.get(entity_id).state + + state = hass.states.get(entity_id) + assert state.attributes["post_pending_state"] == STATE_ALARM_ARMED_AWAY + + future = dt_util.utcnow() + timedelta(seconds=1) + with patch( + ("homeassistant.components.manual_mqtt.alarm_control_panel." "dt_util.utcnow"), + return_value=future, + ): + async_fire_time_changed(hass, future) + await hass.async_block_till_done() + + assert STATE_ALARM_ARMED_AWAY == hass.states.get(entity_id).state + + +async def test_arm_away_with_invalid_code(hass, mqtt_mock): + """Attempt to arm away without a valid code.""" + assert await async_setup_component( + hass, + alarm_control_panel.DOMAIN, + { + "alarm_control_panel": { + "platform": "manual_mqtt", + "name": "test", + "code": CODE, + "pending_time": 1, + "disarm_after_trigger": False, + "command_topic": "alarm/command", + "state_topic": "alarm/state", + } + }, + ) + await hass.async_block_till_done() + + entity_id = "alarm_control_panel.test" + + assert STATE_ALARM_DISARMED == hass.states.get(entity_id).state + + await common.async_alarm_arm_away(hass, f"{CODE}2") + await hass.async_block_till_done() + + assert STATE_ALARM_DISARMED == hass.states.get(entity_id).state + + +async def test_arm_night_no_pending(hass, mqtt_mock): + """Test arm night method.""" + assert await async_setup_component( + hass, + alarm_control_panel.DOMAIN, + { + "alarm_control_panel": { + "platform": "manual_mqtt", + "name": "test", + "code": CODE, + "pending_time": 0, + "disarm_after_trigger": False, + "command_topic": "alarm/command", + "state_topic": "alarm/state", + } + }, + ) + await hass.async_block_till_done() + + entity_id = "alarm_control_panel.test" + + assert STATE_ALARM_DISARMED == hass.states.get(entity_id).state + + await common.async_alarm_arm_night(hass, CODE, entity_id) + await hass.async_block_till_done() + + assert STATE_ALARM_ARMED_NIGHT == hass.states.get(entity_id).state + + +async def test_arm_night_no_pending_when_code_not_req(hass, mqtt_mock): + """Test arm night method.""" + assert await async_setup_component( + hass, + alarm_control_panel.DOMAIN, + { + "alarm_control_panel": { + "platform": "manual_mqtt", + "name": "test", + "code_arm_required": False, + "code": CODE, + "pending_time": 0, + "disarm_after_trigger": False, + "command_topic": "alarm/command", + "state_topic": "alarm/state", + } + }, + ) + await hass.async_block_till_done() + + entity_id = "alarm_control_panel.test" + + assert STATE_ALARM_DISARMED == hass.states.get(entity_id).state + + await common.async_alarm_arm_night(hass, 0, entity_id) + await hass.async_block_till_done() + + assert STATE_ALARM_ARMED_NIGHT == hass.states.get(entity_id).state + + +async def test_arm_night_with_pending(hass, mqtt_mock): + """Test arm night method.""" + assert await async_setup_component( + hass, + alarm_control_panel.DOMAIN, + { + "alarm_control_panel": { + "platform": "manual_mqtt", + "name": "test", + "code": CODE, + "pending_time": 1, + "disarm_after_trigger": False, + "command_topic": "alarm/command", + "state_topic": "alarm/state", + } + }, + ) + await hass.async_block_till_done() + + entity_id = "alarm_control_panel.test" + + assert STATE_ALARM_DISARMED == hass.states.get(entity_id).state + + await common.async_alarm_arm_night(hass, CODE) + await 
hass.async_block_till_done() + + assert STATE_ALARM_PENDING == hass.states.get(entity_id).state + + state = hass.states.get(entity_id) + assert state.attributes["post_pending_state"] == STATE_ALARM_ARMED_NIGHT + + future = dt_util.utcnow() + timedelta(seconds=1) + with patch( + ("homeassistant.components.manual_mqtt.alarm_control_panel." "dt_util.utcnow"), + return_value=future, + ): + async_fire_time_changed(hass, future) + await hass.async_block_till_done() + + assert STATE_ALARM_ARMED_NIGHT == hass.states.get(entity_id).state + + # Do not go to the pending state when updating to the same state + await common.async_alarm_arm_night(hass, CODE, entity_id) + await hass.async_block_till_done() + + assert STATE_ALARM_ARMED_NIGHT == hass.states.get(entity_id).state + + +async def test_arm_night_with_invalid_code(hass, mqtt_mock): + """Attempt to arm night without a valid code.""" + assert await async_setup_component( + hass, + alarm_control_panel.DOMAIN, + { + "alarm_control_panel": { + "platform": "manual_mqtt", + "name": "test", + "code": CODE, + "pending_time": 1, + "disarm_after_trigger": False, + "command_topic": "alarm/command", + "state_topic": "alarm/state", + } + }, + ) + await hass.async_block_till_done() + + entity_id = "alarm_control_panel.test" + + assert STATE_ALARM_DISARMED == hass.states.get(entity_id).state + + await common.async_alarm_arm_night(hass, f"{CODE}2") + await hass.async_block_till_done() + + assert STATE_ALARM_DISARMED == hass.states.get(entity_id).state + + +async def test_trigger_no_pending(hass, mqtt_mock): + """Test triggering when no pending submitted method.""" + assert await async_setup_component( + hass, + alarm_control_panel.DOMAIN, + { + "alarm_control_panel": { + "platform": "manual_mqtt", + "name": "test", + "trigger_time": 1, + "disarm_after_trigger": False, + "command_topic": "alarm/command", + "state_topic": "alarm/state", + } + }, + ) + await hass.async_block_till_done() + + entity_id = "alarm_control_panel.test" + + assert STATE_ALARM_DISARMED == hass.states.get(entity_id).state + + await common.async_alarm_trigger(hass, entity_id=entity_id) + await hass.async_block_till_done() + + assert STATE_ALARM_PENDING == hass.states.get(entity_id).state + + future = dt_util.utcnow() + timedelta(seconds=60) + with patch( + ("homeassistant.components.manual_mqtt.alarm_control_panel." 
"dt_util.utcnow"), + return_value=future, + ): + async_fire_time_changed(hass, future) + await hass.async_block_till_done() + + assert STATE_ALARM_TRIGGERED == hass.states.get(entity_id).state + + +async def test_trigger_with_delay(hass, mqtt_mock): + """Test trigger method and switch from pending to triggered.""" + assert await async_setup_component( + hass, + alarm_control_panel.DOMAIN, + { + "alarm_control_panel": { + "platform": "manual_mqtt", + "name": "test", + "code": CODE, + "delay_time": 1, + "pending_time": 0, + "disarm_after_trigger": False, + "command_topic": "alarm/command", + "state_topic": "alarm/state", + } + }, + ) + await hass.async_block_till_done() + + entity_id = "alarm_control_panel.test" + + assert STATE_ALARM_DISARMED == hass.states.get(entity_id).state + + await common.async_alarm_arm_away(hass, CODE) + await hass.async_block_till_done() + + assert STATE_ALARM_ARMED_AWAY == hass.states.get(entity_id).state + + await common.async_alarm_trigger(hass, entity_id=entity_id) + await hass.async_block_till_done() + + state = hass.states.get(entity_id) + assert STATE_ALARM_PENDING == state.state + assert STATE_ALARM_TRIGGERED == state.attributes["post_pending_state"] + + future = dt_util.utcnow() + timedelta(seconds=1) + with patch( + ("homeassistant.components.manual_mqtt.alarm_control_panel." "dt_util.utcnow"), + return_value=future, + ): + async_fire_time_changed(hass, future) + await hass.async_block_till_done() + + state = hass.states.get(entity_id) + assert STATE_ALARM_TRIGGERED == state.state + + +async def test_trigger_zero_trigger_time(hass, mqtt_mock): + """Test disabled trigger.""" + assert await async_setup_component( + hass, + alarm_control_panel.DOMAIN, + { + "alarm_control_panel": { + "platform": "manual_mqtt", + "name": "test", + "pending_time": 0, + "trigger_time": 0, + "disarm_after_trigger": False, + "command_topic": "alarm/command", + "state_topic": "alarm/state", + } + }, + ) + await hass.async_block_till_done() + + entity_id = "alarm_control_panel.test" + + assert STATE_ALARM_DISARMED == hass.states.get(entity_id).state + + await common.async_alarm_trigger(hass) + await hass.async_block_till_done() + + assert STATE_ALARM_DISARMED == hass.states.get(entity_id).state + + +async def test_trigger_zero_trigger_time_with_pending(hass, mqtt_mock): + """Test disabled trigger.""" + assert await async_setup_component( + hass, + alarm_control_panel.DOMAIN, + { + "alarm_control_panel": { + "platform": "manual_mqtt", + "name": "test", + "pending_time": 2, + "trigger_time": 0, + "disarm_after_trigger": False, + "command_topic": "alarm/command", + "state_topic": "alarm/state", + } + }, + ) + await hass.async_block_till_done() + + entity_id = "alarm_control_panel.test" + + assert STATE_ALARM_DISARMED == hass.states.get(entity_id).state + + await common.async_alarm_trigger(hass) + await hass.async_block_till_done() + + assert STATE_ALARM_DISARMED == hass.states.get(entity_id).state + + +async def test_trigger_with_pending(hass, mqtt_mock): + """Test arm home method.""" + assert await async_setup_component( + hass, + alarm_control_panel.DOMAIN, + { + "alarm_control_panel": { + "platform": "manual_mqtt", + "name": "test", + "pending_time": 2, + "trigger_time": 3, + "disarm_after_trigger": False, + "command_topic": "alarm/command", + "state_topic": "alarm/state", + } + }, + ) + await hass.async_block_till_done() + + entity_id = "alarm_control_panel.test" + + assert STATE_ALARM_DISARMED == hass.states.get(entity_id).state + + await common.async_alarm_trigger(hass) + await 
hass.async_block_till_done() + + assert STATE_ALARM_PENDING == hass.states.get(entity_id).state + + state = hass.states.get(entity_id) + assert state.attributes["post_pending_state"] == STATE_ALARM_TRIGGERED + + future = dt_util.utcnow() + timedelta(seconds=2) + with patch( + ("homeassistant.components.manual_mqtt.alarm_control_panel." "dt_util.utcnow"), + return_value=future, + ): + async_fire_time_changed(hass, future) + await hass.async_block_till_done() + + assert STATE_ALARM_TRIGGERED == hass.states.get(entity_id).state + + future = dt_util.utcnow() + timedelta(seconds=5) + with patch( + ("homeassistant.components.manual_mqtt.alarm_control_panel." "dt_util.utcnow"), + return_value=future, + ): + async_fire_time_changed(hass, future) + await hass.async_block_till_done() + + assert STATE_ALARM_DISARMED == hass.states.get(entity_id).state + + +async def test_trigger_with_disarm_after_trigger(hass, mqtt_mock): + """Test disarm after trigger.""" + assert await async_setup_component( + hass, + alarm_control_panel.DOMAIN, + { + "alarm_control_panel": { + "platform": "manual_mqtt", + "name": "test", + "trigger_time": 5, + "pending_time": 0, + "disarm_after_trigger": True, + "command_topic": "alarm/command", + "state_topic": "alarm/state", + } + }, + ) + await hass.async_block_till_done() + + entity_id = "alarm_control_panel.test" + + assert STATE_ALARM_DISARMED == hass.states.get(entity_id).state + + await common.async_alarm_trigger(hass, entity_id=entity_id) + await hass.async_block_till_done() + + assert STATE_ALARM_TRIGGERED == hass.states.get(entity_id).state + + future = dt_util.utcnow() + timedelta(seconds=5) + with patch( + ("homeassistant.components.manual_mqtt.alarm_control_panel." "dt_util.utcnow"), + return_value=future, + ): + async_fire_time_changed(hass, future) + await hass.async_block_till_done() + + assert STATE_ALARM_DISARMED == hass.states.get(entity_id).state + + +async def test_trigger_with_zero_specific_trigger_time(hass, mqtt_mock): + """Test trigger method.""" + assert await async_setup_component( + hass, + alarm_control_panel.DOMAIN, + { + "alarm_control_panel": { + "platform": "manual_mqtt", + "name": "test", + "trigger_time": 5, + "disarmed": {"trigger_time": 0}, + "pending_time": 0, + "disarm_after_trigger": True, + "command_topic": "alarm/command", + "state_topic": "alarm/state", + } + }, + ) + await hass.async_block_till_done() + + entity_id = "alarm_control_panel.test" + + assert STATE_ALARM_DISARMED == hass.states.get(entity_id).state + + await common.async_alarm_trigger(hass, entity_id=entity_id) + await hass.async_block_till_done() + + assert STATE_ALARM_DISARMED == hass.states.get(entity_id).state + + +async def test_trigger_with_unused_zero_specific_trigger_time(hass, mqtt_mock): + """Test disarm after trigger.""" + assert await async_setup_component( + hass, + alarm_control_panel.DOMAIN, + { + "alarm_control_panel": { + "platform": "manual_mqtt", + "name": "test", + "trigger_time": 5, + "armed_home": {"trigger_time": 0}, + "pending_time": 0, + "disarm_after_trigger": True, + "command_topic": "alarm/command", + "state_topic": "alarm/state", + } + }, + ) + await hass.async_block_till_done() + + entity_id = "alarm_control_panel.test" + + assert STATE_ALARM_DISARMED == hass.states.get(entity_id).state + + await common.async_alarm_trigger(hass, entity_id=entity_id) + await hass.async_block_till_done() + + assert STATE_ALARM_TRIGGERED == hass.states.get(entity_id).state + + future = dt_util.utcnow() + timedelta(seconds=5) + with patch( + 
("homeassistant.components.manual_mqtt.alarm_control_panel." "dt_util.utcnow"), + return_value=future, + ): + async_fire_time_changed(hass, future) + await hass.async_block_till_done() + + assert STATE_ALARM_DISARMED == hass.states.get(entity_id).state + + +async def test_trigger_with_specific_trigger_time(hass, mqtt_mock): + """Test disarm after trigger.""" + assert await async_setup_component( + hass, + alarm_control_panel.DOMAIN, + { + "alarm_control_panel": { + "platform": "manual_mqtt", + "name": "test", + "disarmed": {"trigger_time": 5}, + "pending_time": 0, + "disarm_after_trigger": True, + "command_topic": "alarm/command", + "state_topic": "alarm/state", + } + }, + ) + await hass.async_block_till_done() + + entity_id = "alarm_control_panel.test" + + assert STATE_ALARM_DISARMED == hass.states.get(entity_id).state + + await common.async_alarm_trigger(hass, entity_id=entity_id) + await hass.async_block_till_done() + + assert STATE_ALARM_TRIGGERED == hass.states.get(entity_id).state + + future = dt_util.utcnow() + timedelta(seconds=5) + with patch( + ("homeassistant.components.manual_mqtt.alarm_control_panel." "dt_util.utcnow"), + return_value=future, + ): + async_fire_time_changed(hass, future) + await hass.async_block_till_done() + + assert STATE_ALARM_DISARMED == hass.states.get(entity_id).state + + +async def test_back_to_back_trigger_with_no_disarm_after_trigger(hass, mqtt_mock): + """Test no disarm after back to back trigger.""" + assert await async_setup_component( + hass, + alarm_control_panel.DOMAIN, + { + "alarm_control_panel": { + "platform": "manual_mqtt", + "name": "test", + "trigger_time": 5, + "pending_time": 0, + "disarm_after_trigger": False, + "command_topic": "alarm/command", + "state_topic": "alarm/state", + } + }, + ) + await hass.async_block_till_done() + + entity_id = "alarm_control_panel.test" + + assert STATE_ALARM_DISARMED == hass.states.get(entity_id).state + + await common.async_alarm_arm_away(hass, CODE, entity_id) + await hass.async_block_till_done() + + assert STATE_ALARM_ARMED_AWAY == hass.states.get(entity_id).state + + await common.async_alarm_trigger(hass, entity_id=entity_id) + await hass.async_block_till_done() + + assert STATE_ALARM_TRIGGERED == hass.states.get(entity_id).state + + future = dt_util.utcnow() + timedelta(seconds=5) + with patch( + ("homeassistant.components.manual_mqtt.alarm_control_panel." "dt_util.utcnow"), + return_value=future, + ): + async_fire_time_changed(hass, future) + await hass.async_block_till_done() + + assert STATE_ALARM_ARMED_AWAY == hass.states.get(entity_id).state + + await common.async_alarm_trigger(hass, entity_id=entity_id) + await hass.async_block_till_done() + + assert STATE_ALARM_TRIGGERED == hass.states.get(entity_id).state + + future = dt_util.utcnow() + timedelta(seconds=5) + with patch( + ("homeassistant.components.manual_mqtt.alarm_control_panel." 
"dt_util.utcnow"), + return_value=future, + ): + async_fire_time_changed(hass, future) + await hass.async_block_till_done() + + assert STATE_ALARM_ARMED_AWAY == hass.states.get(entity_id).state + + +async def test_disarm_while_pending_trigger(hass, mqtt_mock): + """Test disarming while pending state.""" + assert await async_setup_component( + hass, + alarm_control_panel.DOMAIN, + { + "alarm_control_panel": { + "platform": "manual_mqtt", + "name": "test", + "trigger_time": 5, + "disarm_after_trigger": False, + "command_topic": "alarm/command", + "state_topic": "alarm/state", + } + }, + ) + await hass.async_block_till_done() + + entity_id = "alarm_control_panel.test" + + assert STATE_ALARM_DISARMED == hass.states.get(entity_id).state + + await common.async_alarm_trigger(hass) + await hass.async_block_till_done() + + assert STATE_ALARM_PENDING == hass.states.get(entity_id).state + + await common.async_alarm_disarm(hass, entity_id=entity_id) + await hass.async_block_till_done() + + assert STATE_ALARM_DISARMED == hass.states.get(entity_id).state + + future = dt_util.utcnow() + timedelta(seconds=5) + with patch( + ("homeassistant.components.manual_mqtt.alarm_control_panel." "dt_util.utcnow"), + return_value=future, + ): + async_fire_time_changed(hass, future) + await hass.async_block_till_done() + + assert STATE_ALARM_DISARMED == hass.states.get(entity_id).state + + +async def test_disarm_during_trigger_with_invalid_code(hass, mqtt_mock): + """Test disarming while code is invalid.""" + assert await async_setup_component( + hass, + alarm_control_panel.DOMAIN, + { + "alarm_control_panel": { + "platform": "manual_mqtt", + "name": "test", + "pending_time": 5, + "code": f"{CODE}2", + "disarm_after_trigger": False, + "command_topic": "alarm/command", + "state_topic": "alarm/state", + } + }, + ) + await hass.async_block_till_done() + + entity_id = "alarm_control_panel.test" + + assert STATE_ALARM_DISARMED == hass.states.get(entity_id).state + + await common.async_alarm_trigger(hass) + await hass.async_block_till_done() + + assert STATE_ALARM_PENDING == hass.states.get(entity_id).state + + await common.async_alarm_disarm(hass, entity_id=entity_id) + await hass.async_block_till_done() + + assert STATE_ALARM_PENDING == hass.states.get(entity_id).state + + future = dt_util.utcnow() + timedelta(seconds=5) + with patch( + ("homeassistant.components.manual_mqtt.alarm_control_panel." 
"dt_util.utcnow"), + return_value=future, + ): + async_fire_time_changed(hass, future) + await hass.async_block_till_done() + + assert STATE_ALARM_TRIGGERED == hass.states.get(entity_id).state + + +async def test_trigger_with_unused_specific_delay(hass, mqtt_mock): + """Test trigger method and switch from pending to triggered.""" + assert await async_setup_component( + hass, + alarm_control_panel.DOMAIN, + { + "alarm_control_panel": { + "platform": "manual_mqtt", + "name": "test", + "code": CODE, + "delay_time": 5, + "pending_time": 0, + "armed_home": {"delay_time": 10}, + "disarm_after_trigger": False, + "command_topic": "alarm/command", + "state_topic": "alarm/state", + } + }, + ) + await hass.async_block_till_done() + + entity_id = "alarm_control_panel.test" + + assert STATE_ALARM_DISARMED == hass.states.get(entity_id).state + + await common.async_alarm_arm_away(hass, CODE) + await hass.async_block_till_done() + + assert STATE_ALARM_ARMED_AWAY == hass.states.get(entity_id).state + + await common.async_alarm_trigger(hass, entity_id=entity_id) + await hass.async_block_till_done() + + state = hass.states.get(entity_id) + assert STATE_ALARM_PENDING == state.state + assert STATE_ALARM_TRIGGERED == state.attributes["post_pending_state"] + + future = dt_util.utcnow() + timedelta(seconds=5) + with patch( + ("homeassistant.components.manual_mqtt.alarm_control_panel." "dt_util.utcnow"), + return_value=future, + ): + async_fire_time_changed(hass, future) + await hass.async_block_till_done() + + state = hass.states.get(entity_id) + assert state.state == STATE_ALARM_TRIGGERED + + +async def test_trigger_with_specific_delay(hass, mqtt_mock): + """Test trigger method and switch from pending to triggered.""" + assert await async_setup_component( + hass, + alarm_control_panel.DOMAIN, + { + "alarm_control_panel": { + "platform": "manual_mqtt", + "name": "test", + "code": CODE, + "delay_time": 10, + "pending_time": 0, + "armed_away": {"delay_time": 1}, + "disarm_after_trigger": False, + "command_topic": "alarm/command", + "state_topic": "alarm/state", + } + }, + ) + await hass.async_block_till_done() + + entity_id = "alarm_control_panel.test" + + assert STATE_ALARM_DISARMED == hass.states.get(entity_id).state + + await common.async_alarm_arm_away(hass, CODE) + await hass.async_block_till_done() + + assert STATE_ALARM_ARMED_AWAY == hass.states.get(entity_id).state + + await common.async_alarm_trigger(hass, entity_id=entity_id) + await hass.async_block_till_done() + + state = hass.states.get(entity_id) + assert STATE_ALARM_PENDING == state.state + assert STATE_ALARM_TRIGGERED == state.attributes["post_pending_state"] + + future = dt_util.utcnow() + timedelta(seconds=1) + with patch( + ("homeassistant.components.manual_mqtt.alarm_control_panel." 
"dt_util.utcnow"), + return_value=future, + ): + async_fire_time_changed(hass, future) + await hass.async_block_till_done() + + state = hass.states.get(entity_id) + assert state.state == STATE_ALARM_TRIGGERED + + +async def test_trigger_with_pending_and_delay(hass, mqtt_mock): + """Test trigger method and switch from pending to triggered.""" + assert await async_setup_component( + hass, + alarm_control_panel.DOMAIN, + { + "alarm_control_panel": { + "platform": "manual_mqtt", + "name": "test", + "code": CODE, + "delay_time": 1, + "pending_time": 0, + "triggered": {"pending_time": 1}, + "disarm_after_trigger": False, + "command_topic": "alarm/command", + "state_topic": "alarm/state", + } + }, + ) + await hass.async_block_till_done() + + entity_id = "alarm_control_panel.test" + + assert STATE_ALARM_DISARMED == hass.states.get(entity_id).state + + await common.async_alarm_arm_away(hass, CODE) + await hass.async_block_till_done() + + assert STATE_ALARM_ARMED_AWAY == hass.states.get(entity_id).state + + await common.async_alarm_trigger(hass, entity_id=entity_id) + await hass.async_block_till_done() + + state = hass.states.get(entity_id) + assert state.state == STATE_ALARM_PENDING + assert state.attributes["post_pending_state"] == STATE_ALARM_TRIGGERED + + future = dt_util.utcnow() + timedelta(seconds=1) + with patch( + ("homeassistant.components.manual_mqtt.alarm_control_panel." "dt_util.utcnow"), + return_value=future, + ): + async_fire_time_changed(hass, future) + await hass.async_block_till_done() + + state = hass.states.get(entity_id) + assert state.state == STATE_ALARM_PENDING + assert state.attributes["post_pending_state"] == STATE_ALARM_TRIGGERED + + future += timedelta(seconds=1) + with patch( + ("homeassistant.components.manual_mqtt.alarm_control_panel." "dt_util.utcnow"), + return_value=future, + ): + async_fire_time_changed(hass, future) + await hass.async_block_till_done() + + state = hass.states.get(entity_id) + assert state.state == STATE_ALARM_TRIGGERED + + +async def test_trigger_with_pending_and_specific_delay(hass, mqtt_mock): + """Test trigger method and switch from pending to triggered.""" + assert await async_setup_component( + hass, + alarm_control_panel.DOMAIN, + { + "alarm_control_panel": { + "platform": "manual_mqtt", + "name": "test", + "code": CODE, + "delay_time": 10, + "pending_time": 0, + "armed_away": {"delay_time": 1}, + "triggered": {"pending_time": 1}, + "disarm_after_trigger": False, + "command_topic": "alarm/command", + "state_topic": "alarm/state", + } + }, + ) + await hass.async_block_till_done() + + entity_id = "alarm_control_panel.test" + + assert STATE_ALARM_DISARMED == hass.states.get(entity_id).state + + await common.async_alarm_arm_away(hass, CODE) + await hass.async_block_till_done() + + assert STATE_ALARM_ARMED_AWAY == hass.states.get(entity_id).state + + await common.async_alarm_trigger(hass, entity_id=entity_id) + await hass.async_block_till_done() + + state = hass.states.get(entity_id) + assert state.state == STATE_ALARM_PENDING + assert state.attributes["post_pending_state"] == STATE_ALARM_TRIGGERED + + future = dt_util.utcnow() + timedelta(seconds=1) + with patch( + ("homeassistant.components.manual_mqtt.alarm_control_panel." 
"dt_util.utcnow"), + return_value=future, + ): + async_fire_time_changed(hass, future) + await hass.async_block_till_done() + + state = hass.states.get(entity_id) + assert state.state == STATE_ALARM_PENDING + assert state.attributes["post_pending_state"] == STATE_ALARM_TRIGGERED + + future += timedelta(seconds=1) + with patch( + ("homeassistant.components.manual_mqtt.alarm_control_panel." "dt_util.utcnow"), + return_value=future, + ): + async_fire_time_changed(hass, future) + await hass.async_block_till_done() + + state = hass.states.get(entity_id) + assert state.state == STATE_ALARM_TRIGGERED + + +async def test_armed_home_with_specific_pending(hass, mqtt_mock): + """Test arm home method.""" + assert await async_setup_component( + hass, + alarm_control_panel.DOMAIN, + { + "alarm_control_panel": { + "platform": "manual_mqtt", + "name": "test", + "pending_time": 10, + "armed_home": {"pending_time": 2}, + "command_topic": "alarm/command", + "state_topic": "alarm/state", + } + }, + ) + await hass.async_block_till_done() + + entity_id = "alarm_control_panel.test" + + await common.async_alarm_arm_home(hass) + await hass.async_block_till_done() + + assert STATE_ALARM_PENDING == hass.states.get(entity_id).state + + future = dt_util.utcnow() + timedelta(seconds=2) + with patch( + ("homeassistant.components.manual_mqtt.alarm_control_panel." "dt_util.utcnow"), + return_value=future, + ): + async_fire_time_changed(hass, future) + await hass.async_block_till_done() + + assert STATE_ALARM_ARMED_HOME == hass.states.get(entity_id).state + + +async def test_armed_away_with_specific_pending(hass, mqtt_mock): + """Test arm home method.""" + assert await async_setup_component( + hass, + alarm_control_panel.DOMAIN, + { + "alarm_control_panel": { + "platform": "manual_mqtt", + "name": "test", + "pending_time": 10, + "armed_away": {"pending_time": 2}, + "command_topic": "alarm/command", + "state_topic": "alarm/state", + } + }, + ) + await hass.async_block_till_done() + + entity_id = "alarm_control_panel.test" + + await common.async_alarm_arm_away(hass) + await hass.async_block_till_done() + + assert STATE_ALARM_PENDING == hass.states.get(entity_id).state + + future = dt_util.utcnow() + timedelta(seconds=2) + with patch( + ("homeassistant.components.manual_mqtt.alarm_control_panel." "dt_util.utcnow"), + return_value=future, + ): + async_fire_time_changed(hass, future) + await hass.async_block_till_done() + + assert STATE_ALARM_ARMED_AWAY == hass.states.get(entity_id).state + + +async def test_armed_night_with_specific_pending(hass, mqtt_mock): + """Test arm home method.""" + assert await async_setup_component( + hass, + alarm_control_panel.DOMAIN, + { + "alarm_control_panel": { + "platform": "manual_mqtt", + "name": "test", + "pending_time": 10, + "armed_night": {"pending_time": 2}, + "command_topic": "alarm/command", + "state_topic": "alarm/state", + } + }, + ) + await hass.async_block_till_done() + + entity_id = "alarm_control_panel.test" + + await common.async_alarm_arm_night(hass) + await hass.async_block_till_done() + + assert STATE_ALARM_PENDING == hass.states.get(entity_id).state + + future = dt_util.utcnow() + timedelta(seconds=2) + with patch( + ("homeassistant.components.manual_mqtt.alarm_control_panel." 
"dt_util.utcnow"), + return_value=future, + ): + async_fire_time_changed(hass, future) + await hass.async_block_till_done() + + assert STATE_ALARM_ARMED_NIGHT == hass.states.get(entity_id).state + + +async def test_trigger_with_specific_pending(hass, mqtt_mock): + """Test arm home method.""" + assert await async_setup_component( + hass, + alarm_control_panel.DOMAIN, + { + "alarm_control_panel": { + "platform": "manual_mqtt", + "name": "test", + "pending_time": 10, + "triggered": {"pending_time": 2}, + "trigger_time": 3, + "disarm_after_trigger": False, + "command_topic": "alarm/command", + "state_topic": "alarm/state", + } + }, + ) + await hass.async_block_till_done() + + entity_id = "alarm_control_panel.test" + + await common.async_alarm_trigger(hass) + await hass.async_block_till_done() + + assert STATE_ALARM_PENDING == hass.states.get(entity_id).state + + future = dt_util.utcnow() + timedelta(seconds=2) + with patch( + ("homeassistant.components.manual_mqtt.alarm_control_panel." "dt_util.utcnow"), + return_value=future, + ): + async_fire_time_changed(hass, future) + await hass.async_block_till_done() + + assert STATE_ALARM_TRIGGERED == hass.states.get(entity_id).state + + future = dt_util.utcnow() + timedelta(seconds=5) + with patch( + ("homeassistant.components.manual_mqtt.alarm_control_panel." "dt_util.utcnow"), + return_value=future, + ): + async_fire_time_changed(hass, future) + await hass.async_block_till_done() + + assert STATE_ALARM_DISARMED == hass.states.get(entity_id).state + + +async def test_arm_away_after_disabled_disarmed(hass, mqtt_mock): + """Test pending state with and without zero trigger time.""" + assert await async_setup_component( + hass, + alarm_control_panel.DOMAIN, + { + "alarm_control_panel": { + "platform": "manual_mqtt", + "name": "test", + "code": CODE, + "pending_time": 0, + "delay_time": 1, + "armed_away": {"pending_time": 1}, + "disarmed": {"trigger_time": 0}, + "disarm_after_trigger": False, + "command_topic": "alarm/command", + "state_topic": "alarm/state", + } + }, + ) + await hass.async_block_till_done() + + entity_id = "alarm_control_panel.test" + + assert STATE_ALARM_DISARMED == hass.states.get(entity_id).state + + await common.async_alarm_arm_away(hass, CODE) + await hass.async_block_till_done() + + state = hass.states.get(entity_id) + assert STATE_ALARM_PENDING == state.state + assert STATE_ALARM_DISARMED == state.attributes["pre_pending_state"] + assert STATE_ALARM_ARMED_AWAY == state.attributes["post_pending_state"] + + await common.async_alarm_trigger(hass, entity_id=entity_id) + await hass.async_block_till_done() + + state = hass.states.get(entity_id) + assert STATE_ALARM_PENDING == state.state + assert STATE_ALARM_DISARMED == state.attributes["pre_pending_state"] + assert STATE_ALARM_ARMED_AWAY == state.attributes["post_pending_state"] + + future = dt_util.utcnow() + timedelta(seconds=1) + with patch( + ("homeassistant.components.manual_mqtt.alarm_control_panel." 
"dt_util.utcnow"), + return_value=future, + ): + async_fire_time_changed(hass, future) + await hass.async_block_till_done() + + state = hass.states.get(entity_id) + assert STATE_ALARM_ARMED_AWAY == state.state + + await common.async_alarm_trigger(hass, entity_id=entity_id) + await hass.async_block_till_done() + + state = hass.states.get(entity_id) assert STATE_ALARM_PENDING == state.state + assert STATE_ALARM_ARMED_AWAY == state.attributes["pre_pending_state"] assert STATE_ALARM_TRIGGERED == state.attributes["post_pending_state"] - future = dt_util.utcnow() + timedelta(seconds=1) - with patch( - ( - "homeassistant.components.manual_mqtt.alarm_control_panel." - "dt_util.utcnow" - ), - return_value=future, - ): - fire_time_changed(self.hass, future) - self.hass.block_till_done() - - state = self.hass.states.get(entity_id) - assert STATE_ALARM_TRIGGERED == state.state - - def test_trigger_zero_trigger_time(self): - """Test disabled trigger.""" - assert setup_component( - self.hass, - alarm_control_panel.DOMAIN, - { - "alarm_control_panel": { - "platform": "manual_mqtt", - "name": "test", - "pending_time": 0, - "trigger_time": 0, - "disarm_after_trigger": False, - "command_topic": "alarm/command", - "state_topic": "alarm/state", - } - }, - ) - self.hass.block_till_done() - - entity_id = "alarm_control_panel.test" - - assert STATE_ALARM_DISARMED == self.hass.states.get(entity_id).state - - common.alarm_trigger(self.hass) - self.hass.block_till_done() - - assert STATE_ALARM_DISARMED == self.hass.states.get(entity_id).state - - def test_trigger_zero_trigger_time_with_pending(self): - """Test disabled trigger.""" - assert setup_component( - self.hass, - alarm_control_panel.DOMAIN, - { - "alarm_control_panel": { - "platform": "manual_mqtt", - "name": "test", - "pending_time": 2, - "trigger_time": 0, - "disarm_after_trigger": False, - "command_topic": "alarm/command", - "state_topic": "alarm/state", - } - }, - ) - self.hass.block_till_done() - - entity_id = "alarm_control_panel.test" - - assert STATE_ALARM_DISARMED == self.hass.states.get(entity_id).state - - common.alarm_trigger(self.hass) - self.hass.block_till_done() - - assert STATE_ALARM_DISARMED == self.hass.states.get(entity_id).state - - def test_trigger_with_pending(self): - """Test arm home method.""" - assert setup_component( - self.hass, - alarm_control_panel.DOMAIN, - { - "alarm_control_panel": { - "platform": "manual_mqtt", - "name": "test", - "pending_time": 2, - "trigger_time": 3, - "disarm_after_trigger": False, - "command_topic": "alarm/command", - "state_topic": "alarm/state", - } - }, - ) - self.hass.block_till_done() - - entity_id = "alarm_control_panel.test" - - assert STATE_ALARM_DISARMED == self.hass.states.get(entity_id).state - - common.alarm_trigger(self.hass) - self.hass.block_till_done() - - assert STATE_ALARM_PENDING == self.hass.states.get(entity_id).state - - state = self.hass.states.get(entity_id) - assert state.attributes["post_pending_state"] == STATE_ALARM_TRIGGERED - - future = dt_util.utcnow() + timedelta(seconds=2) - with patch( - ( - "homeassistant.components.manual_mqtt.alarm_control_panel." - "dt_util.utcnow" - ), - return_value=future, - ): - fire_time_changed(self.hass, future) - self.hass.block_till_done() - - assert STATE_ALARM_TRIGGERED == self.hass.states.get(entity_id).state - - future = dt_util.utcnow() + timedelta(seconds=5) - with patch( - ( - "homeassistant.components.manual_mqtt.alarm_control_panel." 
- "dt_util.utcnow" - ), - return_value=future, - ): - fire_time_changed(self.hass, future) - self.hass.block_till_done() - - assert STATE_ALARM_DISARMED == self.hass.states.get(entity_id).state - - def test_trigger_with_disarm_after_trigger(self): - """Test disarm after trigger.""" - assert setup_component( - self.hass, - alarm_control_panel.DOMAIN, - { - "alarm_control_panel": { - "platform": "manual_mqtt", - "name": "test", - "trigger_time": 5, - "pending_time": 0, - "disarm_after_trigger": True, - "command_topic": "alarm/command", - "state_topic": "alarm/state", - } - }, - ) - self.hass.block_till_done() - - entity_id = "alarm_control_panel.test" - - assert STATE_ALARM_DISARMED == self.hass.states.get(entity_id).state - - common.alarm_trigger(self.hass, entity_id=entity_id) - self.hass.block_till_done() - - assert STATE_ALARM_TRIGGERED == self.hass.states.get(entity_id).state - - future = dt_util.utcnow() + timedelta(seconds=5) - with patch( - ( - "homeassistant.components.manual_mqtt.alarm_control_panel." - "dt_util.utcnow" - ), - return_value=future, - ): - fire_time_changed(self.hass, future) - self.hass.block_till_done() - - assert STATE_ALARM_DISARMED == self.hass.states.get(entity_id).state - - def test_trigger_with_zero_specific_trigger_time(self): - """Test trigger method.""" - assert setup_component( - self.hass, - alarm_control_panel.DOMAIN, - { - "alarm_control_panel": { - "platform": "manual_mqtt", - "name": "test", - "trigger_time": 5, - "disarmed": {"trigger_time": 0}, - "pending_time": 0, - "disarm_after_trigger": True, - "command_topic": "alarm/command", - "state_topic": "alarm/state", - } - }, - ) - self.hass.block_till_done() - - entity_id = "alarm_control_panel.test" - - assert STATE_ALARM_DISARMED == self.hass.states.get(entity_id).state - - common.alarm_trigger(self.hass, entity_id=entity_id) - self.hass.block_till_done() - - assert STATE_ALARM_DISARMED == self.hass.states.get(entity_id).state - - def test_trigger_with_unused_zero_specific_trigger_time(self): - """Test disarm after trigger.""" - assert setup_component( - self.hass, - alarm_control_panel.DOMAIN, - { - "alarm_control_panel": { - "platform": "manual_mqtt", - "name": "test", - "trigger_time": 5, - "armed_home": {"trigger_time": 0}, - "pending_time": 0, - "disarm_after_trigger": True, - "command_topic": "alarm/command", - "state_topic": "alarm/state", - } - }, - ) - self.hass.block_till_done() - - entity_id = "alarm_control_panel.test" - - assert STATE_ALARM_DISARMED == self.hass.states.get(entity_id).state - - common.alarm_trigger(self.hass, entity_id=entity_id) - self.hass.block_till_done() - - assert STATE_ALARM_TRIGGERED == self.hass.states.get(entity_id).state - - future = dt_util.utcnow() + timedelta(seconds=5) - with patch( - ( - "homeassistant.components.manual_mqtt.alarm_control_panel." 
- "dt_util.utcnow" - ), - return_value=future, - ): - fire_time_changed(self.hass, future) - self.hass.block_till_done() - - assert STATE_ALARM_DISARMED == self.hass.states.get(entity_id).state - - def test_trigger_with_specific_trigger_time(self): - """Test disarm after trigger.""" - assert setup_component( - self.hass, - alarm_control_panel.DOMAIN, - { - "alarm_control_panel": { - "platform": "manual_mqtt", - "name": "test", - "disarmed": {"trigger_time": 5}, - "pending_time": 0, - "disarm_after_trigger": True, - "command_topic": "alarm/command", - "state_topic": "alarm/state", - } - }, - ) - self.hass.block_till_done() - - entity_id = "alarm_control_panel.test" - - assert STATE_ALARM_DISARMED == self.hass.states.get(entity_id).state - - common.alarm_trigger(self.hass, entity_id=entity_id) - self.hass.block_till_done() - - assert STATE_ALARM_TRIGGERED == self.hass.states.get(entity_id).state - - future = dt_util.utcnow() + timedelta(seconds=5) - with patch( - ( - "homeassistant.components.manual_mqtt.alarm_control_panel." - "dt_util.utcnow" - ), - return_value=future, - ): - fire_time_changed(self.hass, future) - self.hass.block_till_done() - - assert STATE_ALARM_DISARMED == self.hass.states.get(entity_id).state - - def test_back_to_back_trigger_with_no_disarm_after_trigger(self): - """Test no disarm after back to back trigger.""" - assert setup_component( - self.hass, - alarm_control_panel.DOMAIN, - { - "alarm_control_panel": { - "platform": "manual_mqtt", - "name": "test", - "trigger_time": 5, - "pending_time": 0, - "disarm_after_trigger": False, - "command_topic": "alarm/command", - "state_topic": "alarm/state", - } - }, - ) - self.hass.block_till_done() - - entity_id = "alarm_control_panel.test" - - assert STATE_ALARM_DISARMED == self.hass.states.get(entity_id).state - - common.alarm_arm_away(self.hass, CODE, entity_id) - self.hass.block_till_done() - - assert STATE_ALARM_ARMED_AWAY == self.hass.states.get(entity_id).state - - common.alarm_trigger(self.hass, entity_id=entity_id) - self.hass.block_till_done() - - assert STATE_ALARM_TRIGGERED == self.hass.states.get(entity_id).state - - future = dt_util.utcnow() + timedelta(seconds=5) - with patch( - ( - "homeassistant.components.manual_mqtt.alarm_control_panel." - "dt_util.utcnow" - ), - return_value=future, - ): - fire_time_changed(self.hass, future) - self.hass.block_till_done() - - assert STATE_ALARM_ARMED_AWAY == self.hass.states.get(entity_id).state - - common.alarm_trigger(self.hass, entity_id=entity_id) - self.hass.block_till_done() - - assert STATE_ALARM_TRIGGERED == self.hass.states.get(entity_id).state - - future = dt_util.utcnow() + timedelta(seconds=5) - with patch( - ( - "homeassistant.components.manual_mqtt.alarm_control_panel." 
- "dt_util.utcnow" - ), - return_value=future, - ): - fire_time_changed(self.hass, future) - self.hass.block_till_done() - - assert STATE_ALARM_ARMED_AWAY == self.hass.states.get(entity_id).state - - def test_disarm_while_pending_trigger(self): - """Test disarming while pending state.""" - assert setup_component( - self.hass, - alarm_control_panel.DOMAIN, - { - "alarm_control_panel": { - "platform": "manual_mqtt", - "name": "test", - "trigger_time": 5, - "disarm_after_trigger": False, - "command_topic": "alarm/command", - "state_topic": "alarm/state", - } - }, - ) - self.hass.block_till_done() - - entity_id = "alarm_control_panel.test" - - assert STATE_ALARM_DISARMED == self.hass.states.get(entity_id).state - - common.alarm_trigger(self.hass) - self.hass.block_till_done() - - assert STATE_ALARM_PENDING == self.hass.states.get(entity_id).state - - common.alarm_disarm(self.hass, entity_id=entity_id) - self.hass.block_till_done() - - assert STATE_ALARM_DISARMED == self.hass.states.get(entity_id).state - - future = dt_util.utcnow() + timedelta(seconds=5) - with patch( - ( - "homeassistant.components.manual_mqtt.alarm_control_panel." - "dt_util.utcnow" - ), - return_value=future, - ): - fire_time_changed(self.hass, future) - self.hass.block_till_done() - - assert STATE_ALARM_DISARMED == self.hass.states.get(entity_id).state - - def test_disarm_during_trigger_with_invalid_code(self): - """Test disarming while code is invalid.""" - assert setup_component( - self.hass, - alarm_control_panel.DOMAIN, - { - "alarm_control_panel": { - "platform": "manual_mqtt", - "name": "test", - "pending_time": 5, - "code": f"{CODE}2", - "disarm_after_trigger": False, - "command_topic": "alarm/command", - "state_topic": "alarm/state", - } - }, - ) - self.hass.block_till_done() - - entity_id = "alarm_control_panel.test" - - assert STATE_ALARM_DISARMED == self.hass.states.get(entity_id).state - - common.alarm_trigger(self.hass) - self.hass.block_till_done() - - assert STATE_ALARM_PENDING == self.hass.states.get(entity_id).state - - common.alarm_disarm(self.hass, entity_id=entity_id) - self.hass.block_till_done() - - assert STATE_ALARM_PENDING == self.hass.states.get(entity_id).state - - future = dt_util.utcnow() + timedelta(seconds=5) - with patch( - ( - "homeassistant.components.manual_mqtt.alarm_control_panel." 
- "dt_util.utcnow" - ), - return_value=future, - ): - fire_time_changed(self.hass, future) - self.hass.block_till_done() - - assert STATE_ALARM_TRIGGERED == self.hass.states.get(entity_id).state - - def test_trigger_with_unused_specific_delay(self): - """Test trigger method and switch from pending to triggered.""" - assert setup_component( - self.hass, - alarm_control_panel.DOMAIN, - { - "alarm_control_panel": { - "platform": "manual_mqtt", - "name": "test", - "code": CODE, - "delay_time": 5, - "pending_time": 0, - "armed_home": {"delay_time": 10}, - "disarm_after_trigger": False, - "command_topic": "alarm/command", - "state_topic": "alarm/state", - } - }, - ) - self.hass.block_till_done() - - entity_id = "alarm_control_panel.test" - - assert STATE_ALARM_DISARMED == self.hass.states.get(entity_id).state - - common.alarm_arm_away(self.hass, CODE) - self.hass.block_till_done() - - assert STATE_ALARM_ARMED_AWAY == self.hass.states.get(entity_id).state - - common.alarm_trigger(self.hass, entity_id=entity_id) - self.hass.block_till_done() - - state = self.hass.states.get(entity_id) - assert STATE_ALARM_PENDING == state.state - assert STATE_ALARM_TRIGGERED == state.attributes["post_pending_state"] - - future = dt_util.utcnow() + timedelta(seconds=5) - with patch( - ( - "homeassistant.components.manual_mqtt.alarm_control_panel." - "dt_util.utcnow" - ), - return_value=future, - ): - fire_time_changed(self.hass, future) - self.hass.block_till_done() - - state = self.hass.states.get(entity_id) - assert state.state == STATE_ALARM_TRIGGERED - - def test_trigger_with_specific_delay(self): - """Test trigger method and switch from pending to triggered.""" - assert setup_component( - self.hass, - alarm_control_panel.DOMAIN, - { - "alarm_control_panel": { - "platform": "manual_mqtt", - "name": "test", - "code": CODE, - "delay_time": 10, - "pending_time": 0, - "armed_away": {"delay_time": 1}, - "disarm_after_trigger": False, - "command_topic": "alarm/command", - "state_topic": "alarm/state", - } - }, - ) - self.hass.block_till_done() - - entity_id = "alarm_control_panel.test" - - assert STATE_ALARM_DISARMED == self.hass.states.get(entity_id).state - - common.alarm_arm_away(self.hass, CODE) - self.hass.block_till_done() - - assert STATE_ALARM_ARMED_AWAY == self.hass.states.get(entity_id).state - - common.alarm_trigger(self.hass, entity_id=entity_id) - self.hass.block_till_done() - - state = self.hass.states.get(entity_id) - assert STATE_ALARM_PENDING == state.state - assert STATE_ALARM_TRIGGERED == state.attributes["post_pending_state"] - - future = dt_util.utcnow() + timedelta(seconds=1) - with patch( - ( - "homeassistant.components.manual_mqtt.alarm_control_panel." 
- "dt_util.utcnow" - ), - return_value=future, - ): - fire_time_changed(self.hass, future) - self.hass.block_till_done() - - state = self.hass.states.get(entity_id) - assert state.state == STATE_ALARM_TRIGGERED - - def test_trigger_with_pending_and_delay(self): - """Test trigger method and switch from pending to triggered.""" - assert setup_component( - self.hass, - alarm_control_panel.DOMAIN, - { - "alarm_control_panel": { - "platform": "manual_mqtt", - "name": "test", - "code": CODE, - "delay_time": 1, - "pending_time": 0, - "triggered": {"pending_time": 1}, - "disarm_after_trigger": False, - "command_topic": "alarm/command", - "state_topic": "alarm/state", - } - }, - ) - self.hass.block_till_done() - - entity_id = "alarm_control_panel.test" - - assert STATE_ALARM_DISARMED == self.hass.states.get(entity_id).state - - common.alarm_arm_away(self.hass, CODE) - self.hass.block_till_done() - - assert STATE_ALARM_ARMED_AWAY == self.hass.states.get(entity_id).state - - common.alarm_trigger(self.hass, entity_id=entity_id) - self.hass.block_till_done() - - state = self.hass.states.get(entity_id) - assert state.state == STATE_ALARM_PENDING - assert state.attributes["post_pending_state"] == STATE_ALARM_TRIGGERED - - future = dt_util.utcnow() + timedelta(seconds=1) - with patch( - ( - "homeassistant.components.manual_mqtt.alarm_control_panel." - "dt_util.utcnow" - ), - return_value=future, - ): - fire_time_changed(self.hass, future) - self.hass.block_till_done() - - state = self.hass.states.get(entity_id) - assert state.state == STATE_ALARM_PENDING - assert state.attributes["post_pending_state"] == STATE_ALARM_TRIGGERED - - future += timedelta(seconds=1) - with patch( - ( - "homeassistant.components.manual_mqtt.alarm_control_panel." - "dt_util.utcnow" - ), - return_value=future, - ): - fire_time_changed(self.hass, future) - self.hass.block_till_done() - - state = self.hass.states.get(entity_id) - assert state.state == STATE_ALARM_TRIGGERED - - def test_trigger_with_pending_and_specific_delay(self): - """Test trigger method and switch from pending to triggered.""" - assert setup_component( - self.hass, - alarm_control_panel.DOMAIN, - { - "alarm_control_panel": { - "platform": "manual_mqtt", - "name": "test", - "code": CODE, - "delay_time": 10, - "pending_time": 0, - "armed_away": {"delay_time": 1}, - "triggered": {"pending_time": 1}, - "disarm_after_trigger": False, - "command_topic": "alarm/command", - "state_topic": "alarm/state", - } - }, - ) - self.hass.block_till_done() - - entity_id = "alarm_control_panel.test" - - assert STATE_ALARM_DISARMED == self.hass.states.get(entity_id).state - - common.alarm_arm_away(self.hass, CODE) - self.hass.block_till_done() - - assert STATE_ALARM_ARMED_AWAY == self.hass.states.get(entity_id).state - - common.alarm_trigger(self.hass, entity_id=entity_id) - self.hass.block_till_done() - - state = self.hass.states.get(entity_id) - assert state.state == STATE_ALARM_PENDING - assert state.attributes["post_pending_state"] == STATE_ALARM_TRIGGERED - - future = dt_util.utcnow() + timedelta(seconds=1) - with patch( - ( - "homeassistant.components.manual_mqtt.alarm_control_panel." - "dt_util.utcnow" - ), - return_value=future, - ): - fire_time_changed(self.hass, future) - self.hass.block_till_done() - - state = self.hass.states.get(entity_id) - assert state.state == STATE_ALARM_PENDING - assert state.attributes["post_pending_state"] == STATE_ALARM_TRIGGERED - - future += timedelta(seconds=1) - with patch( - ( - "homeassistant.components.manual_mqtt.alarm_control_panel." 
- "dt_util.utcnow" - ), - return_value=future, - ): - fire_time_changed(self.hass, future) - self.hass.block_till_done() - - state = self.hass.states.get(entity_id) - assert state.state == STATE_ALARM_TRIGGERED - - def test_armed_home_with_specific_pending(self): - """Test arm home method.""" - assert setup_component( - self.hass, - alarm_control_panel.DOMAIN, - { - "alarm_control_panel": { - "platform": "manual_mqtt", - "name": "test", - "pending_time": 10, - "armed_home": {"pending_time": 2}, - "command_topic": "alarm/command", - "state_topic": "alarm/state", - } - }, - ) - self.hass.block_till_done() - - entity_id = "alarm_control_panel.test" - - common.alarm_arm_home(self.hass) - self.hass.block_till_done() - - assert STATE_ALARM_PENDING == self.hass.states.get(entity_id).state - - future = dt_util.utcnow() + timedelta(seconds=2) - with patch( - ( - "homeassistant.components.manual_mqtt.alarm_control_panel." - "dt_util.utcnow" - ), - return_value=future, - ): - fire_time_changed(self.hass, future) - self.hass.block_till_done() - - assert STATE_ALARM_ARMED_HOME == self.hass.states.get(entity_id).state - - def test_armed_away_with_specific_pending(self): - """Test arm home method.""" - assert setup_component( - self.hass, - alarm_control_panel.DOMAIN, - { - "alarm_control_panel": { - "platform": "manual_mqtt", - "name": "test", - "pending_time": 10, - "armed_away": {"pending_time": 2}, - "command_topic": "alarm/command", - "state_topic": "alarm/state", - } - }, - ) - self.hass.block_till_done() - - entity_id = "alarm_control_panel.test" - - common.alarm_arm_away(self.hass) - self.hass.block_till_done() - - assert STATE_ALARM_PENDING == self.hass.states.get(entity_id).state - - future = dt_util.utcnow() + timedelta(seconds=2) - with patch( - ( - "homeassistant.components.manual_mqtt.alarm_control_panel." - "dt_util.utcnow" - ), - return_value=future, - ): - fire_time_changed(self.hass, future) - self.hass.block_till_done() - - assert STATE_ALARM_ARMED_AWAY == self.hass.states.get(entity_id).state - - def test_armed_night_with_specific_pending(self): - """Test arm home method.""" - assert setup_component( - self.hass, - alarm_control_panel.DOMAIN, - { - "alarm_control_panel": { - "platform": "manual_mqtt", - "name": "test", - "pending_time": 10, - "armed_night": {"pending_time": 2}, - "command_topic": "alarm/command", - "state_topic": "alarm/state", - } - }, - ) - self.hass.block_till_done() - - entity_id = "alarm_control_panel.test" - - common.alarm_arm_night(self.hass) - self.hass.block_till_done() - - assert STATE_ALARM_PENDING == self.hass.states.get(entity_id).state - - future = dt_util.utcnow() + timedelta(seconds=2) - with patch( - ( - "homeassistant.components.manual_mqtt.alarm_control_panel." 
- "dt_util.utcnow" - ), - return_value=future, - ): - fire_time_changed(self.hass, future) - self.hass.block_till_done() - - assert STATE_ALARM_ARMED_NIGHT == self.hass.states.get(entity_id).state - - def test_trigger_with_specific_pending(self): - """Test arm home method.""" - assert setup_component( - self.hass, - alarm_control_panel.DOMAIN, - { - "alarm_control_panel": { - "platform": "manual_mqtt", - "name": "test", - "pending_time": 10, - "triggered": {"pending_time": 2}, - "trigger_time": 3, - "disarm_after_trigger": False, - "command_topic": "alarm/command", - "state_topic": "alarm/state", - } - }, - ) - self.hass.block_till_done() - - entity_id = "alarm_control_panel.test" - - common.alarm_trigger(self.hass) - self.hass.block_till_done() - - assert STATE_ALARM_PENDING == self.hass.states.get(entity_id).state - - future = dt_util.utcnow() + timedelta(seconds=2) - with patch( - ( - "homeassistant.components.manual_mqtt.alarm_control_panel." - "dt_util.utcnow" - ), - return_value=future, - ): - fire_time_changed(self.hass, future) - self.hass.block_till_done() - - assert STATE_ALARM_TRIGGERED == self.hass.states.get(entity_id).state - - future = dt_util.utcnow() + timedelta(seconds=5) - with patch( - ( - "homeassistant.components.manual_mqtt.alarm_control_panel." - "dt_util.utcnow" - ), - return_value=future, - ): - fire_time_changed(self.hass, future) - self.hass.block_till_done() - - assert STATE_ALARM_DISARMED == self.hass.states.get(entity_id).state - - def test_arm_away_after_disabled_disarmed(self): - """Test pending state with and without zero trigger time.""" - assert setup_component( - self.hass, - alarm_control_panel.DOMAIN, - { - "alarm_control_panel": { - "platform": "manual_mqtt", - "name": "test", - "code": CODE, - "pending_time": 0, - "delay_time": 1, - "armed_away": {"pending_time": 1}, - "disarmed": {"trigger_time": 0}, - "disarm_after_trigger": False, - "command_topic": "alarm/command", - "state_topic": "alarm/state", - } - }, - ) - self.hass.block_till_done() - - entity_id = "alarm_control_panel.test" - - assert STATE_ALARM_DISARMED == self.hass.states.get(entity_id).state - - common.alarm_arm_away(self.hass, CODE) - self.hass.block_till_done() - - state = self.hass.states.get(entity_id) - assert STATE_ALARM_PENDING == state.state - assert STATE_ALARM_DISARMED == state.attributes["pre_pending_state"] - assert STATE_ALARM_ARMED_AWAY == state.attributes["post_pending_state"] - - common.alarm_trigger(self.hass, entity_id=entity_id) - self.hass.block_till_done() - - state = self.hass.states.get(entity_id) - assert STATE_ALARM_PENDING == state.state - assert STATE_ALARM_DISARMED == state.attributes["pre_pending_state"] - assert STATE_ALARM_ARMED_AWAY == state.attributes["post_pending_state"] - - future = dt_util.utcnow() + timedelta(seconds=1) - with patch( - ( - "homeassistant.components.manual_mqtt.alarm_control_panel." - "dt_util.utcnow" - ), - return_value=future, - ): - fire_time_changed(self.hass, future) - self.hass.block_till_done() - - state = self.hass.states.get(entity_id) - assert STATE_ALARM_ARMED_AWAY == state.state - - common.alarm_trigger(self.hass, entity_id=entity_id) - self.hass.block_till_done() - - state = self.hass.states.get(entity_id) - assert STATE_ALARM_PENDING == state.state - assert STATE_ALARM_ARMED_AWAY == state.attributes["pre_pending_state"] - assert STATE_ALARM_TRIGGERED == state.attributes["post_pending_state"] - - future += timedelta(seconds=1) - with patch( - ( - "homeassistant.components.manual_mqtt.alarm_control_panel." 
- "dt_util.utcnow" - ), - return_value=future, - ): - fire_time_changed(self.hass, future) - self.hass.block_till_done() - - state = self.hass.states.get(entity_id) - assert STATE_ALARM_TRIGGERED == state.state - - def test_disarm_with_template_code(self): - """Attempt to disarm with a valid or invalid template-based code.""" - assert setup_component( - self.hass, - alarm_control_panel.DOMAIN, - { - "alarm_control_panel": { - "platform": "manual_mqtt", - "name": "test", - "code_template": '{{ "" if from_state == "disarmed" else "abc" }}', - "pending_time": 0, - "disarm_after_trigger": False, - "command_topic": "alarm/command", - "state_topic": "alarm/state", - } - }, - ) - self.hass.block_till_done() - - entity_id = "alarm_control_panel.test" - - assert STATE_ALARM_DISARMED == self.hass.states.get(entity_id).state - - common.alarm_arm_home(self.hass, "def") - self.hass.block_till_done() - - state = self.hass.states.get(entity_id) - assert STATE_ALARM_ARMED_HOME == state.state - - common.alarm_disarm(self.hass, "def") - self.hass.block_till_done() - - state = self.hass.states.get(entity_id) - assert STATE_ALARM_ARMED_HOME == state.state - - common.alarm_disarm(self.hass, "abc") - self.hass.block_till_done() - - state = self.hass.states.get(entity_id) - assert STATE_ALARM_DISARMED == state.state - - def test_arm_home_via_command_topic(self): - """Test arming home via command topic.""" - assert setup_component( - self.hass, - alarm_control_panel.DOMAIN, - { - alarm_control_panel.DOMAIN: { - "platform": "manual_mqtt", - "name": "test", - "pending_time": 1, - "state_topic": "alarm/state", - "command_topic": "alarm/command", - "payload_arm_home": "ARM_HOME", - } - }, - ) - self.hass.block_till_done() - - entity_id = "alarm_control_panel.test" - - assert STATE_ALARM_DISARMED == self.hass.states.get(entity_id).state - - # Fire the arm command via MQTT; ensure state changes to pending - fire_mqtt_message(self.hass, "alarm/command", "ARM_HOME") - self.hass.block_till_done() - assert STATE_ALARM_PENDING == self.hass.states.get(entity_id).state - - # Fast-forward a little bit - future = dt_util.utcnow() + timedelta(seconds=1) - with patch( - ( - "homeassistant.components.manual_mqtt.alarm_control_panel." - "dt_util.utcnow" - ), - return_value=future, - ): - fire_time_changed(self.hass, future) - self.hass.block_till_done() - - assert STATE_ALARM_ARMED_HOME == self.hass.states.get(entity_id).state - - def test_arm_away_via_command_topic(self): - """Test arming away via command topic.""" - assert setup_component( - self.hass, - alarm_control_panel.DOMAIN, - { - alarm_control_panel.DOMAIN: { - "platform": "manual_mqtt", - "name": "test", - "pending_time": 1, - "state_topic": "alarm/state", - "command_topic": "alarm/command", - "payload_arm_away": "ARM_AWAY", - } - }, - ) - self.hass.block_till_done() - - entity_id = "alarm_control_panel.test" - - assert STATE_ALARM_DISARMED == self.hass.states.get(entity_id).state - - # Fire the arm command via MQTT; ensure state changes to pending - fire_mqtt_message(self.hass, "alarm/command", "ARM_AWAY") - self.hass.block_till_done() - assert STATE_ALARM_PENDING == self.hass.states.get(entity_id).state - - # Fast-forward a little bit - future = dt_util.utcnow() + timedelta(seconds=1) - with patch( - ( - "homeassistant.components.manual_mqtt.alarm_control_panel." 
- "dt_util.utcnow" - ), - return_value=future, - ): - fire_time_changed(self.hass, future) - self.hass.block_till_done() - - assert STATE_ALARM_ARMED_AWAY == self.hass.states.get(entity_id).state - - def test_arm_night_via_command_topic(self): - """Test arming night via command topic.""" - assert setup_component( - self.hass, - alarm_control_panel.DOMAIN, - { - alarm_control_panel.DOMAIN: { - "platform": "manual_mqtt", - "name": "test", - "pending_time": 1, - "state_topic": "alarm/state", - "command_topic": "alarm/command", - "payload_arm_night": "ARM_NIGHT", - } - }, - ) - self.hass.block_till_done() - - entity_id = "alarm_control_panel.test" - - assert STATE_ALARM_DISARMED == self.hass.states.get(entity_id).state - - # Fire the arm command via MQTT; ensure state changes to pending - fire_mqtt_message(self.hass, "alarm/command", "ARM_NIGHT") - self.hass.block_till_done() - assert STATE_ALARM_PENDING == self.hass.states.get(entity_id).state - - # Fast-forward a little bit - future = dt_util.utcnow() + timedelta(seconds=1) - with patch( - ( - "homeassistant.components.manual_mqtt.alarm_control_panel." - "dt_util.utcnow" - ), - return_value=future, - ): - fire_time_changed(self.hass, future) - self.hass.block_till_done() - - assert STATE_ALARM_ARMED_NIGHT == self.hass.states.get(entity_id).state - - def test_disarm_pending_via_command_topic(self): - """Test disarming pending alarm via command topic.""" - assert setup_component( - self.hass, - alarm_control_panel.DOMAIN, - { - alarm_control_panel.DOMAIN: { - "platform": "manual_mqtt", - "name": "test", - "pending_time": 1, - "state_topic": "alarm/state", - "command_topic": "alarm/command", - "payload_disarm": "DISARM", - } - }, - ) - self.hass.block_till_done() - - entity_id = "alarm_control_panel.test" - - assert STATE_ALARM_DISARMED == self.hass.states.get(entity_id).state - - common.alarm_trigger(self.hass) - self.hass.block_till_done() - - assert STATE_ALARM_PENDING == self.hass.states.get(entity_id).state - - # Now that we're pending, receive a command to disarm - fire_mqtt_message(self.hass, "alarm/command", "DISARM") - self.hass.block_till_done() - - assert STATE_ALARM_DISARMED == self.hass.states.get(entity_id).state - - def test_state_changes_are_published_to_mqtt(self): - """Test publishing of MQTT messages when state changes.""" - assert setup_component( - self.hass, - alarm_control_panel.DOMAIN, - { - alarm_control_panel.DOMAIN: { - "platform": "manual_mqtt", - "name": "test", - "pending_time": 1, - "trigger_time": 1, - "state_topic": "alarm/state", - "command_topic": "alarm/command", - } - }, - ) - self.hass.block_till_done() - - # Component should send disarmed alarm state on startup - self.hass.block_till_done() - self.mock_publish.async_publish.assert_called_once_with( - "alarm/state", STATE_ALARM_DISARMED, 0, True - ) - self.mock_publish.async_publish.reset_mock() - - # Arm in home mode - common.alarm_arm_home(self.hass) - self.hass.block_till_done() - self.mock_publish.async_publish.assert_called_once_with( - "alarm/state", STATE_ALARM_PENDING, 0, True - ) - self.mock_publish.async_publish.reset_mock() - # Fast-forward a little bit - future = dt_util.utcnow() + timedelta(seconds=1) - with patch( - ( - "homeassistant.components.manual_mqtt.alarm_control_panel." 
- "dt_util.utcnow" - ), - return_value=future, - ): - fire_time_changed(self.hass, future) - self.hass.block_till_done() - self.mock_publish.async_publish.assert_called_once_with( - "alarm/state", STATE_ALARM_ARMED_HOME, 0, True - ) - self.mock_publish.async_publish.reset_mock() - - # Arm in away mode - common.alarm_arm_away(self.hass) - self.hass.block_till_done() - self.mock_publish.async_publish.assert_called_once_with( - "alarm/state", STATE_ALARM_PENDING, 0, True - ) - self.mock_publish.async_publish.reset_mock() - # Fast-forward a little bit - future = dt_util.utcnow() + timedelta(seconds=1) - with patch( - ( - "homeassistant.components.manual_mqtt.alarm_control_panel." - "dt_util.utcnow" - ), - return_value=future, - ): - fire_time_changed(self.hass, future) - self.hass.block_till_done() - self.mock_publish.async_publish.assert_called_once_with( - "alarm/state", STATE_ALARM_ARMED_AWAY, 0, True - ) - self.mock_publish.async_publish.reset_mock() - - # Arm in night mode - common.alarm_arm_night(self.hass) - self.hass.block_till_done() - self.mock_publish.async_publish.assert_called_once_with( - "alarm/state", STATE_ALARM_PENDING, 0, True - ) - self.mock_publish.async_publish.reset_mock() - # Fast-forward a little bit - future = dt_util.utcnow() + timedelta(seconds=1) - with patch( - ( - "homeassistant.components.manual_mqtt.alarm_control_panel." - "dt_util.utcnow" - ), - return_value=future, - ): - fire_time_changed(self.hass, future) - self.hass.block_till_done() - self.mock_publish.async_publish.assert_called_once_with( - "alarm/state", STATE_ALARM_ARMED_NIGHT, 0, True - ) - self.mock_publish.async_publish.reset_mock() - - # Disarm - common.alarm_disarm(self.hass) - self.hass.block_till_done() - self.mock_publish.async_publish.assert_called_once_with( - "alarm/state", STATE_ALARM_DISARMED, 0, True - ) + future += timedelta(seconds=1) + with patch( + ("homeassistant.components.manual_mqtt.alarm_control_panel." 
"dt_util.utcnow"), + return_value=future, + ): + async_fire_time_changed(hass, future) + await hass.async_block_till_done() + + state = hass.states.get(entity_id) + assert STATE_ALARM_TRIGGERED == state.state + + +async def test_disarm_with_template_code(hass, mqtt_mock): + """Attempt to disarm with a valid or invalid template-based code.""" + assert await async_setup_component( + hass, + alarm_control_panel.DOMAIN, + { + "alarm_control_panel": { + "platform": "manual_mqtt", + "name": "test", + "code_template": '{{ "" if from_state == "disarmed" else "abc" }}', + "pending_time": 0, + "disarm_after_trigger": False, + "command_topic": "alarm/command", + "state_topic": "alarm/state", + } + }, + ) + await hass.async_block_till_done() + + entity_id = "alarm_control_panel.test" + + assert STATE_ALARM_DISARMED == hass.states.get(entity_id).state + + await common.async_alarm_arm_home(hass, "def") + await hass.async_block_till_done() + + state = hass.states.get(entity_id) + assert STATE_ALARM_ARMED_HOME == state.state + + await common.async_alarm_disarm(hass, "def") + await hass.async_block_till_done() + + state = hass.states.get(entity_id) + assert STATE_ALARM_ARMED_HOME == state.state + + await common.async_alarm_disarm(hass, "abc") + await hass.async_block_till_done() + + state = hass.states.get(entity_id) + assert STATE_ALARM_DISARMED == state.state + + +async def test_arm_home_via_command_topic(hass, mqtt_mock): + """Test arming home via command topic.""" + assert await async_setup_component( + hass, + alarm_control_panel.DOMAIN, + { + alarm_control_panel.DOMAIN: { + "platform": "manual_mqtt", + "name": "test", + "pending_time": 1, + "state_topic": "alarm/state", + "command_topic": "alarm/command", + "payload_arm_home": "ARM_HOME", + } + }, + ) + await hass.async_block_till_done() + + entity_id = "alarm_control_panel.test" + + assert STATE_ALARM_DISARMED == hass.states.get(entity_id).state + + # Fire the arm command via MQTT; ensure state changes to pending + async_fire_mqtt_message(hass, "alarm/command", "ARM_HOME") + await hass.async_block_till_done() + assert STATE_ALARM_PENDING == hass.states.get(entity_id).state + + # Fast-forward a little bit + future = dt_util.utcnow() + timedelta(seconds=1) + with patch( + ("homeassistant.components.manual_mqtt.alarm_control_panel." "dt_util.utcnow"), + return_value=future, + ): + async_fire_time_changed(hass, future) + await hass.async_block_till_done() + + assert STATE_ALARM_ARMED_HOME == hass.states.get(entity_id).state + + +async def test_arm_away_via_command_topic(hass, mqtt_mock): + """Test arming away via command topic.""" + assert await async_setup_component( + hass, + alarm_control_panel.DOMAIN, + { + alarm_control_panel.DOMAIN: { + "platform": "manual_mqtt", + "name": "test", + "pending_time": 1, + "state_topic": "alarm/state", + "command_topic": "alarm/command", + "payload_arm_away": "ARM_AWAY", + } + }, + ) + await hass.async_block_till_done() + + entity_id = "alarm_control_panel.test" + + assert STATE_ALARM_DISARMED == hass.states.get(entity_id).state + + # Fire the arm command via MQTT; ensure state changes to pending + async_fire_mqtt_message(hass, "alarm/command", "ARM_AWAY") + await hass.async_block_till_done() + assert STATE_ALARM_PENDING == hass.states.get(entity_id).state + + # Fast-forward a little bit + future = dt_util.utcnow() + timedelta(seconds=1) + with patch( + ("homeassistant.components.manual_mqtt.alarm_control_panel." 
"dt_util.utcnow"), + return_value=future, + ): + async_fire_time_changed(hass, future) + await hass.async_block_till_done() + + assert STATE_ALARM_ARMED_AWAY == hass.states.get(entity_id).state + + +async def test_arm_night_via_command_topic(hass, mqtt_mock): + """Test arming night via command topic.""" + assert await async_setup_component( + hass, + alarm_control_panel.DOMAIN, + { + alarm_control_panel.DOMAIN: { + "platform": "manual_mqtt", + "name": "test", + "pending_time": 1, + "state_topic": "alarm/state", + "command_topic": "alarm/command", + "payload_arm_night": "ARM_NIGHT", + } + }, + ) + await hass.async_block_till_done() + + entity_id = "alarm_control_panel.test" + + assert STATE_ALARM_DISARMED == hass.states.get(entity_id).state + + # Fire the arm command via MQTT; ensure state changes to pending + async_fire_mqtt_message(hass, "alarm/command", "ARM_NIGHT") + await hass.async_block_till_done() + assert STATE_ALARM_PENDING == hass.states.get(entity_id).state + + # Fast-forward a little bit + future = dt_util.utcnow() + timedelta(seconds=1) + with patch( + ("homeassistant.components.manual_mqtt.alarm_control_panel." "dt_util.utcnow"), + return_value=future, + ): + async_fire_time_changed(hass, future) + await hass.async_block_till_done() + + assert STATE_ALARM_ARMED_NIGHT == hass.states.get(entity_id).state + + +async def test_disarm_pending_via_command_topic(hass, mqtt_mock): + """Test disarming pending alarm via command topic.""" + assert await async_setup_component( + hass, + alarm_control_panel.DOMAIN, + { + alarm_control_panel.DOMAIN: { + "platform": "manual_mqtt", + "name": "test", + "pending_time": 1, + "state_topic": "alarm/state", + "command_topic": "alarm/command", + "payload_disarm": "DISARM", + } + }, + ) + await hass.async_block_till_done() + + entity_id = "alarm_control_panel.test" + + assert STATE_ALARM_DISARMED == hass.states.get(entity_id).state + + await common.async_alarm_trigger(hass) + await hass.async_block_till_done() + + assert STATE_ALARM_PENDING == hass.states.get(entity_id).state + + # Now that we're pending, receive a command to disarm + async_fire_mqtt_message(hass, "alarm/command", "DISARM") + await hass.async_block_till_done() + + assert STATE_ALARM_DISARMED == hass.states.get(entity_id).state + + +async def test_state_changes_are_published_to_mqtt(hass, mqtt_mock): + """Test publishing of MQTT messages when state changes.""" + assert await async_setup_component( + hass, + alarm_control_panel.DOMAIN, + { + alarm_control_panel.DOMAIN: { + "platform": "manual_mqtt", + "name": "test", + "pending_time": 1, + "trigger_time": 1, + "state_topic": "alarm/state", + "command_topic": "alarm/command", + } + }, + ) + await hass.async_block_till_done() + + # Component should send disarmed alarm state on startup + await hass.async_block_till_done() + mqtt_mock.async_publish.assert_called_once_with( + "alarm/state", STATE_ALARM_DISARMED, 0, True + ) + mqtt_mock.async_publish.reset_mock() + + # Arm in home mode + await common.async_alarm_arm_home(hass) + await hass.async_block_till_done() + mqtt_mock.async_publish.assert_called_once_with( + "alarm/state", STATE_ALARM_PENDING, 0, True + ) + mqtt_mock.async_publish.reset_mock() + # Fast-forward a little bit + future = dt_util.utcnow() + timedelta(seconds=1) + with patch( + ("homeassistant.components.manual_mqtt.alarm_control_panel." 
"dt_util.utcnow"), + return_value=future, + ): + async_fire_time_changed(hass, future) + await hass.async_block_till_done() + mqtt_mock.async_publish.assert_called_once_with( + "alarm/state", STATE_ALARM_ARMED_HOME, 0, True + ) + mqtt_mock.async_publish.reset_mock() + + # Arm in away mode + await common.async_alarm_arm_away(hass) + await hass.async_block_till_done() + mqtt_mock.async_publish.assert_called_once_with( + "alarm/state", STATE_ALARM_PENDING, 0, True + ) + mqtt_mock.async_publish.reset_mock() + # Fast-forward a little bit + future = dt_util.utcnow() + timedelta(seconds=1) + with patch( + ("homeassistant.components.manual_mqtt.alarm_control_panel." "dt_util.utcnow"), + return_value=future, + ): + async_fire_time_changed(hass, future) + await hass.async_block_till_done() + mqtt_mock.async_publish.assert_called_once_with( + "alarm/state", STATE_ALARM_ARMED_AWAY, 0, True + ) + mqtt_mock.async_publish.reset_mock() + + # Arm in night mode + await common.async_alarm_arm_night(hass) + await hass.async_block_till_done() + mqtt_mock.async_publish.assert_called_once_with( + "alarm/state", STATE_ALARM_PENDING, 0, True + ) + mqtt_mock.async_publish.reset_mock() + # Fast-forward a little bit + future = dt_util.utcnow() + timedelta(seconds=1) + with patch( + ("homeassistant.components.manual_mqtt.alarm_control_panel." "dt_util.utcnow"), + return_value=future, + ): + async_fire_time_changed(hass, future) + await hass.async_block_till_done() + mqtt_mock.async_publish.assert_called_once_with( + "alarm/state", STATE_ALARM_ARMED_NIGHT, 0, True + ) + mqtt_mock.async_publish.reset_mock() + + # Disarm + await common.async_alarm_disarm(hass) + await hass.async_block_till_done() + mqtt_mock.async_publish.assert_called_once_with( + "alarm/state", STATE_ALARM_DISARMED, 0, True + ) diff --git a/tests/components/media_player/test_async_helpers.py b/tests/components/media_player/test_async_helpers.py index ac0d70bded9..12414801a52 100644 --- a/tests/components/media_player/test_async_helpers.py +++ b/tests/components/media_player/test_async_helpers.py @@ -146,8 +146,9 @@ class TestAsyncMediaPlayer(unittest.TestCase): """Set up things to be run when tests are started.""" self.hass = get_test_home_assistant() self.player = AsyncMediaPlayer(self.hass) + self.addCleanup(self.tear_down_cleanup) - def tearDown(self): + def tear_down_cleanup(self): """Shut down test instance.""" self.hass.stop() @@ -207,8 +208,9 @@ class TestSyncMediaPlayer(unittest.TestCase): """Set up things to be run when tests are started.""" self.hass = get_test_home_assistant() self.player = SyncMediaPlayer(self.hass) + self.addCleanup(self.tear_down_cleanup) - def tearDown(self): + def tear_down_cleanup(self): """Shut down test instance.""" self.hass.stop() diff --git a/tests/components/metoffice/__init__.py b/tests/components/metoffice/__init__.py new file mode 100644 index 00000000000..fdefc3d4786 --- /dev/null +++ b/tests/components/metoffice/__init__.py @@ -0,0 +1 @@ +"""Tests for the metoffice component.""" diff --git a/tests/components/metoffice/conftest.py b/tests/components/metoffice/conftest.py new file mode 100644 index 00000000000..9538c7a8668 --- /dev/null +++ b/tests/components/metoffice/conftest.py @@ -0,0 +1,22 @@ +"""Fixtures for Met Office weather integration tests.""" +from datapoint.exceptions import APIException +import pytest + +from tests.async_mock import patch + + +@pytest.fixture() +def mock_simple_manager_fail(): + """Mock datapoint Manager with default values for testing in config_flow.""" + with 
patch("datapoint.Manager") as mock_manager: + instance = mock_manager.return_value + instance.get_nearest_forecast_site.side_effect = APIException() + instance.get_forecast_for_site.side_effect = APIException() + instance.latitude = None + instance.longitude = None + instance.site = None + instance.site_id = None + instance.site_name = None + instance.now = None + + yield mock_manager diff --git a/tests/components/metoffice/const.py b/tests/components/metoffice/const.py new file mode 100644 index 00000000000..5d8d781b042 --- /dev/null +++ b/tests/components/metoffice/const.py @@ -0,0 +1,58 @@ +"""Helpers for testing Met Office DataPoint.""" + +from homeassistant.const import CONF_API_KEY, CONF_LATITUDE, CONF_LONGITUDE, CONF_NAME + +DATETIME_FORMAT = "%Y-%m-%d %H:%M:%S%z" +TEST_DATETIME_STRING = "2020-04-25 12:00:00+0000" + +TEST_API_KEY = "test-metoffice-api-key" + +TEST_LATITUDE_WAVERTREE = 53.38374 +TEST_LONGITUDE_WAVERTREE = -2.90929 +TEST_SITE_NAME_WAVERTREE = "Wavertree" + +TEST_LATITUDE_KINGSLYNN = 52.75556 +TEST_LONGITUDE_KINGSLYNN = 0.44231 +TEST_SITE_NAME_KINGSLYNN = "King's Lynn" + +METOFFICE_CONFIG_WAVERTREE = { + CONF_API_KEY: TEST_API_KEY, + CONF_LATITUDE: TEST_LATITUDE_WAVERTREE, + CONF_LONGITUDE: TEST_LONGITUDE_WAVERTREE, + CONF_NAME: TEST_SITE_NAME_WAVERTREE, +} + +METOFFICE_CONFIG_KINGSLYNN = { + CONF_API_KEY: TEST_API_KEY, + CONF_LATITUDE: TEST_LATITUDE_KINGSLYNN, + CONF_LONGITUDE: TEST_LONGITUDE_KINGSLYNN, + CONF_NAME: TEST_SITE_NAME_KINGSLYNN, +} + +KINGSLYNN_SENSOR_RESULTS = { + "weather": ("weather", "sunny"), + "visibility": ("visibility", "Very Good"), + "visibility_distance": ("visibility_distance", "20-40"), + "temperature": ("temperature", "14"), + "feels_like_temperature": ("feels_like_temperature", "13"), + "uv": ("uv_index", "6"), + "precipitation": ("probability_of_precipitation", "0"), + "wind_direction": ("wind_direction", "E"), + "wind_gust": ("wind_gust", "7"), + "wind_speed": ("wind_speed", "2"), + "humidity": ("humidity", "60"), +} + +WAVERTREE_SENSOR_RESULTS = { + "weather": ("weather", "sunny"), + "visibility": ("visibility", "Good"), + "visibility_distance": ("visibility_distance", "10-20"), + "temperature": ("temperature", "17"), + "feels_like_temperature": ("feels_like_temperature", "14"), + "uv": ("uv_index", "5"), + "precipitation": ("probability_of_precipitation", "0"), + "wind_direction": ("wind_direction", "SSE"), + "wind_gust": ("wind_gust", "16"), + "wind_speed": ("wind_speed", "9"), + "humidity": ("humidity", "50"), +} diff --git a/tests/components/metoffice/test_config_flow.py b/tests/components/metoffice/test_config_flow.py new file mode 100644 index 00000000000..6916e949b1c --- /dev/null +++ b/tests/components/metoffice/test_config_flow.py @@ -0,0 +1,122 @@ +"""Test the National Weather Service (NWS) config flow.""" +import json + +from homeassistant import config_entries, setup +from homeassistant.components.metoffice.const import DOMAIN + +from .const import ( + METOFFICE_CONFIG_WAVERTREE, + TEST_API_KEY, + TEST_LATITUDE_WAVERTREE, + TEST_LONGITUDE_WAVERTREE, + TEST_SITE_NAME_WAVERTREE, +) + +from tests.async_mock import patch +from tests.common import MockConfigEntry, load_fixture + + +async def test_form(hass, requests_mock): + """Test we get the form.""" + hass.config.latitude = TEST_LATITUDE_WAVERTREE + hass.config.longitude = TEST_LONGITUDE_WAVERTREE + + # all metoffice test data encapsulated in here + mock_json = json.loads(load_fixture("metoffice.json")) + all_sites = json.dumps(mock_json["all_sites"]) + 
requests_mock.get("/public/data/val/wxfcs/all/json/sitelist/", text=all_sites) + + await setup.async_setup_component(hass, "persistent_notification", {}) + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + assert result["type"] == "form" + assert result["errors"] == {} + + with patch( + "homeassistant.components.metoffice.async_setup", return_value=True + ) as mock_setup, patch( + "homeassistant.components.metoffice.async_setup_entry", return_value=True, + ) as mock_setup_entry: + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], {"api_key": TEST_API_KEY} + ) + + assert result2["type"] == "create_entry" + assert result2["title"] == TEST_SITE_NAME_WAVERTREE + assert result2["data"] == { + "api_key": TEST_API_KEY, + "latitude": TEST_LATITUDE_WAVERTREE, + "longitude": TEST_LONGITUDE_WAVERTREE, + "name": TEST_SITE_NAME_WAVERTREE, + } + await hass.async_block_till_done() + assert len(mock_setup.mock_calls) == 1 + assert len(mock_setup_entry.mock_calls) == 1 + + +async def test_form_already_configured(hass, requests_mock): + """Test we handle duplicate entries.""" + hass.config.latitude = TEST_LATITUDE_WAVERTREE + hass.config.longitude = TEST_LONGITUDE_WAVERTREE + + # all metoffice test data encapsulated in here + mock_json = json.loads(load_fixture("metoffice.json")) + + all_sites = json.dumps(mock_json["all_sites"]) + + requests_mock.get("/public/data/val/wxfcs/all/json/sitelist/", text=all_sites) + requests_mock.get( + "/public/data/val/wxfcs/all/json/354107?res=3hourly", text="", + ) + + MockConfigEntry( + domain=DOMAIN, + unique_id=f"{TEST_LATITUDE_WAVERTREE}_{TEST_LONGITUDE_WAVERTREE}", + data=METOFFICE_CONFIG_WAVERTREE, + ).add_to_hass(hass) + + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": config_entries.SOURCE_USER}, + data=METOFFICE_CONFIG_WAVERTREE, + ) + + assert result["type"] == "abort" + assert result["reason"] == "already_configured" + + +async def test_form_cannot_connect(hass, requests_mock): + """Test we handle cannot connect error.""" + hass.config.latitude = TEST_LATITUDE_WAVERTREE + hass.config.longitude = TEST_LONGITUDE_WAVERTREE + + requests_mock.get("/public/data/val/wxfcs/all/json/sitelist/", text="") + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], {"api_key": TEST_API_KEY}, + ) + + assert result2["type"] == "form" + assert result2["errors"] == {"base": "cannot_connect"} + + +async def test_form_unknown_error(hass, mock_simple_manager_fail): + """Test we handle unknown error.""" + mock_instance = mock_simple_manager_fail.return_value + mock_instance.get_nearest_forecast_site.side_effect = ValueError + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], {"api_key": TEST_API_KEY}, + ) + + assert result2["type"] == "form" + assert result2["errors"] == {"base": "unknown"} diff --git a/tests/components/metoffice/test_sensor.py b/tests/components/metoffice/test_sensor.py new file mode 100644 index 00000000000..70a66a3093c --- /dev/null +++ b/tests/components/metoffice/test_sensor.py @@ -0,0 +1,117 @@ +"""The tests for the Met Office sensor component.""" +from datetime import datetime, timezone +import json + +from homeassistant.components.metoffice.const 
import ATTRIBUTION, DOMAIN + +from .const import ( + DATETIME_FORMAT, + KINGSLYNN_SENSOR_RESULTS, + METOFFICE_CONFIG_KINGSLYNN, + METOFFICE_CONFIG_WAVERTREE, + TEST_DATETIME_STRING, + TEST_SITE_NAME_KINGSLYNN, + TEST_SITE_NAME_WAVERTREE, + WAVERTREE_SENSOR_RESULTS, +) + +from tests.async_mock import Mock, patch +from tests.common import MockConfigEntry, load_fixture + + +@patch( + "datapoint.Forecast.datetime.datetime", + Mock(now=Mock(return_value=datetime(2020, 4, 25, 12, tzinfo=timezone.utc))), +) +async def test_one_sensor_site_running(hass, requests_mock): + """Test the Met Office sensor platform.""" + + # all metoffice test data encapsulated in here + mock_json = json.loads(load_fixture("metoffice.json")) + all_sites = json.dumps(mock_json["all_sites"]) + wavertree_hourly = json.dumps(mock_json["wavertree_hourly"]) + + requests_mock.get("/public/data/val/wxfcs/all/json/sitelist/", text=all_sites) + requests_mock.get( + "/public/data/val/wxfcs/all/json/354107?res=3hourly", text=wavertree_hourly, + ) + + entry = MockConfigEntry(domain=DOMAIN, data=METOFFICE_CONFIG_WAVERTREE,) + entry.add_to_hass(hass) + await hass.config_entries.async_setup(entry.entry_id) + await hass.async_block_till_done() + + running_sensor_ids = hass.states.async_entity_ids("sensor") + assert len(running_sensor_ids) > 0 + for running_id in running_sensor_ids: + sensor = hass.states.get(running_id) + sensor_id = sensor.attributes.get("sensor_id") + sensor_name, sensor_value = WAVERTREE_SENSOR_RESULTS[sensor_id] + + assert sensor.state == sensor_value + assert ( + sensor.attributes.get("last_update").strftime(DATETIME_FORMAT) + == TEST_DATETIME_STRING + ) + assert sensor.attributes.get("site_id") == "354107" + assert sensor.attributes.get("site_name") == TEST_SITE_NAME_WAVERTREE + assert sensor.attributes.get("attribution") == ATTRIBUTION + + +@patch( + "datapoint.Forecast.datetime.datetime", + Mock(now=Mock(return_value=datetime(2020, 4, 25, 12, tzinfo=timezone.utc))), +) +async def test_two_sensor_sites_running(hass, requests_mock): + """Test we handle two sets of sensors running for two different sites.""" + + # all metoffice test data encapsulated in here + mock_json = json.loads(load_fixture("metoffice.json")) + all_sites = json.dumps(mock_json["all_sites"]) + wavertree_hourly = json.dumps(mock_json["wavertree_hourly"]) + kingslynn_hourly = json.dumps(mock_json["kingslynn_hourly"]) + + requests_mock.get("/public/data/val/wxfcs/all/json/sitelist/", text=all_sites) + requests_mock.get( + "/public/data/val/wxfcs/all/json/354107?res=3hourly", text=wavertree_hourly + ) + requests_mock.get( + "/public/data/val/wxfcs/all/json/322380?res=3hourly", text=kingslynn_hourly + ) + + entry = MockConfigEntry(domain=DOMAIN, data=METOFFICE_CONFIG_WAVERTREE,) + entry.add_to_hass(hass) + await hass.config_entries.async_setup(entry.entry_id) + entry2 = MockConfigEntry(domain=DOMAIN, data=METOFFICE_CONFIG_KINGSLYNN,) + entry2.add_to_hass(hass) + await hass.config_entries.async_setup(entry2.entry_id) + await hass.async_block_till_done() + + running_sensor_ids = hass.states.async_entity_ids("sensor") + assert len(running_sensor_ids) > 0 + for running_id in running_sensor_ids: + sensor = hass.states.get(running_id) + sensor_id = sensor.attributes.get("sensor_id") + if sensor.attributes.get("site_id") == "354107": + sensor_name, sensor_value = WAVERTREE_SENSOR_RESULTS[sensor_id] + assert sensor.state == sensor_value + assert ( + sensor.attributes.get("last_update").strftime(DATETIME_FORMAT) + == TEST_DATETIME_STRING + ) + assert 
sensor.attributes.get("sensor_id") == sensor_id + assert sensor.attributes.get("site_id") == "354107" + assert sensor.attributes.get("site_name") == TEST_SITE_NAME_WAVERTREE + assert sensor.attributes.get("attribution") == ATTRIBUTION + + else: + sensor_name, sensor_value = KINGSLYNN_SENSOR_RESULTS[sensor_id] + assert sensor.state == sensor_value + assert ( + sensor.attributes.get("last_update").strftime(DATETIME_FORMAT) + == TEST_DATETIME_STRING + ) + assert sensor.attributes.get("sensor_id") == sensor_id + assert sensor.attributes.get("site_id") == "322380" + assert sensor.attributes.get("site_name") == TEST_SITE_NAME_KINGSLYNN + assert sensor.attributes.get("attribution") == ATTRIBUTION diff --git a/tests/components/metoffice/test_weather.py b/tests/components/metoffice/test_weather.py new file mode 100644 index 00000000000..08440798f47 --- /dev/null +++ b/tests/components/metoffice/test_weather.py @@ -0,0 +1,159 @@ +"""The tests for the Met Office weather component.""" +from datetime import datetime, timedelta, timezone +import json + +from homeassistant.components.metoffice.const import DOMAIN +from homeassistant.const import STATE_UNAVAILABLE +from homeassistant.util import utcnow + +from .const import ( + METOFFICE_CONFIG_KINGSLYNN, + METOFFICE_CONFIG_WAVERTREE, + WAVERTREE_SENSOR_RESULTS, +) + +from tests.async_mock import Mock, patch +from tests.common import MockConfigEntry, async_fire_time_changed, load_fixture + + +@patch( + "datapoint.Forecast.datetime.datetime", + Mock(now=Mock(return_value=datetime(2020, 4, 25, 12, tzinfo=timezone.utc))), +) +async def test_site_cannot_connect(hass, requests_mock): + """Test we handle cannot connect error.""" + + requests_mock.get("/public/data/val/wxfcs/all/json/sitelist/", text="") + requests_mock.get("/public/data/val/wxfcs/all/json/354107?res=3hourly", text="") + + entry = MockConfigEntry(domain=DOMAIN, data=METOFFICE_CONFIG_WAVERTREE,) + entry.add_to_hass(hass) + await hass.config_entries.async_setup(entry.entry_id) + await hass.async_block_till_done() + + assert hass.states.get("weather.met_office_wavertree") is None + for sensor_id in WAVERTREE_SENSOR_RESULTS: + sensor_name, sensor_value = WAVERTREE_SENSOR_RESULTS[sensor_id] + sensor = hass.states.get(f"sensor.wavertree_{sensor_name}") + assert sensor is None + + +@patch( + "datapoint.Forecast.datetime.datetime", + Mock(now=Mock(return_value=datetime(2020, 4, 25, 12, tzinfo=timezone.utc))), +) +async def test_site_cannot_update(hass, requests_mock): + """Test we handle cannot update error.""" + + # all metoffice test data encapsulated in here + mock_json = json.loads(load_fixture("metoffice.json")) + all_sites = json.dumps(mock_json["all_sites"]) + wavertree_hourly = json.dumps(mock_json["wavertree_hourly"]) + + requests_mock.get("/public/data/val/wxfcs/all/json/sitelist/", text=all_sites) + requests_mock.get( + "/public/data/val/wxfcs/all/json/354107?res=3hourly", text=wavertree_hourly + ) + + entry = MockConfigEntry(domain=DOMAIN, data=METOFFICE_CONFIG_WAVERTREE,) + entry.add_to_hass(hass) + await hass.config_entries.async_setup(entry.entry_id) + await hass.async_block_till_done() + + entity = hass.states.get("weather.met_office_wavertree") + assert entity + + requests_mock.get("/public/data/val/wxfcs/all/json/354107?res=3hourly", text="") + + future_time = utcnow() + timedelta(minutes=20) + async_fire_time_changed(hass, future_time) + await hass.async_block_till_done() + + entity = hass.states.get("weather.met_office_wavertree") + assert entity.state == STATE_UNAVAILABLE + + 
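An aside on the @patch("datapoint.Forecast.datetime.datetime", Mock(now=Mock(return_value=...))) decorator used on the Met Office tests above and below: it replaces the datapoint library's clock with a Mock whose now() always returns the fixed test instant, so the library's notion of "now" lines up with the canned forecast data in the metoffice.json fixture. A minimal sketch of that pattern follows; it is not part of this diff and uses unittest.mock directly rather than the tests.async_mock shim the test suite imports from.

from datetime import datetime, timezone
from unittest.mock import Mock

# The frozen "now" matching the fixture data used by the tests above.
FROZEN_NOW = datetime(2020, 4, 25, 12, tzinfo=timezone.utc)

# Mock(now=Mock(return_value=...)) builds a stand-in for datetime.datetime whose
# .now() returns the frozen instant regardless of arguments.
frozen_clock = Mock(now=Mock(return_value=FROZEN_NOW))

assert frozen_clock.now() == FROZEN_NOW
assert frozen_clock.now(timezone.utc) == FROZEN_NOW

# In the tests it is applied for the duration of each test function via
# @patch("datapoint.Forecast.datetime.datetime", frozen_clock).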
+@patch( + "datapoint.Forecast.datetime.datetime", + Mock(now=Mock(return_value=datetime(2020, 4, 25, 12, tzinfo=timezone.utc))), +) +async def test_one_weather_site_running(hass, requests_mock): + """Test the Met Office weather platform.""" + + # all metoffice test data encapsulated in here + mock_json = json.loads(load_fixture("metoffice.json")) + all_sites = json.dumps(mock_json["all_sites"]) + wavertree_hourly = json.dumps(mock_json["wavertree_hourly"]) + + requests_mock.get("/public/data/val/wxfcs/all/json/sitelist/", text=all_sites) + requests_mock.get( + "/public/data/val/wxfcs/all/json/354107?res=3hourly", text=wavertree_hourly, + ) + + entry = MockConfigEntry(domain=DOMAIN, data=METOFFICE_CONFIG_WAVERTREE,) + entry.add_to_hass(hass) + await hass.config_entries.async_setup(entry.entry_id) + await hass.async_block_till_done() + + # Wavertree weather platform expected results + entity = hass.states.get("weather.met_office_wavertree") + assert entity + + assert entity.state == "sunny" + assert entity.attributes.get("temperature") == 17 + assert entity.attributes.get("wind_speed") == 9 + assert entity.attributes.get("wind_bearing") == "SSE" + assert entity.attributes.get("visibility") == "Good - 10-20" + assert entity.attributes.get("humidity") == 50 + + +@patch( + "datapoint.Forecast.datetime.datetime", + Mock(now=Mock(return_value=datetime(2020, 4, 25, 12, tzinfo=timezone.utc))), +) +async def test_two_weather_sites_running(hass, requests_mock): + """Test we handle two different weather sites both running.""" + + # all metoffice test data encapsulated in here + mock_json = json.loads(load_fixture("metoffice.json")) + all_sites = json.dumps(mock_json["all_sites"]) + wavertree_hourly = json.dumps(mock_json["wavertree_hourly"]) + kingslynn_hourly = json.dumps(mock_json["kingslynn_hourly"]) + + requests_mock.get("/public/data/val/wxfcs/all/json/sitelist/", text=all_sites) + requests_mock.get( + "/public/data/val/wxfcs/all/json/354107?res=3hourly", text=wavertree_hourly + ) + requests_mock.get( + "/public/data/val/wxfcs/all/json/322380?res=3hourly", text=kingslynn_hourly + ) + + entry = MockConfigEntry(domain=DOMAIN, data=METOFFICE_CONFIG_WAVERTREE,) + entry.add_to_hass(hass) + await hass.config_entries.async_setup(entry.entry_id) + entry2 = MockConfigEntry(domain=DOMAIN, data=METOFFICE_CONFIG_KINGSLYNN,) + entry2.add_to_hass(hass) + await hass.config_entries.async_setup(entry2.entry_id) + await hass.async_block_till_done() + + # Wavertree weather platform expected results + entity = hass.states.get("weather.met_office_wavertree") + assert entity + + assert entity.state == "sunny" + assert entity.attributes.get("temperature") == 17 + assert entity.attributes.get("wind_speed") == 9 + assert entity.attributes.get("wind_bearing") == "SSE" + assert entity.attributes.get("visibility") == "Good - 10-20" + assert entity.attributes.get("humidity") == 50 + + # King's Lynn weather platform expected results + entity = hass.states.get("weather.met_office_king_s_lynn") + assert entity + + assert entity.state == "sunny" + assert entity.attributes.get("temperature") == 14 + assert entity.attributes.get("wind_speed") == 2 + assert entity.attributes.get("wind_bearing") == "E" + assert entity.attributes.get("visibility") == "Very Good - 20-40" + assert entity.attributes.get("humidity") == 60 diff --git a/tests/components/mobile_app/test_webhook.py b/tests/components/mobile_app/test_webhook.py index c0071913035..790ebc56bf6 100644 --- a/tests/components/mobile_app/test_webhook.py +++ 
b/tests/components/mobile_app/test_webhook.py @@ -3,14 +3,17 @@ import logging import pytest +from homeassistant.components.camera import SUPPORT_STREAM as CAMERA_SUPPORT_STREAM from homeassistant.components.mobile_app.const import CONF_SECRET from homeassistant.components.zone import DOMAIN as ZONE_DOMAIN from homeassistant.const import CONF_WEBHOOK_ID from homeassistant.core import callback +from homeassistant.exceptions import HomeAssistantError from homeassistant.setup import async_setup_component from .const import CALL_SERVICE, FIRE_EVENT, REGISTER_CLEARTEXT, RENDER_TEMPLATE, UPDATE +from tests.async_mock import patch from tests.common import async_mock_service _LOGGER = logging.getLogger(__name__) @@ -303,3 +306,103 @@ async def test_webhook_enable_encryption(hass, webhook_client, create_registrati decrypted_data = decrypt_payload(key, enc_json["encrypted_data"]) assert decrypted_data == {"one": "Hello world"} + + +async def test_webhook_camera_stream_non_existent( + hass, create_registrations, webhook_client +): + """Test fetching camera stream URLs for a non-existent camera.""" + webhook_id = create_registrations[1]["webhook_id"] + + resp = await webhook_client.post( + f"/api/webhook/{webhook_id}", + json={ + "type": "stream_camera", + "data": {"camera_entity_id": "camera.doesnt_exist"}, + }, + ) + + assert resp.status == 400 + webhook_json = await resp.json() + assert webhook_json["success"] is False + + +async def test_webhook_camera_stream_non_hls( + hass, create_registrations, webhook_client +): + """Test fetching camera stream URLs for a non-HLS/stream-supporting camera.""" + hass.states.async_set("camera.non_stream_camera", "idle", {"supported_features": 0}) + + webhook_id = create_registrations[1]["webhook_id"] + + resp = await webhook_client.post( + f"/api/webhook/{webhook_id}", + json={ + "type": "stream_camera", + "data": {"camera_entity_id": "camera.non_stream_camera"}, + }, + ) + + assert resp.status == 200 + webhook_json = await resp.json() + assert webhook_json["hls_path"] is None + assert ( + webhook_json["mjpeg_path"] + == "/api/camera_proxy_stream/camera.non_stream_camera" + ) + + +async def test_webhook_camera_stream_stream_available( + hass, create_registrations, webhook_client +): + """Test fetching camera stream URLs for an HLS/stream-supporting camera.""" + hass.states.async_set( + "camera.stream_camera", "idle", {"supported_features": CAMERA_SUPPORT_STREAM} + ) + + webhook_id = create_registrations[1]["webhook_id"] + + with patch( + "homeassistant.components.camera.async_request_stream", + return_value="/api/streams/some_hls_stream", + ): + resp = await webhook_client.post( + f"/api/webhook/{webhook_id}", + json={ + "type": "stream_camera", + "data": {"camera_entity_id": "camera.stream_camera"}, + }, + ) + + assert resp.status == 200 + webhook_json = await resp.json() + assert webhook_json["hls_path"] == "/api/streams/some_hls_stream" + assert webhook_json["mjpeg_path"] == "/api/camera_proxy_stream/camera.stream_camera" + + +async def test_webhook_camera_stream_stream_available_but_errors( + hass, create_registrations, webhook_client +): + """Test fetching camera stream URLs for an HLS/stream-supporting camera but that streaming errors.""" + hass.states.async_set( + "camera.stream_camera", "idle", {"supported_features": CAMERA_SUPPORT_STREAM} + ) + + webhook_id = create_registrations[1]["webhook_id"] + + with patch( + "homeassistant.components.camera.async_request_stream", + side_effect=HomeAssistantError(), + ): + resp = await webhook_client.post( + 
f"/api/webhook/{webhook_id}", + json={ + "type": "stream_camera", + "data": {"camera_entity_id": "camera.stream_camera"}, + }, + ) + + assert resp.status == 200 + webhook_json = await resp.json() + assert webhook_json["hls_path"] is None + assert webhook_json["mjpeg_path"] == "/api/camera_proxy_stream/camera.stream_camera" diff --git a/tests/components/mochad/test_light.py b/tests/components/mochad/test_light.py index 2dd385f0253..eb9cf047d02 100644 --- a/tests/components/mochad/test_light.py +++ b/tests/components/mochad/test_light.py @@ -28,10 +28,7 @@ class TestMochadSwitchSetup(unittest.TestCase): def setUp(self): """Set up things to be run when tests are started.""" self.hass = get_test_home_assistant() - - def tearDown(self): - """Stop everything that was started.""" - self.hass.stop() + self.addCleanup(self.hass.stop) @mock.patch("homeassistant.components.mochad.light.MochadLight") def test_setup_adds_proper_devices(self, mock_light): diff --git a/tests/components/mochad/test_switch.py b/tests/components/mochad/test_switch.py index 699edfe899c..66f84c16fee 100644 --- a/tests/components/mochad/test_switch.py +++ b/tests/components/mochad/test_switch.py @@ -30,10 +30,7 @@ class TestMochadSwitchSetup(unittest.TestCase): def setUp(self): """Set up things to be run when tests are started.""" self.hass = get_test_home_assistant() - - def tearDown(self): - """Stop everything that was started.""" - self.hass.stop() + self.addCleanup(self.hass.stop) @mock.patch("homeassistant.components.mochad.switch.MochadSwitch") def test_setup_adds_proper_devices(self, mock_switch): diff --git a/tests/components/mold_indicator/test_sensor.py b/tests/components/mold_indicator/test_sensor.py index 5f3b223bf66..423c728ff72 100644 --- a/tests/components/mold_indicator/test_sensor.py +++ b/tests/components/mold_indicator/test_sensor.py @@ -32,8 +32,9 @@ class TestSensorMoldIndicator(unittest.TestCase): self.hass.states.set( "test.indoorhumidity", "50", {ATTR_UNIT_OF_MEASUREMENT: UNIT_PERCENTAGE} ) + self.addCleanup(self.tear_down_cleanup) - def tearDown(self): + def tear_down_cleanup(self): """Stop down everything that was started.""" self.hass.stop() diff --git a/tests/components/mqtt/conftest.py b/tests/components/mqtt/conftest.py index 290682549f5..895327d1756 100644 --- a/tests/components/mqtt/conftest.py +++ b/tests/components/mqtt/conftest.py @@ -1,12 +1 @@ """Test fixtures for mqtt component.""" -import pytest - -from tests.common import async_mock_mqtt_component - - -@pytest.fixture -def mqtt_mock(loop, hass): - """Fixture to mock MQTT.""" - client = loop.run_until_complete(async_mock_mqtt_component(hass)) - client.reset_mock() - return client diff --git a/tests/components/mqtt/test_alarm_control_panel.py b/tests/components/mqtt/test_alarm_control_panel.py index 03e8133bde9..aa6452fd9c8 100644 --- a/tests/components/mqtt/test_alarm_control_panel.py +++ b/tests/components/mqtt/test_alarm_control_panel.py @@ -17,8 +17,10 @@ from homeassistant.const import ( STATE_ALARM_TRIGGERED, STATE_UNKNOWN, ) +from homeassistant.setup import async_setup_component from .test_common import ( + help_test_availability_when_connection_lost, help_test_availability_without_topic, help_test_custom_availability_payload, help_test_default_availability_payload, @@ -40,11 +42,7 @@ from .test_common import ( help_test_update_with_json_attrs_not_dict, ) -from tests.common import ( - assert_setup_component, - async_fire_mqtt_message, - async_setup_component, -) +from tests.common import assert_setup_component, 
async_fire_mqtt_message from tests.components.alarm_control_panel import common CODE_NUMBER = "1234" @@ -469,6 +467,13 @@ async def test_attributes_code_text(hass, mqtt_mock): ) +async def test_availability_when_connection_lost(hass, mqtt_mock): + """Test availability after MQTT disconnection.""" + await help_test_availability_when_connection_lost( + hass, mqtt_mock, alarm_control_panel.DOMAIN, DEFAULT_CONFIG_CODE + ) + + async def test_availability_without_topic(hass, mqtt_mock): """Test availability without defined availability topic.""" await help_test_availability_without_topic( @@ -525,7 +530,7 @@ async def test_discovery_update_attr(hass, mqtt_mock, caplog): ) -async def test_unique_id(hass): +async def test_unique_id(hass, mqtt_mock): """Test unique id option only creates one alarm per unique_id.""" config = { alarm_control_panel.DOMAIN: [ @@ -545,7 +550,7 @@ async def test_unique_id(hass): }, ] } - await help_test_unique_id(hass, alarm_control_panel.DOMAIN, config) + await help_test_unique_id(hass, mqtt_mock, alarm_control_panel.DOMAIN, config) async def test_discovery_removal_alarm(hass, mqtt_mock, caplog): diff --git a/tests/components/mqtt/test_binary_sensor.py b/tests/components/mqtt/test_binary_sensor.py index 8c68fabf214..67bb6704339 100644 --- a/tests/components/mqtt/test_binary_sensor.py +++ b/tests/components/mqtt/test_binary_sensor.py @@ -18,6 +18,7 @@ from homeassistant.setup import async_setup_component import homeassistant.util.dt as dt_util from .test_common import ( + help_test_availability_when_connection_lost, help_test_availability_without_topic, help_test_custom_availability_payload, help_test_default_availability_payload, @@ -40,11 +41,7 @@ from .test_common import ( ) from tests.async_mock import patch -from tests.common import ( - MockConfigEntry, - async_fire_mqtt_message, - async_fire_time_changed, -) +from tests.common import async_fire_mqtt_message, async_fire_time_changed DEFAULT_CONFIG = { binary_sensor.DOMAIN: { @@ -298,6 +295,13 @@ async def test_invalid_device_class(hass, mqtt_mock): assert state is None +async def test_availability_when_connection_lost(hass, mqtt_mock): + """Test availability after MQTT disconnection.""" + await help_test_availability_when_connection_lost( + hass, mqtt_mock, binary_sensor.DOMAIN, DEFAULT_CONFIG + ) + + async def test_availability_without_topic(hass, mqtt_mock): """Test availability without defined availability topic.""" await help_test_availability_without_topic( @@ -472,7 +476,7 @@ async def test_discovery_update_attr(hass, mqtt_mock, caplog): ) -async def test_unique_id(hass): +async def test_unique_id(hass, mqtt_mock): """Test unique id option only creates one sensor per unique_id.""" config = { binary_sensor.DOMAIN: [ @@ -490,7 +494,7 @@ async def test_unique_id(hass): }, ] } - await help_test_unique_id(hass, binary_sensor.DOMAIN, config) + await help_test_unique_id(hass, mqtt_mock, binary_sensor.DOMAIN, config) async def test_discovery_removal_binary_sensor(hass, mqtt_mock, caplog): @@ -519,8 +523,8 @@ async def test_expiration_on_discovery_and_discovery_update_of_binary_sensor( hass, mqtt_mock, caplog ): """Test that binary_sensor with expire_after set behaves correctly on discovery and discovery update.""" - entry = MockConfigEntry(domain=mqtt.DOMAIN) - await async_start(hass, "homeassistant", {}, entry) + entry = hass.config_entries.async_entries(mqtt.DOMAIN)[0] + await async_start(hass, "homeassistant", entry) config = { "name": "Test", diff --git a/tests/components/mqtt/test_camera.py 
b/tests/components/mqtt/test_camera.py index 11b846d4c38..6869b530668 100644 --- a/tests/components/mqtt/test_camera.py +++ b/tests/components/mqtt/test_camera.py @@ -8,6 +8,7 @@ from homeassistant.components.mqtt.discovery import async_start from homeassistant.setup import async_setup_component from .test_common import ( + help_test_availability_when_connection_lost, help_test_availability_without_topic, help_test_custom_availability_payload, help_test_default_availability_payload, @@ -29,21 +30,16 @@ from .test_common import ( help_test_update_with_json_attrs_not_dict, ) -from tests.common import ( - MockConfigEntry, - async_fire_mqtt_message, - async_mock_mqtt_component, -) +from tests.common import async_fire_mqtt_message DEFAULT_CONFIG = { camera.DOMAIN: {"platform": "mqtt", "name": "test", "topic": "test_topic"} } -async def test_run_camera_setup(hass, aiohttp_client): +async def test_run_camera_setup(hass, aiohttp_client, mqtt_mock): """Test that it fetches the given payload.""" topic = "test/camera" - await async_mock_mqtt_component(hass) await async_setup_component( hass, "camera", @@ -62,6 +58,13 @@ async def test_run_camera_setup(hass, aiohttp_client): assert body == "beer" +async def test_availability_when_connection_lost(hass, mqtt_mock): + """Test availability after MQTT disconnection.""" + await help_test_availability_when_connection_lost( + hass, mqtt_mock, camera.DOMAIN, DEFAULT_CONFIG + ) + + async def test_availability_without_topic(hass, mqtt_mock): """Test availability without defined availability topic.""" await help_test_availability_without_topic( @@ -118,7 +121,7 @@ async def test_discovery_update_attr(hass, mqtt_mock, caplog): ) -async def test_unique_id(hass): +async def test_unique_id(hass, mqtt_mock): """Test unique id option only creates one camera per unique_id.""" config = { camera.DOMAIN: [ @@ -136,7 +139,7 @@ async def test_unique_id(hass): }, ] } - await help_test_unique_id(hass, camera.DOMAIN, config) + await help_test_unique_id(hass, mqtt_mock, camera.DOMAIN, config) async def test_discovery_removal_camera(hass, mqtt_mock, caplog): @@ -147,8 +150,8 @@ async def test_discovery_removal_camera(hass, mqtt_mock, caplog): async def test_discovery_update_camera(hass, mqtt_mock, caplog): """Test update of discovered camera.""" - entry = MockConfigEntry(domain=mqtt.DOMAIN) - await async_start(hass, "homeassistant", {}, entry) + entry = hass.config_entries.async_entries(mqtt.DOMAIN)[0] + await async_start(hass, "homeassistant", entry) data1 = '{ "name": "Beer",' ' "topic": "test_topic"}' data2 = '{ "name": "Milk",' ' "topic": "test_topic"}' @@ -161,8 +164,8 @@ async def test_discovery_update_camera(hass, mqtt_mock, caplog): @pytest.mark.no_fail_on_log_exception async def test_discovery_broken(hass, mqtt_mock, caplog): """Test handling of bad discovery message.""" - entry = MockConfigEntry(domain=mqtt.DOMAIN) - await async_start(hass, "homeassistant", {}, entry) + entry = hass.config_entries.async_entries(mqtt.DOMAIN)[0] + await async_start(hass, "homeassistant", entry) data1 = '{ "name": "Beer" }' data2 = '{ "name": "Milk",' ' "topic": "test_topic"}' diff --git a/tests/components/mqtt/test_climate.py b/tests/components/mqtt/test_climate.py index 30018c7c175..6a7bdf0b7e6 100644 --- a/tests/components/mqtt/test_climate.py +++ b/tests/components/mqtt/test_climate.py @@ -23,8 +23,10 @@ from homeassistant.components.climate.const import ( SUPPORT_TARGET_TEMPERATURE_RANGE, ) from homeassistant.const import STATE_OFF +from homeassistant.setup import 
async_setup_component from .test_common import ( + help_test_availability_when_connection_lost, help_test_availability_without_topic, help_test_custom_availability_payload, help_test_default_availability_payload, @@ -47,7 +49,7 @@ from .test_common import ( ) from tests.async_mock import call -from tests.common import async_fire_mqtt_message, async_setup_component +from tests.common import async_fire_mqtt_message from tests.components.climate import common ENTITY_CLIMATE = "climate.test" @@ -608,6 +610,13 @@ async def test_set_aux(hass, mqtt_mock): assert state.attributes.get("aux_heat") == "off" +async def test_availability_when_connection_lost(hass, mqtt_mock): + """Test availability after MQTT disconnection.""" + await help_test_availability_when_connection_lost( + hass, mqtt_mock, CLIMATE_DOMAIN, DEFAULT_CONFIG + ) + + async def test_availability_without_topic(hass, mqtt_mock): """Test availability without defined availability topic.""" await help_test_availability_without_topic( @@ -862,7 +871,7 @@ async def test_discovery_update_attr(hass, mqtt_mock, caplog): ) -async def test_unique_id(hass): +async def test_unique_id(hass, mqtt_mock): """Test unique id option only creates one climate per unique_id.""" config = { CLIMATE_DOMAIN: [ @@ -882,7 +891,7 @@ async def test_unique_id(hass): }, ] } - await help_test_unique_id(hass, CLIMATE_DOMAIN, config) + await help_test_unique_id(hass, mqtt_mock, CLIMATE_DOMAIN, config) async def test_discovery_removal_climate(hass, mqtt_mock, caplog): diff --git a/tests/components/mqtt/test_common.py b/tests/components/mqtt/test_common.py index d0ddc1d4830..4275cc36e13 100644 --- a/tests/components/mqtt/test_common.py +++ b/tests/components/mqtt/test_common.py @@ -6,17 +6,14 @@ from unittest import mock from homeassistant.components import mqtt from homeassistant.components.mqtt import debug_info +from homeassistant.components.mqtt.const import MQTT_DISCONNECTED from homeassistant.components.mqtt.discovery import async_start from homeassistant.const import ATTR_ASSUMED_STATE, STATE_UNAVAILABLE +from homeassistant.helpers.dispatcher import async_dispatcher_send +from homeassistant.setup import async_setup_component from tests.async_mock import ANY -from tests.common import ( - MockConfigEntry, - async_fire_mqtt_message, - async_mock_mqtt_component, - async_setup_component, - mock_registry, -) +from tests.common import async_fire_mqtt_message, mock_registry DEFAULT_CONFIG_DEVICE_INFO_ID = { "identifiers": ["helloworld"], @@ -35,6 +32,22 @@ DEFAULT_CONFIG_DEVICE_INFO_MAC = { } +async def help_test_availability_when_connection_lost(hass, mqtt_mock, domain, config): + """Test availability after MQTT disconnection.""" + assert await async_setup_component(hass, domain, config) + await hass.async_block_till_done() + + state = hass.states.get(f"{domain}.test") + assert state.state != STATE_UNAVAILABLE + + mqtt_mock.connected = False + async_dispatcher_send(hass, MQTT_DISCONNECTED) + await hass.async_block_till_done() + + state = hass.states.get(f"{domain}.test") + assert state.state == STATE_UNAVAILABLE + + async def help_test_availability_without_topic(hass, mqtt_mock, domain, config): """Test availability without defined availability topic.""" assert "availability_topic" not in config[domain] @@ -232,8 +245,8 @@ async def help_test_discovery_update_attr(hass, mqtt_mock, caplog, domain, confi data1 = json.dumps(config1[domain]) data2 = json.dumps(config2[domain]) - entry = MockConfigEntry(domain=mqtt.DOMAIN) - await async_start(hass, "homeassistant", {}, entry) + 
entry = hass.config_entries.async_entries(mqtt.DOMAIN)[0] + await async_start(hass, "homeassistant", entry) async_fire_mqtt_message(hass, f"homeassistant/{domain}/bla/config", data1) await hass.async_block_till_done() async_fire_mqtt_message(hass, "attr-topic1", '{ "val": "100" }') @@ -255,9 +268,8 @@ async def help_test_discovery_update_attr(hass, mqtt_mock, caplog, domain, confi assert state.attributes.get("val") == "75" -async def help_test_unique_id(hass, domain, config): +async def help_test_unique_id(hass, mqtt_mock, domain, config): """Test unique id option only creates one entity per unique_id.""" - await async_mock_mqtt_component(hass) assert await async_setup_component(hass, domain, config) await hass.async_block_till_done() assert len(hass.states.async_entity_ids(domain)) == 1 @@ -268,8 +280,8 @@ async def help_test_discovery_removal(hass, mqtt_mock, caplog, domain, data): This is a test helper for the MqttDiscoveryUpdate mixin. """ - entry = MockConfigEntry(domain=mqtt.DOMAIN) - await async_start(hass, "homeassistant", {}, entry) + entry = hass.config_entries.async_entries(mqtt.DOMAIN)[0] + await async_start(hass, "homeassistant", entry) async_fire_mqtt_message(hass, f"homeassistant/{domain}/bla/config", data) await hass.async_block_till_done() @@ -290,8 +302,8 @@ async def help_test_discovery_update(hass, mqtt_mock, caplog, domain, data1, dat This is a test helper for the MqttDiscoveryUpdate mixin. """ - entry = MockConfigEntry(domain=mqtt.DOMAIN) - await async_start(hass, "homeassistant", {}, entry) + entry = hass.config_entries.async_entries(mqtt.DOMAIN)[0] + await async_start(hass, "homeassistant", entry) async_fire_mqtt_message(hass, f"homeassistant/{domain}/bla/config", data1) await hass.async_block_till_done() @@ -313,8 +325,8 @@ async def help_test_discovery_update(hass, mqtt_mock, caplog, domain, data1, dat async def help_test_discovery_broken(hass, mqtt_mock, caplog, domain, data1, data2): """Test handling of bad discovery message.""" - entry = MockConfigEntry(domain=mqtt.DOMAIN) - await async_start(hass, "homeassistant", {}, entry) + entry = hass.config_entries.async_entries(mqtt.DOMAIN)[0] + await async_start(hass, "homeassistant", entry) async_fire_mqtt_message(hass, f"homeassistant/{domain}/bla/config", data1) await hass.async_block_till_done() @@ -342,9 +354,8 @@ async def help_test_entity_device_info_with_identifier(hass, mqtt_mock, domain, config["device"] = copy.deepcopy(DEFAULT_CONFIG_DEVICE_INFO_ID) config["unique_id"] = "veryunique" - entry = MockConfigEntry(domain=mqtt.DOMAIN) - entry.add_to_hass(hass) - await async_start(hass, "homeassistant", {}, entry) + entry = hass.config_entries.async_entries(mqtt.DOMAIN)[0] + await async_start(hass, "homeassistant", entry) registry = await hass.helpers.device_registry.async_get_registry() data = json.dumps(config) @@ -370,9 +381,8 @@ async def help_test_entity_device_info_with_connection(hass, mqtt_mock, domain, config["device"] = copy.deepcopy(DEFAULT_CONFIG_DEVICE_INFO_MAC) config["unique_id"] = "veryunique" - entry = MockConfigEntry(domain=mqtt.DOMAIN) - entry.add_to_hass(hass) - await async_start(hass, "homeassistant", {}, entry) + entry = hass.config_entries.async_entries(mqtt.DOMAIN)[0] + await async_start(hass, "homeassistant", entry) registry = await hass.helpers.device_registry.async_get_registry() data = json.dumps(config) @@ -395,9 +405,8 @@ async def help_test_entity_device_info_remove(hass, mqtt_mock, domain, config): config["device"] = copy.deepcopy(DEFAULT_CONFIG_DEVICE_INFO_ID) config["unique_id"] = 
"veryunique" - entry = MockConfigEntry(domain=mqtt.DOMAIN) - entry.add_to_hass(hass) - await async_start(hass, "homeassistant", {}, entry) + entry = hass.config_entries.async_entries(mqtt.DOMAIN)[0] + await async_start(hass, "homeassistant", entry) dev_registry = await hass.helpers.device_registry.async_get_registry() ent_registry = await hass.helpers.entity_registry.async_get_registry() @@ -427,9 +436,8 @@ async def help_test_entity_device_info_update(hass, mqtt_mock, domain, config): config["device"] = copy.deepcopy(DEFAULT_CONFIG_DEVICE_INFO_ID) config["unique_id"] = "veryunique" - entry = MockConfigEntry(domain=mqtt.DOMAIN) - entry.add_to_hass(hass) - await async_start(hass, "homeassistant", {}, entry) + entry = hass.config_entries.async_entries(mqtt.DOMAIN)[0] + await async_start(hass, "homeassistant", entry) registry = await hass.helpers.device_registry.async_get_registry() data = json.dumps(config) @@ -465,16 +473,15 @@ async def help_test_entity_id_update_subscriptions( topics = ["avty-topic", "test-topic"] assert len(topics) > 0 registry = mock_registry(hass, {}) - mock_mqtt = await async_mock_mqtt_component(hass) assert await async_setup_component(hass, domain, config,) await hass.async_block_till_done() state = hass.states.get(f"{domain}.test") assert state is not None - assert mock_mqtt.async_subscribe.call_count == len(topics) + assert mqtt_mock.async_subscribe.call_count == len(topics) for topic in topics: - mock_mqtt.async_subscribe.assert_any_call(topic, ANY, ANY, ANY) - mock_mqtt.async_subscribe.reset_mock() + mqtt_mock.async_subscribe.assert_any_call(topic, ANY, ANY, ANY) + mqtt_mock.async_subscribe.reset_mock() registry.async_update_entity(f"{domain}.test", new_entity_id=f"{domain}.milk") await hass.async_block_till_done() @@ -485,7 +492,7 @@ async def help_test_entity_id_update_subscriptions( state = hass.states.get(f"{domain}.milk") assert state is not None for topic in topics: - mock_mqtt.async_subscribe.assert_any_call(topic, ANY, ANY, ANY) + mqtt_mock.async_subscribe.assert_any_call(topic, ANY, ANY, ANY) async def help_test_entity_id_update_discovery_update( @@ -501,9 +508,8 @@ async def help_test_entity_id_update_discovery_update( config[domain]["availability_topic"] = "avty-topic" topic = "avty-topic" - entry = MockConfigEntry(domain=mqtt.DOMAIN) - entry.add_to_hass(hass) - await async_start(hass, "homeassistant", {}, entry) + entry = hass.config_entries.async_entries(mqtt.DOMAIN)[0] + await async_start(hass, "homeassistant", entry) ent_registry = mock_registry(hass, {}) data = json.dumps(config[domain]) @@ -542,9 +548,8 @@ async def help_test_entity_debug_info(hass, mqtt_mock, domain, config): config["device"] = copy.deepcopy(DEFAULT_CONFIG_DEVICE_INFO_ID) config["unique_id"] = "veryunique" - entry = MockConfigEntry(domain=mqtt.DOMAIN) - entry.add_to_hass(hass) - await async_start(hass, "homeassistant", {}, entry) + entry = hass.config_entries.async_entries(mqtt.DOMAIN)[0] + await async_start(hass, "homeassistant", entry) registry = await hass.helpers.device_registry.async_get_registry() data = json.dumps(config) @@ -578,9 +583,8 @@ async def help_test_entity_debug_info_max_messages(hass, mqtt_mock, domain, conf config["device"] = copy.deepcopy(DEFAULT_CONFIG_DEVICE_INFO_ID) config["unique_id"] = "veryunique" - entry = MockConfigEntry(domain=mqtt.DOMAIN) - entry.add_to_hass(hass) - await async_start(hass, "homeassistant", {}, entry) + entry = hass.config_entries.async_entries(mqtt.DOMAIN)[0] + await async_start(hass, "homeassistant", entry) registry = await 
hass.helpers.device_registry.async_get_registry() data = json.dumps(config) @@ -643,9 +647,8 @@ async def help_test_entity_debug_info_message( if payload is None: payload = "ON" - entry = MockConfigEntry(domain=mqtt.DOMAIN) - entry.add_to_hass(hass) - await async_start(hass, "homeassistant", {}, entry) + entry = hass.config_entries.async_entries(mqtt.DOMAIN)[0] + await async_start(hass, "homeassistant", entry) registry = await hass.helpers.device_registry.async_get_registry() data = json.dumps(config) @@ -692,9 +695,8 @@ async def help_test_entity_debug_info_remove(hass, mqtt_mock, domain, config): config["device"] = copy.deepcopy(DEFAULT_CONFIG_DEVICE_INFO_ID) config["unique_id"] = "veryunique" - entry = MockConfigEntry(domain=mqtt.DOMAIN) - entry.add_to_hass(hass) - await async_start(hass, "homeassistant", {}, entry) + entry = hass.config_entries.async_entries(mqtt.DOMAIN)[0] + await async_start(hass, "homeassistant", entry) registry = await hass.helpers.device_registry.async_get_registry() data = json.dumps(config) @@ -738,9 +740,8 @@ async def help_test_entity_debug_info_update_entity_id(hass, mqtt_mock, domain, config["device"] = copy.deepcopy(DEFAULT_CONFIG_DEVICE_INFO_ID) config["unique_id"] = "veryunique" - entry = MockConfigEntry(domain=mqtt.DOMAIN) - entry.add_to_hass(hass) - await async_start(hass, "homeassistant", {}, entry) + entry = hass.config_entries.async_entries(mqtt.DOMAIN)[0] + await async_start(hass, "homeassistant", entry) dev_registry = await hass.helpers.device_registry.async_get_registry() ent_registry = mock_registry(hass, {}) diff --git a/tests/components/mqtt/test_config_flow.py b/tests/components/mqtt/test_config_flow.py index 0990accec9f..581395b702a 100644 --- a/tests/components/mqtt/test_config_flow.py +++ b/tests/components/mqtt/test_config_flow.py @@ -1,7 +1,11 @@ """Test config flow.""" import pytest +import voluptuous as vol +from homeassistant import data_entry_flow +from homeassistant.components import mqtt +from homeassistant.components.mqtt.discovery import async_start from homeassistant.setup import async_setup_component from tests.async_mock import patch @@ -144,3 +148,340 @@ async def test_hassio_confirm(hass, mock_try_connection, mock_finish_setup): assert len(mock_try_connection.mock_calls) == 1 # Check config entry got setup assert len(mock_finish_setup.mock_calls) == 1 + + +async def test_option_flow(hass, mqtt_mock, mock_try_connection): + """Test config flow options.""" + mock_try_connection.return_value = True + config_entry = hass.config_entries.async_entries(mqtt.DOMAIN)[0] + await async_start(hass, "homeassistant", config_entry) + config_entry.data = { + mqtt.CONF_BROKER: "test-broker", + mqtt.CONF_PORT: 1234, + } + + mqtt_mock.async_connect.reset_mock() + + result = await hass.config_entries.options.async_init(config_entry.entry_id) + assert result["type"] == data_entry_flow.RESULT_TYPE_FORM + assert result["step_id"] == "broker" + + result = await hass.config_entries.options.async_configure( + result["flow_id"], + user_input={ + mqtt.CONF_BROKER: "another-broker", + mqtt.CONF_PORT: 2345, + mqtt.CONF_USERNAME: "user", + mqtt.CONF_PASSWORD: "pass", + }, + ) + assert result["type"] == data_entry_flow.RESULT_TYPE_FORM + assert result["step_id"] == "options" + + await hass.async_block_till_done() + assert mqtt_mock.async_connect.call_count == 0 + + result = await hass.config_entries.options.async_configure( + result["flow_id"], + user_input={ + mqtt.CONF_DISCOVERY: True, + "birth_topic": "ha_state/online", + "birth_payload": "online", + 
"birth_qos": 1, + "birth_retain": True, + "will_topic": "ha_state/offline", + "will_payload": "offline", + "will_qos": 2, + "will_retain": True, + }, + ) + assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY + assert result["data"] is None + assert config_entry.data == { + mqtt.CONF_BROKER: "another-broker", + mqtt.CONF_PORT: 2345, + mqtt.CONF_USERNAME: "user", + mqtt.CONF_PASSWORD: "pass", + mqtt.CONF_DISCOVERY: True, + mqtt.CONF_BIRTH_MESSAGE: { + mqtt.ATTR_TOPIC: "ha_state/online", + mqtt.ATTR_PAYLOAD: "online", + mqtt.ATTR_QOS: 1, + mqtt.ATTR_RETAIN: True, + }, + mqtt.CONF_WILL_MESSAGE: { + mqtt.ATTR_TOPIC: "ha_state/offline", + mqtt.ATTR_PAYLOAD: "offline", + mqtt.ATTR_QOS: 2, + mqtt.ATTR_RETAIN: True, + }, + } + + await hass.async_block_till_done() + assert mqtt_mock.async_connect.call_count == 1 + + +def get_default(schema, key): + """Get default value for key in voluptuous schema.""" + for k in schema.keys(): + if k == key: + if k.default == vol.UNDEFINED: + return None + return k.default() + + +def get_suggested(schema, key): + """Get suggested value for key in voluptuous schema.""" + for k in schema.keys(): + if k == key: + if k.description is None or "suggested_value" not in k.description: + return None + return k.description["suggested_value"] + + +async def test_option_flow_default_suggested_values( + hass, mqtt_mock, mock_try_connection +): + """Test config flow options has default/suggested values.""" + mock_try_connection.return_value = True + config_entry = hass.config_entries.async_entries(mqtt.DOMAIN)[0] + await async_start(hass, "homeassistant", config_entry) + config_entry.data = { + mqtt.CONF_BROKER: "test-broker", + mqtt.CONF_PORT: 1234, + mqtt.CONF_USERNAME: "user", + mqtt.CONF_PASSWORD: "pass", + mqtt.CONF_DISCOVERY: True, + mqtt.CONF_BIRTH_MESSAGE: { + mqtt.ATTR_TOPIC: "ha_state/online", + mqtt.ATTR_PAYLOAD: "online", + mqtt.ATTR_QOS: 1, + mqtt.ATTR_RETAIN: True, + }, + mqtt.CONF_WILL_MESSAGE: { + mqtt.ATTR_TOPIC: "ha_state/offline", + mqtt.ATTR_PAYLOAD: "offline", + mqtt.ATTR_QOS: 2, + mqtt.ATTR_RETAIN: False, + }, + } + + # Test default/suggested values from config + result = await hass.config_entries.options.async_init(config_entry.entry_id) + assert result["type"] == data_entry_flow.RESULT_TYPE_FORM + assert result["step_id"] == "broker" + defaults = { + mqtt.CONF_BROKER: "test-broker", + mqtt.CONF_PORT: 1234, + } + suggested = { + mqtt.CONF_USERNAME: "user", + mqtt.CONF_PASSWORD: "pass", + } + for k, v in defaults.items(): + assert get_default(result["data_schema"].schema, k) == v + for k, v in suggested.items(): + assert get_suggested(result["data_schema"].schema, k) == v + + result = await hass.config_entries.options.async_configure( + result["flow_id"], + user_input={ + mqtt.CONF_BROKER: "another-broker", + mqtt.CONF_PORT: 2345, + mqtt.CONF_USERNAME: "us3r", + mqtt.CONF_PASSWORD: "p4ss", + }, + ) + assert result["type"] == data_entry_flow.RESULT_TYPE_FORM + assert result["step_id"] == "options" + defaults = { + mqtt.CONF_DISCOVERY: True, + "birth_qos": 1, + "birth_retain": True, + "will_qos": 2, + "will_retain": False, + } + suggested = { + "birth_topic": "ha_state/online", + "birth_payload": "online", + "will_topic": "ha_state/offline", + "will_payload": "offline", + } + for k, v in defaults.items(): + assert get_default(result["data_schema"].schema, k) == v + for k, v in suggested.items(): + assert get_suggested(result["data_schema"].schema, k) == v + + result = await hass.config_entries.options.async_configure( + result["flow_id"], + 
user_input={ + mqtt.CONF_DISCOVERY: False, + "birth_topic": "ha_state/onl1ne", + "birth_payload": "onl1ne", + "birth_qos": 2, + "birth_retain": False, + "will_topic": "ha_state/offl1ne", + "will_payload": "offl1ne", + "will_qos": 1, + "will_retain": True, + }, + ) + assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY + + # Test updated default/suggested values from config + result = await hass.config_entries.options.async_init(config_entry.entry_id) + assert result["type"] == data_entry_flow.RESULT_TYPE_FORM + assert result["step_id"] == "broker" + defaults = { + mqtt.CONF_BROKER: "another-broker", + mqtt.CONF_PORT: 2345, + } + suggested = { + mqtt.CONF_USERNAME: "us3r", + mqtt.CONF_PASSWORD: "p4ss", + } + for k, v in defaults.items(): + assert get_default(result["data_schema"].schema, k) == v + for k, v in suggested.items(): + assert get_suggested(result["data_schema"].schema, k) == v + + result = await hass.config_entries.options.async_configure( + result["flow_id"], + user_input={mqtt.CONF_BROKER: "another-broker", mqtt.CONF_PORT: 2345}, + ) + assert result["type"] == data_entry_flow.RESULT_TYPE_FORM + assert result["step_id"] == "options" + defaults = { + mqtt.CONF_DISCOVERY: False, + "birth_qos": 2, + "birth_retain": False, + "will_qos": 1, + "will_retain": True, + } + suggested = { + "birth_topic": "ha_state/onl1ne", + "birth_payload": "onl1ne", + "will_topic": "ha_state/offl1ne", + "will_payload": "offl1ne", + } + for k, v in defaults.items(): + assert get_default(result["data_schema"].schema, k) == v + for k, v in suggested.items(): + assert get_suggested(result["data_schema"].schema, k) == v + + result = await hass.config_entries.options.async_configure( + result["flow_id"], + user_input={ + mqtt.CONF_DISCOVERY: True, + "birth_topic": "ha_state/onl1ne", + "birth_payload": "onl1ne", + "birth_qos": 2, + "birth_retain": False, + "will_topic": "ha_state/offl1ne", + "will_payload": "offl1ne", + "will_qos": 1, + "will_retain": True, + }, + ) + assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY + + +async def test_options_user_connection_fails(hass, mock_try_connection): + """Test if connection cannot be made.""" + config_entry = MockConfigEntry(domain=mqtt.DOMAIN) + config_entry.add_to_hass(hass) + config_entry.data = { + mqtt.CONF_BROKER: "test-broker", + mqtt.CONF_PORT: 1234, + } + + mock_try_connection.return_value = False + + result = await hass.config_entries.options.async_init(config_entry.entry_id) + assert result["type"] == "form" + + result = await hass.config_entries.options.async_configure( + result["flow_id"], + user_input={mqtt.CONF_BROKER: "bad-broker", mqtt.CONF_PORT: 2345}, + ) + + assert result["type"] == "form" + assert result["errors"]["base"] == "cannot_connect" + + # Check we tried the connection + assert len(mock_try_connection.mock_calls) == 1 + # Check config entry did not update + assert config_entry.data == { + mqtt.CONF_BROKER: "test-broker", + mqtt.CONF_PORT: 1234, + } + + +async def test_options_bad_birth_message_fails(hass, mock_try_connection): + """Test bad birth message.""" + config_entry = MockConfigEntry(domain=mqtt.DOMAIN) + config_entry.add_to_hass(hass) + config_entry.data = { + mqtt.CONF_BROKER: "test-broker", + mqtt.CONF_PORT: 1234, + } + + mock_try_connection.return_value = True + + result = await hass.config_entries.options.async_init(config_entry.entry_id) + assert result["type"] == "form" + + result = await hass.config_entries.options.async_configure( + result["flow_id"], + user_input={mqtt.CONF_BROKER: 
"another-broker", mqtt.CONF_PORT: 2345}, + ) + + assert result["type"] == "form" + assert result["step_id"] == "options" + + result = await hass.config_entries.options.async_configure( + result["flow_id"], user_input={"birth_topic": "ha_state/online/#"}, + ) + assert result["type"] == "form" + assert result["errors"]["base"] == "bad_birth" + + # Check config entry did not update + assert config_entry.data == { + mqtt.CONF_BROKER: "test-broker", + mqtt.CONF_PORT: 1234, + } + + +async def test_options_bad_will_message_fails(hass, mock_try_connection): + """Test bad will message.""" + config_entry = MockConfigEntry(domain=mqtt.DOMAIN) + config_entry.add_to_hass(hass) + config_entry.data = { + mqtt.CONF_BROKER: "test-broker", + mqtt.CONF_PORT: 1234, + } + + mock_try_connection.return_value = True + + result = await hass.config_entries.options.async_init(config_entry.entry_id) + assert result["type"] == "form" + + result = await hass.config_entries.options.async_configure( + result["flow_id"], + user_input={mqtt.CONF_BROKER: "another-broker", mqtt.CONF_PORT: 2345}, + ) + + assert result["type"] == "form" + assert result["step_id"] == "options" + + result = await hass.config_entries.options.async_configure( + result["flow_id"], user_input={"will_topic": "ha_state/offline/#"}, + ) + assert result["type"] == "form" + assert result["errors"]["base"] == "bad_will" + + # Check config entry did not update + assert config_entry.data == { + mqtt.CONF_BROKER: "test-broker", + mqtt.CONF_PORT: 1234, + } diff --git a/tests/components/mqtt/test_cover.py b/tests/components/mqtt/test_cover.py index eb758ebf93a..c3f00badef8 100644 --- a/tests/components/mqtt/test_cover.py +++ b/tests/components/mqtt/test_cover.py @@ -30,6 +30,7 @@ from homeassistant.const import ( from homeassistant.setup import async_setup_component from .test_common import ( + help_test_availability_when_connection_lost, help_test_availability_without_topic, help_test_custom_availability_payload, help_test_default_availability_payload, @@ -1735,6 +1736,13 @@ async def test_find_in_range_altered_inverted(hass, mqtt_mock): assert mqtt_cover.find_in_range_from_percent(60, "cover") == 120 +async def test_availability_when_connection_lost(hass, mqtt_mock): + """Test availability after MQTT disconnection.""" + await help_test_availability_when_connection_lost( + hass, mqtt_mock, cover.DOMAIN, DEFAULT_CONFIG + ) + + async def test_availability_without_topic(hass, mqtt_mock): """Test availability without defined availability topic.""" await help_test_availability_without_topic( @@ -1831,7 +1839,7 @@ async def test_discovery_update_attr(hass, mqtt_mock, caplog): ) -async def test_unique_id(hass): +async def test_unique_id(hass, mqtt_mock): """Test unique_id option only creates one cover per id.""" config = { cover.DOMAIN: [ @@ -1849,7 +1857,7 @@ async def test_unique_id(hass): }, ] } - await help_test_unique_id(hass, cover.DOMAIN, config) + await help_test_unique_id(hass, mqtt_mock, cover.DOMAIN, config) async def test_discovery_removal_cover(hass, mqtt_mock, caplog): diff --git a/tests/components/mqtt/test_device_trigger.py b/tests/components/mqtt/test_device_trigger.py index aa7dc746951..43b483c074b 100644 --- a/tests/components/mqtt/test_device_trigger.py +++ b/tests/components/mqtt/test_device_trigger.py @@ -10,11 +10,9 @@ from homeassistant.components.mqtt.discovery import async_start from homeassistant.setup import async_setup_component from tests.common import ( - MockConfigEntry, assert_lists_same, async_fire_mqtt_message, 
async_get_device_automations, - async_mock_mqtt_component, async_mock_service, mock_device_registry, mock_registry, @@ -41,9 +39,8 @@ def calls(hass): async def test_get_triggers(hass, device_reg, entity_reg, mqtt_mock): """Test we get the expected triggers from a discovered mqtt device.""" - config_entry = MockConfigEntry(domain=DOMAIN, data={}) - config_entry.add_to_hass(hass) - await async_start(hass, "homeassistant", {}, config_entry) + config_entry = hass.config_entries.async_entries(DOMAIN)[0] + await async_start(hass, "homeassistant", config_entry) data1 = ( '{ "automation_type":"trigger",' @@ -73,9 +70,8 @@ async def test_get_triggers(hass, device_reg, entity_reg, mqtt_mock): async def test_get_unknown_triggers(hass, device_reg, entity_reg, mqtt_mock): """Test we don't get unknown triggers.""" - config_entry = MockConfigEntry(domain=DOMAIN, data={}) - config_entry.add_to_hass(hass) - await async_start(hass, "homeassistant", {}, config_entry) + config_entry = hass.config_entries.async_entries(DOMAIN)[0] + await async_start(hass, "homeassistant", config_entry) # Discover a sensor (without device triggers) data1 = ( @@ -117,9 +113,8 @@ async def test_get_unknown_triggers(hass, device_reg, entity_reg, mqtt_mock): async def test_get_non_existing_triggers(hass, device_reg, entity_reg, mqtt_mock): """Test getting non existing triggers.""" - config_entry = MockConfigEntry(domain=DOMAIN, data={}) - config_entry.add_to_hass(hass) - await async_start(hass, "homeassistant", {}, config_entry) + config_entry = hass.config_entries.async_entries(DOMAIN)[0] + await async_start(hass, "homeassistant", config_entry) # Discover a sensor (without device triggers) data1 = ( @@ -138,9 +133,8 @@ async def test_get_non_existing_triggers(hass, device_reg, entity_reg, mqtt_mock @pytest.mark.no_fail_on_log_exception async def test_discover_bad_triggers(hass, device_reg, entity_reg, mqtt_mock): """Test bad discovery message.""" - config_entry = MockConfigEntry(domain=DOMAIN, data={}) - config_entry.add_to_hass(hass) - await async_start(hass, "homeassistant", {}, config_entry) + config_entry = hass.config_entries.async_entries(DOMAIN)[0] + await async_start(hass, "homeassistant", config_entry) # Test sending bad data data0 = ( @@ -184,9 +178,8 @@ async def test_discover_bad_triggers(hass, device_reg, entity_reg, mqtt_mock): async def test_update_remove_triggers(hass, device_reg, entity_reg, mqtt_mock): """Test triggers can be updated and removed.""" - config_entry = MockConfigEntry(domain=DOMAIN, data={}) - config_entry.add_to_hass(hass) - await async_start(hass, "homeassistant", {}, config_entry) + config_entry = hass.config_entries.async_entries(DOMAIN)[0] + await async_start(hass, "homeassistant", config_entry) data1 = ( '{ "automation_type":"trigger",' @@ -241,9 +234,8 @@ async def test_update_remove_triggers(hass, device_reg, entity_reg, mqtt_mock): async def test_if_fires_on_mqtt_message(hass, device_reg, calls, mqtt_mock): """Test triggers firing.""" - config_entry = MockConfigEntry(domain=DOMAIN, data={}) - config_entry.add_to_hass(hass) - await async_start(hass, "homeassistant", {}, config_entry) + config_entry = hass.config_entries.async_entries(DOMAIN)[0] + await async_start(hass, "homeassistant", config_entry) data1 = ( '{ "automation_type":"trigger",' @@ -320,9 +312,8 @@ async def test_if_fires_on_mqtt_message_late_discover( hass, device_reg, calls, mqtt_mock ): """Test triggers firing of MQTT device triggers discovered after setup.""" - config_entry = MockConfigEntry(domain=DOMAIN, data={}) - 
config_entry.add_to_hass(hass) - await async_start(hass, "homeassistant", {}, config_entry) + config_entry = hass.config_entries.async_entries(DOMAIN)[0] + await async_start(hass, "homeassistant", config_entry) data0 = ( '{ "device":{"identifiers":["0AFFD2"]},' @@ -407,9 +398,8 @@ async def test_if_fires_on_mqtt_message_after_update( hass, device_reg, calls, mqtt_mock ): """Test triggers firing after update.""" - config_entry = MockConfigEntry(domain=DOMAIN, data={}) - config_entry.add_to_hass(hass) - await async_start(hass, "homeassistant", {}, config_entry) + config_entry = hass.config_entries.async_entries(DOMAIN)[0] + await async_start(hass, "homeassistant", config_entry) data1 = ( '{ "automation_type":"trigger",' @@ -484,10 +474,8 @@ async def test_if_fires_on_mqtt_message_after_update( async def test_no_resubscribe_same_topic(hass, device_reg, mqtt_mock): """Test subscription to topics without change.""" - mock_mqtt = await async_mock_mqtt_component(hass) - config_entry = MockConfigEntry(domain=DOMAIN, data={}) - config_entry.add_to_hass(hass) - await async_start(hass, "homeassistant", {}, config_entry) + config_entry = hass.config_entries.async_entries(DOMAIN)[0] + await async_start(hass, "homeassistant", config_entry) data1 = ( '{ "automation_type":"trigger",' @@ -523,19 +511,18 @@ async def test_no_resubscribe_same_topic(hass, device_reg, mqtt_mock): }, ) - call_count = mock_mqtt.async_subscribe.call_count + call_count = mqtt_mock.async_subscribe.call_count async_fire_mqtt_message(hass, "homeassistant/device_automation/bla1/config", data1) await hass.async_block_till_done() - assert mock_mqtt.async_subscribe.call_count == call_count + assert mqtt_mock.async_subscribe.call_count == call_count async def test_not_fires_on_mqtt_message_after_remove_by_mqtt( hass, device_reg, calls, mqtt_mock ): """Test triggers not firing after removal.""" - config_entry = MockConfigEntry(domain=DOMAIN, data={}) - config_entry.add_to_hass(hass) - await async_start(hass, "homeassistant", {}, config_entry) + config_entry = hass.config_entries.async_entries(DOMAIN)[0] + await async_start(hass, "homeassistant", config_entry) data1 = ( '{ "automation_type":"trigger",' @@ -597,9 +584,8 @@ async def test_not_fires_on_mqtt_message_after_remove_from_registry( hass, device_reg, calls, mqtt_mock ): """Test triggers not firing after removal.""" - config_entry = MockConfigEntry(domain=DOMAIN, data={}) - config_entry.add_to_hass(hass) - await async_start(hass, "homeassistant", {}, config_entry) + config_entry = hass.config_entries.async_entries(DOMAIN)[0] + await async_start(hass, "homeassistant", config_entry) data1 = ( '{ "automation_type":"trigger",' @@ -651,9 +637,8 @@ async def test_not_fires_on_mqtt_message_after_remove_from_registry( async def test_attach_remove(hass, device_reg, mqtt_mock): """Test attach and removal of trigger.""" - config_entry = MockConfigEntry(domain=DOMAIN, data={}) - config_entry.add_to_hass(hass) - await async_start(hass, "homeassistant", {}, config_entry) + config_entry = hass.config_entries.async_entries(DOMAIN)[0] + await async_start(hass, "homeassistant", config_entry) data1 = ( '{ "automation_type":"trigger",' @@ -704,9 +689,8 @@ async def test_attach_remove(hass, device_reg, mqtt_mock): async def test_attach_remove_late(hass, device_reg, mqtt_mock): """Test attach and removal of trigger .""" - config_entry = MockConfigEntry(domain=DOMAIN, data={}) - config_entry.add_to_hass(hass) - await async_start(hass, "homeassistant", {}, config_entry) + config_entry = 
hass.config_entries.async_entries(DOMAIN)[0] + await async_start(hass, "homeassistant", config_entry) data0 = ( '{ "device":{"identifiers":["0AFFD2"]},' @@ -765,9 +749,8 @@ async def test_attach_remove_late(hass, device_reg, mqtt_mock): async def test_attach_remove_late2(hass, device_reg, mqtt_mock): """Test attach and removal of trigger .""" - config_entry = MockConfigEntry(domain=DOMAIN, data={}) - config_entry.add_to_hass(hass) - await async_start(hass, "homeassistant", {}, config_entry) + config_entry = hass.config_entries.async_entries(DOMAIN)[0] + await async_start(hass, "homeassistant", config_entry) data0 = ( '{ "device":{"identifiers":["0AFFD2"]},' @@ -820,9 +803,8 @@ async def test_attach_remove_late2(hass, device_reg, mqtt_mock): async def test_entity_device_info_with_connection(hass, mqtt_mock): """Test MQTT device registry integration.""" - entry = MockConfigEntry(domain=DOMAIN) - entry.add_to_hass(hass) - await async_start(hass, "homeassistant", {}, entry) + entry = hass.config_entries.async_entries(DOMAIN)[0] + await async_start(hass, "homeassistant", entry) registry = await hass.helpers.device_registry.async_get_registry() data = json.dumps( @@ -854,9 +836,8 @@ async def test_entity_device_info_with_connection(hass, mqtt_mock): async def test_entity_device_info_with_identifier(hass, mqtt_mock): """Test MQTT device registry integration.""" - entry = MockConfigEntry(domain=DOMAIN) - entry.add_to_hass(hass) - await async_start(hass, "homeassistant", {}, entry) + entry = hass.config_entries.async_entries(DOMAIN)[0] + await async_start(hass, "homeassistant", entry) registry = await hass.helpers.device_registry.async_get_registry() data = json.dumps( @@ -888,9 +869,8 @@ async def test_entity_device_info_with_identifier(hass, mqtt_mock): async def test_entity_device_info_update(hass, mqtt_mock): """Test device registry update.""" - entry = MockConfigEntry(domain=DOMAIN) - entry.add_to_hass(hass) - await async_start(hass, "homeassistant", {}, entry) + entry = hass.config_entries.async_entries(DOMAIN)[0] + await async_start(hass, "homeassistant", entry) registry = await hass.helpers.device_registry.async_get_registry() config = { @@ -928,9 +908,8 @@ async def test_entity_device_info_update(hass, mqtt_mock): async def test_cleanup_trigger(hass, device_reg, entity_reg, mqtt_mock): """Test trigger discovery topic is cleaned when device is removed from registry.""" - config_entry = MockConfigEntry(domain=DOMAIN) - config_entry.add_to_hass(hass) - await async_start(hass, "homeassistant", {}, config_entry) + config_entry = hass.config_entries.async_entries(DOMAIN)[0] + await async_start(hass, "homeassistant", config_entry) config = { "automation_type": "trigger", @@ -967,9 +946,8 @@ async def test_cleanup_trigger(hass, device_reg, entity_reg, mqtt_mock): async def test_cleanup_device(hass, device_reg, entity_reg, mqtt_mock): """Test removal from device registry when trigger is removed.""" - config_entry = MockConfigEntry(domain=DOMAIN) - config_entry.add_to_hass(hass) - await async_start(hass, "homeassistant", {}, config_entry) + config_entry = hass.config_entries.async_entries(DOMAIN)[0] + await async_start(hass, "homeassistant", config_entry) config = { "automation_type": "trigger", @@ -1000,9 +978,8 @@ async def test_cleanup_device(hass, device_reg, entity_reg, mqtt_mock): async def test_cleanup_device_several_triggers(hass, device_reg, entity_reg, mqtt_mock): """Test removal from device registry when the last trigger is removed.""" - config_entry = MockConfigEntry(domain=DOMAIN) - 
config_entry.add_to_hass(hass) - await async_start(hass, "homeassistant", {}, config_entry) + config_entry = hass.config_entries.async_entries(DOMAIN)[0] + await async_start(hass, "homeassistant", config_entry) config1 = { "automation_type": "trigger", @@ -1060,9 +1037,8 @@ async def test_cleanup_device_with_entity1(hass, device_reg, entity_reg, mqtt_mo Trigger removed first, then entity. """ - config_entry = MockConfigEntry(domain=DOMAIN) - config_entry.add_to_hass(hass) - await async_start(hass, "homeassistant", {}, config_entry) + config_entry = hass.config_entries.async_entries(DOMAIN)[0] + await async_start(hass, "homeassistant", config_entry) config1 = { "automation_type": "trigger", @@ -1116,9 +1092,8 @@ async def test_cleanup_device_with_entity2(hass, device_reg, entity_reg, mqtt_mo Entity removed first, then trigger. """ - config_entry = MockConfigEntry(domain=DOMAIN) - config_entry.add_to_hass(hass) - await async_start(hass, "homeassistant", {}, config_entry) + config_entry = hass.config_entries.async_entries(DOMAIN)[0] + await async_start(hass, "homeassistant", config_entry) config1 = { "automation_type": "trigger", @@ -1172,9 +1147,8 @@ async def test_trigger_debug_info(hass, mqtt_mock): This is a test helper for MQTT debug_info. """ - entry = MockConfigEntry(domain=DOMAIN) - entry.add_to_hass(hass) - await async_start(hass, "homeassistant", {}, entry) + entry = hass.config_entries.async_entries(DOMAIN)[0] + await async_start(hass, "homeassistant", entry) registry = await hass.helpers.device_registry.async_get_registry() config = { diff --git a/tests/components/mqtt/test_discovery.py b/tests/components/mqtt/test_discovery.py index 8c75d77efb8..6c317e17989 100644 --- a/tests/components/mqtt/test_discovery.py +++ b/tests/components/mqtt/test_discovery.py @@ -13,12 +13,7 @@ from homeassistant.components.mqtt.discovery import ALREADY_DISCOVERED, async_st from homeassistant.const import STATE_OFF, STATE_ON from tests.async_mock import AsyncMock, patch -from tests.common import ( - MockConfigEntry, - async_fire_mqtt_message, - mock_device_registry, - mock_registry, -) +from tests.common import async_fire_mqtt_message, mock_device_registry, mock_registry @pytest.fixture @@ -35,11 +30,10 @@ def entity_reg(hass): async def test_subscribing_config_topic(hass, mqtt_mock): """Test setting up discovery.""" - entry = MockConfigEntry(domain=mqtt.DOMAIN, data={mqtt.CONF_BROKER: "test-broker"}) + entry = hass.config_entries.async_entries(mqtt.DOMAIN)[0] - hass_config = {} discovery_topic = "homeassistant" - await async_start(hass, discovery_topic, hass_config, entry) + await async_start(hass, discovery_topic, entry) assert mqtt_mock.async_subscribe.called call_args = mqtt_mock.async_subscribe.mock_calls[0][1] @@ -52,12 +46,10 @@ async def test_invalid_topic(hass, mqtt_mock): with patch( "homeassistant.components.mqtt.discovery.async_dispatcher_send" ) as mock_dispatcher_send: - entry = MockConfigEntry( - domain=mqtt.DOMAIN, data={mqtt.CONF_BROKER: "test-broker"} - ) + entry = hass.config_entries.async_entries(mqtt.DOMAIN)[0] mock_dispatcher_send = AsyncMock(return_value=None) - await async_start(hass, "homeassistant", {}, entry) + await async_start(hass, "homeassistant", entry) async_fire_mqtt_message( hass, "homeassistant/binary_sensor/bla/not_config", "{}" @@ -71,12 +63,10 @@ async def test_invalid_json(hass, mqtt_mock, caplog): with patch( "homeassistant.components.mqtt.discovery.async_dispatcher_send" ) as mock_dispatcher_send: - entry = MockConfigEntry( - domain=mqtt.DOMAIN, 
data={mqtt.CONF_BROKER: "test-broker"} - ) + entry = hass.config_entries.async_entries(mqtt.DOMAIN)[0] mock_dispatcher_send = AsyncMock(return_value=None) - await async_start(hass, "homeassistant", {}, entry) + await async_start(hass, "homeassistant", entry) async_fire_mqtt_message( hass, "homeassistant/binary_sensor/bla/config", "not json" @@ -91,12 +81,12 @@ async def test_only_valid_components(hass, mqtt_mock, caplog): with patch( "homeassistant.components.mqtt.discovery.async_dispatcher_send" ) as mock_dispatcher_send: - entry = MockConfigEntry(domain=mqtt.DOMAIN) + entry = hass.config_entries.async_entries(mqtt.DOMAIN)[0] invalid_component = "timer" mock_dispatcher_send = AsyncMock(return_value=None) - await async_start(hass, "homeassistant", {}, entry) + await async_start(hass, "homeassistant", entry) async_fire_mqtt_message( hass, f"homeassistant/{invalid_component}/bla/config", "{}" @@ -111,9 +101,9 @@ async def test_only_valid_components(hass, mqtt_mock, caplog): async def test_correct_config_discovery(hass, mqtt_mock, caplog): """Test sending in correct JSON.""" - entry = MockConfigEntry(domain=mqtt.DOMAIN) + entry = hass.config_entries.async_entries(mqtt.DOMAIN)[0] - await async_start(hass, "homeassistant", {}, entry) + await async_start(hass, "homeassistant", entry) async_fire_mqtt_message( hass, @@ -131,9 +121,9 @@ async def test_correct_config_discovery(hass, mqtt_mock, caplog): async def test_discover_fan(hass, mqtt_mock, caplog): """Test discovering an MQTT fan.""" - entry = MockConfigEntry(domain=mqtt.DOMAIN) + entry = hass.config_entries.async_entries(mqtt.DOMAIN)[0] - await async_start(hass, "homeassistant", {}, entry) + await async_start(hass, "homeassistant", entry) async_fire_mqtt_message( hass, @@ -151,9 +141,9 @@ async def test_discover_fan(hass, mqtt_mock, caplog): async def test_discover_climate(hass, mqtt_mock, caplog): """Test discovering an MQTT climate component.""" - entry = MockConfigEntry(domain=mqtt.DOMAIN) + entry = hass.config_entries.async_entries(mqtt.DOMAIN)[0] - await async_start(hass, "homeassistant", {}, entry) + await async_start(hass, "homeassistant", entry) data = ( '{ "name": "ClimateTest",' @@ -173,9 +163,9 @@ async def test_discover_climate(hass, mqtt_mock, caplog): async def test_discover_alarm_control_panel(hass, mqtt_mock, caplog): """Test discovering an MQTT alarm control panel component.""" - entry = MockConfigEntry(domain=mqtt.DOMAIN) + entry = hass.config_entries.async_entries(mqtt.DOMAIN)[0] - await async_start(hass, "homeassistant", {}, entry) + await async_start(hass, "homeassistant", entry) data = ( '{ "name": "AlarmControlPanelTest",' @@ -195,9 +185,9 @@ async def test_discover_alarm_control_panel(hass, mqtt_mock, caplog): async def test_discovery_incl_nodeid(hass, mqtt_mock, caplog): """Test sending in correct JSON with optional node_id included.""" - entry = MockConfigEntry(domain=mqtt.DOMAIN) + entry = hass.config_entries.async_entries(mqtt.DOMAIN)[0] - await async_start(hass, "homeassistant", {}, entry) + await async_start(hass, "homeassistant", entry) async_fire_mqtt_message( hass, @@ -215,9 +205,9 @@ async def test_discovery_incl_nodeid(hass, mqtt_mock, caplog): async def test_non_duplicate_discovery(hass, mqtt_mock, caplog): """Test for a non duplicate component.""" - entry = MockConfigEntry(domain=mqtt.DOMAIN) + entry = hass.config_entries.async_entries(mqtt.DOMAIN)[0] - await async_start(hass, "homeassistant", {}, entry) + await async_start(hass, "homeassistant", entry) async_fire_mqtt_message( hass, @@ -242,9 +232,9 @@ 
async def test_non_duplicate_discovery(hass, mqtt_mock, caplog): async def test_removal(hass, mqtt_mock, caplog): """Test removal of component through empty discovery message.""" - entry = MockConfigEntry(domain=mqtt.DOMAIN) + entry = hass.config_entries.async_entries(mqtt.DOMAIN)[0] - await async_start(hass, "homeassistant", {}, entry) + await async_start(hass, "homeassistant", entry) async_fire_mqtt_message( hass, @@ -263,9 +253,9 @@ async def test_removal(hass, mqtt_mock, caplog): async def test_rediscover(hass, mqtt_mock, caplog): """Test rediscover of removed component.""" - entry = MockConfigEntry(domain=mqtt.DOMAIN) + entry = hass.config_entries.async_entries(mqtt.DOMAIN)[0] - await async_start(hass, "homeassistant", {}, entry) + await async_start(hass, "homeassistant", entry) async_fire_mqtt_message( hass, @@ -293,9 +283,9 @@ async def test_rediscover(hass, mqtt_mock, caplog): async def test_duplicate_removal(hass, mqtt_mock, caplog): """Test for a non duplicate component.""" - entry = MockConfigEntry(domain=mqtt.DOMAIN) + entry = hass.config_entries.async_entries(mqtt.DOMAIN)[0] - await async_start(hass, "homeassistant", {}, entry) + await async_start(hass, "homeassistant", entry) async_fire_mqtt_message( hass, @@ -315,9 +305,8 @@ async def test_duplicate_removal(hass, mqtt_mock, caplog): async def test_cleanup_device(hass, device_reg, entity_reg, mqtt_mock): """Test discovered device is cleaned up when removed from registry.""" - config_entry = MockConfigEntry(domain=mqtt.DOMAIN) - config_entry.add_to_hass(hass) - await async_start(hass, "homeassistant", {}, config_entry) + config_entry = hass.config_entries.async_entries(mqtt.DOMAIN)[0] + await async_start(hass, "homeassistant", config_entry) data = ( '{ "device":{"identifiers":["0AFFD2"]},' @@ -358,9 +347,9 @@ async def test_cleanup_device(hass, device_reg, entity_reg, mqtt_mock): async def test_discovery_expansion(hass, mqtt_mock, caplog): """Test expansion of abbreviated discovery payload.""" - entry = MockConfigEntry(domain=mqtt.DOMAIN) + entry = hass.config_entries.async_entries(mqtt.DOMAIN)[0] - await async_start(hass, "homeassistant", {}, entry) + await async_start(hass, "homeassistant", entry) data = ( '{ "~": "some/base/topic",' @@ -448,9 +437,9 @@ async def test_missing_discover_abbreviations(hass, mqtt_mock, caplog): async def test_no_implicit_state_topic_switch(hass, mqtt_mock, caplog): """Test no implicit state topic for switch.""" - entry = MockConfigEntry(domain=mqtt.DOMAIN) + entry = hass.config_entries.async_entries(mqtt.DOMAIN)[0] - await async_start(hass, "homeassistant", {}, entry) + await async_start(hass, "homeassistant", entry) data = '{ "name": "Test1",' ' "command_topic": "cmnd"' "}" @@ -473,9 +462,9 @@ async def test_no_implicit_state_topic_switch(hass, mqtt_mock, caplog): async def test_complex_discovery_topic_prefix(hass, mqtt_mock, caplog): """Tests handling of discovery topic prefix with multiple slashes.""" - entry = MockConfigEntry(domain=mqtt.DOMAIN) + entry = hass.config_entries.async_entries(mqtt.DOMAIN)[0] - await async_start(hass, "my_home/homeassistant/register", {}, entry) + await async_start(hass, "my_home/homeassistant/register", entry) async_fire_mqtt_message( hass, diff --git a/tests/components/mqtt/test_fan.py b/tests/components/mqtt/test_fan.py index 7f6eb79e85e..6114fe48ff4 100644 --- a/tests/components/mqtt/test_fan.py +++ b/tests/components/mqtt/test_fan.py @@ -11,6 +11,7 @@ from homeassistant.const import ( from homeassistant.setup import async_setup_component from .test_common 
import ( + help_test_availability_when_connection_lost, help_test_availability_without_topic, help_test_custom_availability_payload, help_test_default_availability_payload, @@ -600,6 +601,13 @@ async def test_supported_features(hass, mqtt_mock): ) +async def test_availability_when_connection_lost(hass, mqtt_mock): + """Test availability after MQTT disconnection.""" + await help_test_availability_when_connection_lost( + hass, mqtt_mock, fan.DOMAIN, DEFAULT_CONFIG + ) + + async def test_availability_without_topic(hass, mqtt_mock): """Test availability without defined availability topic.""" await help_test_availability_without_topic( @@ -656,7 +664,7 @@ async def test_discovery_update_attr(hass, mqtt_mock, caplog): ) -async def test_unique_id(hass): +async def test_unique_id(hass, mqtt_mock): """Test unique_id option only creates one fan per id.""" config = { fan.DOMAIN: [ @@ -676,7 +684,7 @@ async def test_unique_id(hass): }, ] } - await help_test_unique_id(hass, fan.DOMAIN, config) + await help_test_unique_id(hass, mqtt_mock, fan.DOMAIN, config) async def test_discovery_removal_fan(hass, mqtt_mock, caplog): diff --git a/tests/components/mqtt/test_init.py b/tests/components/mqtt/test_init.py index 3626c5a746c..58a982544f0 100644 --- a/tests/components/mqtt/test_init.py +++ b/tests/components/mqtt/test_init.py @@ -2,7 +2,6 @@ from datetime import datetime, timedelta import json import ssl -import unittest import pytest import voluptuous as vol @@ -27,13 +26,8 @@ from tests.common import ( MockConfigEntry, async_fire_mqtt_message, async_fire_time_changed, - async_mock_mqtt_component, - fire_mqtt_message, - get_test_home_assistant, mock_device_registry, - mock_mqtt_component, mock_registry, - threadsafe_coroutine_factory, ) from tests.testing_config.custom_components.test.sensor import DEVICE_CLASSES @@ -64,617 +58,616 @@ def mock_mqtt(): yield mock_mqtt -async def async_mock_mqtt_client(hass, config=None): - """Mock the MQTT paho client.""" - if config is None: - config = {mqtt.CONF_BROKER: "mock-broker"} - - with patch("paho.mqtt.client.Client") as mock_client: - mock_client().connect.return_value = 0 - mock_client().subscribe.return_value = (0, 0) - mock_client().unsubscribe.return_value = (0, 0) - mock_client().publish.return_value = (0, 0) - result = await async_setup_component(hass, mqtt.DOMAIN, {mqtt.DOMAIN: config}) - assert result - await hass.async_block_till_done() - return mock_client() +@pytest.fixture +def calls(): + """Fixture to record calls.""" + return [] -mock_mqtt_client = threadsafe_coroutine_factory(async_mock_mqtt_client) - - -# pylint: disable=invalid-name -class TestMQTTComponent(unittest.TestCase): - """Test the MQTT component.""" - - def setUp(self): # pylint: disable=invalid-name - """Set up things to be run when tests are started.""" - self.hass = get_test_home_assistant() - mock_mqtt_component(self.hass) - self.calls = [] - - def tearDown(self): # pylint: disable=invalid-name - """Stop everything that was started.""" - self.hass.stop() +@pytest.fixture +def record_calls(calls): + """Fixture to record calls.""" @callback - def record_calls(self, *args): + def record_calls(*args): """Record calls.""" - self.calls.append(args) + calls.append(args) - def aiohttp_client_stops_on_home_assistant_start(self): - """Test if client stops on HA stop.""" - self.hass.bus.fire(EVENT_HOMEASSISTANT_STOP) - self.hass.block_till_done() - assert self.hass.data["mqtt"].async_disconnect.called + return record_calls - def test_publish_calls_service(self): - """Test the publishing of call 
to services.""" - self.hass.bus.listen_once(EVENT_CALL_SERVICE, self.record_calls) - mqtt.publish(self.hass, "test-topic", "test-payload") +async def test_mqtt_connects_on_home_assistant_mqtt_setup( + hass, mqtt_client_mock, mqtt_mock +): + """Test if client is connected after mqtt init on bootstrap.""" + assert mqtt_client_mock.connect.call_count == 1 - self.hass.block_till_done() - assert len(self.calls) == 1 - assert self.calls[0][0].data["service_data"][mqtt.ATTR_TOPIC] == "test-topic" - assert ( - self.calls[0][0].data["service_data"][mqtt.ATTR_PAYLOAD] == "test-payload" - ) +async def test_mqtt_disconnects_on_home_assistant_stop(hass, mqtt_mock): + """Test if client stops on HA stop.""" + hass.bus.fire(EVENT_HOMEASSISTANT_STOP) + await hass.async_block_till_done() + await hass.async_block_till_done() + assert mqtt_mock.async_disconnect.called - def test_service_call_without_topic_does_not_publish(self): - """Test the service call if topic is missing.""" - self.hass.bus.fire( - EVENT_CALL_SERVICE, - {ATTR_DOMAIN: mqtt.DOMAIN, ATTR_SERVICE: mqtt.SERVICE_PUBLISH}, - ) - self.hass.block_till_done() - assert not self.hass.data["mqtt"].async_publish.called - def test_service_call_with_template_payload_renders_template(self): - """Test the service call with rendered template. +async def test_publish_calls_service(hass, mqtt_mock, calls, record_calls): + """Test the publishing of call to services.""" + hass.bus.async_listen_once(EVENT_CALL_SERVICE, record_calls) - If 'payload_template' is provided and 'payload' is not, then render it. - """ - mqtt.publish_template(self.hass, "test/topic", "{{ 1+1 }}") - self.hass.block_till_done() - assert self.hass.data["mqtt"].async_publish.called - assert self.hass.data["mqtt"].async_publish.call_args[0][1] == "2" + mqtt.async_publish(hass, "test-topic", "test-payload") - def test_service_call_with_payload_doesnt_render_template(self): - """Test the service call with unrendered template. + await hass.async_block_till_done() - If both 'payload' and 'payload_template' are provided then fail. - """ - payload = "not a template" - payload_template = "a template" - with pytest.raises(vol.Invalid): - self.hass.services.call( - mqtt.DOMAIN, - mqtt.SERVICE_PUBLISH, - { - mqtt.ATTR_TOPIC: "test/topic", - mqtt.ATTR_PAYLOAD: payload, - mqtt.ATTR_PAYLOAD_TEMPLATE: payload_template, - }, - blocking=True, - ) - assert not self.hass.data["mqtt"].async_publish.called + assert len(calls) == 1 + assert calls[0][0].data["service_data"][mqtt.ATTR_TOPIC] == "test-topic" + assert calls[0][0].data["service_data"][mqtt.ATTR_PAYLOAD] == "test-payload" - def test_service_call_with_ascii_qos_retain_flags(self): - """Test the service call with args that can be misinterpreted. - Empty payload message and ascii formatted qos and retain flags. - """ - self.hass.services.call( +async def test_service_call_without_topic_does_not_publish(hass, mqtt_mock): + """Test the service call if topic is missing.""" + hass.bus.fire( + EVENT_CALL_SERVICE, + {ATTR_DOMAIN: mqtt.DOMAIN, ATTR_SERVICE: mqtt.SERVICE_PUBLISH}, + ) + await hass.async_block_till_done() + assert not mqtt_mock.async_publish.called + + +async def test_service_call_with_template_payload_renders_template(hass, mqtt_mock): + """Test the service call with rendered template. + + If 'payload_template' is provided and 'payload' is not, then render it. 
+ """ + mqtt.async_publish_template(hass, "test/topic", "{{ 1+1 }}") + await hass.async_block_till_done() + assert mqtt_mock.async_publish.called + assert mqtt_mock.async_publish.call_args[0][1] == "2" + + +async def test_service_call_with_payload_doesnt_render_template(hass, mqtt_mock): + """Test the service call with unrendered template. + + If both 'payload' and 'payload_template' are provided then fail. + """ + payload = "not a template" + payload_template = "a template" + with pytest.raises(vol.Invalid): + await hass.services.async_call( mqtt.DOMAIN, mqtt.SERVICE_PUBLISH, { mqtt.ATTR_TOPIC: "test/topic", - mqtt.ATTR_PAYLOAD: "", - mqtt.ATTR_QOS: "2", - mqtt.ATTR_RETAIN: "no", + mqtt.ATTR_PAYLOAD: payload, + mqtt.ATTR_PAYLOAD_TEMPLATE: payload_template, }, blocking=True, ) - assert self.hass.data["mqtt"].async_publish.called - assert self.hass.data["mqtt"].async_publish.call_args[0][2] == 2 - assert not self.hass.data["mqtt"].async_publish.call_args[0][3] + assert not mqtt_mock.async_publish.called - def test_validate_topic(self): - """Test topic name/filter validation.""" - # Invalid UTF-8, must not contain U+D800 to U+DFFF. - with pytest.raises(vol.Invalid): - mqtt.valid_topic("\ud800") - with pytest.raises(vol.Invalid): - mqtt.valid_topic("\udfff") - # Topic MUST NOT be empty - with pytest.raises(vol.Invalid): - mqtt.valid_topic("") - # Topic MUST NOT be longer than 65535 encoded bytes. - with pytest.raises(vol.Invalid): - mqtt.valid_topic("ü" * 32768) - # UTF-8 MUST NOT include null character - with pytest.raises(vol.Invalid): - mqtt.valid_topic("bad\0one") - # Topics "SHOULD NOT" include these special characters - # (not MUST NOT, RFC2119). The receiver MAY close the connection. - mqtt.valid_topic("\u0001") - mqtt.valid_topic("\u001F") - mqtt.valid_topic("\u009F") - mqtt.valid_topic("\u009F") - mqtt.valid_topic("\uffff") +async def test_service_call_with_ascii_qos_retain_flags(hass, mqtt_mock): + """Test the service call with args that can be misinterpreted. - def test_validate_subscribe_topic(self): - """Test invalid subscribe topics.""" - mqtt.valid_subscribe_topic("#") - mqtt.valid_subscribe_topic("sport/#") - with pytest.raises(vol.Invalid): - mqtt.valid_subscribe_topic("sport/#/") - with pytest.raises(vol.Invalid): - mqtt.valid_subscribe_topic("foo/bar#") - with pytest.raises(vol.Invalid): - mqtt.valid_subscribe_topic("foo/#/bar") + Empty payload message and ascii formatted qos and retain flags. 
+ """ + await hass.services.async_call( + mqtt.DOMAIN, + mqtt.SERVICE_PUBLISH, + { + mqtt.ATTR_TOPIC: "test/topic", + mqtt.ATTR_PAYLOAD: "", + mqtt.ATTR_QOS: "2", + mqtt.ATTR_RETAIN: "no", + }, + blocking=True, + ) + assert mqtt_mock.async_publish.called + assert mqtt_mock.async_publish.call_args[0][2] == 2 + assert not mqtt_mock.async_publish.call_args[0][3] - mqtt.valid_subscribe_topic("+") - mqtt.valid_subscribe_topic("+/tennis/#") - with pytest.raises(vol.Invalid): - mqtt.valid_subscribe_topic("sport+") - with pytest.raises(vol.Invalid): - mqtt.valid_subscribe_topic("sport+/") - with pytest.raises(vol.Invalid): - mqtt.valid_subscribe_topic("sport/+1") - with pytest.raises(vol.Invalid): - mqtt.valid_subscribe_topic("sport/+#") - with pytest.raises(vol.Invalid): - mqtt.valid_subscribe_topic("bad+topic") - mqtt.valid_subscribe_topic("sport/+/player1") - mqtt.valid_subscribe_topic("/finance") - mqtt.valid_subscribe_topic("+/+") - mqtt.valid_subscribe_topic("$SYS/#") - def test_validate_publish_topic(self): - """Test invalid publish topics.""" - with pytest.raises(vol.Invalid): - mqtt.valid_publish_topic("pub+") - with pytest.raises(vol.Invalid): - mqtt.valid_publish_topic("pub/+") - with pytest.raises(vol.Invalid): - mqtt.valid_publish_topic("1#") - with pytest.raises(vol.Invalid): - mqtt.valid_publish_topic("bad+topic") - mqtt.valid_publish_topic("//") +def test_validate_topic(): + """Test topic name/filter validation.""" + # Invalid UTF-8, must not contain U+D800 to U+DFFF. + with pytest.raises(vol.Invalid): + mqtt.util.valid_topic("\ud800") + with pytest.raises(vol.Invalid): + mqtt.util.valid_topic("\udfff") + # Topic MUST NOT be empty + with pytest.raises(vol.Invalid): + mqtt.util.valid_topic("") + # Topic MUST NOT be longer than 65535 encoded bytes. + with pytest.raises(vol.Invalid): + mqtt.util.valid_topic("ü" * 32768) + # UTF-8 MUST NOT include null character + with pytest.raises(vol.Invalid): + mqtt.util.valid_topic("bad\0one") - # Topic names beginning with $ SHOULD NOT be used, but can - mqtt.valid_publish_topic("$SYS/") + # Topics "SHOULD NOT" include these special characters + # (not MUST NOT, RFC2119). The receiver MAY close the connection. 
+ mqtt.util.valid_topic("\u0001") + mqtt.util.valid_topic("\u001F") + mqtt.util.valid_topic("\u009F") + mqtt.util.valid_topic("\u009F") + mqtt.util.valid_topic("\uffff") - def test_entity_device_info_schema(self): - """Test MQTT entity device info validation.""" - # just identifier - mqtt.MQTT_ENTITY_DEVICE_INFO_SCHEMA({"identifiers": ["abcd"]}) - mqtt.MQTT_ENTITY_DEVICE_INFO_SCHEMA({"identifiers": "abcd"}) - # just connection - mqtt.MQTT_ENTITY_DEVICE_INFO_SCHEMA( - {"connections": [["mac", "02:5b:26:a8:dc:12"]]} - ) - # full device info + +def test_validate_subscribe_topic(): + """Test invalid subscribe topics.""" + mqtt.valid_subscribe_topic("#") + mqtt.valid_subscribe_topic("sport/#") + with pytest.raises(vol.Invalid): + mqtt.valid_subscribe_topic("sport/#/") + with pytest.raises(vol.Invalid): + mqtt.valid_subscribe_topic("foo/bar#") + with pytest.raises(vol.Invalid): + mqtt.valid_subscribe_topic("foo/#/bar") + + mqtt.valid_subscribe_topic("+") + mqtt.valid_subscribe_topic("+/tennis/#") + with pytest.raises(vol.Invalid): + mqtt.valid_subscribe_topic("sport+") + with pytest.raises(vol.Invalid): + mqtt.valid_subscribe_topic("sport+/") + with pytest.raises(vol.Invalid): + mqtt.valid_subscribe_topic("sport/+1") + with pytest.raises(vol.Invalid): + mqtt.valid_subscribe_topic("sport/+#") + with pytest.raises(vol.Invalid): + mqtt.valid_subscribe_topic("bad+topic") + mqtt.valid_subscribe_topic("sport/+/player1") + mqtt.valid_subscribe_topic("/finance") + mqtt.valid_subscribe_topic("+/+") + mqtt.valid_subscribe_topic("$SYS/#") + + +def test_validate_publish_topic(): + """Test invalid publish topics.""" + with pytest.raises(vol.Invalid): + mqtt.valid_publish_topic("pub+") + with pytest.raises(vol.Invalid): + mqtt.valid_publish_topic("pub/+") + with pytest.raises(vol.Invalid): + mqtt.valid_publish_topic("1#") + with pytest.raises(vol.Invalid): + mqtt.valid_publish_topic("bad+topic") + mqtt.valid_publish_topic("//") + + # Topic names beginning with $ SHOULD NOT be used, but can + mqtt.valid_publish_topic("$SYS/") + + +def test_entity_device_info_schema(): + """Test MQTT entity device info validation.""" + # just identifier + mqtt.MQTT_ENTITY_DEVICE_INFO_SCHEMA({"identifiers": ["abcd"]}) + mqtt.MQTT_ENTITY_DEVICE_INFO_SCHEMA({"identifiers": "abcd"}) + # just connection + mqtt.MQTT_ENTITY_DEVICE_INFO_SCHEMA({"connections": [["mac", "02:5b:26:a8:dc:12"]]}) + # full device info + mqtt.MQTT_ENTITY_DEVICE_INFO_SCHEMA( + { + "identifiers": ["helloworld", "hello"], + "connections": [["mac", "02:5b:26:a8:dc:12"], ["zigbee", "zigbee_id"]], + "manufacturer": "Whatever", + "name": "Beer", + "model": "Glass", + "sw_version": "0.1-beta", + } + ) + # full device info with via_device + mqtt.MQTT_ENTITY_DEVICE_INFO_SCHEMA( + { + "identifiers": ["helloworld", "hello"], + "connections": [["mac", "02:5b:26:a8:dc:12"], ["zigbee", "zigbee_id"]], + "manufacturer": "Whatever", + "name": "Beer", + "model": "Glass", + "sw_version": "0.1-beta", + "via_device": "test-hub", + } + ) + # no identifiers + with pytest.raises(vol.Invalid): mqtt.MQTT_ENTITY_DEVICE_INFO_SCHEMA( { - "identifiers": ["helloworld", "hello"], - "connections": [["mac", "02:5b:26:a8:dc:12"], ["zigbee", "zigbee_id"]], "manufacturer": "Whatever", "name": "Beer", "model": "Glass", "sw_version": "0.1-beta", } ) - # full device info with via_device + # empty identifiers + with pytest.raises(vol.Invalid): mqtt.MQTT_ENTITY_DEVICE_INFO_SCHEMA( - { - "identifiers": ["helloworld", "hello"], - "connections": [["mac", "02:5b:26:a8:dc:12"], ["zigbee", "zigbee_id"]], - 
"manufacturer": "Whatever", - "name": "Beer", - "model": "Glass", - "sw_version": "0.1-beta", - "via_device": "test-hub", - } + {"identifiers": [], "connections": [], "name": "Beer"} ) - # no identifiers - with pytest.raises(vol.Invalid): - mqtt.MQTT_ENTITY_DEVICE_INFO_SCHEMA( - { - "manufacturer": "Whatever", - "name": "Beer", - "model": "Glass", - "sw_version": "0.1-beta", - } - ) - # empty identifiers - with pytest.raises(vol.Invalid): - mqtt.MQTT_ENTITY_DEVICE_INFO_SCHEMA( - {"identifiers": [], "connections": [], "name": "Beer"} - ) -# pylint: disable=invalid-name -class TestMQTTCallbacks(unittest.TestCase): - """Test the MQTT callbacks.""" +async def test_receiving_non_utf8_message_gets_logged( + hass, mqtt_mock, calls, record_calls, caplog +): + """Test receiving a non utf8 encoded message.""" + await mqtt.async_subscribe(hass, "test-topic", record_calls) - def setUp(self): # pylint: disable=invalid-name - """Set up things to be run when tests are started.""" - self.hass = get_test_home_assistant() - mock_mqtt_client(self.hass) - self.calls = [] + async_fire_mqtt_message(hass, "test-topic", b"\x9a") - def tearDown(self): # pylint: disable=invalid-name - """Stop everything that was started.""" - self.hass.stop() + await hass.async_block_till_done() + assert ( + "Can't decode payload b'\\x9a' on test-topic with encoding utf-8" in caplog.text + ) + + +async def test_all_subscriptions_run_when_decode_fails( + hass, mqtt_mock, calls, record_calls +): + """Test all other subscriptions still run when decode fails for one.""" + await mqtt.async_subscribe(hass, "test-topic", record_calls, encoding="ascii") + await mqtt.async_subscribe(hass, "test-topic", record_calls) + + async_fire_mqtt_message(hass, "test-topic", TEMP_CELSIUS) + + await hass.async_block_till_done() + assert len(calls) == 1 + + +async def test_subscribe_topic(hass, mqtt_mock, calls, record_calls): + """Test the subscription of a topic.""" + unsub = await mqtt.async_subscribe(hass, "test-topic", record_calls) + + async_fire_mqtt_message(hass, "test-topic", "test-payload") + + await hass.async_block_till_done() + assert len(calls) == 1 + assert calls[0][0].topic == "test-topic" + assert calls[0][0].payload == "test-payload" + + unsub() + + async_fire_mqtt_message(hass, "test-topic", "test-payload") + + await hass.async_block_till_done() + assert len(calls) == 1 + + +async def test_subscribe_deprecated(hass, mqtt_mock): + """Test the subscription of a topic using deprecated callback signature.""" + calls = [] @callback - def record_calls(self, *args): + def record_calls(topic, payload, qos): """Record calls.""" - self.calls.append(args) + calls.append((topic, payload, qos)) - def aiohttp_client_starts_on_home_assistant_mqtt_setup(self): - """Test if client is connected after mqtt init on bootstrap.""" - assert self.hass.data["mqtt"]._mqttc.connect.call_count == 1 + unsub = await mqtt.async_subscribe(hass, "test-topic", record_calls) - def test_receiving_non_utf8_message_gets_logged(self): - """Test receiving a non utf8 encoded message.""" - mqtt.subscribe(self.hass, "test-topic", self.record_calls) + async_fire_mqtt_message(hass, "test-topic", "test-payload") - with self.assertLogs(level="WARNING") as test_handle: - fire_mqtt_message(self.hass, "test-topic", b"\x9a") + await hass.async_block_till_done() + assert len(calls) == 1 + assert calls[0][0] == "test-topic" + assert calls[0][1] == "test-payload" - self.hass.block_till_done() - assert ( - "WARNING:homeassistant.components.mqtt:Can't decode payload " - "b'\\x9a' on 
test-topic with encoding utf-8" in test_handle.output[0] - ) + unsub() - def test_all_subscriptions_run_when_decode_fails(self): - """Test all other subscriptions still run when decode fails for one.""" - mqtt.subscribe(self.hass, "test-topic", self.record_calls, encoding="ascii") - mqtt.subscribe(self.hass, "test-topic", self.record_calls) + async_fire_mqtt_message(hass, "test-topic", "test-payload") - fire_mqtt_message(self.hass, "test-topic", TEMP_CELSIUS) + await hass.async_block_till_done() + assert len(calls) == 1 - self.hass.block_till_done() - assert len(self.calls) == 1 - def test_subscribe_topic(self): - """Test the subscription of a topic.""" - unsub = mqtt.subscribe(self.hass, "test-topic", self.record_calls) +async def test_subscribe_deprecated_async(hass, mqtt_mock): + """Test the subscription of a topic using deprecated callback signature.""" + calls = [] - fire_mqtt_message(self.hass, "test-topic", "test-payload") + @callback + async def record_calls(topic, payload, qos): + """Record calls.""" + calls.append((topic, payload, qos)) - self.hass.block_till_done() - assert len(self.calls) == 1 - assert self.calls[0][0].topic == "test-topic" - assert self.calls[0][0].payload == "test-payload" + unsub = await mqtt.async_subscribe(hass, "test-topic", record_calls) - unsub() + async_fire_mqtt_message(hass, "test-topic", "test-payload") - fire_mqtt_message(self.hass, "test-topic", "test-payload") + await hass.async_block_till_done() + assert len(calls) == 1 + assert calls[0][0] == "test-topic" + assert calls[0][1] == "test-payload" - self.hass.block_till_done() - assert len(self.calls) == 1 + unsub() - def test_subscribe_deprecated(self): - """Test the subscription of a topic using deprecated callback signature.""" - calls = [] + async_fire_mqtt_message(hass, "test-topic", "test-payload") - @callback - def record_calls(topic, payload, qos): - """Record calls.""" - calls.append((topic, payload, qos)) + await hass.async_block_till_done() + assert len(calls) == 1 - unsub = mqtt.subscribe(self.hass, "test-topic", record_calls) - fire_mqtt_message(self.hass, "test-topic", "test-payload") +async def test_subscribe_topic_not_match(hass, mqtt_mock, calls, record_calls): + """Test if subscribed topic is not a match.""" + await mqtt.async_subscribe(hass, "test-topic", record_calls) - self.hass.block_till_done() - assert len(calls) == 1 - assert calls[0][0] == "test-topic" - assert calls[0][1] == "test-payload" + async_fire_mqtt_message(hass, "another-test-topic", "test-payload") - unsub() + await hass.async_block_till_done() + assert len(calls) == 0 - fire_mqtt_message(self.hass, "test-topic", "test-payload") - self.hass.block_till_done() - assert len(calls) == 1 +async def test_subscribe_topic_level_wildcard(hass, mqtt_mock, calls, record_calls): + """Test the subscription of wildcard topics.""" + await mqtt.async_subscribe(hass, "test-topic/+/on", record_calls) - def test_subscribe_deprecated_async(self): - """Test the subscription of a topic using deprecated callback signature.""" - calls = [] + async_fire_mqtt_message(hass, "test-topic/bier/on", "test-payload") - @callback - async def record_calls(topic, payload, qos): - """Record calls.""" - calls.append((topic, payload, qos)) + await hass.async_block_till_done() + assert len(calls) == 1 + assert calls[0][0].topic == "test-topic/bier/on" + assert calls[0][0].payload == "test-payload" - unsub = mqtt.subscribe(self.hass, "test-topic", record_calls) - fire_mqtt_message(self.hass, "test-topic", "test-payload") +async def 
test_subscribe_topic_level_wildcard_no_subtree_match( + hass, mqtt_mock, calls, record_calls +): + """Test the subscription of wildcard topics.""" + await mqtt.async_subscribe(hass, "test-topic/+/on", record_calls) - self.hass.block_till_done() - assert len(calls) == 1 - assert calls[0][0] == "test-topic" - assert calls[0][1] == "test-payload" + async_fire_mqtt_message(hass, "test-topic/bier", "test-payload") - unsub() + await hass.async_block_till_done() + assert len(calls) == 0 - fire_mqtt_message(self.hass, "test-topic", "test-payload") - self.hass.block_till_done() - assert len(calls) == 1 +async def test_subscribe_topic_level_wildcard_root_topic_no_subtree_match( + hass, mqtt_mock, calls, record_calls +): + """Test the subscription of wildcard topics.""" + await mqtt.async_subscribe(hass, "test-topic/#", record_calls) - def test_subscribe_topic_not_match(self): - """Test if subscribed topic is not a match.""" - mqtt.subscribe(self.hass, "test-topic", self.record_calls) + async_fire_mqtt_message(hass, "test-topic-123", "test-payload") - fire_mqtt_message(self.hass, "another-test-topic", "test-payload") + await hass.async_block_till_done() + assert len(calls) == 0 - self.hass.block_till_done() - assert len(self.calls) == 0 - def test_subscribe_topic_level_wildcard(self): - """Test the subscription of wildcard topics.""" - mqtt.subscribe(self.hass, "test-topic/+/on", self.record_calls) +async def test_subscribe_topic_subtree_wildcard_subtree_topic( + hass, mqtt_mock, calls, record_calls +): + """Test the subscription of wildcard topics.""" + await mqtt.async_subscribe(hass, "test-topic/#", record_calls) - fire_mqtt_message(self.hass, "test-topic/bier/on", "test-payload") + async_fire_mqtt_message(hass, "test-topic/bier/on", "test-payload") - self.hass.block_till_done() - assert len(self.calls) == 1 - assert self.calls[0][0].topic == "test-topic/bier/on" - assert self.calls[0][0].payload == "test-payload" + await hass.async_block_till_done() + assert len(calls) == 1 + assert calls[0][0].topic == "test-topic/bier/on" + assert calls[0][0].payload == "test-payload" - def test_subscribe_topic_level_wildcard_no_subtree_match(self): - """Test the subscription of wildcard topics.""" - mqtt.subscribe(self.hass, "test-topic/+/on", self.record_calls) - fire_mqtt_message(self.hass, "test-topic/bier", "test-payload") +async def test_subscribe_topic_subtree_wildcard_root_topic( + hass, mqtt_mock, calls, record_calls +): + """Test the subscription of wildcard topics.""" + await mqtt.async_subscribe(hass, "test-topic/#", record_calls) - self.hass.block_till_done() - assert len(self.calls) == 0 + async_fire_mqtt_message(hass, "test-topic", "test-payload") - def test_subscribe_topic_level_wildcard_root_topic_no_subtree_match(self): - """Test the subscription of wildcard topics.""" - mqtt.subscribe(self.hass, "test-topic/#", self.record_calls) + await hass.async_block_till_done() + assert len(calls) == 1 + assert calls[0][0].topic == "test-topic" + assert calls[0][0].payload == "test-payload" - fire_mqtt_message(self.hass, "test-topic-123", "test-payload") - self.hass.block_till_done() - assert len(self.calls) == 0 +async def test_subscribe_topic_subtree_wildcard_no_match( + hass, mqtt_mock, calls, record_calls +): + """Test the subscription of wildcard topics.""" + await mqtt.async_subscribe(hass, "test-topic/#", record_calls) - def test_subscribe_topic_subtree_wildcard_subtree_topic(self): - """Test the subscription of wildcard topics.""" - mqtt.subscribe(self.hass, "test-topic/#", self.record_calls) + 
async_fire_mqtt_message(hass, "another-test-topic", "test-payload") - fire_mqtt_message(self.hass, "test-topic/bier/on", "test-payload") + await hass.async_block_till_done() + assert len(calls) == 0 - self.hass.block_till_done() - assert len(self.calls) == 1 - assert self.calls[0][0].topic == "test-topic/bier/on" - assert self.calls[0][0].payload == "test-payload" - def test_subscribe_topic_subtree_wildcard_root_topic(self): - """Test the subscription of wildcard topics.""" - mqtt.subscribe(self.hass, "test-topic/#", self.record_calls) +async def test_subscribe_topic_level_wildcard_and_wildcard_root_topic( + hass, mqtt_mock, calls, record_calls +): + """Test the subscription of wildcard topics.""" + await mqtt.async_subscribe(hass, "+/test-topic/#", record_calls) - fire_mqtt_message(self.hass, "test-topic", "test-payload") + async_fire_mqtt_message(hass, "hi/test-topic", "test-payload") - self.hass.block_till_done() - assert len(self.calls) == 1 - assert self.calls[0][0].topic == "test-topic" - assert self.calls[0][0].payload == "test-payload" + await hass.async_block_till_done() + assert len(calls) == 1 + assert calls[0][0].topic == "hi/test-topic" + assert calls[0][0].payload == "test-payload" - def test_subscribe_topic_subtree_wildcard_no_match(self): - """Test the subscription of wildcard topics.""" - mqtt.subscribe(self.hass, "test-topic/#", self.record_calls) - fire_mqtt_message(self.hass, "another-test-topic", "test-payload") +async def test_subscribe_topic_level_wildcard_and_wildcard_subtree_topic( + hass, mqtt_mock, calls, record_calls +): + """Test the subscription of wildcard topics.""" + await mqtt.async_subscribe(hass, "+/test-topic/#", record_calls) - self.hass.block_till_done() - assert len(self.calls) == 0 + async_fire_mqtt_message(hass, "hi/test-topic/here-iam", "test-payload") - def test_subscribe_topic_level_wildcard_and_wildcard_root_topic(self): - """Test the subscription of wildcard topics.""" - mqtt.subscribe(self.hass, "+/test-topic/#", self.record_calls) + await hass.async_block_till_done() + assert len(calls) == 1 + assert calls[0][0].topic == "hi/test-topic/here-iam" + assert calls[0][0].payload == "test-payload" - fire_mqtt_message(self.hass, "hi/test-topic", "test-payload") - self.hass.block_till_done() - assert len(self.calls) == 1 - assert self.calls[0][0].topic == "hi/test-topic" - assert self.calls[0][0].payload == "test-payload" +async def test_subscribe_topic_level_wildcard_and_wildcard_level_no_match( + hass, mqtt_mock, calls, record_calls +): + """Test the subscription of wildcard topics.""" + await mqtt.async_subscribe(hass, "+/test-topic/#", record_calls) - def test_subscribe_topic_level_wildcard_and_wildcard_subtree_topic(self): - """Test the subscription of wildcard topics.""" - mqtt.subscribe(self.hass, "+/test-topic/#", self.record_calls) + async_fire_mqtt_message(hass, "hi/here-iam/test-topic", "test-payload") - fire_mqtt_message(self.hass, "hi/test-topic/here-iam", "test-payload") + await hass.async_block_till_done() + assert len(calls) == 0 - self.hass.block_till_done() - assert len(self.calls) == 1 - assert self.calls[0][0].topic == "hi/test-topic/here-iam" - assert self.calls[0][0].payload == "test-payload" - def test_subscribe_topic_level_wildcard_and_wildcard_level_no_match(self): - """Test the subscription of wildcard topics.""" - mqtt.subscribe(self.hass, "+/test-topic/#", self.record_calls) +async def test_subscribe_topic_level_wildcard_and_wildcard_no_match( + hass, mqtt_mock, calls, record_calls +): + """Test the subscription of 
wildcard topics.""" + await mqtt.async_subscribe(hass, "+/test-topic/#", record_calls) - fire_mqtt_message(self.hass, "hi/here-iam/test-topic", "test-payload") + async_fire_mqtt_message(hass, "hi/another-test-topic", "test-payload") - self.hass.block_till_done() - assert len(self.calls) == 0 + await hass.async_block_till_done() + assert len(calls) == 0 - def test_subscribe_topic_level_wildcard_and_wildcard_no_match(self): - """Test the subscription of wildcard topics.""" - mqtt.subscribe(self.hass, "+/test-topic/#", self.record_calls) - fire_mqtt_message(self.hass, "hi/another-test-topic", "test-payload") +async def test_subscribe_topic_sys_root(hass, mqtt_mock, calls, record_calls): + """Test the subscription of $ root topics.""" + await mqtt.async_subscribe(hass, "$test-topic/subtree/on", record_calls) - self.hass.block_till_done() - assert len(self.calls) == 0 + async_fire_mqtt_message(hass, "$test-topic/subtree/on", "test-payload") - def test_subscribe_topic_sys_root(self): - """Test the subscription of $ root topics.""" - mqtt.subscribe(self.hass, "$test-topic/subtree/on", self.record_calls) + await hass.async_block_till_done() + assert len(calls) == 1 + assert calls[0][0].topic == "$test-topic/subtree/on" + assert calls[0][0].payload == "test-payload" - fire_mqtt_message(self.hass, "$test-topic/subtree/on", "test-payload") - self.hass.block_till_done() - assert len(self.calls) == 1 - assert self.calls[0][0].topic == "$test-topic/subtree/on" - assert self.calls[0][0].payload == "test-payload" +async def test_subscribe_topic_sys_root_and_wildcard_topic( + hass, mqtt_mock, calls, record_calls +): + """Test the subscription of $ root and wildcard topics.""" + await mqtt.async_subscribe(hass, "$test-topic/#", record_calls) - def test_subscribe_topic_sys_root_and_wildcard_topic(self): - """Test the subscription of $ root and wildcard topics.""" - mqtt.subscribe(self.hass, "$test-topic/#", self.record_calls) + async_fire_mqtt_message(hass, "$test-topic/some-topic", "test-payload") - fire_mqtt_message(self.hass, "$test-topic/some-topic", "test-payload") + await hass.async_block_till_done() + assert len(calls) == 1 + assert calls[0][0].topic == "$test-topic/some-topic" + assert calls[0][0].payload == "test-payload" - self.hass.block_till_done() - assert len(self.calls) == 1 - assert self.calls[0][0].topic == "$test-topic/some-topic" - assert self.calls[0][0].payload == "test-payload" - def test_subscribe_topic_sys_root_and_wildcard_subtree_topic(self): - """Test the subscription of $ root and wildcard subtree topics.""" - mqtt.subscribe(self.hass, "$test-topic/subtree/#", self.record_calls) +async def test_subscribe_topic_sys_root_and_wildcard_subtree_topic( + hass, mqtt_mock, calls, record_calls +): + """Test the subscription of $ root and wildcard subtree topics.""" + await mqtt.async_subscribe(hass, "$test-topic/subtree/#", record_calls) - fire_mqtt_message(self.hass, "$test-topic/subtree/some-topic", "test-payload") + async_fire_mqtt_message(hass, "$test-topic/subtree/some-topic", "test-payload") - self.hass.block_till_done() - assert len(self.calls) == 1 - assert self.calls[0][0].topic == "$test-topic/subtree/some-topic" - assert self.calls[0][0].payload == "test-payload" + await hass.async_block_till_done() + assert len(calls) == 1 + assert calls[0][0].topic == "$test-topic/subtree/some-topic" + assert calls[0][0].payload == "test-payload" - def test_subscribe_special_characters(self): - """Test the subscription to topics with special characters.""" - topic = "/test-topic/$(.)[^]{-}" - 
payload = "p4y.l[]a|> ?" - mqtt.subscribe(self.hass, topic, self.record_calls) +async def test_subscribe_special_characters(hass, mqtt_mock, calls, record_calls): + """Test the subscription to topics with special characters.""" + topic = "/test-topic/$(.)[^]{-}" + payload = "p4y.l[]a|> ?" - fire_mqtt_message(self.hass, topic, payload) - self.hass.block_till_done() - assert len(self.calls) == 1 - assert self.calls[0][0].topic == topic - assert self.calls[0][0].payload == payload + await mqtt.async_subscribe(hass, topic, record_calls) - def test_retained_message_on_subscribe_received(self): - """Test every subscriber receives retained message on subscribe.""" + async_fire_mqtt_message(hass, topic, payload) + await hass.async_block_till_done() + assert len(calls) == 1 + assert calls[0][0].topic == topic + assert calls[0][0].payload == payload - def side_effect(*args): - async_fire_mqtt_message(self.hass, "test/state", "online") - return 0, 0 - self.hass.data["mqtt"]._mqttc.subscribe.side_effect = side_effect +async def test_retained_message_on_subscribe_received( + hass, mqtt_client_mock, mqtt_mock +): + """Test every subscriber receives retained message on subscribe.""" - # Fake that the client is connected - self.hass.data["mqtt"].connected = True + def side_effect(*args): + async_fire_mqtt_message(hass, "test/state", "online") + return 0, 0 - calls_a = MagicMock() - mqtt.subscribe(self.hass, "test/state", calls_a) - self.hass.block_till_done() - assert calls_a.called + mqtt_client_mock.subscribe.side_effect = side_effect - calls_b = MagicMock() - mqtt.subscribe(self.hass, "test/state", calls_b) - self.hass.block_till_done() - assert calls_b.called + # Fake that the client is connected + mqtt_mock().connected = True - def test_not_calling_unsubscribe_with_active_subscribers(self): - """Test not calling unsubscribe() when other subscribers are active.""" - # Fake that the client is connected - self.hass.data["mqtt"].connected = True + calls_a = MagicMock() + await mqtt.async_subscribe(hass, "test/state", calls_a) + await hass.async_block_till_done() + assert calls_a.called - unsub = mqtt.subscribe(self.hass, "test/state", None) - mqtt.subscribe(self.hass, "test/state", None) - self.hass.block_till_done() - assert self.hass.data["mqtt"]._mqttc.subscribe.called - - unsub() - self.hass.block_till_done() - assert not self.hass.data["mqtt"]._mqttc.unsubscribe.called + calls_b = MagicMock() + await mqtt.async_subscribe(hass, "test/state", calls_b) + await hass.async_block_till_done() + assert calls_b.called - def test_restore_subscriptions_on_reconnect(self): - """Test subscriptions are restored on reconnect.""" - # Fake that the client is connected - self.hass.data["mqtt"].connected = True - - mqtt.subscribe(self.hass, "test/state", None) - self.hass.block_till_done() - assert self.hass.data["mqtt"]._mqttc.subscribe.call_count == 1 - - self.hass.data["mqtt"]._mqtt_on_disconnect(None, None, 0) - self.hass.data["mqtt"]._mqtt_on_connect(None, None, None, 0) - self.hass.block_till_done() - assert self.hass.data["mqtt"]._mqttc.subscribe.call_count == 2 - - def test_restore_all_active_subscriptions_on_reconnect(self): - """Test active subscriptions are restored correctly on reconnect.""" - # Fake that the client is connected - self.hass.data["mqtt"].connected = True - - self.hass.data["mqtt"]._mqttc.subscribe.side_effect = ( - (0, 1), - (0, 2), - (0, 3), - (0, 4), - ) - - unsub = mqtt.subscribe(self.hass, "test/state", None, qos=2) - mqtt.subscribe(self.hass, "test/state", None) - 
mqtt.subscribe(self.hass, "test/state", None, qos=1) - self.hass.block_till_done() - - expected = [ - call("test/state", 2), - call("test/state", 0), - call("test/state", 1), - ] - assert self.hass.data["mqtt"]._mqttc.subscribe.mock_calls == expected - - unsub() - self.hass.block_till_done() - assert self.hass.data["mqtt"]._mqttc.unsubscribe.call_count == 0 - - self.hass.data["mqtt"]._mqtt_on_disconnect(None, None, 0) - self.hass.data["mqtt"]._mqtt_on_connect(None, None, None, 0) - self.hass.block_till_done() - - expected.append(call("test/state", 1)) - assert self.hass.data["mqtt"]._mqttc.subscribe.mock_calls == expected - - -async def test_setup_embedded_starts_with_no_config(hass): - """Test setting up embedded server with no config.""" - client_config = ("localhost", 1883, "user", "pass", None, "3.1.1") - - with patch( - "homeassistant.components.mqtt.server.async_start", - return_value=(True, client_config), - ) as _start: - await async_mock_mqtt_client(hass, {}) - assert _start.call_count == 1 - - -async def test_setup_embedded_with_embedded(hass): - """Test setting up embedded server with no config.""" - client_config = ("localhost", 1883, "user", "pass", None, "3.1.1") - - with patch( - "homeassistant.components.mqtt.server.async_start", - return_value=(True, client_config), - ) as _start: - await async_mock_mqtt_client(hass, {"embedded": None}) - assert _start.call_count == 1 + +async def test_not_calling_unsubscribe_with_active_subscribers( + hass, mqtt_client_mock, mqtt_mock +): + """Test not calling unsubscribe() when other subscribers are active.""" + # Fake that the client is connected + mqtt_mock().connected = True + + unsub = await mqtt.async_subscribe(hass, "test/state", None) + await mqtt.async_subscribe(hass, "test/state", None) + await hass.async_block_till_done() + assert mqtt_client_mock.subscribe.called + + unsub() + await hass.async_block_till_done() + assert not mqtt_client_mock.unsubscribe.called + + +async def test_restore_subscriptions_on_reconnect(hass, mqtt_client_mock, mqtt_mock): + """Test subscriptions are restored on reconnect.""" + # Fake that the client is connected + mqtt_mock().connected = True + + await mqtt.async_subscribe(hass, "test/state", None) + await hass.async_block_till_done() + assert mqtt_client_mock.subscribe.call_count == 1 + + mqtt_mock._mqtt_on_disconnect(None, None, 0) + mqtt_mock._mqtt_on_connect(None, None, None, 0) + await hass.async_block_till_done() + assert mqtt_client_mock.subscribe.call_count == 2 + + +async def test_restore_all_active_subscriptions_on_reconnect( + hass, mqtt_client_mock, mqtt_mock +): + """Test active subscriptions are restored correctly on reconnect.""" + # Fake that the client is connected + mqtt_mock().connected = True + + mqtt_client_mock.subscribe.side_effect = ( + (0, 1), + (0, 2), + (0, 3), + (0, 4), + ) + + unsub = await mqtt.async_subscribe(hass, "test/state", None, qos=2) + await mqtt.async_subscribe(hass, "test/state", None) + await mqtt.async_subscribe(hass, "test/state", None, qos=1) + await hass.async_block_till_done() + + expected = [ + call("test/state", 2), + call("test/state", 0), + call("test/state", 1), + ] + assert mqtt_client_mock.subscribe.mock_calls == expected + + unsub() + await hass.async_block_till_done() + assert mqtt_client_mock.unsubscribe.call_count == 0 + + mqtt_mock._mqtt_on_disconnect(None, None, 0) + mqtt_mock._mqtt_on_connect(None, None, None, 0) + await hass.async_block_till_done() + + expected.append(call("test/state", 1)) + assert mqtt_client_mock.subscribe.mock_calls == 
expected async def test_setup_logs_error_if_no_connect_broker(hass, caplog): @@ -697,119 +690,93 @@ async def test_setup_raises_ConfigEntryNotReady_if_no_connect_broker(hass, caplo assert "Failed to connect to MQTT server due to exception:" in caplog.text -async def test_setup_uses_certificate_on_certificate_set_to_auto(hass, mock_mqtt): +async def test_setup_uses_certificate_on_certificate_set_to_auto(hass): """Test setup uses bundled certs when certificate is set to auto.""" - entry = MockConfigEntry( - domain=mqtt.DOMAIN, - data={mqtt.CONF_BROKER: "test-broker", "certificate": "auto"}, - ) + calls = [] - assert await mqtt.async_setup_entry(hass, entry) + def mock_tls_set(certificate, certfile=None, keyfile=None, tls_version=None): + calls.append((certificate, certfile, keyfile, tls_version)) - assert mock_mqtt.called + with patch("paho.mqtt.client.Client") as mock_client: + mock_client().tls_set = mock_tls_set + entry = MockConfigEntry( + domain=mqtt.DOMAIN, + data={mqtt.CONF_BROKER: "test-broker", "certificate": "auto"}, + ) - import requests.certs + assert await mqtt.async_setup_entry(hass, entry) - expectedCertificate = requests.certs.where() - assert mock_mqtt.mock_calls[0][2]["certificate"] == expectedCertificate + assert calls + + import certifi + + expectedCertificate = certifi.where() + # assert mock_mqtt.mock_calls[0][1][2]["certificate"] == expectedCertificate + assert calls[0][0] == expectedCertificate -async def test_setup_does_not_use_certificate_on_mqtts_port(hass, mock_mqtt): - """Test setup doesn't use bundled certs when ssl set.""" - entry = MockConfigEntry( - domain=mqtt.DOMAIN, data={mqtt.CONF_BROKER: "test-broker", "port": 8883} - ) - - assert await mqtt.async_setup_entry(hass, entry) - - assert mock_mqtt.called - assert mock_mqtt.mock_calls[0][2]["port"] == 8883 - - import requests.certs - - mqttsCertificateBundle = requests.certs.where() - assert mock_mqtt.mock_calls[0][2]["port"] != mqttsCertificateBundle - - -async def test_setup_without_tls_config_uses_tlsv1_under_python36(hass, mock_mqtt): +async def test_setup_without_tls_config_uses_tlsv1_under_python36(hass): """Test setup defaults to TLSv1 under python3.6.""" - entry = MockConfigEntry(domain=mqtt.DOMAIN, data={mqtt.CONF_BROKER: "test-broker"}) + calls = [] - assert await mqtt.async_setup_entry(hass, entry) + def mock_tls_set(certificate, certfile=None, keyfile=None, tls_version=None): + calls.append((certificate, certfile, keyfile, tls_version)) - assert mock_mqtt.called + with patch("paho.mqtt.client.Client") as mock_client: + mock_client().tls_set = mock_tls_set + entry = MockConfigEntry( + domain=mqtt.DOMAIN, + data={"certificate": "auto", mqtt.CONF_BROKER: "test-broker"}, + ) - import sys + assert await mqtt.async_setup_entry(hass, entry) - if sys.hexversion >= 0x03060000: - expectedTlsVersion = ssl.PROTOCOL_TLS # pylint: disable=no-member - else: - expectedTlsVersion = ssl.PROTOCOL_TLSv1 + assert calls - assert mock_mqtt.mock_calls[0][2]["tls_version"] == expectedTlsVersion + import sys + + if sys.hexversion >= 0x03060000: + expectedTlsVersion = ssl.PROTOCOL_TLS # pylint: disable=no-member + else: + expectedTlsVersion = ssl.PROTOCOL_TLSv1 + + assert calls[0][3] == expectedTlsVersion -async def test_setup_with_tls_config_uses_tls_version1_2(hass, mock_mqtt): - """Test setup uses specified TLS version.""" - entry = MockConfigEntry( - domain=mqtt.DOMAIN, data={mqtt.CONF_BROKER: "test-broker", "tls_version": "1.2"} - ) - - assert await mqtt.async_setup_entry(hass, entry) - - assert mock_mqtt.called - - 
assert mock_mqtt.mock_calls[0][2]["tls_version"] == ssl.PROTOCOL_TLSv1_2 - - -async def test_setup_with_tls_config_of_v1_under_python36_only_uses_v1(hass, mock_mqtt): - """Test setup uses TLSv1.0 if explicitly chosen.""" - entry = MockConfigEntry( - domain=mqtt.DOMAIN, data={mqtt.CONF_BROKER: "test-broker", "tls_version": "1.0"} - ) - - assert await mqtt.async_setup_entry(hass, entry) - - assert mock_mqtt.called - assert mock_mqtt.mock_calls[0][2]["tls_version"] == ssl.PROTOCOL_TLSv1 - - -async def test_birth_message(hass): - """Test sending birth message.""" - mqtt_client = await async_mock_mqtt_client( - hass, +@pytest.mark.parametrize( + "mqtt_config", + [ { mqtt.CONF_BROKER: "mock-broker", mqtt.CONF_BIRTH_MESSAGE: { mqtt.ATTR_TOPIC: "birth", mqtt.ATTR_PAYLOAD: "birth", }, - }, - ) + } + ], +) +async def test_birth_message(hass, mqtt_client_mock, mqtt_mock): + """Test sending birth message.""" calls = [] - mqtt_client.publish.side_effect = lambda *args: calls.append(args) - hass.data["mqtt"]._mqtt_on_connect(None, None, 0, 0) + mqtt_client_mock.publish.side_effect = lambda *args: calls.append(args) + mqtt_mock._mqtt_on_connect(None, None, 0, 0) await hass.async_block_till_done() assert calls[-1] == ("birth", "birth", 0, False) -async def test_mqtt_subscribes_topics_on_connect(hass): +async def test_mqtt_subscribes_topics_on_connect(hass, mqtt_client_mock, mqtt_mock): """Test subscription to topic on connect.""" - mqtt_client = await async_mock_mqtt_client(hass) - - hass.data["mqtt"].subscriptions = [ - mqtt.Subscription("topic/test", None), - mqtt.Subscription("home/sensor", None, 2), - mqtt.Subscription("still/pending", None), - mqtt.Subscription("still/pending", None, 1), - ] + await mqtt.async_subscribe(hass, "topic/test", None) + await mqtt.async_subscribe(hass, "home/sensor", None, 2) + await mqtt.async_subscribe(hass, "still/pending", None) + await mqtt.async_subscribe(hass, "still/pending", None, 1) hass.add_job = MagicMock() - hass.data["mqtt"]._mqtt_on_connect(None, None, 0, 0) + mqtt_mock._mqtt_on_connect(None, None, 0, 0) await hass.async_block_till_done() - assert mqtt_client.disconnect.call_count == 0 + assert mqtt_client_mock.disconnect.call_count == 0 expected = {"topic/test": 0, "home/sensor": 2, "still/pending": 1} calls = {call[1][1]: call[1][2] for call in hass.add_job.mock_calls} @@ -822,9 +789,8 @@ async def test_setup_fails_without_config(hass): @pytest.mark.no_fail_on_log_exception -async def test_message_callback_exception_gets_logged(hass, caplog): +async def test_message_callback_exception_gets_logged(hass, caplog, mqtt_mock): """Test exception raised by message handler.""" - await async_mock_mqtt_component(hass) @callback def bad_handler(*args): @@ -841,10 +807,8 @@ async def test_message_callback_exception_gets_logged(hass, caplog): ) -async def test_mqtt_ws_subscription(hass, hass_ws_client): +async def test_mqtt_ws_subscription(hass, hass_ws_client, mqtt_mock): """Test MQTT websocket subscription.""" - await async_mock_mqtt_component(hass) - client = await hass_ws_client(hass) await client.send_json({"id": 5, "type": "mqtt/subscribe", "topic": "test-topic"}) response = await client.receive_json() @@ -867,10 +831,8 @@ async def test_mqtt_ws_subscription(hass, hass_ws_client): assert response["success"] -async def test_dump_service(hass): +async def test_dump_service(hass, mqtt_mock): """Test that we can dump a topic.""" - await async_mock_mqtt_component(hass) - mopen = mock_open() await hass.services.async_call( @@ -893,9 +855,8 @@ async def 
test_mqtt_ws_remove_discovered_device( hass, device_reg, entity_reg, hass_ws_client, mqtt_mock ): """Test MQTT websocket device removal.""" - config_entry = MockConfigEntry(domain=mqtt.DOMAIN) - config_entry.add_to_hass(hass) - await async_start(hass, "homeassistant", {}, config_entry) + config_entry = hass.config_entries.async_entries(mqtt.DOMAIN)[0] + await async_start(hass, "homeassistant", config_entry) data = ( '{ "device":{"identifiers":["0AFFD2"]},' @@ -926,9 +887,8 @@ async def test_mqtt_ws_remove_discovered_device_twice( hass, device_reg, hass_ws_client, mqtt_mock ): """Test MQTT websocket device removal.""" - config_entry = MockConfigEntry(domain=mqtt.DOMAIN) - config_entry.add_to_hass(hass) - await async_start(hass, "homeassistant", {}, config_entry) + config_entry = hass.config_entries.async_entries(mqtt.DOMAIN)[0] + await async_start(hass, "homeassistant", config_entry) data = ( '{ "device":{"identifiers":["0AFFD2"]},' @@ -961,9 +921,8 @@ async def test_mqtt_ws_remove_discovered_device_same_topic( hass, device_reg, hass_ws_client, mqtt_mock ): """Test MQTT websocket device removal.""" - config_entry = MockConfigEntry(domain=mqtt.DOMAIN) - config_entry.add_to_hass(hass) - await async_start(hass, "homeassistant", {}, config_entry) + config_entry = hass.config_entries.async_entries(mqtt.DOMAIN)[0] + await async_start(hass, "homeassistant", config_entry) data = ( '{ "device":{"identifiers":["0AFFD2"]},' @@ -1019,9 +978,8 @@ async def test_mqtt_ws_get_device_debug_info( hass, device_reg, hass_ws_client, mqtt_mock ): """Test MQTT websocket device debug info.""" - config_entry = MockConfigEntry(domain=mqtt.DOMAIN) - config_entry.add_to_hass(hass) - await async_start(hass, "homeassistant", {}, config_entry) + config_entry = hass.config_entries.async_entries(mqtt.DOMAIN)[0] + await async_start(hass, "homeassistant", config_entry) config = { "device": {"identifiers": ["0AFFD2"]}, @@ -1105,9 +1063,8 @@ async def test_debug_info_multiple_devices(hass, mqtt_mock): }, ] - entry = MockConfigEntry(domain=mqtt.DOMAIN) - entry.add_to_hass(hass) - await async_start(hass, "homeassistant", {}, entry) + entry = hass.config_entries.async_entries(mqtt.DOMAIN)[0] + await async_start(hass, "homeassistant", entry) registry = await hass.helpers.device_registry.async_get_registry() for d in devices: @@ -1187,9 +1144,8 @@ async def test_debug_info_multiple_entities_triggers(hass, mqtt_mock): }, ] - entry = MockConfigEntry(domain=mqtt.DOMAIN) - entry.add_to_hass(hass) - await async_start(hass, "homeassistant", {}, entry) + entry = hass.config_entries.async_entries(mqtt.DOMAIN)[0] + await async_start(hass, "homeassistant", entry) registry = await hass.helpers.device_registry.async_get_registry() for c in config: @@ -1265,9 +1221,8 @@ async def test_debug_info_wildcard(hass, mqtt_mock): "unique_id": "veryunique", } - entry = MockConfigEntry(domain=mqtt.DOMAIN) - entry.add_to_hass(hass) - await async_start(hass, "homeassistant", {}, entry) + entry = hass.config_entries.async_entries(mqtt.DOMAIN)[0] + await async_start(hass, "homeassistant", entry) registry = await hass.helpers.device_registry.async_get_registry() data = json.dumps(config) @@ -1314,9 +1269,8 @@ async def test_debug_info_filter_same(hass, mqtt_mock): "unique_id": "veryunique", } - entry = MockConfigEntry(domain=mqtt.DOMAIN) - entry.add_to_hass(hass) - await async_start(hass, "homeassistant", {}, entry) + entry = hass.config_entries.async_entries(mqtt.DOMAIN)[0] + await async_start(hass, "homeassistant", entry) registry = await 
hass.helpers.device_registry.async_get_registry() data = json.dumps(config) @@ -1376,9 +1330,8 @@ async def test_debug_info_same_topic(hass, mqtt_mock): "unique_id": "veryunique", } - entry = MockConfigEntry(domain=mqtt.DOMAIN) - entry.add_to_hass(hass) - await async_start(hass, "homeassistant", {}, entry) + entry = hass.config_entries.async_entries(mqtt.DOMAIN)[0] + await async_start(hass, "homeassistant", entry) registry = await hass.helpers.device_registry.async_get_registry() data = json.dumps(config) @@ -1430,9 +1383,8 @@ async def test_debug_info_qos_retain(hass, mqtt_mock): "unique_id": "veryunique", } - entry = MockConfigEntry(domain=mqtt.DOMAIN) - entry.add_to_hass(hass) - await async_start(hass, "homeassistant", {}, entry) + entry = hass.config_entries.async_entries(mqtt.DOMAIN)[0] + await async_start(hass, "homeassistant", entry) registry = await hass.helpers.device_registry.async_get_registry() data = json.dumps(config) diff --git a/tests/components/mqtt/test_legacy_vacuum.py b/tests/components/mqtt/test_legacy_vacuum.py index 032a55edee4..893c1b78f1e 100644 --- a/tests/components/mqtt/test_legacy_vacuum.py +++ b/tests/components/mqtt/test_legacy_vacuum.py @@ -23,6 +23,7 @@ from homeassistant.const import CONF_NAME, CONF_PLATFORM, STATE_OFF, STATE_ON from homeassistant.setup import async_setup_component from .test_common import ( + help_test_availability_when_connection_lost, help_test_availability_without_topic, help_test_custom_availability_payload, help_test_default_availability_payload, @@ -543,6 +544,13 @@ async def test_missing_fan_speed_template(hass, mqtt_mock): assert state is None +async def test_availability_when_connection_lost(hass, mqtt_mock): + """Test availability after MQTT disconnection.""" + await help_test_availability_when_connection_lost( + hass, mqtt_mock, vacuum.DOMAIN, DEFAULT_CONFIG_2 + ) + + async def test_availability_without_topic(hass, mqtt_mock): """Test availability without defined availability topic.""" await help_test_availability_without_topic( @@ -617,7 +625,7 @@ async def test_unique_id(hass, mqtt_mock): }, ] } - await help_test_unique_id(hass, vacuum.DOMAIN, config) + await help_test_unique_id(hass, mqtt_mock, vacuum.DOMAIN, config) async def test_discovery_removal_vacuum(hass, mqtt_mock, caplog): diff --git a/tests/components/mqtt/test_light.py b/tests/components/mqtt/test_light.py index f832e235915..5fa8fa181e5 100644 --- a/tests/components/mqtt/test_light.py +++ b/tests/components/mqtt/test_light.py @@ -162,6 +162,7 @@ import homeassistant.core as ha from homeassistant.setup import async_setup_component from .test_common import ( + help_test_availability_when_connection_lost, help_test_availability_without_topic, help_test_custom_availability_payload, help_test_default_availability_payload, @@ -184,11 +185,7 @@ from .test_common import ( ) from tests.async_mock import call, patch -from tests.common import ( - MockConfigEntry, - assert_setup_component, - async_fire_mqtt_message, -) +from tests.common import assert_setup_component, async_fire_mqtt_message from tests.components.light import common DEFAULT_CONFIG = { @@ -1326,6 +1323,13 @@ async def test_effect(hass, mqtt_mock): mqtt_mock.async_publish.assert_called_once_with("test_light/set", "OFF", 0, False) +async def test_availability_when_connection_lost(hass, mqtt_mock): + """Test availability after MQTT disconnection.""" + await help_test_availability_when_connection_lost( + hass, mqtt_mock, light.DOMAIN, DEFAULT_CONFIG + ) + + async def test_availability_without_topic(hass, 
mqtt_mock): """Test availability without defined availability topic.""" await help_test_availability_without_topic( @@ -1382,7 +1386,7 @@ async def test_discovery_update_attr(hass, mqtt_mock, caplog): ) -async def test_unique_id(hass): +async def test_unique_id(hass, mqtt_mock): """Test unique id option only creates one light per unique_id.""" config = { light.DOMAIN: [ @@ -1402,7 +1406,7 @@ async def test_unique_id(hass): }, ] } - await help_test_unique_id(hass, light.DOMAIN, config) + await help_test_unique_id(hass, mqtt_mock, light.DOMAIN, config) async def test_discovery_removal_light(hass, mqtt_mock, caplog): @@ -1417,8 +1421,8 @@ async def test_discovery_removal_light(hass, mqtt_mock, caplog): async def test_discovery_deprecated(hass, mqtt_mock, caplog): """Test discovery of mqtt light with deprecated platform option.""" - entry = MockConfigEntry(domain=mqtt.DOMAIN) - await async_start(hass, "homeassistant", {"mqtt": {}}, entry) + entry = hass.config_entries.async_entries(mqtt.DOMAIN)[0] + await async_start(hass, "homeassistant", entry) data = ( '{ "name": "Beer",' ' "platform": "mqtt",' ' "command_topic": "test_topic"}' ) diff --git a/tests/components/mqtt/test_light_json.py b/tests/components/mqtt/test_light_json.py index bb9e2afb0e5..7bb3763654e 100644 --- a/tests/components/mqtt/test_light_json.py +++ b/tests/components/mqtt/test_light_json.py @@ -102,6 +102,7 @@ import homeassistant.core as ha from homeassistant.setup import async_setup_component from .test_common import ( + help_test_availability_when_connection_lost, help_test_availability_without_topic, help_test_custom_availability_payload, help_test_default_availability_payload, @@ -1065,6 +1066,13 @@ async def test_invalid_values(hass, mqtt_mock): assert state.attributes.get("color_temp") == 100 +async def test_availability_when_connection_lost(hass, mqtt_mock): + """Test availability after MQTT disconnection.""" + await help_test_availability_when_connection_lost( + hass, mqtt_mock, light.DOMAIN, DEFAULT_CONFIG + ) + + async def test_availability_without_topic(hass, mqtt_mock): """Test availability without defined availability topic.""" await help_test_availability_without_topic( @@ -1121,7 +1129,7 @@ async def test_discovery_update_attr(hass, mqtt_mock, caplog): ) -async def test_unique_id(hass): +async def test_unique_id(hass, mqtt_mock): """Test unique id option only creates one light per unique_id.""" config = { light.DOMAIN: [ @@ -1143,7 +1151,7 @@ async def test_unique_id(hass): }, ] } - await help_test_unique_id(hass, light.DOMAIN, config) + await help_test_unique_id(hass, mqtt_mock, light.DOMAIN, config) async def test_discovery_removal(hass, mqtt_mock, caplog): diff --git a/tests/components/mqtt/test_light_template.py b/tests/components/mqtt/test_light_template.py index cb5aff40b4b..f0e226d2095 100644 --- a/tests/components/mqtt/test_light_template.py +++ b/tests/components/mqtt/test_light_template.py @@ -39,6 +39,7 @@ import homeassistant.core as ha from homeassistant.setup import async_setup_component from .test_common import ( + help_test_availability_when_connection_lost, help_test_availability_without_topic, help_test_custom_availability_payload, help_test_default_availability_payload, @@ -797,6 +798,13 @@ async def test_invalid_values(hass, mqtt_mock): assert state.attributes.get("effect") == "rainbow" +async def test_availability_when_connection_lost(hass, mqtt_mock): + """Test availability after MQTT disconnection.""" + await help_test_availability_when_connection_lost( + hass, mqtt_mock, light.DOMAIN, 
DEFAULT_CONFIG + ) + + async def test_availability_without_topic(hass, mqtt_mock): """Test availability without defined availability topic.""" await help_test_availability_without_topic( @@ -853,7 +861,7 @@ async def test_discovery_update_attr(hass, mqtt_mock, caplog): ) -async def test_unique_id(hass): +async def test_unique_id(hass, mqtt_mock): """Test unique id option only creates one light per unique_id.""" config = { light.DOMAIN: [ @@ -877,7 +885,7 @@ async def test_unique_id(hass): }, ] } - await help_test_unique_id(hass, light.DOMAIN, config) + await help_test_unique_id(hass, mqtt_mock, light.DOMAIN, config) async def test_discovery_removal(hass, mqtt_mock, caplog): diff --git a/tests/components/mqtt/test_lock.py b/tests/components/mqtt/test_lock.py index 0e9a9af850f..ff130077a95 100644 --- a/tests/components/mqtt/test_lock.py +++ b/tests/components/mqtt/test_lock.py @@ -12,6 +12,7 @@ from homeassistant.const import ATTR_ASSUMED_STATE, ATTR_ENTITY_ID from homeassistant.setup import async_setup_component from .test_common import ( + help_test_availability_when_connection_lost, help_test_availability_without_topic, help_test_custom_availability_payload, help_test_default_availability_payload, @@ -272,6 +273,13 @@ async def test_sending_mqtt_commands_and_explicit_optimistic(hass, mqtt_mock): assert state.attributes.get(ATTR_ASSUMED_STATE) +async def test_availability_when_connection_lost(hass, mqtt_mock): + """Test availability after MQTT disconnection.""" + await help_test_availability_when_connection_lost( + hass, mqtt_mock, LOCK_DOMAIN, DEFAULT_CONFIG + ) + + async def test_availability_without_topic(hass, mqtt_mock): """Test availability without defined availability topic.""" await help_test_availability_without_topic( @@ -328,7 +336,7 @@ async def test_discovery_update_attr(hass, mqtt_mock, caplog): ) -async def test_unique_id(hass): +async def test_unique_id(hass, mqtt_mock): """Test unique id option only creates one lock per unique_id.""" config = { LOCK_DOMAIN: [ @@ -348,7 +356,7 @@ async def test_unique_id(hass): }, ] } - await help_test_unique_id(hass, LOCK_DOMAIN, config) + await help_test_unique_id(hass, mqtt_mock, LOCK_DOMAIN, config) async def test_discovery_removal_lock(hass, mqtt_mock, caplog): diff --git a/tests/components/mqtt/test_sensor.py b/tests/components/mqtt/test_sensor.py index d711b9e3bb8..f54a27e8805 100644 --- a/tests/components/mqtt/test_sensor.py +++ b/tests/components/mqtt/test_sensor.py @@ -13,6 +13,7 @@ from homeassistant.setup import async_setup_component import homeassistant.util.dt as dt_util from .test_common import ( + help_test_availability_when_connection_lost, help_test_availability_without_topic, help_test_custom_availability_payload, help_test_default_availability_payload, @@ -39,11 +40,7 @@ from .test_common import ( ) from tests.async_mock import patch -from tests.common import ( - MockConfigEntry, - async_fire_mqtt_message, - async_fire_time_changed, -) +from tests.common import async_fire_mqtt_message, async_fire_time_changed DEFAULT_CONFIG = { sensor.DOMAIN: {"platform": "mqtt", "name": "test", "state_topic": "test-topic"} @@ -232,6 +229,13 @@ async def test_force_update_enabled(hass, mqtt_mock): assert len(events) == 2 +async def test_availability_when_connection_lost(hass, mqtt_mock): + """Test availability after MQTT disconnection.""" + await help_test_availability_when_connection_lost( + hass, mqtt_mock, sensor.DOMAIN, DEFAULT_CONFIG + ) + + async def test_availability_without_topic(hass, mqtt_mock): """Test availability without 
defined availability topic.""" await help_test_availability_without_topic( @@ -333,7 +337,7 @@ async def test_discovery_update_attr(hass, mqtt_mock, caplog): ) -async def test_unique_id(hass): +async def test_unique_id(hass, mqtt_mock): """Test unique id option only creates one sensor per unique_id.""" config = { sensor.DOMAIN: [ @@ -351,7 +355,7 @@ async def test_unique_id(hass): }, ] } - await help_test_unique_id(hass, sensor.DOMAIN, config) + await help_test_unique_id(hass, mqtt_mock, sensor.DOMAIN, config) async def test_discovery_removal_sensor(hass, mqtt_mock, caplog): @@ -423,9 +427,8 @@ async def test_entity_id_update_discovery_update(hass, mqtt_mock): async def test_entity_device_info_with_hub(hass, mqtt_mock): """Test MQTT sensor device registry integration.""" - entry = MockConfigEntry(domain=mqtt.DOMAIN) - entry.add_to_hass(hass) - await async_start(hass, "homeassistant", {}, entry) + entry = hass.config_entries.async_entries(mqtt.DOMAIN)[0] + await async_start(hass, "homeassistant", entry) registry = await hass.helpers.device_registry.async_get_registry() hub = registry.async_get_or_create( diff --git a/tests/components/mqtt/test_server.py b/tests/components/mqtt/test_server.py deleted file mode 100644 index b3320d6aaca..00000000000 --- a/tests/components/mqtt/test_server.py +++ /dev/null @@ -1,87 +0,0 @@ -"""The tests for the MQTT component embedded server.""" -from unittest.mock import MagicMock, Mock - -import pytest - -import homeassistant.components.mqtt as mqtt -from homeassistant.const import CONF_PASSWORD -from homeassistant.setup import setup_component - -from tests.async_mock import AsyncMock, patch -from tests.common import get_test_home_assistant, mock_coro - - -@pytest.fixture(autouse=True) -def inject_fixture(hass_storage): - """Inject pytest fixtures.""" - - -class TestMQTT: - """Test the MQTT component.""" - - def setup_method(self, method): - """Set up things to be run when tests are started.""" - self.hass = get_test_home_assistant() - - def teardown_method(self, method): - """Stop everything that was started.""" - self.hass.stop() - - @patch("passlib.apps.custom_app_context", Mock(return_value="")) - @patch("tempfile.NamedTemporaryFile", Mock(return_value=MagicMock())) - @patch("hbmqtt.broker.Broker", Mock(return_value=MagicMock(start=AsyncMock()))) - @patch("hbmqtt.broker.Broker.start", AsyncMock(return_value=None)) - @patch("homeassistant.components.mqtt.MQTT") - def test_creating_config_with_pass_and_no_http_pass(self, mock_mqtt): - """Test if the MQTT server gets started with password. - - Since 0.77, MQTT server has to set up its own password. - """ - mock_mqtt().async_connect = AsyncMock(return_value=True) - self.hass.bus.listen_once = MagicMock() - password = "mqtt_secret" - - assert setup_component( - self.hass, mqtt.DOMAIN, {mqtt.DOMAIN: {CONF_PASSWORD: password}} - ) - self.hass.block_till_done() - assert mock_mqtt.called - assert mock_mqtt.mock_calls[1][2]["username"] == "homeassistant" - assert mock_mqtt.mock_calls[1][2]["password"] == password - - @patch("passlib.apps.custom_app_context", Mock(return_value="")) - @patch("tempfile.NamedTemporaryFile", Mock(return_value=MagicMock())) - @patch("hbmqtt.broker.Broker", Mock(return_value=MagicMock(start=AsyncMock()))) - @patch("hbmqtt.broker.Broker.start", AsyncMock(return_value=None)) - @patch("homeassistant.components.mqtt.MQTT") - def test_creating_config_with_pass_and_http_pass(self, mock_mqtt): - """Test if the MQTT server gets started with password. 
- - Since 0.77, MQTT server has to set up its own password. - """ - mock_mqtt().async_connect = AsyncMock(return_value=True) - self.hass.bus.listen_once = MagicMock() - password = "mqtt_secret" - - self.hass.config.api = MagicMock(api_password="api_password") - assert setup_component( - self.hass, mqtt.DOMAIN, {mqtt.DOMAIN: {CONF_PASSWORD: password}} - ) - self.hass.block_till_done() - assert mock_mqtt.called - assert mock_mqtt.mock_calls[1][2]["username"] == "homeassistant" - assert mock_mqtt.mock_calls[1][2]["password"] == password - - @patch("tempfile.NamedTemporaryFile", Mock(return_value=MagicMock())) - @patch("hbmqtt.broker.Broker.start", return_value=mock_coro()) - def test_broker_config_fails(self, mock_run): - """Test if the MQTT component fails if server fails.""" - from hbmqtt.broker import BrokerException - - mock_run.side_effect = BrokerException - - self.hass.config.api = MagicMock(api_password=None) - - assert not setup_component( - self.hass, mqtt.DOMAIN, {mqtt.DOMAIN: {mqtt.CONF_EMBEDDED: {}}} - ) diff --git a/tests/components/mqtt/test_state_vacuum.py b/tests/components/mqtt/test_state_vacuum.py index f77a1a11ca1..c8ca7d3691b 100644 --- a/tests/components/mqtt/test_state_vacuum.py +++ b/tests/components/mqtt/test_state_vacuum.py @@ -33,6 +33,7 @@ from homeassistant.const import ( from homeassistant.setup import async_setup_component from .test_common import ( + help_test_availability_when_connection_lost, help_test_availability_without_topic, help_test_custom_availability_payload, help_test_default_availability_payload, @@ -321,6 +322,13 @@ async def test_status_invalid_json(hass, mqtt_mock): assert state.state == STATE_UNKNOWN +async def test_availability_when_connection_lost(hass, mqtt_mock): + """Test availability after MQTT disconnection.""" + await help_test_availability_when_connection_lost( + hass, mqtt_mock, vacuum.DOMAIN, DEFAULT_CONFIG_2 + ) + + async def test_availability_without_topic(hass, mqtt_mock): """Test availability without defined availability topic.""" await help_test_availability_without_topic( @@ -397,7 +405,7 @@ async def test_unique_id(hass, mqtt_mock): }, ] } - await help_test_unique_id(hass, vacuum.DOMAIN, config) + await help_test_unique_id(hass, mqtt_mock, vacuum.DOMAIN, config) async def test_discovery_removal_vacuum(hass, mqtt_mock, caplog): diff --git a/tests/components/mqtt/test_subscription.py b/tests/components/mqtt/test_subscription.py index 6c906cda1d1..05f14107384 100644 --- a/tests/components/mqtt/test_subscription.py +++ b/tests/components/mqtt/test_subscription.py @@ -7,7 +7,7 @@ from homeassistant.components.mqtt.subscription import ( ) from homeassistant.core import callback -from tests.common import async_fire_mqtt_message, async_mock_mqtt_component +from tests.common import async_fire_mqtt_message async def test_subscribe_topics(hass, mqtt_mock, caplog): @@ -119,7 +119,6 @@ async def test_modify_topics(hass, mqtt_mock, caplog): async def test_qos_encoding_default(hass, mqtt_mock, caplog): """Test default qos and encoding.""" - mock_mqtt = await async_mock_mqtt_component(hass) @callback def msg_callback(*args): @@ -132,14 +131,13 @@ async def test_qos_encoding_default(hass, mqtt_mock, caplog): sub_state, {"test_topic1": {"topic": "test-topic1", "msg_callback": msg_callback}}, ) - mock_mqtt.async_subscribe.assert_called_once_with( + mqtt_mock.async_subscribe.assert_called_once_with( "test-topic1", mock.ANY, 0, "utf-8" ) async def test_qos_encoding_custom(hass, mqtt_mock, caplog): """Test custom qos and encoding.""" - mock_mqtt = 
await async_mock_mqtt_component(hass) @callback def msg_callback(*args): @@ -159,14 +157,13 @@ async def test_qos_encoding_custom(hass, mqtt_mock, caplog): } }, ) - mock_mqtt.async_subscribe.assert_called_once_with( + mqtt_mock.async_subscribe.assert_called_once_with( "test-topic1", mock.ANY, 1, "utf-16" ) async def test_no_change(hass, mqtt_mock, caplog): """Test subscription to topics without change.""" - mock_mqtt = await async_mock_mqtt_component(hass) @callback def msg_callback(*args): @@ -179,10 +176,10 @@ async def test_no_change(hass, mqtt_mock, caplog): sub_state, {"test_topic1": {"topic": "test-topic1", "msg_callback": msg_callback}}, ) - call_count = mock_mqtt.async_subscribe.call_count + call_count = mqtt_mock.async_subscribe.call_count sub_state = await async_subscribe_topics( hass, sub_state, {"test_topic1": {"topic": "test-topic1", "msg_callback": msg_callback}}, ) - assert call_count == mock_mqtt.async_subscribe.call_count + assert call_count == mqtt_mock.async_subscribe.call_count diff --git a/tests/components/mqtt/test_switch.py b/tests/components/mqtt/test_switch.py index da66e2a7f60..869a413eb6b 100644 --- a/tests/components/mqtt/test_switch.py +++ b/tests/components/mqtt/test_switch.py @@ -7,6 +7,7 @@ import homeassistant.core as ha from homeassistant.setup import async_setup_component from .test_common import ( + help_test_availability_when_connection_lost, help_test_availability_without_topic, help_test_custom_availability_payload, help_test_default_availability_payload, @@ -29,7 +30,7 @@ from .test_common import ( ) from tests.async_mock import patch -from tests.common import async_fire_mqtt_message, async_mock_mqtt_component +from tests.common import async_fire_mqtt_message from tests.components.switch import common DEFAULT_CONFIG = { @@ -37,13 +38,7 @@ DEFAULT_CONFIG = { } -@pytest.fixture -def mock_publish(hass): - """Initialize components.""" - yield hass.loop.run_until_complete(async_mock_mqtt_component(hass)) - - -async def test_controlling_state_via_topic(hass, mock_publish): +async def test_controlling_state_via_topic(hass, mqtt_mock): """Test the controlling state via topic.""" assert await async_setup_component( hass, @@ -76,7 +71,7 @@ async def test_controlling_state_via_topic(hass, mock_publish): assert state.state == STATE_OFF -async def test_sending_mqtt_commands_and_optimistic(hass, mock_publish): +async def test_sending_mqtt_commands_and_optimistic(hass, mqtt_mock): """Test the sending MQTT commands in optimistic mode.""" fake_state = ha.State("switch.test", "on") @@ -106,23 +101,23 @@ async def test_sending_mqtt_commands_and_optimistic(hass, mock_publish): await common.async_turn_on(hass, "switch.test") - mock_publish.async_publish.assert_called_once_with( + mqtt_mock.async_publish.assert_called_once_with( "command-topic", "beer on", 2, False ) - mock_publish.async_publish.reset_mock() + mqtt_mock.async_publish.reset_mock() state = hass.states.get("switch.test") assert state.state == STATE_ON await common.async_turn_off(hass, "switch.test") - mock_publish.async_publish.assert_called_once_with( + mqtt_mock.async_publish.assert_called_once_with( "command-topic", "beer off", 2, False ) state = hass.states.get("switch.test") assert state.state == STATE_OFF -async def test_controlling_state_via_topic_and_json_message(hass, mock_publish): +async def test_controlling_state_via_topic_and_json_message(hass, mqtt_mock): """Test the controlling state via topic and JSON message.""" assert await async_setup_component( hass, @@ -155,6 +150,13 @@ async def 
test_controlling_state_via_topic_and_json_message(hass, mock_publish): assert state.state == STATE_OFF +async def test_availability_when_connection_lost(hass, mqtt_mock): + """Test availability after MQTT disconnection.""" + await help_test_availability_when_connection_lost( + hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG + ) + + async def test_availability_without_topic(hass, mqtt_mock): """Test availability without defined availability topic.""" await help_test_availability_without_topic( @@ -198,7 +200,7 @@ async def test_custom_availability_payload(hass, mqtt_mock): ) -async def test_custom_state_payload(hass, mock_publish): +async def test_custom_state_payload(hass, mqtt_mock): """Test the state payload.""" assert await async_setup_component( hass, @@ -268,7 +270,7 @@ async def test_discovery_update_attr(hass, mqtt_mock, caplog): ) -async def test_unique_id(hass): +async def test_unique_id(hass, mqtt_mock): """Test unique id option only creates one switch per unique_id.""" config = { switch.DOMAIN: [ @@ -288,7 +290,7 @@ async def test_unique_id(hass): }, ] } - await help_test_unique_id(hass, switch.DOMAIN, config) + await help_test_unique_id(hass, mqtt_mock, switch.DOMAIN, config) async def test_discovery_removal_switch(hass, mqtt_mock, caplog): diff --git a/tests/components/mqtt_eventstream/test_init.py b/tests/components/mqtt_eventstream/test_init.py index 8050535eed4..ecdedf904d4 100644 --- a/tests/components/mqtt_eventstream/test_init.py +++ b/tests/components/mqtt_eventstream/test_init.py @@ -7,15 +7,13 @@ import homeassistant.components.mqtt_eventstream as eventstream from homeassistant.const import EVENT_STATE_CHANGED from homeassistant.core import State, callback from homeassistant.helpers.json import JSONEncoder -from homeassistant.setup import setup_component +from homeassistant.setup import async_setup_component import homeassistant.util.dt as dt_util from tests.async_mock import ANY, patch from tests.common import ( - fire_mqtt_message, - fire_time_changed, - get_test_home_assistant, - mock_mqtt_component, + async_fire_mqtt_message, + async_fire_time_changed, mock_state_change_event, ) @@ -25,177 +23,170 @@ def mock_storage(hass_storage): """Autouse hass_storage for the TestCase tests.""" -class TestMqttEventStream: - """Test the MQTT eventstream module.""" +async def add_eventstream(hass, sub_topic=None, pub_topic=None, ignore_event=None): + """Add a mqtt_eventstream component.""" + config = {} + if sub_topic: + config["subscribe_topic"] = sub_topic + if pub_topic: + config["publish_topic"] = pub_topic + if ignore_event: + config["ignore_event"] = ignore_event + return await async_setup_component( + hass, eventstream.DOMAIN, {eventstream.DOMAIN: config} + ) - def setup_method(self): - """Set up things to be run when tests are started.""" - self.hass = get_test_home_assistant() - self.mock_mqtt = mock_mqtt_component(self.hass) - def teardown_method(self): - """Stop everything that was started.""" - self.hass.stop() +async def test_setup_succeeds(hass, mqtt_mock): + """Test the success of the setup.""" + assert await add_eventstream(hass) - def add_eventstream(self, sub_topic=None, pub_topic=None, ignore_event=None): - """Add a mqtt_eventstream component.""" - config = {} - if sub_topic: - config["subscribe_topic"] = sub_topic - if pub_topic: - config["publish_topic"] = pub_topic - if ignore_event: - config["ignore_event"] = ignore_event - return setup_component( - self.hass, eventstream.DOMAIN, {eventstream.DOMAIN: config} - ) - def test_setup_succeeds(self): - """Test 
the success of the setup.""" - assert self.add_eventstream() +async def test_setup_with_pub(hass, mqtt_mock): + """Test the setup with subscription.""" + # Should start off with no listeners for all events + assert hass.bus.async_listeners().get("*") is None - def test_setup_with_pub(self): - """Test the setup with subscription.""" - # Should start off with no listeners for all events - assert self.hass.bus.listeners.get("*") is None + assert await add_eventstream(hass, pub_topic="bar") + await hass.async_block_till_done() - assert self.add_eventstream(pub_topic="bar") - self.hass.block_till_done() + # Verify that the event handler has been added as a listener + assert hass.bus.async_listeners().get("*") == 1 - # Verify that the event handler has been added as a listener - assert self.hass.bus.listeners.get("*") == 1 - @patch("homeassistant.components.mqtt.async_subscribe") - def test_subscribe(self, mock_sub): - """Test the subscription.""" - sub_topic = "foo" - assert self.add_eventstream(sub_topic=sub_topic) - self.hass.block_till_done() +async def test_subscribe(hass, mqtt_mock): + """Test the subscription.""" + sub_topic = "foo" + assert await add_eventstream(hass, sub_topic=sub_topic) + await hass.async_block_till_done() - # Verify that the this entity was subscribed to the topic - mock_sub.assert_called_with(self.hass, sub_topic, ANY) + # Verify that this entity was subscribed to the topic + mqtt_mock.async_subscribe.assert_called_with(sub_topic, ANY, 0, ANY) - @patch("homeassistant.components.mqtt.async_publish") - @patch("homeassistant.core.dt_util.utcnow") - def test_state_changed_event_sends_message(self, mock_utcnow, mock_pub): - """Test the sending of a new message if event changed.""" - now = dt_util.as_utc(dt_util.now()) - e_id = "fake.entity" - pub_topic = "bar" - mock_utcnow.return_value = now +async def test_state_changed_event_sends_message(hass, mqtt_mock): + """Test the sending of a new message if event changed.""" + now = dt_util.as_utc(dt_util.now()) + e_id = "fake.entity" + pub_topic = "bar" + with patch( + ("homeassistant.core.dt_util.utcnow"), return_value=now, + ): # Add the eventstream component for publishing events - assert self.add_eventstream(pub_topic=pub_topic) - self.hass.block_till_done() + assert await add_eventstream(hass, pub_topic=pub_topic) + await hass.async_block_till_done() # Reset the mock because it will have already gotten calls for the # mqtt_eventstream state change on initialization, etc.
- mock_pub.reset_mock() + mqtt_mock.async_publish.reset_mock() # Set a state of an entity - mock_state_change_event(self.hass, State(e_id, "on")) - self.hass.block_till_done() + mock_state_change_event(hass, State(e_id, "on")) + await hass.async_block_till_done() + await hass.async_block_till_done() - # The order of the JSON is indeterminate, - # so first just check that publish was called - mock_pub.assert_called_with(self.hass, pub_topic, ANY) - assert mock_pub.called + # The order of the JSON is indeterminate, + # so first just check that publish was called + mqtt_mock.async_publish.assert_called_with(pub_topic, ANY, 0, False) + assert mqtt_mock.async_publish.called - # Get the actual call to publish and make sure it was the one - # we were looking for - msg = mock_pub.call_args[0][2] - event = {} - event["event_type"] = EVENT_STATE_CHANGED - new_state = { - "last_updated": now.isoformat(), - "state": "on", - "entity_id": e_id, - "attributes": {}, - "last_changed": now.isoformat(), - } - event["event_data"] = {"new_state": new_state, "entity_id": e_id} + # Get the actual call to publish and make sure it was the one + # we were looking for + msg = mqtt_mock.async_publish.call_args[0][1] + event = {} + event["event_type"] = EVENT_STATE_CHANGED + new_state = { + "last_updated": now.isoformat(), + "state": "on", + "entity_id": e_id, + "attributes": {}, + "last_changed": now.isoformat(), + } + event["event_data"] = {"new_state": new_state, "entity_id": e_id} - # Verify that the message received was that expected - result = json.loads(msg) - result["event_data"]["new_state"].pop("context") - assert result == event + # Verify that the message received was that expected + result = json.loads(msg) + result["event_data"]["new_state"].pop("context") + assert result == event - @patch("homeassistant.components.mqtt.async_publish") - def test_time_event_does_not_send_message(self, mock_pub): - """Test the sending of a new message if time event.""" - assert self.add_eventstream(pub_topic="bar") - self.hass.block_till_done() - # Reset the mock because it will have already gotten calls for the - # mqtt_eventstream state change on initialization, etc. - mock_pub.reset_mock() +async def test_time_event_does_not_send_message(hass, mqtt_mock): + """Test the sending of a new message if time event.""" + assert await add_eventstream(hass, pub_topic="bar") + await hass.async_block_till_done() - fire_time_changed(self.hass, dt_util.utcnow()) - assert not mock_pub.called + # Reset the mock because it will have already gotten calls for the + # mqtt_eventstream state change on initialization, etc. 
+ mqtt_mock.async_publish.reset_mock() - def test_receiving_remote_event_fires_hass_event(self): - """Test the receiving of the remotely fired event.""" - sub_topic = "foo" - assert self.add_eventstream(sub_topic=sub_topic) - self.hass.block_till_done() + async_fire_time_changed(hass, dt_util.utcnow()) + assert not mqtt_mock.async_publish.called - calls = [] - @callback - def listener(_): - calls.append(1) +async def test_receiving_remote_event_fires_hass_event(hass, mqtt_mock): + """Test the receiving of the remotely fired event.""" + sub_topic = "foo" + assert await add_eventstream(hass, sub_topic=sub_topic) + await hass.async_block_till_done() - self.hass.bus.listen_once("test_event", listener) - self.hass.block_till_done() + calls = [] - payload = json.dumps( - {"event_type": "test_event", "event_data": {}}, cls=JSONEncoder - ) - fire_mqtt_message(self.hass, sub_topic, payload) - self.hass.block_till_done() + @callback + def listener(_): + calls.append(1) - assert 1 == len(calls) + hass.bus.async_listen_once("test_event", listener) + await hass.async_block_till_done() - @patch("homeassistant.components.mqtt.async_publish") - def test_ignored_event_doesnt_send_over_stream(self, mock_pub): - """Test the ignoring of sending events if defined.""" - assert self.add_eventstream(pub_topic="bar", ignore_event=["state_changed"]) - self.hass.block_till_done() + payload = json.dumps( + {"event_type": "test_event", "event_data": {}}, cls=JSONEncoder + ) + async_fire_mqtt_message(hass, sub_topic, payload) + await hass.async_block_till_done() - e_id = "entity.test_id" - event = {} - event["event_type"] = EVENT_STATE_CHANGED - new_state = {"state": "on", "entity_id": e_id, "attributes": {}} - event["event_data"] = {"new_state": new_state, "entity_id": e_id} + assert 1 == len(calls) - # Reset the mock because it will have already gotten calls for the - # mqtt_eventstream state change on initialization, etc. - mock_pub.reset_mock() - # Set a state of an entity - mock_state_change_event(self.hass, State(e_id, "on")) - self.hass.block_till_done() +async def test_ignored_event_doesnt_send_over_stream(hass, mqtt_mock): + """Test the ignoring of sending events if defined.""" + assert await add_eventstream(hass, pub_topic="bar", ignore_event=["state_changed"]) + await hass.async_block_till_done() - assert not mock_pub.called + e_id = "entity.test_id" + event = {} + event["event_type"] = EVENT_STATE_CHANGED + new_state = {"state": "on", "entity_id": e_id, "attributes": {}} + event["event_data"] = {"new_state": new_state, "entity_id": e_id} - @patch("homeassistant.components.mqtt.async_publish") - def test_wrong_ignored_event_sends_over_stream(self, mock_pub): - """Test the ignoring of sending events if defined.""" - assert self.add_eventstream(pub_topic="bar", ignore_event=["statee_changed"]) - self.hass.block_till_done() + # Reset the mock because it will have already gotten calls for the + # mqtt_eventstream state change on initialization, etc. + mqtt_mock.async_publish.reset_mock() - e_id = "entity.test_id" - event = {} - event["event_type"] = EVENT_STATE_CHANGED - new_state = {"state": "on", "entity_id": e_id, "attributes": {}} - event["event_data"] = {"new_state": new_state, "entity_id": e_id} + # Set a state of an entity + mock_state_change_event(hass, State(e_id, "on")) + await hass.async_block_till_done() - # Reset the mock because it will have already gotten calls for the - # mqtt_eventstream state change on initialization, etc. 
- mock_pub.reset_mock() + assert not mqtt_mock.async_publish.called - # Set a state of an entity - mock_state_change_event(self.hass, State(e_id, "on")) - self.hass.block_till_done() - assert mock_pub.called +async def test_wrong_ignored_event_sends_over_stream(hass, mqtt_mock): + """Test the ignoring of sending events if defined.""" + assert await add_eventstream(hass, pub_topic="bar", ignore_event=["statee_changed"]) + await hass.async_block_till_done() + + e_id = "entity.test_id" + event = {} + event["event_type"] = EVENT_STATE_CHANGED + new_state = {"state": "on", "entity_id": e_id, "attributes": {}} + event["event_data"] = {"new_state": new_state, "entity_id": e_id} + + # Reset the mock because it will have already gotten calls for the + # mqtt_eventstream state change on initialization, etc. + mqtt_mock.async_publish.reset_mock() + + # Set a state of an entity + mock_state_change_event(hass, State(e_id, "on")) + await hass.async_block_till_done() + await hass.async_block_till_done() + + assert mqtt_mock.async_publish.called diff --git a/tests/components/mqtt_json/test_device_tracker.py b/tests/components/mqtt_json/test_device_tracker.py index 864b3c232ed..d0f798c18ae 100644 --- a/tests/components/mqtt_json/test_device_tracker.py +++ b/tests/components/mqtt_json/test_device_tracker.py @@ -13,7 +13,7 @@ from homeassistant.const import CONF_PLATFORM from homeassistant.setup import async_setup_component from tests.async_mock import patch -from tests.common import async_fire_mqtt_message, async_mock_mqtt_component +from tests.common import async_fire_mqtt_message _LOGGER = logging.getLogger(__name__) @@ -28,9 +28,8 @@ LOCATION_MESSAGE_INCOMPLETE = {"longitude": 2.0} @pytest.fixture(autouse=True) -def setup_comp(hass): +def setup_comp(hass, mqtt_mock): """Initialize components.""" - hass.loop.run_until_complete(async_mock_mqtt_component(hass)) yaml_devices = hass.config.path(YAML_DEVICES) yield if os.path.isfile(yaml_devices): diff --git a/tests/components/mqtt_room/test_sensor.py b/tests/components/mqtt_room/test_sensor.py index 20aa34342d3..e17fbb4847d 100644 --- a/tests/components/mqtt_room/test_sensor.py +++ b/tests/components/mqtt_room/test_sensor.py @@ -9,7 +9,7 @@ from homeassistant.setup import async_setup_component from homeassistant.util import dt from tests.async_mock import patch -from tests.common import async_fire_mqtt_message, async_mock_mqtt_component +from tests.common import async_fire_mqtt_message DEVICE_ID = "123TESTMAC" NAME = "test_device" @@ -50,10 +50,8 @@ async def assert_distance(hass, distance): assert state.attributes.get("distance") == distance -async def test_room_update(hass): +async def test_room_update(hass, mqtt_mock): """Test the updating between rooms.""" - await async_mock_mqtt_component(hass) - assert await async_setup_component( hass, sensor.DOMAIN, diff --git a/tests/components/mqtt_statestream/test_init.py b/tests/components/mqtt_statestream/test_init.py index aa9ef0d5de8..a3b3c1ccfea 100644 --- a/tests/components/mqtt_statestream/test_init.py +++ b/tests/components/mqtt_statestream/test_init.py @@ -3,14 +3,10 @@ import pytest import homeassistant.components.mqtt_statestream as statestream from homeassistant.core import State -from homeassistant.setup import setup_component +from homeassistant.setup import async_setup_component -from tests.async_mock import ANY, call, patch -from tests.common import ( - get_test_home_assistant, - mock_mqtt_component, - mock_state_change_event, -) +from tests.async_mock import ANY, call +from tests.common import 
mock_state_change_event @pytest.fixture(autouse=True) @@ -18,360 +14,529 @@ def mock_storage(hass_storage): """Autouse hass_storage for the TestCase tests.""" -class TestMqttStateStream: - """Test the MQTT statestream module.""" - - def setup_method(self): - """Set up things to be run when tests are started.""" - self.hass = get_test_home_assistant() - self.mock_mqtt = mock_mqtt_component(self.hass) - - def teardown_method(self): - """Stop everything that was started.""" - self.hass.stop() - - def add_statestream( - self, - base_topic=None, - publish_attributes=None, - publish_timestamps=None, - publish_include=None, - publish_exclude=None, - ): - """Add a mqtt_statestream component.""" - config = {} - if base_topic: - config["base_topic"] = base_topic - if publish_attributes: - config["publish_attributes"] = publish_attributes - if publish_timestamps: - config["publish_timestamps"] = publish_timestamps - if publish_include: - config["include"] = publish_include - if publish_exclude: - config["exclude"] = publish_exclude - return setup_component( - self.hass, statestream.DOMAIN, {statestream.DOMAIN: config} - ) - - def test_fails_with_no_base(self): - """Setup should fail if no base_topic is set.""" - assert self.add_statestream() is False - - def test_setup_succeeds_without_attributes(self): - """Test the success of the setup with a valid base_topic.""" - assert self.add_statestream(base_topic="pub") - - def test_setup_succeeds_with_attributes(self): - """Test setup with a valid base_topic and publish_attributes.""" - assert self.add_statestream(base_topic="pub", publish_attributes=True) - - @patch("homeassistant.components.mqtt.async_publish") - @patch("homeassistant.core.dt_util.utcnow") - def test_state_changed_event_sends_message(self, mock_utcnow, mock_pub): - """Test the sending of a new message if event changed.""" - e_id = "fake.entity" - base_topic = "pub" - - # Add the statestream component for publishing state updates - assert self.add_statestream(base_topic=base_topic) - self.hass.block_till_done() - - # Reset the mock because it will have already gotten calls for the - # mqtt_statestream state change on initialization, etc. - mock_pub.reset_mock() - - # Set a state of an entity - mock_state_change_event(self.hass, State(e_id, "on")) - self.hass.block_till_done() - - # Make sure 'on' was published to pub/fake/entity/state - mock_pub.assert_called_with(self.hass, "pub/fake/entity/state", "on", 1, True) - assert mock_pub.called - - @patch("homeassistant.components.mqtt.async_publish") - @patch("homeassistant.core.dt_util.utcnow") - def test_state_changed_event_sends_message_and_timestamp( - self, mock_utcnow, mock_pub - ): - """Test the sending of a message and timestamps if event changed.""" - e_id = "another.entity" - base_topic = "pub" - - # Add the statestream component for publishing state updates - assert self.add_statestream( - base_topic=base_topic, publish_attributes=None, publish_timestamps=True - ) - self.hass.block_till_done() - - # Reset the mock because it will have already gotten calls for the - # mqtt_statestream state change on initialization, etc. 
- mock_pub.reset_mock() - - # Set a state of an entity - mock_state_change_event(self.hass, State(e_id, "on")) - self.hass.block_till_done() - - # Make sure 'on' was published to pub/fake/entity/state - calls = [ - call.async_publish(self.hass, "pub/another/entity/state", "on", 1, True), - call.async_publish( - self.hass, "pub/another/entity/last_changed", ANY, 1, True - ), - call.async_publish( - self.hass, "pub/another/entity/last_updated", ANY, 1, True - ), - ] - - mock_pub.assert_has_calls(calls, any_order=True) - assert mock_pub.called - - @patch("homeassistant.components.mqtt.async_publish") - @patch("homeassistant.core.dt_util.utcnow") - def test_state_changed_attr_sends_message(self, mock_utcnow, mock_pub): - """Test the sending of a new message if attribute changed.""" - e_id = "fake.entity" - base_topic = "pub" - - # Add the statestream component for publishing state updates - assert self.add_statestream(base_topic=base_topic, publish_attributes=True) - self.hass.block_till_done() - - # Reset the mock because it will have already gotten calls for the - # mqtt_statestream state change on initialization, etc. - mock_pub.reset_mock() - - test_attributes = {"testing": "YES", "list": ["a", "b", "c"], "bool": False} - - # Set a state of an entity - mock_state_change_event( - self.hass, State(e_id, "off", attributes=test_attributes) - ) - self.hass.block_till_done() - - # Make sure 'on' was published to pub/fake/entity/state - calls = [ - call.async_publish(self.hass, "pub/fake/entity/state", "off", 1, True), - call.async_publish(self.hass, "pub/fake/entity/testing", '"YES"', 1, True), - call.async_publish( - self.hass, "pub/fake/entity/list", '["a", "b", "c"]', 1, True - ), - call.async_publish(self.hass, "pub/fake/entity/bool", "false", 1, True), - ] - - mock_pub.assert_has_calls(calls, any_order=True) - assert mock_pub.called - - @patch("homeassistant.components.mqtt.async_publish") - @patch("homeassistant.core.dt_util.utcnow") - def test_state_changed_event_include_domain(self, mock_utcnow, mock_pub): - """Test that filtering on included domain works as expected.""" - base_topic = "pub" - - incl = {"domains": ["fake"]} - excl = {} - - # Add the statestream component for publishing state updates - # Set the filter to allow fake.* items - assert self.add_statestream( - base_topic=base_topic, publish_include=incl, publish_exclude=excl - ) - self.hass.block_till_done() - - # Reset the mock because it will have already gotten calls for the - # mqtt_statestream state change on initialization, etc. 
- mock_pub.reset_mock() - - # Set a state of an entity - mock_state_change_event(self.hass, State("fake.entity", "on")) - self.hass.block_till_done() - - # Make sure 'on' was published to pub/fake/entity/state - mock_pub.assert_called_with(self.hass, "pub/fake/entity/state", "on", 1, True) - assert mock_pub.called - - mock_pub.reset_mock() - # Set a state of an entity that shouldn't be included - mock_state_change_event(self.hass, State("fake2.entity", "on")) - self.hass.block_till_done() - - assert not mock_pub.called - - @patch("homeassistant.components.mqtt.async_publish") - @patch("homeassistant.core.dt_util.utcnow") - def test_state_changed_event_include_entity(self, mock_utcnow, mock_pub): - """Test that filtering on included entity works as expected.""" - base_topic = "pub" - - incl = {"entities": ["fake.entity"]} - excl = {} - - # Add the statestream component for publishing state updates - # Set the filter to allow fake.* items - assert self.add_statestream( - base_topic=base_topic, publish_include=incl, publish_exclude=excl - ) - self.hass.block_till_done() - - # Reset the mock because it will have already gotten calls for the - # mqtt_statestream state change on initialization, etc. - mock_pub.reset_mock() - - # Set a state of an entity - mock_state_change_event(self.hass, State("fake.entity", "on")) - self.hass.block_till_done() - - # Make sure 'on' was published to pub/fake/entity/state - mock_pub.assert_called_with(self.hass, "pub/fake/entity/state", "on", 1, True) - assert mock_pub.called - - mock_pub.reset_mock() - # Set a state of an entity that shouldn't be included - mock_state_change_event(self.hass, State("fake.entity2", "on")) - self.hass.block_till_done() - - assert not mock_pub.called - - @patch("homeassistant.components.mqtt.async_publish") - @patch("homeassistant.core.dt_util.utcnow") - def test_state_changed_event_exclude_domain(self, mock_utcnow, mock_pub): - """Test that filtering on excluded domain works as expected.""" - base_topic = "pub" - - incl = {} - excl = {"domains": ["fake2"]} - - # Add the statestream component for publishing state updates - # Set the filter to allow fake.* items - assert self.add_statestream( - base_topic=base_topic, publish_include=incl, publish_exclude=excl - ) - self.hass.block_till_done() - - # Reset the mock because it will have already gotten calls for the - # mqtt_statestream state change on initialization, etc. 
- mock_pub.reset_mock() - - # Set a state of an entity - mock_state_change_event(self.hass, State("fake.entity", "on")) - self.hass.block_till_done() - - # Make sure 'on' was published to pub/fake/entity/state - mock_pub.assert_called_with(self.hass, "pub/fake/entity/state", "on", 1, True) - assert mock_pub.called - - mock_pub.reset_mock() - # Set a state of an entity that shouldn't be included - mock_state_change_event(self.hass, State("fake2.entity", "on")) - self.hass.block_till_done() - - assert not mock_pub.called - - @patch("homeassistant.components.mqtt.async_publish") - @patch("homeassistant.core.dt_util.utcnow") - def test_state_changed_event_exclude_entity(self, mock_utcnow, mock_pub): - """Test that filtering on excluded entity works as expected.""" - base_topic = "pub" - - incl = {} - excl = {"entities": ["fake.entity2"]} - - # Add the statestream component for publishing state updates - # Set the filter to allow fake.* items - assert self.add_statestream( - base_topic=base_topic, publish_include=incl, publish_exclude=excl - ) - self.hass.block_till_done() - - # Reset the mock because it will have already gotten calls for the - # mqtt_statestream state change on initialization, etc. - mock_pub.reset_mock() - - # Set a state of an entity - mock_state_change_event(self.hass, State("fake.entity", "on")) - self.hass.block_till_done() - - # Make sure 'on' was published to pub/fake/entity/state - mock_pub.assert_called_with(self.hass, "pub/fake/entity/state", "on", 1, True) - assert mock_pub.called - - mock_pub.reset_mock() - # Set a state of an entity that shouldn't be included - mock_state_change_event(self.hass, State("fake.entity2", "on")) - self.hass.block_till_done() - - assert not mock_pub.called - - @patch("homeassistant.components.mqtt.async_publish") - @patch("homeassistant.core.dt_util.utcnow") - def test_state_changed_event_exclude_domain_include_entity( - self, mock_utcnow, mock_pub - ): - """Test filtering with excluded domain and included entity.""" - base_topic = "pub" - - incl = {"entities": ["fake.entity"]} - excl = {"domains": ["fake"]} - - # Add the statestream component for publishing state updates - # Set the filter to allow fake.* items - assert self.add_statestream( - base_topic=base_topic, publish_include=incl, publish_exclude=excl - ) - self.hass.block_till_done() - - # Reset the mock because it will have already gotten calls for the - # mqtt_statestream state change on initialization, etc. 
- mock_pub.reset_mock() - - # Set a state of an entity - mock_state_change_event(self.hass, State("fake.entity", "on")) - self.hass.block_till_done() - - # Make sure 'on' was published to pub/fake/entity/state - mock_pub.assert_called_with(self.hass, "pub/fake/entity/state", "on", 1, True) - assert mock_pub.called - - mock_pub.reset_mock() - # Set a state of an entity that shouldn't be included - mock_state_change_event(self.hass, State("fake.entity2", "on")) - self.hass.block_till_done() - - assert not mock_pub.called - - @patch("homeassistant.components.mqtt.async_publish") - @patch("homeassistant.core.dt_util.utcnow") - def test_state_changed_event_include_domain_exclude_entity( - self, mock_utcnow, mock_pub - ): - """Test filtering with included domain and excluded entity.""" - base_topic = "pub" - - incl = {"domains": ["fake"]} - excl = {"entities": ["fake.entity2"]} - - # Add the statestream component for publishing state updates - # Set the filter to allow fake.* items - assert self.add_statestream( - base_topic=base_topic, publish_include=incl, publish_exclude=excl - ) - self.hass.block_till_done() - - # Reset the mock because it will have already gotten calls for the - # mqtt_statestream state change on initialization, etc. - mock_pub.reset_mock() - - # Set a state of an entity - mock_state_change_event(self.hass, State("fake.entity", "on")) - self.hass.block_till_done() - - # Make sure 'on' was published to pub/fake/entity/state - mock_pub.assert_called_with(self.hass, "pub/fake/entity/state", "on", 1, True) - assert mock_pub.called - - mock_pub.reset_mock() - # Set a state of an entity that shouldn't be included - mock_state_change_event(self.hass, State("fake.entity2", "on")) - self.hass.block_till_done() - - assert not mock_pub.called +async def add_statestream( + hass, + base_topic=None, + publish_attributes=None, + publish_timestamps=None, + publish_include=None, + publish_exclude=None, +): + """Add a mqtt_statestream component.""" + config = {} + if base_topic: + config["base_topic"] = base_topic + if publish_attributes: + config["publish_attributes"] = publish_attributes + if publish_timestamps: + config["publish_timestamps"] = publish_timestamps + if publish_include: + config["include"] = publish_include + if publish_exclude: + config["exclude"] = publish_exclude + return await async_setup_component( + hass, statestream.DOMAIN, {statestream.DOMAIN: config} + ) + + +async def test_fails_with_no_base(hass, mqtt_mock): + """Setup should fail if no base_topic is set.""" + assert await add_statestream(hass) is False + + +async def test_setup_succeeds_without_attributes(hass, mqtt_mock): + """Test the success of the setup with a valid base_topic.""" + assert await add_statestream(hass, base_topic="pub") + + +async def test_setup_succeeds_with_attributes(hass, mqtt_mock): + """Test setup with a valid base_topic and publish_attributes.""" + assert await add_statestream(hass, base_topic="pub", publish_attributes=True) + + +async def test_state_changed_event_sends_message(hass, mqtt_mock): + """Test the sending of a new message if event changed.""" + e_id = "fake.entity" + base_topic = "pub" + + # Add the statestream component for publishing state updates + assert await add_statestream(hass, base_topic=base_topic) + await hass.async_block_till_done() + + # Reset the mock because it will have already gotten calls for the + # mqtt_statestream state change on initialization, etc. 
+ mqtt_mock.async_publish.reset_mock() + + # Set a state of an entity + mock_state_change_event(hass, State(e_id, "on")) + await hass.async_block_till_done() + await hass.async_block_till_done() + + # Make sure 'on' was published to pub/fake/entity/state + mqtt_mock.async_publish.assert_called_with("pub/fake/entity/state", "on", 1, True) + assert mqtt_mock.async_publish.called + + +async def test_state_changed_event_sends_message_and_timestamp(hass, mqtt_mock): + """Test the sending of a message and timestamps if event changed.""" + e_id = "another.entity" + base_topic = "pub" + + # Add the statestream component for publishing state updates + assert await add_statestream( + hass, base_topic=base_topic, publish_attributes=None, publish_timestamps=True + ) + await hass.async_block_till_done() + + # Reset the mock because it will have already gotten calls for the + # mqtt_statestream state change on initialization, etc. + mqtt_mock.async_publish.reset_mock() + + # Set a state of an entity + mock_state_change_event(hass, State(e_id, "on")) + await hass.async_block_till_done() + await hass.async_block_till_done() + + # Make sure 'on' was published to pub/fake/entity/state + calls = [ + call.async_publish("pub/another/entity/state", "on", 1, True), + call.async_publish("pub/another/entity/last_changed", ANY, 1, True), + call.async_publish("pub/another/entity/last_updated", ANY, 1, True), + ] + + mqtt_mock.async_publish.assert_has_calls(calls, any_order=True) + assert mqtt_mock.async_publish.called + + +async def test_state_changed_attr_sends_message(hass, mqtt_mock): + """Test the sending of a new message if attribute changed.""" + e_id = "fake.entity" + base_topic = "pub" + + # Add the statestream component for publishing state updates + assert await add_statestream(hass, base_topic=base_topic, publish_attributes=True) + await hass.async_block_till_done() + + # Reset the mock because it will have already gotten calls for the + # mqtt_statestream state change on initialization, etc. + mqtt_mock.async_publish.reset_mock() + + test_attributes = {"testing": "YES", "list": ["a", "b", "c"], "bool": False} + + # Set a state of an entity + mock_state_change_event(hass, State(e_id, "off", attributes=test_attributes)) + await hass.async_block_till_done() + await hass.async_block_till_done() + + # Make sure 'on' was published to pub/fake/entity/state + calls = [ + call.async_publish("pub/fake/entity/state", "off", 1, True), + call.async_publish("pub/fake/entity/testing", '"YES"', 1, True), + call.async_publish("pub/fake/entity/list", '["a", "b", "c"]', 1, True), + call.async_publish("pub/fake/entity/bool", "false", 1, True), + ] + + mqtt_mock.async_publish.assert_has_calls(calls, any_order=True) + assert mqtt_mock.async_publish.called + + +async def test_state_changed_event_include_domain(hass, mqtt_mock): + """Test that filtering on included domain works as expected.""" + base_topic = "pub" + + incl = {"domains": ["fake"]} + excl = {} + + # Add the statestream component for publishing state updates + # Set the filter to allow fake.* items + assert await add_statestream( + hass, base_topic=base_topic, publish_include=incl, publish_exclude=excl + ) + await hass.async_block_till_done() + + # Reset the mock because it will have already gotten calls for the + # mqtt_statestream state change on initialization, etc. 
+ mqtt_mock.async_publish.reset_mock() + + # Set a state of an entity + mock_state_change_event(hass, State("fake.entity", "on")) + await hass.async_block_till_done() + await hass.async_block_till_done() + + # Make sure 'on' was published to pub/fake/entity/state + mqtt_mock.async_publish.assert_called_with("pub/fake/entity/state", "on", 1, True) + assert mqtt_mock.async_publish.called + + mqtt_mock.async_publish.reset_mock() + # Set a state of an entity that shouldn't be included + mock_state_change_event(hass, State("fake2.entity", "on")) + await hass.async_block_till_done() + await hass.async_block_till_done() + + assert not mqtt_mock.async_publish.called + + +async def test_state_changed_event_include_entity(hass, mqtt_mock): + """Test that filtering on included entity works as expected.""" + base_topic = "pub" + + incl = {"entities": ["fake.entity"]} + excl = {} + + # Add the statestream component for publishing state updates + # Set the filter to allow fake.* items + assert await add_statestream( + hass, base_topic=base_topic, publish_include=incl, publish_exclude=excl + ) + await hass.async_block_till_done() + + # Reset the mock because it will have already gotten calls for the + # mqtt_statestream state change on initialization, etc. + mqtt_mock.async_publish.reset_mock() + + # Set a state of an entity + mock_state_change_event(hass, State("fake.entity", "on")) + await hass.async_block_till_done() + await hass.async_block_till_done() + + # Make sure 'on' was published to pub/fake/entity/state + mqtt_mock.async_publish.assert_called_with("pub/fake/entity/state", "on", 1, True) + assert mqtt_mock.async_publish.called + + mqtt_mock.async_publish.reset_mock() + # Set a state of an entity that shouldn't be included + mock_state_change_event(hass, State("fake.entity2", "on")) + await hass.async_block_till_done() + await hass.async_block_till_done() + + assert not mqtt_mock.async_publish.called + + +async def test_state_changed_event_exclude_domain(hass, mqtt_mock): + """Test that filtering on excluded domain works as expected.""" + base_topic = "pub" + + incl = {} + excl = {"domains": ["fake2"]} + + # Add the statestream component for publishing state updates + # Set the filter to allow fake.* items + assert await add_statestream( + hass, base_topic=base_topic, publish_include=incl, publish_exclude=excl + ) + await hass.async_block_till_done() + + # Reset the mock because it will have already gotten calls for the + # mqtt_statestream state change on initialization, etc. 
+ mqtt_mock.async_publish.reset_mock() + + # Set a state of an entity + mock_state_change_event(hass, State("fake.entity", "on")) + await hass.async_block_till_done() + await hass.async_block_till_done() + + # Make sure 'on' was published to pub/fake/entity/state + mqtt_mock.async_publish.assert_called_with("pub/fake/entity/state", "on", 1, True) + assert mqtt_mock.async_publish.called + + mqtt_mock.async_publish.reset_mock() + # Set a state of an entity that shouldn't be included + mock_state_change_event(hass, State("fake2.entity", "on")) + await hass.async_block_till_done() + await hass.async_block_till_done() + + assert not mqtt_mock.async_publish.called + + +async def test_state_changed_event_exclude_entity(hass, mqtt_mock): + """Test that filtering on excluded entity works as expected.""" + base_topic = "pub" + + incl = {} + excl = {"entities": ["fake.entity2"]} + + # Add the statestream component for publishing state updates + # Set the filter to allow fake.* items + assert await add_statestream( + hass, base_topic=base_topic, publish_include=incl, publish_exclude=excl + ) + await hass.async_block_till_done() + + # Reset the mock because it will have already gotten calls for the + # mqtt_statestream state change on initialization, etc. + mqtt_mock.async_publish.reset_mock() + + # Set a state of an entity + mock_state_change_event(hass, State("fake.entity", "on")) + await hass.async_block_till_done() + await hass.async_block_till_done() + + # Make sure 'on' was published to pub/fake/entity/state + mqtt_mock.async_publish.assert_called_with("pub/fake/entity/state", "on", 1, True) + assert mqtt_mock.async_publish.called + + mqtt_mock.async_publish.reset_mock() + # Set a state of an entity that shouldn't be included + mock_state_change_event(hass, State("fake.entity2", "on")) + await hass.async_block_till_done() + await hass.async_block_till_done() + + assert not mqtt_mock.async_publish.called + + +async def test_state_changed_event_exclude_domain_include_entity(hass, mqtt_mock): + """Test filtering with excluded domain and included entity.""" + base_topic = "pub" + + incl = {"entities": ["fake.entity"]} + excl = {"domains": ["fake"]} + + # Add the statestream component for publishing state updates + # Set the filter to allow fake.* items + assert await add_statestream( + hass, base_topic=base_topic, publish_include=incl, publish_exclude=excl + ) + await hass.async_block_till_done() + + # Reset the mock because it will have already gotten calls for the + # mqtt_statestream state change on initialization, etc. 
+ mqtt_mock.async_publish.reset_mock() + + # Set a state of an entity + mock_state_change_event(hass, State("fake.entity", "on")) + await hass.async_block_till_done() + await hass.async_block_till_done() + + # Make sure 'on' was published to pub/fake/entity/state + mqtt_mock.async_publish.assert_called_with("pub/fake/entity/state", "on", 1, True) + assert mqtt_mock.async_publish.called + + mqtt_mock.async_publish.reset_mock() + # Set a state of an entity that shouldn't be included + mock_state_change_event(hass, State("fake.entity2", "on")) + await hass.async_block_till_done() + await hass.async_block_till_done() + + assert not mqtt_mock.async_publish.called + + +async def test_state_changed_event_include_domain_exclude_entity(hass, mqtt_mock): + """Test filtering with included domain and excluded entity.""" + base_topic = "pub" + + incl = {"domains": ["fake"]} + excl = {"entities": ["fake.entity2"]} + + # Add the statestream component for publishing state updates + # Set the filter to allow fake.* items + assert await add_statestream( + hass, base_topic=base_topic, publish_include=incl, publish_exclude=excl + ) + await hass.async_block_till_done() + + # Reset the mock because it will have already gotten calls for the + # mqtt_statestream state change on initialization, etc. + mqtt_mock.async_publish.reset_mock() + + # Set a state of an entity + mock_state_change_event(hass, State("fake.entity", "on")) + await hass.async_block_till_done() + await hass.async_block_till_done() + + # Make sure 'on' was published to pub/fake/entity/state + mqtt_mock.async_publish.assert_called_with("pub/fake/entity/state", "on", 1, True) + assert mqtt_mock.async_publish.called + + mqtt_mock.async_publish.reset_mock() + # Set a state of an entity that shouldn't be included + mock_state_change_event(hass, State("fake.entity2", "on")) + await hass.async_block_till_done() + await hass.async_block_till_done() + + assert not mqtt_mock.async_publish.called + + +async def test_state_changed_event_include_globs(hass, mqtt_mock): + """Test that filtering on included glob works as expected.""" + base_topic = "pub" + + incl = {"entity_globs": ["*.included_*"]} + excl = {} + + # Add the statestream component for publishing state updates + # Set the filter to allow *.included_* items + assert await add_statestream( + hass, base_topic=base_topic, publish_include=incl, publish_exclude=excl + ) + await hass.async_block_till_done() + + # Reset the mock because it will have already gotten calls for the + # mqtt_statestream state change on initialization, etc. 
+ mqtt_mock.async_publish.reset_mock() + + # Set a state of an entity with included glob + mock_state_change_event(hass, State("fake2.included_entity", "on")) + await hass.async_block_till_done() + await hass.async_block_till_done() + + # Make sure 'on' was published to pub/fake2/included_entity/state + mqtt_mock.async_publish.assert_called_with( + "pub/fake2/included_entity/state", "on", 1, True + ) + assert mqtt_mock.async_publish.called + + mqtt_mock.async_publish.reset_mock() + # Set a state of an entity that shouldn't be included + mock_state_change_event(hass, State("fake2.entity", "on")) + await hass.async_block_till_done() + await hass.async_block_till_done() + + assert not mqtt_mock.async_publish.called + + +async def test_state_changed_event_exclude_globs(hass, mqtt_mock): + """Test that filtering on excluded globs works as expected.""" + base_topic = "pub" + + incl = {} + excl = {"entity_globs": ["*.excluded_*"]} + + # Add the statestream component for publishing state updates + # Set the filter to allow *.excluded_* items + assert await add_statestream( + hass, base_topic=base_topic, publish_include=incl, publish_exclude=excl + ) + await hass.async_block_till_done() + + # Reset the mock because it will have already gotten calls for the + # mqtt_statestream state change on initialization, etc. + mqtt_mock.async_publish.reset_mock() + + # Set a state of an entity + mock_state_change_event(hass, State("fake.entity", "on")) + await hass.async_block_till_done() + await hass.async_block_till_done() + + # Make sure 'on' was published to pub/fake/entity/state + mqtt_mock.async_publish.assert_called_with("pub/fake/entity/state", "on", 1, True) + assert mqtt_mock.async_publish.called + + mqtt_mock.async_publish.reset_mock() + # Set a state of an entity that shouldn't be included by glob + mock_state_change_event(hass, State("fake.excluded_entity", "on")) + await hass.async_block_till_done() + await hass.async_block_till_done() + + assert not mqtt_mock.async_publish.called + + +async def test_state_changed_event_exclude_domain_globs_include_entity(hass, mqtt_mock): + """Test filtering with excluded domain and glob and included entity.""" + base_topic = "pub" + + incl = {"entities": ["fake.entity"]} + excl = {"domains": ["fake"], "entity_globs": ["*.excluded_*"]} + + # Add the statestream component for publishing state updates + # Set the filter to exclude with include filter + assert await add_statestream( + hass, base_topic=base_topic, publish_include=incl, publish_exclude=excl + ) + await hass.async_block_till_done() + + # Reset the mock because it will have already gotten calls for the + # mqtt_statestream state change on initialization, etc. 
+ mqtt_mock.async_publish.reset_mock() + + # Set a state of an entity + mock_state_change_event(hass, State("fake.entity", "on")) + await hass.async_block_till_done() + await hass.async_block_till_done() + + # Make sure 'on' was published to pub/fake/entity/state + mqtt_mock.async_publish.assert_called_with("pub/fake/entity/state", "on", 1, True) + assert mqtt_mock.async_publish.called + + mqtt_mock.async_publish.reset_mock() + # Set a state of an entity that doesn't match any filters + mock_state_change_event(hass, State("fake2.included_entity", "on")) + await hass.async_block_till_done() + await hass.async_block_till_done() + + # Make sure 'on' was published to pub/fake/entity/state + mqtt_mock.async_publish.assert_called_with( + "pub/fake2/included_entity/state", "on", 1, True + ) + assert mqtt_mock.async_publish.called + + mqtt_mock.async_publish.reset_mock() + # Set a state of an entity that shouldn't be included by domain + mock_state_change_event(hass, State("fake.entity2", "on")) + await hass.async_block_till_done() + await hass.async_block_till_done() + + assert not mqtt_mock.async_publish.called + + mqtt_mock.async_publish.reset_mock() + # Set a state of an entity that shouldn't be included by glob + mock_state_change_event(hass, State("fake.excluded_entity", "on")) + await hass.async_block_till_done() + await hass.async_block_till_done() + + assert not mqtt_mock.async_publish.called + + +async def test_state_changed_event_include_domain_globs_exclude_entity(hass, mqtt_mock): + """Test filtering with included domain and glob and excluded entity.""" + base_topic = "pub" + + incl = {"domains": ["fake"], "entity_globs": ["*.included_*"]} + excl = {"entities": ["fake.entity2"]} + + # Add the statestream component for publishing state updates + # Set the filter to include with exclude filter + assert await add_statestream( + hass, base_topic=base_topic, publish_include=incl, publish_exclude=excl + ) + await hass.async_block_till_done() + + # Reset the mock because it will have already gotten calls for the + # mqtt_statestream state change on initialization, etc. 
+ mqtt_mock.async_publish.reset_mock() + + # Set a state of an entity included by domain + mock_state_change_event(hass, State("fake.entity", "on")) + await hass.async_block_till_done() + await hass.async_block_till_done() + + # Make sure 'on' was published to pub/fake/entity/state + mqtt_mock.async_publish.assert_called_with("pub/fake/entity/state", "on", 1, True) + assert mqtt_mock.async_publish.called + + mqtt_mock.async_publish.reset_mock() + # Set a state of an entity included by glob + mock_state_change_event(hass, State("fake.included_entity", "on")) + await hass.async_block_till_done() + await hass.async_block_till_done() + + # Make sure 'on' was published to pub/fake/entity/state + mqtt_mock.async_publish.assert_called_with( + "pub/fake/included_entity/state", "on", 1, True + ) + assert mqtt_mock.async_publish.called + + mqtt_mock.async_publish.reset_mock() + # Set a state of an entity that shouldn't be included + mock_state_change_event(hass, State("fake.entity2", "on")) + await hass.async_block_till_done() + await hass.async_block_till_done() + + assert not mqtt_mock.async_publish.called + + mqtt_mock.async_publish.reset_mock() + # Set a state of an entity that doesn't match any filters + mock_state_change_event(hass, State("fake2.entity", "on")) + await hass.async_block_till_done() + await hass.async_block_till_done() + + assert not mqtt_mock.async_publish.called diff --git a/tests/components/nextbus/test_sensor.py b/tests/components/nextbus/test_sensor.py index 74ea6cce127..4e7f0af8526 100644 --- a/tests/components/nextbus/test_sensor.py +++ b/tests/components/nextbus/test_sensor.py @@ -5,9 +5,10 @@ import pytest import homeassistant.components.nextbus.sensor as nextbus import homeassistant.components.sensor as sensor +from homeassistant.setup import async_setup_component from tests.async_mock import patch -from tests.common import assert_setup_component, async_setup_component +from tests.common import assert_setup_component VALID_AGENCY = "sf-muni" VALID_ROUTE = "F" diff --git a/tests/components/nsw_fuel_station/test_sensor.py b/tests/components/nsw_fuel_station/test_sensor.py index 861aa155f4f..d4e10ff5129 100644 --- a/tests/components/nsw_fuel_station/test_sensor.py +++ b/tests/components/nsw_fuel_station/test_sensor.py @@ -79,10 +79,7 @@ class TestNSWFuelStation(unittest.TestCase): """Set up things to be run when tests are started.""" self.hass = get_test_home_assistant() self.config = VALID_CONFIG - - def tearDown(self): - """Stop everything that was started.""" - self.hass.stop() + self.addCleanup(self.hass.stop) @patch( "homeassistant.components.nsw_fuel_station.sensor.FuelCheckClient", diff --git a/tests/components/nut/test_config_flow.py b/tests/components/nut/test_config_flow.py index 7eb0ac20184..5a2155441b5 100644 --- a/tests/components/nut/test_config_flow.py +++ b/tests/components/nut/test_config_flow.py @@ -1,4 +1,5 @@ """Test the Network UPS Tools (NUT) config flow.""" + from homeassistant import config_entries, data_entry_flow, setup from homeassistant.components.nut.const import DOMAIN from homeassistant.const import CONF_RESOURCES, CONF_SCAN_INTERVAL @@ -16,6 +17,59 @@ VALID_CONFIG = { } +async def test_form_zeroconf(hass): + """Test we can setup from zeroconf.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": config_entries.SOURCE_ZEROCONF}, + data={"host": "192.168.1.5", "port": 1234}, + ) + assert result["type"] == data_entry_flow.RESULT_TYPE_FORM + assert result["step_id"] == "user" + assert result["errors"] == {} + 
+ mock_pynut = _get_mock_pynutclient( + list_vars={"battery.voltage": "voltage", "ups.status": "OL"}, list_ups=["ups1"] + ) + + with patch( + "homeassistant.components.nut.PyNUTClient", return_value=mock_pynut, + ): + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], + {"username": "test-username", "password": "test-password"}, + ) + + assert result2["step_id"] == "resources" + assert result2["type"] == "form" + + with patch( + "homeassistant.components.nut.PyNUTClient", return_value=mock_pynut, + ), patch( + "homeassistant.components.nut.async_setup", return_value=True + ) as mock_setup, patch( + "homeassistant.components.nut.async_setup_entry", return_value=True, + ) as mock_setup_entry: + result3 = await hass.config_entries.flow.async_configure( + result2["flow_id"], + {"resources": ["battery.voltage", "ups.status", "ups.status.display"]}, + ) + + assert result3["type"] == "create_entry" + assert result3["title"] == "192.168.1.5:1234" + assert result3["data"] == { + "host": "192.168.1.5", + "password": "test-password", + "port": 1234, + "resources": ["battery.voltage", "ups.status", "ups.status.display"], + "username": "test-username", + } + assert result3["result"].unique_id is None + await hass.async_block_till_done() + assert len(mock_setup.mock_calls) == 1 + assert len(mock_setup_entry.mock_calls) == 1 + + async def test_form_user_one_ups(hass): """Test we get the form.""" await setup.async_setup_component(hass, "persistent_notification", {}) diff --git a/tests/components/nws/const.py b/tests/components/nws/const.py index 6dee20a0759..8b23f9cc850 100644 --- a/tests/components/nws/const.py +++ b/tests/components/nws/const.py @@ -60,7 +60,7 @@ EXPECTED_OBSERVATION_IMPERIAL = { ), ATTR_WEATHER_WIND_BEARING: 180, ATTR_WEATHER_WIND_SPEED: round( - convert_distance(10, LENGTH_METERS, LENGTH_MILES) * 3600 + convert_distance(10, LENGTH_KILOMETERS, LENGTH_MILES) ), ATTR_WEATHER_PRESSURE: round( convert_pressure(100000, PRESSURE_PA, PRESSURE_INHG), 2 @@ -74,9 +74,7 @@ EXPECTED_OBSERVATION_IMPERIAL = { EXPECTED_OBSERVATION_METRIC = { ATTR_WEATHER_TEMPERATURE: 10, ATTR_WEATHER_WIND_BEARING: 180, - ATTR_WEATHER_WIND_SPEED: round( - convert_distance(10, LENGTH_METERS, LENGTH_KILOMETERS) * 3600 - ), + ATTR_WEATHER_WIND_SPEED: 10, ATTR_WEATHER_PRESSURE: round(convert_pressure(100000, PRESSURE_PA, PRESSURE_HPA)), ATTR_WEATHER_VISIBILITY: round( convert_distance(10000, LENGTH_METERS, LENGTH_KILOMETERS) diff --git a/tests/components/nx584/test_binary_sensor.py b/tests/components/nx584/test_binary_sensor.py index a02a4fbe3af..89892bb06f0 100644 --- a/tests/components/nx584/test_binary_sensor.py +++ b/tests/components/nx584/test_binary_sensor.py @@ -36,8 +36,9 @@ class TestNX584SensorSetup(unittest.TestCase): client = nx584_client.Client.return_value client.list_zones.return_value = self.fake_zones client.get_version.return_value = "1.1" + self.addCleanup(self.tear_down_cleanup) - def tearDown(self): + def tear_down_cleanup(self): """Stop everything that was started.""" self.hass.stop() self._mock_client.stop() @@ -134,6 +135,7 @@ class TestNX584ZoneSensor(unittest.TestCase): assert "foo" == sensor.name assert not sensor.should_poll assert sensor.is_on + assert sensor.device_state_attributes["zone_number"] == 1 zone["state"] = False assert not sensor.is_on diff --git a/tests/components/onboarding/test_views.py b/tests/components/onboarding/test_views.py index 7deda0e7edc..0d425642622 100644 --- a/tests/components/onboarding/test_views.py +++ 
b/tests/components/onboarding/test_views.py @@ -11,7 +11,7 @@ from homeassistant.setup import async_setup_component from . import mock_storage from tests.async_mock import patch -from tests.common import CLIENT_ID, register_auth_provider +from tests.common import CLIENT_ID, CLIENT_REDIRECT_URI, register_auth_provider from tests.components.met.conftest import mock_weather # noqa: F401 @@ -192,7 +192,8 @@ async def test_onboarding_integration(hass, hass_storage, hass_client): client = await hass_client() resp = await client.post( - "/api/onboarding/integration", json={"client_id": CLIENT_ID} + "/api/onboarding/integration", + json={"client_id": CLIENT_ID, "redirect_uri": CLIENT_REDIRECT_URI}, ) assert resp.status == 200 @@ -217,6 +218,35 @@ async def test_onboarding_integration(hass, hass_storage, hass_client): await hass.auth.async_validate_access_token(tokens["access_token"]) is not None ) + # Onboarding refresh token and new refresh token + for user in await hass.auth.async_get_users(): + assert len(user.refresh_tokens) == 2, user + + +async def test_onboarding_integration_invalid_redirect_uri( + hass, hass_storage, hass_client +): + """Test finishing integration step.""" + mock_storage(hass_storage, {"done": [const.STEP_USER]}) + + assert await async_setup_component(hass, "onboarding", {}) + + client = await hass_client() + + resp = await client.post( + "/api/onboarding/integration", + json={"client_id": CLIENT_ID, "redirect_uri": "http://invalid-redirect.uri"}, + ) + + assert resp.status == 400 + + # We will still mark the last step as done because there is nothing left. + assert const.STEP_INTEGRATION in hass_storage[const.DOMAIN]["data"]["done"] + + # Only refresh token from onboarding should be there + for user in await hass.auth.async_get_users(): + assert len(user.refresh_tokens) == 1, user + async def test_onboarding_integration_requires_auth(hass, hass_storage, aiohttp_client): """Test finishing integration step.""" diff --git a/tests/components/openhardwaremonitor/test_sensor.py b/tests/components/openhardwaremonitor/test_sensor.py index db44216c535..6c8776f740d 100644 --- a/tests/components/openhardwaremonitor/test_sensor.py +++ b/tests/components/openhardwaremonitor/test_sensor.py @@ -21,8 +21,9 @@ class TestOpenHardwareMonitorSetup(unittest.TestCase): "port": 8085, } } + self.addCleanup(self.tear_down_cleanup) - def tearDown(self): # pylint: disable=invalid-name + def tear_down_cleanup(self): """Stop everything that was started.""" self.hass.stop() diff --git a/tests/components/owntracks/test_device_tracker.py b/tests/components/owntracks/test_device_tracker.py index d71f0fe0aee..7f066346d5a 100644 --- a/tests/components/owntracks/test_device_tracker.py +++ b/tests/components/owntracks/test_device_tracker.py @@ -8,12 +8,7 @@ from homeassistant.const import STATE_NOT_HOME from homeassistant.setup import async_setup_component from tests.async_mock import patch -from tests.common import ( - MockConfigEntry, - async_fire_mqtt_message, - async_mock_mqtt_component, - mock_coro, -) +from tests.common import MockConfigEntry, async_fire_mqtt_message, mock_coro USER = "greg" DEVICE = "phone" @@ -286,13 +281,12 @@ BAD_JSON_SUFFIX = "** and it ends here ^^" @pytest.fixture -def setup_comp(hass, mock_device_tracker_conf): +def setup_comp(hass, mock_device_tracker_conf, mqtt_mock): """Initialize components.""" assert hass.loop.run_until_complete( async_setup_component(hass, "persistent_notification", {}) ) hass.loop.run_until_complete(async_setup_component(hass, "device_tracker", {})) - 
hass.loop.run_until_complete(async_mock_mqtt_component(hass)) hass.states.async_set("zone.inner", "zoning", INNER_ZONE) diff --git a/tests/components/panel_custom/test_init.py b/tests/components/panel_custom/test_init.py index c2abd673065..caa55749c50 100644 --- a/tests/components/panel_custom/test_init.py +++ b/tests/components/panel_custom/test_init.py @@ -30,19 +30,22 @@ async def test_webcomponent_custom_path_not_found(hass): assert "nice_url" not in panels -async def test_webcomponent_custom_path(hass): +async def test_webcomponent_custom_path(hass, caplog): """Test if a web component is found in config panels dir.""" filename = "mock.file" config = { - "panel_custom": { - "name": "todo-mvc", - "webcomponent_path": filename, - "sidebar_title": "Sidebar Title", - "sidebar_icon": "mdi:iconicon", - "url_path": "nice_url", - "config": {"hello": "world"}, - } + "panel_custom": [ + { + "name": "todo-mvc", + "webcomponent_path": filename, + "sidebar_title": "Sidebar Title", + "sidebar_icon": "mdi:iconicon", + "url_path": "nice_url", + "config": {"hello": "world"}, + }, + {"name": "todo-mvc"}, + ] } with patch("os.path.isfile", Mock(return_value=True)): @@ -70,6 +73,8 @@ async def test_webcomponent_custom_path(hass): assert panel.sidebar_icon == "mdi:iconicon" assert panel.sidebar_title == "Sidebar Title" + assert "Got HTML panel with duplicate name todo-mvc. Not registering" in caplog.text + async def test_js_webcomponent(hass): """Test if a web component is found in config panels dir.""" @@ -151,16 +156,41 @@ async def test_module_webcomponent(hass): assert panel.sidebar_title == "Sidebar Title" +async def test_latest_and_es5_build(hass): + """Test specifying an es5 and latest build.""" + config = { + "panel_custom": { + "name": "todo-mvc", + "js_url": "/local/es5.js", + "module_url": "/local/latest.js", + "url_path": "nice_url", + } + } + + assert await setup.async_setup_component(hass, "panel_custom", config) + + panels = hass.data.get(frontend.DATA_PANELS, {}) + + assert panels + assert "nice_url" in panels + + panel = panels["nice_url"] + + assert panel.config == { + "_panel_custom": { + "name": "todo-mvc", + "js_url": "/local/es5.js", + "module_url": "/local/latest.js", + "embed_iframe": False, + "trust_external": False, + }, + } + assert panel.frontend_url_path == "nice_url" + + async def test_url_option_conflict(hass): """Test config with multiple url options.""" to_try = [ - { - "panel_custom": { - "name": "todo-mvc", - "module_url": "/local/bla.js", - "js_url": "/local/bla.js", - } - }, { "panel_custom": { "name": "todo-mvc", diff --git a/tests/components/panel_iframe/test_init.py b/tests/components/panel_iframe/test_init.py index b38f3c4b1fa..7d42c4d03d3 100644 --- a/tests/components/panel_iframe/test_init.py +++ b/tests/components/panel_iframe/test_init.py @@ -14,10 +14,7 @@ class TestPanelIframe(unittest.TestCase): def setUp(self): """Set up things to be run when tests are started.""" self.hass = get_test_home_assistant() - - def tearDown(self): - """Stop everything that was started.""" - self.hass.stop() + self.addCleanup(self.hass.stop) def test_wrong_config(self): """Test setup with wrong configuration.""" diff --git a/tests/components/pi_hole/test_init.py b/tests/components/pi_hole/test_init.py index 73a501c74ce..088b56d75b9 100644 --- a/tests/components/pi_hole/test_init.py +++ b/tests/components/pi_hole/test_init.py @@ -1,11 +1,14 @@ """Test pi_hole component.""" from homeassistant.components import pi_hole +from homeassistant.components.pi_hole.const import 
MIN_TIME_BETWEEN_UPDATES +from homeassistant.setup import async_setup_component +from homeassistant.util import dt as dt_util from . import _create_mocked_hole, _patch_config_flow_hole from tests.async_mock import patch -from tests.common import async_setup_component +from tests.common import async_fire_time_changed def _patch_init_hole(mocked_hole): @@ -138,3 +141,23 @@ async def test_enable_service_call(hass): await hass.async_block_till_done() assert mocked_hole.enable.call_count == 2 + + +async def test_update_coordinator(hass): + """Test update coordinator.""" + mocked_hole = _create_mocked_hole() + sensor_entity_id = "sensor.pi_hole_ads_blocked_today" + with _patch_config_flow_hole(mocked_hole), _patch_init_hole(mocked_hole): + assert await async_setup_component( + hass, pi_hole.DOMAIN, {pi_hole.DOMAIN: [{"host": "pi.hole"}]} + ) + await hass.async_block_till_done() + assert mocked_hole.get_data.call_count == 3 + assert hass.states.get(sensor_entity_id).state == "0" + + mocked_hole.data["ads_blocked_today"] = 1 + utcnow = dt_util.utcnow() + async_fire_time_changed(hass, utcnow + MIN_TIME_BETWEEN_UPDATES) + await hass.async_block_till_done() + assert mocked_hole.get_data.call_count == 4 + assert hass.states.get(sensor_entity_id).state == "1" diff --git a/tests/components/pilight/test_init.py b/tests/components/pilight/test_init.py index 53b1ec3a94d..5c2acc0d8f2 100644 --- a/tests/components/pilight/test_init.py +++ b/tests/components/pilight/test_init.py @@ -71,8 +71,9 @@ class TestPilight(unittest.TestCase): """Set up things to be run when tests are started.""" self.hass = get_test_home_assistant() self.skip_teardown_stop = False + self.addCleanup(self.tear_down_cleanup) - def tearDown(self): + def tear_down_cleanup(self): """Stop everything that was started.""" if not self.skip_teardown_stop: self.hass.stop() @@ -374,10 +375,7 @@ class TestPilightCallrateThrottler(unittest.TestCase): def setUp(self): # pylint: disable=invalid-name """Set up things to be run when tests are started.""" self.hass = get_test_home_assistant() - - def tearDown(self): - """Stop everything that was started.""" - self.hass.stop() + self.addCleanup(self.hass.stop) def test_call_rate_delay_throttle_disabled(self): """Test that the limiter is a noop if no delay set.""" diff --git a/tests/components/plex/mock_classes.py b/tests/components/plex/mock_classes.py index 2d69801e797..93cb2c5bbee 100644 --- a/tests/components/plex/mock_classes.py +++ b/tests/components/plex/mock_classes.py @@ -38,28 +38,37 @@ class MockGDM: class MockResource: """Mock a PlexAccount resource.""" - def __init__(self, index): + def __init__(self, index, kind="server"): """Initialize the object.""" - self.name = MOCK_SERVERS[index][CONF_SERVER] - self.clientIdentifier = MOCK_SERVERS[index][ # pylint: disable=invalid-name - CONF_SERVER_IDENTIFIER - ] - self.provides = ["server"] - self._mock_plex_server = MockPlexServer(index) + if kind == "server": + self.name = MOCK_SERVERS[index][CONF_SERVER] + self.clientIdentifier = MOCK_SERVERS[index][ # pylint: disable=invalid-name + CONF_SERVER_IDENTIFIER + ] + self.provides = ["server"] + self.device = MockPlexServer(index) + else: + self.name = f"plex.tv Resource Player {index+10}" + self.clientIdentifier = f"client-{index+10}" + self.provides = ["player"] + self.device = MockPlexClient(f"http://192.168.0.1{index}:32500", index + 10) + self.presence = index == 0 def connect(self, timeout): """Mock the resource connect method.""" - return self._mock_plex_server + return self.device class 
MockPlexAccount: """Mock a PlexAccount instance.""" - def __init__(self, servers=1): + def __init__(self, servers=1, players=3): """Initialize the object.""" self._resources = [] for index in range(servers): self._resources.append(MockResource(index)) + for index in range(players): + self._resources.append(MockResource(index, kind="player")) def resource(self, name): """Mock the PlexAccount resource lookup method.""" @@ -240,6 +249,11 @@ class MockPlexSession: """Mock the duration attribute.""" return 10000000 + @property + def librarySectionID(self): + """Mock the librarySectionID attribute.""" + return 1 + @property def ratingKey(self): """Mock the ratingKey attribute.""" diff --git a/tests/components/plex/test_config_flow.py b/tests/components/plex/test_config_flow.py index 4ffea576514..125367a32f6 100644 --- a/tests/components/plex/test_config_flow.py +++ b/tests/components/plex/test_config_flow.py @@ -367,8 +367,8 @@ async def test_option_flow(hass): ) with patch("plexapi.server.PlexServer", return_value=mock_plex_server), patch( - "homeassistant.components.plex.PlexWebsocket.listen" - ) as mock_listen: + "plexapi.myplex.MyPlexAccount", return_value=MockPlexAccount() + ), patch("homeassistant.components.plex.PlexWebsocket.listen") as mock_listen: entry.add_to_hass(hass) assert await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() @@ -417,8 +417,8 @@ async def test_missing_option_flow(hass): ) with patch("plexapi.server.PlexServer", return_value=mock_plex_server), patch( - "homeassistant.components.plex.PlexWebsocket.listen" - ) as mock_listen: + "plexapi.myplex.MyPlexAccount", return_value=MockPlexAccount() + ), patch("homeassistant.components.plex.PlexWebsocket.listen") as mock_listen: entry.add_to_hass(hass) assert await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() @@ -471,16 +471,17 @@ async def test_option_flow_new_users_available(hass, caplog): mock_plex_server = MockPlexServer(config_entry=entry) with patch("plexapi.server.PlexServer", return_value=mock_plex_server), patch( - "homeassistant.components.plex.PlexWebsocket.listen" - ): + "plexapi.myplex.MyPlexAccount", return_value=MockPlexAccount() + ), patch("homeassistant.components.plex.PlexWebsocket.listen"): entry.add_to_hass(hass) assert await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() server_id = mock_plex_server.machineIdentifier - async_dispatcher_send(hass, PLEX_UPDATE_PLATFORMS_SIGNAL.format(server_id)) - await hass.async_block_till_done() + with patch("plexapi.myplex.MyPlexAccount", return_value=MockPlexAccount()): + async_dispatcher_send(hass, PLEX_UPDATE_PLATFORMS_SIGNAL.format(server_id)) + await hass.async_block_till_done() monitored_users = hass.data[DOMAIN][SERVERS][server_id].option_monitored_users @@ -740,6 +741,8 @@ async def test_setup_with_limited_credentials(hass): ), patch.object( mock_plex_server, "systemAccounts", side_effect=plexapi.exceptions.Unauthorized ) as mock_accounts, patch( + "plexapi.myplex.MyPlexAccount", return_value=MockPlexAccount() + ), patch( "homeassistant.components.plex.PlexWebsocket.listen" ) as mock_listen: entry.add_to_hass(hass) diff --git a/tests/components/plex/test_init.py b/tests/components/plex/test_init.py index 461efe9d320..76b1138fc06 100644 --- a/tests/components/plex/test_init.py +++ b/tests/components/plex/test_init.py @@ -13,7 +13,7 @@ from homeassistant.config_entries import ( ENTRY_STATE_SETUP_ERROR, ENTRY_STATE_SETUP_RETRY, ) -from homeassistant.const 
import CONF_URL, CONF_VERIFY_SSL +from homeassistant.const import CONF_TOKEN, CONF_URL, CONF_VERIFY_SSL from homeassistant.helpers.dispatcher import async_dispatcher_send import homeassistant.util.dt as dt_util @@ -115,8 +115,8 @@ async def test_set_config_entry_unique_id(hass): ) with patch("plexapi.server.PlexServer", return_value=mock_plex_server), patch( - "homeassistant.components.plex.PlexWebsocket.listen" - ) as mock_listen: + "plexapi.myplex.MyPlexAccount", return_value=MockPlexAccount() + ), patch("homeassistant.components.plex.PlexWebsocket.listen") as mock_listen: entry.add_to_hass(hass) assert await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() @@ -181,8 +181,8 @@ async def test_setup_with_insecure_config_entry(hass): ) with patch("plexapi.server.PlexServer", return_value=mock_plex_server), patch( - "homeassistant.components.plex.PlexWebsocket.listen" - ) as mock_listen: + "plexapi.myplex.MyPlexAccount", return_value=MockPlexAccount() + ), patch("homeassistant.components.plex.PlexWebsocket.listen") as mock_listen: entry.add_to_hass(hass) assert await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() @@ -210,8 +210,8 @@ async def test_unload_config_entry(hass): assert entry is config_entries[0] with patch("plexapi.server.PlexServer", return_value=mock_plex_server), patch( - "homeassistant.components.plex.PlexWebsocket.listen" - ) as mock_listen: + "plexapi.myplex.MyPlexAccount", return_value=MockPlexAccount() + ), patch("homeassistant.components.plex.PlexWebsocket.listen") as mock_listen: assert await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() assert mock_listen.called @@ -243,8 +243,8 @@ async def test_setup_with_photo_session(hass): ) with patch("plexapi.server.PlexServer", return_value=mock_plex_server), patch( - "homeassistant.components.plex.PlexWebsocket.listen" - ): + "plexapi.myplex.MyPlexAccount", return_value=MockPlexAccount() + ), patch("homeassistant.components.plex.PlexWebsocket.listen"): entry.add_to_hass(hass) assert await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() @@ -290,10 +290,33 @@ async def test_setup_when_certificate_changed(hass): new_entry = MockConfigEntry(domain=const.DOMAIN, data=DEFAULT_DATA) + # Test with account failure + with patch( + "plexapi.server.PlexServer", side_effect=WrongCertHostnameException + ), patch( + "plexapi.myplex.MyPlexAccount", side_effect=plexapi.exceptions.Unauthorized + ): + old_entry.add_to_hass(hass) + assert await hass.config_entries.async_setup(old_entry.entry_id) is False + await hass.async_block_till_done() + + assert old_entry.state == ENTRY_STATE_SETUP_ERROR + await hass.config_entries.async_unload(old_entry.entry_id) + + # Test with no servers found + with patch( + "plexapi.server.PlexServer", side_effect=WrongCertHostnameException + ), patch("plexapi.myplex.MyPlexAccount", return_value=MockPlexAccount(servers=0)): + assert await hass.config_entries.async_setup(old_entry.entry_id) is False + await hass.async_block_till_done() + + assert old_entry.state == ENTRY_STATE_SETUP_ERROR + await hass.config_entries.async_unload(old_entry.entry_id) + + # Test with success with patch( "plexapi.server.PlexServer", side_effect=WrongCertHostnameException ), patch("plexapi.myplex.MyPlexAccount", return_value=MockPlexAccount()): - old_entry.add_to_hass(hass) assert await hass.config_entries.async_setup(old_entry.entry_id) await hass.async_block_till_done() @@ -304,3 +327,32 @@ async def 
test_setup_when_certificate_changed(hass): old_entry.data[const.PLEX_SERVER_CONFIG][CONF_URL] == new_entry.data[const.PLEX_SERVER_CONFIG][CONF_URL] ) + + +async def test_tokenless_server(hass): + """Test setup with a server with token auth disabled.""" + mock_plex_server = MockPlexServer() + + TOKENLESS_DATA = copy.deepcopy(DEFAULT_DATA) + TOKENLESS_DATA[const.PLEX_SERVER_CONFIG].pop(CONF_TOKEN, None) + + entry = MockConfigEntry( + domain=const.DOMAIN, + data=TOKENLESS_DATA, + options=DEFAULT_OPTIONS, + unique_id=DEFAULT_DATA["server_id"], + ) + + with patch("plexapi.server.PlexServer", return_value=mock_plex_server), patch( + "plexapi.myplex.MyPlexAccount", side_effect=plexapi.exceptions.Unauthorized + ), patch("homeassistant.components.plex.PlexWebsocket.listen"): + entry.add_to_hass(hass) + assert await hass.config_entries.async_setup(entry.entry_id) + await hass.async_block_till_done() + + assert entry.state == ENTRY_STATE_LOADED + + server_id = mock_plex_server.machineIdentifier + + async_dispatcher_send(hass, const.PLEX_UPDATE_PLATFORMS_SIGNAL.format(server_id)) + await hass.async_block_till_done() diff --git a/tests/components/plex/test_media_players.py b/tests/components/plex/test_media_players.py new file mode 100644 index 00000000000..d3e2de91cf9 --- /dev/null +++ b/tests/components/plex/test_media_players.py @@ -0,0 +1,86 @@ +"""Tests for Plex media_players.""" +from plexapi.exceptions import NotFound + +from homeassistant.components.plex.const import DOMAIN, SERVERS + +from .const import DEFAULT_DATA, DEFAULT_OPTIONS +from .mock_classes import MockPlexAccount, MockPlexServer + +from tests.async_mock import patch +from tests.common import MockConfigEntry + + +async def test_plex_tv_clients(hass): + """Test getting Plex clients from plex.tv.""" + entry = MockConfigEntry( + domain=DOMAIN, + data=DEFAULT_DATA, + options=DEFAULT_OPTIONS, + unique_id=DEFAULT_DATA["server_id"], + ) + + mock_plex_server = MockPlexServer(config_entry=entry) + mock_plex_account = MockPlexAccount() + + with patch("plexapi.server.PlexServer", return_value=mock_plex_server), patch( + "plexapi.myplex.MyPlexAccount", return_value=mock_plex_account + ), patch("homeassistant.components.plex.PlexWebsocket.listen"): + entry.add_to_hass(hass) + assert await hass.config_entries.async_setup(entry.entry_id) + await hass.async_block_till_done() + + server_id = mock_plex_server.machineIdentifier + plex_server = hass.data[DOMAIN][SERVERS][server_id] + + resource = next( + x + for x in mock_plex_account.resources() + if x.name.startswith("plex.tv Resource Player") + ) + with patch.object(resource, "connect", side_effect=NotFound): + await plex_server._async_update_platforms() + await hass.async_block_till_done() + + media_players_before = len(hass.states.async_entity_ids("media_player")) + + # Ensure one more client is discovered + await hass.config_entries.async_unload(entry.entry_id) + + with patch("plexapi.server.PlexServer", return_value=mock_plex_server), patch( + "plexapi.myplex.MyPlexAccount", return_value=mock_plex_account + ), patch("homeassistant.components.plex.PlexWebsocket.listen"): + assert await hass.config_entries.async_setup(entry.entry_id) + await hass.async_block_till_done() + + plex_server = hass.data[DOMAIN][SERVERS][server_id] + + await plex_server._async_update_platforms() + await hass.async_block_till_done() + + media_players_after = len(hass.states.async_entity_ids("media_player")) + assert media_players_after == media_players_before + 1 + + # Ensure only plex.tv resource client is found + await 
hass.config_entries.async_unload(entry.entry_id) + + mock_plex_server.clear_clients() + mock_plex_server.clear_sessions() + + with patch("plexapi.server.PlexServer", return_value=mock_plex_server), patch( + "plexapi.myplex.MyPlexAccount", return_value=mock_plex_account + ), patch("homeassistant.components.plex.PlexWebsocket.listen"): + assert await hass.config_entries.async_setup(entry.entry_id) + await hass.async_block_till_done() + + plex_server = hass.data[DOMAIN][SERVERS][server_id] + + await plex_server._async_update_platforms() + await hass.async_block_till_done() + + assert len(hass.states.async_entity_ids("media_player")) == 1 + + # Ensure cache gets called + await plex_server._async_update_platforms() + await hass.async_block_till_done() + + assert len(hass.states.async_entity_ids("media_player")) == 1 diff --git a/tests/components/plex/test_playback.py b/tests/components/plex/test_playback.py index 7a90d8dfad8..dafc8720ab1 100644 --- a/tests/components/plex/test_playback.py +++ b/tests/components/plex/test_playback.py @@ -28,8 +28,8 @@ async def test_sonos_playback(hass): mock_plex_server = MockPlexServer(config_entry=entry) with patch("plexapi.server.PlexServer", return_value=mock_plex_server), patch( - "homeassistant.components.plex.PlexWebsocket.listen" - ): + "plexapi.myplex.MyPlexAccount", return_value=MockPlexAccount() + ), patch("homeassistant.components.plex.PlexWebsocket.listen"): entry.add_to_hass(hass) assert await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() @@ -37,10 +37,6 @@ async def test_sonos_playback(hass): server_id = mock_plex_server.machineIdentifier loaded_server = hass.data[DOMAIN][SERVERS][server_id] - with patch("plexapi.myplex.MyPlexAccount", return_value=MockPlexAccount()): - # Access and cache PlexAccount - assert loaded_server.account - # Test Sonos integration lookup failure with patch.object( hass.components.sonos, "get_coordinator_id", side_effect=HomeAssistantError diff --git a/tests/components/plex/test_server.py b/tests/components/plex/test_server.py index 6831b045da6..5cd0d13e90c 100644 --- a/tests/components/plex/test_server.py +++ b/tests/components/plex/test_server.py @@ -26,6 +26,7 @@ from homeassistant.helpers.dispatcher import async_dispatcher_send from .const import DEFAULT_DATA, DEFAULT_OPTIONS from .mock_classes import ( + MockPlexAccount, MockPlexArtist, MockPlexLibrary, MockPlexLibrarySection, @@ -54,8 +55,8 @@ async def test_new_users_available(hass): mock_plex_server = MockPlexServer(config_entry=entry) with patch("plexapi.server.PlexServer", return_value=mock_plex_server), patch( - "homeassistant.components.plex.PlexWebsocket.listen" - ): + "plexapi.myplex.MyPlexAccount", return_value=MockPlexAccount() + ), patch("homeassistant.components.plex.PlexWebsocket.listen"): entry.add_to_hass(hass) assert await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() @@ -93,8 +94,8 @@ async def test_new_ignored_users_available(hass, caplog): mock_plex_server = MockPlexServer(config_entry=entry) with patch("plexapi.server.PlexServer", return_value=mock_plex_server), patch( - "homeassistant.components.plex.PlexWebsocket.listen" - ): + "plexapi.myplex.MyPlexAccount", return_value=MockPlexAccount() + ), patch("homeassistant.components.plex.PlexWebsocket.listen"): entry.add_to_hass(hass) assert await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() @@ -245,8 +246,8 @@ async def test_ignore_plex_web_client(hass): mock_plex_server = 
MockPlexServer(config_entry=entry) with patch("plexapi.server.PlexServer", return_value=mock_plex_server), patch( - "homeassistant.components.plex.PlexWebsocket.listen" - ): + "plexapi.myplex.MyPlexAccount", return_value=MockPlexAccount(players=0) + ), patch("homeassistant.components.plex.PlexWebsocket.listen"): entry.add_to_hass(hass) assert await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() @@ -277,8 +278,8 @@ async def test_media_lookups(hass): mock_plex_server = MockPlexServer(config_entry=entry) with patch("plexapi.server.PlexServer", return_value=mock_plex_server), patch( - "homeassistant.components.plex.PlexWebsocket.listen" - ): + "plexapi.myplex.MyPlexAccount", return_value=MockPlexAccount() + ), patch("homeassistant.components.plex.PlexWebsocket.listen"): entry.add_to_hass(hass) assert await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() @@ -289,6 +290,7 @@ async def test_media_lookups(hass): # Plex Key searches async_dispatcher_send(hass, PLEX_UPDATE_PLATFORMS_SIGNAL.format(server_id)) await hass.async_block_till_done() + media_player_id = hass.states.async_entity_ids("media_player")[0] with patch("homeassistant.components.plex.PlexServer.create_playqueue"): assert await hass.services.async_call( diff --git a/tests/components/prometheus/test_init.py b/tests/components/prometheus/test_init.py index 4539948cc5a..ca86658a88f 100644 --- a/tests/components/prometheus/test_init.py +++ b/tests/components/prometheus/test_init.py @@ -1,4 +1,6 @@ """The tests for the Prometheus exporter.""" +from collections import namedtuple + import pytest from homeassistant import setup @@ -10,9 +12,15 @@ from homeassistant.const import ( DEGREE, DEVICE_CLASS_POWER, ENERGY_KILO_WATT_HOUR, + EVENT_STATE_CHANGED, ) +from homeassistant.core import split_entity_id from homeassistant.setup import async_setup_component +import tests.async_mock as mock + +PROMETHEUS_PATH = "homeassistant.components.prometheus" + @pytest.fixture async def prometheus_client(loop, hass, hass_client): @@ -139,3 +147,171 @@ async def test_view(prometheus_client): # pylint: disable=redefined-outer-name 'entity="sensor.sps30_pm_1um_weight_concentration",' 'friendly_name="SPS30 PM <1µm Weight concentration"} 3.7069' in body ) + + +@pytest.fixture(name="mock_client") +def mock_client_fixture(): + """Mock the prometheus client.""" + with mock.patch(f"{PROMETHEUS_PATH}.prometheus_client") as client: + counter_client = mock.MagicMock() + client.Counter = mock.MagicMock(return_value=counter_client) + setattr(counter_client, "labels", mock.MagicMock(return_value=mock.MagicMock())) + yield counter_client + + +@pytest.fixture +def mock_bus(hass): + """Mock the event bus listener.""" + hass.bus.listen = mock.MagicMock() + + +@pytest.mark.usefixtures("mock_bus") +async def test_minimal_config(hass, mock_client): + """Test the minimal config and defaults of component.""" + config = {prometheus.DOMAIN: {}} + assert await async_setup_component(hass, prometheus.DOMAIN, config) + await hass.async_block_till_done() + assert hass.bus.listen.called + assert EVENT_STATE_CHANGED == hass.bus.listen.call_args_list[0][0][0] + + +@pytest.mark.usefixtures("mock_bus") +async def test_full_config(hass, mock_client): + """Test the full config of component.""" + config = { + prometheus.DOMAIN: { + "namespace": "ns", + "default_metric": "m", + "override_metric": "m", + "component_config": {"fake.test": {"override_metric": "km"}}, + "component_config_glob": {"fake.time_*": 
{"override_metric": "h"}}, + "component_config_domain": {"climate": {"override_metric": "°C"}}, + "filter": { + "include_domains": ["climate"], + "include_entity_globs": ["fake.time_*"], + "include_entities": ["fake.test"], + "exclude_domains": ["script"], + "exclude_entity_globs": ["climate.excluded_*"], + "exclude_entities": ["fake.time_excluded"], + }, + } + } + assert await async_setup_component(hass, prometheus.DOMAIN, config) + await hass.async_block_till_done() + assert hass.bus.listen.called + assert EVENT_STATE_CHANGED == hass.bus.listen.call_args_list[0][0][0] + + +FilterTest = namedtuple("FilterTest", "id should_pass") + + +def make_event(entity_id): + """Make a mock event for test.""" + domain = split_entity_id(entity_id)[0] + state = mock.MagicMock( + state="not blank", + domain=domain, + entity_id=entity_id, + object_id="entity", + attributes={}, + ) + return mock.MagicMock(data={"new_state": state}, time_fired=12345) + + +async def _setup(hass, filter_config): + """Shared set up for filtering tests.""" + config = {prometheus.DOMAIN: {"filter": filter_config}} + assert await async_setup_component(hass, prometheus.DOMAIN, config) + await hass.async_block_till_done() + return hass.bus.listen.call_args_list[0][0][1] + + +@pytest.mark.usefixtures("mock_bus") +async def test_allowlist(hass, mock_client): + """Test an allowlist only config.""" + handler_method = await _setup( + hass, + { + "include_domains": ["fake"], + "include_entity_globs": ["test.included_*"], + "include_entities": ["not_real.included"], + }, + ) + + tests = [ + FilterTest("climate.excluded", False), + FilterTest("fake.included", True), + FilterTest("test.excluded_test", False), + FilterTest("test.included_test", True), + FilterTest("not_real.included", True), + FilterTest("not_real.excluded", False), + ] + + for test in tests: + event = make_event(test.id) + handler_method(event) + + was_called = mock_client.labels.call_count == 1 + assert test.should_pass == was_called + mock_client.labels.reset_mock() + + +@pytest.mark.usefixtures("mock_bus") +async def test_denylist(hass, mock_client): + """Test a denylist only config.""" + handler_method = await _setup( + hass, + { + "exclude_domains": ["fake"], + "exclude_entity_globs": ["test.excluded_*"], + "exclude_entities": ["not_real.excluded"], + }, + ) + + tests = [ + FilterTest("fake.excluded", False), + FilterTest("light.included", True), + FilterTest("test.excluded_test", False), + FilterTest("test.included_test", True), + FilterTest("not_real.included", True), + FilterTest("not_real.excluded", False), + ] + + for test in tests: + event = make_event(test.id) + handler_method(event) + + was_called = mock_client.labels.call_count == 1 + assert test.should_pass == was_called + mock_client.labels.reset_mock() + + +@pytest.mark.usefixtures("mock_bus") +async def test_filtered_denylist(hass, mock_client): + """Test a denylist config with a filtering allowlist.""" + handler_method = await _setup( + hass, + { + "include_entities": ["fake.included", "test.excluded_test"], + "exclude_domains": ["fake"], + "exclude_entity_globs": ["*.excluded_*"], + "exclude_entities": ["not_real.excluded"], + }, + ) + + tests = [ + FilterTest("fake.excluded", False), + FilterTest("fake.included", True), + FilterTest("alt_fake.excluded_test", False), + FilterTest("test.excluded_test", True), + FilterTest("not_real.excluded", False), + FilterTest("not_real.included", True), + ] + + for test in tests: + event = make_event(test.id) + handler_method(event) + + was_called = 
mock_client.labels.call_count == 1 + assert test.should_pass == was_called + mock_client.labels.reset_mock() diff --git a/tests/components/proximity/test_init.py b/tests/components/proximity/test_init.py index d3b6f9274e5..55aca3223f0 100644 --- a/tests/components/proximity/test_init.py +++ b/tests/components/proximity/test_init.py @@ -25,10 +25,7 @@ class TestProximity(unittest.TestCase): "zoning", {"name": "work", "latitude": 2.3, "longitude": 1.3, "radius": 10}, ) - - def tearDown(self): - """Stop everything that was started.""" - self.hass.stop() + self.addCleanup(self.hass.stop) def test_proximities(self): """Test a list of proximities.""" diff --git a/tests/components/pushbullet/test_notify.py b/tests/components/pushbullet/test_notify.py index 930d9261f9c..b0fb55e152d 100644 --- a/tests/components/pushbullet/test_notify.py +++ b/tests/components/pushbullet/test_notify.py @@ -18,8 +18,9 @@ class TestPushBullet(unittest.TestCase): def setUp(self): """Initialize values for this test case class.""" self.hass = get_test_home_assistant() + self.addCleanup(self.tear_down_cleanup) - def tearDown(self): # pylint: disable=invalid-name + def tear_down_cleanup(self): """Stop everything that we started.""" self.hass.stop() diff --git a/tests/components/pvpc_hourly_pricing/test_sensor.py b/tests/components/pvpc_hourly_pricing/test_sensor.py index 7ef50113de5..781ce35b99f 100644 --- a/tests/components/pvpc_hourly_pricing/test_sensor.py +++ b/tests/components/pvpc_hourly_pricing/test_sensor.py @@ -7,11 +7,12 @@ from pytz import timezone from homeassistant.components.pvpc_hourly_pricing import ATTR_TARIFF, DOMAIN from homeassistant.const import CONF_NAME from homeassistant.core import ATTR_NOW, EVENT_TIME_CHANGED +from homeassistant.setup import async_setup_component from .conftest import check_valid_state from tests.async_mock import patch -from tests.common import async_setup_component, date_util +from tests.common import date_util from tests.test_util.aiohttp import AiohttpClientMocker diff --git a/tests/components/radarr/test_sensor.py b/tests/components/radarr/test_sensor.py index 0e76e99e721..96575f81154 100644 --- a/tests/components/radarr/test_sensor.py +++ b/tests/components/radarr/test_sensor.py @@ -208,8 +208,9 @@ class TestRadarrSetup(unittest.TestCase): self.DEVICES = [] self.hass = get_test_home_assistant() self.hass.config.time_zone = "America/Los_Angeles" + self.addCleanup(self.tear_down_cleanup) - def tearDown(self): # pylint: disable=invalid-name + def tear_down_cleanup(self): """Stop everything that was started.""" self.hass.stop() diff --git a/tests/components/recorder/test_init.py b/tests/components/recorder/test_init.py index 1931a367ee8..843609cf308 100644 --- a/tests/components/recorder/test_init.py +++ b/tests/components/recorder/test_init.py @@ -5,12 +5,19 @@ import unittest import pytest -from homeassistant.components.recorder import Recorder +from homeassistant.components.recorder import ( + CONFIG_SCHEMA, + DOMAIN, + Recorder, + run_information, + run_information_from_instance, + run_information_with_session, +) from homeassistant.components.recorder.const import DATA_INSTANCE -from homeassistant.components.recorder.models import Events, States +from homeassistant.components.recorder.models import Events, RecorderRuns, States from homeassistant.components.recorder.util import session_scope from homeassistant.const import MATCH_ALL -from homeassistant.core import ATTR_NOW, EVENT_TIME_CHANGED, callback +from homeassistant.core import ATTR_NOW, EVENT_TIME_CHANGED, Context, 
callback from homeassistant.setup import async_setup_component from homeassistant.util import dt as dt_util @@ -28,8 +35,9 @@ class TestRecorder(unittest.TestCase): self.hass = get_test_home_assistant() init_recorder_component(self.hass) self.hass.start() + self.addCleanup(self.tear_down_cleanup) - def tearDown(self): # pylint: disable=invalid-name + def tear_down_cleanup(self): """Stop everything that was started.""" self.hass.stop() @@ -49,7 +57,7 @@ class TestRecorder(unittest.TestCase): assert db_states[0].event_id > 0 state = db_states[0].to_native() - assert state == self.hass.states.get(entity_id) + assert state == _state_empty_context(self.hass, entity_id) def test_saving_event(self): """Test saving and restoring an event.""" @@ -129,13 +137,34 @@ def _add_events(hass, events): return [ev.to_native() for ev in session.query(Events)] +def _state_empty_context(hass, entity_id): + # We don't restore context unless we need it by joining the + # events table on the event_id for state_changed events + state = hass.states.get(entity_id) + state.context = Context(id=None) + return state + + # pylint: disable=redefined-outer-name,invalid-name def test_saving_state_include_domains(hass_recorder): """Test saving and restoring a state.""" hass = hass_recorder({"include": {"domains": "test2"}}) states = _add_entities(hass, ["test.recorder", "test2.recorder"]) assert len(states) == 1 - assert hass.states.get("test2.recorder") == states[0] + assert _state_empty_context(hass, "test2.recorder") == states[0] + + +def test_saving_state_include_domains_globs(hass_recorder): + """Test saving and restoring a state.""" + hass = hass_recorder( + {"include": {"domains": "test2", "entity_globs": "*.included_*"}} + ) + states = _add_entities( + hass, ["test.recorder", "test2.recorder", "test3.included_entity"] + ) + assert len(states) == 2 + assert _state_empty_context(hass, "test2.recorder") == states[0] + assert _state_empty_context(hass, "test3.included_entity") == states[1] def test_saving_state_incl_entities(hass_recorder): @@ -143,7 +172,7 @@ def test_saving_state_incl_entities(hass_recorder): hass = hass_recorder({"include": {"entities": "test2.recorder"}}) states = _add_entities(hass, ["test.recorder", "test2.recorder"]) assert len(states) == 1 - assert hass.states.get("test2.recorder") == states[0] + assert _state_empty_context(hass, "test2.recorder") == states[0] def test_saving_event_exclude_event_type(hass_recorder): @@ -159,7 +188,19 @@ def test_saving_state_exclude_domains(hass_recorder): hass = hass_recorder({"exclude": {"domains": "test"}}) states = _add_entities(hass, ["test.recorder", "test2.recorder"]) assert len(states) == 1 - assert hass.states.get("test2.recorder") == states[0] + assert _state_empty_context(hass, "test2.recorder") == states[0] + + +def test_saving_state_exclude_domains_globs(hass_recorder): + """Test saving and restoring a state.""" + hass = hass_recorder( + {"exclude": {"domains": "test", "entity_globs": "*.excluded_*"}} + ) + states = _add_entities( + hass, ["test.recorder", "test2.recorder", "test2.excluded_entity"] + ) + assert len(states) == 1 + assert _state_empty_context(hass, "test2.recorder") == states[0] def test_saving_state_exclude_entities(hass_recorder): @@ -167,7 +208,7 @@ def test_saving_state_exclude_entities(hass_recorder): hass = hass_recorder({"exclude": {"entities": "test.recorder"}}) states = _add_entities(hass, ["test.recorder", "test2.recorder"]) assert len(states) == 1 - assert hass.states.get("test2.recorder") == states[0] + assert 
_state_empty_context(hass, "test2.recorder") == states[0] def test_saving_state_exclude_domain_include_entity(hass_recorder): @@ -179,6 +220,20 @@ def test_saving_state_exclude_domain_include_entity(hass_recorder): assert len(states) == 2 +def test_saving_state_exclude_domain_glob_include_entity(hass_recorder): + """Test saving and restoring a state.""" + hass = hass_recorder( + { + "include": {"entities": ["test.recorder", "test.excluded_entity"]}, + "exclude": {"domains": "test", "entity_globs": "*._excluded_*"}, + } + ) + states = _add_entities( + hass, ["test.recorder", "test2.recorder", "test.excluded_entity"] + ) + assert len(states) == 3 + + def test_saving_state_include_domain_exclude_entity(hass_recorder): """Test saving and restoring a state.""" hass = hass_recorder( @@ -186,8 +241,24 @@ def test_saving_state_include_domain_exclude_entity(hass_recorder): ) states = _add_entities(hass, ["test.recorder", "test2.recorder", "test.ok"]) assert len(states) == 1 - assert hass.states.get("test.ok") == states[0] - assert hass.states.get("test.ok").state == "state2" + assert _state_empty_context(hass, "test.ok") == states[0] + assert _state_empty_context(hass, "test.ok").state == "state2" + + +def test_saving_state_include_domain_glob_exclude_entity(hass_recorder): + """Test saving and restoring a state.""" + hass = hass_recorder( + { + "exclude": {"entities": ["test.recorder", "test2.included_entity"]}, + "include": {"domains": "test", "entity_globs": "*._included_*"}, + } + ) + states = _add_entities( + hass, ["test.recorder", "test2.recorder", "test.ok", "test2.included_entity"] + ) + assert len(states) == 1 + assert _state_empty_context(hass, "test.ok") == states[0] + assert _state_empty_context(hass, "test.ok").state == "state2" def test_recorder_setup_failure(): @@ -206,8 +277,8 @@ def test_recorder_setup_failure(): uri="sqlite://", db_max_retries=10, db_retry_wait=3, - include={}, - exclude={}, + entity_filter=CONFIG_SCHEMA({DOMAIN: {}}), + exclude_t=[], ) rec.start() rec.join() @@ -229,6 +300,7 @@ async def test_defaults_set(hass): assert await async_setup_component(hass, "history", {}) assert recorder_config is not None + # pylint: disable=unsubscriptable-object assert recorder_config["auto_purge"] assert recorder_config["purge_keep_days"] == 10 @@ -245,7 +317,7 @@ def test_auto_purge(hass_recorder): test_time = tz.localize(datetime(2020, 1, 1, 4, 12, 0)) with patch( - "homeassistant.components.recorder.purge.purge_old_data" + "homeassistant.components.recorder.purge.purge_old_data", return_value=True ) as purge_old_data: for delta in (-1, 0, 1): hass.bus.fire( @@ -257,3 +329,87 @@ def test_auto_purge(hass_recorder): assert len(purge_old_data.mock_calls) == 1 dt_util.set_default_time_zone(original_tz) + + +def test_saving_sets_old_state(hass_recorder): + """Test saving sets old state.""" + hass = hass_recorder() + + hass.states.set("test.one", "on", {}) + hass.states.set("test.two", "on", {}) + wait_recording_done(hass) + hass.states.set("test.one", "off", {}) + hass.states.set("test.two", "off", {}) + wait_recording_done(hass) + + with session_scope(hass=hass) as session: + states = list(session.query(States)) + assert len(states) == 4 + + assert states[0].entity_id == "test.one" + assert states[1].entity_id == "test.two" + assert states[2].entity_id == "test.one" + assert states[3].entity_id == "test.two" + + assert states[0].old_state_id is None + assert states[1].old_state_id is None + assert states[2].old_state_id == states[0].state_id + assert states[3].old_state_id == 
states[1].state_id + + +def test_saving_state_with_serializable_data(hass_recorder, caplog): + """Test saving data that cannot be serialized does not crash.""" + hass = hass_recorder() + + hass.states.set("test.one", "on", {"fail": CannotSerializeMe()}) + wait_recording_done(hass) + hass.states.set("test.two", "on", {}) + wait_recording_done(hass) + hass.states.set("test.two", "off", {}) + wait_recording_done(hass) + + with session_scope(hass=hass) as session: + states = list(session.query(States)) + assert len(states) == 2 + + assert states[0].entity_id == "test.two" + assert states[1].entity_id == "test.two" + assert states[0].old_state_id is None + assert states[1].old_state_id == states[0].state_id + + assert "State is not JSON serializable" in caplog.text + + +def test_run_information(hass_recorder): + """Ensure run_information returns expected data.""" + before_start_recording = dt_util.utcnow() + hass = hass_recorder() + run_info = run_information_from_instance(hass) + assert isinstance(run_info, RecorderRuns) + assert run_info.closed_incorrect is False + + with session_scope(hass=hass) as session: + run_info = run_information_with_session(session) + assert isinstance(run_info, RecorderRuns) + assert run_info.closed_incorrect is False + + run_info = run_information(hass) + assert isinstance(run_info, RecorderRuns) + assert run_info.closed_incorrect is False + + hass.states.set("test.two", "on", {}) + wait_recording_done(hass) + run_info = run_information(hass) + assert isinstance(run_info, RecorderRuns) + assert run_info.closed_incorrect is False + + run_info = run_information(hass, before_start_recording) + assert run_info is None + + run_info = run_information(hass, dt_util.utcnow()) + assert isinstance(run_info, RecorderRuns) + assert run_info.closed_incorrect is False + + +class CannotSerializeMe: + """A class that the JSONEncoder cannot serialize.""" diff --git a/tests/components/recorder/test_models.py b/tests/components/recorder/test_models.py index 276194b5d6c..bf659282e3e 100644 --- a/tests/components/recorder/test_models.py +++ b/tests/components/recorder/test_models.py @@ -2,12 +2,22 @@ from datetime import datetime import unittest +import pytest +import pytz from sqlalchemy import create_engine from sqlalchemy.orm import scoped_session, sessionmaker -from homeassistant.components.recorder.models import Base, Events, RecorderRuns, States +from homeassistant.components.recorder.models import ( + Base, + Events, + RecorderRuns, + States, + process_timestamp, + process_timestamp_to_utc_isoformat, +) from homeassistant.const import EVENT_STATE_CHANGED import homeassistant.core as ha +from homeassistant.exceptions import InvalidEntityFormatError from homeassistant.util import dt ENGINE = None @@ -58,6 +68,9 @@ class TestStates(unittest.TestCase): {"entity_id": "sensor.temperature", "old_state": None, "new_state": state}, context=state.context, ) + # We don't restore context unless we need it by joining the + # events table on the event_id for state_changed events + state.context = ha.Context(id=None) assert state == States.from_event(event).to_native() def test_from_event_to_delete_state(self): @@ -88,8 +101,9 @@ class TestRecorderRuns(unittest.TestCase): session.query(Events).delete() session.query(States).delete() session.query(RecorderRuns).delete() + self.addCleanup(self.tear_down_cleanup) - def tearDown(self): # pylint: disable=invalid-name + def tear_down_cleanup(self): """Clean up.""" self.session.rollback() @@ -154,8 +168,76 @@ class 
TestRecorderRuns(unittest.TestCase): def test_states_from_native_invalid_entity_id(): """Test loading a state from an invalid entity ID.""" - event = States() - event.entity_id = "test.invalid__id" - event.attributes = "{}" - state = event.to_native() + state = States() + state.entity_id = "test.invalid__id" + state.attributes = "{}" + with pytest.raises(InvalidEntityFormatError): + state = state.to_native() + + state = state.to_native(validate_entity_id=False) assert state.entity_id == "test.invalid__id" + + +async def test_process_timestamp(): + """Test processing time stamp to UTC.""" + datetime_with_tzinfo = datetime(2016, 7, 9, 11, 0, 0, tzinfo=dt.UTC) + datetime_without_tzinfo = datetime(2016, 7, 9, 11, 0, 0) + est = pytz.timezone("US/Eastern") + datetime_est_timezone = datetime(2016, 7, 9, 11, 0, 0, tzinfo=est) + nst = pytz.timezone("Canada/Newfoundland") + datetime_nst_timezone = datetime(2016, 7, 9, 11, 0, 0, tzinfo=nst) + hst = pytz.timezone("US/Hawaii") + datetime_hst_timezone = datetime(2016, 7, 9, 11, 0, 0, tzinfo=hst) + + assert process_timestamp(datetime_with_tzinfo) == datetime( + 2016, 7, 9, 11, 0, 0, tzinfo=dt.UTC + ) + assert process_timestamp(datetime_without_tzinfo) == datetime( + 2016, 7, 9, 11, 0, 0, tzinfo=dt.UTC + ) + assert process_timestamp(datetime_est_timezone) == datetime( + 2016, 7, 9, 15, 56, tzinfo=dt.UTC + ) + assert process_timestamp(datetime_nst_timezone) == datetime( + 2016, 7, 9, 14, 31, tzinfo=dt.UTC + ) + assert process_timestamp(datetime_hst_timezone) == datetime( + 2016, 7, 9, 21, 31, tzinfo=dt.UTC + ) + assert process_timestamp(None) is None + + +async def test_process_timestamp_to_utc_isoformat(): + """Test processing time stamp to UTC isoformat.""" + datetime_with_tzinfo = datetime(2016, 7, 9, 11, 0, 0, tzinfo=dt.UTC) + datetime_without_tzinfo = datetime(2016, 7, 9, 11, 0, 0) + est = pytz.timezone("US/Eastern") + datetime_est_timezone = datetime(2016, 7, 9, 11, 0, 0, tzinfo=est) + est = pytz.timezone("US/Eastern") + datetime_est_timezone = datetime(2016, 7, 9, 11, 0, 0, tzinfo=est) + nst = pytz.timezone("Canada/Newfoundland") + datetime_nst_timezone = datetime(2016, 7, 9, 11, 0, 0, tzinfo=nst) + hst = pytz.timezone("US/Hawaii") + datetime_hst_timezone = datetime(2016, 7, 9, 11, 0, 0, tzinfo=hst) + + assert ( + process_timestamp_to_utc_isoformat(datetime_with_tzinfo) + == "2016-07-09T11:00:00+00:00" + ) + assert ( + process_timestamp_to_utc_isoformat(datetime_without_tzinfo) + == "2016-07-09T11:00:00+00:00" + ) + assert ( + process_timestamp_to_utc_isoformat(datetime_est_timezone) + == "2016-07-09T15:56:00+00:00" + ) + assert ( + process_timestamp_to_utc_isoformat(datetime_nst_timezone) + == "2016-07-09T14:31:00+00:00" + ) + assert ( + process_timestamp_to_utc_isoformat(datetime_hst_timezone) + == "2016-07-09T21:31:00+00:00" + ) + assert process_timestamp_to_utc_isoformat(None) is None diff --git a/tests/components/recorder/test_purge.py b/tests/components/recorder/test_purge.py index 4ec08c432b0..93fb6e51621 100644 --- a/tests/components/recorder/test_purge.py +++ b/tests/components/recorder/test_purge.py @@ -5,9 +5,10 @@ import unittest from homeassistant.components import recorder from homeassistant.components.recorder.const import DATA_INSTANCE -from homeassistant.components.recorder.models import Events, States +from homeassistant.components.recorder.models import Events, RecorderRuns, States from homeassistant.components.recorder.purge import purge_old_data from homeassistant.components.recorder.util import session_scope +from 
homeassistant.util import dt as dt_util from tests.async_mock import patch from tests.common import get_test_home_assistant, init_recorder_component @@ -21,8 +22,9 @@ class TestRecorderPurge(unittest.TestCase): self.hass = get_test_home_assistant() init_recorder_component(self.hass) self.hass.start() + self.addCleanup(self.tear_down_cleanup) - def tearDown(self): # pylint: disable=invalid-name + def tear_down_cleanup(self): """Stop everything that was started.""" self.hass.stop() @@ -93,6 +95,32 @@ class TestRecorderPurge(unittest.TestCase): ) ) + def _add_test_recorder_runs(self): + """Add a few recorder_runs for testing.""" + now = datetime.now() + five_days_ago = now - timedelta(days=5) + eleven_days_ago = now - timedelta(days=11) + + self.hass.block_till_done() + self.hass.data[DATA_INSTANCE].block_till_done() + + with recorder.session_scope(hass=self.hass) as session: + for rec_id in range(6): + if rec_id < 2: + timestamp = eleven_days_ago + elif rec_id < 4: + timestamp = five_days_ago + else: + timestamp = now + + session.add( + RecorderRuns( + start=timestamp, + created=dt_util.utcnow(), + end=timestamp + timedelta(days=1), + ) + ) + def test_purge_old_states(self): """Test deleting old states.""" self._add_test_states() @@ -102,9 +130,16 @@ class TestRecorderPurge(unittest.TestCase): assert states.count() == 6 # run purge_old_data() - purge_old_data(self.hass.data[DATA_INSTANCE], 4, repack=False) + finished = purge_old_data(self.hass.data[DATA_INSTANCE], 4, repack=False) + assert not finished + assert states.count() == 4 - # we should only have 2 states left after purging + finished = purge_old_data(self.hass.data[DATA_INSTANCE], 4, repack=False) + assert not finished + assert states.count() == 2 + + finished = purge_old_data(self.hass.data[DATA_INSTANCE], 4, repack=False) + assert finished assert states.count() == 2 def test_purge_old_events(self): @@ -116,9 +151,17 @@ class TestRecorderPurge(unittest.TestCase): assert events.count() == 6 # run purge_old_data() - purge_old_data(self.hass.data[DATA_INSTANCE], 4, repack=False) + finished = purge_old_data(self.hass.data[DATA_INSTANCE], 4, repack=False) + assert not finished + assert events.count() == 4 + + finished = purge_old_data(self.hass.data[DATA_INSTANCE], 4, repack=False) + assert not finished + assert events.count() == 2 # we should only have 2 events left + finished = purge_old_data(self.hass.data[DATA_INSTANCE], 4, repack=False) + assert finished assert events.count() == 2 def test_purge_method(self): @@ -126,6 +169,7 @@ class TestRecorderPurge(unittest.TestCase): service_data = {"keep_days": 4} self._add_test_events() self._add_test_states() + self._add_test_recorder_runs() # make sure we start with 6 states with session_scope(hass=self.hass) as session: @@ -135,6 +179,9 @@ class TestRecorderPurge(unittest.TestCase): events = session.query(Events).filter(Events.event_type.like("EVENT_TEST%")) assert events.count() == 6 + recorder_runs = session.query(RecorderRuns) + assert recorder_runs.count() == 7 + self.hass.data[DATA_INSTANCE].block_till_done() # run purge method - no service data, use defaults @@ -161,6 +208,9 @@ class TestRecorderPurge(unittest.TestCase): # now we should only have 2 events left assert events.count() == 2 + # now we should only have 3 recorder runs left + assert recorder_runs.count() == 3 + assert not ( "EVENT_TEST_PURGE" in (event.event_type for event in events.all()) ) @@ -174,6 +224,6 @@ class TestRecorderPurge(unittest.TestCase): self.hass.block_till_done() 
self.hass.data[DATA_INSTANCE].block_till_done() assert ( - mock_logger.debug.mock_calls[3][1][0] + mock_logger.debug.mock_calls[5][1][0] == "Vacuuming SQL DB to free space" ) diff --git a/tests/components/recorder/test_util.py b/tests/components/recorder/test_util.py index 8de5acd78db..6a4126e76fd 100644 --- a/tests/components/recorder/test_util.py +++ b/tests/components/recorder/test_util.py @@ -47,7 +47,7 @@ def test_recorder_bad_execute(hass_recorder): hass_recorder() - def to_native(): + def to_native(validate_entity_id=True): """Rasie exception.""" raise SQLAlchemyError() @@ -57,6 +57,6 @@ def test_recorder_bad_execute(hass_recorder): with pytest.raises(SQLAlchemyError), patch( "homeassistant.components.recorder.time.sleep" ) as e_mock: - util.execute((mck1,)) + util.execute((mck1,), to_native=True) assert e_mock.call_count == 2 diff --git a/tests/components/reddit/test_sensor.py b/tests/components/reddit/test_sensor.py index 33c7fae76b0..faa3679f492 100644 --- a/tests/components/reddit/test_sensor.py +++ b/tests/components/reddit/test_sensor.py @@ -157,8 +157,9 @@ class TestRedditSetup(unittest.TestCase): def setUp(self): """Initialize values for this testcase class.""" self.hass = get_test_home_assistant() + self.addCleanup(self.tear_down_cleanup) - def tearDown(self): # pylint: disable=invalid-name + def tear_down_cleanup(self): """Stop everything that was started.""" self.hass.stop() diff --git a/tests/components/remember_the_milk/test_init.py b/tests/components/remember_the_milk/test_init.py index 2bba18f0052..1c9438c602b 100644 --- a/tests/components/remember_the_milk/test_init.py +++ b/tests/components/remember_the_milk/test_init.py @@ -30,8 +30,9 @@ class TestConfiguration(unittest.TestCase): } } ) + self.addCleanup(self.tear_down_cleanup) - def tearDown(self): + def tear_down_cleanup(self): """Exit Home Assistant.""" self.hass.stop() diff --git a/tests/components/remote/test_init.py b/tests/components/remote/test_init.py index 031131276fe..eb47d365f83 100644 --- a/tests/components/remote/test_init.py +++ b/tests/components/remote/test_init.py @@ -28,11 +28,7 @@ class TestRemote(unittest.TestCase): def setUp(self): """Set up things to be run when tests are started.""" self.hass = get_test_home_assistant() - - # pylint: disable=invalid-name - def tearDown(self): - """Stop everything that was started.""" - self.hass.stop() + self.addCleanup(self.hass.stop) def test_is_on(self): """Test is_on.""" diff --git a/tests/components/rest/test_binary_sensor.py b/tests/components/rest/test_binary_sensor.py index 762c1705d77..e56342d861f 100644 --- a/tests/components/rest/test_binary_sensor.py +++ b/tests/components/rest/test_binary_sensor.py @@ -33,10 +33,7 @@ class TestRestBinarySensorSetup(unittest.TestCase): self.hass = get_test_home_assistant() # Reset for this test. 
self.DEVICES = [] - - def tearDown(self): - """Stop everything that was started.""" - self.hass.stop() + self.addCleanup(self.hass.stop) def test_setup_missing_config(self): """Test setup with configuration missing required entries.""" @@ -91,6 +88,42 @@ class TestRestBinarySensorSetup(unittest.TestCase): self.hass.block_till_done() assert 1 == mock_req.call_count + @requests_mock.Mocker() + def test_setup_minimum_resource_template(self, mock_req): + """Test setup with minimum configuration (resource_template).""" + mock_req.get("http://localhost", status_code=200) + with assert_setup_component(1, "binary_sensor"): + assert setup_component( + self.hass, + "binary_sensor", + { + "binary_sensor": { + "platform": "rest", + "resource_template": "http://localhost", + } + }, + ) + self.hass.block_till_done() + assert mock_req.call_count == 1 + + @requests_mock.Mocker() + def test_setup_duplicate_resource(self, mock_req): + """Test setup with duplicate resources.""" + mock_req.get("http://localhost", status_code=200) + with assert_setup_component(0, "binary_sensor"): + assert setup_component( + self.hass, + "binary_sensor", + { + "binary_sensor": { + "platform": "rest", + "resource": "http://localhost", + "resource_template": "http://localhost", + } + }, + ) + self.hass.block_till_done() + @requests_mock.Mocker() def test_setup_get(self, mock_req): """Test setup with valid configuration.""" @@ -158,14 +191,19 @@ class TestRestBinarySensor(unittest.TestCase): self.name = "foo" self.device_class = "light" self.value_template = template.Template("{{ value_json.key }}", self.hass) + self.force_update = False + self.resource_template = None self.binary_sensor = rest.RestBinarySensor( - self.hass, self.rest, self.name, self.device_class, self.value_template + self.hass, + self.rest, + self.name, + self.device_class, + self.value_template, + self.force_update, + self.resource_template, ) - - def tearDown(self): - """Stop everything that was started.""" - self.hass.stop() + self.addCleanup(self.hass.stop) def update_side_effect(self, data): """Side effect function for mocking RestData.update().""" @@ -216,7 +254,13 @@ class TestRestBinarySensor(unittest.TestCase): "rest.RestData.update", side_effect=self.update_side_effect("true") ) self.binary_sensor = rest.RestBinarySensor( - self.hass, self.rest, self.name, self.device_class, None + self.hass, + self.rest, + self.name, + self.device_class, + None, + self.force_update, + self.resource_template, ) self.binary_sensor.update() assert STATE_ON == self.binary_sensor.state diff --git a/tests/components/rest/test_sensor.py b/tests/components/rest/test_sensor.py index 77d88f083e4..90a8b8d361e 100644 --- a/tests/components/rest/test_sensor.py +++ b/tests/components/rest/test_sensor.py @@ -25,10 +25,7 @@ class TestRestSensorSetup(unittest.TestCase): def setUp(self): """Set up things to be run when tests are started.""" self.hass = get_test_home_assistant() - - def tearDown(self): - """Stop everything that was started.""" - self.hass.stop() + self.addCleanup(self.hass.stop) def test_setup_missing_config(self): """Test setup with configuration missing required entries.""" @@ -237,10 +234,7 @@ class TestRestSensor(unittest.TestCase): self.resource_template, self.json_attrs_path, ) - - def tearDown(self): - """Stop everything that was started.""" - self.hass.stop() + self.addCleanup(self.hass.stop) def update_side_effect(self, data, headers): """Side effect function for mocking RestData.update().""" diff --git a/tests/components/rfxtrx/test_cover.py 
b/tests/components/rfxtrx/test_cover.py index d85ea5cf6f4..9ecbcda3409 100644 --- a/tests/components/rfxtrx/test_cover.py +++ b/tests/components/rfxtrx/test_cover.py @@ -18,8 +18,9 @@ class TestCoverRfxtrx(unittest.TestCase): """Set up things to be run when tests are started.""" self.hass = get_test_home_assistant() mock_component("rfxtrx") + self.addCleanup(self.tear_down_cleanup) - def tearDown(self): + def tear_down_cleanup(self): """Stop everything that was started.""" rfxtrx_core.RECEIVED_EVT_SUBSCRIBERS = [] rfxtrx_core.RFX_DEVICES = {} diff --git a/tests/components/rfxtrx/test_init.py b/tests/components/rfxtrx/test_init.py index 6b75cc92fc6..669ec24efc3 100644 --- a/tests/components/rfxtrx/test_init.py +++ b/tests/components/rfxtrx/test_init.py @@ -18,8 +18,9 @@ class TestRFXTRX(unittest.TestCase): def setUp(self): """Set up things to be run when tests are started.""" self.hass = get_test_home_assistant() + self.addCleanup(self.tear_down_cleanup) - def tearDown(self): + def tear_down_cleanup(self): """Stop everything that was started.""" rfxtrx.RECEIVED_EVT_SUBSCRIBERS = [] rfxtrx.RFX_DEVICES = {} diff --git a/tests/components/rfxtrx/test_light.py b/tests/components/rfxtrx/test_light.py index a5230cc5f3c..1a8ee94d9a4 100644 --- a/tests/components/rfxtrx/test_light.py +++ b/tests/components/rfxtrx/test_light.py @@ -18,8 +18,9 @@ class TestLightRfxtrx(unittest.TestCase): """Set up things to be run when tests are started.""" self.hass = get_test_home_assistant() mock_component(self.hass, "rfxtrx") + self.addCleanup(self.tear_down_cleanup) - def tearDown(self): + def tear_down_cleanup(self): """Stop everything that was started.""" rfxtrx_core.RECEIVED_EVT_SUBSCRIBERS = [] rfxtrx_core.RFX_DEVICES = {} diff --git a/tests/components/rfxtrx/test_sensor.py b/tests/components/rfxtrx/test_sensor.py index e258ebb9aa1..fa713a024cf 100644 --- a/tests/components/rfxtrx/test_sensor.py +++ b/tests/components/rfxtrx/test_sensor.py @@ -18,8 +18,9 @@ class TestSensorRfxtrx(unittest.TestCase): """Set up things to be run when tests are started.""" self.hass = get_test_home_assistant() mock_component(self.hass, "rfxtrx") + self.addCleanup(self.tear_down_cleanup) - def tearDown(self): + def tear_down_cleanup(self): """Stop everything that was started.""" rfxtrx_core.RECEIVED_EVT_SUBSCRIBERS = [] rfxtrx_core.RFX_DEVICES = {} diff --git a/tests/components/rfxtrx/test_switch.py b/tests/components/rfxtrx/test_switch.py index 66da197aae8..cc1a6008fa8 100644 --- a/tests/components/rfxtrx/test_switch.py +++ b/tests/components/rfxtrx/test_switch.py @@ -18,8 +18,9 @@ class TestSwitchRfxtrx(unittest.TestCase): """Set up things to be run when tests are started.""" self.hass = get_test_home_assistant() mock_component(self.hass, "rfxtrx") + self.addCleanup(self.tear_down_cleanup) - def tearDown(self): + def tear_down_cleanup(self): """Stop everything that was started.""" rfxtrx_core.RECEIVED_EVT_SUBSCRIBERS = [] rfxtrx_core.RFX_DEVICES = {} diff --git a/tests/components/ring/test_init.py b/tests/components/ring/test_init.py index 39d2c63ffdd..4ad056150dd 100644 --- a/tests/components/ring/test_init.py +++ b/tests/components/ring/test_init.py @@ -23,8 +23,9 @@ class TestRing(unittest.TestCase): """Initialize values for this test case class.""" self.hass = get_test_home_assistant() self.config = VALID_CONFIG + self.addCleanup(self.tear_down_cleanup) - def tearDown(self): # pylint: disable=invalid-name + def tear_down_cleanup(self): """Stop everything that was started.""" self.hass.stop() diff --git 
a/tests/components/roku/__init__.py b/tests/components/roku/__init__.py index e3c4c8ce27c..a73e1b7d5aa 100644 --- a/tests/components/roku/__init__.py +++ b/tests/components/roku/__init__.py @@ -33,6 +33,7 @@ def mock_connection( app: str = "roku", host: str = HOST, power: bool = True, + media_state: str = "close", error: bool = False, server_error: bool = False, ) -> None: @@ -89,6 +90,12 @@ def mock_connection( headers={"Content-Type": "text/xml"}, ) + aioclient_mock.get( + f"{roku_url}/query/media-player", + text=load_fixture(f"roku/media-player-{media_state}.xml"), + headers={"Content-Type": "text/xml"}, + ) + aioclient_mock.post( re.compile(f"{roku_url}/keypress/.*"), text="OK", ) @@ -97,6 +104,8 @@ def mock_connection( re.compile(f"{roku_url}/launch/.*"), text="OK", ) + aioclient_mock.post(f"{roku_url}/search", text="OK") + def mock_connection_error( aioclient_mock: AiohttpClientMocker, @@ -115,6 +124,7 @@ def mock_connection_error( aioclient_mock.post(re.compile(f"{roku_url}/keypress/.*"), exc=SocketGIAError) aioclient_mock.post(re.compile(f"{roku_url}/launch/.*"), exc=SocketGIAError) + aioclient_mock.post(f"{roku_url}/search", exc=SocketGIAError) def mock_connection_server_error( @@ -134,6 +144,7 @@ def mock_connection_server_error( aioclient_mock.post(re.compile(f"{roku_url}/keypress/.*"), status=500) aioclient_mock.post(re.compile(f"{roku_url}/launch/.*"), status=500) + aioclient_mock.post(f"{roku_url}/search", status=500) async def setup_integration( @@ -145,6 +156,7 @@ async def setup_integration( unique_id: str = UPNP_SERIAL, error: bool = False, power: bool = True, + media_state: str = "close", server_error: bool = False, skip_entry_setup: bool = False, ) -> MockConfigEntry: @@ -161,6 +173,7 @@ async def setup_integration( host=host, error=error, power=power, + media_state=media_state, server_error=server_error, ) await hass.config_entries.async_setup(entry.entry_id) diff --git a/tests/components/roku/test_media_player.py b/tests/components/roku/test_media_player.py index 9ac758585e9..a05cde5a596 100644 --- a/tests/components/roku/test_media_player.py +++ b/tests/components/roku/test_media_player.py @@ -28,6 +28,7 @@ from homeassistant.components.media_player.const import ( SUPPORT_VOLUME_MUTE, SUPPORT_VOLUME_STEP, ) +from homeassistant.components.roku.const import ATTR_KEYWORD, DOMAIN, SERVICE_SEARCH from homeassistant.const import ( ATTR_ENTITY_ID, SERVICE_MEDIA_NEXT_TRACK, @@ -42,6 +43,7 @@ from homeassistant.const import ( SERVICE_VOLUME_UP, STATE_HOME, STATE_IDLE, + STATE_PAUSED, STATE_PLAYING, STATE_STANDBY, STATE_UNAVAILABLE, @@ -213,6 +215,21 @@ async def test_attributes_app( assert state.attributes.get(ATTR_INPUT_SOURCE) == "Netflix" +async def test_attributes_app_media_paused( + hass: HomeAssistantType, aioclient_mock: AiohttpClientMocker +) -> None: + """Test attributes for app with paused media.""" + await setup_integration(hass, aioclient_mock, app="pluto", media_state="pause") + + state = hass.states.get(MAIN_ENTITY_ID) + assert state.state == STATE_PAUSED + + assert state.attributes.get(ATTR_MEDIA_CONTENT_TYPE) == MEDIA_TYPE_APP + assert state.attributes.get(ATTR_APP_ID) == "74519" + assert state.attributes.get(ATTR_APP_NAME) == "Pluto TV - It's Free TV" + assert state.attributes.get(ATTR_INPUT_SOURCE) == "Pluto TV - It's Free TV" + + async def test_attributes_screensaver( hass: HomeAssistantType, aioclient_mock: AiohttpClientMocker ) -> None: @@ -406,3 +423,19 @@ async def test_tv_services( ) tune_mock.assert_called_once_with("55") + + +async def 
test_integration_services( + hass: HomeAssistantType, aioclient_mock: AiohttpClientMocker +) -> None: + """Test integration services.""" + await setup_integration(hass, aioclient_mock) + + with patch("homeassistant.components.roku.Roku.search") as search_mock: + await hass.services.async_call( + DOMAIN, + SERVICE_SEARCH, + {ATTR_ENTITY_ID: MAIN_ENTITY_ID, ATTR_KEYWORD: "Space Jam"}, + blocking=True, + ) + search_mock.assert_called_once_with("Space Jam") diff --git a/tests/components/roku/test_remote.py b/tests/components/roku/test_remote.py index 6b50d4362c1..96426e5b10a 100644 --- a/tests/components/roku/test_remote.py +++ b/tests/components/roku/test_remote.py @@ -39,7 +39,7 @@ async def test_unique_id( async def test_main_services( hass: HomeAssistantType, aioclient_mock: AiohttpClientMocker ) -> None: - """Test the different services.""" + """Test platform services.""" await setup_integration(hass, aioclient_mock) with patch("homeassistant.components.roku.Roku.remote") as remote_mock: diff --git a/tests/components/scene/test_init.py b/tests/components/scene/test_init.py index bff88d8e660..7f8308d3e20 100644 --- a/tests/components/scene/test_init.py +++ b/tests/components/scene/test_init.py @@ -35,8 +35,9 @@ class TestScene(unittest.TestCase): assert not self.light_1.is_on assert not self.light_2.is_on + self.addCleanup(self.tear_down_cleanup) - def tearDown(self): # pylint: disable=invalid-name + def tear_down_cleanup(self): """Stop everything that was started.""" self.hass.stop() diff --git a/tests/components/script/test_init.py b/tests/components/script/test_init.py index 8dbe43a25ff..bb7340a08da 100644 --- a/tests/components/script/test_init.py +++ b/tests/components/script/test_init.py @@ -4,12 +4,11 @@ import unittest import pytest -from homeassistant.components import script -from homeassistant.components.script import DOMAIN +from homeassistant.components import logbook, script +from homeassistant.components.script import DOMAIN, EVENT_SCRIPT_STARTED from homeassistant.const import ( ATTR_ENTITY_ID, ATTR_NAME, - EVENT_SCRIPT_STARTED, SERVICE_RELOAD, SERVICE_TOGGLE, SERVICE_TURN_OFF, @@ -23,6 +22,7 @@ from homeassistant.setup import async_setup_component, setup_component from tests.async_mock import Mock, patch from tests.common import get_test_home_assistant +from tests.components.logbook.test_init import MockLazyEventPartialState ENTITY_ID = "script.test" @@ -73,8 +73,9 @@ class TestScriptComponent(unittest.TestCase): """Set up things to be run when tests are started.""" self.hass = get_test_home_assistant() - # pylint: disable=invalid-name - def tearDown(self): + self.addCleanup(self.tear_down_cleanup) + + def tear_down_cleanup(self): """Stop down everything that was started.""" self.hass.stop() @@ -467,3 +468,38 @@ async def test_config(hass): test_script = hass.states.get("script.test_script") assert test_script.name == "Script Name" assert test_script.attributes["icon"] == "mdi:party" + + +async def test_logbook_humanify_script_started_event(hass): + """Test humanifying script started event.""" + hass.config.components.add("recorder") + await async_setup_component(hass, DOMAIN, {}) + await async_setup_component(hass, "logbook", {}) + entity_attr_cache = logbook.EntityAttributeCache(hass) + + event1, event2 = list( + logbook.humanify( + hass, + [ + MockLazyEventPartialState( + EVENT_SCRIPT_STARTED, + {ATTR_ENTITY_ID: "script.hello", ATTR_NAME: "Hello Script"}, + ), + MockLazyEventPartialState( + EVENT_SCRIPT_STARTED, + {ATTR_ENTITY_ID: "script.bye", ATTR_NAME: "Bye Script"}, 
+ ), + ], + entity_attr_cache, + ) + ) + + assert event1["name"] == "Hello Script" + assert event1["domain"] == "script" + assert event1["message"] == "started" + assert event1["entity_id"] == "script.hello" + + assert event2["name"] == "Bye Script" + assert event2["domain"] == "script" + assert event2["message"] == "started" + assert event2["entity_id"] == "script.bye" diff --git a/tests/components/season/test_sensor.py b/tests/components/season/test_sensor.py index 279291d6da5..8918099405a 100644 --- a/tests/components/season/test_sensor.py +++ b/tests/components/season/test_sensor.py @@ -45,10 +45,7 @@ class TestSeason(unittest.TestCase): def setUp(self): """Set up things to be run when tests are started.""" self.hass = get_test_home_assistant() - - def tearDown(self): - """Stop everything that was started.""" - self.hass.stop() + self.addCleanup(self.hass.stop) def test_season_should_be_summer_northern_astronomical(self): """Test that season should be summer.""" diff --git a/tests/components/shell_command/test_init.py b/tests/components/shell_command/test_init.py index 8743ab27bd7..76f81ea72df 100644 --- a/tests/components/shell_command/test_init.py +++ b/tests/components/shell_command/test_init.py @@ -40,8 +40,9 @@ class TestShellCommand(unittest.TestCase): """ self.hass = get_test_home_assistant() asyncio.get_child_watcher().attach_loop(self.hass.loop) + self.addCleanup(self.tear_down_cleanup) - def tearDown(self): # pylint: disable=invalid-name + def tear_down_cleanup(self): """Stop everything that was started.""" self.hass.stop() diff --git a/tests/components/sigfox/test_sensor.py b/tests/components/sigfox/test_sensor.py index c4af07b5799..923392bbaf8 100644 --- a/tests/components/sigfox/test_sensor.py +++ b/tests/components/sigfox/test_sensor.py @@ -39,10 +39,7 @@ class TestSigfoxSensor(unittest.TestCase): def setUp(self): """Initialize values for this testcase class.""" self.hass = get_test_home_assistant() - - def tearDown(self): - """Stop everything that was started.""" - self.hass.stop() + self.addCleanup(self.hass.stop) def test_invalid_credentials(self): """Test for invalid credentials.""" diff --git a/tests/components/sleepiq/test_binary_sensor.py b/tests/components/sleepiq/test_binary_sensor.py index fbafe8aad7d..ab51a08d4c6 100644 --- a/tests/components/sleepiq/test_binary_sensor.py +++ b/tests/components/sleepiq/test_binary_sensor.py @@ -28,8 +28,9 @@ class TestSleepIQBinarySensorSetup(unittest.TestCase): self.password = "bar" self.config = {"username": self.username, "password": self.password} self.DEVICES = [] + self.addCleanup(self.tear_down_cleanup) - def tearDown(self): # pylint: disable=invalid-name + def tear_down_cleanup(self): """Stop everything that was started.""" self.hass.stop() diff --git a/tests/components/sleepiq/test_init.py b/tests/components/sleepiq/test_init.py index 9c1c0972fac..850d42763d1 100644 --- a/tests/components/sleepiq/test_init.py +++ b/tests/components/sleepiq/test_init.py @@ -37,8 +37,9 @@ class TestSleepIQ(unittest.TestCase): self.config = { "sleepiq": {"username": self.username, "password": self.password} } + self.addCleanup(self.tear_down_cleanup) - def tearDown(self): # pylint: disable=invalid-name + def tear_down_cleanup(self): """Stop everything that was started.""" self.hass.stop() diff --git a/tests/components/sleepiq/test_sensor.py b/tests/components/sleepiq/test_sensor.py index d94cd7e4063..3e5cbc657e1 100644 --- a/tests/components/sleepiq/test_sensor.py +++ b/tests/components/sleepiq/test_sensor.py @@ -28,8 +28,9 @@ class 
TestSleepIQSensorSetup(unittest.TestCase): self.password = "bar" self.config = {"username": self.username, "password": self.password} self.DEVICES = [] + self.addCleanup(self.tear_down_cleanup) - def tearDown(self): # pylint: disable=invalid-name + def tear_down_cleanup(self): """Stop everything that was started.""" self.hass.stop() diff --git a/tests/components/smappee/__init__.py b/tests/components/smappee/__init__.py new file mode 100644 index 00000000000..8e04b99fa37 --- /dev/null +++ b/tests/components/smappee/__init__.py @@ -0,0 +1 @@ +"""Tests for the Smappee integration.""" diff --git a/tests/components/smappee/test_config_flow.py b/tests/components/smappee/test_config_flow.py new file mode 100644 index 00000000000..e37733a1385 --- /dev/null +++ b/tests/components/smappee/test_config_flow.py @@ -0,0 +1,68 @@ +"""Test the Smappee config flow.""" +from homeassistant import config_entries, data_entry_flow, setup +from homeassistant.components.smappee.const import AUTHORIZE_URL, DOMAIN, TOKEN_URL +from homeassistant.const import CONF_CLIENT_ID, CONF_CLIENT_SECRET +from homeassistant.helpers import config_entry_oauth2_flow + +from tests.async_mock import patch +from tests.common import MockConfigEntry + +CLIENT_ID = "1234" +CLIENT_SECRET = "5678" + + +async def test_abort_if_existing_entry(hass): + """Check flow abort when an entry already exist.""" + MockConfigEntry(domain=DOMAIN).add_to_hass(hass) + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT + assert result["reason"] == "single_instance_allowed" + + +async def test_full_flow(hass, aiohttp_client, aioclient_mock): + """Check full flow.""" + assert await setup.async_setup_component( + hass, + DOMAIN, + { + DOMAIN: {CONF_CLIENT_ID: CLIENT_ID, CONF_CLIENT_SECRET: CLIENT_SECRET}, + "http": {"base_url": "https://example.com"}, + }, + ) + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + state = config_entry_oauth2_flow._encode_jwt(hass, {"flow_id": result["flow_id"]}) + + assert result["url"] == ( + f"{AUTHORIZE_URL}?response_type=code&client_id={CLIENT_ID}" + "&redirect_uri=https://example.com/auth/external/callback" + f"&state={state}" + ) + + client = await aiohttp_client(hass.http.app) + resp = await client.get(f"/auth/external/callback?code=abcd&state={state}") + assert resp.status == 200 + assert resp.headers["content-type"] == "text/html; charset=utf-8" + + aioclient_mock.post( + TOKEN_URL, + json={ + "refresh_token": "mock-refresh-token", + "access_token": "mock-access-token", + "type": "Bearer", + "expires_in": 60, + }, + ) + + with patch( + "homeassistant.components.smappee.async_setup_entry", return_value=True + ) as mock_setup: + await hass.config_entries.flow.async_configure(result["flow_id"]) + + assert len(hass.config_entries.async_entries(DOMAIN)) == 1 + assert len(mock_setup.mock_calls) == 1 diff --git a/tests/components/smtp/test_notify.py b/tests/components/smtp/test_notify.py index f74d47a21c1..d99ee82d4ef 100644 --- a/tests/components/smtp/test_notify.py +++ b/tests/components/smtp/test_notify.py @@ -34,8 +34,9 @@ class TestNotifySmtp(unittest.TestCase): "Home Assistant", 0, ) + self.addCleanup(self.tear_down_cleanup) - def tearDown(self): # pylint: disable=invalid-name + def tear_down_cleanup(self): """Stop down everything that was started.""" self.hass.stop() diff --git a/tests/components/snips/test_init.py 
b/tests/components/snips/test_init.py index 40fb30ddd19..dc69c10a7fb 100644 --- a/tests/components/snips/test_init.py +++ b/tests/components/snips/test_init.py @@ -10,18 +10,11 @@ from homeassistant.components.mqtt import MQTT_PUBLISH_SCHEMA import homeassistant.components.snips as snips from homeassistant.helpers.intent import ServiceIntentHandler, async_register -from tests.common import ( - async_fire_mqtt_message, - async_mock_intent, - async_mock_mqtt_component, - async_mock_service, -) +from tests.common import async_fire_mqtt_message, async_mock_intent, async_mock_service -async def test_snips_config(hass): +async def test_snips_config(hass, mqtt_mock): """Test Snips Config.""" - await async_mock_mqtt_component(hass) - result = await async_setup_component( hass, "snips", @@ -36,10 +29,8 @@ async def test_snips_config(hass): assert result -async def test_snips_bad_config(hass): +async def test_snips_bad_config(hass, mqtt_mock): """Test Snips bad config.""" - await async_mock_mqtt_component(hass) - result = await async_setup_component( hass, "snips", @@ -54,10 +45,8 @@ async def test_snips_bad_config(hass): assert not result -async def test_snips_config_feedback_on(hass): +async def test_snips_config_feedback_on(hass, mqtt_mock): """Test Snips Config.""" - await async_mock_mqtt_component(hass) - calls = async_mock_service(hass, "mqtt", "publish", MQTT_PUBLISH_SCHEMA) result = await async_setup_component( hass, "snips", {"snips": {"feedback_sounds": True}} @@ -74,10 +63,8 @@ async def test_snips_config_feedback_on(hass): assert calls[1].data["retain"] -async def test_snips_config_feedback_off(hass): +async def test_snips_config_feedback_off(hass, mqtt_mock): """Test Snips Config.""" - await async_mock_mqtt_component(hass) - calls = async_mock_service(hass, "mqtt", "publish", MQTT_PUBLISH_SCHEMA) result = await async_setup_component( hass, "snips", {"snips": {"feedback_sounds": False}} @@ -94,10 +81,8 @@ async def test_snips_config_feedback_off(hass): assert not calls[1].data["retain"] -async def test_snips_config_no_feedback(hass): +async def test_snips_config_no_feedback(hass, mqtt_mock): """Test Snips Config.""" - await async_mock_mqtt_component(hass) - calls = async_mock_service(hass, "snips", "say") result = await async_setup_component(hass, "snips", {"snips": {}}) assert result @@ -105,10 +90,8 @@ async def test_snips_config_no_feedback(hass): assert len(calls) == 0 -async def test_snips_intent(hass): +async def test_snips_intent(hass, mqtt_mock): """Test intent via Snips.""" - await async_mock_mqtt_component(hass) - result = await async_setup_component(hass, "snips", {"snips": {}}) assert result payload = """ @@ -152,10 +135,8 @@ async def test_snips_intent(hass): assert intent.text_input == "turn the lights green" -async def test_snips_service_intent(hass): +async def test_snips_service_intent(hass, mqtt_mock): """Test ServiceIntentHandler via Snips.""" - await async_mock_mqtt_component(hass) - hass.states.async_set("light.kitchen", "off") calls = async_mock_service(hass, "light", "turn_on") result = await async_setup_component(hass, "snips", {"snips": {}}) @@ -196,10 +177,8 @@ async def test_snips_service_intent(hass): assert "site_id" not in calls[0].data -async def test_snips_intent_with_duration(hass): +async def test_snips_intent_with_duration(hass, mqtt_mock): """Test intent with Snips duration.""" - await async_mock_mqtt_component(hass) - result = await async_setup_component(hass, "snips", {"snips": {}}) assert result payload = """ @@ -251,10 +230,8 @@ async def 
test_snips_intent_with_duration(hass): } -async def test_intent_speech_response(hass): +async def test_intent_speech_response(hass, mqtt_mock): """Test intent speech response via Snips.""" - await async_mock_mqtt_component(hass) - calls = async_mock_service(hass, "mqtt", "publish", MQTT_PUBLISH_SCHEMA) result = await async_setup_component(hass, "snips", {"snips": {}}) assert result @@ -292,10 +269,8 @@ async def test_intent_speech_response(hass): assert topic == "hermes/dialogueManager/endSession" -async def test_unknown_intent(hass, caplog): +async def test_unknown_intent(hass, caplog, mqtt_mock): """Test unknown intent.""" - await async_mock_mqtt_component(hass) - caplog.set_level(logging.WARNING) result = await async_setup_component(hass, "snips", {"snips": {}}) assert result @@ -315,10 +290,8 @@ async def test_unknown_intent(hass, caplog): assert "Received unknown intent unknownIntent" in caplog.text -async def test_snips_intent_user(hass): +async def test_snips_intent_user(hass, mqtt_mock): """Test intentName format user_XXX__intentName.""" - await async_mock_mqtt_component(hass) - result = await async_setup_component(hass, "snips", {"snips": {}}) assert result payload = """ @@ -341,10 +314,8 @@ async def test_snips_intent_user(hass): assert intent.intent_type == "Lights" -async def test_snips_intent_username(hass): +async def test_snips_intent_username(hass, mqtt_mock): """Test intentName format username:intentName.""" - await async_mock_mqtt_component(hass) - result = await async_setup_component(hass, "snips", {"snips": {}}) assert result payload = """ @@ -367,10 +338,8 @@ async def test_snips_intent_username(hass): assert intent.intent_type == "Lights" -async def test_snips_low_probability(hass, caplog): +async def test_snips_low_probability(hass, caplog, mqtt_mock): """Test intent via Snips.""" - await async_mock_mqtt_component(hass) - caplog.set_level(logging.WARNING) result = await async_setup_component( hass, "snips", {"snips": {"probability_threshold": 0.5}} @@ -393,10 +362,8 @@ async def test_snips_low_probability(hass, caplog): assert "Intent below probaility threshold 0.49 < 0.5" in caplog.text -async def test_intent_special_slots(hass): +async def test_intent_special_slots(hass, mqtt_mock): """Test intent special slot values via Snips.""" - await async_mock_mqtt_component(hass) - calls = async_mock_service(hass, "light", "turn_on") result = await async_setup_component(hass, "snips", {"snips": {}}) assert result diff --git a/tests/components/speedtestdotnet/__init__.py b/tests/components/speedtestdotnet/__init__.py new file mode 100644 index 00000000000..f67a633e25f --- /dev/null +++ b/tests/components/speedtestdotnet/__init__.py @@ -0,0 +1,55 @@ +"""Tests for SpeedTest.""" + +MOCK_SERVERS = { + 1: [ + { + "url": "http://server_1:8080/speedtest/upload.php", + "lat": "1", + "lon": "1", + "name": "Server1", + "country": "Country1", + "cc": "LL1", + "sponsor": "Server1", + "id": "1", + "host": "server1:8080", + "d": 1, + } + ], + 2: [ + { + "url": "http://server_2:8080/speedtest/upload.php", + "lat": "2", + "lon": "2", + "name": "Server2", + "country": "Country2", + "cc": "LL2", + "sponsor": "server2", + "id": "2", + "host": "server2:8080", + "d": 2, + } + ], +} + +MOCK_RESULTS = { + "download": 1024000, + "upload": 1024000, + "ping": 18.465, + "server": { + "url": "http://test_server:8080/speedtest/upload.php", + "lat": "00.0000", + "lon": "11.1111", + "name": "NAME", + "country": "Country", + "id": "8408", + "host": "test_server:8080", + "d": 1.4858909757493415, + "latency": 
18.465, + }, + "timestamp": "2020-05-29T07:28:57.908387Z", + "bytes_sent": 4194304, + "bytes_received": 19712300, + "share": None, +} + +MOCK_STATES = {"ping": "18.465", "download": "1.02", "upload": "1.02"} diff --git a/tests/components/speedtestdotnet/test_config_flow.py b/tests/components/speedtestdotnet/test_config_flow.py new file mode 100644 index 00000000000..943da319aef --- /dev/null +++ b/tests/components/speedtestdotnet/test_config_flow.py @@ -0,0 +1,128 @@ +"""Tests for SpeedTest config flow.""" +from datetime import timedelta + +import pytest +from speedtest import NoMatchedServers + +from homeassistant import data_entry_flow +from homeassistant.components import speedtestdotnet +from homeassistant.components.speedtestdotnet.const import ( + CONF_MANUAL, + CONF_SERVER_ID, + CONF_SERVER_NAME, + DOMAIN, + SENSOR_TYPES, +) +from homeassistant.const import CONF_MONITORED_CONDITIONS, CONF_SCAN_INTERVAL + +from . import MOCK_SERVERS + +from tests.async_mock import patch +from tests.common import MockConfigEntry + + +@pytest.fixture(name="mock_setup") +def mock_setup(): + """Mock entry setup.""" + with patch( + "homeassistant.components.speedtestdotnet.async_setup_entry", return_value=True, + ): + yield + + +async def test_flow_works(hass, mock_setup): + """Test user config.""" + result = await hass.config_entries.flow.async_init( + speedtestdotnet.DOMAIN, context={"source": "user"} + ) + assert result["type"] == data_entry_flow.RESULT_TYPE_FORM + assert result["step_id"] == "user" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], user_input={} + ) + assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY + assert result["title"] == "SpeedTest" + + +async def test_import_fails(hass, mock_setup): + """Test import step fails if server_id is not valid.""" + + with patch("speedtest.Speedtest") as mock_api: + mock_api.return_value.get_servers.side_effect = NoMatchedServers + result = await hass.config_entries.flow.async_init( + speedtestdotnet.DOMAIN, + context={"source": "import"}, + data={ + CONF_SERVER_ID: "223", + CONF_MANUAL: True, + CONF_SCAN_INTERVAL: timedelta(minutes=1), + CONF_MONITORED_CONDITIONS: list(SENSOR_TYPES), + }, + ) + assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT + assert result["reason"] == "wrong_server_id" + + +async def test_import_success(hass, mock_setup): + """Test import step is successful if server_id is valid.""" + + with patch("speedtest.Speedtest"): + result = await hass.config_entries.flow.async_init( + speedtestdotnet.DOMAIN, + context={"source": "import"}, + data={ + CONF_SERVER_ID: "1", + CONF_MANUAL: True, + CONF_SCAN_INTERVAL: timedelta(minutes=1), + CONF_MONITORED_CONDITIONS: list(SENSOR_TYPES), + }, + ) + + assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY + assert result["title"] == "SpeedTest" + assert result["data"][CONF_SERVER_ID] == "1" + assert result["data"][CONF_MANUAL] is True + assert result["data"][CONF_SCAN_INTERVAL] == 1 + + +async def test_options(hass): + """Test updating options.""" + entry = MockConfigEntry(domain=DOMAIN, title="SpeedTest", data={}, options={},) + entry.add_to_hass(hass) + + with patch("speedtest.Speedtest") as mock_api: + mock_api.return_value.get_servers.return_value = MOCK_SERVERS + await hass.config_entries.async_setup(entry.entry_id) + + result = await hass.config_entries.options.async_init(entry.entry_id) + assert result["type"] == data_entry_flow.RESULT_TYPE_FORM + assert result["step_id"] == "init" + + result = await 
hass.config_entries.options.async_configure( + result["flow_id"], + user_input={ + CONF_SERVER_NAME: "Country1 - Server1", + CONF_SCAN_INTERVAL: 30, + CONF_MANUAL: False, + }, + ) + + assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY + assert result["data"] == { + CONF_SERVER_NAME: "Country1 - Server1", + CONF_SERVER_ID: "1", + CONF_SCAN_INTERVAL: 30, + CONF_MANUAL: False, + } + + +async def test_integration_already_configured(hass): + """Test integration is already configured.""" + entry = MockConfigEntry(domain=DOMAIN, data={}, options={},) + entry.add_to_hass(hass) + result = await hass.config_entries.flow.async_init( + speedtestdotnet.DOMAIN, context={"source": "user"} + ) + assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT + assert result["reason"] == "one_instance_allowed" diff --git a/tests/components/speedtestdotnet/test_init.py b/tests/components/speedtestdotnet/test_init.py new file mode 100644 index 00000000000..7b7eed67c0c --- /dev/null +++ b/tests/components/speedtestdotnet/test_init.py @@ -0,0 +1,66 @@ +"""Tests for SpeedTest integration.""" +import speedtest + +from homeassistant import config_entries +from homeassistant.components import speedtestdotnet +from homeassistant.setup import async_setup_component + +from tests.async_mock import patch +from tests.common import MockConfigEntry + + +async def test_setup_with_config(hass): + """Test that we import the config and setup the integration.""" + config = { + speedtestdotnet.DOMAIN: { + speedtestdotnet.CONF_SERVER_ID: "1", + speedtestdotnet.CONF_MANUAL: True, + speedtestdotnet.CONF_SCAN_INTERVAL: "00:01:00", + } + } + with patch("speedtest.Speedtest"): + assert await async_setup_component(hass, speedtestdotnet.DOMAIN, config) + + +async def test_successful_config_entry(hass): + """Test that SpeedTestDotNet is configured successfully.""" + + entry = MockConfigEntry(domain=speedtestdotnet.DOMAIN, data={},) + entry.add_to_hass(hass) + + with patch("speedtest.Speedtest"), patch( + "homeassistant.config_entries.ConfigEntries.async_forward_entry_setup", + return_value=True, + ) as forward_entry_setup: + await hass.config_entries.async_setup(entry.entry_id) + + assert entry.state == config_entries.ENTRY_STATE_LOADED + assert forward_entry_setup.mock_calls[0][1] == (entry, "sensor",) + + +async def test_setup_failed(hass): + """Test SpeedTestDotNet failed due to an error.""" + + entry = MockConfigEntry(domain=speedtestdotnet.DOMAIN, data={},) + entry.add_to_hass(hass) + + with patch("speedtest.Speedtest", side_effect=speedtest.ConfigRetrievalError): + + await hass.config_entries.async_setup(entry.entry_id) + + assert entry.state == config_entries.ENTRY_STATE_SETUP_RETRY + + +async def test_unload_entry(hass): + """Test removing SpeedTestDotNet.""" + entry = MockConfigEntry(domain=speedtestdotnet.DOMAIN, data={},) + entry.add_to_hass(hass) + + with patch("speedtest.Speedtest"): + await hass.config_entries.async_setup(entry.entry_id) + + assert await hass.config_entries.async_unload(entry.entry_id) + await hass.async_block_till_done() + + assert entry.state == config_entries.ENTRY_STATE_NOT_LOADED + assert speedtestdotnet.DOMAIN not in hass.data diff --git a/tests/components/speedtestdotnet/test_sensor.py b/tests/components/speedtestdotnet/test_sensor.py new file mode 100644 index 00000000000..5c1606f0f4b --- /dev/null +++ b/tests/components/speedtestdotnet/test_sensor.py @@ -0,0 +1,30 @@ +"""Tests for SpeedTest sensors.""" +from homeassistant.components import speedtestdotnet +from 
homeassistant.components.sensor import DOMAIN as SENSOR_DOMAIN +from homeassistant.components.speedtestdotnet.const import DEFAULT_NAME, SENSOR_TYPES + +from . import MOCK_RESULTS, MOCK_SERVERS, MOCK_STATES + +from tests.async_mock import patch +from tests.common import MockConfigEntry + + +async def test_speedtestdotnet_sensors(hass): + """Test sensors created for speedtestdotnet integration.""" + entry = MockConfigEntry(domain=speedtestdotnet.DOMAIN, data={}) + entry.add_to_hass(hass) + + with patch("speedtest.Speedtest") as mock_api: + mock_api.return_value.get_best_server.return_value = MOCK_SERVERS[1][0] + mock_api.return_value.results.dict.return_value = MOCK_RESULTS + + await hass.config_entries.async_setup(entry.entry_id) + await hass.async_block_till_done() + + assert len(hass.states.async_entity_ids(SENSOR_DOMAIN)) == 3 + + for sensor_type in SENSOR_TYPES: + sensor = hass.states.get( + f"sensor.{DEFAULT_NAME}_{SENSOR_TYPES[sensor_type][0]}" + ) + assert sensor.state == MOCK_STATES[sensor_type] diff --git a/tests/components/splunk/test_init.py b/tests/components/splunk/test_init.py index 256c78af502..86de865bc0d 100644 --- a/tests/components/splunk/test_init.py +++ b/tests/components/splunk/test_init.py @@ -19,8 +19,9 @@ class TestSplunk(unittest.TestCase): def setUp(self): # pylint: disable=invalid-name """Set up things to be run when tests are started.""" self.hass = get_test_home_assistant() + self.addCleanup(self.tear_down_cleanup) - def tearDown(self): # pylint: disable=invalid-name + def tear_down_cleanup(self): """Stop everything that was started.""" self.hass.stop() @@ -57,6 +58,7 @@ class TestSplunk(unittest.TestCase): def _setup(self, mock_requests): """Test the setup.""" + # pylint: disable=attribute-defined-outside-init self.mock_post = mock_requests.post self.mock_request_exception = Exception mock_requests.exceptions.RequestException = self.mock_request_exception @@ -114,7 +116,7 @@ class TestSplunk(unittest.TestCase): ) self.mock_post.reset_mock() - def _setup_with_filter(self): + def _setup_with_filter(self, addl_filters=None): """Test the setup.""" config = { "splunk": { @@ -127,12 +129,15 @@ class TestSplunk(unittest.TestCase): }, } } + if addl_filters: + config["splunk"]["filter"].update(addl_filters) setup_component(self.hass, splunk.DOMAIN, config) @mock.patch.object(splunk, "post_request") def test_splunk_entityfilter(self, mock_requests): """Test event listener.""" + # pylint: disable=no-member self._setup_with_filter() testdata = [ @@ -151,3 +156,27 @@ class TestSplunk(unittest.TestCase): assert splunk.post_request.called splunk.post_request.reset_mock() + + @mock.patch.object(splunk, "post_request") + def test_splunk_entityfilter_with_glob_filter(self, mock_requests): + """Test event listener.""" + # pylint: disable=no-member + self._setup_with_filter({"exclude_entity_globs": ["*.skip_*"]}) + + testdata = [ + {"entity_id": "other_domain.other_entity", "filter_expected": False}, + {"entity_id": "other_domain.excluded_entity", "filter_expected": True}, + {"entity_id": "excluded_domain.other_entity", "filter_expected": True}, + {"entity_id": "test.skip_me", "filter_expected": True}, + ] + + for test in testdata: + mock_state_change_event(self.hass, State(test["entity_id"], "on")) + self.hass.block_till_done() + + if test["filter_expected"]: + assert not splunk.post_request.called + else: + assert splunk.post_request.called + + splunk.post_request.reset_mock() diff --git a/tests/components/squeezebox/__init__.py b/tests/components/squeezebox/__init__.py new 
file mode 100644 index 00000000000..34c0363292d --- /dev/null +++ b/tests/components/squeezebox/__init__.py @@ -0,0 +1 @@ +"""Tests for the Logitech Squeezebox integration.""" diff --git a/tests/components/squeezebox/test_config_flow.py b/tests/components/squeezebox/test_config_flow.py new file mode 100644 index 00000000000..ec5a649fdc3 --- /dev/null +++ b/tests/components/squeezebox/test_config_flow.py @@ -0,0 +1,263 @@ +"""Test the Logitech Squeezebox config flow.""" +from asynctest import patch +from pysqueezebox import Server + +from homeassistant import config_entries +from homeassistant.components.squeezebox.const import DOMAIN +from homeassistant.const import ( + CONF_HOST, + CONF_PASSWORD, + CONF_PORT, + CONF_USERNAME, + HTTP_UNAUTHORIZED, +) +from homeassistant.data_entry_flow import ( + RESULT_TYPE_ABORT, + RESULT_TYPE_CREATE_ENTRY, + RESULT_TYPE_FORM, +) + +from tests.common import MockConfigEntry + +HOST = "1.1.1.1" +HOST2 = "2.2.2.2" +PORT = 9000 +UUID = "test-uuid" +UNKNOWN_ERROR = "1234" + + +async def mock_discover(_discovery_callback): + """Mock discovering a Logitech Media Server.""" + _discovery_callback(Server(None, HOST, PORT, uuid=UUID)) + + +async def mock_failed_discover(_discovery_callback): + """Mock unsuccessful discovery by doing nothing.""" + + +async def patch_async_query_unauthorized(self, *args): + """Mock an unauthorized query.""" + self.http_status = HTTP_UNAUTHORIZED + return False + + +async def test_user_form(hass): + """Test user-initiated flow, including discovery and the edit step.""" + with patch("pysqueezebox.Server.async_query", return_value={"uuid": UUID},), patch( + "homeassistant.components.squeezebox.async_setup", return_value=True + ) as mock_setup, patch( + "homeassistant.components.squeezebox.async_setup_entry", return_value=True, + ) as mock_setup_entry, patch( + "homeassistant.components.squeezebox.config_flow.async_discover", mock_discover + ): + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + assert result["type"] == RESULT_TYPE_FORM + assert result["step_id"] == "edit" + assert CONF_HOST in result["data_schema"].schema + for key in result["data_schema"].schema: + if key == CONF_HOST: + assert key.description == {"suggested_value": HOST} + + # test the edit step + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_HOST: HOST, CONF_PORT: PORT, CONF_USERNAME: "", CONF_PASSWORD: ""}, + ) + assert result["type"] == RESULT_TYPE_CREATE_ENTRY + assert result["title"] == HOST + assert result["data"] == { + CONF_HOST: HOST, + CONF_PORT: PORT, + CONF_USERNAME: "", + CONF_PASSWORD: "", + } + + await hass.async_block_till_done() + assert len(mock_setup.mock_calls) == 1 + assert len(mock_setup_entry.mock_calls) == 1 + + +async def test_user_form_timeout(hass): + """Test we handle server search timeout.""" + with patch( + "homeassistant.components.squeezebox.config_flow.async_discover", + mock_failed_discover, + ), patch("homeassistant.components.squeezebox.config_flow.TIMEOUT", 0.1): + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + assert result["type"] == RESULT_TYPE_FORM + assert result["errors"] == {"base": "no_server_found"} + + # simulate manual input of host + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], {CONF_HOST: HOST2} + ) + assert result2["type"] == RESULT_TYPE_FORM + assert result2["step_id"] == "edit" + assert CONF_HOST in 
result2["data_schema"].schema + for key in result2["data_schema"].schema: + if key == CONF_HOST: + assert key.description == {"suggested_value": HOST2} + + +async def test_user_form_duplicate(hass): + """Test duplicate discovered servers are skipped.""" + with patch( + "homeassistant.components.squeezebox.config_flow.async_discover", mock_discover, + ), patch("homeassistant.components.squeezebox.config_flow.TIMEOUT", 0.1), patch( + "homeassistant.components.squeezebox.async_setup", return_value=True + ), patch( + "homeassistant.components.squeezebox.async_setup_entry", return_value=True, + ): + entry = MockConfigEntry(domain=DOMAIN, unique_id=UUID) + await hass.config_entries.async_add(entry) + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + assert result["type"] == RESULT_TYPE_FORM + assert result["errors"] == {"base": "no_server_found"} + + +async def test_form_invalid_auth(hass): + """Test we handle invalid auth.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": "edit"} + ) + + async def patch_async_query(self, *args): + self.http_status = HTTP_UNAUTHORIZED + return False + + with patch("pysqueezebox.Server.async_query", new=patch_async_query): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_HOST: HOST, + CONF_PORT: PORT, + CONF_USERNAME: "test-username", + CONF_PASSWORD: "test-password", + }, + ) + + assert result["type"] == RESULT_TYPE_FORM + assert result["errors"] == {"base": "invalid_auth"} + + +async def test_form_cannot_connect(hass): + """Test we handle cannot connect error.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": "edit"} + ) + + with patch( + "pysqueezebox.Server.async_query", return_value=False, + ): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_HOST: HOST, + CONF_PORT: PORT, + CONF_USERNAME: "test-username", + CONF_PASSWORD: "test-password", + }, + ) + + assert result["type"] == RESULT_TYPE_FORM + assert result["errors"] == {"base": "cannot_connect"} + + +async def test_discovery(hass): + """Test handling of discovered server.""" + with patch( + "pysqueezebox.Server.async_query", return_value={"uuid": UUID}, + ): + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": config_entries.SOURCE_DISCOVERY}, + data={CONF_HOST: HOST, CONF_PORT: PORT, "uuid": UUID}, + ) + assert result["type"] == RESULT_TYPE_FORM + assert result["step_id"] == "edit" + + +async def test_discovery_no_uuid(hass): + """Test handling of discovered server with unavailable uuid.""" + with patch("pysqueezebox.Server.async_query", new=patch_async_query_unauthorized): + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": config_entries.SOURCE_DISCOVERY}, + data={CONF_HOST: HOST, CONF_PORT: PORT}, + ) + assert result["type"] == RESULT_TYPE_FORM + assert result["step_id"] == "edit" + + +async def test_import(hass): + """Test handling of configuration imported.""" + with patch("pysqueezebox.Server.async_query", return_value={"uuid": UUID},), patch( + "homeassistant.components.squeezebox.async_setup", return_value=True + ) as mock_setup, patch( + "homeassistant.components.squeezebox.async_setup_entry", return_value=True, + ) as mock_setup_entry: + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": config_entries.SOURCE_IMPORT}, + data={CONF_HOST: HOST, CONF_PORT: PORT}, + ) + assert 
result["type"] == RESULT_TYPE_CREATE_ENTRY + + await hass.async_block_till_done() + assert len(mock_setup.mock_calls) == 1 + assert len(mock_setup_entry.mock_calls) == 1 + + +async def test_import_bad_host(hass): + """Test handling of configuration imported with bad host.""" + with patch("pysqueezebox.Server.async_query", return_value=False): + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": config_entries.SOURCE_IMPORT}, + data={CONF_HOST: HOST, CONF_PORT: PORT}, + ) + assert result["type"] == RESULT_TYPE_ABORT + assert result["reason"] == "cannot_connect" + + +async def test_import_bad_auth(hass): + """Test handling of configuration import with bad authentication.""" + with patch("pysqueezebox.Server.async_query", new=patch_async_query_unauthorized): + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": config_entries.SOURCE_IMPORT}, + data={ + CONF_HOST: HOST, + CONF_PORT: PORT, + CONF_USERNAME: "test", + CONF_PASSWORD: "bad", + }, + ) + assert result["type"] == RESULT_TYPE_ABORT + assert result["reason"] == "invalid_auth" + + +async def test_import_existing(hass): + """Test handling of configuration import of existing server.""" + with patch( + "homeassistant.components.squeezebox.async_setup", return_value=True + ), patch( + "homeassistant.components.squeezebox.async_setup_entry", return_value=True, + ), patch( + "pysqueezebox.Server.async_query", return_value={"ip": HOST, "uuid": UUID}, + ): + entry = MockConfigEntry(domain=DOMAIN, unique_id=UUID) + await hass.config_entries.async_add(entry) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": config_entries.SOURCE_IMPORT}, + data={CONF_HOST: HOST, CONF_PORT: PORT}, + ) + assert result["type"] == RESULT_TYPE_ABORT + assert result["reason"] == "already_configured" diff --git a/tests/components/statistics/test_sensor.py b/tests/components/statistics/test_sensor.py index ffbf4d9fcd8..2234df8e5fe 100644 --- a/tests/components/statistics/test_sensor.py +++ b/tests/components/statistics/test_sensor.py @@ -3,8 +3,6 @@ from datetime import datetime, timedelta import statistics import unittest -import pytest - from homeassistant.components import recorder from homeassistant.components.statistics.sensor import StatisticsSensor from homeassistant.const import ATTR_UNIT_OF_MEASUREMENT, STATE_UNKNOWN, TEMP_CELSIUS @@ -17,6 +15,7 @@ from tests.common import ( get_test_home_assistant, init_recorder_component, ) +from tests.components.recorder.common import wait_recording_done class TestStatisticsSensor(unittest.TestCase): @@ -321,11 +320,12 @@ class TestStatisticsSensor(unittest.TestCase): ) == state.attributes.get("max_age") assert self.change_rate == state.attributes.get("change_rate") - @pytest.mark.skip("Flaky in CI") def test_initialize_from_database(self): """Test initializing the statistics from the database.""" # enable the recorder init_recorder_component(self.hass) + self.hass.block_till_done() + self.hass.data[recorder.DATA_INSTANCE].block_till_done() # store some values for value in self.values: self.hass.states.set( @@ -333,7 +333,7 @@ class TestStatisticsSensor(unittest.TestCase): ) self.hass.block_till_done() # wait for the recorder to really store the data - self.hass.data[recorder.DATA_INSTANCE].block_till_done() + wait_recording_done(self.hass) # only now create the statistics component, so that it must read the # data from the database assert setup_component( @@ -357,7 +357,6 @@ class TestStatisticsSensor(unittest.TestCase): state = 
self.hass.states.get("sensor.test") assert str(self.mean) == state.state - @pytest.mark.skip("Flaky in CI") def test_initialize_from_database_with_maxage(self): """Test initializing the statistics from the database.""" mock_data = { @@ -381,6 +380,8 @@ class TestStatisticsSensor(unittest.TestCase): # enable the recorder init_recorder_component(self.hass) + self.hass.block_till_done() + self.hass.data[recorder.DATA_INSTANCE].block_till_done() with patch( "homeassistant.components.statistics.sensor.dt_util.utcnow", new=mock_now @@ -397,7 +398,7 @@ class TestStatisticsSensor(unittest.TestCase): mock_data["return_time"] += timedelta(hours=1) # wait for the recorder to really store the data - self.hass.data[recorder.DATA_INSTANCE].block_till_done() + wait_recording_done(self.hass) # only now create the statistics component, so that it must read # the data from the database assert setup_component( diff --git a/tests/components/statsd/test_init.py b/tests/components/statsd/test_init.py index 4c7e9d29fee..9564de85a97 100644 --- a/tests/components/statsd/test_init.py +++ b/tests/components/statsd/test_init.py @@ -19,8 +19,9 @@ class TestStatsd(unittest.TestCase): def setUp(self): # pylint: disable=invalid-name """Set up things to be run when tests are started.""" self.hass = get_test_home_assistant() + self.addCleanup(self.tear_down_cleanup) - def tearDown(self): # pylint: disable=invalid-name + def tear_down_cleanup(self): """Stop everything that was started.""" self.hass.stop() diff --git a/tests/components/switch/test_init.py b/tests/components/switch/test_init.py index 6605d19d46f..5853e5faee2 100644 --- a/tests/components/switch/test_init.py +++ b/tests/components/switch/test_init.py @@ -22,11 +22,7 @@ class TestSwitch(unittest.TestCase): platform.init() # Switch 1 is ON, switch 2 is OFF self.switch_1, self.switch_2, self.switch_3 = platform.ENTITIES - - # pylint: disable=invalid-name - def tearDown(self): - """Stop everything that was started.""" - self.hass.stop() + self.addCleanup(self.hass.stop) def test_methods(self): """Test is_on, turn_on, turn_off methods.""" diff --git a/tests/components/tellduslive/test_config_flow.py b/tests/components/tellduslive/test_config_flow.py index 6ee265de8d5..a8f188fffc7 100644 --- a/tests/components/tellduslive/test_config_flow.py +++ b/tests/components/tellduslive/test_config_flow.py @@ -13,6 +13,7 @@ from homeassistant.components.tellduslive import ( SCAN_INTERVAL, config_flow, ) +from homeassistant.config_entries import SOURCE_DISCOVERY from homeassistant.const import CONF_HOST from tests.common import MockConfigEntry, mock_coro @@ -73,6 +74,7 @@ async def test_abort_if_already_setup(hass): async def test_full_flow_implementation(hass, mock_tellduslive): """Test registering an implementation and finishing flow works.""" flow = init_config_flow(hass) + flow.context = {"source": SOURCE_DISCOVERY} result = await flow.async_step_discovery(["localhost", "tellstick"]) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "user" @@ -166,6 +168,7 @@ async def test_step_import_load_json(hass, mock_tellduslive): async def test_step_disco_no_local_api(hass, mock_tellduslive): """Test that we trigger when configuring from discovery, not supporting local api.""" flow = init_config_flow(hass) + flow.context = {"source": SOURCE_DISCOVERY} result = await flow.async_step_discovery(["localhost", "tellstick"]) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM @@ -242,7 +245,7 @@ async def test_discovery_already_configured(hass, 
mock_tellduslive): """Test abort if already configured fires from discovery.""" MockConfigEntry(domain="tellduslive", data={"host": "some-host"}).add_to_hass(hass) flow = init_config_flow(hass) + flow.context = {"source": SOURCE_DISCOVERY} - result = await flow.async_step_discovery(["some-host", ""]) - assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT - assert result["reason"] == "already_setup" + with pytest.raises(data_entry_flow.AbortFlow): + result = await flow.async_step_discovery(["some-host", ""]) diff --git a/tests/components/template/test_vacuum.py b/tests/components/template/test_vacuum.py index c8ae5bdce51..19cd9f0a8ee 100644 --- a/tests/components/template/test_vacuum.py +++ b/tests/components/template/test_vacuum.py @@ -282,6 +282,70 @@ async def test_invalid_availability_template_keeps_component_available(hass, cap assert ("UndefinedError: 'x' is undefined") in caplog.text +async def test_attribute_templates(hass, calls): + """Test attribute_templates template.""" + assert await setup.async_setup_component( + hass, + "vacuum", + { + "vacuum": { + "platform": "template", + "vacuums": { + "test_template_vacuum": { + "value_template": "{{ 'cleaning' }}", + "start": {"service": "script.vacuum_start"}, + "attribute_templates": { + "test_attribute": "It {{ states.sensor.test_state.state }}." + }, + } + }, + } + }, + ) + + await hass.async_block_till_done() + await hass.async_start() + await hass.async_block_till_done() + + state = hass.states.get("vacuum.test_template_vacuum") + assert state.attributes["test_attribute"] == "It ." + + hass.states.async_set("sensor.test_state", "Works") + await hass.async_block_till_done() + await hass.helpers.entity_component.async_update_entity( + "vacuum.test_template_vacuum" + ) + state = hass.states.get("vacuum.test_template_vacuum") + assert state.attributes["test_attribute"] == "It Works." + + +async def test_invalid_attribute_template(hass, caplog): + """Test that errors are logged if rendering template fails.""" + assert await setup.async_setup_component( + hass, + "vacuum", + { + "vacuum": { + "platform": "template", + "vacuums": { + "invalid_template": { + "value_template": "{{ states('input_select.state') }}", + "start": {"service": "script.vacuum_start"}, + "attribute_templates": { + "test_attribute": "{{ this_function_does_not_exist() }}" + }, + } + }, + } + }, + ) + await hass.async_block_till_done() + assert len(hass.states.async_all()) == 1 + await hass.helpers.entity_component.async_update_entity("vacuum.invalid_template") + + assert ("Error rendering attribute test_attribute") in caplog.text + + # End of template tests # @@ -529,6 +593,9 @@ async def _register_components(hass): }, }, "fan_speeds": ["low", "medium", "high"], + "attribute_templates": { + "test_attribute": "It {{ states.sensor.test_state.state }}." 
+ }, } assert await setup.async_setup_component( diff --git a/tests/components/tile/__init__.py b/tests/components/tile/__init__.py new file mode 100644 index 00000000000..5f26eb01ce0 --- /dev/null +++ b/tests/components/tile/__init__.py @@ -0,0 +1 @@ +"""Define tests for the Tile component.""" diff --git a/tests/components/tile/test_config_flow.py b/tests/components/tile/test_config_flow.py new file mode 100644 index 00000000000..7b9a80b427d --- /dev/null +++ b/tests/components/tile/test_config_flow.py @@ -0,0 +1,94 @@ +"""Define tests for the Tile config flow.""" +from pytile.errors import TileError + +from homeassistant import data_entry_flow +from homeassistant.components.tile import DOMAIN +from homeassistant.config_entries import SOURCE_IMPORT, SOURCE_USER +from homeassistant.const import CONF_PASSWORD, CONF_USERNAME + +from tests.async_mock import patch +from tests.common import MockConfigEntry + + +async def test_duplicate_error(hass): + """Test that errors are shown when duplicates are added.""" + conf = { + CONF_USERNAME: "user@host.com", + CONF_PASSWORD: "123abc", + } + + MockConfigEntry(domain=DOMAIN, unique_id="user@host.com", data=conf).add_to_hass( + hass + ) + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER}, data=conf + ) + + assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT + assert result["reason"] == "already_configured" + + +async def test_invalid_credentials(hass): + """Test that invalid credentials key throws an error.""" + conf = { + CONF_USERNAME: "user@host.com", + CONF_PASSWORD: "123abc", + } + + with patch( + "homeassistant.components.tile.config_flow.async_login", side_effect=TileError, + ): + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER}, data=conf + ) + assert result["type"] == data_entry_flow.RESULT_TYPE_FORM + assert result["errors"] == {"base": "invalid_credentials"} + + +async def test_step_import(hass): + """Test that the import step works.""" + conf = { + CONF_USERNAME: "user@host.com", + CONF_PASSWORD: "123abc", + } + + with patch( + "homeassistant.components.tile.async_setup_entry", return_value=True + ), patch("homeassistant.components.tile.config_flow.async_login"): + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_IMPORT}, data=conf + ) + assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY + assert result["title"] == "user@host.com" + assert result["data"] == { + CONF_USERNAME: "user@host.com", + CONF_PASSWORD: "123abc", + } + + +async def test_step_user(hass): + """Test that the user step works.""" + conf = { + CONF_USERNAME: "user@host.com", + CONF_PASSWORD: "123abc", + } + + with patch( + "homeassistant.components.tile.async_setup_entry", return_value=True + ), patch("homeassistant.components.tile.config_flow.async_login"): + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + assert result["type"] == data_entry_flow.RESULT_TYPE_FORM + assert result["step_id"] == "user" + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER}, data=conf + ) + assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY + assert result["title"] == "user@host.com" + assert result["data"] == { + CONF_USERNAME: "user@host.com", + CONF_PASSWORD: "123abc", + } diff --git a/tests/components/time_date/test_sensor.py b/tests/components/time_date/test_sensor.py index 80a081cd524..c5e8ccecc40 100644 --- 
a/tests/components/time_date/test_sensor.py +++ b/tests/components/time_date/test_sensor.py @@ -23,8 +23,9 @@ class TestTimeDateSensor(unittest.TestCase): """Initialize values for this testcase class.""" self.hass = get_test_home_assistant() self.DEFAULT_TIME_ZONE = dt_util.DEFAULT_TIME_ZONE + self.addCleanup(self.tear_down_cleanup) - def tearDown(self): + def tear_down_cleanup(self): """Stop everything that was started.""" dt_util.set_default_time_zone(self.DEFAULT_TIME_ZONE) self.hass.stop() diff --git a/tests/components/toon/test_config_flow.py b/tests/components/toon/test_config_flow.py index 4ba74245876..e9ad7480928 100644 --- a/tests/components/toon/test_config_flow.py +++ b/tests/components/toon/test_config_flow.py @@ -1,182 +1,290 @@ """Tests for the Toon config flow.""" -import pytest -from toonapilib.toonapilibexceptions import ( - AgreementsRetrievalError, - InvalidConsumerKey, - InvalidConsumerSecret, - InvalidCredentials, -) +from toonapi import Agreement, ToonError from homeassistant import data_entry_flow -from homeassistant.components.toon import config_flow -from homeassistant.components.toon.const import CONF_DISPLAY, CONF_TENANT, DOMAIN -from homeassistant.const import ( - CONF_CLIENT_ID, - CONF_CLIENT_SECRET, - CONF_PASSWORD, - CONF_USERNAME, -) +from homeassistant.components.toon.const import CONF_AGREEMENT, CONF_MIGRATE, DOMAIN +from homeassistant.config import async_process_ha_core_config +from homeassistant.config_entries import SOURCE_IMPORT, SOURCE_USER +from homeassistant.const import CONF_CLIENT_ID, CONF_CLIENT_SECRET +from homeassistant.helpers import config_entry_oauth2_flow from homeassistant.setup import async_setup_component from tests.async_mock import patch from tests.common import MockConfigEntry -FIXTURE_APP = { - DOMAIN: {CONF_CLIENT_ID: "1234567890abcdef", CONF_CLIENT_SECRET: "1234567890abcdef"} -} - -FIXTURE_CREDENTIALS = { - CONF_USERNAME: "john.doe", - CONF_PASSWORD: "secret", - CONF_TENANT: "eneco", -} - -FIXTURE_DISPLAY = {CONF_DISPLAY: "display1"} - - -@pytest.fixture -def mock_toonapilib(): - """Mock toonapilib.""" - with patch("homeassistant.components.toon.config_flow.Toon") as Toon: - Toon().display_names = [FIXTURE_DISPLAY[CONF_DISPLAY]] - yield Toon - async def setup_component(hass): """Set up Toon component.""" + await async_process_ha_core_config( + hass, {"external_url": "https://example.com"}, + ) + with patch("os.path.isfile", return_value=False): - assert await async_setup_component(hass, DOMAIN, FIXTURE_APP) + assert await async_setup_component( + hass, + DOMAIN, + {DOMAIN: {CONF_CLIENT_ID: "client", CONF_CLIENT_SECRET: "secret"}}, + ) await hass.async_block_till_done() -async def test_abort_if_no_app_configured(hass): +async def test_abort_if_no_configuration(hass): """Test abort if no app is configured.""" - flow = config_flow.ToonFlowHandler() - flow.hass = hass - result = await flow.async_step_user() + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT - assert result["reason"] == "no_app" + assert result["reason"] == "missing_configuration" -async def test_show_authenticate_form(hass): - """Test that the authentication form is served.""" - await setup_component(hass) - - flow = config_flow.ToonFlowHandler() - flow.hass = hass - result = await flow.async_step_user(user_input=None) - - assert result["type"] == data_entry_flow.RESULT_TYPE_FORM - assert result["step_id"] == "authenticate" - - -@pytest.mark.parametrize( - 
"side_effect,reason", - [ - (InvalidConsumerKey, CONF_CLIENT_ID), - (InvalidConsumerSecret, CONF_CLIENT_SECRET), - (AgreementsRetrievalError, "no_agreements"), - (Exception, "unknown_auth_fail"), - ], -) -async def test_toon_abort(hass, mock_toonapilib, side_effect, reason): - """Test we abort on Toon error.""" - await setup_component(hass) - - flow = config_flow.ToonFlowHandler() - flow.hass = hass - - mock_toonapilib.side_effect = side_effect - - result = await flow.async_step_authenticate(user_input=FIXTURE_CREDENTIALS) - - assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT - assert result["reason"] == reason - - -async def test_invalid_credentials(hass, mock_toonapilib): - """Test we show authentication form on Toon auth error.""" - mock_toonapilib.side_effect = InvalidCredentials - - await setup_component(hass) - - flow = config_flow.ToonFlowHandler() - flow.hass = hass - result = await flow.async_step_user(user_input=FIXTURE_CREDENTIALS) - - assert result["type"] == data_entry_flow.RESULT_TYPE_FORM - assert result["step_id"] == "authenticate" - assert result["errors"] == {"base": "credentials"} - - -async def test_full_flow_implementation(hass, mock_toonapilib): +async def test_full_flow_implementation(hass, aiohttp_client, aioclient_mock): """Test registering an integration and finishing flow works.""" await setup_component(hass) - flow = config_flow.ToonFlowHandler() - flow.hass = hass - result = await flow.async_step_user(user_input=None) + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + assert result["type"] == data_entry_flow.RESULT_TYPE_FORM - assert result["step_id"] == "authenticate" + assert result["step_id"] == "pick_implementation" - result = await flow.async_step_user(user_input=FIXTURE_CREDENTIALS) - assert result["type"] == data_entry_flow.RESULT_TYPE_FORM - assert result["step_id"] == "display" + # pylint: disable=protected-access + state = config_entry_oauth2_flow._encode_jwt(hass, {"flow_id": result["flow_id"]}) - result = await flow.async_step_display(user_input=FIXTURE_DISPLAY) - assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY - assert result["title"] == FIXTURE_DISPLAY[CONF_DISPLAY] - assert result["data"][CONF_USERNAME] == FIXTURE_CREDENTIALS[CONF_USERNAME] - assert result["data"][CONF_PASSWORD] == FIXTURE_CREDENTIALS[CONF_PASSWORD] - assert result["data"][CONF_TENANT] == FIXTURE_CREDENTIALS[CONF_TENANT] - assert result["data"][CONF_DISPLAY] == FIXTURE_DISPLAY[CONF_DISPLAY] + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], {"implementation": "eneco"} + ) + + assert result2["type"] == data_entry_flow.RESULT_TYPE_EXTERNAL_STEP + assert result2["url"] == ( + "https://api.toon.eu/authorize" + "?response_type=code&client_id=client" + "&redirect_uri=https://example.com/auth/external/callback" + f"&state={state}" + "&tenant_id=eneco&issuer=identity.toon.eu" + ) + + client = await aiohttp_client(hass.http.app) + resp = await client.get(f"/auth/external/callback?code=abcd&state={state}") + assert resp.status == 200 + assert resp.headers["content-type"] == "text/html; charset=utf-8" + + aioclient_mock.post( + "https://api.toon.eu/token", + json={ + "refresh_token": "mock-refresh-token", + "access_token": "mock-access-token", + "type": "Bearer", + "expires_in": 60, + }, + ) + + with patch("toonapi.Toon.agreements", return_value=[Agreement(agreement_id=123)]): + result3 = await hass.config_entries.flow.async_configure(result["flow_id"]) + + assert 
result3["data"]["auth_implementation"] == "eneco" + assert result3["data"]["agreement_id"] == 123 + result3["data"]["token"].pop("expires_at") + assert result3["data"]["token"] == { + "refresh_token": "mock-refresh-token", + "access_token": "mock-access-token", + "type": "Bearer", + "expires_in": 60, + } -async def test_no_displays(hass, mock_toonapilib): +async def test_no_agreements(hass, aiohttp_client, aioclient_mock): """Test abort when there are no displays.""" await setup_component(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) - mock_toonapilib().display_names = [] + # pylint: disable=protected-access + state = config_entry_oauth2_flow._encode_jwt(hass, {"flow_id": result["flow_id"]}) + await hass.config_entries.flow.async_configure( + result["flow_id"], {"implementation": "eneco"} + ) - flow = config_flow.ToonFlowHandler() - flow.hass = hass - await flow.async_step_user(user_input=FIXTURE_CREDENTIALS) + client = await aiohttp_client(hass.http.app) + await client.get(f"/auth/external/callback?code=abcd&state={state}") + aioclient_mock.post( + "https://api.toon.eu/token", + json={ + "refresh_token": "mock-refresh-token", + "access_token": "mock-access-token", + "type": "Bearer", + "expires_in": 60, + }, + ) - result = await flow.async_step_display(user_input=None) + with patch("toonapi.Toon.agreements", return_value=[]): + result3 = await hass.config_entries.flow.async_configure(result["flow_id"]) - assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT - assert result["reason"] == "no_displays" + assert result3["type"] == data_entry_flow.RESULT_TYPE_ABORT + assert result3["reason"] == "no_agreements" -async def test_display_already_exists(hass, mock_toonapilib): +async def test_multiple_agreements(hass, aiohttp_client, aioclient_mock): + """Test abort when there are no displays.""" + await setup_component(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + + # pylint: disable=protected-access + state = config_entry_oauth2_flow._encode_jwt(hass, {"flow_id": result["flow_id"]}) + await hass.config_entries.flow.async_configure( + result["flow_id"], {"implementation": "eneco"} + ) + + client = await aiohttp_client(hass.http.app) + await client.get(f"/auth/external/callback?code=abcd&state={state}") + + aioclient_mock.post( + "https://api.toon.eu/token", + json={ + "refresh_token": "mock-refresh-token", + "access_token": "mock-access-token", + "type": "Bearer", + "expires_in": 60, + }, + ) + + with patch( + "toonapi.Toon.agreements", + return_value=[Agreement(agreement_id=1), Agreement(agreement_id=2)], + ): + result3 = await hass.config_entries.flow.async_configure(result["flow_id"]) + + assert result3["type"] == data_entry_flow.RESULT_TYPE_FORM + assert result3["step_id"] == "agreement" + + result4 = await hass.config_entries.flow.async_configure( + result["flow_id"], {CONF_AGREEMENT: "None None, None"} + ) + assert result4["data"]["auth_implementation"] == "eneco" + assert result4["data"]["agreement_id"] == 1 + + +async def test_agreement_already_set_up(hass, aiohttp_client, aioclient_mock): """Test showing display form again if display already exists.""" await setup_component(hass) + MockConfigEntry(domain=DOMAIN, unique_id=123).add_to_hass(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) - flow = config_flow.ToonFlowHandler() - flow.hass = hass - await flow.async_step_user(user_input=FIXTURE_CREDENTIALS) + # 
pylint: disable=protected-access + state = config_entry_oauth2_flow._encode_jwt(hass, {"flow_id": result["flow_id"]}) + await hass.config_entries.flow.async_configure( + result["flow_id"], {"implementation": "eneco"} + ) - MockConfigEntry(domain=DOMAIN, data=FIXTURE_DISPLAY).add_to_hass(hass) + client = await aiohttp_client(hass.http.app) + await client.get(f"/auth/external/callback?code=abcd&state={state}") + aioclient_mock.post( + "https://api.toon.eu/token", + json={ + "refresh_token": "mock-refresh-token", + "access_token": "mock-access-token", + "type": "Bearer", + "expires_in": 60, + }, + ) - result = await flow.async_step_display(user_input=FIXTURE_DISPLAY) + with patch("toonapi.Toon.agreements", return_value=[Agreement(agreement_id=123)]): + result3 = await hass.config_entries.flow.async_configure(result["flow_id"]) - assert result["type"] == data_entry_flow.RESULT_TYPE_FORM - assert result["step_id"] == "display" - assert result["errors"] == {"base": "display_exists"} + assert result3["type"] == data_entry_flow.RESULT_TYPE_ABORT + assert result3["reason"] == "already_configured" -async def test_abort_last_minute_fail(hass, mock_toonapilib): - """Test we abort when API communication fails in the last step.""" +async def test_toon_abort(hass, aiohttp_client, aioclient_mock): + """Test we abort on Toon error.""" + await setup_component(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + # pylint: disable=protected-access + state = config_entry_oauth2_flow._encode_jwt(hass, {"flow_id": result["flow_id"]}) + await hass.config_entries.flow.async_configure( + result["flow_id"], {"implementation": "eneco"} + ) + + client = await aiohttp_client(hass.http.app) + await client.get(f"/auth/external/callback?code=abcd&state={state}") + aioclient_mock.post( + "https://api.toon.eu/token", + json={ + "refresh_token": "mock-refresh-token", + "access_token": "mock-access-token", + "type": "Bearer", + "expires_in": 60, + }, + ) + + with patch("toonapi.Toon.agreements", side_effect=ToonError): + result2 = await hass.config_entries.flow.async_configure(result["flow_id"]) + + assert result2["type"] == data_entry_flow.RESULT_TYPE_ABORT + assert result2["reason"] == "connection_error" + + +async def test_import(hass): + """Test if importing step works.""" await setup_component(hass) - flow = config_flow.ToonFlowHandler() - flow.hass = hass - await flow.async_step_user(user_input=FIXTURE_CREDENTIALS) + # Setting up the component without entries, should already have triggered + # it. Hence, expect this to throw an already_in_progress. 
+ result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_IMPORT} + ) - mock_toonapilib.side_effect = Exception - - result = await flow.async_step_display(user_input=FIXTURE_DISPLAY) assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT - assert result["reason"] == "unknown_auth_fail" + assert result["reason"] == "already_in_progress" + + +async def test_import_migration(hass, aiohttp_client, aioclient_mock): + """Test if importing step with migration works.""" + old_entry = MockConfigEntry(domain=DOMAIN, unique_id=123, version=1) + old_entry.add_to_hass(hass) + + await setup_component(hass) + + entries = hass.config_entries.async_entries(DOMAIN) + assert len(entries) == 1 + assert entries[0].version == 1 + + flows = hass.config_entries.flow.async_progress() + assert len(flows) == 1 + assert flows[0]["context"][CONF_MIGRATE] == old_entry.entry_id + + # pylint: disable=protected-access + state = config_entry_oauth2_flow._encode_jwt(hass, {"flow_id": flows[0]["flow_id"]}) + await hass.config_entries.flow.async_configure( + flows[0]["flow_id"], {"implementation": "eneco"} + ) + + client = await aiohttp_client(hass.http.app) + await client.get(f"/auth/external/callback?code=abcd&state={state}") + aioclient_mock.post( + "https://api.toon.eu/token", + json={ + "refresh_token": "mock-refresh-token", + "access_token": "mock-access-token", + "type": "Bearer", + "expires_in": 60, + }, + ) + + with patch("toonapi.Toon.agreements", return_value=[Agreement(agreement_id=123)]): + result = await hass.config_entries.flow.async_configure(flows[0]["flow_id"]) + + assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY + + entries = hass.config_entries.async_entries(DOMAIN) + assert len(entries) == 1 + assert entries[0].version == 2 diff --git a/tests/components/transport_nsw/test_sensor.py b/tests/components/transport_nsw/test_sensor.py index 0d12589372e..d2aa8fb9387 100644 --- a/tests/components/transport_nsw/test_sensor.py +++ b/tests/components/transport_nsw/test_sensor.py @@ -37,10 +37,7 @@ class TestRMVtransportSensor(unittest.TestCase): def setUp(self): """Set up things to run when tests begin.""" self.hass = get_test_home_assistant() - - def tearDown(self): - """Stop everything that was started.""" - self.hass.stop() + self.addCleanup(self.hass.stop) @patch("TransportNSW.TransportNSW.get_departures", side_effect=get_departuresMock) def test_transportnsw_config(self, mock_get_departures): diff --git a/tests/components/tts/test_init.py b/tests/components/tts/test_init.py index 527fb559eb1..b4cb9c67af3 100644 --- a/tests/components/tts/test_init.py +++ b/tests/components/tts/test_init.py @@ -1,6 +1,4 @@ """The tests for the TTS component.""" -import ctypes - import pytest import yarl @@ -265,11 +263,11 @@ async def test_setup_component_and_test_service_with_service_options( "entity_id": "media_player.something", tts.ATTR_MESSAGE: "There is someone at the door.", tts.ATTR_LANGUAGE: "de", - tts.ATTR_OPTIONS: {"voice": "alex"}, + tts.ATTR_OPTIONS: {"voice": "alex", "age": 5}, }, blocking=True, ) - opt_hash = ctypes.c_size_t(hash(frozenset({"voice": "alex"}))).value + opt_hash = tts._hash_options({"voice": "alex", "age": 5}) assert len(calls) == 1 assert calls[0].data[ATTR_MEDIA_CONTENT_TYPE] == MEDIA_TYPE_MUSIC @@ -306,7 +304,7 @@ async def test_setup_component_and_test_with_service_options_def(hass, empty_cac }, blocking=True, ) - opt_hash = ctypes.c_size_t(hash(frozenset({"voice": "alex"}))).value + opt_hash = tts._hash_options({"voice": "alex"}) assert 
len(calls) == 1 assert calls[0].data[ATTR_MEDIA_CONTENT_TYPE] == MEDIA_TYPE_MUSIC @@ -343,7 +341,7 @@ async def test_setup_component_and_test_service_with_service_options_wrong( }, blocking=True, ) - opt_hash = ctypes.c_size_t(hash(frozenset({"speed": 1}))).value + opt_hash = tts._hash_options({"speed": 1}) assert len(calls) == 0 await hass.async_block_till_done() diff --git a/tests/components/uk_transport/test_sensor.py b/tests/components/uk_transport/test_sensor.py index 7385592fe93..81cfa7ae8ae 100644 --- a/tests/components/uk_transport/test_sensor.py +++ b/tests/components/uk_transport/test_sensor.py @@ -49,10 +49,7 @@ class TestUkTransportSensor(unittest.TestCase): """Initialize values for this testcase class.""" self.hass = get_test_home_assistant() self.config = VALID_CONFIG - - def tearDown(self): - """Stop everything that was started.""" - self.hass.stop() + self.addCleanup(self.hass.stop) @requests_mock.Mocker() def test_bus(self, mock_req): diff --git a/tests/components/universal/test_media_player.py b/tests/components/universal/test_media_player.py index bf780d33922..b50906649f0 100644 --- a/tests/components/universal/test_media_player.py +++ b/tests/components/universal/test_media_player.py @@ -220,8 +220,9 @@ class TestMediaPlayer(unittest.TestCase): "shuffle": self.mock_shuffle_switch_id, }, } + self.addCleanup(self.tear_down_cleanup) - def tearDown(self): # pylint: disable=invalid-name + def tear_down_cleanup(self): """Stop everything that was started.""" self.hass.stop() diff --git a/tests/components/uptime/test_sensor.py b/tests/components/uptime/test_sensor.py index 111114d8aca..08758741b95 100644 --- a/tests/components/uptime/test_sensor.py +++ b/tests/components/uptime/test_sensor.py @@ -16,10 +16,7 @@ class TestUptimeSensor(unittest.TestCase): def setUp(self): """Set up things to run when tests begin.""" self.hass = get_test_home_assistant() - - def tearDown(self): - """Stop everything that was started.""" - self.hass.stop() + self.addCleanup(self.hass.stop) def test_uptime_min_config(self): """Test minimum uptime configuration.""" diff --git a/tests/components/uvc/test_camera.py b/tests/components/uvc/test_camera.py index 22df898f006..35e6c82ded6 100644 --- a/tests/components/uvc/test_camera.py +++ b/tests/components/uvc/test_camera.py @@ -21,10 +21,7 @@ class TestUVCSetup(unittest.TestCase): def setUp(self): """Set up things to be run when tests are started.""" self.hass = get_test_home_assistant() - - def tearDown(self): - """Stop everything that was started.""" - self.hass.stop() + self.addCleanup(self.hass.stop) @mock.patch("uvcclient.nvr.UVCRemote") @mock.patch.object(uvc, "UnifiVideoCamera") diff --git a/tests/components/vera/test_common.py b/tests/components/vera/test_common.py new file mode 100644 index 00000000000..509bbc5f96a --- /dev/null +++ b/tests/components/vera/test_common.py @@ -0,0 +1,50 @@ +"""Tests for common vera code.""" +from datetime import timedelta + +from homeassistant.components.vera import SubscriptionRegistry +from homeassistant.core import HomeAssistant +from homeassistant.util.dt import utcnow + +from tests.async_mock import MagicMock +from tests.common import async_fire_time_changed + + +async def test_subscription_registry(hass: HomeAssistant) -> None: + """Test subscription registry polling.""" + subscription_registry = SubscriptionRegistry(hass) + # pylint: disable=protected-access + subscription_registry.poll_server_once = poll_server_once_mock = MagicMock() + + poll_server_once_mock.return_value = True + await 
hass.async_add_executor_job(subscription_registry.start) + async_fire_time_changed(hass, utcnow() + timedelta(seconds=1)) + await hass.async_block_till_done() + poll_server_once_mock.assert_called_once() + + # Last poll was successful and already scheduled the next poll for 1s in the future. + # This will ensure that future poll will fail. + poll_server_once_mock.return_value = False + + # Asserting future poll runs. + poll_server_once_mock.reset_mock() + async_fire_time_changed(hass, utcnow() + timedelta(seconds=2)) + await hass.async_block_till_done() + poll_server_once_mock.assert_called_once() + + # Asserting a future poll is delayed due to the failure set above. + async_fire_time_changed(hass, utcnow() + timedelta(seconds=2)) + poll_server_once_mock.reset_mock() + poll_server_once_mock.assert_not_called() + + poll_server_once_mock.reset_mock() + async_fire_time_changed(hass, utcnow() + timedelta(seconds=60)) + await hass.async_block_till_done() + poll_server_once_mock.assert_called_once() + + poll_server_once_mock.reset_mock() + await hass.async_add_executor_job(subscription_registry.stop) + + # Assert no further polling is performed. + async_fire_time_changed(hass, utcnow() + timedelta(seconds=65)) + await hass.async_block_till_done() + poll_server_once_mock.assert_not_called() diff --git a/tests/components/vultr/test_binary_sensor.py b/tests/components/vultr/test_binary_sensor.py index 609cdbf6a9e..af99dc12c5f 100644 --- a/tests/components/vultr/test_binary_sensor.py +++ b/tests/components/vultr/test_binary_sensor.py @@ -42,8 +42,9 @@ class TestVultrBinarySensorSetup(unittest.TestCase): {CONF_SUBSCRIPTION: "123456", CONF_NAME: "Failed Server"}, {CONF_SUBSCRIPTION: "555555", CONF_NAME: vultr.DEFAULT_NAME}, ] + self.addCleanup(self.tear_down_cleanup) - def tearDown(self): + def tear_down_cleanup(self): """Stop our started services.""" self.hass.stop() diff --git a/tests/components/vultr/test_init.py b/tests/components/vultr/test_init.py index 6035ac547af..9ce96d7969c 100644 --- a/tests/components/vultr/test_init.py +++ b/tests/components/vultr/test_init.py @@ -21,8 +21,9 @@ class TestVultr(unittest.TestCase): """Initialize values for this test case class.""" self.hass = get_test_home_assistant() self.config = VALID_CONFIG + self.addCleanup(self.tear_down_cleanup) - def tearDown(self): # pylint: disable=invalid-name + def tear_down_cleanup(self): """Stop everything that we started.""" self.hass.stop() diff --git a/tests/components/vultr/test_sensor.py b/tests/components/vultr/test_sensor.py index 1ced0fec82f..8e9cb606d1e 100644 --- a/tests/components/vultr/test_sensor.py +++ b/tests/components/vultr/test_sensor.py @@ -51,10 +51,7 @@ class TestVultrSensorSetup(unittest.TestCase): CONF_MONITORED_CONDITIONS: ["pending_charges"], }, ] - - def tearDown(self): - """Stop everything that was started.""" - self.hass.stop() + self.addCleanup(self.hass.stop) @requests_mock.Mocker() def test_sensor(self, mock): diff --git a/tests/components/vultr/test_switch.py b/tests/components/vultr/test_switch.py index 594617bdfd9..77eb1e7a8c6 100644 --- a/tests/components/vultr/test_switch.py +++ b/tests/components/vultr/test_switch.py @@ -42,8 +42,9 @@ class TestVultrSwitchSetup(unittest.TestCase): {CONF_SUBSCRIPTION: "123456", CONF_NAME: "Failed Server"}, {CONF_SUBSCRIPTION: "555555", CONF_NAME: vultr.DEFAULT_NAME}, ] + self.addCleanup(self.tear_down_cleanup) - def tearDown(self): + def tear_down_cleanup(self): """Stop our started services.""" self.hass.stop() diff --git 
a/tests/components/wake_on_lan/test_init.py b/tests/components/wake_on_lan/test_init.py index 6eb7afb29f4..331f66b03d8 100644 --- a/tests/components/wake_on_lan/test_init.py +++ b/tests/components/wake_on_lan/test_init.py @@ -13,18 +13,20 @@ async def test_send_magic_packet(hass): with patch("homeassistant.components.wake_on_lan.wakeonlan") as mocked_wakeonlan: mac = "aa:bb:cc:dd:ee:ff" bc_ip = "192.168.255.255" + bc_port = 999 await async_setup_component(hass, DOMAIN, {}) await hass.services.async_call( DOMAIN, SERVICE_SEND_MAGIC_PACKET, - {"mac": mac, "broadcast_address": bc_ip}, + {"mac": mac, "broadcast_address": bc_ip, "broadcast_port": bc_port}, blocking=True, ) assert len(mocked_wakeonlan.mock_calls) == 1 assert mocked_wakeonlan.mock_calls[-1][1][0] == mac assert mocked_wakeonlan.mock_calls[-1][2]["ip_address"] == bc_ip + assert mocked_wakeonlan.mock_calls[-1][2]["port"] == bc_port with pytest.raises(vol.Invalid): await hass.services.async_call( diff --git a/tests/components/wake_on_lan/test_switch.py b/tests/components/wake_on_lan/test_switch.py index d99bc1ccb4f..ce5dbca9585 100644 --- a/tests/components/wake_on_lan/test_switch.py +++ b/tests/components/wake_on_lan/test_switch.py @@ -35,10 +35,7 @@ class TestWolSwitch(unittest.TestCase): def setUp(self): """Set up things to be run when tests are started.""" self.hass = get_test_home_assistant() - - def tearDown(self): - """Stop everything that was started.""" - self.hass.stop() + self.addCleanup(self.hass.stop) @patch("wakeonlan.send_magic_packet", new=send_magic_packet) @patch("subprocess.call", new=call) @@ -129,6 +126,7 @@ class TestWolSwitch(unittest.TestCase): "platform": "wake_on_lan", "mac": "00-01-02-03-04-05", "broadcast_address": "255.255.255.255", + "broadcast_port": 999, } }, ) diff --git a/tests/components/weather/test_weather.py b/tests/components/weather/test_weather.py index d026cd6ee86..ccddb35ad0a 100644 --- a/tests/components/weather/test_weather.py +++ b/tests/components/weather/test_weather.py @@ -6,6 +6,7 @@ from homeassistant.components.weather import ( ATTR_FORECAST, ATTR_FORECAST_CONDITION, ATTR_FORECAST_PRECIPITATION, + ATTR_FORECAST_PRECIPITATION_PROBABILITY, ATTR_FORECAST_TEMP, ATTR_FORECAST_TEMP_LOW, ATTR_WEATHER_ATTRIBUTION, @@ -33,8 +34,9 @@ class TestWeather(unittest.TestCase): self.hass, weather.DOMAIN, {"weather": {"platform": "demo"}} ) self.hass.block_till_done() + self.addCleanup(self.tear_down_cleanup) - def tearDown(self): + def tear_down_cleanup(self): """Stop down everything that was started.""" self.hass.stop() @@ -55,12 +57,20 @@ class TestWeather(unittest.TestCase): assert data.get(ATTR_WEATHER_ATTRIBUTION) == "Powered by Home Assistant" assert data.get(ATTR_FORECAST)[0].get(ATTR_FORECAST_CONDITION) == "rainy" assert data.get(ATTR_FORECAST)[0].get(ATTR_FORECAST_PRECIPITATION) == 1 + assert ( + data.get(ATTR_FORECAST)[0].get(ATTR_FORECAST_PRECIPITATION_PROBABILITY) + == 60 + ) assert data.get(ATTR_FORECAST)[0].get(ATTR_FORECAST_TEMP) == 22 assert data.get(ATTR_FORECAST)[0].get(ATTR_FORECAST_TEMP_LOW) == 15 assert data.get(ATTR_FORECAST)[6].get(ATTR_FORECAST_CONDITION) == "fog" assert data.get(ATTR_FORECAST)[6].get(ATTR_FORECAST_PRECIPITATION) == 0.2 assert data.get(ATTR_FORECAST)[6].get(ATTR_FORECAST_TEMP) == 21 assert data.get(ATTR_FORECAST)[6].get(ATTR_FORECAST_TEMP_LOW) == 12 + assert ( + data.get(ATTR_FORECAST)[6].get(ATTR_FORECAST_PRECIPITATION_PROBABILITY) + == 100 + ) assert len(data.get(ATTR_FORECAST)) == 7 def test_temperature_convert(self): diff --git 
a/tests/components/withings/common.py b/tests/components/withings/common.py index ca3fef6159e..3ed3b39daee 100644 --- a/tests/components/withings/common.py +++ b/tests/components/withings/common.py @@ -1,24 +1,32 @@ """Common data for for the withings component tests.""" -import re -import time -from typing import List +from dataclasses import dataclass +from typing import List, Optional, Tuple, Union +from urllib.parse import urlparse -import requests_mock -from withings_api import AbstractWithingsApi +from aiohttp.test_utils import TestClient +import arrow +import pytz from withings_api.common import ( - MeasureGetMeasGroupAttrib, - MeasureGetMeasGroupCategory, - MeasureType, - SleepModel, - SleepState, + MeasureGetMeasResponse, + NotifyAppli, + NotifyListResponse, + SleepGetSummaryResponse, + UserGetDeviceResponse, ) from homeassistant import data_entry_flow import homeassistant.components.api as api -import homeassistant.components.http as http +from homeassistant.components.homeassistant import DOMAIN as HA_DOMAIN +import homeassistant.components.webhook as webhook +from homeassistant.components.withings import async_unload_entry +from homeassistant.components.withings.common import ( + ConfigEntryWithingsApi, + DataManager, + get_all_data_managers, +) import homeassistant.components.withings.const as const from homeassistant.config import async_process_ha_core_config -from homeassistant.config_entries import SOURCE_USER +from homeassistant.config_entries import SOURCE_USER, ConfigEntry from homeassistant.const import ( CONF_CLIENT_ID, CONF_CLIENT_SECRET, @@ -28,364 +36,290 @@ from homeassistant.const import ( ) from homeassistant.core import HomeAssistant from homeassistant.helpers import config_entry_oauth2_flow +from homeassistant.helpers.config_entry_oauth2_flow import AUTH_CALLBACK_PATH from homeassistant.setup import async_setup_component -from homeassistant.util import slugify + +from tests.async_mock import MagicMock +from tests.test_util.aiohttp import AiohttpClientMocker -def get_entity_id(measure, profile) -> str: - """Get an entity id for a measure and profile.""" - return "sensor.{}_{}_{}".format(const.DOMAIN, measure, slugify(profile)) +@dataclass +class ProfileConfig: + """Data representing a user profile.""" + + profile: str + user_id: int + api_response_user_get_device: Union[UserGetDeviceResponse, Exception] + api_response_measure_get_meas: Union[MeasureGetMeasResponse, Exception] + api_response_sleep_get_summary: Union[SleepGetSummaryResponse, Exception] + api_response_notify_list: Union[NotifyListResponse, Exception] + api_response_notify_revoke: Optional[Exception] -def assert_state_equals( - hass: HomeAssistant, profile: str, measure: str, expected -) -> None: - """Assert the state of a withings sensor.""" - entity_id = get_entity_id(measure, profile) - state_obj = hass.states.get(entity_id) - - assert state_obj, f"Expected entity {entity_id} to exist but it did not" - - assert state_obj.state == str(expected), ( - f"Expected {expected} but was {state_obj.state} " - f"for measure {measure}, {entity_id}" +def new_profile_config( + profile: str, + user_id: int, + api_response_user_get_device: Optional[ + Union[UserGetDeviceResponse, Exception] + ] = None, + api_response_measure_get_meas: Optional[ + Union[MeasureGetMeasResponse, Exception] + ] = None, + api_response_sleep_get_summary: Optional[ + Union[SleepGetSummaryResponse, Exception] + ] = None, + api_response_notify_list: Optional[Union[NotifyListResponse, Exception]] = None, + api_response_notify_revoke: 
Optional[Exception] = None, +) -> ProfileConfig: + """Create a new profile config immutable object.""" + return ProfileConfig( + profile=profile, + user_id=user_id, + api_response_user_get_device=api_response_user_get_device + or UserGetDeviceResponse(devices=[]), + api_response_measure_get_meas=api_response_measure_get_meas + or MeasureGetMeasResponse( + measuregrps=[], + more=False, + offset=0, + timezone=pytz.UTC, + updatetime=arrow.get(12345), + ), + api_response_sleep_get_summary=api_response_sleep_get_summary + or SleepGetSummaryResponse(more=False, offset=0, series=[]), + api_response_notify_list=api_response_notify_list + or NotifyListResponse(profiles=[]), + api_response_notify_revoke=api_response_notify_revoke, ) -async def setup_hass(hass: HomeAssistant) -> dict: - """Configure Home Assistant.""" - profiles = ["Person0", "Person1", "Person2", "Person3", "Person4"] +@dataclass +class WebhookResponse: + """Response data from a webhook.""" - hass_config = { - "homeassistant": { - CONF_UNIT_SYSTEM: CONF_UNIT_SYSTEM_METRIC, - CONF_EXTERNAL_URL: "http://example.local/", - }, - api.DOMAIN: {}, - http.DOMAIN: {"server_port": 8080}, - const.DOMAIN: { - CONF_CLIENT_ID: "my_client_id", - CONF_CLIENT_SECRET: "my_client_secret", - const.CONF_PROFILES: profiles, - }, - } - - await async_process_ha_core_config(hass, hass_config.get("homeassistant")) - assert await async_setup_component(hass, http.DOMAIN, hass_config) - assert await async_setup_component(hass, api.DOMAIN, hass_config) - assert await async_setup_component(hass, const.DOMAIN, hass_config) - await hass.async_block_till_done() - - return hass_config + message: str + message_code: int -async def configure_integration( - hass: HomeAssistant, - aiohttp_client, - aioclient_mock, - profiles: List[str], - profile_index: int, - get_device_response: dict, - getmeasures_response: dict, - get_sleep_response: dict, - get_sleep_summary_response: dict, -) -> None: - """Configure the integration for a specific profile.""" - selected_profile = profiles[profile_index] +class ComponentFactory: + """Manages the setup and unloading of the withing component and profiles.""" - with requests_mock.mock() as rqmck: - rqmck.get( - re.compile(f"{AbstractWithingsApi.URL}/v2/user?.*action=getdevice(&.*|$)"), - status_code=200, - json=get_device_response, + def __init__( + self, + hass: HomeAssistant, + api_class_mock: MagicMock, + aiohttp_client, + aioclient_mock: AiohttpClientMocker, + ) -> None: + """Initialize the object.""" + self._hass = hass + self._api_class_mock = api_class_mock + self._aiohttp_client = aiohttp_client + self._aioclient_mock = aioclient_mock + self._client_id = None + self._client_secret = None + self._profile_configs: Tuple[ProfileConfig, ...] = () + + async def configure_component( + self, + client_id: str = "my_client_id", + client_secret: str = "my_client_secret", + profile_configs: Tuple[ProfileConfig, ...] 
= (), + ) -> None: + """Configure the withings component.""" + self._client_id = client_id + self._client_secret = client_secret + self._profile_configs = profile_configs + + hass_config = { + "homeassistant": { + CONF_UNIT_SYSTEM: CONF_UNIT_SYSTEM_METRIC, + CONF_EXTERNAL_URL: "http://127.0.0.1:8080/", + }, + api.DOMAIN: {}, + const.DOMAIN: { + CONF_CLIENT_ID: self._client_id, + CONF_CLIENT_SECRET: self._client_secret, + const.CONF_USE_WEBHOOK: True, + }, + } + + await async_process_ha_core_config(self._hass, hass_config.get("homeassistant")) + assert await async_setup_component(self._hass, HA_DOMAIN, {}) + assert await async_setup_component(self._hass, webhook.DOMAIN, hass_config) + + assert await async_setup_component(self._hass, const.DOMAIN, hass_config) + await self._hass.async_block_till_done() + + @staticmethod + def _setup_api_method(api_method, value) -> None: + if isinstance(value, Exception): + api_method.side_effect = value + else: + api_method.return_value = value + + async def setup_profile(self, user_id: int) -> ConfigEntryWithingsApi: + """Set up a user profile through config flows.""" + profile_config = next( + iter( + [ + profile_config + for profile_config in self._profile_configs + if profile_config.user_id == user_id + ] + ) ) - rqmck.get( - re.compile(f"{AbstractWithingsApi.URL}/v2/sleep?.*action=get(&.*|$)"), - status_code=200, - json=get_sleep_response, + api_mock: ConfigEntryWithingsApi = MagicMock(spec=ConfigEntryWithingsApi) + ComponentFactory._setup_api_method( + api_mock.user_get_device, profile_config.api_response_user_get_device + ) + ComponentFactory._setup_api_method( + api_mock.sleep_get_summary, profile_config.api_response_sleep_get_summary + ) + ComponentFactory._setup_api_method( + api_mock.measure_get_meas, profile_config.api_response_measure_get_meas + ) + ComponentFactory._setup_api_method( + api_mock.notify_list, profile_config.api_response_notify_list + ) + ComponentFactory._setup_api_method( + api_mock.notify_revoke, profile_config.api_response_notify_revoke ) - rqmck.get( - re.compile( - f"{AbstractWithingsApi.URL}/v2/sleep?.*action=getsummary(&.*|$)" - ), - status_code=200, - json=get_sleep_summary_response, - ) - - rqmck.get( - re.compile(f"{AbstractWithingsApi.URL}/measure?.*action=getmeas(&.*|$)"), - status_code=200, - json=getmeasures_response, - ) + self._api_class_mock.reset_mocks() + self._api_class_mock.return_value = api_mock # Get the withings config flow. - result = await hass.config_entries.flow.async_init( + result = await self._hass.config_entries.flow.async_init( const.DOMAIN, context={"source": SOURCE_USER} ) assert result # pylint: disable=protected-access state = config_entry_oauth2_flow._encode_jwt( - hass, {"flow_id": result["flow_id"]} + self._hass, {"flow_id": result["flow_id"]} ) assert result["type"] == data_entry_flow.RESULT_TYPE_EXTERNAL_STEP assert result["url"] == ( "https://account.withings.com/oauth2_user/authorize2?" - "response_type=code&client_id=my_client_id&" - "redirect_uri=http://example.local/auth/external/callback&" + f"response_type=code&client_id={self._client_id}&" + "redirect_uri=http://127.0.0.1:8080/auth/external/callback&" f"state={state}" - "&scope=user.info,user.metrics,user.activity" + "&scope=user.info,user.metrics,user.activity,user.sleepevents" ) # Simulate user being redirected from withings site.
- client = await aiohttp_client(hass.http.app) - resp = await client.get(f"/auth/external/callback?code=abcd&state={state}") + client: TestClient = await self._aiohttp_client(self._hass.http.app) + resp = await client.get(f"{AUTH_CALLBACK_PATH}?code=abcd&state={state}") assert resp.status == 200 assert resp.headers["content-type"] == "text/html; charset=utf-8" - aioclient_mock.post( + self._aioclient_mock.clear_requests() + self._aioclient_mock.post( "https://account.withings.com/oauth2/token", json={ "refresh_token": "mock-refresh-token", "access_token": "mock-access-token", "type": "Bearer", "expires_in": 60, - "userid": "myuserid", + "userid": profile_config.user_id, }, ) # Present user with a list of profiles to choose from. - result = await hass.config_entries.flow.async_configure(result["flow_id"]) + result = await self._hass.config_entries.flow.async_configure(result["flow_id"]) assert result.get("type") == "form" assert result.get("step_id") == "profile" - assert result.get("data_schema").schema["profile"].container == profiles + assert "profile" in result.get("data_schema").schema - # Select the user profile. - result = await hass.config_entries.flow.async_configure( - result["flow_id"], {const.PROFILE: selected_profile} + # Provide the user profile. + result = await self._hass.config_entries.flow.async_configure( + result["flow_id"], {const.PROFILE: profile_config.profile} ) # Finish the config flow by calling it again. assert result.get("type") == "create_entry" assert result.get("result") config_data = result.get("result").data - assert config_data.get(const.PROFILE) == profiles[profile_index] + assert config_data.get(const.PROFILE) == profile_config.profile assert config_data.get("auth_implementation") == const.DOMAIN assert config_data.get("token") - # Ensure all the flows are complete. - flows = hass.config_entries.flow.async_progress() - assert not flows + # Wait for remaining tasks to complete. + await self._hass.async_block_till_done() + + # Mock the webhook. + data_manager = get_data_manager_by_user_id(self._hass, user_id) + self._aioclient_mock.clear_requests() + self._aioclient_mock.request( + "HEAD", data_manager.webhook_config.url, + ) + + return self._api_class_mock.return_value + + async def call_webhook(self, user_id: int, appli: NotifyAppli) -> WebhookResponse: + """Call the webhook to notify of data changes.""" + client: TestClient = await self._aiohttp_client(self._hass.http.app) + data_manager = get_data_manager_by_user_id(self._hass, user_id) + + resp = await client.post( + urlparse(data_manager.webhook_config.url).path, + data={"userid": user_id, "appli": appli.value}, + ) # Wait for remaining tasks to complete. 
- await hass.async_block_till_done() + await self._hass.async_block_till_done() + + data = await resp.json() + resp.close() + + return WebhookResponse(message=data["message"], message_code=data["code"]) + + async def unload(self, profile: ProfileConfig) -> None: + """Unload the component for a specific user.""" + config_entries = get_config_entries_for_user_id(self._hass, profile.user_id) + + for config_entry in config_entries: + await async_unload_entry(self._hass, config_entry) + + await self._hass.async_block_till_done() + + assert not get_data_manager_by_user_id(self._hass, profile.user_id) -WITHINGS_GET_DEVICE_RESPONSE_EMPTY = {"status": 0, "body": {"devices": []}} - - -WITHINGS_GET_DEVICE_RESPONSE = { - "status": 0, - "body": { - "devices": [ - { - "type": "type1", - "model": "model1", - "battery": "battery1", - "deviceid": "deviceid1", - "timezone": "UTC", - } +def get_config_entries_for_user_id( + hass: HomeAssistant, user_id: int +) -> Tuple[ConfigEntry]: + """Get a list of config entries that apply to a specific withings user.""" + return tuple( + [ + config_entry + for config_entry in hass.config_entries.async_entries(const.DOMAIN) + if config_entry.data.get("token", {}).get("userid") == user_id ] - }, -} + ) -WITHINGS_MEASURES_RESPONSE_EMPTY = { - "status": 0, - "body": {"updatetime": "2019-08-01", "timezone": "UTC", "measuregrps": []}, -} +def async_get_flow_for_user_id(hass: HomeAssistant, user_id: int) -> List[dict]: + """Get a flow for a user id.""" + return [ + flow + for flow in hass.config_entries.flow.async_progress() + if flow["handler"] == const.DOMAIN and flow["context"].get("userid") == user_id + ] -WITHINGS_MEASURES_RESPONSE = { - "status": 0, - "body": { - "updatetime": "2019-08-01", - "timezone": "UTC", - "measuregrps": [ - # Un-ambiguous groups. 
- { - "grpid": 1, - "attrib": MeasureGetMeasGroupAttrib.DEVICE_ENTRY_FOR_USER.real, - "date": time.time(), - "created": time.time(), - "category": MeasureGetMeasGroupCategory.REAL.real, - "deviceid": "DEV_ID", - "more": False, - "offset": 0, - "measures": [ - {"type": MeasureType.WEIGHT, "value": 70, "unit": 0}, - {"type": MeasureType.FAT_MASS_WEIGHT, "value": 5, "unit": 0}, - {"type": MeasureType.FAT_FREE_MASS, "value": 60, "unit": 0}, - {"type": MeasureType.MUSCLE_MASS, "value": 50, "unit": 0}, - {"type": MeasureType.BONE_MASS, "value": 10, "unit": 0}, - {"type": MeasureType.HEIGHT, "value": 2, "unit": 0}, - {"type": MeasureType.TEMPERATURE, "value": 40, "unit": 0}, - {"type": MeasureType.BODY_TEMPERATURE, "value": 40, "unit": 0}, - {"type": MeasureType.SKIN_TEMPERATURE, "value": 20, "unit": 0}, - {"type": MeasureType.FAT_RATIO, "value": 70, "unit": -3}, - { - "type": MeasureType.DIASTOLIC_BLOOD_PRESSURE, - "value": 70, - "unit": 0, - }, - { - "type": MeasureType.SYSTOLIC_BLOOD_PRESSURE, - "value": 100, - "unit": 0, - }, - {"type": MeasureType.HEART_RATE, "value": 60, "unit": 0}, - {"type": MeasureType.SP02, "value": 95, "unit": -2}, - {"type": MeasureType.HYDRATION, "value": 95, "unit": -2}, - {"type": MeasureType.PULSE_WAVE_VELOCITY, "value": 100, "unit": 0}, - ], - }, - # Ambiguous groups (we ignore these) - { - "grpid": 1, - "attrib": MeasureGetMeasGroupAttrib.DEVICE_ENTRY_FOR_USER.real, - "date": time.time(), - "created": time.time(), - "category": MeasureGetMeasGroupCategory.REAL.real, - "deviceid": "DEV_ID", - "more": False, - "offset": 0, - "measures": [ - {"type": MeasureType.WEIGHT, "value": 71, "unit": 0}, - {"type": MeasureType.FAT_MASS_WEIGHT, "value": 4, "unit": 0}, - {"type": MeasureType.FAT_FREE_MASS, "value": 40, "unit": 0}, - {"type": MeasureType.MUSCLE_MASS, "value": 51, "unit": 0}, - {"type": MeasureType.BONE_MASS, "value": 11, "unit": 0}, - {"type": MeasureType.HEIGHT, "value": 201, "unit": 0}, - {"type": MeasureType.TEMPERATURE, "value": 41, "unit": 0}, - {"type": MeasureType.BODY_TEMPERATURE, "value": 34, "unit": 0}, - {"type": MeasureType.SKIN_TEMPERATURE, "value": 21, "unit": 0}, - {"type": MeasureType.FAT_RATIO, "value": 71, "unit": -3}, - { - "type": MeasureType.DIASTOLIC_BLOOD_PRESSURE, - "value": 71, - "unit": 0, - }, - { - "type": MeasureType.SYSTOLIC_BLOOD_PRESSURE, - "value": 101, - "unit": 0, - }, - {"type": MeasureType.HEART_RATE, "value": 61, "unit": 0}, - {"type": MeasureType.SP02, "value": 98, "unit": -2}, - {"type": MeasureType.HYDRATION, "value": 96, "unit": -2}, - {"type": MeasureType.PULSE_WAVE_VELOCITY, "value": 102, "unit": 0}, - ], - }, - ], - }, -} - - -WITHINGS_SLEEP_RESPONSE_EMPTY = { - "status": 0, - "body": {"model": SleepModel.TRACKER.real, "series": []}, -} - - -WITHINGS_SLEEP_RESPONSE = { - "status": 0, - "body": { - "model": SleepModel.TRACKER.real, - "series": [ - { - "startdate": "2019-02-01 00:00:00", - "enddate": "2019-02-01 01:00:00", - "state": SleepState.AWAKE.real, - }, - { - "startdate": "2019-02-01 01:00:00", - "enddate": "2019-02-01 02:00:00", - "state": SleepState.LIGHT.real, - }, - { - "startdate": "2019-02-01 02:00:00", - "enddate": "2019-02-01 03:00:00", - "state": SleepState.REM.real, - }, - { - "startdate": "2019-02-01 03:00:00", - "enddate": "2019-02-01 04:00:00", - "state": SleepState.DEEP.real, - }, - ], - }, -} - - -WITHINGS_SLEEP_SUMMARY_RESPONSE_EMPTY = { - "status": 0, - "body": {"more": False, "offset": 0, "series": []}, -} - - -WITHINGS_SLEEP_SUMMARY_RESPONSE = { - "status": 0, - "body": { - "more": False, - 
"offset": 0, - "series": [ - { - "timezone": "UTC", - "model": SleepModel.SLEEP_MONITOR.real, - "startdate": "2019-02-01", - "enddate": "2019-02-02", - "date": "2019-02-02", - "modified": 12345, - "data": { - "wakeupduration": 110, - "lightsleepduration": 210, - "deepsleepduration": 310, - "remsleepduration": 410, - "wakeupcount": 510, - "durationtosleep": 610, - "durationtowakeup": 710, - "hr_average": 810, - "hr_min": 910, - "hr_max": 1010, - "rr_average": 1110, - "rr_min": 1210, - "rr_max": 1310, - }, - }, - { - "timezone": "UTC", - "model": SleepModel.SLEEP_MONITOR.real, - "startdate": "2019-02-01", - "enddate": "2019-02-02", - "date": "2019-02-02", - "modified": 12345, - "data": { - "wakeupduration": 210, - "lightsleepduration": 310, - "deepsleepduration": 410, - "remsleepduration": 510, - "wakeupcount": 610, - "durationtosleep": 710, - "durationtowakeup": 810, - "hr_average": 910, - "hr_min": 1010, - "hr_max": 1110, - "rr_average": 1210, - "rr_min": 1310, - "rr_max": 1410, - }, - }, - ], - }, -} +def get_data_manager_by_user_id( + hass: HomeAssistant, user_id: int +) -> Optional[DataManager]: + """Get a data manager by the user id.""" + return next( + iter( + [ + data_manager + for data_manager in get_all_data_managers(hass) + if data_manager.user_id == user_id + ] + ), + None, + ) diff --git a/tests/components/withings/conftest.py b/tests/components/withings/conftest.py new file mode 100644 index 00000000000..c95abc8addd --- /dev/null +++ b/tests/components/withings/conftest.py @@ -0,0 +1,22 @@ +"""Fixtures for tests.""" + +from unittest.mock import patch + +import pytest + +from homeassistant.core import HomeAssistant + +from .common import ComponentFactory + +from tests.test_util.aiohttp import AiohttpClientMocker + + +@pytest.fixture() +def component_factory( + hass: HomeAssistant, aiohttp_client, aioclient_mock: AiohttpClientMocker +): + """Return a factory for initializing the withings component.""" + with patch( + "homeassistant.components.withings.common.ConfigEntryWithingsApi" + ) as api_class_mock: + yield ComponentFactory(hass, api_class_mock, aiohttp_client, aioclient_mock) diff --git a/tests/components/withings/test_binary_sensor.py b/tests/components/withings/test_binary_sensor.py new file mode 100644 index 00000000000..b646c667472 --- /dev/null +++ b/tests/components/withings/test_binary_sensor.py @@ -0,0 +1,62 @@ +"""Tests for the Withings component.""" +from withings_api.common import NotifyAppli + +from homeassistant.components.withings.common import ( + WITHINGS_MEASUREMENTS_MAP, + async_get_entity_id, +) +from homeassistant.components.withings.const import Measurement +from homeassistant.const import STATE_OFF, STATE_ON +from homeassistant.core import HomeAssistant +from homeassistant.helpers.entity_registry import EntityRegistry + +from .common import ComponentFactory, new_profile_config + + +async def test_binary_sensor( + hass: HomeAssistant, component_factory: ComponentFactory +) -> None: + """Test binary sensor.""" + in_bed_attribute = WITHINGS_MEASUREMENTS_MAP[Measurement.IN_BED] + person0 = new_profile_config("person0", 0) + person1 = new_profile_config("person1", 1) + + entity_registry: EntityRegistry = await hass.helpers.entity_registry.async_get_registry() + + await component_factory.configure_component(profile_configs=(person0, person1)) + assert not await async_get_entity_id(hass, in_bed_attribute, person0.user_id) + assert not await async_get_entity_id(hass, in_bed_attribute, person1.user_id) + + # person 0 + await 
component_factory.setup_profile(person0.user_id) + await component_factory.setup_profile(person1.user_id) + + entity_id0 = await async_get_entity_id(hass, in_bed_attribute, person0.user_id) + entity_id1 = await async_get_entity_id(hass, in_bed_attribute, person1.user_id) + assert entity_id0 + assert entity_id1 + + assert entity_registry.async_is_registered(entity_id0) + assert hass.states.get(entity_id0).state == STATE_OFF + + resp = await component_factory.call_webhook(person0.user_id, NotifyAppli.BED_IN) + assert resp.message_code == 0 + await hass.async_block_till_done() + assert hass.states.get(entity_id0).state == STATE_ON + + resp = await component_factory.call_webhook(person0.user_id, NotifyAppli.BED_OUT) + assert resp.message_code == 0 + await hass.async_block_till_done() + assert hass.states.get(entity_id0).state == STATE_OFF + + # person 1 + assert hass.states.get(entity_id1).state == STATE_OFF + + resp = await component_factory.call_webhook(person1.user_id, NotifyAppli.BED_IN) + assert resp.message_code == 0 + await hass.async_block_till_done() + assert hass.states.get(entity_id1).state == STATE_ON + + # Unload + await component_factory.unload(person0) + await component_factory.unload(person1) diff --git a/tests/components/withings/test_common.py b/tests/components/withings/test_common.py index f0528c36005..22f9e5ba0b6 100644 --- a/tests/components/withings/test_common.py +++ b/tests/components/withings/test_common.py @@ -1,135 +1,236 @@ """Tests for the Withings component.""" -from datetime import timedelta +import datetime +import re +from typing import Any +from urllib.parse import urlparse +from aiohttp.test_utils import TestClient +from asynctest import MagicMock import pytest -from withings_api import WithingsApi -from withings_api.common import TimeoutException, UnauthorizedException +import requests_mock +from withings_api.common import NotifyAppli, NotifyListProfile, NotifyListResponse from homeassistant.components.withings.common import ( - NotAuthenticatedError, - WithingsDataManager, + ConfigEntryWithingsApi, + DataManager, + WebhookConfig, ) -from homeassistant.exceptions import PlatformNotReady -from homeassistant.util import dt +from homeassistant.core import HomeAssistant +from homeassistant.helpers.config_entry_oauth2_flow import AbstractOAuth2Implementation -from tests.async_mock import MagicMock, patch +from tests.common import MockConfigEntry +from tests.components.withings.common import ( + ComponentFactory, + get_data_manager_by_user_id, + new_profile_config, +) +from tests.test_util.aiohttp import AiohttpClientMocker -@pytest.fixture(name="withings_api") -def withings_api_fixture() -> WithingsApi: - """Provide withings api.""" - withings_api = WithingsApi.__new__(WithingsApi) - withings_api.user_get_device = MagicMock() - withings_api.measure_get_meas = MagicMock() - withings_api.sleep_get = MagicMock() - withings_api.sleep_get_summary = MagicMock() - return withings_api +async def test_config_entry_withings_api(hass: HomeAssistant) -> None: + """Test ConfigEntryWithingsApi.""" + config_entry = MockConfigEntry( + data={"token": {"access_token": "mock_access_token", "expires_at": 1111111}} + ) + config_entry.add_to_hass(hass) + + implementation_mock = MagicMock(spec=AbstractOAuth2Implementation) + implementation_mock.async_refresh_token.return_value = { + "expires_at": 1111111, + "access_token": "mock_access_token", + } + + with requests_mock.mock() as rqmck: + rqmck.get( + re.compile(".*"), + status_code=200, + json={"status": 0, "body": {"message": 
"success"}}, + ) + + api = ConfigEntryWithingsApi(hass, config_entry, implementation_mock) + response = await hass.async_add_executor_job( + api.request, "test", {"arg1": "val1", "arg2": "val2"} + ) + assert response == {"message": "success"} -@pytest.fixture(name="data_manager") -def data_manager_fixture(hass, withings_api: WithingsApi) -> WithingsDataManager: - """Provide data manager.""" - return WithingsDataManager(hass, "My Profile", withings_api) - - -def test_print_service() -> None: - """Test method.""" - # Go from None to True - WithingsDataManager.service_available = None - assert WithingsDataManager.print_service_available() - assert WithingsDataManager.service_available is True - assert not WithingsDataManager.print_service_available() - assert not WithingsDataManager.print_service_available() - - # Go from True to False - assert WithingsDataManager.print_service_unavailable() - assert WithingsDataManager.service_available is False - assert not WithingsDataManager.print_service_unavailable() - assert not WithingsDataManager.print_service_unavailable() - - # Go from False to True - assert WithingsDataManager.print_service_available() - assert WithingsDataManager.service_available is True - assert not WithingsDataManager.print_service_available() - assert not WithingsDataManager.print_service_available() - - # Go from Non to False - WithingsDataManager.service_available = None - assert WithingsDataManager.print_service_unavailable() - assert WithingsDataManager.service_available is False - assert not WithingsDataManager.print_service_unavailable() - assert not WithingsDataManager.print_service_unavailable() - - -async def test_data_manager_call(data_manager: WithingsDataManager) -> None: - """Test method.""" - # Not authenticated 1. - test_function = MagicMock(side_effect=UnauthorizedException(401)) - with pytest.raises(NotAuthenticatedError): - await data_manager.call(test_function) - - # Not authenticated 2. - test_function = MagicMock(side_effect=TimeoutException(522)) - with pytest.raises(PlatformNotReady): - await data_manager.call(test_function) - - # Service error. - test_function = MagicMock(side_effect=PlatformNotReady()) - with pytest.raises(PlatformNotReady): - await data_manager.call(test_function) - - -async def test_data_manager_call_throttle_enabled( - data_manager: WithingsDataManager, +@pytest.mark.parametrize( + ["user_id", "arg_user_id", "arg_appli", "expected_code"], + [ + [0, 0, NotifyAppli.WEIGHT.value, 0], # Success + [0, None, 1, 0], # Success, we ignore the user_id. + [0, None, None, 12], # No request body. + [0, "GG", None, 20], # appli not provided. + [0, 0, None, 20], # appli not provided. + [0, 0, 99, 21], # Invalid appli. 
+ [0, 11, NotifyAppli.WEIGHT.value, 0], # Success, we ignore the user_id + ], +) +async def test_webhook_post( + hass: HomeAssistant, + component_factory: ComponentFactory, + aiohttp_client, + user_id: int, + arg_user_id: Any, + arg_appli: Any, + expected_code: int, ) -> None: - """Test method.""" - hello_func = MagicMock(return_value="HELLO2") + """Test webhook callback.""" + person0 = new_profile_config("person0", user_id) - result = await data_manager.call(hello_func, throttle_domain="test") - assert result == "HELLO2" + await component_factory.configure_component(profile_configs=(person0,)) + await component_factory.setup_profile(person0.user_id) + data_manager = get_data_manager_by_user_id(hass, user_id) - result = await data_manager.call(hello_func, throttle_domain="test") - assert result == "HELLO2" + client: TestClient = await aiohttp_client(hass.http.app) - assert hello_func.call_count == 1 + post_data = {} + if arg_user_id is not None: + post_data["userid"] = arg_user_id + if arg_appli is not None: + post_data["appli"] = arg_appli - -async def test_data_manager_call_throttle_disabled( - data_manager: WithingsDataManager, -) -> None: - """Test method.""" - hello_func = MagicMock(return_value="HELLO2") - - result = await data_manager.call(hello_func) - assert result == "HELLO2" - - result = await data_manager.call(hello_func) - assert result == "HELLO2" - - assert hello_func.call_count == 2 - - -async def test_data_manager_update_sleep_date_range( - data_manager: WithingsDataManager, -) -> None: - """Test method.""" - patch_time_zone = patch( - "homeassistant.util.dt.DEFAULT_TIME_ZONE", - new=dt.get_time_zone("America/Belize"), + resp = await client.post( + urlparse(data_manager.webhook_config.url).path, data=post_data ) - with patch_time_zone: - update_start_time = dt.now() - await data_manager.update_sleep() + # Wait for remaining tasks to complete. + await hass.async_block_till_done() - call_args = data_manager.api.sleep_get.call_args_list[0][1] - startdate = call_args.get("startdate") - enddate = call_args.get("enddate") + data = await resp.json() + resp.close() - assert startdate.tzname() == "CST" + assert data["code"] == expected_code - assert enddate.tzname() == "CST" - assert startdate.tzname() == "CST" - assert update_start_time < enddate - assert enddate < update_start_time + timedelta(seconds=1) - assert enddate > startdate + +async def test_webhook_head( + hass: HomeAssistant, component_factory: ComponentFactory, aiohttp_client, +) -> None: + """Test head method on webhook view.""" + person0 = new_profile_config("person0", 0) + + await component_factory.configure_component(profile_configs=(person0,)) + await component_factory.setup_profile(person0.user_id) + data_manager = get_data_manager_by_user_id(hass, person0.user_id) + + client: TestClient = await aiohttp_client(hass.http.app) + resp = await client.head(urlparse(data_manager.webhook_config.url).path) + assert resp.status == 200 + + +async def test_webhook_put( + hass: HomeAssistant, component_factory: ComponentFactory, aiohttp_client, +) -> None: + """Test webhook callback.""" + person0 = new_profile_config("person0", 0) + + await component_factory.configure_component(profile_configs=(person0,)) + await component_factory.setup_profile(person0.user_id) + data_manager = get_data_manager_by_user_id(hass, person0.user_id) + + client: TestClient = await aiohttp_client(hass.http.app) + resp = await client.put(urlparse(data_manager.webhook_config.url).path) + + # Wait for remaining tasks to complete. 
+ await hass.async_block_till_done() + + assert resp.status == 200 + data = await resp.json() + assert data + assert data["code"] == 2 + + +async def test_data_manager_webhook_subscription( + hass: HomeAssistant, + component_factory: ComponentFactory, + aioclient_mock: AiohttpClientMocker, +) -> None: + """Test data manager webhook subscriptions.""" + person0 = new_profile_config("person0", 0) + await component_factory.configure_component(profile_configs=(person0,)) + + api: ConfigEntryWithingsApi = MagicMock(spec=ConfigEntryWithingsApi) + data_manager = DataManager( + hass, + "person0", + api, + 0, + WebhookConfig(id="1234", url="http://localhost/api/webhook/1234", enabled=True), + ) + + # pylint: disable=protected-access + data_manager._notify_subscribe_delay = datetime.timedelta(seconds=0) + data_manager._notify_unsubscribe_delay = datetime.timedelta(seconds=0) + + api.notify_list.return_value = NotifyListResponse( + profiles=( + NotifyListProfile( + appli=NotifyAppli.BED_IN, + callbackurl="https://not.my.callback/url", + expires=None, + comment=None, + ), + NotifyListProfile( + appli=NotifyAppli.BED_IN, + callbackurl=data_manager.webhook_config.url, + expires=None, + comment=None, + ), + NotifyListProfile( + appli=NotifyAppli.BED_OUT, + callbackurl=data_manager.webhook_config.url, + expires=None, + comment=None, + ), + ) + ) + + aioclient_mock.clear_requests() + aioclient_mock.request( + "HEAD", data_manager.webhook_config.url, status=200, + ) + + # Test subscribing + await data_manager.async_subscribe_webhook() + api.notify_subscribe.assert_any_call( + data_manager.webhook_config.url, NotifyAppli.WEIGHT + ) + api.notify_subscribe.assert_any_call( + data_manager.webhook_config.url, NotifyAppli.CIRCULATORY + ) + api.notify_subscribe.assert_any_call( + data_manager.webhook_config.url, NotifyAppli.ACTIVITY + ) + api.notify_subscribe.assert_any_call( + data_manager.webhook_config.url, NotifyAppli.SLEEP + ) + try: + api.notify_subscribe.assert_any_call( + data_manager.webhook_config.url, NotifyAppli.USER + ) + assert False + except AssertionError: + pass + try: + api.notify_subscribe.assert_any_call( + data_manager.webhook_config.url, NotifyAppli.BED_IN + ) + assert False + except AssertionError: + pass + try: + api.notify_subscribe.assert_any_call( + data_manager.webhook_config.url, NotifyAppli.BED_OUT + ) + assert False + except AssertionError: + pass + + # Test unsubscribing. 
+ await data_manager.async_unsubscribe_webhook() + api.notify_revoke.assert_any_call( + data_manager.webhook_config.url, NotifyAppli.BED_IN + ) + api.notify_revoke.assert_any_call( + data_manager.webhook_config.url, NotifyAppli.BED_OUT + ) diff --git a/tests/components/withings/test_config_flow.py b/tests/components/withings/test_config_flow.py new file mode 100644 index 00000000000..f47a8f95e53 --- /dev/null +++ b/tests/components/withings/test_config_flow.py @@ -0,0 +1,97 @@ +"""Tests for config flow.""" +from aiohttp.test_utils import TestClient + +from homeassistant.components.withings import const +from homeassistant.config import async_process_ha_core_config +from homeassistant.const import ( + CONF_CLIENT_ID, + CONF_CLIENT_SECRET, + CONF_EXTERNAL_URL, + CONF_UNIT_SYSTEM, + CONF_UNIT_SYSTEM_METRIC, +) +from homeassistant.core import DOMAIN as HA_DOMAIN, HomeAssistant +from homeassistant.helpers import config_entry_oauth2_flow +from homeassistant.helpers.config_entry_oauth2_flow import AUTH_CALLBACK_PATH +from homeassistant.setup import async_setup_component + +from tests.common import MockConfigEntry + + +async def test_config_non_unique_profile(hass: HomeAssistant) -> None: + """Test setup a non-unique profile.""" + config_entry = MockConfigEntry( + domain=const.DOMAIN, data={const.PROFILE: "person0"}, unique_id="0" + ) + config_entry.add_to_hass(hass) + + result = await hass.config_entries.flow.async_init( + const.DOMAIN, context={"source": "profile"}, data={const.PROFILE: "person0"} + ) + + assert result + assert result["errors"]["base"] == "profile_exists" + + +async def test_config_reauth_profile( + hass: HomeAssistant, aiohttp_client, aioclient_mock +) -> None: + """Test reauth an existing profile re-creates the config entry.""" + hass_config = { + HA_DOMAIN: { + CONF_UNIT_SYSTEM: CONF_UNIT_SYSTEM_METRIC, + CONF_EXTERNAL_URL: "http://127.0.0.1:8080/", + }, + const.DOMAIN: { + CONF_CLIENT_ID: "my_client_id", + CONF_CLIENT_SECRET: "my_client_secret", + const.CONF_USE_WEBHOOK: False, + }, + } + await async_process_ha_core_config(hass, hass_config.get(HA_DOMAIN)) + assert await async_setup_component(hass, const.DOMAIN, hass_config) + await hass.async_block_till_done() + + config_entry = MockConfigEntry( + domain=const.DOMAIN, data={const.PROFILE: "person0"}, unique_id="0" + ) + config_entry.add_to_hass(hass) + + result = await hass.config_entries.flow.async_init( + const.DOMAIN, context={"source": "reauth", "profile": "person0"} + ) + assert result + assert result["type"] == "form" + assert result["step_id"] == "reauth" + assert result["description_placeholders"] == {const.PROFILE: "person0"} + + result = await hass.config_entries.flow.async_configure(result["flow_id"], {},) + + # pylint: disable=protected-access + state = config_entry_oauth2_flow._encode_jwt(hass, {"flow_id": result["flow_id"]}) + + client: TestClient = await aiohttp_client(hass.http.app) + resp = await client.get(f"{AUTH_CALLBACK_PATH}?code=abcd&state={state}") + assert resp.status == 200 + assert resp.headers["content-type"] == "text/html; charset=utf-8" + + aioclient_mock.clear_requests() + aioclient_mock.post( + "https://account.withings.com/oauth2/token", + json={ + "refresh_token": "mock-refresh-token", + "access_token": "mock-access-token", + "type": "Bearer", + "expires_in": 60, + "userid": "0", + }, + ) + + result = await hass.config_entries.flow.async_configure(result["flow_id"]) + assert result + assert result["type"] == "abort" + assert result["reason"] == "already_configured" + + entries = 
hass.config_entries.async_entries(const.DOMAIN) + assert entries + assert entries[0].data["token"]["refresh_token"] == "mock-refresh-token" diff --git a/tests/components/withings/test_init.py b/tests/components/withings/test_init.py index b65e175913d..4f4a85585bf 100644 --- a/tests/components/withings/test_init.py +++ b/tests/components/withings/test_init.py @@ -1,43 +1,37 @@ """Tests for the Withings component.""" -import re -import time - -import requests_mock +import pytest import voluptuous as vol -from withings_api import AbstractWithingsApi -from withings_api.common import SleepModel, SleepState +from withings_api.common import UnauthorizedException -import homeassistant.components.http as http -from homeassistant.components.withings import ( - CONFIG_SCHEMA, - async_setup, - async_setup_entry, - const, -) +import homeassistant.components.webhook as webhook +from homeassistant.components.withings import CONFIG_SCHEMA, DOMAIN, async_setup, const +from homeassistant.components.withings.common import ConfigEntryWithingsApi, DataManager from homeassistant.config import async_process_ha_core_config -from homeassistant.const import CONF_CLIENT_ID, CONF_CLIENT_SECRET, STATE_UNKNOWN -from homeassistant.core import HomeAssistant +from homeassistant.const import ( + CONF_CLIENT_ID, + CONF_CLIENT_SECRET, + CONF_EXTERNAL_URL, + CONF_UNIT_SYSTEM, + CONF_UNIT_SYSTEM_METRIC, +) +from homeassistant.core import DOMAIN as HA_DOMAIN, HomeAssistant +from homeassistant.helpers.update_coordinator import DataUpdateCoordinator +from homeassistant.setup import async_setup_component from .common import ( - WITHINGS_GET_DEVICE_RESPONSE, - WITHINGS_GET_DEVICE_RESPONSE_EMPTY, - WITHINGS_MEASURES_RESPONSE, - WITHINGS_MEASURES_RESPONSE_EMPTY, - WITHINGS_SLEEP_RESPONSE, - WITHINGS_SLEEP_RESPONSE_EMPTY, - WITHINGS_SLEEP_SUMMARY_RESPONSE, - WITHINGS_SLEEP_SUMMARY_RESPONSE_EMPTY, - assert_state_equals, - configure_integration, - setup_hass, + ComponentFactory, + async_get_flow_for_user_id, + get_data_manager_by_user_id, + new_profile_config, ) -from tests.async_mock import MagicMock +from tests.async_mock import MagicMock, patch +from tests.common import MockConfigEntry -def config_schema_validate(withings_config) -> None: +def config_schema_validate(withings_config) -> dict: """Assert a schema config succeeds.""" - hass_config = {http.DOMAIN: {}, const.DOMAIN: withings_config} + hass_config = {const.DOMAIN: withings_config} return CONFIG_SCHEMA(hass_config) @@ -57,94 +51,57 @@ def test_config_schema_basic_config() -> None: { CONF_CLIENT_ID: "my_client_id", CONF_CLIENT_SECRET: "my_client_secret", - const.CONF_PROFILES: ["Person 1", "Person 2"], + const.CONF_USE_WEBHOOK: True, } ) def test_config_schema_client_id() -> None: """Test schema.""" + config_schema_assert_fail({CONF_CLIENT_SECRET: "my_client_secret"}) config_schema_assert_fail( - { - CONF_CLIENT_SECRET: "my_client_secret", - const.CONF_PROFILES: ["Person 1", "Person 2"], - } - ) - config_schema_assert_fail( - { - CONF_CLIENT_SECRET: "my_client_secret", - CONF_CLIENT_ID: "", - const.CONF_PROFILES: ["Person 1"], - } + {CONF_CLIENT_SECRET: "my_client_secret", CONF_CLIENT_ID: ""} ) config_schema_validate( - { - CONF_CLIENT_SECRET: "my_client_secret", - CONF_CLIENT_ID: "my_client_id", - const.CONF_PROFILES: ["Person 1"], - } + {CONF_CLIENT_SECRET: "my_client_secret", CONF_CLIENT_ID: "my_client_id"} ) def test_config_schema_client_secret() -> None: """Test schema.""" - config_schema_assert_fail( - {CONF_CLIENT_ID: "my_client_id", const.CONF_PROFILES: ["Person 1"]} 
- ) - config_schema_assert_fail( - { - CONF_CLIENT_ID: "my_client_id", - CONF_CLIENT_SECRET: "", - const.CONF_PROFILES: ["Person 1"], - } - ) + config_schema_assert_fail({CONF_CLIENT_ID: "my_client_id"}) + config_schema_assert_fail({CONF_CLIENT_ID: "my_client_id", CONF_CLIENT_SECRET: ""}) config_schema_validate( - { - CONF_CLIENT_ID: "my_client_id", - CONF_CLIENT_SECRET: "my_client_secret", - const.CONF_PROFILES: ["Person 1"], - } - ) - - -def test_config_schema_profiles() -> None: - """Test schema.""" - config_schema_assert_fail( {CONF_CLIENT_ID: "my_client_id", CONF_CLIENT_SECRET: "my_client_secret"} ) - config_schema_assert_fail( - { - CONF_CLIENT_ID: "my_client_id", - CONF_CLIENT_SECRET: "my_client_secret", - const.CONF_PROFILES: "", - } - ) - config_schema_assert_fail( - { - CONF_CLIENT_ID: "my_client_id", - CONF_CLIENT_SECRET: "my_client_secret", - const.CONF_PROFILES: [], - } - ) - config_schema_assert_fail( - { - CONF_CLIENT_ID: "my_client_id", - CONF_CLIENT_SECRET: "my_client_secret", - const.CONF_PROFILES: ["Person 1", "Person 1"], - } - ) + + +def test_config_schema_use_webhook() -> None: + """Test schema.""" config_schema_validate( + {CONF_CLIENT_ID: "my_client_id", CONF_CLIENT_SECRET: "my_client_secret"} + ) + config = config_schema_validate( { CONF_CLIENT_ID: "my_client_id", CONF_CLIENT_SECRET: "my_client_secret", - const.CONF_PROFILES: ["Person 1"], + const.CONF_USE_WEBHOOK: True, } ) - config_schema_validate( + assert config[const.DOMAIN][const.CONF_USE_WEBHOOK] is True + config = config_schema_validate( { CONF_CLIENT_ID: "my_client_id", CONF_CLIENT_SECRET: "my_client_secret", - const.CONF_PROFILES: ["Person 1", "Person 2"], + const.CONF_USE_WEBHOOK: False, + } + ) + assert config[const.DOMAIN][const.CONF_USE_WEBHOOK] is False + config_schema_assert_fail( + { + CONF_CLIENT_ID: "my_client_id", + CONF_CLIENT_SECRET: "my_client_secret", + const.CONF_USE_WEBHOOK: "A", } ) @@ -158,285 +115,111 @@ async def test_async_setup_no_config(hass: HomeAssistant) -> None: hass.async_create_task.assert_not_called() -async def test_upgrade_token( - hass: HomeAssistant, aiohttp_client, aioclient_mock -) -> None: - """Test upgrading from old config data format to new one.""" - config = await setup_hass(hass) - profiles = config[const.DOMAIN][const.CONF_PROFILES] - - await async_process_ha_core_config( - hass, {"internal_url": "http://example.local"}, - ) - - await configure_integration( - hass=hass, - aiohttp_client=aiohttp_client, - aioclient_mock=aioclient_mock, - profiles=profiles, - profile_index=0, - get_device_response=WITHINGS_GET_DEVICE_RESPONSE_EMPTY, - getmeasures_response=WITHINGS_MEASURES_RESPONSE_EMPTY, - get_sleep_response=WITHINGS_SLEEP_RESPONSE_EMPTY, - get_sleep_summary_response=WITHINGS_SLEEP_SUMMARY_RESPONSE_EMPTY, - ) - - entries = hass.config_entries.async_entries(const.DOMAIN) - assert entries - - entry = entries[0] - data = entry.data - token = data.get("token") - hass.config_entries.async_update_entry( - entry, - data={ - const.PROFILE: data.get(const.PROFILE), - const.CREDENTIALS: { - "access_token": token.get("access_token"), - "refresh_token": token.get("refresh_token"), - "token_expiry": token.get("expires_at"), - "token_type": token.get("type"), - "userid": token.get("userid"), - CONF_CLIENT_ID: token.get("my_client_id"), - "consumer_secret": token.get("my_consumer_secret"), - }, - }, - ) - - with requests_mock.mock() as rqmck: - rqmck.get( - re.compile(f"{AbstractWithingsApi.URL}/v2/user?.*action=getdevice(&.*|$)"), - status_code=200, - 
json=WITHINGS_GET_DEVICE_RESPONSE_EMPTY, - ) - - assert await async_setup_entry(hass, entry) - - entries = hass.config_entries.async_entries(const.DOMAIN) - assert entries - - data = entries[0].data - - assert data.get("auth_implementation") == const.DOMAIN - assert data.get("implementation") == const.DOMAIN - assert data.get(const.PROFILE) == profiles[0] - - token = data.get("token") - assert token - assert token.get("access_token") == "mock-access-token" - assert token.get("refresh_token") == "mock-refresh-token" - assert token.get("expires_at") > time.time() - assert token.get("type") == "Bearer" - assert token.get("userid") == "myuserid" - assert not token.get(CONF_CLIENT_ID) - assert not token.get("consumer_secret") - - +@pytest.mark.parametrize( + ["exception"], + [ + [UnauthorizedException("401")], + [UnauthorizedException("401")], + [Exception("401, this is the message")], + ], +) async def test_auth_failure( - hass: HomeAssistant, aiohttp_client, aioclient_mock + hass: HomeAssistant, component_factory: ComponentFactory, exception: Exception ) -> None: """Test auth failure.""" - config = await setup_hass(hass) - profiles = config[const.DOMAIN][const.CONF_PROFILES] - - await async_process_ha_core_config( - hass, {"internal_url": "http://example.local"}, + person0 = new_profile_config( + "person0", + 0, + api_response_user_get_device=exception, + api_response_measure_get_meas=exception, + api_response_sleep_get_summary=exception, ) - await configure_integration( - hass=hass, - aiohttp_client=aiohttp_client, - aioclient_mock=aioclient_mock, - profiles=profiles, - profile_index=0, - get_device_response=WITHINGS_GET_DEVICE_RESPONSE_EMPTY, - getmeasures_response=WITHINGS_MEASURES_RESPONSE_EMPTY, - get_sleep_response=WITHINGS_SLEEP_RESPONSE_EMPTY, - get_sleep_summary_response=WITHINGS_SLEEP_SUMMARY_RESPONSE_EMPTY, + await component_factory.configure_component(profile_configs=(person0,)) + assert not async_get_flow_for_user_id(hass, person0.user_id) + + await component_factory.setup_profile(person0.user_id) + data_manager = get_data_manager_by_user_id(hass, person0.user_id) + await data_manager.poll_data_update_coordinator.async_refresh() + + flows = async_get_flow_for_user_id(hass, person0.user_id) + assert flows + assert len(flows) == 1 + + flow = flows[0] + assert flow["handler"] == const.DOMAIN + assert flow["context"]["profile"] == person0.profile + assert flow["context"]["userid"] == person0.user_id + + result = await hass.config_entries.flow.async_configure( + flow["flow_id"], user_input={} + ) + assert result + assert result["type"] == "external" + assert result["handler"] == const.DOMAIN + assert result["step_id"] == "auth" + + await component_factory.unload(person0) + + +async def test_set_config_unique_id( + hass: HomeAssistant, component_factory: ComponentFactory +) -> None: + """Test upgrading configs to use a unique id.""" + person0 = new_profile_config("person0", 0) + + await component_factory.configure_component(profile_configs=(person0,)) + + config_entry = MockConfigEntry( + domain=DOMAIN, + data={"token": {"userid": "my_user_id"}, "profile": person0.profile}, ) - entries = hass.config_entries.async_entries(const.DOMAIN) - assert entries - - entry = entries[0] - hass.config_entries.async_update_entry( - entry, data={**entry.data, **{"new_item": 1}} - ) - - with requests_mock.mock() as rqmck: - rqmck.get( - re.compile(f"{AbstractWithingsApi.URL}/v2/user?.*action=getdevice(&.*|$)"), - status_code=200, - json={"status": 401, "body": {}}, + with 
patch("homeassistant.components.withings.async_get_data_manager") as mock: + data_manager: DataManager = MagicMock(spec=DataManager) + data_manager.poll_data_update_coordinator = MagicMock( + spec=DataUpdateCoordinator ) + data_manager.poll_data_update_coordinator.last_update_success = True + mock.return_value = data_manager + config_entry.add_to_hass(hass) - assert not (await async_setup_entry(hass, entry)) + await hass.config_entries.async_setup(config_entry.entry_id) + assert config_entry.unique_id == "my_user_id" -async def test_full_setup(hass: HomeAssistant, aiohttp_client, aioclient_mock) -> None: - """Test the whole component lifecycle.""" - config = await setup_hass(hass) - profiles = config[const.DOMAIN][const.CONF_PROFILES] - - await async_process_ha_core_config( - hass, {"internal_url": "http://example.local"}, - ) - - await configure_integration( - hass=hass, - aiohttp_client=aiohttp_client, - aioclient_mock=aioclient_mock, - profiles=profiles, - profile_index=0, - get_device_response=WITHINGS_GET_DEVICE_RESPONSE, - getmeasures_response=WITHINGS_MEASURES_RESPONSE, - get_sleep_response=WITHINGS_SLEEP_RESPONSE, - get_sleep_summary_response=WITHINGS_SLEEP_SUMMARY_RESPONSE, - ) - - await configure_integration( - hass=hass, - aiohttp_client=aiohttp_client, - aioclient_mock=aioclient_mock, - profiles=profiles, - profile_index=1, - get_device_response=WITHINGS_GET_DEVICE_RESPONSE_EMPTY, - getmeasures_response=WITHINGS_MEASURES_RESPONSE_EMPTY, - get_sleep_response=WITHINGS_SLEEP_RESPONSE_EMPTY, - get_sleep_summary_response=WITHINGS_SLEEP_SUMMARY_RESPONSE_EMPTY, - ) - - await configure_integration( - hass=hass, - aiohttp_client=aiohttp_client, - aioclient_mock=aioclient_mock, - profiles=profiles, - profile_index=2, - get_device_response=WITHINGS_GET_DEVICE_RESPONSE_EMPTY, - getmeasures_response=WITHINGS_MEASURES_RESPONSE_EMPTY, - get_sleep_response={ - "status": 0, - "body": { - "model": SleepModel.TRACKER.real, - "series": [ - { - "startdate": "2019-02-01 00:00:00", - "enddate": "2019-02-01 01:00:00", - "state": SleepState.REM.real, - }, - { - "startdate": "2019-02-01 01:00:00", - "enddate": "2019-02-01 02:00:00", - "state": SleepState.AWAKE.real, - }, - ], - }, +async def test_set_convert_unique_id_to_string(hass: HomeAssistant) -> None: + """Test upgrading configs to use a unique id.""" + config_entry = MockConfigEntry( + domain=DOMAIN, + data={ + "token": {"userid": 1234}, + "auth_implementation": "withings", + "profile": "person0", }, - get_sleep_summary_response=WITHINGS_SLEEP_SUMMARY_RESPONSE_EMPTY, ) + config_entry.add_to_hass(hass) - await configure_integration( - hass=hass, - aiohttp_client=aiohttp_client, - aioclient_mock=aioclient_mock, - profiles=profiles, - profile_index=3, - get_device_response=WITHINGS_GET_DEVICE_RESPONSE_EMPTY, - getmeasures_response=WITHINGS_MEASURES_RESPONSE_EMPTY, - get_sleep_response={ - "status": 0, - "body": { - "model": SleepModel.TRACKER.real, - "series": [ - { - "startdate": "2019-02-01 01:00:00", - "enddate": "2019-02-01 02:00:00", - "state": SleepState.LIGHT.real, - }, - { - "startdate": "2019-02-01 00:00:00", - "enddate": "2019-02-01 01:00:00", - "state": SleepState.REM.real, - }, - ], - }, + hass_config = { + HA_DOMAIN: { + CONF_UNIT_SYSTEM: CONF_UNIT_SYSTEM_METRIC, + CONF_EXTERNAL_URL: "http://127.0.0.1:8080/", }, - get_sleep_summary_response=WITHINGS_SLEEP_SUMMARY_RESPONSE_EMPTY, - ) - - await configure_integration( - hass=hass, - aiohttp_client=aiohttp_client, - aioclient_mock=aioclient_mock, - profiles=profiles, - profile_index=4, - 
get_device_response=WITHINGS_GET_DEVICE_RESPONSE_EMPTY, - getmeasures_response=WITHINGS_MEASURES_RESPONSE_EMPTY, - get_sleep_response={ - "status": 0, - "body": { - "model": SleepModel.TRACKER.real, - "series": [ - { - "startdate": "2019-02-01 00:00:00", - "enddate": "2019-02-01 01:00:00", - "state": SleepState.LIGHT.real, - }, - { - "startdate": "2019-02-01 02:00:00", - "enddate": "2019-02-01 03:00:00", - "state": SleepState.REM.real, - }, - { - "startdate": "2019-02-01 01:00:00", - "enddate": "2019-02-01 02:00:00", - "state": SleepState.AWAKE.real, - }, - ], - }, + const.DOMAIN: { + CONF_CLIENT_ID: "my_client_id", + CONF_CLIENT_SECRET: "my_client_secret", + const.CONF_USE_WEBHOOK: False, }, - get_sleep_summary_response=WITHINGS_SLEEP_SUMMARY_RESPONSE_EMPTY, - ) + } - # Test the states of the entities. - expected_states = ( - (profiles[0], const.MEAS_WEIGHT_KG, 70.0), - (profiles[0], const.MEAS_FAT_MASS_KG, 5.0), - (profiles[0], const.MEAS_FAT_FREE_MASS_KG, 60.0), - (profiles[0], const.MEAS_MUSCLE_MASS_KG, 50.0), - (profiles[0], const.MEAS_BONE_MASS_KG, 10.0), - (profiles[0], const.MEAS_HEIGHT_M, 2.0), - (profiles[0], const.MEAS_FAT_RATIO_PCT, 0.07), - (profiles[0], const.MEAS_DIASTOLIC_MMHG, 70.0), - (profiles[0], const.MEAS_SYSTOLIC_MMGH, 100.0), - (profiles[0], const.MEAS_HEART_PULSE_BPM, 60.0), - (profiles[0], const.MEAS_SPO2_PCT, 0.95), - (profiles[0], const.MEAS_HYDRATION, 0.95), - (profiles[0], const.MEAS_PWV, 100.0), - (profiles[0], const.MEAS_SLEEP_WAKEUP_DURATION_SECONDS, 320), - (profiles[0], const.MEAS_SLEEP_LIGHT_DURATION_SECONDS, 520), - (profiles[0], const.MEAS_SLEEP_DEEP_DURATION_SECONDS, 720), - (profiles[0], const.MEAS_SLEEP_REM_DURATION_SECONDS, 920), - (profiles[0], const.MEAS_SLEEP_WAKEUP_COUNT, 1120), - (profiles[0], const.MEAS_SLEEP_TOSLEEP_DURATION_SECONDS, 1320), - (profiles[0], const.MEAS_SLEEP_TOWAKEUP_DURATION_SECONDS, 1520), - (profiles[0], const.MEAS_SLEEP_HEART_RATE_AVERAGE, 1720), - (profiles[0], const.MEAS_SLEEP_HEART_RATE_MIN, 1920), - (profiles[0], const.MEAS_SLEEP_HEART_RATE_MAX, 2120), - (profiles[0], const.MEAS_SLEEP_RESPIRATORY_RATE_AVERAGE, 2320), - (profiles[0], const.MEAS_SLEEP_RESPIRATORY_RATE_MIN, 2520), - (profiles[0], const.MEAS_SLEEP_RESPIRATORY_RATE_MAX, 2720), - (profiles[1], const.MEAS_HYDRATION, STATE_UNKNOWN), - (profiles[3], const.MEAS_FAT_FREE_MASS_KG, STATE_UNKNOWN), - ) - for (profile, meas, value) in expected_states: - assert_state_equals(hass, profile, meas, value) + with patch( + "homeassistant.components.withings.common.ConfigEntryWithingsApi", + spec=ConfigEntryWithingsApi, + ): + await async_process_ha_core_config(hass, hass_config.get(HA_DOMAIN)) + assert await async_setup_component(hass, HA_DOMAIN, {}) + assert await async_setup_component(hass, webhook.DOMAIN, hass_config) + assert await async_setup_component(hass, const.DOMAIN, hass_config) + await hass.async_block_till_done() - # Tear down setup entries. 
- entries = hass.config_entries.async_entries(const.DOMAIN) - assert entries - - for entry in entries: - await hass.config_entries.async_unload(entry.entry_id) - - await hass.async_block_till_done() + assert config_entry.unique_id == "1234" diff --git a/tests/components/withings/test_sensor.py b/tests/components/withings/test_sensor.py new file mode 100644 index 00000000000..abfc4758251 --- /dev/null +++ b/tests/components/withings/test_sensor.py @@ -0,0 +1,335 @@ +"""Tests for the Withings component.""" +import time +from typing import Any +from unittest.mock import patch + +import arrow +import pytz +from withings_api.common import ( + GetSleepSummaryData, + GetSleepSummarySerie, + MeasureGetMeasGroup, + MeasureGetMeasGroupAttrib, + MeasureGetMeasGroupCategory, + MeasureGetMeasMeasure, + MeasureGetMeasResponse, + MeasureType, + NotifyAppli, + SleepGetSummaryResponse, + SleepModel, +) + +from homeassistant.components.sensor import DOMAIN as SENSOR_DOMAIN +from homeassistant.components.withings.common import ( + WITHINGS_MEASUREMENTS_MAP, + WithingsAttribute, + async_get_entity_id, + get_platform_attributes, +) +from homeassistant.components.withings.const import Measurement +from homeassistant.core import HomeAssistant, State +from homeassistant.helpers.entity_registry import EntityRegistry + +from .common import ComponentFactory, new_profile_config + +PERSON0 = new_profile_config( + "person0", + 0, + api_response_measure_get_meas=MeasureGetMeasResponse( + measuregrps=( + MeasureGetMeasGroup( + attrib=MeasureGetMeasGroupAttrib.DEVICE_ENTRY_FOR_USER, + category=MeasureGetMeasGroupCategory.REAL, + created=time.time(), + date=time.time(), + deviceid="DEV_ID", + grpid=1, + measures=( + MeasureGetMeasMeasure(type=MeasureType.WEIGHT, unit=0, value=70), + MeasureGetMeasMeasure( + type=MeasureType.FAT_MASS_WEIGHT, unit=0, value=5 + ), + MeasureGetMeasMeasure( + type=MeasureType.FAT_FREE_MASS, unit=0, value=60 + ), + MeasureGetMeasMeasure( + type=MeasureType.MUSCLE_MASS, unit=0, value=50 + ), + MeasureGetMeasMeasure(type=MeasureType.BONE_MASS, unit=0, value=10), + MeasureGetMeasMeasure(type=MeasureType.HEIGHT, unit=0, value=2), + MeasureGetMeasMeasure( + type=MeasureType.TEMPERATURE, unit=0, value=40 + ), + MeasureGetMeasMeasure( + type=MeasureType.BODY_TEMPERATURE, unit=0, value=40 + ), + MeasureGetMeasMeasure( + type=MeasureType.SKIN_TEMPERATURE, unit=0, value=20 + ), + MeasureGetMeasMeasure( + type=MeasureType.FAT_RATIO, unit=-3, value=70 + ), + MeasureGetMeasMeasure( + type=MeasureType.DIASTOLIC_BLOOD_PRESSURE, unit=0, value=70 + ), + MeasureGetMeasMeasure( + type=MeasureType.SYSTOLIC_BLOOD_PRESSURE, unit=0, value=100 + ), + MeasureGetMeasMeasure( + type=MeasureType.HEART_RATE, unit=0, value=60 + ), + MeasureGetMeasMeasure(type=MeasureType.SP02, unit=-2, value=95), + MeasureGetMeasMeasure( + type=MeasureType.HYDRATION, unit=-2, value=95 + ), + MeasureGetMeasMeasure( + type=MeasureType.PULSE_WAVE_VELOCITY, unit=0, value=100 + ), + ), + ), + MeasureGetMeasGroup( + attrib=MeasureGetMeasGroupAttrib.DEVICE_ENTRY_FOR_USER_AMBIGUOUS, + category=MeasureGetMeasGroupCategory.REAL, + created=time.time(), + date=time.time(), + deviceid="DEV_ID", + grpid=1, + measures=( + MeasureGetMeasMeasure(type=MeasureType.WEIGHT, unit=0, value=71), + MeasureGetMeasMeasure( + type=MeasureType.FAT_MASS_WEIGHT, unit=0, value=4 + ), + MeasureGetMeasMeasure( + type=MeasureType.FAT_FREE_MASS, unit=0, value=40 + ), + MeasureGetMeasMeasure( + type=MeasureType.MUSCLE_MASS, unit=0, value=51 + ), + 
MeasureGetMeasMeasure(type=MeasureType.BONE_MASS, unit=0, value=11), + MeasureGetMeasMeasure(type=MeasureType.HEIGHT, unit=0, value=201), + MeasureGetMeasMeasure( + type=MeasureType.TEMPERATURE, unit=0, value=41 + ), + MeasureGetMeasMeasure( + type=MeasureType.BODY_TEMPERATURE, unit=0, value=34 + ), + MeasureGetMeasMeasure( + type=MeasureType.SKIN_TEMPERATURE, unit=0, value=21 + ), + MeasureGetMeasMeasure( + type=MeasureType.FAT_RATIO, unit=-3, value=71 + ), + MeasureGetMeasMeasure( + type=MeasureType.DIASTOLIC_BLOOD_PRESSURE, unit=0, value=71 + ), + MeasureGetMeasMeasure( + type=MeasureType.SYSTOLIC_BLOOD_PRESSURE, unit=0, value=101 + ), + MeasureGetMeasMeasure( + type=MeasureType.HEART_RATE, unit=0, value=61 + ), + MeasureGetMeasMeasure(type=MeasureType.SP02, unit=-2, value=98), + MeasureGetMeasMeasure( + type=MeasureType.HYDRATION, unit=-2, value=96 + ), + MeasureGetMeasMeasure( + type=MeasureType.PULSE_WAVE_VELOCITY, unit=0, value=102 + ), + ), + ), + ), + more=False, + timezone=pytz.UTC, + updatetime=arrow.get("2019-08-01"), + offset=0, + ), + api_response_sleep_get_summary=SleepGetSummaryResponse( + more=False, + offset=0, + series=( + GetSleepSummarySerie( + timezone=pytz.UTC, + model=SleepModel.SLEEP_MONITOR, + startdate=arrow.get("2019-02-01"), + enddate=arrow.get("2019-02-01"), + date=arrow.get("2019-02-01"), + modified=arrow.get(12345), + data=GetSleepSummaryData( + breathing_disturbances_intensity=110, + deepsleepduration=111, + durationtosleep=112, + durationtowakeup=113, + hr_average=114, + hr_max=115, + hr_min=116, + lightsleepduration=117, + remsleepduration=118, + rr_average=119, + rr_max=120, + rr_min=121, + sleep_score=122, + snoring=123, + snoringepisodecount=124, + wakeupcount=125, + wakeupduration=126, + ), + ), + GetSleepSummarySerie( + timezone=pytz.UTC, + model=SleepModel.SLEEP_MONITOR, + startdate=arrow.get("2019-02-01"), + enddate=arrow.get("2019-02-01"), + date=arrow.get("2019-02-01"), + modified=arrow.get(12345), + data=GetSleepSummaryData( + breathing_disturbances_intensity=210, + deepsleepduration=211, + durationtosleep=212, + durationtowakeup=213, + hr_average=214, + hr_max=215, + hr_min=216, + lightsleepduration=217, + remsleepduration=218, + rr_average=219, + rr_max=220, + rr_min=221, + sleep_score=222, + snoring=223, + snoringepisodecount=224, + wakeupcount=225, + wakeupduration=226, + ), + ), + ), + ), +) + +EXPECTED_DATA = ( + (PERSON0, Measurement.WEIGHT_KG, 70.0), + (PERSON0, Measurement.FAT_MASS_KG, 5.0), + (PERSON0, Measurement.FAT_FREE_MASS_KG, 60.0), + (PERSON0, Measurement.MUSCLE_MASS_KG, 50.0), + (PERSON0, Measurement.BONE_MASS_KG, 10.0), + (PERSON0, Measurement.HEIGHT_M, 2.0), + (PERSON0, Measurement.FAT_RATIO_PCT, 0.07), + (PERSON0, Measurement.DIASTOLIC_MMHG, 70.0), + (PERSON0, Measurement.SYSTOLIC_MMGH, 100.0), + (PERSON0, Measurement.HEART_PULSE_BPM, 60.0), + (PERSON0, Measurement.SPO2_PCT, 0.95), + (PERSON0, Measurement.HYDRATION, 0.95), + (PERSON0, Measurement.PWV, 100.0), + (PERSON0, Measurement.SLEEP_BREATHING_DISTURBANCES_INTENSITY, 160.0), + (PERSON0, Measurement.SLEEP_DEEP_DURATION_SECONDS, 322), + (PERSON0, Measurement.SLEEP_HEART_RATE_AVERAGE, 164.0), + (PERSON0, Measurement.SLEEP_HEART_RATE_MAX, 165.0), + (PERSON0, Measurement.SLEEP_HEART_RATE_MIN, 166.0), + (PERSON0, Measurement.SLEEP_LIGHT_DURATION_SECONDS, 334), + (PERSON0, Measurement.SLEEP_REM_DURATION_SECONDS, 336), + (PERSON0, Measurement.SLEEP_RESPIRATORY_RATE_AVERAGE, 169.0), + (PERSON0, Measurement.SLEEP_RESPIRATORY_RATE_MAX, 170.0), + (PERSON0, 
Measurement.SLEEP_RESPIRATORY_RATE_MIN, 171.0), + (PERSON0, Measurement.SLEEP_SCORE, 222), + (PERSON0, Measurement.SLEEP_SNORING, 173.0), + (PERSON0, Measurement.SLEEP_SNORING_EPISODE_COUNT, 348), + (PERSON0, Measurement.SLEEP_TOSLEEP_DURATION_SECONDS, 162.0), + (PERSON0, Measurement.SLEEP_TOWAKEUP_DURATION_SECONDS, 163.0), + (PERSON0, Measurement.SLEEP_WAKEUP_COUNT, 350), + (PERSON0, Measurement.SLEEP_WAKEUP_DURATION_SECONDS, 176.0), +) + + +def async_assert_state_equals( + entity_id: str, state_obj: State, expected: Any, attribute: WithingsAttribute +) -> None: + """Assert that the given state matches what is expected.""" + assert state_obj, f"Expected entity {entity_id} to exist but it did not" + + assert state_obj.state == str(expected), ( + f"Expected {expected} but was {state_obj.state} " + f"for measure {attribute.measurement}, {entity_id}" + ) + + +async def test_sensor_default_enabled_entities( + hass: HomeAssistant, component_factory: ComponentFactory +) -> None: + """Test entities enabled by default.""" + entity_registry: EntityRegistry = await hass.helpers.entity_registry.async_get_registry() + + await component_factory.configure_component(profile_configs=(PERSON0,)) + + # Assert entities should not exist yet. + for attribute in get_platform_attributes(SENSOR_DOMAIN): + assert not await async_get_entity_id(hass, attribute, PERSON0.user_id) + + # person 0 + await component_factory.setup_profile(PERSON0.user_id) + + # Assert entities should exist. + for attribute in get_platform_attributes(SENSOR_DOMAIN): + entity_id = await async_get_entity_id(hass, attribute, PERSON0.user_id) + assert entity_id + assert entity_registry.async_is_registered(entity_id) + + resp = await component_factory.call_webhook(PERSON0.user_id, NotifyAppli.SLEEP) + assert resp.message_code == 0 + + resp = await component_factory.call_webhook(PERSON0.user_id, NotifyAppli.WEIGHT) + assert resp.message_code == 0 + + for person, measurement, expected in EXPECTED_DATA: + attribute = WITHINGS_MEASUREMENTS_MAP[measurement] + entity_id = await async_get_entity_id(hass, attribute, person.user_id) + state_obj = hass.states.get(entity_id) + + if attribute.enabled_by_default: + async_assert_state_equals(entity_id, state_obj, expected, attribute) + else: + assert state_obj is None + + # Unload + await component_factory.unload(PERSON0) + + +async def test_all_entities( + hass: HomeAssistant, component_factory: ComponentFactory +) -> None: + """Test all entities.""" + entity_registry: EntityRegistry = await hass.helpers.entity_registry.async_get_registry() + + with patch( + "homeassistant.components.withings.sensor.BaseWithingsSensor.entity_registry_enabled_default" + ) as enabled_by_default_mock: + enabled_by_default_mock.return_value = True + + await component_factory.configure_component(profile_configs=(PERSON0,)) + + # Assert entities should not exist yet. + for attribute in get_platform_attributes(SENSOR_DOMAIN): + assert not await async_get_entity_id(hass, attribute, PERSON0.user_id) + + # person 0 + await component_factory.setup_profile(PERSON0.user_id) + + # Assert entities should exist.
+ for attribute in get_platform_attributes(SENSOR_DOMAIN): + entity_id = await async_get_entity_id(hass, attribute, PERSON0.user_id) + assert entity_id + assert entity_registry.async_is_registered(entity_id) + + resp = await component_factory.call_webhook(PERSON0.user_id, NotifyAppli.SLEEP) + assert resp.message_code == 0 + + resp = await component_factory.call_webhook(PERSON0.user_id, NotifyAppli.WEIGHT) + assert resp.message_code == 0 + + for person, measurement, expected in EXPECTED_DATA: + attribute = WITHINGS_MEASUREMENTS_MAP[measurement] + entity_id = await async_get_entity_id(hass, attribute, person.user_id) + state_obj = hass.states.get(entity_id) + + async_assert_state_equals(entity_id, state_obj, expected, attribute) + + # Unload + await component_factory.unload(PERSON0) diff --git a/tests/components/worldclock/test_sensor.py b/tests/components/worldclock/test_sensor.py index 3d5fc7ab5a7..783ca41afff 100644 --- a/tests/components/worldclock/test_sensor.py +++ b/tests/components/worldclock/test_sensor.py @@ -19,10 +19,7 @@ class TestWorldClockSensor(unittest.TestCase): assert setup_component(self.hass, "sensor", config) self.hass.block_till_done() - - def tearDown(self): - """Stop everything that was started.""" - self.hass.stop() + self.addCleanup(self.hass.stop) def test_time(self): """Test the time at a different location.""" diff --git a/tests/components/wsdot/test_sensor.py b/tests/components/wsdot/test_sensor.py index b548c099f40..73190b6897a 100644 --- a/tests/components/wsdot/test_sensor.py +++ b/tests/components/wsdot/test_sensor.py @@ -42,8 +42,9 @@ class TestWSDOT(unittest.TestCase): CONF_TRAVEL_TIMES: [{CONF_ID: 96, CONF_NAME: "I90 EB"}], } self.entities = [] + self.addCleanup(self.tear_down_cleanup) - def tearDown(self): # pylint: disable=invalid-name + def tear_down_cleanup(self): """Stop everything that was started.""" self.hass.stop() diff --git a/tests/components/xiaomi_aqara/__init__.py b/tests/components/xiaomi_aqara/__init__.py new file mode 100644 index 00000000000..c8f1dbe6a13 --- /dev/null +++ b/tests/components/xiaomi_aqara/__init__.py @@ -0,0 +1 @@ +"""Tests for the Xiaomi Aqara integration.""" diff --git a/tests/components/xiaomi_aqara/test_config_flow.py b/tests/components/xiaomi_aqara/test_config_flow.py new file mode 100644 index 00000000000..b7762317fdf --- /dev/null +++ b/tests/components/xiaomi_aqara/test_config_flow.py @@ -0,0 +1,368 @@ +"""Test the Xiaomi Aqara config flow.""" +from socket import gaierror + +import pytest + +from homeassistant import config_entries +from homeassistant.components import zeroconf +from homeassistant.components.xiaomi_aqara import config_flow, const +from homeassistant.const import CONF_HOST, CONF_MAC, CONF_NAME, CONF_PORT + +from tests.async_mock import Mock, patch + +ZEROCONF_NAME = "name" +ZEROCONF_PROP = "properties" +ZEROCONF_MAC = "mac" + +TEST_HOST = "1.2.3.4" +TEST_HOST_2 = "5.6.7.8" +TEST_KEY = "1234567890123456" +TEST_PORT = 1234 +TEST_NAME = "Test_Aqara_Gateway" +TEST_SID = "abcdefghijkl" +TEST_PROTOCOL = "1.1.1" +TEST_MAC = "ab:cd:ef:gh:ij:kl" +TEST_GATEWAY_ID = TEST_MAC +TEST_ZEROCONF_NAME = "lumi-gateway-v3_miio12345678._miio._udp.local." 
+ + +@pytest.fixture(name="xiaomi_aqara", autouse=True) +def xiaomi_aqara_fixture(): + """Mock xiaomi_aqara discovery and entry setup.""" + mock_gateway_discovery = get_mock_discovery([TEST_HOST]) + + with patch( + "homeassistant.components.xiaomi_aqara.config_flow.XiaomiGatewayDiscovery", + return_value=mock_gateway_discovery, + ), patch( + "homeassistant.components.xiaomi_aqara.async_setup_entry", return_value=True + ): + yield + + +def get_mock_discovery(host_list, invalid_interface=False, invalid_key=False): + """Return a mock gateway info instance.""" + gateway_discovery = Mock() + + gateway_dict = {} + for host in host_list: + gateway = Mock() + + gateway.ip_adress = host + gateway.port = TEST_PORT + gateway.sid = TEST_SID + gateway.proto = TEST_PROTOCOL + + if invalid_key: + gateway.write_to_hub = Mock(return_value=False) + + gateway_dict[host] = gateway + + gateway_discovery.gateways = gateway_dict + + if invalid_interface: + gateway_discovery.discover_gateways = Mock(side_effect=gaierror) + + return gateway_discovery + + +async def test_config_flow_user_success(hass): + """Test a successful config flow initialized by the user.""" + result = await hass.config_entries.flow.async_init( + const.DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + + assert result["type"] == "form" + assert result["step_id"] == "user" + assert result["errors"] == {} + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], {const.CONF_INTERFACE: config_flow.DEFAULT_INTERFACE}, + ) + + assert result["type"] == "form" + assert result["step_id"] == "settings" + assert result["errors"] == {} + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], {const.CONF_KEY: TEST_KEY, CONF_NAME: TEST_NAME}, + ) + + assert result["type"] == "create_entry" + assert result["title"] == TEST_NAME + assert result["data"] == { + CONF_HOST: TEST_HOST, + CONF_PORT: TEST_PORT, + CONF_MAC: TEST_MAC, + const.CONF_INTERFACE: config_flow.DEFAULT_INTERFACE, + const.CONF_PROTOCOL: TEST_PROTOCOL, + const.CONF_KEY: TEST_KEY, + const.CONF_SID: TEST_SID, + } + + +async def test_config_flow_user_multiple_success(hass): + """Test a successful config flow initialized by the user with multiple gateways discovered.""" + result = await hass.config_entries.flow.async_init( + const.DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + + assert result["type"] == "form" + assert result["step_id"] == "user" + assert result["errors"] == {} + + mock_gateway_discovery = get_mock_discovery([TEST_HOST, TEST_HOST_2]) + + with patch( + "homeassistant.components.xiaomi_aqara.config_flow.XiaomiGatewayDiscovery", + return_value=mock_gateway_discovery, + ): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], {const.CONF_INTERFACE: config_flow.DEFAULT_INTERFACE}, + ) + + assert result["type"] == "form" + assert result["step_id"] == "select" + assert result["errors"] == {} + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], {"select_ip": TEST_HOST_2}, + ) + + assert result["type"] == "form" + assert result["step_id"] == "settings" + assert result["errors"] == {} + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], {const.CONF_KEY: TEST_KEY, CONF_NAME: TEST_NAME}, + ) + + assert result["type"] == "create_entry" + assert result["title"] == TEST_NAME + assert result["data"] == { + CONF_HOST: TEST_HOST_2, + CONF_PORT: TEST_PORT, + CONF_MAC: TEST_MAC, + const.CONF_INTERFACE: config_flow.DEFAULT_INTERFACE, +
const.CONF_PROTOCOL: TEST_PROTOCOL, + const.CONF_KEY: TEST_KEY, + const.CONF_SID: TEST_SID, + } + + +async def test_config_flow_user_no_key_success(hass): + """Test a successful config flow initialized by the user without a key.""" + result = await hass.config_entries.flow.async_init( + const.DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + + assert result["type"] == "form" + assert result["step_id"] == "user" + assert result["errors"] == {} + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], {const.CONF_INTERFACE: config_flow.DEFAULT_INTERFACE}, + ) + + assert result["type"] == "form" + assert result["step_id"] == "settings" + assert result["errors"] == {} + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], {CONF_NAME: TEST_NAME}, + ) + + assert result["type"] == "create_entry" + assert result["title"] == TEST_NAME + assert result["data"] == { + CONF_HOST: TEST_HOST, + CONF_PORT: TEST_PORT, + CONF_MAC: TEST_MAC, + const.CONF_INTERFACE: config_flow.DEFAULT_INTERFACE, + const.CONF_PROTOCOL: TEST_PROTOCOL, + const.CONF_KEY: None, + const.CONF_SID: TEST_SID, + } + + +async def test_config_flow_user_discovery_error(hass): + """Test a failed config flow initialized by the user with no gateways discovered.""" + result = await hass.config_entries.flow.async_init( + const.DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + + assert result["type"] == "form" + assert result["step_id"] == "user" + assert result["errors"] == {} + + mock_gateway_discovery = get_mock_discovery([]) + + with patch( + "homeassistant.components.xiaomi_aqara.config_flow.XiaomiGatewayDiscovery", + return_value=mock_gateway_discovery, + ): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], {const.CONF_INTERFACE: config_flow.DEFAULT_INTERFACE}, + ) + + assert result["type"] == "form" + assert result["step_id"] == "user" + assert result["errors"] == {"base": "discovery_error"} + + +async def test_config_flow_user_invalid_interface(hass): + """Test a failed config flow initialized by the user with an invalid interface.""" + result = await hass.config_entries.flow.async_init( + const.DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + + assert result["type"] == "form" + assert result["step_id"] == "user" + assert result["errors"] == {} + + mock_gateway_discovery = get_mock_discovery([], invalid_interface=True) + + with patch( + "homeassistant.components.xiaomi_aqara.config_flow.XiaomiGatewayDiscovery", + return_value=mock_gateway_discovery, + ): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], {const.CONF_INTERFACE: config_flow.DEFAULT_INTERFACE}, + ) + + assert result["type"] == "form" + assert result["step_id"] == "user" + assert result["errors"] == {const.CONF_INTERFACE: "invalid_interface"} + + +async def test_config_flow_user_invalid_key(hass): + """Test a failed config flow initialized by the user with an invalid key.""" + result = await hass.config_entries.flow.async_init( + const.DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + + assert result["type"] == "form" + assert result["step_id"] == "user" + assert result["errors"] == {} + + mock_gateway_discovery = get_mock_discovery([TEST_HOST], invalid_key=True) + + with patch( + "homeassistant.components.xiaomi_aqara.config_flow.XiaomiGatewayDiscovery", + return_value=mock_gateway_discovery, + ): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], {const.CONF_INTERFACE:
config_flow.DEFAULT_INTERFACE}, + ) + + assert result["type"] == "form" + assert result["step_id"] == "settings" + assert result["errors"] == {} + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], {const.CONF_KEY: TEST_KEY, CONF_NAME: TEST_NAME}, + ) + + assert result["type"] == "form" + assert result["step_id"] == "settings" + assert result["errors"] == {const.CONF_KEY: "invalid_key"} + + +async def test_zeroconf_success(hass): + """Test a successful zeroconf discovery of a xiaomi aqara gateway.""" + result = await hass.config_entries.flow.async_init( + const.DOMAIN, + context={"source": config_entries.SOURCE_ZEROCONF}, + data={ + zeroconf.ATTR_HOST: TEST_HOST, + ZEROCONF_NAME: TEST_ZEROCONF_NAME, + ZEROCONF_PROP: {ZEROCONF_MAC: TEST_MAC}, + }, + ) + + assert result["type"] == "form" + assert result["step_id"] == "user" + assert result["errors"] == {} + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], {const.CONF_INTERFACE: config_flow.DEFAULT_INTERFACE}, + ) + + assert result["type"] == "form" + assert result["step_id"] == "settings" + assert result["errors"] == {} + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], {const.CONF_KEY: TEST_KEY, CONF_NAME: TEST_NAME}, + ) + + assert result["type"] == "create_entry" + assert result["title"] == TEST_NAME + assert result["data"] == { + CONF_HOST: TEST_HOST, + CONF_PORT: TEST_PORT, + CONF_MAC: TEST_MAC, + const.CONF_INTERFACE: config_flow.DEFAULT_INTERFACE, + const.CONF_PROTOCOL: TEST_PROTOCOL, + const.CONF_KEY: TEST_KEY, + const.CONF_SID: TEST_SID, + } + + +async def test_zeroconf_missing_data(hass): + """Test a failed zeroconf discovery because of missing data.""" + result = await hass.config_entries.flow.async_init( + const.DOMAIN, + context={"source": config_entries.SOURCE_ZEROCONF}, + data={zeroconf.ATTR_HOST: TEST_HOST, ZEROCONF_NAME: TEST_ZEROCONF_NAME}, + ) + + assert result["type"] == "abort" + assert result["reason"] == "not_xiaomi_aqara" + + +async def test_zeroconf_unknown_device(hass): + """Test a failed zeroconf discovery because of an unknown device.""" + result = await hass.config_entries.flow.async_init( + const.DOMAIN, + context={"source": config_entries.SOURCE_ZEROCONF}, + data={ + zeroconf.ATTR_HOST: TEST_HOST, + ZEROCONF_NAME: "not-a-xiaomi-aqara-gateway", + ZEROCONF_PROP: {ZEROCONF_MAC: TEST_MAC}, + }, + ) + + assert result["type"] == "abort" + assert result["reason"] == "not_xiaomi_aqara" + + +async def test_zeroconf_not_found_error(hass): + """Test a failed zeroconf discovery because the correct gateway could not be found.""" + result = await hass.config_entries.flow.async_init( + const.DOMAIN, + context={"source": config_entries.SOURCE_ZEROCONF}, + data={ + zeroconf.ATTR_HOST: TEST_HOST, + ZEROCONF_NAME: TEST_ZEROCONF_NAME, + ZEROCONF_PROP: {ZEROCONF_MAC: TEST_MAC}, + }, + ) + + assert result["type"] == "form" + assert result["step_id"] == "user" + assert result["errors"] == {} + + mock_gateway_discovery = get_mock_discovery([TEST_HOST_2]) + + with patch( + "homeassistant.components.xiaomi_aqara.config_flow.XiaomiGatewayDiscovery", + return_value=mock_gateway_discovery, + ): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], {const.CONF_INTERFACE: config_flow.DEFAULT_INTERFACE}, + ) + + assert result["type"] == "form" + assert result["step_id"] == "user" + assert result["errors"] == {"base": "not_found_error"} diff --git a/tests/components/xiaomi_miio/test_vacuum.py
b/tests/components/xiaomi_miio/test_vacuum.py index d497aec0dca..3b1234622ad 100644 --- a/tests/components/xiaomi_miio/test_vacuum.py +++ b/tests/components/xiaomi_miio/test_vacuum.py @@ -1,8 +1,9 @@ """The tests for the Xiaomi vacuum platform.""" -from datetime import time, timedelta +from datetime import datetime, time, timedelta from unittest import mock import pytest +from pytz import utc from homeassistant.components.vacuum import ( ATTR_BATTERY_ICON, @@ -19,6 +20,7 @@ from homeassistant.components.vacuum import ( STATE_CLEANING, STATE_ERROR, ) +from homeassistant.components.xiaomi_miio.const import DOMAIN as XIAOMI_DOMAIN from homeassistant.components.xiaomi_miio.vacuum import ( ATTR_CLEANED_AREA, ATTR_CLEANED_TOTAL_AREA, @@ -32,11 +34,13 @@ from homeassistant.components.xiaomi_miio.vacuum import ( ATTR_FILTER_LEFT, ATTR_MAIN_BRUSH_LEFT, ATTR_SIDE_BRUSH_LEFT, + ATTR_TIMERS, CONF_HOST, CONF_NAME, CONF_TOKEN, - DOMAIN as XIAOMI_DOMAIN, + SERVICE_CLEAN_SEGMENT, SERVICE_CLEAN_ZONE, + SERVICE_GOTO, SERVICE_MOVE_REMOTE_CONTROL, SERVICE_MOVE_REMOTE_CONTROL_STEP, SERVICE_START_REMOTE_CONTROL, @@ -59,6 +63,7 @@ STATUS_CALLS = [ mock.call.consumable_status(), mock.call.clean_history(), mock.call.dnd_status(), + mock.call.timer(), ] @@ -93,6 +98,18 @@ def mirobo_is_got_error_fixture(): mock_vacuum.dnd_status().start = time(hour=22, minute=0) mock_vacuum.dnd_status().end = time(hour=6, minute=0) + mock_timer_1 = mock.MagicMock() + mock_timer_1.enabled = True + mock_timer_1.cron = "5 5 1 8 1" + mock_timer_1.next_schedule = datetime(2020, 5, 23, 13, 21, 10, tzinfo=utc) + + mock_timer_2 = mock.MagicMock() + mock_timer_2.enabled = False + mock_timer_2.cron = "5 5 1 8 2" + mock_timer_2.next_schedule = datetime(2020, 5, 23, 13, 21, 10, tzinfo=utc) + + mock_vacuum.timer.return_value = [mock_timer_1, mock_timer_2] + with mock.patch( "homeassistant.components.xiaomi_miio.vacuum.Vacuum" ) as mock_vaccum_cls: @@ -159,6 +176,18 @@ def mirobo_is_on_fixture(): mock_vacuum.status().state_code = 5 mock_vacuum.dnd_status().enabled = False + mock_timer_1 = mock.MagicMock() + mock_timer_1.enabled = True + mock_timer_1.cron = "5 5 1 8 1" + mock_timer_1.next_schedule = datetime(2020, 5, 23, 13, 21, 10, tzinfo=utc) + + mock_timer_2 = mock.MagicMock() + mock_timer_2.enabled = False + mock_timer_2.cron = "5 5 1 8 2" + mock_timer_2.next_schedule = datetime(2020, 5, 23, 13, 21, 10, tzinfo=utc) + + mock_vacuum.timer.return_value = [mock_timer_1, mock_timer_2] + with mock.patch( "homeassistant.components.xiaomi_miio.vacuum.Vacuum" ) as mock_vaccum_cls: @@ -181,21 +210,9 @@ def mirobo_errors_fixture(): async def test_xiaomi_exceptions(hass, caplog, mock_mirobo_errors): """Test vacuum supported features.""" entity_name = "test_vacuum_cleaner_error" - await async_setup_component( - hass, - DOMAIN, - { - DOMAIN: { - CONF_PLATFORM: PLATFORM, - CONF_HOST: "127.0.0.1", - CONF_NAME: entity_name, - CONF_TOKEN: "12345678901234567890123456789012", - } - }, - ) - await hass.async_block_till_done() + await setup_component(hass, entity_name) - assert "Initializing with host 127.0.0.1 (token 12345...)" in caplog.text + assert "Initializing with host 192.168.1.100 (token 12345...)" in caplog.text assert mock_mirobo_errors.status.call_count == 1 assert "ERROR" in caplog.text assert "Got OSError while fetching the state" in caplog.text @@ -204,23 +221,9 @@ async def test_xiaomi_exceptions(hass, caplog, mock_mirobo_errors): async def test_xiaomi_vacuum_services(hass, caplog, mock_mirobo_is_got_error): """Test vacuum supported features.""" 
entity_name = "test_vacuum_cleaner_1" - entity_id = f"{DOMAIN}.{entity_name}" + entity_id = await setup_component(hass, entity_name) - await async_setup_component( - hass, - DOMAIN, - { - DOMAIN: { - CONF_PLATFORM: PLATFORM, - CONF_HOST: "127.0.0.1", - CONF_NAME: entity_name, - CONF_TOKEN: "12345678901234567890123456789012", - } - }, - ) - await hass.async_block_till_done() - - assert "Initializing with host 127.0.0.1 (token 12345...)" in caplog.text + assert "Initializing with host 192.168.1.100 (token 12345...)" in caplog.text # Check state attributes state = hass.states.get(entity_id) @@ -240,6 +243,18 @@ async def test_xiaomi_vacuum_services(hass, caplog, mock_mirobo_is_got_error): assert state.attributes.get(ATTR_CLEANING_COUNT) == 35 assert state.attributes.get(ATTR_CLEANED_TOTAL_AREA) == 123 assert state.attributes.get(ATTR_CLEANING_TOTAL_TIME) == 695 + assert state.attributes.get(ATTR_TIMERS) == [ + { + "enabled": True, + "cron": "5 5 1 8 1", + "next_schedule": datetime(2020, 5, 23, 13, 21, 10, tzinfo=utc), + }, + { + "enabled": False, + "cron": "5 5 1 8 2", + "next_schedule": datetime(2020, 5, 23, 13, 21, 10, tzinfo=utc), + }, + ] # Call services await hass.services.async_call( @@ -307,21 +322,7 @@ async def test_xiaomi_vacuum_services(hass, caplog, mock_mirobo_is_got_error): async def test_xiaomi_specific_services(hass, caplog, mock_mirobo_is_on): """Test vacuum supported features.""" entity_name = "test_vacuum_cleaner_2" - entity_id = f"{DOMAIN}.{entity_name}" - - await async_setup_component( - hass, - DOMAIN, - { - DOMAIN: { - CONF_PLATFORM: PLATFORM, - CONF_HOST: "192.168.1.100", - CONF_NAME: entity_name, - CONF_TOKEN: "12345678901234567890123456789012", - } - }, - ) - await hass.async_block_till_done() + entity_id = await setup_component(hass, entity_name) assert "Initializing with host 192.168.1.100 (token 12345" in caplog.text @@ -340,6 +341,18 @@ async def test_xiaomi_specific_services(hass, caplog, mock_mirobo_is_on): assert state.attributes.get(ATTR_CLEANING_COUNT) == 41 assert state.attributes.get(ATTR_CLEANED_TOTAL_AREA) == 323 assert state.attributes.get(ATTR_CLEANING_TOTAL_TIME) == 675 + assert state.attributes.get(ATTR_TIMERS) == [ + { + "enabled": True, + "cron": "5 5 1 8 1", + "next_schedule": datetime(2020, 5, 23, 13, 21, 10, tzinfo=utc), + }, + { + "enabled": False, + "cron": "5 5 1 8 2", + "next_schedule": datetime(2020, 5, 23, 13, 21, 10, tzinfo=utc), + }, + ] # Xiaomi vacuum specific services: await hass.services.async_call( @@ -355,7 +368,10 @@ async def test_xiaomi_specific_services(hass, caplog, mock_mirobo_is_on): control = {"duration": 1000, "rotation": -40, "velocity": -0.1} await hass.services.async_call( - XIAOMI_DOMAIN, SERVICE_MOVE_REMOTE_CONTROL, control, blocking=True + XIAOMI_DOMAIN, + SERVICE_MOVE_REMOTE_CONTROL, + {**control, ATTR_ENTITY_ID: entity_id}, + blocking=True, ) mock_mirobo_is_on.manual_control.assert_has_calls( [mock.call(**control)], any_order=True @@ -364,7 +380,10 @@ async def test_xiaomi_specific_services(hass, caplog, mock_mirobo_is_on): mock_mirobo_is_on.reset_mock() await hass.services.async_call( - XIAOMI_DOMAIN, SERVICE_STOP_REMOTE_CONTROL, {}, blocking=True + XIAOMI_DOMAIN, + SERVICE_STOP_REMOTE_CONTROL, + {ATTR_ENTITY_ID: entity_id}, + blocking=True, ) mock_mirobo_is_on.assert_has_calls([mock.call.manual_stop()], any_order=True) mock_mirobo_is_on.assert_has_calls(STATUS_CALLS, any_order=True) @@ -372,7 +391,10 @@ async def test_xiaomi_specific_services(hass, caplog, mock_mirobo_is_on): control_once = {"duration": 2000, 
"rotation": 120, "velocity": 0.1} await hass.services.async_call( - XIAOMI_DOMAIN, SERVICE_MOVE_REMOTE_CONTROL_STEP, control_once, blocking=True + XIAOMI_DOMAIN, + SERVICE_MOVE_REMOTE_CONTROL_STEP, + {**control_once, ATTR_ENTITY_ID: entity_id}, + blocking=True, ) mock_mirobo_is_on.manual_control_once.assert_has_calls( [mock.call(**control_once)], any_order=True @@ -382,7 +404,10 @@ async def test_xiaomi_specific_services(hass, caplog, mock_mirobo_is_on): control = {"zone": [[123, 123, 123, 123]], "repeats": 2} await hass.services.async_call( - XIAOMI_DOMAIN, SERVICE_CLEAN_ZONE, control, blocking=True + XIAOMI_DOMAIN, + SERVICE_CLEAN_ZONE, + {**control, ATTR_ENTITY_ID: entity_id}, + blocking=True, ) mock_mirobo_is_on.zoned_clean.assert_has_calls( [mock.call([[123, 123, 123, 123, 2]])], any_order=True @@ -394,21 +419,7 @@ async def test_xiaomi_specific_services(hass, caplog, mock_mirobo_is_on): async def test_xiaomi_vacuum_fanspeeds(hass, caplog, mock_mirobo_fanspeeds): """Test Xiaomi vacuum fanspeeds.""" entity_name = "test_vacuum_cleaner_2" - entity_id = f"{DOMAIN}.{entity_name}" - - await async_setup_component( - hass, - DOMAIN, - { - DOMAIN: { - CONF_PLATFORM: PLATFORM, - CONF_HOST: "192.168.1.100", - CONF_NAME: entity_name, - CONF_TOKEN: "12345678901234567890123456789012", - } - }, - ) - await hass.async_block_till_done() + entity_id = await setup_component(hass, entity_name) assert "Initializing with host 192.168.1.100 (token 12345" in caplog.text @@ -453,3 +464,68 @@ async def test_xiaomi_vacuum_fanspeeds(hass, caplog, mock_mirobo_fanspeeds): blocking=True, ) assert "ERROR" in caplog.text + + +async def test_xiaomi_vacuum_goto_service(hass, caplog, mock_mirobo_is_on): + """Test vacuum supported features.""" + entity_name = "test_vacuum_cleaner_2" + entity_id = await setup_component(hass, entity_name) + + data = {"entity_id": entity_id, "x_coord": 25500, "y_coord": 25500} + await hass.services.async_call(XIAOMI_DOMAIN, SERVICE_GOTO, data, blocking=True) + mock_mirobo_is_on.goto.assert_has_calls( + [mock.call(x_coord=data["x_coord"], y_coord=data["y_coord"])], any_order=True + ) + mock_mirobo_is_on.assert_has_calls(STATUS_CALLS, any_order=True) + + +async def test_xiaomi_vacuum_clean_segment_service(hass, caplog, mock_mirobo_is_on): + """Test vacuum supported features.""" + entity_name = "test_vacuum_cleaner_2" + entity_id = await setup_component(hass, entity_name) + + data = {"entity_id": entity_id, "segments": ["1", "2"]} + await hass.services.async_call( + XIAOMI_DOMAIN, SERVICE_CLEAN_SEGMENT, data, blocking=True + ) + mock_mirobo_is_on.segment_clean.assert_has_calls( + [mock.call(segments=[int(i) for i in data["segments"]])], any_order=True + ) + mock_mirobo_is_on.assert_has_calls(STATUS_CALLS, any_order=True) + + +async def test_xiaomi_vacuum_clean_segment_service_single_segment( + hass, caplog, mock_mirobo_is_on +): + """Test vacuum supported features.""" + entity_name = "test_vacuum_cleaner_2" + entity_id = await setup_component(hass, entity_name) + + data = {"entity_id": entity_id, "segments": 1} + await hass.services.async_call( + XIAOMI_DOMAIN, SERVICE_CLEAN_SEGMENT, data, blocking=True + ) + mock_mirobo_is_on.segment_clean.assert_has_calls( + [mock.call(segments=[data["segments"]])], any_order=True + ) + mock_mirobo_is_on.assert_has_calls(STATUS_CALLS, any_order=True) + + +async def setup_component(hass, entity_name): + """Set up vacuum component.""" + entity_id = f"{DOMAIN}.{entity_name}" + + await async_setup_component( + hass, + DOMAIN, + { + DOMAIN: { + CONF_PLATFORM: 
PLATFORM, + CONF_HOST: "192.168.1.100", + CONF_NAME: entity_name, + CONF_TOKEN: "12345678901234567890123456789012", + } + }, + ) + await hass.async_block_till_done() + return entity_id diff --git a/tests/components/yamaha/test_media_player.py b/tests/components/yamaha/test_media_player.py index c0a296bb25b..6a13c1d46e1 100644 --- a/tests/components/yamaha/test_media_player.py +++ b/tests/components/yamaha/test_media_player.py @@ -1,12 +1,15 @@ """The tests for the Yamaha Media player platform.""" -import unittest +import pytest import homeassistant.components.media_player as mp from homeassistant.components.yamaha import media_player as yamaha -from homeassistant.setup import setup_component +from homeassistant.components.yamaha.const import DOMAIN +from homeassistant.helpers.discovery import async_load_platform +from homeassistant.setup import async_setup_component -from tests.async_mock import MagicMock, patch -from tests.common import get_test_home_assistant +from tests.async_mock import MagicMock, PropertyMock, call, patch + +CONFIG = {"media_player": {"platform": "yamaha", "host": "127.0.0.1"}} def _create_zone_mock(name, url): @@ -23,54 +26,142 @@ class FakeYamahaDevice: """Initialize the fake Yamaha device.""" self.ctrl_url = ctrl_url self.name = name - self.zones = zones or [] + self._zones = zones or [] def zone_controllers(self): """Return controllers for all available zones.""" - return self.zones + return self._zones -class TestYamahaMediaPlayer(unittest.TestCase): - """Test the Yamaha media player.""" +@pytest.fixture(name="main_zone") +def main_zone_fixture(): + """Mock the main zone.""" + return _create_zone_mock("Main zone", "http://main") - def setUp(self): - """Set up things to be run when tests are started.""" - self.hass = get_test_home_assistant() - self.main_zone = _create_zone_mock("Main zone", "http://main") - self.device = FakeYamahaDevice( - "http://receiver", "Receiver", zones=[self.main_zone] + +@pytest.fixture(name="device") +def device_fixture(main_zone): + """Mock the yamaha device.""" + device = FakeYamahaDevice("http://receiver", "Receiver", zones=[main_zone]) + with patch("rxv.RXV", return_value=device): + yield device + + +async def test_setup_host(hass, device, main_zone): + """Test set up integration with host.""" + assert await async_setup_component(hass, mp.DOMAIN, CONFIG) + await hass.async_block_till_done() + + state = hass.states.get("media_player.yamaha_receiver_main_zone") + + assert state is not None + assert state.state == "off" + + +async def test_setup_no_host(hass, device, main_zone): + """Test set up integration without host.""" + with patch("rxv.find", return_value=[device]): + assert await async_setup_component( + hass, mp.DOMAIN, {"media_player": {"platform": "yamaha"}} ) + await hass.async_block_till_done() - def tearDown(self): - """Stop everything that was started.""" - self.hass.stop() + state = hass.states.get("media_player.yamaha_receiver_main_zone") - def enable_output(self, port, enabled): - """Enable output on a specific port.""" - data = { - "entity_id": "media_player.yamaha_receiver_main_zone", - "port": port, - "enabled": enabled, - } + assert state is not None + assert state.state == "off" - self.hass.services.call(yamaha.DOMAIN, yamaha.SERVICE_ENABLE_OUTPUT, data, True) - def create_receiver(self, mock_rxv): - """Create a mocked receiver.""" - mock_rxv.return_value = self.device +async def test_setup_discovery(hass, device, main_zone): + """Test set up integration via discovery.""" + discovery_info = { + "name": "Yamaha 
Receiver", + "model_name": "Yamaha", + "control_url": "http://receiver", + "description_url": "http://receiver/description", + } + await async_load_platform( + hass, mp.DOMAIN, "yamaha", discovery_info, {mp.DOMAIN: {}} + ) + await hass.async_block_till_done() - config = {"media_player": {"platform": "yamaha", "host": "127.0.0.1"}} + state = hass.states.get("media_player.yamaha_receiver_main_zone") - assert setup_component(self.hass, mp.DOMAIN, config) - self.hass.block_till_done() + assert state is not None + assert state.state == "off" - @patch("rxv.RXV") - def test_enable_output(self, mock_rxv): - """Test enabling and disabling outputs.""" - self.create_receiver(mock_rxv) - self.enable_output("hdmi1", True) - self.main_zone.enable_output.assert_called_with("hdmi1", True) +async def test_setup_zone_ignore(hass, device, main_zone): + """Test set up integration without host.""" + assert await async_setup_component( + hass, + mp.DOMAIN, + { + "media_player": { + "platform": "yamaha", + "host": "127.0.0.1", + "zone_ignore": "Main zone", + } + }, + ) + await hass.async_block_till_done() - self.enable_output("hdmi2", False) - self.main_zone.enable_output.assert_called_with("hdmi2", False) + state = hass.states.get("media_player.yamaha_receiver_main_zone") + + assert state is None + + +async def test_enable_output(hass, device, main_zone): + """Test enable output service.""" + assert await async_setup_component(hass, mp.DOMAIN, CONFIG) + await hass.async_block_till_done() + + port = "hdmi1" + enabled = True + data = { + "entity_id": "media_player.yamaha_receiver_main_zone", + "port": port, + "enabled": enabled, + } + + await hass.services.async_call(DOMAIN, yamaha.SERVICE_ENABLE_OUTPUT, data, True) + + assert main_zone.enable_output.call_count == 1 + assert main_zone.enable_output.call_args == call(port, enabled) + + +async def test_select_scene(hass, device, main_zone, caplog): + """Test select scene service.""" + scene_prop = PropertyMock(return_value=None) + type(main_zone).scene = scene_prop + + assert await async_setup_component(hass, mp.DOMAIN, CONFIG) + await hass.async_block_till_done() + + scene = "TV Viewing" + data = { + "entity_id": "media_player.yamaha_receiver_main_zone", + "scene": scene, + } + + await hass.services.async_call(DOMAIN, yamaha.SERVICE_SELECT_SCENE, data, True) + + assert scene_prop.call_count == 1 + assert scene_prop.call_args == call(scene) + + scene = "BD/DVD Movie Viewing" + data["scene"] = scene + + await hass.services.async_call(DOMAIN, yamaha.SERVICE_SELECT_SCENE, data, True) + + assert scene_prop.call_count == 2 + assert scene_prop.call_args == call(scene) + + scene_prop.side_effect = AssertionError() + + missing_scene = "Missing scene" + data["scene"] = missing_scene + + await hass.services.async_call(DOMAIN, yamaha.SERVICE_SELECT_SCENE, data, True) + + assert f"Scene '{missing_scene}' does not exist!" 
in caplog.text diff --git a/tests/components/yandex_transport/test_yandex_transport_sensor.py b/tests/components/yandex_transport/test_yandex_transport_sensor.py index 3583dfa0bdf..e5b6f31990b 100644 --- a/tests/components/yandex_transport/test_yandex_transport_sensor.py +++ b/tests/components/yandex_transport/test_yandex_transport_sensor.py @@ -6,10 +6,11 @@ import pytest import homeassistant.components.sensor as sensor from homeassistant.const import CONF_NAME +from homeassistant.setup import async_setup_component import homeassistant.util.dt as dt_util from tests.async_mock import patch -from tests.common import assert_setup_component, async_setup_component, load_fixture +from tests.common import assert_setup_component, load_fixture REPLY = json.loads(load_fixture("yandex_transport_reply.json")) diff --git a/tests/components/zha/common.py b/tests/components/zha/common.py index fd5621137ae..11237f6cd73 100644 --- a/tests/components/zha/common.py +++ b/tests/components/zha/common.py @@ -175,10 +175,10 @@ def async_find_group_entity_id(hass, domain, group): return None -async def async_enable_traffic(hass, zha_devices): +async def async_enable_traffic(hass, zha_devices, enabled=True): """Allow traffic to flow through the gateway and the zha device.""" for zha_device in zha_devices: - zha_device.update_available(True) + zha_device.update_available(enabled) await hass.async_block_till_done() diff --git a/tests/components/zha/conftest.py b/tests/components/zha/conftest.py index 6df46273354..a538c1b7f3c 100644 --- a/tests/components/zha/conftest.py +++ b/tests/components/zha/conftest.py @@ -7,6 +7,7 @@ import zigpy.config import zigpy.group import zigpy.types +from homeassistant.components.zha import DOMAIN import homeassistant.components.zha.core.const as zha_const import homeassistant.components.zha.core.device as zha_core_device from homeassistant.setup import async_setup_component @@ -140,11 +141,27 @@ def zha_device_joined(hass, setup_zha): @pytest.fixture -def zha_device_restored(hass, zigpy_app_controller, setup_zha): +def zha_device_restored(hass, zigpy_app_controller, setup_zha, hass_storage): """Return a restored ZHA device.""" - async def _zha_device(zigpy_dev): + async def _zha_device(zigpy_dev, last_seen=None): zigpy_app_controller.devices[zigpy_dev.ieee] = zigpy_dev + + if last_seen is not None: + hass_storage[f"{DOMAIN}.storage"] = { + "key": f"{DOMAIN}.storage", + "version": 1, + "data": { + "devices": [ + { + "ieee": str(zigpy_dev.ieee), + "last_seen": last_seen, + "name": f"{zigpy_dev.manufacturer} {zigpy_dev.model}", + } + ], + }, + } + await setup_zha() zha_gateway = hass.data[zha_const.DATA_ZHA][zha_const.DATA_ZHA_GATEWAY] return zha_gateway.get_device(zigpy_dev.ieee) diff --git a/tests/components/zha/test_api.py b/tests/components/zha/test_api.py index 88fd1e8437f..0587bd14c8c 100644 --- a/tests/components/zha/test_api.py +++ b/tests/components/zha/test_api.py @@ -42,7 +42,7 @@ async def device_switch(hass, zigpy_device_mock, zha_device_joined): ieee=IEEE_SWITCH_DEVICE, ) zha_device = await zha_device_joined(zigpy_device) - zha_device.set_available(True) + zha_device.available = True return zha_device @@ -65,7 +65,7 @@ async def device_groupable(hass, zigpy_device_mock, zha_device_joined): ieee=IEEE_GROUPABLE_DEVICE, ) zha_device = await zha_device_joined(zigpy_device) - zha_device.set_available(True) + zha_device.available = True return zha_device @@ -266,7 +266,7 @@ async def test_list_groupable_devices(zha_client, device_groupable): # Make sure there are no groupable 
devices when the device is unavailable # Make device unavailable - device_groupable.set_available(False) + device_groupable.available = False await zha_client.send_json({ID: 11, TYPE: "zha/devices/groupable"}) diff --git a/tests/components/zha/test_binary_sensor.py b/tests/components/zha/test_binary_sensor.py index 730c7c844f2..afa86e90f2c 100644 --- a/tests/components/zha/test_binary_sensor.py +++ b/tests/components/zha/test_binary_sensor.py @@ -59,7 +59,7 @@ async def async_test_iaszone_on_off(hass, cluster, entity_id): "device, on_off_test, cluster_name, reporting", [ (DEVICE_IAS, async_test_iaszone_on_off, "ias_zone", (0,)), - (DEVICE_OCCUPANCY, async_test_binary_sensor_on_off, "occupancy", (1,)), + # (DEVICE_OCCUPANCY, async_test_binary_sensor_on_off, "occupancy", (1,)), ], ) async def test_binary_sensor( @@ -75,9 +75,10 @@ async def test_binary_sensor( zigpy_device = zigpy_device_mock(device) zha_device = await zha_device_joined_restored(zigpy_device) entity_id = await find_entity_id(DOMAIN, zha_device, hass) - assert entity_id is not None + assert hass.states.get(entity_id).state == STATE_OFF + await async_enable_traffic(hass, [zha_device], enabled=False) # test that the sensors exist and are in the unavailable state assert hass.states.get(entity_id).state == STATE_UNAVAILABLE diff --git a/tests/components/zha/test_cover.py b/tests/components/zha/test_cover.py index 2c497f6880f..d32eac130b0 100644 --- a/tests/components/zha/test_cover.py +++ b/tests/components/zha/test_cover.py @@ -119,6 +119,7 @@ async def test_cover(m1, hass, zha_device_joined_restored, zigpy_cover_device): entity_id = await find_entity_id(DOMAIN, zha_device, hass) assert entity_id is not None + await async_enable_traffic(hass, [zha_device], enabled=False) # test that the cover was created and that it is unavailable assert hass.states.get(entity_id).state == STATE_UNAVAILABLE @@ -207,6 +208,7 @@ async def test_shade(hass, zha_device_joined_restored, zigpy_shade_device): entity_id = await find_entity_id(DOMAIN, zha_device, hass) assert entity_id is not None + await async_enable_traffic(hass, [zha_device], enabled=False) # test that the cover was created and that it is unavailable assert hass.states.get(entity_id).state == STATE_UNAVAILABLE @@ -355,6 +357,7 @@ async def test_keen_vent(hass, zha_device_joined_restored, zigpy_keen_vent): entity_id = await find_entity_id(DOMAIN, zha_device, hass) assert entity_id is not None + await async_enable_traffic(hass, [zha_device], enabled=False) # test that the cover was created and that it is unavailable assert hass.states.get(entity_id).state == STATE_UNAVAILABLE diff --git a/tests/components/zha/test_device.py b/tests/components/zha/test_device.py index 6e528299911..a408f655ea3 100644 --- a/tests/components/zha/test_device.py +++ b/tests/components/zha/test_device.py @@ -7,6 +7,7 @@ import pytest import zigpy.zcl.clusters.general as general import homeassistant.components.zha.core.device as zha_core_device +from homeassistant.const import STATE_OFF, STATE_UNAVAILABLE import homeassistant.helpers.device_registry as ha_dev_reg import homeassistant.util.dt as dt_util @@ -107,13 +108,13 @@ async def test_check_available_success( basic_ch.read_attributes.reset_mock() device_with_basic_channel.last_seen = None assert zha_device.available is True - _send_time_changed(hass, zha_core_device._CONSIDER_UNAVAILABLE_MAINS + 2) + _send_time_changed(hass, zha_core_device.CONSIDER_UNAVAILABLE_MAINS + 2) await hass.async_block_till_done() assert zha_device.available is False assert 
basic_ch.read_attributes.await_count == 0 device_with_basic_channel.last_seen = ( - time.time() - zha_core_device._CONSIDER_UNAVAILABLE_MAINS - 2 + time.time() - zha_core_device.CONSIDER_UNAVAILABLE_MAINS - 2 ) _seens = [time.time(), device_with_basic_channel.last_seen] @@ -162,7 +163,7 @@ async def test_check_available_unsuccessful( assert basic_ch.read_attributes.await_count == 0 device_with_basic_channel.last_seen = ( - time.time() - zha_core_device._CONSIDER_UNAVAILABLE_MAINS - 2 + time.time() - zha_core_device.CONSIDER_UNAVAILABLE_MAINS - 2 ) # unsuccessfuly ping zigpy device, but zha_device is still available @@ -203,7 +204,7 @@ async def test_check_available_no_basic_channel( assert zha_device.available is True device_without_basic_channel.last_seen = ( - time.time() - zha_core_device._CONSIDER_UNAVAILABLE_BATTERY - 2 + time.time() - zha_core_device.CONSIDER_UNAVAILABLE_BATTERY - 2 ) assert "does not have a mandatory basic cluster" not in caplog.text @@ -228,3 +229,46 @@ async def test_ota_sw_version(hass, ota_zha_device): await hass.async_block_till_done() entry = dev_registry.async_get(ota_zha_device.device_id) assert int(entry.sw_version, base=16) == sw_version + + +@pytest.mark.parametrize( + "device, last_seen_delta, is_available", + ( + ("zigpy_device", 0, True), + ("zigpy_device", zha_core_device.CONSIDER_UNAVAILABLE_MAINS + 2, True,), + ("zigpy_device", zha_core_device.CONSIDER_UNAVAILABLE_BATTERY - 2, True,), + ("zigpy_device", zha_core_device.CONSIDER_UNAVAILABLE_BATTERY + 2, False,), + ("zigpy_device_mains", 0, True), + ("zigpy_device_mains", zha_core_device.CONSIDER_UNAVAILABLE_MAINS - 2, True,), + ("zigpy_device_mains", zha_core_device.CONSIDER_UNAVAILABLE_MAINS + 2, False,), + ( + "zigpy_device_mains", + zha_core_device.CONSIDER_UNAVAILABLE_BATTERY - 2, + False, + ), + ( + "zigpy_device_mains", + zha_core_device.CONSIDER_UNAVAILABLE_BATTERY + 2, + False, + ), + ), +) +async def test_device_restore_availability( + hass, request, device, last_seen_delta, is_available, zha_device_restored +): + """Test initial availability for restored devices.""" + + zigpy_device = request.getfixturevalue(device)() + zha_device = await zha_device_restored( + zigpy_device, last_seen=time.time() - last_seen_delta + ) + entity_id = "switch.fakemanufacturer_fakemodel_e769900a_on_off" + + await hass.async_block_till_done() + # ensure the switch entity was created + assert hass.states.get(entity_id).state is not None + assert zha_device.available is is_available + if is_available: + assert hass.states.get(entity_id).state == STATE_OFF + else: + assert hass.states.get(entity_id).state == STATE_UNAVAILABLE diff --git a/tests/components/zha/test_device_tracker.py b/tests/components/zha/test_device_tracker.py index 330153e5f8c..0cc2b6f25c1 100644 --- a/tests/components/zha/test_device_tracker.py +++ b/tests/components/zha/test_device_tracker.py @@ -49,6 +49,8 @@ async def test_device_tracker(hass, zha_device_joined_restored, zigpy_device_dt) entity_id = await find_entity_id(DOMAIN, zha_device, hass) assert entity_id is not None + assert hass.states.get(entity_id).state == STATE_HOME + await async_enable_traffic(hass, [zha_device], enabled=False) # test that the device tracker was created and that it is unavailable assert hass.states.get(entity_id).state == STATE_UNAVAILABLE diff --git a/tests/components/zha/test_fan.py b/tests/components/zha/test_fan.py index 91819e6f457..b163edbd49a 100644 --- a/tests/components/zha/test_fan.py +++ b/tests/components/zha/test_fan.py @@ -65,7 +65,7 @@ async def 
coordinator(hass, zigpy_device_mock, zha_device_joined): node_descriptor=b"\xf8\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff", ) zha_device = await zha_device_joined(zigpy_device) - zha_device.set_available(True) + zha_device.available = True return zha_device @@ -83,7 +83,7 @@ async def device_fan_1(hass, zigpy_device_mock, zha_device_joined): ieee=IEEE_GROUPABLE_DEVICE, ) zha_device = await zha_device_joined(zigpy_device) - zha_device.set_available(True) + zha_device.available = True return zha_device @@ -105,7 +105,7 @@ async def device_fan_2(hass, zigpy_device_mock, zha_device_joined): ieee=IEEE_GROUPABLE_DEVICE2, ) zha_device = await zha_device_joined(zigpy_device) - zha_device.set_available(True) + zha_device.available = True return zha_device @@ -117,6 +117,8 @@ async def test_fan(hass, zha_device_joined_restored, zigpy_device): entity_id = await find_entity_id(DOMAIN, zha_device, hass) assert entity_id is not None + assert hass.states.get(entity_id).state == STATE_OFF + await async_enable_traffic(hass, [zha_device], enabled=False) # test that the fan was created and that it is unavailable assert hass.states.get(entity_id).state == STATE_UNAVAILABLE diff --git a/tests/components/zha/test_gateway.py b/tests/components/zha/test_gateway.py index 379e4d56492..cc9e811cb49 100644 --- a/tests/components/zha/test_gateway.py +++ b/tests/components/zha/test_gateway.py @@ -11,6 +11,7 @@ import zigpy.zcl.clusters.lighting as lighting from homeassistant.components.light import DOMAIN as LIGHT_DOMAIN from homeassistant.components.zha.core.group import GroupMember +from homeassistant.components.zha.core.store import TOMBSTONE_LIFETIME from .common import async_enable_traffic, async_find_group_entity_id, get_zha_gateway @@ -58,7 +59,7 @@ async def coordinator(hass, zigpy_device_mock, zha_device_joined): node_descriptor=b"\xf8\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff", ) zha_device = await zha_device_joined(zigpy_device) - zha_device.set_available(True) + zha_device.available = True return zha_device @@ -82,7 +83,7 @@ async def device_light_1(hass, zigpy_device_mock, zha_device_joined): ieee=IEEE_GROUPABLE_DEVICE, ) zha_device = await zha_device_joined(zigpy_device) - zha_device.set_available(True) + zha_device.available = True return zha_device @@ -106,19 +107,17 @@ async def device_light_2(hass, zigpy_device_mock, zha_device_joined): ieee=IEEE_GROUPABLE_DEVICE2, ) zha_device = await zha_device_joined(zigpy_device) - zha_device.set_available(True) + zha_device.available = True return zha_device async def test_device_left(hass, zigpy_dev_basic, zha_dev_basic): """Device leaving the network should become unavailable.""" - assert zha_dev_basic.available is False - - await async_enable_traffic(hass, [zha_dev_basic]) assert zha_dev_basic.available is True get_zha_gateway(hass).device_left(zigpy_dev_basic) + await hass.async_block_till_done() assert zha_dev_basic.available is False @@ -214,3 +213,25 @@ async def test_updating_device_store(hass, zigpy_dev_basic, zha_dev_basic): await hass.async_block_till_done() entry = zha_gateway.zha_storage.async_get_or_create_device(zha_dev_basic) assert entry.last_seen == last_seen + + +async def test_cleaning_up_storage(hass, zigpy_dev_basic, zha_dev_basic, hass_storage): + """Test cleaning up zha storage and remove stale devices.""" + zha_gateway = get_zha_gateway(hass) + assert zha_gateway is not None + await async_enable_traffic(hass, [zha_dev_basic]) + + assert zha_dev_basic.last_seen is not None + await zha_gateway.zha_storage.async_save() + await 
hass.async_block_till_done() + + assert hass_storage["zha.storage"]["data"]["devices"] + device = hass_storage["zha.storage"]["data"]["devices"][0] + assert device["ieee"] == str(zha_dev_basic.ieee) + + zha_dev_basic.device.last_seen = time.time() - TOMBSTONE_LIFETIME - 1 + await zha_gateway.async_update_device_storage() + await hass.async_block_till_done() + await zha_gateway.zha_storage.async_save() + await hass.async_block_till_done() + assert not hass_storage["zha.storage"]["data"]["devices"] diff --git a/tests/components/zha/test_light.py b/tests/components/zha/test_light.py index 09c6d97808c..6b94354ed59 100644 --- a/tests/components/zha/test_light.py +++ b/tests/components/zha/test_light.py @@ -88,7 +88,7 @@ async def coordinator(hass, zigpy_device_mock, zha_device_joined): node_descriptor=b"\xf8\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff", ) zha_device = await zha_device_joined(zigpy_device) - zha_device.set_available(True) + zha_device.available = True return zha_device @@ -114,7 +114,7 @@ async def device_light_1(hass, zigpy_device_mock, zha_device_joined): nwk=0xB79D, ) zha_device = await zha_device_joined(zigpy_device) - zha_device.set_available(True) + zha_device.available = True return zha_device @@ -140,7 +140,7 @@ async def device_light_2(hass, zigpy_device_mock, zha_device_joined): nwk=0xC79E, ) zha_device = await zha_device_joined(zigpy_device) - zha_device.set_available(True) + zha_device.available = True return zha_device @@ -166,7 +166,7 @@ async def device_light_3(hass, zigpy_device_mock, zha_device_joined): nwk=0xB89F, ) zha_device = await zha_device_joined(zigpy_device) - zha_device.set_available(True) + zha_device.available = True return zha_device @@ -245,6 +245,8 @@ async def test_light( cluster_color = getattr(zigpy_device.endpoints[1], "light_color", None) cluster_identify = getattr(zigpy_device.endpoints[1], "identify", None) + assert hass.states.get(entity_id).state == STATE_OFF + await async_enable_traffic(hass, [zha_device], enabled=False) # test that the lights were created and that they are unavailable assert hass.states.get(entity_id).state == STATE_UNAVAILABLE @@ -516,6 +518,10 @@ async def test_zha_group_light_entity( dev1_cluster_level = device_light_1.device.endpoints[1].level + await async_enable_traffic( + hass, [device_light_1, device_light_2, device_light_3], enabled=False + ) + await hass.async_block_till_done() # test that the lights were created and that they are unavailable assert hass.states.get(group_entity_id).state == STATE_UNAVAILABLE diff --git a/tests/components/zha/test_lock.py b/tests/components/zha/test_lock.py index 86ec266ffa2..6c464efd7b2 100644 --- a/tests/components/zha/test_lock.py +++ b/tests/components/zha/test_lock.py @@ -43,6 +43,8 @@ async def test_lock(hass, lock): entity_id = await find_entity_id(DOMAIN, zha_device, hass) assert entity_id is not None + assert hass.states.get(entity_id).state == STATE_UNLOCKED + await async_enable_traffic(hass, [zha_device], enabled=False) # test that the lock was created and that it is unavailable assert hass.states.get(entity_id).state == STATE_UNAVAILABLE diff --git a/tests/components/zha/test_sensor.py b/tests/components/zha/test_sensor.py index d560fe2cbba..064b0251e6b 100644 --- a/tests/components/zha/test_sensor.py +++ b/tests/components/zha/test_sensor.py @@ -128,6 +128,8 @@ async def test_sensor( zha_device = await zha_device_joined_restored(zigpy_device) entity_id = await find_entity_id(DOMAIN, zha_device, hass) + await async_enable_traffic(hass, [zha_device], 
enabled=False) + await hass.async_block_till_done() # ensure the sensor entity was created assert hass.states.get(entity_id).state == STATE_UNAVAILABLE @@ -247,6 +249,7 @@ async def test_temp_uom( entity_id = await find_entity_id(DOMAIN, zha_device, hass) if not restore: + await async_enable_traffic(hass, [zha_device], enabled=False) assert hass.states.get(entity_id).state == STATE_UNAVAILABLE # allow traffic to flow through the gateway and devices diff --git a/tests/components/zha/test_switch.py b/tests/components/zha/test_switch.py index 7bdf2ccc4d2..b1c0c643bbc 100644 --- a/tests/components/zha/test_switch.py +++ b/tests/components/zha/test_switch.py @@ -56,7 +56,7 @@ async def coordinator(hass, zigpy_device_mock, zha_device_joined): node_descriptor=b"\xf8\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff", ) zha_device = await zha_device_joined(zigpy_device) - zha_device.set_available(True) + zha_device.available = True return zha_device @@ -75,7 +75,7 @@ async def device_switch_1(hass, zigpy_device_mock, zha_device_joined): ieee=IEEE_GROUPABLE_DEVICE, ) zha_device = await zha_device_joined(zigpy_device) - zha_device.set_available(True) + zha_device.available = True return zha_device @@ -94,7 +94,7 @@ async def device_switch_2(hass, zigpy_device_mock, zha_device_joined): ieee=IEEE_GROUPABLE_DEVICE2, ) zha_device = await zha_device_joined(zigpy_device) - zha_device.set_available(True) + zha_device.available = True return zha_device @@ -106,6 +106,8 @@ async def test_switch(hass, zha_device_joined_restored, zigpy_device): entity_id = await find_entity_id(DOMAIN, zha_device, hass) assert entity_id is not None + assert hass.states.get(entity_id).state == STATE_OFF + await async_enable_traffic(hass, [zha_device], enabled=False) # test that the switch was created and that its state is unavailable assert hass.states.get(entity_id).state == STATE_UNAVAILABLE diff --git a/tests/components/zha/zha_devices_list.py b/tests/components/zha/zha_devices_list.py index d4ea1377d97..6de4bd79a72 100644 --- a/tests/components/zha/zha_devices_list.py +++ b/tests/components/zha/zha_devices_list.py @@ -1372,8 +1372,6 @@ DEVICES = [ }, }, "entities": [ - "sensor.lumi_lumi_plug_maus01_77665544_analog_input", - "sensor.lumi_lumi_plug_maus01_77665544_analog_input_2", "sensor.lumi_lumi_plug_maus01_77665544_electrical_measurement", "switch.lumi_lumi_plug_maus01_77665544_on_off", ], @@ -1388,16 +1386,6 @@ DEVICES = [ "entity_class": "ElectricalMeasurement", "entity_id": "sensor.lumi_lumi_plug_maus01_77665544_electrical_measurement", }, - ("sensor", "00:11:22:33:44:55:66:77-2-12"): { - "channels": ["analog_input"], - "entity_class": "AnalogInput", - "entity_id": "sensor.lumi_lumi_plug_maus01_77665544_analog_input", - }, - ("sensor", "00:11:22:33:44:55:66:77-3-12"): { - "channels": ["analog_input"], - "entity_class": "AnalogInput", - "entity_id": "sensor.lumi_lumi_plug_maus01_77665544_analog_input_2", - }, }, "event_channels": ["1:0x0019"], "manufacturer": "LUMI", @@ -1426,7 +1414,6 @@ DEVICES = [ "entities": [ "light.lumi_lumi_relay_c2acn01_77665544_on_off", "light.lumi_lumi_relay_c2acn01_77665544_on_off_2", - "sensor.lumi_lumi_relay_c2acn01_77665544_analog_input", "sensor.lumi_lumi_relay_c2acn01_77665544_electrical_measurement", ], "entity_map": { @@ -1435,11 +1422,6 @@ DEVICES = [ "entity_class": "Light", "entity_id": "light.lumi_lumi_relay_c2acn01_77665544_on_off", }, - ("sensor", "00:11:22:33:44:55:66:77-1-12"): { - "channels": ["analog_input"], - "entity_class": "AnalogInput", - "entity_id": 
"sensor.lumi_lumi_relay_c2acn01_77665544_analog_input", - }, ("sensor", "00:11:22:33:44:55:66:77-1-2820"): { "channels": ["electrical_measurement"], "entity_class": "ElectricalMeasurement", @@ -1482,33 +1464,13 @@ DEVICES = [ "profile_id": 260, }, }, - "entities": [ - "sensor.lumi_lumi_remote_b186acn01_77665544_multistate_input", - "sensor.lumi_lumi_remote_b186acn01_77665544_multistate_input_2", - "sensor.lumi_lumi_remote_b186acn01_77665544_multistate_input_3", - "sensor.lumi_lumi_remote_b186acn01_77665544_power", - ], + "entities": ["sensor.lumi_lumi_remote_b186acn01_77665544_power"], "entity_map": { ("sensor", "00:11:22:33:44:55:66:77-1-1"): { "channels": ["power"], "entity_class": "Battery", "entity_id": "sensor.lumi_lumi_remote_b186acn01_77665544_power", }, - ("sensor", "00:11:22:33:44:55:66:77-1-18"): { - "channels": ["multistate_input"], - "entity_class": "Text", - "entity_id": "sensor.lumi_lumi_remote_b186acn01_77665544_multistate_input_2", - }, - ("sensor", "00:11:22:33:44:55:66:77-2-18"): { - "channels": ["multistate_input"], - "entity_class": "Text", - "entity_id": "sensor.lumi_lumi_remote_b186acn01_77665544_multistate_input_3", - }, - ("sensor", "00:11:22:33:44:55:66:77-3-18"): { - "channels": ["multistate_input"], - "entity_class": "Text", - "entity_id": "sensor.lumi_lumi_remote_b186acn01_77665544_multistate_input", - }, }, "event_channels": ["1:0x0005", "1:0x0019", "2:0x0005", "3:0x0005"], "manufacturer": "LUMI", @@ -1541,33 +1503,13 @@ DEVICES = [ "profile_id": 260, }, }, - "entities": [ - "sensor.lumi_lumi_remote_b286acn01_77665544_multistate_input", - "sensor.lumi_lumi_remote_b286acn01_77665544_multistate_input_2", - "sensor.lumi_lumi_remote_b286acn01_77665544_multistate_input_3", - "sensor.lumi_lumi_remote_b286acn01_77665544_power", - ], + "entities": ["sensor.lumi_lumi_remote_b286acn01_77665544_power"], "entity_map": { ("sensor", "00:11:22:33:44:55:66:77-1-1"): { "channels": ["power"], "entity_class": "Battery", "entity_id": "sensor.lumi_lumi_remote_b286acn01_77665544_power", }, - ("sensor", "00:11:22:33:44:55:66:77-1-18"): { - "channels": ["multistate_input"], - "entity_class": "Text", - "entity_id": "sensor.lumi_lumi_remote_b286acn01_77665544_multistate_input_3", - }, - ("sensor", "00:11:22:33:44:55:66:77-2-18"): { - "channels": ["multistate_input"], - "entity_class": "Text", - "entity_id": "sensor.lumi_lumi_remote_b286acn01_77665544_multistate_input_2", - }, - ("sensor", "00:11:22:33:44:55:66:77-3-18"): { - "channels": ["multistate_input"], - "entity_class": "Text", - "entity_id": "sensor.lumi_lumi_remote_b286acn01_77665544_multistate_input", - }, }, "event_channels": ["1:0x0005", "1:0x0019", "2:0x0005", "3:0x0005"], "manufacturer": "LUMI", @@ -1897,33 +1839,13 @@ DEVICES = [ "profile_id": 260, }, }, - "entities": [ - "sensor.lumi_lumi_sensor_86sw1_77665544_multistate_input", - "sensor.lumi_lumi_sensor_86sw1_77665544_multistate_input_2", - "sensor.lumi_lumi_sensor_86sw1_77665544_multistate_input_3", - "sensor.lumi_lumi_sensor_86sw1_77665544_power", - ], + "entities": ["sensor.lumi_lumi_sensor_86sw1_77665544_power"], "entity_map": { ("sensor", "00:11:22:33:44:55:66:77-1-1"): { "channels": ["power"], "entity_class": "Battery", "entity_id": "sensor.lumi_lumi_sensor_86sw1_77665544_power", }, - ("sensor", "00:11:22:33:44:55:66:77-1-18"): { - "channels": ["multistate_input"], - "entity_class": "Text", - "entity_id": "sensor.lumi_lumi_sensor_86sw1_77665544_multistate_input_3", - }, - ("sensor", "00:11:22:33:44:55:66:77-2-18"): { - "channels": ["multistate_input"], - 
"entity_class": "Text", - "entity_id": "sensor.lumi_lumi_sensor_86sw1_77665544_multistate_input_2", - }, - ("sensor", "00:11:22:33:44:55:66:77-3-18"): { - "channels": ["multistate_input"], - "entity_class": "Text", - "entity_id": "sensor.lumi_lumi_sensor_86sw1_77665544_multistate_input", - }, }, "event_channels": ["1:0x0005", "1:0x0019", "2:0x0005", "3:0x0005"], "manufacturer": "LUMI", @@ -1956,27 +1878,13 @@ DEVICES = [ "profile_id": 260, }, }, - "entities": [ - "sensor.lumi_lumi_sensor_cube_aqgl01_77665544_analog_input", - "sensor.lumi_lumi_sensor_cube_aqgl01_77665544_multistate_input", - "sensor.lumi_lumi_sensor_cube_aqgl01_77665544_power", - ], + "entities": ["sensor.lumi_lumi_sensor_cube_aqgl01_77665544_power"], "entity_map": { ("sensor", "00:11:22:33:44:55:66:77-1-1"): { "channels": ["power"], "entity_class": "Battery", "entity_id": "sensor.lumi_lumi_sensor_cube_aqgl01_77665544_power", }, - ("sensor", "00:11:22:33:44:55:66:77-2-18"): { - "channels": ["multistate_input"], - "entity_class": "Text", - "entity_id": "sensor.lumi_lumi_sensor_cube_aqgl01_77665544_multistate_input", - }, - ("sensor", "00:11:22:33:44:55:66:77-3-12"): { - "channels": ["analog_input"], - "entity_class": "AnalogInput", - "entity_id": "sensor.lumi_lumi_sensor_cube_aqgl01_77665544_analog_input", - }, }, "event_channels": ["1:0x0005", "1:0x0019", "2:0x0005", "3:0x0005"], "manufacturer": "LUMI", @@ -2161,8 +2069,6 @@ DEVICES = [ }, "entities": [ "binary_sensor.lumi_lumi_sensor_smoke_77665544_ias_zone", - "sensor.lumi_lumi_sensor_smoke_77665544_analog_input", - "sensor.lumi_lumi_sensor_smoke_77665544_multistate_input", "sensor.lumi_lumi_sensor_smoke_77665544_power", ], "entity_map": { @@ -2171,16 +2077,6 @@ DEVICES = [ "entity_class": "Battery", "entity_id": "sensor.lumi_lumi_sensor_smoke_77665544_power", }, - ("sensor", "00:11:22:33:44:55:66:77-1-12"): { - "channels": ["analog_input"], - "entity_class": "AnalogInput", - "entity_id": "sensor.lumi_lumi_sensor_smoke_77665544_analog_input", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-18"): { - "channels": ["multistate_input"], - "entity_class": "Text", - "entity_id": "sensor.lumi_lumi_sensor_smoke_77665544_multistate_input", - }, ("binary_sensor", "00:11:22:33:44:55:66:77-1-1280"): { "channels": ["ias_zone"], "entity_class": "IASZone", @@ -2254,21 +2150,13 @@ DEVICES = [ "profile_id": 260, } }, - "entities": [ - "sensor.lumi_lumi_sensor_switch_aq3_77665544_multistate_input", - "sensor.lumi_lumi_sensor_switch_aq3_77665544_power", - ], + "entities": ["sensor.lumi_lumi_sensor_switch_aq3_77665544_power"], "entity_map": { ("sensor", "00:11:22:33:44:55:66:77-1-1"): { "channels": ["power"], "entity_class": "Battery", "entity_id": "sensor.lumi_lumi_sensor_switch_aq3_77665544_power", }, - ("sensor", "00:11:22:33:44:55:66:77-1-18"): { - "channels": ["multistate_input"], - "entity_class": "Text", - "entity_id": "sensor.lumi_lumi_sensor_switch_aq3_77665544_multistate_input", - }, }, "event_channels": ["1:0x0006"], "manufacturer": "LUMI", diff --git a/tests/components/zwave/test_init.py b/tests/components/zwave/test_init.py index 19733b045dc..d1f141582ca 100644 --- a/tests/components/zwave/test_init.py +++ b/tests/components/zwave/test_init.py @@ -859,8 +859,9 @@ class TestZWaveDeviceEntityValues(unittest.TestCase): self.entity_id = "mock_component.mock_node_mock_value" self.zwave_config = {"zwave": {}} self.device_config = {self.entity_id: {}} + self.addCleanup(self.tear_down_cleanup) - def tearDown(self): # pylint: disable=invalid-name + def tear_down_cleanup(self): """Stop 
everything that was started.""" self.hass.stop() @@ -1204,8 +1205,9 @@ class TestZWaveServices(unittest.TestCase): self.zwave_network.state = MockNetwork.STATE_READY self.hass.bus.fire(EVENT_HOMEASSISTANT_START) self.hass.block_till_done() + self.addCleanup(self.tear_down_cleanup) - def tearDown(self): # pylint: disable=invalid-name + def tear_down_cleanup(self): """Stop everything that was started.""" self.hass.services.call("zwave", "stop_network", {}) self.hass.block_till_done() diff --git a/tests/conftest.py b/tests/conftest.py index efaf1ff7dff..a2fa8e8b2fd 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -5,9 +5,10 @@ import logging import pytest import requests_mock as _requests_mock -from homeassistant import util +from homeassistant import core as ha, util from homeassistant.auth.const import GROUP_ID_ADMIN, GROUP_ID_READ_ONLY from homeassistant.auth.providers import homeassistant, legacy_api_password +from homeassistant.components import mqtt from homeassistant.components.websocket_api.auth import ( TYPE_AUTH, TYPE_AUTH_OK, @@ -18,7 +19,7 @@ from homeassistant.exceptions import ServiceNotFound from homeassistant.setup import async_setup_component from homeassistant.util import location -from tests.async_mock import patch +from tests.async_mock import MagicMock, patch from tests.ignore_uncaught_exceptions import IGNORE_UNCAUGHT_EXCEPTIONS pytest.register_assert_rewrite("tests.common") @@ -27,6 +28,7 @@ from tests.common import ( # noqa: E402, isort:skip CLIENT_ID, INSTANCES, MockUser, + async_fire_mqtt_message, async_test_home_assistant, mock_storage as mock_storage, ) @@ -267,3 +269,49 @@ def fail_on_log_exception(request, monkeypatch): raise monkeypatch.setattr("homeassistant.util.logging.log_exception", log_exception) + + +@pytest.fixture +def mqtt_config(): + """Fixture to allow overriding MQTT config.""" + return None + + +@pytest.fixture +def mqtt_client_mock(hass): + """Fixture to mock MQTT client.""" + + @ha.callback + def _async_fire_mqtt_message(topic, payload, qos, retain): + async_fire_mqtt_message(hass, topic, payload, qos, retain) + + with patch("paho.mqtt.client.Client") as mock_client: + mock_client = mock_client.return_value + mock_client.connect.return_value = 0 + mock_client.subscribe.return_value = (0, 0) + mock_client.unsubscribe.return_value = (0, 0) + mock_client.publish.side_effect = _async_fire_mqtt_message + yield mock_client + + +@pytest.fixture +async def mqtt_mock(hass, mqtt_client_mock, mqtt_config): + """Fixture to mock MQTT component.""" + if mqtt_config is None: + mqtt_config = {mqtt.CONF_BROKER: "mock-broker"} + + result = await async_setup_component(hass, mqtt.DOMAIN, {mqtt.DOMAIN: mqtt_config}) + assert result + await hass.async_block_till_done() + + mqtt_component_mock = MagicMock( + return_value=hass.data["mqtt"], + spec_set=hass.data["mqtt"], + wraps=hass.data["mqtt"], + ) + mqtt_component_mock._mqttc = mqtt_client_mock + + hass.data["mqtt"] = mqtt_component_mock + component = hass.data["mqtt"] + component.reset_mock() + return component diff --git a/tests/fixtures/awair/awair-offline.json b/tests/fixtures/awair/awair-offline.json new file mode 100644 index 00000000000..f93ccdf4b7b --- /dev/null +++ b/tests/fixtures/awair/awair-offline.json @@ -0,0 +1 @@ +{"data":[]} diff --git a/tests/fixtures/awair/awair-r2.json b/tests/fixtures/awair/awair-r2.json new file mode 100644 index 00000000000..e0150eed54f --- /dev/null +++ b/tests/fixtures/awair/awair-r2.json @@ -0,0 +1 @@ 
+{"data":[{"timestamp":"2020-04-10T16:41:57.771Z","score":97.0,"sensors":[{"comp":"temp","value":18.829999923706055},{"comp":"humid","value":50.52000045776367},{"comp":"co2","value":431.0},{"comp":"voc","value":57.0},{"comp":"pm25","value":2.0}],"indices":[{"comp":"temp","value":0.0},{"comp":"humid","value":1.0},{"comp":"co2","value":0.0},{"comp":"voc","value":0.0},{"comp":"pm25","value":0.0}]}]} diff --git a/tests/fixtures/awair/awair.json b/tests/fixtures/awair/awair.json new file mode 100644 index 00000000000..590c4a08642 --- /dev/null +++ b/tests/fixtures/awair/awair.json @@ -0,0 +1 @@ +{"data":[{"timestamp":"2020-04-10T15:38:24.111Z","score":88.0,"sensors":[{"comp":"temp","value":21.770000457763672},{"comp":"humid","value":41.59000015258789},{"comp":"co2","value":654.0},{"comp":"voc","value":366.0},{"comp":"dust","value":14.300000190734863}],"indices":[{"comp":"temp","value":-1.0},{"comp":"humid","value":0.0},{"comp":"co2","value":0.0},{"comp":"voc","value":1.0},{"comp":"dust","value":1.0}]}]} diff --git a/tests/fixtures/awair/devices.json b/tests/fixtures/awair/devices.json new file mode 100644 index 00000000000..413d488c634 --- /dev/null +++ b/tests/fixtures/awair/devices.json @@ -0,0 +1 @@ +{"devices":[{"name":"Living Room","macAddress":"70886B104941","latitude":0.0,"preference":"GENERAL","timezone":"","roomType":"LIVING_ROOM","deviceType":"awair","longitude":0.0,"spaceType":"HOME","deviceUUID":"awair_24947","deviceId":24947,"locationName":"Chicago, IL"}]} diff --git a/tests/fixtures/awair/glow.json b/tests/fixtures/awair/glow.json new file mode 100644 index 00000000000..2274905afc7 --- /dev/null +++ b/tests/fixtures/awair/glow.json @@ -0,0 +1 @@ +{"data":[{"timestamp":"2020-04-10T16:46:15.486Z","score":93.0,"sensors":[{"comp":"temp","value":21.93000030517578},{"comp":"humid","value":42.31999969482422},{"comp":"co2","value":429.0},{"comp":"voc","value":288.0}],"indices":[{"comp":"temp","value":-1.0},{"comp":"humid","value":0.0},{"comp":"co2","value":0.0},{"comp":"voc","value":0.0}]}]} diff --git a/tests/fixtures/awair/mint.json b/tests/fixtures/awair/mint.json new file mode 100644 index 00000000000..2a7cefa8ad7 --- /dev/null +++ b/tests/fixtures/awair/mint.json @@ -0,0 +1 @@ +{"data":[{"timestamp":"2020-04-10T16:25:03.606Z","score":98.0,"sensors":[{"comp":"temp","value":20.639999389648438},{"comp":"humid","value":45.04999923706055},{"comp":"voc","value":269.0},{"comp":"pm25","value":1.0},{"comp":"lux","value":441.70001220703125}],"indices":[{"comp":"temp","value":0.0},{"comp":"humid","value":0.0},{"comp":"voc","value":0.0},{"comp":"pm25","value":0.0}]}]} diff --git a/tests/fixtures/awair/no_devices.json b/tests/fixtures/awair/no_devices.json new file mode 100644 index 00000000000..f5732d79e1e --- /dev/null +++ b/tests/fixtures/awair/no_devices.json @@ -0,0 +1 @@ +{"devices":[]} diff --git a/tests/fixtures/awair/omni.json b/tests/fixtures/awair/omni.json new file mode 100644 index 00000000000..9a3dc3dd063 --- /dev/null +++ b/tests/fixtures/awair/omni.json @@ -0,0 +1 @@ +{"data":[{"timestamp":"2020-04-10T16:18:10.298Z","score":99.0,"sensors":[{"comp":"temp","value":21.40999984741211},{"comp":"humid","value":42.7400016784668},{"comp":"co2","value":436.0},{"comp":"voc","value":171.0},{"comp":"pm25","value":0.0},{"comp":"lux","value":804.9000244140625},{"comp":"spl_a","value":47.0}],"indices":[{"comp":"temp","value":0.0},{"comp":"humid","value":0.0},{"comp":"co2","value":0.0},{"comp":"voc","value":0.0},{"comp":"pm25","value":0.0}]}]} diff --git a/tests/fixtures/awair/user.json 
b/tests/fixtures/awair/user.json new file mode 100644 index 00000000000..f0fe94caf6d --- /dev/null +++ b/tests/fixtures/awair/user.json @@ -0,0 +1 @@ + {"dobDay":8,"usages":[{"scope":"API_USAGE","usage":302},{"scope":"USER_DEVICE_LIST","usage":50},{"scope":"USER_INFO","usage":80}],"tier":"Large_developer","email":"foo@bar.com","dobYear":2020,"permissions":[{"scope":"USER_DEVICE_LIST","quota":2147483647},{"scope":"USER_INFO","quota":2147483647},{"scope":"FIFTEEN_MIN","quota":30000},{"scope":"FIVE_MIN","quota":30000},{"scope":"RAW","quota":30000},{"scope":"LATEST","quota":30000},{"scope":"PUT_PREFERENCE","quota":30000},{"scope":"PUT_DISPLAY_MODE","quota":30000},{"scope":"PUT_LED_MODE","quota":30000},{"scope":"PUT_KNOCKING_MODE","quota":30000},{"scope":"PUT_TIMEZONE","quota":30000},{"scope":"PUT_DEVICE_NAME","quota":30000},{"scope":"PUT_LOCATION","quota":30000},{"scope":"PUT_ROOM_TYPE","quota":30000},{"scope":"PUT_SPACE_TYPE","quota":30000},{"scope":"GET_DISPLAY_MODE","quota":30000},{"scope":"GET_LED_MODE","quota":30000},{"scope":"GET_KNOCKING_MODE","quota":30000},{"scope":"GET_POWER_STATUS","quota":30000},{"scope":"GET_TIMEZONE","quota":30000}],"dobMonth":4,"sex":"MALE","lastName":"Hayworth","firstName":"Andrew","id":"32406"} diff --git a/tests/fixtures/awair_air_data_latest.json b/tests/fixtures/awair_air_data_latest.json deleted file mode 100644 index 674c0662197..00000000000 --- a/tests/fixtures/awair_air_data_latest.json +++ /dev/null @@ -1,50 +0,0 @@ -[ - { - "timestamp": "2018-11-21T15:46:16.346Z", - "score": 78, - "sensors": [ - { - "component": "TEMP", - "value": 22.4 - }, - { - "component": "HUMID", - "value": 32.73 - }, - { - "component": "CO2", - "value": 612 - }, - { - "component": "VOC", - "value": 1012 - }, - { - "component": "DUST", - "value": 6.2 - } - ], - "indices": [ - { - "component": "TEMP", - "value": 0 - }, - { - "component": "HUMID", - "value": -2 - }, - { - "component": "CO2", - "value": 0 - }, - { - "component": "VOC", - "value": 2 - }, - { - "component": "DUST", - "value": 0 - } - ] - } -] diff --git a/tests/fixtures/awair_air_data_latest_updated.json b/tests/fixtures/awair_air_data_latest_updated.json deleted file mode 100644 index 05ad8371232..00000000000 --- a/tests/fixtures/awair_air_data_latest_updated.json +++ /dev/null @@ -1,50 +0,0 @@ -[ - { - "timestamp": "2018-11-21T15:46:16.346Z", - "score": 79, - "sensors": [ - { - "component": "TEMP", - "value": 23.4 - }, - { - "component": "HUMID", - "value": 33.73 - }, - { - "component": "CO2", - "value": 613 - }, - { - "component": "VOC", - "value": 1013 - }, - { - "component": "DUST", - "value": 7.2 - } - ], - "indices": [ - { - "component": "TEMP", - "value": 0 - }, - { - "component": "HUMID", - "value": -2 - }, - { - "component": "CO2", - "value": 0 - }, - { - "component": "VOC", - "value": 2 - }, - { - "component": "DUST", - "value": 0 - } - ] - } -] diff --git a/tests/fixtures/awair_devices.json b/tests/fixtures/awair_devices.json deleted file mode 100644 index 899ad4eed72..00000000000 --- a/tests/fixtures/awair_devices.json +++ /dev/null @@ -1,25 +0,0 @@ -[ - { - "uuid": "awair_12345", - "deviceType": "awair", - "deviceId": "12345", - "name": "Awair", - "preference": "GENERAL", - "macAddress": "FFFFFFFFFFFF", - "room": { - "id": "ffffffff-ffff-ffff-ffff-ffffffffffff", - "name": "My Room", - "kind": "LIVING_ROOM", - "Space": { - "id": "ffffffff-ffff-ffff-ffff-ffffffffffff", - "kind": "HOME", - "location": { - "name": "Chicago, IL", - "timezone": "", - "lat": 0, - "lon": -0 - } - } - } - } -] diff --git 
a/tests/fixtures/homematicip_cloud.json b/tests/fixtures/homematicip_cloud.json index e85401aa1ec..1aa3bfa48ad 100644 --- a/tests/fixtures/homematicip_cloud.json +++ b/tests/fixtures/homematicip_cloud.json @@ -14,6 +14,210 @@ } }, "devices": { + "3014F7110SHUTTER_OPTICAL": { + "availableFirmwareVersion": "1.16.10", + "connectionType": "HMIP_RF", + "firmwareVersion": "1.16.10", + "firmwareVersionInteger": 69642, + "functionalChannels": { + "0": { + "coProFaulty": false, + "coProRestartNeeded": false, + "coProUpdateFailure": false, + "configPending": false, + "deviceId": "3014F7110SHUTTER_OPTICAL", + "deviceOverheated": false, + "deviceOverloaded": false, + "devicePowerFailureDetected": false, + "deviceUndervoltage": false, + "dutyCycle": false, + "functionalChannelType": "DEVICE_SABOTAGE", + "groupIndex": 0, + "groups": [], + "index": 0, + "label": "", + "lowBat": false, + "routerModuleEnabled": false, + "routerModuleSupported": false, + "rssiDeviceValue": -72, + "rssiPeerValue": null, + "sabotage": false, + "supportedOptionalFeatures": { + "IFeatureDeviceCoProError": false, + "IFeatureDeviceCoProRestart": false, + "IFeatureDeviceCoProUpdate": false, + "IFeatureDeviceIdentify": false, + "IFeatureDeviceOverheated": false, + "IFeatureDeviceOverloaded": false, + "IFeatureDevicePowerFailure": false, + "IFeatureDeviceTemperatureOutOfRange": false, + "IFeatureDeviceUndervoltage": false + }, + "temperatureOutOfRange": false, + "unreach": false + }, + "1": { + "deviceId": "3014F7110SHUTTER_OPTICAL", + "eventDelay": 0, + "functionalChannelType": "SHUTTER_CONTACT_CHANNEL", + "groupIndex": 1, + "groups": [ + "00000000-0000-0000-0000-000000000016", + "00000000-0000-0000-0000-000000000044", + "00000000-0000-0000-0000-000000000009" + ], + "index": 1, + "label": "", + "windowState": "CLOSED" + } + }, + "homeId": "00000000-0000-0000-0000-000000000001", + "id": "3014F7110SHUTTER_OPTICAL", + "label": "Sitzplatzt\u00fcre", + "lastStatusUpdate": 1589401621441, + "liveUpdateState": "LIVE_UPDATE_NOT_SUPPORTED", + "manufacturerCode": 1, + "modelId": 398, + "modelType": "HmIP-SWDO-PL", + "oem": "eQ-3", + "permanentlyReachable": false, + "serializedGlobalTradeItemNumber": "3014F7110SHUTTER_OPTICAL", + "type": "SHUTTER_CONTACT_OPTICAL_PLUS", + "updateState": "UP_TO_DATE" + }, + "3014F7110000000HmIPFSI16": { + "availableFirmwareVersion": "0.0.0", + "connectionType": "HMIP_RF", + "firmwareVersion": "1.16.2", + "firmwareVersionInteger": 69634, + "functionalChannels": { + "0": { + "coProFaulty": false, + "coProRestartNeeded": false, + "coProUpdateFailure": false, + "configPending": false, + "deviceId": "3014F7110000000HmIPFSI16", + "deviceOverheated": false, + "deviceOverloaded": false, + "devicePowerFailureDetected": false, + "deviceUndervoltage": false, + "dutyCycle": false, + "functionalChannelType": "DEVICE_BASE", + "groupIndex": 0, + "groups": [], + "index": 0, + "label": "", + "lowBat": null, + "routerModuleEnabled": false, + "routerModuleSupported": false, + "rssiDeviceValue": -57, + "rssiPeerValue": -54, + "supportedOptionalFeatures": { + "IFeatureDeviceCoProError": false, + "IFeatureDeviceCoProRestart": false, + "IFeatureDeviceCoProUpdate": false, + "IFeatureDeviceIdentify": false, + "IFeatureDeviceOverheated": true, + "IFeatureDeviceOverloaded": false, + "IFeatureDevicePowerFailure": false, + "IFeatureDeviceTemperatureOutOfRange": false, + "IFeatureDeviceUndervoltage": false + }, + "temperatureOutOfRange": false, + "unreach": false + }, + "1": { + "binaryBehaviorType": "NORMALLY_CLOSE", + "deviceId": 
"3014F7110000000HmIPFSI16", + "functionalChannelType": "MULTI_MODE_INPUT_SWITCH_CHANNEL", + "groupIndex": 1, + "groups": [], + "index": 1, + "label": "", + "multiModeInputMode": "KEY_BEHAVIOR", + "on": true, + "profileMode": "AUTOMATIC", + "userDesiredProfileMode": "AUTOMATIC" + } + }, + "homeId": "00000000-0000-0000-0000-000000000001", + "id": "3014F7110000000HmIPFSI16", + "label": "Wohnzimmer Beleuchtung", + "lastStatusUpdate": 1587233145096, + "liveUpdateState": "LIVE_UPDATE_NOT_SUPPORTED", + "manufacturerCode": 1, + "modelId": 404, + "modelType": "HmIP-FSI16", + "oem": "eQ-3", + "permanentlyReachable": true, + "serializedGlobalTradeItemNumber": "3014F7110000000HmIPFSI16", + "type": "FULL_FLUSH_INPUT_SWITCH", + "updateState": "UP_TO_DATE" + }, + "3014F7110000000HOERMANN": { + "availableFirmwareVersion": "0.0.0", + "firmwareVersion": "1.0.14", + "firmwareVersionInteger": 65550, + "functionalChannels": { + "0": { + "coProFaulty": false, + "coProRestartNeeded": false, + "coProUpdateFailure": false, + "configPending": false, + "deviceId": "3014F7110000000HOERMANN", + "deviceOverheated": false, + "deviceOverloaded": false, + "deviceUndervoltage": false, + "dutyCycle": false, + "functionalChannelType": "DEVICE_BASE", + "groupIndex": 0, + "groups": [], + "index": 0, + "label": "", + "lowBat": null, + "routerModuleEnabled": false, + "routerModuleSupported": false, + "rssiDeviceValue": -71, + "rssiPeerValue": -76, + "supportedOptionalFeatures": { + "IFeatureDeviceCoProError": false, + "IFeatureDeviceCoProRestart": false, + "IFeatureDeviceCoProUpdate": false, + "IFeatureDeviceOverheated": false, + "IFeatureDeviceOverloaded": false, + "IFeatureDeviceTemperatureOutOfRange": false, + "IFeatureDeviceUndervoltage": false + }, + "temperatureOutOfRange": false, + "unreach": false + }, + "1": { + "deviceId": "3014F7110000000HOERMANN", + "doorState": "CLOSED", + "functionalChannelType": "DOOR_CHANNEL", + "groupIndex": 1, + "groups": [], + "index": 1, + "label": "", + "on": false, + "processing": false, + "ventilationPositionSupported": true + } + }, + "homeId": "00000000-0000-0000-0000-000000000001", + "id": "3014F7110000000HOERMANN", + "label": "Garage door", + "lastStatusUpdate": 1584029477755, + "liveUpdateState": "LIVE_UPDATE_NOT_SUPPORTED", + "manufacturerCode": 1, + "modelId": 399, + "modelType": "HmIP-MOD-HO", + "oem": "eQ-3", + "permanentlyReachable": true, + "serializedGlobalTradeItemNumber": "3014F7110000000HOERMANN", + "type": "HOERMANN_DRIVES_MODULE", + "updateState": "UP_TO_DATE" + }, "3014F711000BBBB000000000": { "availableFirmwareVersion": "2.0.2", "firmwareVersion": "2.0.2", diff --git a/tests/fixtures/hvv_departures/check_name.json b/tests/fixtures/hvv_departures/check_name.json new file mode 100644 index 00000000000..7f1bf50d39b --- /dev/null +++ b/tests/fixtures/hvv_departures/check_name.json @@ -0,0 +1,15 @@ +{ + "returnCode": "OK", + "results": [ + { + "name": "Wartenau", + "city": "Hamburg", + "combinedName": "Wartenau", + "id": "Master:10901", + "type": "STATION", + "coordinate": {"x": 10.035515, "y": 53.56478}, + "serviceTypes": ["bus", "u"], + "hasStationInformation": true + } + ] +} \ No newline at end of file diff --git a/tests/fixtures/hvv_departures/config_entry.json b/tests/fixtures/hvv_departures/config_entry.json new file mode 100644 index 00000000000..f878280953d --- /dev/null +++ b/tests/fixtures/hvv_departures/config_entry.json @@ -0,0 +1,16 @@ +{ + "host": "api-test.geofox.de", + "username": "test-username", + "password": "test-password", + "station": { + "city": 
"Schmalfeld", + "combinedName": "Schmalfeld, Holstenstra\u00dfe", + "coordinate": {"x": 9.986115, "y": 53.874122}, + "hasStationInformation": false, + "id": "Master:75279", + "name": "Holstenstra\u00dfe", + "serviceTypes": ["bus"], + "type": "STATION" + }, + "stationInformation": {"returnCode": "OK"} +} \ No newline at end of file diff --git a/tests/fixtures/hvv_departures/departure_list.json b/tests/fixtures/hvv_departures/departure_list.json new file mode 100644 index 00000000000..95099a0ab17 --- /dev/null +++ b/tests/fixtures/hvv_departures/departure_list.json @@ -0,0 +1,162 @@ +{ + "returnCode": "OK", + "time": {"date": "26.01.2020", "time": "22:52"}, + "departures": [ + { + "line": { + "name": "U1", + "direction": "Großhansdorf", + "origin": "Norderstedt Mitte", + "type": { + "simpleType": "TRAIN", + "shortInfo": "U", + "longInfo": "U-Bahn", + "model": "DT4" + }, + "id": "HHA-U:U1_HHA-U" + }, + "timeOffset": 0, + "delay": 0, + "serviceId": 1482563187, + "station": {"combinedName": "Wartenau", "id": "Master:10901"}, + "attributes": [{"isPlanned": true, "types": ["REALTIME", "ACCURATE"]}] + }, + { + "line": { + "name": "25", + "direction": "Bf. Altona", + "origin": "U Burgstraße", + "type": { + "simpleType": "BUS", + "shortInfo": "Bus", + "longInfo": "Niederflur Metrobus", + "model": "Gelenkbus" + }, + "id": "HHA-B:25_HHA-B" + }, + "timeOffset": 1, + "delay": 0, + "serviceId": 74567, + "station": {"combinedName": "U Wartenau", "id": "Master:60015"}, + "attributes": [{"isPlanned": true, "types": ["REALTIME", "ACCURATE"]}] + }, + { + "line": { + "name": "25", + "direction": "U Burgstraße", + "origin": "Bf. Altona", + "type": { + "simpleType": "BUS", + "shortInfo": "Bus", + "longInfo": "Niederflur Metrobus", + "model": "Gelenkbus" + }, + "id": "HHA-B:25_HHA-B" + }, + "timeOffset": 5, + "delay": 0, + "serviceId": 74328, + "station": {"combinedName": "U Wartenau", "id": "Master:60015"}, + "attributes": [{"isPlanned": true, "types": ["REALTIME", "ACCURATE"]}] + }, + { + "line": { + "name": "U1", + "direction": "Norderstedt Mitte", + "origin": "Großhansdorf", + "type": { + "simpleType": "TRAIN", + "shortInfo": "U", + "longInfo": "U-Bahn", + "model": "DT4" + }, + "id": "HHA-U:U1_HHA-U" + }, + "timeOffset": 8, + "delay": 0, + "station": {"combinedName": "Wartenau", "id": "Master:10901"}, + "attributes": [{"isPlanned": true, "types": ["REALTIME", "ACCURATE"]}] + }, + { + "line": { + "name": "U1", + "direction": "Ohlstedt", + "origin": "Norderstedt Mitte", + "type": { + "simpleType": "TRAIN", + "shortInfo": "U", + "longInfo": "U-Bahn", + "model": "DT4" + }, + "id": "HHA-U:U1_HHA-U" + }, + "timeOffset": 10, + "delay": 0, + "station": {"combinedName": "Wartenau", "id": "Master:10901"}, + "attributes": [{"isPlanned": true, "types": ["REALTIME", "ACCURATE"]}] + } + ], + "filter": [ + { + "serviceID": "HHA-U:U1_HHA-U", + "stationIDs": ["Master:10902"], + "label": "Fuhlsbüttel Nord / Ochsenzoll / Norderstedt Mitte / Kellinghusenstraße / Ohlsdorf / Garstedt", + "serviceName": "U1" + }, + { + "serviceID": "HHA-U:U1_HHA-U", + "stationIDs": ["Master:60904"], + "label": "Volksdorf / Farmsen / Großhansdorf / Ohlstedt", + "serviceName": "U1" + }, + { + "serviceID": "HHA-B:25_HHA-B", + "stationIDs": ["Master:10047"], + "label": "Sachsenstraße / U Burgstraße", + "serviceName": "25" + }, + { + "serviceID": "HHA-B:25_HHA-B", + "stationIDs": ["Master:60029"], + "label": "Winterhuder Marktplatz / Bf. 
Altona", + "serviceName": "25" + }, + { + "serviceID": "HHA-B:36_HHA-B", + "stationIDs": ["Master:10049"], + "label": "S Blankenese / Rathausmarkt", + "serviceName": "36" + }, + { + "serviceID": "HHA-B:36_HHA-B", + "stationIDs": ["Master:60013"], + "label": "Berner Heerweg", + "serviceName": "36" + }, + { + "serviceID": "HHA-B:606_HHA-B", + "stationIDs": ["Master:10047"], + "label": "S Landwehr (Ramazan-Avci-Platz) - Rathausmarkt", + "serviceName": "606" + }, + { + "serviceID": "HHA-B:606_HHA-B", + "stationIDs": ["Master:60029"], + "label": "Uferstraße - Winterhuder Marktplatz / Uferstraße - S Hamburg Airport / Uferstraße - U Langenhorn Markt (Krohnstieg)", + "serviceName": "606" + }, + { + "serviceID": "HHA-B:608_HHA-B", + "stationIDs": ["Master:10048"], + "label": "Rathausmarkt / S Reeperbahn", + "serviceName": "608" + }, + { + "serviceID": "HHA-B:608_HHA-B", + "stationIDs": ["Master:60012"], + "label": "Bf. Rahlstedt (Amtsstraße) / Großlohe", + "serviceName": "608" + } + ], + "serviceTypes": ["UBAHN", "BUS", "METROBUS", "SCHNELLBUS", "NACHTBUS"] +} \ No newline at end of file diff --git a/tests/fixtures/hvv_departures/init.json b/tests/fixtures/hvv_departures/init.json new file mode 100644 index 00000000000..a20a96363c7 --- /dev/null +++ b/tests/fixtures/hvv_departures/init.json @@ -0,0 +1,10 @@ +{ + "returnCode": "OK", + "beginOfService": "04.06.2020", + "endOfService": "13.12.2020", + "id": "1.80.0", + "dataId": "32.55.01", + "buildDate": "04.06.2020", + "buildTime": "14:29:59", + "buildText": "Regelfahrplan 2020" +} \ No newline at end of file diff --git a/tests/fixtures/hvv_departures/options.json b/tests/fixtures/hvv_departures/options.json new file mode 100644 index 00000000000..f2e288d760a --- /dev/null +++ b/tests/fixtures/hvv_departures/options.json @@ -0,0 +1,12 @@ +{ + "filter": [ + { + "label": "S Landwehr (Ramazan-Avci-Platz) - Rathausmarkt", + "serviceID": "HHA-B:606_HHA-B", + "serviceName": "606", + "stationIDs": ["Master:10047"] + } + ], + "offset": 10, + "realtime": true +} \ No newline at end of file diff --git a/tests/fixtures/hvv_departures/station_information.json b/tests/fixtures/hvv_departures/station_information.json new file mode 100644 index 00000000000..52a2cd8da25 --- /dev/null +++ b/tests/fixtures/hvv_departures/station_information.json @@ -0,0 +1,32 @@ +{ + "returnCode": "OK", + "partialStations": [ + { + "stationOutline": "http://www.geofox.de/images/mobi/stationDescriptions/U_Wartenau.ZM3.jpg", + "elevators": [ + { + "label": "A", + "cabinWidth": 124, + "cabinLength": 147, + "doorWidth": 110, + "description": "Zugang Landwehr <-> Schalterhalle", + "elevatorType": "Durchlader", + "buttonType": "BRAILLE", + "state": "READY" + }, + { + "lines": ["U1"], + "label": "B", + "cabinWidth": 123, + "cabinLength": 145, + "doorWidth": 90, + "description": "Schalterhalle <-> U1", + "elevatorType": "Durchlader", + "buttonType": "COMBI", + "state": "READY" + } + ] + } + ], + "lastUpdate": {"date": "26.01.2020", "time": "22:49"} +} \ No newline at end of file diff --git a/tests/fixtures/metoffice.json b/tests/fixtures/metoffice.json new file mode 100644 index 00000000000..c2b8707ca7a --- /dev/null +++ b/tests/fixtures/metoffice.json @@ -0,0 +1,1499 @@ +{ + "all_sites": { + "Locations": { + "Location": [ + { + "elevation": "47.0", + "id": "354107", + "latitude": "53.3986", + "longitude": "-2.9256", + "name": "Wavertree", + "region": "nw", + "unitaryAuthArea": "Merseyside" + }, + { + "elevation": "5.0", + "id": "322380", + "latitude": "52.7561", + "longitude": "0.4019", + 
"name": "King's Lynn", + "region": "ee", + "unitaryAuthArea": "Norfolk" + } + ] + } + }, + "wavertree_hourly": { + "SiteRep": { + "Wx": { + "Param": [ + { + "name": "F", + "units": "C", + "$": "Feels Like Temperature" + }, + { + "name": "G", + "units": "mph", + "$": "Wind Gust" + }, + { + "name": "H", + "units": "%", + "$": "Screen Relative Humidity" + }, + { + "name": "T", + "units": "C", + "$": "Temperature" + }, + { + "name": "V", + "units": "", + "$": "Visibility" + }, + { + "name": "D", + "units": "compass", + "$": "Wind Direction" + }, + { + "name": "S", + "units": "mph", + "$": "Wind Speed" + }, + { + "name": "U", + "units": "", + "$": "Max UV Index" + }, + { + "name": "W", + "units": "", + "$": "Weather Type" + }, + { + "name": "Pp", + "units": "%", + "$": "Precipitation Probability" + } + ] + }, + "DV": { + "dataDate": "2020-04-25T08:00:00Z", + "type": "Forecast", + "Location": { + "i": "354107", + "lat": "53.3986", + "lon": "-2.9256", + "name": "WAVERTREE", + "country": "ENGLAND", + "continent": "EUROPE", + "elevation": "47.0", + "Period": [ + { + "type": "Day", + "value": "2020-04-25Z", + "Rep": [ + { + "D": "SE", + "F": "7", + "G": "25", + "H": "63", + "Pp": "0", + "S": "9", + "T": "9", + "V": "VG", + "W": "0", + "U": "0", + "$": "180" + }, + { + "D": "ESE", + "F": "4", + "G": "22", + "H": "76", + "Pp": "0", + "S": "11", + "T": "7", + "V": "GO", + "W": "1", + "U": "1", + "$": "360" + }, + { + "D": "SSE", + "F": "8", + "G": "18", + "H": "70", + "Pp": "0", + "S": "9", + "T": "10", + "V": "MO", + "W": "1", + "U": "3", + "$": "540" + }, + { + "D": "SSE", + "F": "14", + "G": "16", + "H": "50", + "Pp": "0", + "S": "9", + "T": "17", + "V": "GO", + "W": "1", + "U": "5", + "$": "720" + }, + { + "D": "S", + "F": "17", + "G": "9", + "H": "43", + "Pp": "1", + "S": "4", + "T": "19", + "V": "GO", + "W": "1", + "U": "2", + "$": "900" + }, + { + "D": "WNW", + "F": "15", + "G": "13", + "H": "55", + "Pp": "2", + "S": "7", + "T": "17", + "V": "GO", + "W": "3", + "U": "1", + "$": "1080" + }, + { + "D": "NW", + "F": "14", + "G": "7", + "H": "64", + "Pp": "1", + "S": "2", + "T": "14", + "V": "GO", + "W": "2", + "U": "0", + "$": "1260" + } + ] + }, + { + "type": "Day", + "value": "2020-04-26Z", + "Rep": [ + { + "D": "WSW", + "F": "13", + "G": "4", + "H": "73", + "Pp": "1", + "S": "2", + "T": "13", + "V": "GO", + "W": "2", + "U": "0", + "$": "0" + }, + { + "D": "WNW", + "F": "12", + "G": "9", + "H": "77", + "Pp": "2", + "S": "4", + "T": "12", + "V": "GO", + "W": "2", + "U": "0", + "$": "180" + }, + { + "D": "NW", + "F": "10", + "G": "9", + "H": "82", + "Pp": "5", + "S": "4", + "T": "11", + "V": "MO", + "W": "7", + "U": "1", + "$": "360" + }, + { + "D": "WNW", + "F": "11", + "G": "7", + "H": "79", + "Pp": "5", + "S": "4", + "T": "12", + "V": "MO", + "W": "7", + "U": "3", + "$": "540" + }, + { + "D": "WNW", + "F": "10", + "G": "18", + "H": "78", + "Pp": "6", + "S": "9", + "T": "12", + "V": "MO", + "W": "7", + "U": "4", + "$": "720" + }, + { + "D": "NW", + "F": "10", + "G": "18", + "H": "71", + "Pp": "5", + "S": "9", + "T": "12", + "V": "GO", + "W": "7", + "U": "2", + "$": "900" + }, + { + "D": "NW", + "F": "9", + "G": "16", + "H": "68", + "Pp": "9", + "S": "9", + "T": "11", + "V": "VG", + "W": "7", + "U": "1", + "$": "1080" + }, + { + "D": "NW", + "F": "8", + "G": "11", + "H": "68", + "Pp": "9", + "S": "7", + "T": "10", + "V": "VG", + "W": "8", + "U": "0", + "$": "1260" + } + ] + }, + { + "type": "Day", + "value": "2020-04-27Z", + "Rep": [ + { + "D": "WNW", + "F": "8", + "G": "9", + "H": "72", + "Pp": 
"11", + "S": "4", + "T": "9", + "V": "VG", + "W": "8", + "U": "0", + "$": "0" + }, + { + "D": "WNW", + "F": "7", + "G": "11", + "H": "77", + "Pp": "12", + "S": "7", + "T": "8", + "V": "VG", + "W": "7", + "U": "0", + "$": "180" + }, + { + "D": "NW", + "F": "7", + "G": "9", + "H": "80", + "Pp": "14", + "S": "4", + "T": "8", + "V": "GO", + "W": "7", + "U": "1", + "$": "360" + }, + { + "D": "NW", + "F": "7", + "G": "18", + "H": "73", + "Pp": "6", + "S": "9", + "T": "9", + "V": "VG", + "W": "3", + "U": "2", + "$": "540" + }, + { + "D": "NW", + "F": "8", + "G": "20", + "H": "59", + "Pp": "4", + "S": "9", + "T": "10", + "V": "VG", + "W": "3", + "U": "3", + "$": "720" + }, + { + "D": "NW", + "F": "8", + "G": "20", + "H": "58", + "Pp": "1", + "S": "9", + "T": "10", + "V": "VG", + "W": "1", + "U": "2", + "$": "900" + }, + { + "D": "NW", + "F": "8", + "G": "16", + "H": "57", + "Pp": "1", + "S": "7", + "T": "10", + "V": "VG", + "W": "1", + "U": "1", + "$": "1080" + }, + { + "D": "NW", + "F": "8", + "G": "11", + "H": "67", + "Pp": "1", + "S": "4", + "T": "9", + "V": "VG", + "W": "0", + "U": "0", + "$": "1260" + } + ] + }, + { + "type": "Day", + "value": "2020-04-28Z", + "Rep": [ + { + "D": "NNW", + "F": "7", + "G": "7", + "H": "80", + "Pp": "2", + "S": "4", + "T": "8", + "V": "VG", + "W": "0", + "U": "0", + "$": "0" + }, + { + "D": "W", + "F": "6", + "G": "7", + "H": "86", + "Pp": "3", + "S": "4", + "T": "7", + "V": "GO", + "W": "0", + "U": "0", + "$": "180" + }, + { + "D": "S", + "F": "5", + "G": "9", + "H": "86", + "Pp": "5", + "S": "4", + "T": "6", + "V": "GO", + "W": "1", + "U": "1", + "$": "360" + }, + { + "D": "ENE", + "F": "7", + "G": "13", + "H": "72", + "Pp": "6", + "S": "7", + "T": "9", + "V": "GO", + "W": "3", + "U": "3", + "$": "540" + }, + { + "D": "ENE", + "F": "10", + "G": "16", + "H": "57", + "Pp": "10", + "S": "7", + "T": "11", + "V": "GO", + "W": "7", + "U": "4", + "$": "720" + }, + { + "D": "N", + "F": "11", + "G": "16", + "H": "58", + "Pp": "10", + "S": "7", + "T": "12", + "V": "GO", + "W": "7", + "U": "2", + "$": "900" + }, + { + "D": "N", + "F": "10", + "G": "16", + "H": "63", + "Pp": "10", + "S": "7", + "T": "11", + "V": "VG", + "W": "7", + "U": "1", + "$": "1080" + }, + { + "D": "NNE", + "F": "9", + "G": "11", + "H": "72", + "Pp": "9", + "S": "4", + "T": "10", + "V": "VG", + "W": "7", + "U": "0", + "$": "1260" + } + ] + }, + { + "type": "Day", + "value": "2020-04-29Z", + "Rep": [ + { + "D": "E", + "F": "8", + "G": "9", + "H": "79", + "Pp": "6", + "S": "4", + "T": "9", + "V": "VG", + "W": "7", + "U": "0", + "$": "0" + }, + { + "D": "SSE", + "F": "7", + "G": "11", + "H": "81", + "Pp": "3", + "S": "7", + "T": "8", + "V": "GO", + "W": "2", + "U": "0", + "$": "180" + }, + { + "D": "SE", + "F": "5", + "G": "16", + "H": "86", + "Pp": "9", + "S": "9", + "T": "8", + "V": "GO", + "W": "7", + "U": "1", + "$": "360" + }, + { + "D": "SE", + "F": "8", + "G": "22", + "H": "74", + "Pp": "12", + "S": "11", + "T": "10", + "V": "GO", + "W": "7", + "U": "3", + "$": "540" + }, + { + "D": "SE", + "F": "10", + "G": "27", + "H": "72", + "Pp": "47", + "S": "13", + "T": "12", + "V": "GO", + "W": "12", + "U": "3", + "$": "720" + }, + { + "D": "SSE", + "F": "10", + "G": "29", + "H": "73", + "Pp": "59", + "S": "13", + "T": "13", + "V": "GO", + "W": "14", + "U": "2", + "$": "900" + }, + { + "D": "SSE", + "F": "10", + "G": "20", + "H": "69", + "Pp": "39", + "S": "11", + "T": "12", + "V": "VG", + "W": "10", + "U": "1", + "$": "1080" + }, + { + "D": "SSE", + "F": "9", + "G": "22", + "H": "79", + "Pp": "19", + 
"S": "13", + "T": "11", + "V": "GO", + "W": "7", + "U": "0", + "$": "1260" + } + ] + } + ] + } + } + } + }, + "wavertree_daily": { + "SiteRep": { + "Wx": { + "Param": [ + { + "name": "FDm", + "units": "C", + "$": "Feels Like Day Maximum Temperature" + }, + { + "name": "FNm", + "units": "C", + "$": "Feels Like Night Minimum Temperature" + }, + { + "name": "Dm", + "units": "C", + "$": "Day Maximum Temperature" + }, + { + "name": "Nm", + "units": "C", + "$": "Night Minimum Temperature" + }, + { + "name": "Gn", + "units": "mph", + "$": "Wind Gust Noon" + }, + { + "name": "Gm", + "units": "mph", + "$": "Wind Gust Midnight" + }, + { + "name": "Hn", + "units": "%", + "$": "Screen Relative Humidity Noon" + }, + { + "name": "Hm", + "units": "%", + "$": "Screen Relative Humidity Midnight" + }, + { + "name": "V", + "units": "", + "$": "Visibility" + }, + { + "name": "D", + "units": "compass", + "$": "Wind Direction" + }, + { + "name": "S", + "units": "mph", + "$": "Wind Speed" + }, + { + "name": "U", + "units": "", + "$": "Max UV Index" + }, + { + "name": "W", + "units": "", + "$": "Weather Type" + }, + { + "name": "PPd", + "units": "%", + "$": "Precipitation Probability Day" + }, + { + "name": "PPn", + "units": "%", + "$": "Precipitation Probability Night" + } + ] + }, + "DV": { + "dataDate": "2020-04-25T08:00:00Z", + "type": "Forecast", + "Location": { + "i": "354107", + "lat": "53.3986", + "lon": "-2.9256", + "name": "WAVERTREE", + "country": "ENGLAND", + "continent": "EUROPE", + "elevation": "47.0", + "Period": [ + { + "type": "Day", + "value": "2020-04-25Z", + "Rep": [ + { + "D": "SSE", + "Gn": "16", + "Hn": "50", + "PPd": "2", + "S": "9", + "V": "GO", + "Dm": "19", + "FDm": "18", + "W": "1", + "U": "5", + "$": "Day" + }, + { + "D": "WSW", + "Gm": "4", + "Hm": "73", + "PPn": "2", + "S": "2", + "V": "GO", + "Nm": "11", + "FNm": "11", + "W": "2", + "$": "Night" + } + ] + }, + { + "type": "Day", + "value": "2020-04-26Z", + "Rep": [ + { + "D": "WNW", + "Gn": "18", + "Hn": "78", + "PPd": "9", + "S": "9", + "V": "MO", + "Dm": "13", + "FDm": "11", + "W": "7", + "U": "4", + "$": "Day" + }, + { + "D": "WNW", + "Gm": "9", + "Hm": "72", + "PPn": "12", + "S": "4", + "V": "VG", + "Nm": "8", + "FNm": "7", + "W": "8", + "$": "Night" + } + ] + }, + { + "type": "Day", + "value": "2020-04-27Z", + "Rep": [ + { + "D": "NW", + "Gn": "20", + "Hn": "59", + "PPd": "14", + "S": "9", + "V": "VG", + "Dm": "11", + "FDm": "8", + "W": "3", + "U": "3", + "$": "Day" + }, + { + "D": "NNW", + "Gm": "7", + "Hm": "80", + "PPn": "3", + "S": "4", + "V": "VG", + "Nm": "6", + "FNm": "5", + "W": "0", + "$": "Night" + } + ] + }, + { + "type": "Day", + "value": "2020-04-28Z", + "Rep": [ + { + "D": "ENE", + "Gn": "16", + "Hn": "57", + "PPd": "10", + "S": "7", + "V": "GO", + "Dm": "12", + "FDm": "11", + "W": "7", + "U": "4", + "$": "Day" + }, + { + "D": "E", + "Gm": "9", + "Hm": "79", + "PPn": "9", + "S": "4", + "V": "VG", + "Nm": "7", + "FNm": "6", + "W": "7", + "$": "Night" + } + ] + }, + { + "type": "Day", + "value": "2020-04-29Z", + "Rep": [ + { + "D": "SE", + "Gn": "27", + "Hn": "72", + "PPd": "59", + "S": "13", + "V": "GO", + "Dm": "13", + "FDm": "10", + "W": "12", + "U": "3", + "$": "Day" + }, + { + "D": "SSE", + "Gm": "18", + "Hm": "85", + "PPn": "19", + "S": "11", + "V": "VG", + "Nm": "8", + "FNm": "6", + "W": "7", + "$": "Night" + } + ] + } + ] + } + } + } + }, + "kingslynn_hourly": { + "SiteRep": { + "Wx": { + "Param": [ + { + "name": "F", + "units": "C", + "$": "Feels Like Temperature" + }, + { + "name": "G", + "units": "mph", 
+ "$": "Wind Gust" + }, + { + "name": "H", + "units": "%", + "$": "Screen Relative Humidity" + }, + { + "name": "T", + "units": "C", + "$": "Temperature" + }, + { + "name": "V", + "units": "", + "$": "Visibility" + }, + { + "name": "D", + "units": "compass", + "$": "Wind Direction" + }, + { + "name": "S", + "units": "mph", + "$": "Wind Speed" + }, + { + "name": "U", + "units": "", + "$": "Max UV Index" + }, + { + "name": "W", + "units": "", + "$": "Weather Type" + }, + { + "name": "Pp", + "units": "%", + "$": "Precipitation Probability" + } + ] + }, + "DV": { + "dataDate": "2020-04-25T08:00:00Z", + "type": "Forecast", + "Location": { + "i": "322380", + "lat": "52.7561", + "lon": "0.4019", + "name": "KING'S LYNN", + "country": "ENGLAND", + "continent": "EUROPE", + "elevation": "5.0", + "Period": [ + { + "type": "Day", + "value": "2020-04-25Z", + "Rep": [ + { + "D": "SSE", + "F": "4", + "G": "9", + "H": "88", + "Pp": "7", + "S": "9", + "T": "7", + "V": "GO", + "W": "8", + "U": "0", + "$": "180" + }, + { + "D": "ESE", + "F": "5", + "G": "7", + "H": "86", + "Pp": "9", + "S": "4", + "T": "7", + "V": "GO", + "W": "8", + "U": "1", + "$": "360" + }, + { + "D": "ESE", + "F": "8", + "G": "4", + "H": "75", + "Pp": "9", + "S": "4", + "T": "9", + "V": "VG", + "W": "8", + "U": "3", + "$": "540" + }, + { + "D": "E", + "F": "13", + "G": "7", + "H": "60", + "Pp": "0", + "S": "2", + "T": "14", + "V": "VG", + "W": "1", + "U": "6", + "$": "720" + }, + { + "D": "NNW", + "F": "14", + "G": "9", + "H": "57", + "Pp": "0", + "S": "4", + "T": "15", + "V": "VG", + "W": "1", + "U": "3", + "$": "900" + }, + { + "D": "ENE", + "F": "14", + "G": "9", + "H": "58", + "Pp": "0", + "S": "4", + "T": "14", + "V": "VG", + "W": "1", + "U": "1", + "$": "1080" + }, + { + "D": "SE", + "F": "8", + "G": "18", + "H": "76", + "Pp": "0", + "S": "9", + "T": "10", + "V": "VG", + "W": "0", + "U": "0", + "$": "1260" + } + ] + }, + { + "type": "Day", + "value": "2020-04-26Z", + "Rep": [ + { + "D": "SSE", + "F": "5", + "G": "16", + "H": "84", + "Pp": "0", + "S": "7", + "T": "7", + "V": "VG", + "W": "0", + "U": "0", + "$": "0" + }, + { + "D": "S", + "F": "4", + "G": "16", + "H": "89", + "Pp": "0", + "S": "7", + "T": "6", + "V": "GO", + "W": "0", + "U": "0", + "$": "180" + }, + { + "D": "S", + "F": "4", + "G": "16", + "H": "87", + "Pp": "0", + "S": "7", + "T": "7", + "V": "GO", + "W": "1", + "U": "1", + "$": "360" + }, + { + "D": "SSW", + "F": "11", + "G": "13", + "H": "69", + "Pp": "0", + "S": "9", + "T": "13", + "V": "VG", + "W": "1", + "U": "4", + "$": "540" + }, + { + "D": "SW", + "F": "15", + "G": "18", + "H": "50", + "Pp": "8", + "S": "9", + "T": "17", + "V": "VG", + "W": "1", + "U": "5", + "$": "720" + }, + { + "D": "SW", + "F": "16", + "G": "16", + "H": "47", + "Pp": "8", + "S": "7", + "T": "18", + "V": "VG", + "W": "7", + "U": "2", + "$": "900" + }, + { + "D": "SW", + "F": "15", + "G": "13", + "H": "56", + "Pp": "3", + "S": "7", + "T": "17", + "V": "VG", + "W": "3", + "U": "1", + "$": "1080" + }, + { + "D": "SW", + "F": "13", + "G": "11", + "H": "76", + "Pp": "4", + "S": "4", + "T": "13", + "V": "VG", + "W": "7", + "U": "0", + "$": "1260" + } + ] + }, + { + "type": "Day", + "value": "2020-04-27Z", + "Rep": [ + { + "D": "SSW", + "F": "10", + "G": "13", + "H": "75", + "Pp": "5", + "S": "7", + "T": "11", + "V": "GO", + "W": "7", + "U": "0", + "$": "0" + }, + { + "D": "W", + "F": "9", + "G": "13", + "H": "84", + "Pp": "9", + "S": "7", + "T": "10", + "V": "GO", + "W": "7", + "U": "0", + "$": "180" + }, + { + "D": "NW", + "F": "7", + "G": 
"16", + "H": "85", + "Pp": "50", + "S": "9", + "T": "9", + "V": "GO", + "W": "12", + "U": "1", + "$": "360" + }, + { + "D": "NW", + "F": "9", + "G": "11", + "H": "78", + "Pp": "36", + "S": "4", + "T": "10", + "V": "VG", + "W": "7", + "U": "3", + "$": "540" + }, + { + "D": "WNW", + "F": "11", + "G": "11", + "H": "66", + "Pp": "9", + "S": "4", + "T": "12", + "V": "VG", + "W": "7", + "U": "4", + "$": "720" + }, + { + "D": "W", + "F": "11", + "G": "13", + "H": "62", + "Pp": "9", + "S": "7", + "T": "13", + "V": "VG", + "W": "7", + "U": "2", + "$": "900" + }, + { + "D": "E", + "F": "11", + "G": "11", + "H": "64", + "Pp": "10", + "S": "7", + "T": "12", + "V": "VG", + "W": "7", + "U": "1", + "$": "1080" + }, + { + "D": "SE", + "F": "9", + "G": "13", + "H": "78", + "Pp": "9", + "S": "7", + "T": "10", + "V": "VG", + "W": "7", + "U": "0", + "$": "1260" + } + ] + }, + { + "type": "Day", + "value": "2020-04-28Z", + "Rep": [ + { + "D": "SE", + "F": "7", + "G": "13", + "H": "85", + "Pp": "9", + "S": "7", + "T": "9", + "V": "VG", + "W": "7", + "U": "0", + "$": "0" + }, + { + "D": "E", + "F": "7", + "G": "9", + "H": "91", + "Pp": "11", + "S": "4", + "T": "8", + "V": "GO", + "W": "7", + "U": "0", + "$": "180" + }, + { + "D": "ESE", + "F": "7", + "G": "9", + "H": "92", + "Pp": "12", + "S": "4", + "T": "8", + "V": "GO", + "W": "7", + "U": "1", + "$": "360" + }, + { + "D": "ESE", + "F": "9", + "G": "13", + "H": "77", + "Pp": "14", + "S": "7", + "T": "11", + "V": "GO", + "W": "7", + "U": "3", + "$": "540" + }, + { + "D": "ESE", + "F": "12", + "G": "16", + "H": "64", + "Pp": "14", + "S": "7", + "T": "13", + "V": "GO", + "W": "7", + "U": "3", + "$": "720" + }, + { + "D": "ESE", + "F": "12", + "G": "18", + "H": "66", + "Pp": "15", + "S": "9", + "T": "13", + "V": "GO", + "W": "7", + "U": "2", + "$": "900" + }, + { + "D": "SSE", + "F": "11", + "G": "13", + "H": "73", + "Pp": "15", + "S": "7", + "T": "12", + "V": "GO", + "W": "7", + "U": "1", + "$": "1080" + }, + { + "D": "SE", + "F": "9", + "G": "13", + "H": "81", + "Pp": "13", + "S": "7", + "T": "10", + "V": "GO", + "W": "7", + "U": "0", + "$": "1260" + } + ] + }, + { + "type": "Day", + "value": "2020-04-29Z", + "Rep": [ + { + "D": "SSE", + "F": "7", + "G": "13", + "H": "87", + "Pp": "11", + "S": "7", + "T": "9", + "V": "GO", + "W": "7", + "U": "0", + "$": "0" + }, + { + "D": "SSE", + "F": "7", + "G": "13", + "H": "91", + "Pp": "15", + "S": "7", + "T": "9", + "V": "GO", + "W": "8", + "U": "0", + "$": "180" + }, + { + "D": "ESE", + "F": "7", + "G": "13", + "H": "89", + "Pp": "8", + "S": "7", + "T": "9", + "V": "GO", + "W": "7", + "U": "1", + "$": "360" + }, + { + "D": "SSE", + "F": "10", + "G": "20", + "H": "75", + "Pp": "8", + "S": "11", + "T": "12", + "V": "VG", + "W": "7", + "U": "3", + "$": "540" + }, + { + "D": "S", + "F": "12", + "G": "22", + "H": "68", + "Pp": "11", + "S": "11", + "T": "14", + "V": "GO", + "W": "7", + "U": "3", + "$": "720" + }, + { + "D": "S", + "F": "12", + "G": "27", + "H": "68", + "Pp": "55", + "S": "13", + "T": "14", + "V": "GO", + "W": "12", + "U": "1", + "$": "900" + }, + { + "D": "SSE", + "F": "11", + "G": "22", + "H": "76", + "Pp": "34", + "S": "11", + "T": "13", + "V": "VG", + "W": "10", + "U": "1", + "$": "1080" + }, + { + "D": "SSE", + "F": "9", + "G": "20", + "H": "86", + "Pp": "20", + "S": "11", + "T": "11", + "V": "VG", + "W": "7", + "U": "0", + "$": "1260" + } + ] + } + ] + } + } + } + } +} \ No newline at end of file diff --git a/tests/fixtures/roku/active-app-pluto.xml b/tests/fixtures/roku/active-app-pluto.xml new file mode 
100644 index 00000000000..cb3b85dc51c --- /dev/null +++ b/tests/fixtures/roku/active-app-pluto.xml @@ -0,0 +1,4 @@ + + + Pluto TV - It's Free TV + diff --git a/tests/fixtures/roku/apps-tv.xml b/tests/fixtures/roku/apps-tv.xml index 93452c22235..e5862268d90 100644 --- a/tests/fixtures/roku/apps-tv.xml +++ b/tests/fixtures/roku/apps-tv.xml @@ -10,4 +10,5 @@ Free FrameChannel Service Mediafly Pandora + Pluto TV - It's Free TV diff --git a/tests/fixtures/roku/apps.xml b/tests/fixtures/roku/apps.xml index 416da25091e..477304c09e8 100644 --- a/tests/fixtures/roku/apps.xml +++ b/tests/fixtures/roku/apps.xml @@ -7,4 +7,5 @@ Free FrameChannel Service Mediafly Pandora + Pluto TV - It's Free TV diff --git a/tests/fixtures/roku/media-player-close.xml b/tests/fixtures/roku/media-player-close.xml new file mode 100644 index 00000000000..0f542941d8c --- /dev/null +++ b/tests/fixtures/roku/media-player-close.xml @@ -0,0 +1,5 @@ + + + + false + diff --git a/tests/fixtures/roku/media-player-live.xml b/tests/fixtures/roku/media-player-live.xml new file mode 100644 index 00000000000..62d819f228c --- /dev/null +++ b/tests/fixtures/roku/media-player-live.xml @@ -0,0 +1,12 @@ + + + + + + + 73313 ms + 95000 ms + true + 25106 ms + + diff --git a/tests/fixtures/roku/media-player-pause.xml b/tests/fixtures/roku/media-player-pause.xml new file mode 100644 index 00000000000..a771208ef57 --- /dev/null +++ b/tests/fixtures/roku/media-player-pause.xml @@ -0,0 +1,12 @@ + + + + + + + 313813 ms + 6496762 ms + false + 15000 ms + + diff --git a/tests/fixtures/roku/media-player-play.xml b/tests/fixtures/roku/media-player-play.xml new file mode 100644 index 00000000000..eceb3ce59a2 --- /dev/null +++ b/tests/fixtures/roku/media-player-play.xml @@ -0,0 +1,12 @@ + + + + + + + 38813 ms + 6496762 ms + false + 15000 ms + + diff --git a/tests/helpers/test_condition.py b/tests/helpers/test_condition.py index c4b87b667fa..b2cb1ff100c 100644 --- a/tests/helpers/test_condition.py +++ b/tests/helpers/test_condition.py @@ -266,6 +266,198 @@ async def test_if_numeric_state_not_raise_on_unavailable(hass): assert len(logwarn.mock_calls) == 0 +async def test_state_multiple_entities(hass): + """Test with multiple entities in condition.""" + test = await condition.async_from_config( + hass, + { + "condition": "and", + "conditions": [ + { + "condition": "state", + "entity_id": ["sensor.temperature_1", "sensor.temperature_2"], + "state": "100", + }, + ], + }, + ) + + hass.states.async_set("sensor.temperature_1", 100) + hass.states.async_set("sensor.temperature_2", 100) + assert test(hass) + + hass.states.async_set("sensor.temperature_1", 101) + hass.states.async_set("sensor.temperature_2", 100) + assert not test(hass) + + hass.states.async_set("sensor.temperature_1", 100) + hass.states.async_set("sensor.temperature_2", 101) + assert not test(hass) + + +async def test_multiple_states(hass): + """Test with multiple states in condition.""" + test = await condition.async_from_config( + hass, + { + "condition": "and", + "conditions": [ + { + "condition": "state", + "entity_id": "sensor.temperature", + "state": ["100", "200"], + }, + ], + }, + ) + + hass.states.async_set("sensor.temperature", 100) + assert test(hass) + + hass.states.async_set("sensor.temperature", 200) + assert test(hass) + + hass.states.async_set("sensor.temperature", 42) + assert not test(hass) + + +async def test_numeric_state_multiple_entities(hass): + """Test with multiple entities in condition.""" + test = await condition.async_from_config( + hass, + { + "condition": "and", + 
"conditions": [ + { + "condition": "numeric_state", + "entity_id": ["sensor.temperature_1", "sensor.temperature_2"], + "below": 50, + }, + ], + }, + ) + + hass.states.async_set("sensor.temperature_1", 49) + hass.states.async_set("sensor.temperature_2", 49) + assert test(hass) + + hass.states.async_set("sensor.temperature_1", 50) + hass.states.async_set("sensor.temperature_2", 49) + assert not test(hass) + + hass.states.async_set("sensor.temperature_1", 49) + hass.states.async_set("sensor.temperature_2", 50) + assert not test(hass) + + +async def test_zone_multiple_entities(hass): + """Test with multiple entities in condition.""" + test = await condition.async_from_config( + hass, + { + "condition": "and", + "conditions": [ + { + "condition": "zone", + "entity_id": ["device_tracker.person_1", "device_tracker.person_2"], + "zone": "zone.home", + }, + ], + }, + ) + + hass.states.async_set( + "zone.home", + "zoning", + {"name": "home", "latitude": 2.1, "longitude": 1.1, "radius": 10}, + ) + + hass.states.async_set( + "device_tracker.person_1", + "home", + {"friendly_name": "person_1", "latitude": 2.1, "longitude": 1.1}, + ) + hass.states.async_set( + "device_tracker.person_2", + "home", + {"friendly_name": "person_2", "latitude": 2.1, "longitude": 1.1}, + ) + assert test(hass) + + hass.states.async_set( + "device_tracker.person_1", + "home", + {"friendly_name": "person_1", "latitude": 20.1, "longitude": 10.1}, + ) + hass.states.async_set( + "device_tracker.person_2", + "home", + {"friendly_name": "person_2", "latitude": 2.1, "longitude": 1.1}, + ) + assert not test(hass) + + hass.states.async_set( + "device_tracker.person_1", + "home", + {"friendly_name": "person_1", "latitude": 2.1, "longitude": 1.1}, + ) + hass.states.async_set( + "device_tracker.person_2", + "home", + {"friendly_name": "person_2", "latitude": 20.1, "longitude": 10.1}, + ) + assert not test(hass) + + +async def test_multiple_zones(hass): + """Test with multiple entities in condition.""" + test = await condition.async_from_config( + hass, + { + "condition": "and", + "conditions": [ + { + "condition": "zone", + "entity_id": "device_tracker.person", + "zone": ["zone.home", "zone.work"], + }, + ], + }, + ) + + hass.states.async_set( + "zone.home", + "zoning", + {"name": "home", "latitude": 2.1, "longitude": 1.1, "radius": 10}, + ) + hass.states.async_set( + "zone.work", + "zoning", + {"name": "work", "latitude": 20.1, "longitude": 10.1, "radius": 10}, + ) + + hass.states.async_set( + "device_tracker.person", + "home", + {"friendly_name": "person", "latitude": 2.1, "longitude": 1.1}, + ) + assert test(hass) + + hass.states.async_set( + "device_tracker.person", + "home", + {"friendly_name": "person", "latitude": 20.1, "longitude": 10.1}, + ) + assert test(hass) + + hass.states.async_set( + "device_tracker.person", + "home", + {"friendly_name": "person", "latitude": 50.1, "longitude": 20.1}, + ) + assert not test(hass) + + async def test_extract_entities(): """Test extracting entities.""" assert condition.async_extract_entities( @@ -312,6 +504,16 @@ async def test_extract_entities(): }, ], }, + { + "condition": "state", + "entity_id": ["sensor.temperature_7", "sensor.temperature_8"], + "state": "100", + }, + { + "condition": "numeric_state", + "entity_id": ["sensor.temperature_9", "sensor.temperature_10"], + "below": 110, + }, ], } ) == { @@ -321,6 +523,10 @@ async def test_extract_entities(): "sensor.temperature_4", "sensor.temperature_5", "sensor.temperature_6", + "sensor.temperature_7", + "sensor.temperature_8", + 
"sensor.temperature_9", + "sensor.temperature_10", } diff --git a/tests/helpers/test_config_entry_flow.py b/tests/helpers/test_config_entry_flow.py index 7130514f47f..7893650d420 100644 --- a/tests/helpers/test_config_entry_flow.py +++ b/tests/helpers/test_config_entry_flow.py @@ -66,15 +66,19 @@ async def test_user_no_devices_found(hass, discovery_flow_conf): async def test_user_has_confirmation(hass, discovery_flow_conf): - """Test user requires no confirmation to setup.""" - flow = config_entries.HANDLERS["test"]() - flow.hass = hass - flow.context = {} + """Test user requires confirmation to setup.""" discovery_flow_conf["discovered"] = True + mock_entity_platform(hass, "config_flow.test", None) - result = await flow.async_step_user() + result = await hass.config_entries.flow.async_init( + "test", context={"source": config_entries.SOURCE_USER}, data={} + ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM + assert result["step_id"] == "confirm" + + result = await hass.config_entries.flow.async_configure(result["flow_id"], {}) + assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY @pytest.mark.parametrize("source", ["discovery", "ssdp", "zeroconf"]) @@ -149,6 +153,27 @@ async def test_only_one_in_progress(hass, discovery_flow_conf): assert len(hass.config_entries.flow.async_progress()) == 0 +async def test_import_abort_discovery(hass, discovery_flow_conf): + """Test import will finish and cancel discovered one.""" + mock_entity_platform(hass, "config_flow.test", None) + + # Discovery starts flow + result = await hass.config_entries.flow.async_init( + "test", context={"source": config_entries.SOURCE_DISCOVERY}, data={} + ) + assert result["type"] == data_entry_flow.RESULT_TYPE_FORM + + # Start import flow + result = await hass.config_entries.flow.async_init( + "test", context={"source": config_entries.SOURCE_IMPORT}, data={} + ) + + assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY + + # Discovery flow has been aborted + assert len(hass.config_entries.flow.async_progress()) == 0 + + async def test_import_no_confirmation(hass, discovery_flow_conf): """Test import requires no confirmation to set up.""" flow = config_entries.HANDLERS["test"]() diff --git a/tests/helpers/test_config_entry_oauth2_flow.py b/tests/helpers/test_config_entry_oauth2_flow.py index 801ea49bfbb..957bd507af7 100644 --- a/tests/helpers/test_config_entry_oauth2_flow.py +++ b/tests/helpers/test_config_entry_oauth2_flow.py @@ -69,6 +69,11 @@ class MockOAuth2Implementation(config_entry_oauth2_flow.AbstractOAuth2Implementa """Domain that is providing the implementation.""" return "test" + @property + def extra_authorize_data(self) -> dict: + """Extra data that needs to be appended to the authorize url.""" + return {"extra": "data"} + async def async_generate_authorize_url(self, flow_id: str) -> str: """Generate a url for the user to authorize.""" return "http://example.com/auth" diff --git a/tests/helpers/test_config_validation.py b/tests/helpers/test_config_validation.py index d0f19f356ae..e6c3757ec55 100644 --- a/tests/helpers/test_config_validation.py +++ b/tests/helpers/test_config_validation.py @@ -642,8 +642,8 @@ def test_deprecated_with_invalidation_version(caplog, schema, version): invalidated_schema(test_data) assert str(exc_info.value) == ( "The 'mars' option is deprecated, " - "please remove it from your configuration. This option will " - "become invalid in version 0.1.0" + "please remove it from your configuration. 
This option became " + "invalid in version 0.1.0" ) @@ -702,7 +702,7 @@ def test_deprecated_with_replacement_key_and_invalidation_version( invalidated_schema(test_data) assert str(exc_info.value) == ( "The 'mars' option is deprecated, " - "please replace it with 'jupiter'. This option will become " + "please replace it with 'jupiter'. This option became " "invalid in version 0.1.0" ) @@ -851,7 +851,7 @@ def test_deprecated_with_replacement_key_invalidation_version_default( invalidated_schema(test_data) assert str(exc_info.value) == ( "The 'mars' option is deprecated, " - "please replace it with 'jupiter'. This option will become " + "please replace it with 'jupiter'. This option became " "invalid in version 0.1.0" ) diff --git a/tests/helpers/test_discovery.py b/tests/helpers/test_discovery.py index 3b0996d676a..64f39fb13bd 100644 --- a/tests/helpers/test_discovery.py +++ b/tests/helpers/test_discovery.py @@ -1,11 +1,8 @@ """Test discovery helpers.""" from unittest.mock import patch -import pytest - from homeassistant import setup from homeassistant.core import callback -from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import discovery from tests.common import ( @@ -216,15 +213,3 @@ class TestHelpersDiscovery: # test_component will only be setup once assert len(component_calls) == 1 - - -async def test_load_platform_forbids_config(): - """Test you cannot setup config component with load_platform.""" - with pytest.raises(HomeAssistantError): - await discovery.async_load_platform(None, "config", "zwave", {}, {"config": {}}) - - -async def test_discover_forbids_config(): - """Test you cannot setup config component with load_platform.""" - with pytest.raises(HomeAssistantError): - await discovery.async_discover(None, None, None, "config", {}) diff --git a/tests/helpers/test_entity.py b/tests/helpers/test_entity.py index 70b72b1752f..49f8fbdef7c 100644 --- a/tests/helpers/test_entity.py +++ b/tests/helpers/test_entity.py @@ -6,11 +6,9 @@ import threading import pytest -from homeassistant.config import DATA_CUSTOMIZE -from homeassistant.const import ATTR_DEVICE_CLASS, ATTR_HIDDEN, STATE_UNAVAILABLE +from homeassistant.const import ATTR_DEVICE_CLASS, STATE_UNAVAILABLE from homeassistant.core import Context from homeassistant.helpers import entity, entity_registry -from homeassistant.helpers.entity_values import EntityValues from tests.async_mock import MagicMock, PropertyMock, patch from tests.common import get_test_home_assistant, mock_registry @@ -89,21 +87,6 @@ class TestHelpersEntity: """Stop everything that was started.""" self.hass.stop() - def test_default_hidden_not_in_attributes(self): - """Test that the default hidden property is set to False.""" - assert ATTR_HIDDEN not in self.hass.states.get(self.entity.entity_id).attributes - - def test_overwriting_hidden_property_to_true(self): - """Test we can overwrite hidden property to True.""" - self.hass.data[DATA_CUSTOMIZE] = EntityValues( - {self.entity.entity_id: {ATTR_HIDDEN: True}} - ) - self.entity.schedule_update_ha_state() - self.hass.block_till_done() - - state = self.hass.states.get(self.entity.entity_id) - assert state.attributes.get(ATTR_HIDDEN) - def test_generate_entity_id_given_hass(self): """Test generating an entity id given hass object.""" fmt = "test.{}" diff --git a/tests/helpers/test_entity_platform.py b/tests/helpers/test_entity_platform.py index 11dded7416f..8917e69e3ae 100644 --- a/tests/helpers/test_entity_platform.py +++ b/tests/helpers/test_entity_platform.py @@ -7,7 +7,7 @@ import 
pytest from homeassistant.const import UNIT_PERCENTAGE from homeassistant.core import callback -from homeassistant.exceptions import PlatformNotReady +from homeassistant.exceptions import HomeAssistantError, PlatformNotReady from homeassistant.helpers import entity_platform, entity_registry from homeassistant.helpers.entity import async_generate_entity_id from homeassistant.helpers.entity_component import ( @@ -359,6 +359,11 @@ async def test_raise_error_on_update(hass): assert len(updates) == 1 assert 1 in updates + assert entity1.hass is None + assert entity1.platform is None + assert entity2.hass is not None + assert entity2.platform is not None + async def test_async_remove_with_platform(hass): """Remove an entity from a platform.""" @@ -380,10 +385,11 @@ async def test_not_adding_duplicate_entities_with_unique_id(hass): assert len(hass.states.async_entity_ids()) == 1 - await component.async_add_entities( - [MockEntity(name="test2", unique_id="not_very_unique")] - ) + ent2 = MockEntity(name="test2", unique_id="not_very_unique") + await component.async_add_entities([ent2]) + assert ent2.hass is None + assert ent2.platform is None assert len(hass.states.async_entity_ids()) == 1 @@ -792,6 +798,11 @@ async def test_entity_disabled_by_integration(hass): await component.async_add_entities([entity_default, entity_disabled]) + assert entity_default.hass is not None + assert entity_default.platform is not None + assert entity_disabled.hass is None + assert entity_disabled.platform is None + registry = await hass.helpers.entity_registry.async_get_registry() entry_default = registry.async_get_or_create(DOMAIN, DOMAIN, "default") @@ -889,3 +900,13 @@ async def test_platforms_sharing_services(hass): assert len(entities) == 2 assert entity1 in entities assert entity2 in entities + + +async def test_invalid_entity_id(hass): + """Test specifying an invalid entity id.""" + platform = MockEntityPlatform(hass) + entity = MockEntity(entity_id="invalid_entity_id") + with pytest.raises(HomeAssistantError): + await platform.async_add_entities([entity]) + assert entity.hass is None + assert entity.platform is None diff --git a/tests/helpers/test_entityfilter.py b/tests/helpers/test_entityfilter.py index 726e6bd92d0..5bc37216f81 100644 --- a/tests/helpers/test_entityfilter.py +++ b/tests/helpers/test_entityfilter.py @@ -1,5 +1,9 @@ """The tests for the EntityFilter component.""" -from homeassistant.helpers.entityfilter import FILTER_SCHEMA, generate_filter +from homeassistant.helpers.entityfilter import ( + FILTER_SCHEMA, + INCLUDE_EXCLUDE_FILTER_SCHEMA, + generate_filter, +) def test_no_filters_case_1(): @@ -29,6 +33,27 @@ def test_includes_only_case_2(): assert testfilter("sun.sun") is False +def test_includes_only_with_glob_case_2(): + """If include specified, only pass if specified (Case 2).""" + incl_dom = {"light", "sensor"} + incl_glob = {"cover.*_window"} + incl_ent = {"binary_sensor.working"} + excl_dom = {} + excl_glob = {} + excl_ent = {} + testfilter = generate_filter( + incl_dom, incl_ent, excl_dom, excl_ent, incl_glob, excl_glob + ) + + assert testfilter("sensor.test") + assert testfilter("light.test") + assert testfilter("cover.bedroom_window") + assert testfilter("binary_sensor.working") + assert testfilter("binary_sensor.notworking") is False + assert testfilter("sun.sun") is False + assert testfilter("cover.garage_door") is False + + def test_excludes_only_case_3(): """If exclude specified, pass all but specified (Case 3).""" incl_dom = {} @@ -44,6 +69,27 @@ def test_excludes_only_case_3(): 
assert testfilter("sun.sun") is True +def test_excludes_only_with_glob_case_3(): + """If exclude specified, pass all but specified (Case 3).""" + incl_dom = {} + incl_glob = {} + incl_ent = {} + excl_dom = {"light", "sensor"} + excl_glob = {"cover.*_window"} + excl_ent = {"binary_sensor.working"} + testfilter = generate_filter( + incl_dom, incl_ent, excl_dom, excl_ent, incl_glob, excl_glob + ) + + assert testfilter("sensor.test") is False + assert testfilter("light.test") is False + assert testfilter("cover.bedroom_window") is False + assert testfilter("binary_sensor.working") is False + assert testfilter("binary_sensor.another") + assert testfilter("sun.sun") is True + assert testfilter("cover.garage_door") + + def test_with_include_domain_case4a(): """Test case 4a - include and exclude specified, with included domain.""" incl_dom = {"light", "sensor"} @@ -61,6 +107,49 @@ def test_with_include_domain_case4a(): assert testfilter("sun.sun") is False +def test_with_include_glob_case4a(): + """Test case 4a - include and exclude specified, with included glob.""" + incl_dom = {} + incl_glob = {"light.*", "sensor.*"} + incl_ent = {"binary_sensor.working"} + excl_dom = {} + excl_glob = {} + excl_ent = {"light.ignoreme", "sensor.notworking"} + testfilter = generate_filter( + incl_dom, incl_ent, excl_dom, excl_ent, incl_glob, excl_glob + ) + + assert testfilter("sensor.test") + assert testfilter("sensor.notworking") is False + assert testfilter("light.test") + assert testfilter("light.ignoreme") is False + assert testfilter("binary_sensor.working") + assert testfilter("binary_sensor.another") is False + assert testfilter("sun.sun") is False + + +def test_with_include_domain_glob_filtering_case4a(): + """Test case 4a - include and exclude specified, both have domains and globs.""" + incl_dom = {"light"} + incl_glob = {"*working"} + incl_ent = {} + excl_dom = {"binary_sensor"} + excl_glob = {"*notworking"} + excl_ent = {"light.ignoreme"} + testfilter = generate_filter( + incl_dom, incl_ent, excl_dom, excl_ent, incl_glob, excl_glob + ) + + assert testfilter("sensor.working") + assert testfilter("sensor.notworking") is False + assert testfilter("light.test") + assert testfilter("light.notworking") is False + assert testfilter("light.ignoreme") is False + assert testfilter("binary_sensor.not_working") is False + assert testfilter("binary_sensor.another") is False + assert testfilter("sun.sun") is False + + def test_exclude_domain_case4b(): """Test case 4b - include and exclude specified, with excluded domain.""" incl_dom = {} @@ -78,6 +167,27 @@ def test_exclude_domain_case4b(): assert testfilter("sun.sun") is True +def test_exclude_glob_case4b(): + """Test case 4b - include and exclude specified, with excluded glob.""" + incl_dom = {} + incl_glob = {} + incl_ent = {"binary_sensor.working"} + excl_dom = {} + excl_glob = {"binary_sensor.*"} + excl_ent = {"light.ignoreme", "sensor.notworking"} + testfilter = generate_filter( + incl_dom, incl_ent, excl_dom, excl_ent, incl_glob, excl_glob + ) + + assert testfilter("sensor.test") + assert testfilter("sensor.notworking") is False + assert testfilter("light.test") + assert testfilter("light.ignoreme") is False + assert testfilter("binary_sensor.working") + assert testfilter("binary_sensor.another") is False + assert testfilter("sun.sun") is True + + def test_no_domain_case4c(): """Test case 4c - include and exclude specified, with no domains.""" incl_dom = {} @@ -104,4 +214,37 @@ def test_filter_schema(): "exclude_entities": ["light.kitchen"], } filt = 
FILTER_SCHEMA(conf) + conf.update({"include_entity_globs": [], "exclude_entity_globs": []}) + assert filt.config == conf + + +def test_filter_schema_with_globs(): + """Test filter schema with glob options.""" + conf = { + "include_domains": ["light"], + "include_entity_globs": ["sensor.kitchen_*"], + "include_entities": ["switch.kitchen"], + "exclude_domains": ["cover"], + "exclude_entity_globs": ["sensor.weather_*"], + "exclude_entities": ["light.kitchen"], + } + filt = FILTER_SCHEMA(conf) + assert filt.config == conf + + +def test_filter_schema_include_exclude(): + """Test the include exclude filter schema.""" + conf = { + "include": { + "domains": ["light"], + "entity_globs": ["sensor.kitchen_*"], + "entities": ["switch.kitchen"], + }, + "exclude": { + "domains": ["cover"], + "entity_globs": ["sensor.weather_*"], + "entities": ["light.kitchen"], + }, + } + filt = INCLUDE_EXCLUDE_FILTER_SCHEMA(conf) assert filt.config == conf diff --git a/tests/test_bootstrap.py b/tests/test_bootstrap.py index e14afdca28a..f08ed5746b5 100644 --- a/tests/test_bootstrap.py +++ b/tests/test_bootstrap.py @@ -7,7 +7,7 @@ from unittest.mock import Mock import pytest -from homeassistant import bootstrap +from homeassistant import bootstrap, core import homeassistant.config as config_util from homeassistant.exceptions import HomeAssistantError import homeassistant.util.dt as dt_util @@ -16,9 +16,11 @@ from tests.async_mock import patch from tests.common import ( MockConfigEntry, MockModule, + MockPlatform, flush_store, get_test_config_dir, mock_coro, + mock_entity_platform, mock_integration, ) @@ -28,6 +30,11 @@ VERSION_PATH = os.path.join(get_test_config_dir(), config_util.VERSION_FILE) _LOGGER = logging.getLogger(__name__) +@pytest.fixture(autouse=True) +def apply_mock_storage(hass_storage): + """Apply the storage mock.""" + + @patch("homeassistant.bootstrap.async_enable_logging", Mock()) async def test_home_assistant_core_config_validation(hass): """Test if we pass in wrong information for HA conf.""" @@ -76,7 +83,7 @@ async def test_core_failure_loads_safe_mode(hass, caplog): assert "group" not in hass.config.components -async def test_setting_up_config(hass, caplog): +async def test_setting_up_config(hass): """Test we set up domains in config.""" await bootstrap._async_set_up_integrations( hass, {"group hello": {}, "homeassistant": {}} @@ -85,9 +92,8 @@ async def test_setting_up_config(hass, caplog): assert "group" in hass.config.components -async def test_setup_after_deps_all_present(hass, caplog): +async def test_setup_after_deps_all_present(hass): """Test after_dependencies when all present.""" - caplog.set_level(logging.DEBUG) order = [] def gen_domain_setup(domain): @@ -117,19 +123,115 @@ async def test_setup_after_deps_all_present(hass, caplog): ), ) - await bootstrap._async_set_up_integrations( - hass, {"root": {}, "first_dep": {}, "second_dep": {}} - ) + with patch( + "homeassistant.components.logger.async_setup", gen_domain_setup("logger") + ): + await bootstrap._async_set_up_integrations( + hass, {"root": {}, "first_dep": {}, "second_dep": {}, "logger": {}} + ) assert "root" in hass.config.components assert "first_dep" in hass.config.components assert "second_dep" in hass.config.components - assert order == ["root", "first_dep", "second_dep"] + assert order == ["logger", "root", "first_dep", "second_dep"] -async def test_setup_after_deps_not_trigger_load(hass, caplog): +async def test_setup_after_deps_in_stage_1_ignored(hass): + """Test after_dependencies are ignored in stage 1.""" + # This test 
relies on this + assert "cloud" in bootstrap.STAGE_1_INTEGRATIONS + order = [] + + def gen_domain_setup(domain): + async def async_setup(hass, config): + order.append(domain) + return True + + return async_setup + + mock_integration( + hass, + MockModule( + domain="normal_integration", + async_setup=gen_domain_setup("normal_integration"), + partial_manifest={"after_dependencies": ["an_after_dep"]}, + ), + ) + mock_integration( + hass, + MockModule( + domain="an_after_dep", async_setup=gen_domain_setup("an_after_dep"), + ), + ) + mock_integration( + hass, + MockModule( + domain="cloud", + async_setup=gen_domain_setup("cloud"), + partial_manifest={"after_dependencies": ["normal_integration"]}, + ), + ) + + await bootstrap._async_set_up_integrations( + hass, {"cloud": {}, "normal_integration": {}, "an_after_dep": {}} + ) + + assert "normal_integration" in hass.config.components + assert "cloud" in hass.config.components + assert order == ["cloud", "an_after_dep", "normal_integration"] + + +async def test_setup_after_deps_via_platform(hass): + """Test after_dependencies set up via platform.""" + order = [] + after_dep_event = asyncio.Event() + + def gen_domain_setup(domain): + async def async_setup(hass, config): + if domain == "after_dep_of_platform_int": + await after_dep_event.wait() + + order.append(domain) + return True + + return async_setup + + mock_integration( + hass, + MockModule( + domain="after_dep_of_platform_int", + async_setup=gen_domain_setup("after_dep_of_platform_int"), + ), + ) + mock_integration( + hass, + MockModule( + domain="platform_int", + async_setup=gen_domain_setup("platform_int"), + partial_manifest={"after_dependencies": ["after_dep_of_platform_int"]}, + ), + ) + mock_entity_platform(hass, "light.platform_int", MockPlatform()) + + @core.callback + def continue_loading(_): + """When light component loaded, continue other loading.""" + after_dep_event.set() + + hass.bus.async_listen_once("component_loaded", continue_loading) + + await bootstrap._async_set_up_integrations( + hass, {"light": {"platform": "platform_int"}, "after_dep_of_platform_int": {}} + ) + + assert "light" in hass.config.components + assert "after_dep_of_platform_int" in hass.config.components + assert "platform_int" in hass.config.components + assert order == ["after_dep_of_platform_int", "platform_int"] + + +async def test_setup_after_deps_not_trigger_load(hass): """Test after_dependencies does not trigger loading it.""" - caplog.set_level(logging.DEBUG) order = [] def gen_domain_setup(domain): @@ -164,12 +266,10 @@ async def test_setup_after_deps_not_trigger_load(hass, caplog): assert "root" in hass.config.components assert "first_dep" not in hass.config.components assert "second_dep" in hass.config.components - assert order == ["root", "second_dep"] -async def test_setup_after_deps_not_present(hass, caplog): +async def test_setup_after_deps_not_present(hass): """Test after_dependencies when referenced integration doesn't exist.""" - caplog.set_level(logging.DEBUG) order = [] def gen_domain_setup(domain): @@ -456,7 +556,14 @@ async def test_setup_safe_mode_if_no_frontend( with patch( "homeassistant.config.async_hass_config_yaml", - return_value={"map": {}, "person": {"invalid": True}}, + return_value={ + "homeassistant": { + "internal_url": "http://192.168.1.100:8123", + "external_url": "https://abcdef.ui.nabu.casa", + }, + "map": {}, + "person": {"invalid": True}, + }, ), patch("homeassistant.components.http.start_http_server_and_save_config"): hass = await bootstrap.async_setup_hass( 
config_dir=get_test_config_dir(), @@ -469,3 +576,7 @@ async def test_setup_safe_mode_if_no_frontend( ) assert "safe_mode" in hass.config.components + assert hass.config.config_dir == get_test_config_dir() + assert hass.config.skip_pip + assert hass.config.internal_url == "http://192.168.1.100:8123" + assert hass.config.external_url == "https://abcdef.ui.nabu.casa" diff --git a/tests/test_config.py b/tests/test_config.py index e374ab3ae69..c27ebb2b6e8 100644 --- a/tests/test_config.py +++ b/tests/test_config.py @@ -14,7 +14,6 @@ import homeassistant.config as config_util from homeassistant.const import ( ATTR_ASSUMED_STATE, ATTR_FRIENDLY_NAME, - ATTR_HIDDEN, CONF_AUTH_MFA_MODULES, CONF_AUTH_PROVIDERS, CONF_CUSTOMIZE, @@ -203,7 +202,7 @@ def test_core_config_schema(): def test_customize_dict_schema(): """Test basic customize config validation.""" - values = ({ATTR_FRIENDLY_NAME: None}, {ATTR_HIDDEN: "2"}, {ATTR_ASSUMED_STATE: "2"}) + values = ({ATTR_FRIENDLY_NAME: None}, {ATTR_ASSUMED_STATE: "2"}) for val in values: print(val) @@ -211,8 +210,8 @@ def test_customize_dict_schema(): config_util.CUSTOMIZE_DICT_SCHEMA(val) assert config_util.CUSTOMIZE_DICT_SCHEMA( - {ATTR_FRIENDLY_NAME: 2, ATTR_HIDDEN: "1", ATTR_ASSUMED_STATE: "0"} - ) == {ATTR_FRIENDLY_NAME: "2", ATTR_HIDDEN: True, ATTR_ASSUMED_STATE: False} + {ATTR_FRIENDLY_NAME: 2, ATTR_ASSUMED_STATE: "0"} + ) == {ATTR_FRIENDLY_NAME: "2", ATTR_ASSUMED_STATE: False} def test_customize_glob_is_ordered(): diff --git a/tests/test_config_entries.py b/tests/test_config_entries.py index 12b9c7308aa..6d513697daf 100644 --- a/tests/test_config_entries.py +++ b/tests/test_config_entries.py @@ -554,13 +554,15 @@ async def test_discovery_notification(hass): VERSION = 5 - async def async_step_discovery(self, user_input=None): + async def async_step_discovery(self, discovery_info): """Test discovery step.""" - if user_input is not None: - return self.async_create_entry( - title="Test Title", data={"token": "abcd"} - ) - return self.async_show_form(step_id="discovery") + return self.async_show_form(step_id="discovery_confirm") + + async def async_step_discovery_confirm(self, discovery_info): + """Test discovery confirm step.""" + return self.async_create_entry( + title="Test Title", data={"token": "abcd"} + ) result = await hass.config_entries.flow.async_init( "test", context={"source": config_entries.SOURCE_DISCOVERY} @@ -589,7 +591,7 @@ async def test_discovery_notification_not_created(hass): VERSION = 5 - async def async_step_discovery(self, user_input=None): + async def async_step_discovery(self, discovery_info): """Test discovery step.""" return self.async_abort(reason="test") @@ -1447,7 +1449,7 @@ async def test_partial_flows_hidden(hass, manager): VERSION = 1 - async def async_step_discovery(self, user_input): + async def async_step_discovery(self, discovery_info): """Test discovery step.""" discovery_started.set() await pause_discovery.wait() @@ -1577,3 +1579,182 @@ async def test_async_setup_update_entry(hass): assert len(entries) == 1 assert entries[0].state == config_entries.ENTRY_STATE_LOADED assert entries[0].data == {"value": "updated"} + + +@pytest.mark.parametrize( + "discovery_source", + ( + config_entries.SOURCE_DISCOVERY, + config_entries.SOURCE_SSDP, + config_entries.SOURCE_HOMEKIT, + config_entries.SOURCE_ZEROCONF, + config_entries.SOURCE_HASSIO, + ), +) +async def test_flow_with_default_discovery(hass, manager, discovery_source): + """Test that finishing a default discovery flow removes the unique ID in the entry.""" + mock_integration( 
+ hass, MockModule("comp", async_setup_entry=AsyncMock(return_value=True)), + ) + mock_entity_platform(hass, "config_flow.comp", None) + + class TestFlow(config_entries.ConfigFlow): + """Test flow.""" + + VERSION = 1 + + async def async_step_user(self, user_input=None): + """Test user step.""" + if user_input is None: + return self.async_show_form(step_id="user") + + return self.async_create_entry(title="yo", data={}) + + with patch.dict(config_entries.HANDLERS, {"comp": TestFlow}): + # Create one to be in progress + result = await manager.flow.async_init( + "comp", context={"source": discovery_source} + ) + assert result["type"] == data_entry_flow.RESULT_TYPE_FORM + + flows = hass.config_entries.flow.async_progress() + assert len(flows) == 1 + assert ( + flows[0]["context"]["unique_id"] + == config_entries.DEFAULT_DISCOVERY_UNIQUE_ID + ) + + # Finish flow + result2 = await manager.flow.async_configure( + result["flow_id"], user_input={"fake": "data"} + ) + assert result2["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY + + assert len(hass.config_entries.flow.async_progress()) == 0 + + entry = hass.config_entries.async_entries("comp")[0] + assert entry.title == "yo" + assert entry.source == discovery_source + assert entry.unique_id is None + + +async def test_flow_with_default_discovery_with_unique_id(hass, manager): + """Test discovery flow using the default discovery is ignored when unique ID is set.""" + mock_integration(hass, MockModule("comp")) + mock_entity_platform(hass, "config_flow.comp", None) + + class TestFlow(config_entries.ConfigFlow): + """Test flow.""" + + VERSION = 1 + + async def async_step_discovery(self, discovery_info): + """Test discovery step.""" + await self.async_set_unique_id("mock-unique-id") + # This call should make no difference, as a unique ID is set + await self._async_handle_discovery_without_unique_id() + return self.async_show_form(step_id="mock") + + with patch.dict(config_entries.HANDLERS, {"comp": TestFlow}): + result = await manager.flow.async_init( + "comp", context={"source": config_entries.SOURCE_DISCOVERY} + ) + assert result["type"] == data_entry_flow.RESULT_TYPE_FORM + + flows = hass.config_entries.flow.async_progress() + assert len(flows) == 1 + assert flows[0]["context"]["unique_id"] == "mock-unique-id" + + +async def test_default_discovery_abort_existing_entries(hass, manager): + """Test that a flow without discovery implementation aborts when a config entry exists.""" + hass.config.components.add("comp") + entry = MockConfigEntry(domain="comp", data={}, unique_id="mock-unique-id") + entry.add_to_hass(hass) + + mock_integration(hass, MockModule("comp")) + mock_entity_platform(hass, "config_flow.comp", None) + + class TestFlow(config_entries.ConfigFlow): + """Test flow.""" + + VERSION = 1 + + with patch.dict(config_entries.HANDLERS, {"comp": TestFlow}): + result = await manager.flow.async_init( + "comp", context={"source": config_entries.SOURCE_DISCOVERY} + ) + assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT + assert result["reason"] == "already_configured" + + +async def test_default_discovery_in_progress(hass, manager): + """Test that a flow using default discovery can only be triggered once.""" + mock_integration(hass, MockModule("comp")) + mock_entity_platform(hass, "config_flow.comp", None) + + class TestFlow(config_entries.ConfigFlow): + """Test flow.""" + + VERSION = 1 + + async def async_step_discovery(self, discovery_info): + """Test discovery step.""" + await self.async_set_unique_id(discovery_info.get("unique_id")) + 
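These flows rely on _async_handle_discovery_without_unique_id giving "default" discovery flows the shared DEFAULT_DISCOVERY_UNIQUE_ID, aborting when a config entry for the domain already exists, and allowing only one such flow in progress. A rough standalone sketch of that behaviour is below; the placeholder value, the AbortFlow class, and the existing_entries/flows_in_progress arguments are illustrative stand-ins, not the real ConfigFlow internals.

DEFAULT_DISCOVERY_UNIQUE_ID = "_default_discovery"  # illustrative placeholder value


class AbortFlow(Exception):
    """Raised to abort a discovery flow with a reason."""

    def __init__(self, reason):
        super().__init__(reason)
        self.reason = reason


def handle_discovery_without_unique_id(
    current_unique_id, existing_entries, flows_in_progress
):
    """Return the unique ID a default discovery flow should use."""
    if current_unique_id is not None:
        # A real unique ID is already set; the helper changes nothing.
        return current_unique_id
    if existing_entries:
        # The domain is already configured: stop discovering.
        raise AbortFlow("already_configured")
    if any(
        flow.get("context", {}).get("unique_id") == DEFAULT_DISCOVERY_UNIQUE_ID
        for flow in flows_in_progress
    ):
        # Another default discovery flow is already open for this domain.
        raise AbortFlow("already_in_progress")
    return DEFAULT_DISCOVERY_UNIQUE_ID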
await self._async_handle_discovery_without_unique_id() + return self.async_show_form(step_id="mock") + + with patch.dict(config_entries.HANDLERS, {"comp": TestFlow}): + result = await manager.flow.async_init( + "comp", + context={"source": config_entries.SOURCE_DISCOVERY}, + data={"unique_id": "mock-unique-id"}, + ) + assert result["type"] == data_entry_flow.RESULT_TYPE_FORM + + # Second discovery without a unique ID + result2 = await manager.flow.async_init( + "comp", context={"source": config_entries.SOURCE_DISCOVERY}, data={} + ) + assert result2["type"] == data_entry_flow.RESULT_TYPE_ABORT + + flows = hass.config_entries.flow.async_progress() + assert len(flows) == 1 + assert flows[0]["context"]["unique_id"] == "mock-unique-id" + + +async def test_default_discovery_abort_on_new_unique_flow(hass, manager): + """Test that a flow using default discovery is aborted when a second flow with unique ID is created.""" + mock_integration(hass, MockModule("comp")) + mock_entity_platform(hass, "config_flow.comp", None) + + class TestFlow(config_entries.ConfigFlow): + """Test flow.""" + + VERSION = 1 + + async def async_step_discovery(self, discovery_info): + """Test discovery step.""" + await self.async_set_unique_id(discovery_info.get("unique_id")) + await self._async_handle_discovery_without_unique_id() + return self.async_show_form(step_id="mock") + + with patch.dict(config_entries.HANDLERS, {"comp": TestFlow}): + # First discovery with default, no unique ID + result2 = await manager.flow.async_init( + "comp", context={"source": config_entries.SOURCE_DISCOVERY}, data={} + ) + assert result2["type"] == data_entry_flow.RESULT_TYPE_FORM + + # Second discovery brings in a unique ID + result = await manager.flow.async_init( + "comp", + context={"source": config_entries.SOURCE_DISCOVERY}, + data={"unique_id": "mock-unique-id"}, + ) + assert result["type"] == data_entry_flow.RESULT_TYPE_FORM + + # Ensure the first one is cancelled and we end up with just the last one + flows = hass.config_entries.flow.async_progress() + assert len(flows) == 1 + assert flows[0]["context"]["unique_id"] == "mock-unique-id" diff --git a/tests/test_loader.py b/tests/test_loader.py index eb99cb3a8ea..20669588180 100644 --- a/tests/test_loader.py +++ b/tests/test_loader.py @@ -13,27 +13,43 @@ async def test_component_dependencies(hass): """Test if we can get the proper load order of components.""" mock_integration(hass, MockModule("mod1")) mock_integration(hass, MockModule("mod2", ["mod1"])) - mock_integration(hass, MockModule("mod3", ["mod2"])) + mod_3 = mock_integration(hass, MockModule("mod3", ["mod2"])) - assert {"mod1", "mod2", "mod3"} == await loader.async_component_dependencies( - hass, "mod3" + assert {"mod1", "mod2", "mod3"} == await loader._async_component_dependencies( + hass, "mod_3", mod_3, set(), set() ) # Create circular dependency mock_integration(hass, MockModule("mod1", ["mod3"])) with pytest.raises(loader.CircularDependency): - print(await loader.async_component_dependencies(hass, "mod3")) + print( + await loader._async_component_dependencies( + hass, "mod_3", mod_3, set(), set() + ) + ) # Depend on non-existing component - mock_integration(hass, MockModule("mod1", ["nonexisting"])) + mod_1 = mock_integration(hass, MockModule("mod1", ["nonexisting"])) with pytest.raises(loader.IntegrationNotFound): - print(await loader.async_component_dependencies(hass, "mod1")) + print( + await loader._async_component_dependencies( + hass, "mod_1", mod_1, set(), set() + ) + ) - # Try to get dependencies for non-existing 
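The loader changes call _async_component_dependencies with explicit loading/loaded sets and expect CircularDependency for cycles, including one introduced via after_dependencies two levels down, and IntegrationNotFound for unknown domains. A compact standalone sketch of that kind of cycle-aware dependency walk follows; the dict-based manifests and the exception classes here are stand-ins, not the homeassistant.loader implementation.

class CircularDependency(Exception):
    pass


class IntegrationNotFound(Exception):
    pass


def component_dependencies(domain, manifests, loading=None, loaded=None):
    """Return all transitive dependencies of a domain, raising on a cycle."""
    loading = set() if loading is None else loading
    loaded = set() if loaded is None else loaded

    if domain in loading:
        raise CircularDependency(domain)
    if domain not in manifests:
        raise IntegrationNotFound(domain)

    loading.add(domain)
    deps = {domain}
    manifest = manifests[domain]
    for dep in manifest.get("dependencies", []) + manifest.get(
        "after_dependencies", []
    ):
        if dep in loaded:
            continue
        deps |= component_dependencies(dep, manifests, loading, loaded)
    loading.discard(domain)
    loaded.add(domain)
    return deps


# Mirrors the happy path asserted in test_component_dependencies:
manifests = {
    "mod1": {"dependencies": []},
    "mod2": {"dependencies": ["mod1"]},
    "mod3": {"dependencies": ["mod2"]},
}
assert component_dependencies("mod3", manifests) == {"mod1", "mod2", "mod3"}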
component
-    with pytest.raises(loader.IntegrationNotFound):
-        print(await loader.async_component_dependencies(hass, "nonexisting"))
+    # Having an after dependency 2 deps down that is circular
+    mod_1 = mock_integration(
+        hass, MockModule("mod1", partial_manifest={"after_dependencies": ["mod_3"]})
+    )
+
+    with pytest.raises(loader.CircularDependency):
+        print(
+            await loader._async_component_dependencies(
+                hass, "mod_3", mod_3, set(), set()
+            )
+        )
 
 
 def test_component_loader(hass):
diff --git a/tests/test_setup.py b/tests/test_setup.py
index 4ff380d0cc8..cb63f8fa865 100644
--- a/tests/test_setup.py
+++ b/tests/test_setup.py
@@ -480,12 +480,6 @@ class TestSetup:
         assert call_order == [1, 1, 2]
 
 
-async def test_component_cannot_depend_config(hass):
-    """Test config is not allowed to be a dependency."""
-    result = await setup._async_process_dependencies(hass, None, "test", ["config"])
-    assert not result
-
-
 async def test_component_warn_slow_setup(hass):
     """Warn we log when a component setup takes a long time."""
     mock_integration(hass, MockModule("test_component1"))
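test_component_warn_slow_setup checks that a warning is logged when a component's setup takes too long. A minimal sketch of the watchdog pattern such a test implies is below; the threshold value, constant name, and logger name are assumptions made for illustration, not read from this diff.

import asyncio
import logging

_LOGGER = logging.getLogger(__name__)
SLOW_SETUP_WARNING = 10  # seconds (assumed threshold)


async def setup_with_slow_warning(domain, setup_coro):
    """Run a component's setup and warn if it takes too long."""
    loop = asyncio.get_running_loop()
    handle = loop.call_later(
        SLOW_SETUP_WARNING,
        _LOGGER.warning,
        "Setup of %s is taking over %s seconds.",
        domain,
        SLOW_SETUP_WARNING,
    )
    try:
        return await setup_coro
    finally:
        # Cancel the pending warning once setup finishes (or fails).
        handle.cancel()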