Mirror of https://github.com/home-assistant/core.git, synced 2025-07-26 22:57:17 +00:00
Merge pull request #38332 from home-assistant/rc
Co-authored-by: Franck Nijhof <git@frenck.dev>
Co-authored-by: Martin Hjelmare <marhje52@gmail.com>
Co-authored-by: J. Nick Koston <nick@koston.org>
Co-authored-by: Phil Bruckner <pnbruckner@gmail.com>
Co-authored-by: Joakim Plate <elupus@ecce.se>
Co-authored-by: Jeroen Van den Keybus <jeroen.vandenkeybus@gmail.com>
Co-authored-by: Mister Wil <1091741+MisterWil@users.noreply.github.com>
Co-authored-by: Greg Dowling <pavoni@users.noreply.github.com>
Co-authored-by: Marcio Granzotto Rodrigues <oscensores@gmail.com>
Co-authored-by: Teemu R <tpr@iki.fi>
Co-authored-by: Kyle Hendricks <kylehendricks@users.noreply.github.com>
This commit is contained in:
commit d24b02646e
@@ -1,5 +1,4 @@
"""Support for AdGuard Home."""
from distutils.version import LooseVersion
import logging
from typing import Any, Dict

@@ -11,7 +10,6 @@ from homeassistant.components.adguard.const import (
DATA_ADGUARD_CLIENT,
DATA_ADGUARD_VERION,
DOMAIN,
MIN_ADGUARD_HOME_VERSION,
SERVICE_ADD_URL,
SERVICE_DISABLE_URL,
SERVICE_ENABLE_URL,
@@ -67,16 +65,10 @@ async def async_setup_entry(hass: HomeAssistantType, entry: ConfigEntry) -> bool
hass.data.setdefault(DOMAIN, {})[DATA_ADGUARD_CLIENT] = adguard

try:
version = await adguard.version()
await adguard.version()
except AdGuardHomeConnectionError as exception:
raise ConfigEntryNotReady from exception

if version and LooseVersion(MIN_ADGUARD_HOME_VERSION) > LooseVersion(version):
_LOGGER.error(
"This integration requires AdGuard Home v0.99.0 or higher to work correctly"
)
raise ConfigEntryNotReady

for component in "sensor", "switch":
hass.async_create_task(
hass.config_entries.async_forward_entry_setup(entry, component)
@@ -1,12 +1,11 @@
"""Config flow to configure the AdGuard Home integration."""
from distutils.version import LooseVersion
import logging

from adguardhome import AdGuardHome, AdGuardHomeConnectionError
import voluptuous as vol

from homeassistant import config_entries
from homeassistant.components.adguard.const import DOMAIN, MIN_ADGUARD_HOME_VERSION
from homeassistant.components.adguard.const import DOMAIN
from homeassistant.config_entries import ConfigFlow
from homeassistant.const import (
CONF_HOST,
@@ -79,20 +78,11 @@ class AdGuardHomeFlowHandler(ConfigFlow):
)

try:
version = await adguard.version()
await adguard.version()
except AdGuardHomeConnectionError:
errors["base"] = "connection_error"
return await self._show_setup_form(errors)

if version and LooseVersion(MIN_ADGUARD_HOME_VERSION) > LooseVersion(version):
return self.async_abort(
reason="adguard_home_outdated",
description_placeholders={
"current_version": version,
"minimal_version": MIN_ADGUARD_HOME_VERSION,
},
)

return self.async_create_entry(
title=user_input[CONF_HOST],
data={
@@ -160,20 +150,11 @@ class AdGuardHomeFlowHandler(ConfigFlow):
)

try:
version = await adguard.version()
await adguard.version()
except AdGuardHomeConnectionError:
errors["base"] = "connection_error"
return await self._show_hassio_form(errors)

if LooseVersion(MIN_ADGUARD_HOME_VERSION) > LooseVersion(version):
return self.async_abort(
reason="adguard_home_addon_outdated",
description_placeholders={
"current_version": version,
"minimal_version": MIN_ADGUARD_HOME_VERSION,
},
)

return self.async_create_entry(
title=self._hassio_discovery["addon"],
data={
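Note: the hunks above drop the LooseVersion-based minimum-version check and keep only a connectivity probe; the config flow maps a connection failure to a form error, while setup maps it to ConfigEntryNotReady. A minimal sketch of the setup-side pattern, assuming only the adguardhome client and the Home Assistant exception already imported above:

# Sketch only: the `adguard` argument is assumed to be an
# adguardhome.AdGuardHome client, as imported in the hunks above.
from adguardhome import AdGuardHomeConnectionError

from homeassistant.exceptions import ConfigEntryNotReady


async def _probe_adguard(adguard):
    """Raise ConfigEntryNotReady when the AdGuard Home API is unreachable."""
    try:
        await adguard.version()
    except AdGuardHomeConnectionError as exception:
        raise ConfigEntryNotReady from exception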
@@ -7,8 +7,6 @@ DATA_ADGUARD_VERION = "adguard_version"

CONF_FORCE = "force"

MIN_ADGUARD_HOME_VERSION = "v0.99.0"

SERVICE_ADD_URL = "add_url"
SERVICE_DISABLE_URL = "disable_url"
SERVICE_ENABLE_URL = "enable_url"
@@ -19,8 +19,6 @@
},
"error": { "connection_error": "Failed to connect." },
"abort": {
"adguard_home_outdated": "This integration requires AdGuard Home {minimal_version} or higher, you have {current_version}.",
"adguard_home_addon_outdated": "This integration requires AdGuard Home {minimal_version} or higher, you have {current_version}. Please update your Hass.io AdGuard Home add-on.",
"existing_instance_updated": "Updated existing configuration.",
"single_instance_allowed": "Only a single configuration of AdGuard Home is allowed."
}
@@ -455,6 +455,8 @@ class AutomationEntity(ToggleEntity, RestoreEntity):
self._async_detach_triggers()
self._async_detach_triggers = None

await self.action_script.async_stop()

self.async_write_ha_state()

async def _async_attach_triggers(
@@ -2,7 +2,7 @@
"domain": "discovery",
"name": "Discovery",
"documentation": "https://www.home-assistant.io/integrations/discovery",
"requirements": ["netdisco==2.8.0"],
"requirements": ["netdisco==2.8.1"],
"after_dependencies": ["zeroconf"],
"codeowners": [],
"quality_scale": "internal"
@@ -86,6 +86,9 @@ class ConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):
parsed_url = urlparse(discovery_info[ssdp.ATTR_SSDP_LOCATION])
friendly_name = discovery_info[ssdp.ATTR_UPNP_FRIENDLY_NAME]

if self._host_already_configured(parsed_url.hostname):
return self.async_abort(reason="already_configured")

# pylint: disable=no-member
self.context["title_placeholders"] = {"name": friendly_name}

@@ -158,6 +161,13 @@ class ConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):

return self.async_create_entry(title=validated[CONF_NAME], data=data)

def _host_already_configured(self, host):
"""See if we already have a harmony entry matching the host."""
for entry in self._async_current_entries():
if entry.data[CONF_HOST] == host:
return True
return False


def _options_from_user_input(user_input):
options = {}
@@ -112,21 +112,29 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry):
try:
async with async_timeout.timeout(10):
device_info = await async_get_device_info(pv_request)
except HUB_EXCEPTIONS:
_LOGGER.error("Connection error to PowerView hub: %s", hub_address)
raise ConfigEntryNotReady
if not device_info:
_LOGGER.error("Unable to initialize PowerView hub: %s", hub_address)
raise ConfigEntryNotReady

async with async_timeout.timeout(10):
rooms = Rooms(pv_request)
room_data = _async_map_data_by_id((await rooms.get_resources())[ROOM_DATA])

async with async_timeout.timeout(10):
scenes = Scenes(pv_request)
scene_data = _async_map_data_by_id((await scenes.get_resources())[SCENE_DATA])
scene_data = _async_map_data_by_id(
(await scenes.get_resources())[SCENE_DATA]
)

async with async_timeout.timeout(10):
shades = Shades(pv_request)
shade_data = _async_map_data_by_id((await shades.get_resources())[SHADE_DATA])
shade_data = _async_map_data_by_id(
(await shades.get_resources())[SHADE_DATA]
)
except HUB_EXCEPTIONS:
_LOGGER.error("Connection error to PowerView hub: %s", hub_address)
raise ConfigEntryNotReady

if not device_info:
_LOGGER.error("Unable to initialize PowerView hub: %s", hub_address)
raise ConfigEntryNotReady

async def async_update_data():
"""Fetch data from shade endpoint."""
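Note: the hunk above moves the rooms, scenes, and shades requests inside the single try/except that previously only guarded the device-info call, so any member of HUB_EXCEPTIONS raised during those awaits defers the setup instead of crashing it. A minimal standalone sketch of the pattern, with hypothetical fetch_rooms/fetch_scenes/fetch_shades coroutines standing in for the aiopvapi calls:

# Sketch only: several timed awaits share one error path so any hub failure
# becomes ConfigEntryNotReady and Home Assistant retries the setup later.
import asyncio

import async_timeout

from homeassistant.exceptions import ConfigEntryNotReady

# The real HUB_EXCEPTIONS tuple also covers aiohttp/aiopvapi errors (see the const.py hunk below).
SKETCH_HUB_EXCEPTIONS = (asyncio.TimeoutError,)


async def setup_hub(fetch_rooms, fetch_scenes, fetch_shades):
    """Fetch all hub resources, deferring setup on any failure."""
    try:
        async with async_timeout.timeout(10):
            rooms = await fetch_rooms()
        async with async_timeout.timeout(10):
            scenes = await fetch_scenes()
        async with async_timeout.timeout(10):
            shades = await fetch_shades()
    except SKETCH_HUB_EXCEPTIONS as err:
        raise ConfigEntryNotReady from err
    return rooms, scenes, shades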
@@ -2,6 +2,7 @@

import asyncio

from aiohttp.client_exceptions import ServerDisconnectedError
from aiopvapi.helpers.aiorequest import PvApiConnectionError

DOMAIN = "hunterdouglas_powerview"
@@ -64,7 +65,7 @@ PV_SHADE_DATA = "pv_shade_data"
PV_ROOM_DATA = "pv_room_data"
COORDINATOR = "coordinator"

HUB_EXCEPTIONS = (asyncio.TimeoutError, PvApiConnectionError)
HUB_EXCEPTIONS = (ServerDisconnectedError, asyncio.TimeoutError, PvApiConnectionError)

LEGACY_DEVICE_SUB_REVISION = 1
LEGACY_DEVICE_REVISION = 0
@@ -120,7 +120,7 @@ def determine_zones(receiver):
out = {"zone2": False, "zone3": False}
try:
_LOGGER.debug("Checking for zone 2 capability")
receiver.raw("ZPW")
receiver.raw("ZPWQSTN")
out["zone2"] = True
except ValueError as error:
if str(error) != TIMEOUT_MESSAGE:
@@ -128,7 +128,7 @@ def determine_zones(receiver):
_LOGGER.debug("Zone 2 timed out, assuming no functionality")
try:
_LOGGER.debug("Checking for zone 3 capability")
receiver.raw("PW3")
receiver.raw("PW3QSTN")
out["zone3"] = True
except ValueError as error:
if str(error) != TIMEOUT_MESSAGE:
@@ -6,7 +6,7 @@ from aiohttp.client_exceptions import ServerDisconnectedError
from onvif import ONVIFCamera, ONVIFService
from zeep.exceptions import Fault

from homeassistant.core import CALLBACK_TYPE, HomeAssistant, callback
from homeassistant.core import CALLBACK_TYPE, CoreState, HomeAssistant, callback
from homeassistant.helpers.event import async_track_point_in_utc_time
from homeassistant.util import dt as dt_util

@@ -114,6 +114,7 @@ class EventManager:

async def async_pull_messages(self, _now: dt = None) -> None:
"""Pull messages from device."""
if self.hass.state == CoreState.running:
try:
pullpoint = self.device.create_pullpoint_service()
req = pullpoint.create_type("PullMessages")
@@ -3,6 +3,6 @@
"name": "Tesla Powerwall",
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/powerwall",
"requirements": ["tesla-powerwall==0.2.11"],
"requirements": ["tesla-powerwall==0.2.12"],
"codeowners": ["@bdraco", "@jrester"]
}
@@ -250,7 +250,7 @@ class SamsungTVWSBridge(SamsungTVBridge):
host=self.host,
port=self.port,
token=self.token,
timeout=1,
timeout=10,
name=VALUE_CONF_NAME,
)
self._remote.open()
@@ -5,7 +5,12 @@ from requests.exceptions import ConnectTimeout, HTTPError
from skybellpy import Skybell
import voluptuous as vol

from homeassistant.const import ATTR_ATTRIBUTION, CONF_PASSWORD, CONF_USERNAME
from homeassistant.const import (
ATTR_ATTRIBUTION,
CONF_PASSWORD,
CONF_USERNAME,
__version__,
)
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity import Entity

@@ -20,6 +25,8 @@ DOMAIN = "skybell"
DEFAULT_CACHEDB = "./skybell_cache.pickle"
DEFAULT_ENTITY_NAMESPACE = "skybell"

AGENT_IDENTIFIER = f"HomeAssistant/{__version__}"

CONFIG_SCHEMA = vol.Schema(
{
DOMAIN: vol.Schema(
@@ -42,7 +49,11 @@ def setup(hass, config):
try:
cache = hass.config.path(DEFAULT_CACHEDB)
skybell = Skybell(
username=username, password=password, get_devices=True, cache_path=cache
username=username,
password=password,
get_devices=True,
cache_path=cache,
agent_identifier=AGENT_IDENTIFIER,
)

hass.data[DOMAIN] = skybell
@@ -2,6 +2,6 @@
"domain": "skybell",
"name": "SkyBell",
"documentation": "https://www.home-assistant.io/integrations/skybell",
"requirements": ["skybellpy==0.4.0"],
"requirements": ["skybellpy==0.6.1"],
"codeowners": []
}
@@ -6,7 +6,12 @@ import speedtest
import voluptuous as vol

from homeassistant.config_entries import SOURCE_IMPORT
from homeassistant.const import CONF_MONITORED_CONDITIONS, CONF_SCAN_INTERVAL
from homeassistant.const import (
CONF_MONITORED_CONDITIONS,
CONF_SCAN_INTERVAL,
EVENT_HOMEASSISTANT_STARTED,
)
from homeassistant.core import CoreState
from homeassistant.exceptions import ConfigEntryNotReady
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
@@ -70,10 +75,25 @@ async def async_setup_entry(hass, config_entry):
coordinator = SpeedTestDataCoordinator(hass, config_entry)
await coordinator.async_setup()

if not config_entry.options[CONF_MANUAL]:
async def _enable_scheduled_speedtests(*_):
"""Activate the data update coordinator."""
coordinator.update_interval = timedelta(
minutes=config_entry.options.get(CONF_SCAN_INTERVAL, DEFAULT_SCAN_INTERVAL)
)
await coordinator.async_refresh()

if not config_entry.options[CONF_MANUAL]:
if hass.state == CoreState.running:
await _enable_scheduled_speedtests()
if not coordinator.last_update_success:
raise ConfigEntryNotReady
else:
# Running a speed test during startup can prevent
# integrations from being able to setup because it
# can saturate the network interface.
hass.bus.async_listen_once(
EVENT_HOMEASSISTANT_STARTED, _enable_scheduled_speedtests
)

hass.data[DOMAIN] = coordinator

@@ -107,12 +127,6 @@ class SpeedTestDataCoordinator(DataUpdateCoordinator):
super().__init__(
self.hass, _LOGGER, name=DOMAIN, update_method=self.async_update,
)
if not self.config_entry.options.get(CONF_MANUAL):
self.update_interval = timedelta(
minutes=self.config_entry.options.get(
CONF_SCAN_INTERVAL, DEFAULT_SCAN_INTERVAL
)
)

def update_servers(self):
"""Update list of test servers."""
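Note: the comment in the hunk above gives the reasoning for the new _enable_scheduled_speedtests callback: a speed test during startup can saturate the network interface, so scheduled runs start immediately only when Home Assistant is already running and are otherwise deferred to EVENT_HOMEASSISTANT_STARTED. A minimal sketch of that defer-until-started pattern, with a hypothetical do_heavy_work callback standing in for the speed test:

# Sketch only: run the callback now if startup already finished, otherwise
# register it for the "started" event so it cannot slow down other setups.
# do_heavy_work should accept and ignore an optional event argument, like
# _enable_scheduled_speedtests(*_) above.
from homeassistant.const import EVENT_HOMEASSISTANT_STARTED
from homeassistant.core import CoreState, HomeAssistant


async def schedule_after_startup(hass: HomeAssistant, do_heavy_work) -> None:
    """Defer the hypothetical do_heavy_work coroutine until HA has started."""
    if hass.state == CoreState.running:
        await do_heavy_work()
    else:
        hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STARTED, do_heavy_work)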
@@ -12,7 +12,6 @@ from .const import (
ATTR_SERVER_ID,
ATTR_SERVER_NAME,
ATTRIBUTION,
CONF_MANUAL,
DEFAULT_NAME,
DOMAIN,
ICON,
@@ -97,7 +96,6 @@ class SpeedtestSensor(RestoreEntity):
async def async_added_to_hass(self):
"""Handle entity which will be added."""
await super().async_added_to_hass()
if self.coordinator.config_entry.options[CONF_MANUAL]:
state = await self.async_get_last_state()
if state:
self._state = state.state
@@ -2,7 +2,7 @@
"domain": "ssdp",
"name": "Simple Service Discovery Protocol (SSDP)",
"documentation": "https://www.home-assistant.io/integrations/ssdp",
"requirements": ["defusedxml==0.6.0", "netdisco==2.8.0"],
"requirements": ["defusedxml==0.6.0", "netdisco==2.8.1"],
"after_dependencies": ["zeroconf"],
"codeowners": []
}
@@ -194,7 +194,9 @@ class VeraDevice(Entity):
slugify(vera_device.name), vera_device.device_id
)

self.controller.register(vera_device, self._update_callback)
async def async_added_to_hass(self):
"""Subscribe to updates."""
self.controller.register(self.vera_device, self._update_callback)

def _update_callback(self, _device):
"""Update the state."""
@@ -2,6 +2,6 @@
"domain": "xbox_live",
"name": "Xbox Live",
"documentation": "https://www.home-assistant.io/integrations/xbox_live",
"requirements": ["xboxapi==2.0.0"],
"requirements": ["xboxapi==2.0.1"],
"codeowners": ["@MartinHjelmare"]
}
@@ -3,7 +3,7 @@
"name": "Xiaomi Miio",
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/xiaomi_miio",
"requirements": ["construct==2.9.45", "python-miio==0.5.2.1"],
"requirements": ["construct==2.9.45", "python-miio==0.5.3"],
"codeowners": ["@rytilahti", "@syssi"],
"zeroconf": ["_miio._udp.local."]
}
@@ -1,7 +1,7 @@
"""Constants used by Home Assistant components."""
MAJOR_VERSION = 0
MINOR_VERSION = 113
PATCH_VERSION = "1"
PATCH_VERSION = "2"
__short_version__ = f"{MAJOR_VERSION}.{MINOR_VERSION}"
__version__ = f"{__short_version__}.{PATCH_VERSION}"
REQUIRED_PYTHON_VER = (3, 7, 1)
@@ -392,12 +392,28 @@ class HomeAssistant:
"""Block until all pending work is done."""
# To flush out any call_soon_threadsafe
await asyncio.sleep(0)
start_time: Optional[float] = None

while self._pending_tasks:
pending = [task for task in self._pending_tasks if not task.done()]
self._pending_tasks.clear()
if pending:
await self._await_and_log_pending(pending)

if start_time is None:
# Avoid calling monotonic() until we know
# we may need to start logging blocked tasks.
start_time = 0
elif start_time == 0:
# If we have waited twice then we set the start
# time
start_time = monotonic()
elif monotonic() - start_time > BLOCK_LOG_TIMEOUT:
# We have waited at least three loops and new tasks
# continue to block. At this point we start
# logging all waiting tasks.
for task in pending:
_LOGGER.debug("Waiting for task: %s", task)
else:
await asyncio.sleep(0)
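Note: the hunk above delays calling monotonic() and only starts debug-logging pending tasks once the wait loop has spun a few times and BLOCK_LOG_TIMEOUT has elapsed. A standalone sketch of that start_time progression (the timeout value here is a placeholder, not taken from the hunk):

# Sketch of the start_time state machine used above:
# None  -> first pass, monotonic() is not called yet;
# 0     -> second pass, record the actual start time;
# float -> later passes, log pending tasks once BLOCK_LOG_TIMEOUT has elapsed.
from time import monotonic

BLOCK_LOG_TIMEOUT = 60  # seconds; placeholder standing in for the constant referenced above


def advance_wait_state(start_time):
    """Return (new_start_time, should_log_pending) for one loop iteration."""
    if start_time is None:
        return 0, False
    if start_time == 0:
        return monotonic(), False
    return start_time, monotonic() - start_time > BLOCK_LOG_TIMEOUT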
@@ -4,7 +4,19 @@ from datetime import datetime
from functools import partial
import itertools
import logging
from typing import Any, Callable, Dict, List, Optional, Sequence, Set, Tuple
from types import MappingProxyType
from typing import (
Any,
Callable,
Dict,
List,
Optional,
Sequence,
Set,
Tuple,
Union,
cast,
)

from async_timeout import timeout
import voluptuous as vol
@@ -134,13 +146,13 @@ class _ScriptRun:
self,
hass: HomeAssistant,
script: "Script",
variables: Optional[Sequence],
variables: Dict[str, Any],
context: Optional[Context],
log_exceptions: bool,
) -> None:
self._hass = hass
self._script = script
self._variables = variables or {}
self._variables = variables
self._context = context
self._log_exceptions = log_exceptions
self._step = -1
@@ -595,6 +607,9 @@ async def _async_stop_scripts_at_shutdown(hass, event):
)


_VarsType = Union[Dict[str, Any], MappingProxyType]


class Script:
"""Representation of a script."""

@@ -617,6 +632,7 @@ class Script:
hass.bus.async_listen_once(
EVENT_HOMEASSISTANT_STOP, partial(_async_stop_scripts_at_shutdown, hass)
)
self._top_level = top_level
if top_level:
all_scripts.append(
{"instance": self, "started_before_shutdown": not hass.is_stopping}
@@ -732,14 +748,16 @@ class Script:
self._referenced_entities = referenced
return referenced

def run(self, variables=None, context=None):
def run(
self, variables: Optional[_VarsType] = None, context: Optional[Context] = None
) -> None:
"""Run script."""
asyncio.run_coroutine_threadsafe(
self.async_run(variables, context), self._hass.loop
).result()

async def async_run(
self, variables: Optional[Sequence] = None, context: Optional[Context] = None
self, variables: Optional[_VarsType] = None, context: Optional[Context] = None
) -> None:
"""Run script."""
if self.is_running:
@@ -753,11 +771,19 @@ class Script:
self._log("Maximum number of runs exceeded", level=logging.WARNING)
return

# If this is a top level Script then make a copy of the variables in case they
# are read-only, but more importantly, so as not to leak any variables created
# during the run back to the caller.
if self._top_level:
variables = dict(variables) if variables is not None else {}

if self.script_mode != SCRIPT_MODE_QUEUED:
cls = _ScriptRun
else:
cls = _QueuedScriptRun
run = cls(self._hass, self, variables, context, self._log_exceptions)
run = cls(
self._hass, self, cast(dict, variables), context, self._log_exceptions
)
self._runs.append(run)

try:
@@ -795,6 +821,7 @@ class Script:
action[CONF_REPEAT][CONF_SEQUENCE],
f"{self.name}: {step_name}",
script_mode=SCRIPT_MODE_PARALLEL,
max_runs=self.max_runs,
logger=self._logger,
top_level=False,
)
@@ -822,6 +849,7 @@ class Script:
choice[CONF_SEQUENCE],
f"{self.name}: {step_name}: choice {idx}",
script_mode=SCRIPT_MODE_PARALLEL,
max_runs=self.max_runs,
logger=self._logger,
top_level=False,
)
@@ -836,6 +864,7 @@ class Script:
action[CONF_DEFAULT],
f"{self.name}: {step_name}: default",
script_mode=SCRIPT_MODE_PARALLEL,
max_runs=self.max_runs,
logger=self._logger,
top_level=False,
)
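Note: the new comment in the async_run hunk above explains why a top-level Script copies its incoming variables: callers may now pass a read-only MappingProxyType (see the _VarsType alias), and the copy also keeps variables created during the run from leaking back to the caller. A short self-contained sketch of both points:

# Sketch only: a read-only proxy cannot be mutated, and copying it gives the
# run a private dict, so run-local variables never reach the caller's mapping.
from types import MappingProxyType

caller_vars = MappingProxyType({"x": 1})

run_vars = dict(caller_vars) if caller_vars is not None else {}
run_vars["created_during_run"] = 42  # mutating caller_vars directly would raise TypeError

assert "created_during_run" not in caller_vars
assert caller_vars["x"] == 1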
@@ -16,7 +16,7 @@ hass-nabucasa==0.34.7
home-assistant-frontend==20200716.0
importlib-metadata==1.6.0;python_version<'3.8'
jinja2>=2.11.1
netdisco==2.8.0
netdisco==2.8.1
paho-mqtt==1.5.0
pip>=8.0.3
python-slugify==4.0.0
@@ -936,7 +936,7 @@ netdata==0.2.0

# homeassistant.components.discovery
# homeassistant.components.ssdp
netdisco==2.8.0
netdisco==2.8.1

# homeassistant.components.neurio_energy
neurio==0.3.1
@@ -1701,7 +1701,7 @@ python-juicenet==1.0.1
# python-lirc==1.2.3

# homeassistant.components.xiaomi_miio
python-miio==0.5.2.1
python-miio==0.5.3

# homeassistant.components.mpd
python-mpd2==1.0.0
@@ -1945,7 +1945,7 @@ simplisafe-python==9.2.1
sisyphus-control==2.2.1

# homeassistant.components.skybell
skybellpy==0.4.0
skybellpy==0.6.1

# homeassistant.components.slack
slackclient==2.5.0
@@ -2080,7 +2080,7 @@ temperusb==1.5.3
# tensorflow==1.13.2

# homeassistant.components.powerwall
tesla-powerwall==0.2.11
tesla-powerwall==0.2.12

# homeassistant.components.tesla
teslajsonpy==0.9.3
@@ -2204,7 +2204,7 @@ wled==0.4.3
xbee-helper==0.0.7

# homeassistant.components.xbox_live
xboxapi==2.0.0
xboxapi==2.0.1

# homeassistant.components.xfinity
xfinity-gateway==0.0.4
@@ -431,7 +431,7 @@ nessclient==0.9.15

# homeassistant.components.discovery
# homeassistant.components.ssdp
netdisco==2.8.0
netdisco==2.8.1

# homeassistant.components.nexia
nexia==0.9.3
@@ -767,7 +767,7 @@ python-izone==1.1.2
python-juicenet==1.0.1

# homeassistant.components.xiaomi_miio
python-miio==0.5.2.1
python-miio==0.5.3

# homeassistant.components.nest
python-nest==4.1.0
@@ -909,7 +909,7 @@ sunwatcher==0.2.1
tellduslive==0.10.11

# homeassistant.components.powerwall
tesla-powerwall==0.2.11
tesla-powerwall==0.2.12

# homeassistant.components.tesla
teslajsonpy==0.9.3
@@ -4,7 +4,7 @@ import aiohttp

from homeassistant import config_entries, data_entry_flow
from homeassistant.components.adguard import config_flow
from homeassistant.components.adguard.const import DOMAIN, MIN_ADGUARD_HOME_VERSION
from homeassistant.components.adguard.const import DOMAIN
from homeassistant.const import (
CONF_HOST,
CONF_PASSWORD,
@@ -229,52 +229,3 @@ async def test_hassio_connection_error(hass, aioclient_mock):
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == "hassio_confirm"
assert result["errors"] == {"base": "connection_error"}


async def test_outdated_adguard_version(hass, aioclient_mock):
"""Test we show abort when connecting with unsupported AdGuard version."""
aioclient_mock.get(
f"{'https' if FIXTURE_USER_INPUT[CONF_SSL] else 'http'}"
f"://{FIXTURE_USER_INPUT[CONF_HOST]}"
f":{FIXTURE_USER_INPUT[CONF_PORT]}/control/status",
json={"version": "v0.98.0"},
headers={"Content-Type": "application/json"},
)

flow = config_flow.AdGuardHomeFlowHandler()
flow.hass = hass
result = await flow.async_step_user(user_input=None)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == "user"

result = await flow.async_step_user(user_input=FIXTURE_USER_INPUT)
assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT
assert result["reason"] == "adguard_home_outdated"
assert result["description_placeholders"] == {
"current_version": "v0.98.0",
"minimal_version": MIN_ADGUARD_HOME_VERSION,
}


async def test_outdated_adguard_addon_version(hass, aioclient_mock):
"""Test we show abort when connecting with unsupported AdGuard add-on version."""
aioclient_mock.get(
"http://mock-adguard:3000/control/status",
json={"version": "v0.98.0"},
headers={"Content-Type": "application/json"},
)

result = await hass.config_entries.flow.async_init(
"adguard",
data={"addon": "AdGuard Home Addon", "host": "mock-adguard", "port": 3000},
context={"source": "hassio"},
)

result = await hass.config_entries.flow.async_configure(result["flow_id"], {})

assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT
assert result["reason"] == "adguard_home_addon_outdated"
assert result["description_placeholders"] == {
"current_version": "v0.98.0",
"minimal_version": MIN_ADGUARD_HOME_VERSION,
}
@@ -1,4 +1,6 @@
"""The tests for the automation component."""
import asyncio

import pytest

from homeassistant.components import logbook
@@ -12,10 +14,11 @@ from homeassistant.const import (
ATTR_ENTITY_ID,
ATTR_NAME,
EVENT_HOMEASSISTANT_STARTED,
SERVICE_TURN_OFF,
STATE_OFF,
STATE_ON,
)
from homeassistant.core import Context, CoreState, State
from homeassistant.core import Context, CoreState, State, callback
from homeassistant.exceptions import HomeAssistantError, Unauthorized
from homeassistant.setup import async_setup_component
import homeassistant.util.dt as dt_util
@@ -553,6 +556,58 @@ async def test_reload_config_handles_load_fails(hass, calls):
assert len(calls) == 2


@pytest.mark.parametrize("service", ["turn_off", "reload"])
async def test_automation_stops(hass, calls, service):
"""Test that turning off / reloading an automation stops any running actions."""
entity_id = "automation.hello"
test_entity = "test.entity"

config = {
automation.DOMAIN: {
"alias": "hello",
"trigger": {"platform": "event", "event_type": "test_event"},
"action": [
{"event": "running"},
{"wait_template": "{{ is_state('test.entity', 'goodbye') }}"},
{"service": "test.automation"},
],
}
}
assert await async_setup_component(hass, automation.DOMAIN, config,)

running = asyncio.Event()

@callback
def running_cb(event):
running.set()

hass.bus.async_listen_once("running", running_cb)
hass.states.async_set(test_entity, "hello")

hass.bus.async_fire("test_event")
await running.wait()

if service == "turn_off":
await hass.services.async_call(
automation.DOMAIN,
SERVICE_TURN_OFF,
{ATTR_ENTITY_ID: entity_id},
blocking=True,
)
else:
with patch(
"homeassistant.config.load_yaml_config_file",
autospec=True,
return_value=config,
):
await common.async_reload(hass)

hass.states.async_set(test_entity, "goodbye")
await hass.async_block_till_done()

assert len(calls) == 0


async def test_automation_restore_state(hass):
"""Ensure states are restored on startup."""
time = dt_util.utcnow()
@@ -145,6 +145,30 @@ async def test_form_ssdp(hass):
assert len(mock_setup_entry.mock_calls) == 1


async def test_form_ssdp_aborts_before_checking_remoteid_if_host_known(hass):
"""Test we abort without connecting if the host is already known."""
await setup.async_setup_component(hass, "persistent_notification", {})
config_entry = MockConfigEntry(
domain=DOMAIN, data={"host": "2.2.2.2", "name": "any"},
)
config_entry.add_to_hass(hass)

harmonyapi = _get_mock_harmonyapi(connect=True)

with patch(
"homeassistant.components.harmony.util.HarmonyAPI", return_value=harmonyapi,
):
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": config_entries.SOURCE_SSDP},
data={
"friendlyName": "Harmony Hub",
"ssdp_location": "http://2.2.2.2:8088/description",
},
)
assert result["type"] == "abort"


async def test_form_cannot_connect(hass):
"""Test we handle cannot connect error."""
result = await hass.config_entries.flow.async_init(
@@ -97,7 +97,7 @@ MOCK_CALLS_ENTRY_WS = {
"host": "fake",
"name": "HomeAssistant",
"port": 8001,
"timeout": 1,
"timeout": 10,
"token": "abcde",
}
@@ -4,6 +4,7 @@ import asyncio
from contextlib import contextmanager
from datetime import timedelta
import logging
from types import MappingProxyType
from unittest import mock

import pytest
@@ -122,7 +123,7 @@ async def test_firing_event_template(hass):
)
script_obj = script.Script(hass, sequence)

await script_obj.async_run({"is_world": "yes"}, context=context)
await script_obj.async_run(MappingProxyType({"is_world": "yes"}), context=context)
await hass.async_block_till_done()

assert len(events) == 1
@@ -175,7 +176,7 @@ async def test_calling_service_template(hass):
)
script_obj = script.Script(hass, sequence)

await script_obj.async_run({"is_world": "yes"}, context=context)
await script_obj.async_run(MappingProxyType({"is_world": "yes"}), context=context)
await hass.async_block_till_done()

assert len(calls) == 1
@@ -235,15 +236,17 @@ async def test_multiple_runs_no_wait(hass):
logger.debug("starting 1st script")
hass.async_create_task(
script_obj.async_run(
MappingProxyType(
{"fire1": "1", "listen1": "2", "fire2": "3", "listen2": "4"}
)
)
)
await asyncio.wait_for(heard_event.wait(), 1)
unsub()

logger.debug("starting 2nd script")
await script_obj.async_run(
{"fire1": "2", "listen1": "3", "fire2": "4", "listen2": "4"}
MappingProxyType({"fire1": "2", "listen1": "3", "fire2": "4", "listen2": "4"})
)
await hass.async_block_till_done()

@@ -670,7 +673,9 @@ async def test_wait_template_variables(hass):

try:
hass.states.async_set("switch.test", "on")
hass.async_create_task(script_obj.async_run({"data": "switch.test"}))
hass.async_create_task(
script_obj.async_run(MappingProxyType({"data": "switch.test"}))
)
await asyncio.wait_for(wait_started_flag.wait(), 1)

assert script_obj.is_running
@@ -882,7 +887,14 @@ async def test_repeat_var_in_condition(hass, condition):
assert len(events) == 2


async def test_repeat_nested(hass):
@pytest.mark.parametrize(
"variables,first_last,inside_x",
[
(None, {"repeat": "None", "x": "None"}, "None"),
(MappingProxyType({"x": 1}), {"repeat": "None", "x": "1"}, "1"),
],
)
async def test_repeat_nested(hass, variables, first_last, inside_x):
"""Test nested repeats."""
event = "test_event"
events = async_capture_events(hass, event)
@@ -892,7 +904,8 @@ async def test_repeat_nested(hass):
{
"event": event,
"event_data_template": {
"repeat": "{{ None if repeat is not defined else repeat }}"
"repeat": "{{ None if repeat is not defined else repeat }}",
"x": "{{ None if x is not defined else x }}",
},
},
{
@@ -905,6 +918,7 @@ async def test_repeat_nested(hass):
"first": "{{ repeat.first }}",
"index": "{{ repeat.index }}",
"last": "{{ repeat.last }}",
"x": "{{ None if x is not defined else x }}",
},
},
{
@@ -916,6 +930,7 @@ async def test_repeat_nested(hass):
"first": "{{ repeat.first }}",
"index": "{{ repeat.index }}",
"last": "{{ repeat.last }}",
"x": "{{ None if x is not defined else x }}",
},
},
}
@@ -926,6 +941,7 @@ async def test_repeat_nested(hass):
"first": "{{ repeat.first }}",
"index": "{{ repeat.index }}",
"last": "{{ repeat.last }}",
"x": "{{ None if x is not defined else x }}",
},
},
],
@@ -934,7 +950,8 @@ async def test_repeat_nested(hass):
{
"event": event,
"event_data_template": {
"repeat": "{{ None if repeat is not defined else repeat }}"
"repeat": "{{ None if repeat is not defined else repeat }}",
"x": "{{ None if x is not defined else x }}",
},
},
]
@@ -945,21 +962,21 @@ async def test_repeat_nested(hass):
"homeassistant.helpers.condition._LOGGER.error",
side_effect=AssertionError("Template Error"),
):
await script_obj.async_run()
await script_obj.async_run(variables)

assert len(events) == 10
assert events[0].data == {"repeat": "None"}
assert events[-1].data == {"repeat": "None"}
assert events[0].data == first_last
assert events[-1].data == first_last
for index, result in enumerate(
(
("True", "1", "False"),
("True", "1", "False"),
("False", "2", "True"),
("True", "1", "False"),
("False", "2", "True"),
("True", "1", "False"),
("False", "2", "True"),
("False", "2", "True"),
("True", "1", "False", inside_x),
("True", "1", "False", inside_x),
("False", "2", "True", inside_x),
("True", "1", "False", inside_x),
("False", "2", "True", inside_x),
("True", "1", "False", inside_x),
("False", "2", "True", inside_x),
("False", "2", "True", inside_x),
),
1,
):
@@ -967,6 +984,7 @@ async def test_repeat_nested(hass):
"first": result[0],
"index": result[1],
"last": result[2],
"x": result[3],
}


@@ -998,13 +1016,38 @@ async def test_choose(hass, var, result):
)
script_obj = script.Script(hass, sequence)

await script_obj.async_run({"var": var})
await script_obj.async_run(MappingProxyType({"var": var}))
await hass.async_block_till_done()

assert len(events) == 1
assert events[0].data["choice"] == result


@pytest.mark.parametrize(
"action",
[
{"repeat": {"count": 1, "sequence": {"event": "abc"}}},
{"choose": {"conditions": [], "sequence": {"event": "abc"}}},
{"choose": [], "default": {"event": "abc"}},
],
)
async def test_multiple_runs_repeat_choose(hass, caplog, action):
"""Test parallel runs with repeat & choose actions & max_runs > default."""
max_runs = script.DEFAULT_MAX + 1
script_obj = script.Script(
hass, cv.SCRIPT_SCHEMA(action), script_mode="parallel", max_runs=max_runs
)

events = async_capture_events(hass, "abc")
for _ in range(max_runs):
hass.async_create_task(script_obj.async_run())
await hass.async_block_till_done()

assert "WARNING" not in caplog.text
assert "ERROR" not in caplog.text
assert len(events) == max_runs


async def test_last_triggered(hass):
"""Test the last_triggered."""
event = "test_event"
@@ -1408,9 +1408,62 @@ async def test_log_blocking_events(hass, caplog):
hass.async_create_task(_wait_a_bit_1())
await hass.async_block_till_done()

with patch.object(ha, "BLOCK_LOG_TIMEOUT", 0.00001):
with patch.object(ha, "BLOCK_LOG_TIMEOUT", 0.0001):
hass.async_create_task(_wait_a_bit_2())
await hass.async_block_till_done()

assert "_wait_a_bit_2" in caplog.text
assert "_wait_a_bit_1" not in caplog.text


async def test_chained_logging_hits_log_timeout(hass, caplog):
"""Ensure we log which task is blocking startup when there is a task chain and debug logging is on."""
caplog.set_level(logging.DEBUG)

created = 0

async def _task_chain_1():
nonlocal created
created += 1
if created > 10:
return
hass.async_create_task(_task_chain_2())

async def _task_chain_2():
nonlocal created
created += 1
if created > 10:
return
hass.async_create_task(_task_chain_1())

with patch.object(ha, "BLOCK_LOG_TIMEOUT", 0.0001):
hass.async_create_task(_task_chain_1())
await hass.async_block_till_done()

assert "_task_chain_" in caplog.text


async def test_chained_logging_misses_log_timeout(hass, caplog):
"""Ensure we do not log which task is blocking startup if we do not hit the timeout."""
caplog.set_level(logging.DEBUG)

created = 0

async def _task_chain_1():
nonlocal created
created += 1
if created > 10:
return
hass.async_create_task(_task_chain_2())

async def _task_chain_2():
nonlocal created
created += 1
if created > 10:
return
hass.async_create_task(_task_chain_1())

hass.async_create_task(_task_chain_1())
await hass.async_block_till_done()

assert "_task_chain_" not in caplog.text