diff --git a/homeassistant/components/media_extractor/manifest.json b/homeassistant/components/media_extractor/manifest.json index d7dd665b9d9..69c4b3782c2 100644 --- a/homeassistant/components/media_extractor/manifest.json +++ b/homeassistant/components/media_extractor/manifest.json @@ -2,7 +2,7 @@ "domain": "media_extractor", "name": "Media Extractor", "documentation": "https://www.home-assistant.io/integrations/media_extractor", - "requirements": ["youtube_dl==2021.04.26"], + "requirements": ["youtube_dl==2021.06.06"], "dependencies": ["media_player"], "codeowners": [], "quality_scale": "internal", diff --git a/homeassistant/components/modem_callerid/config_flow.py b/homeassistant/components/modem_callerid/config_flow.py index fbb68381c41..fd90f46d94a 100644 --- a/homeassistant/components/modem_callerid/config_flow.py +++ b/homeassistant/components/modem_callerid/config_flow.py @@ -62,6 +62,7 @@ class PhoneModemFlowHandler(config_entries.ConfigFlow, domain=DOMAIN): self, user_input: dict[str, Any] | None = None ) -> FlowResult: """Handle a flow initiated by the user.""" + errors: dict[str, str] | None = {} if self._async_in_progress(): return self.async_abort(reason="already_in_progress") ports = await self.hass.async_add_executor_job(serial.tools.list_ports.comports) @@ -88,7 +89,7 @@ class PhoneModemFlowHandler(config_entries.ConfigFlow, domain=DOMAIN): dev_path = await self.hass.async_add_executor_job( usb.get_serial_by_id, port.device ) - errors: dict | None = await self.validate_device_errors( + errors = await self.validate_device_errors( dev_path=dev_path, unique_id=_generate_unique_id(port) ) if errors is None: @@ -98,9 +99,7 @@ class PhoneModemFlowHandler(config_entries.ConfigFlow, domain=DOMAIN): ) user_input = user_input or {} schema = vol.Schema({vol.Required(CONF_DEVICE): vol.In(unused_ports)}) - return self.async_show_form( - step_id="user", data_schema=schema, errors=errors or {} - ) + return self.async_show_form(step_id="user", data_schema=schema, errors=errors) async def async_step_import(self, config: dict[str, Any]) -> FlowResult: """Import a config entry from configuration.yaml.""" diff --git a/homeassistant/components/netgear/config_flow.py b/homeassistant/components/netgear/config_flow.py index 62985c7104c..871cba5a95d 100644 --- a/homeassistant/components/netgear/config_flow.py +++ b/homeassistant/components/netgear/config_flow.py @@ -17,7 +17,14 @@ from homeassistant.const import ( from homeassistant.core import callback from homeassistant.data_entry_flow import FlowResult -from .const import CONF_CONSIDER_HOME, DEFAULT_CONSIDER_HOME, DEFAULT_NAME, DOMAIN +from .const import ( + CONF_CONSIDER_HOME, + DEFAULT_CONSIDER_HOME, + DEFAULT_NAME, + DOMAIN, + MODELS_V2, + ORBI_PORT, +) from .errors import CannotLoginException from .router import get_api @@ -133,8 +140,10 @@ class NetgearFlowHandler(config_entries.ConfigFlow, domain=DOMAIN): await self.async_set_unique_id(discovery_info[ssdp.ATTR_UPNP_SERIAL]) self._abort_if_unique_id_configured(updates=updated_data) - if device_url.port: - updated_data[CONF_PORT] = device_url.port + updated_data[CONF_PORT] = DEFAULT_PORT + for model in MODELS_V2: + if discovery_info.get(ssdp.ATTR_UPNP_MODEL_NUMBER, "").startswith(model): + updated_data[CONF_PORT] = ORBI_PORT self.placeholders.update(updated_data) self.discovered = True diff --git a/homeassistant/components/netgear/const.py b/homeassistant/components/netgear/const.py index bfcf76a6119..cba2d7ff875 100644 --- a/homeassistant/components/netgear/const.py +++ 
b/homeassistant/components/netgear/const.py @@ -29,6 +29,7 @@ MODELS_V2 = [ "SXR", "SXS", ] +ORBI_PORT = 80 # Icons DEVICE_ICONS = { diff --git a/homeassistant/components/recorder/__init__.py b/homeassistant/components/recorder/__init__.py index 1b090c331a7..7e9bab0ed4e 100644 --- a/homeassistant/components/recorder/__init__.py +++ b/homeassistant/components/recorder/__init__.py @@ -413,6 +413,7 @@ class Recorder(threading.Thread): self.async_migration_event = asyncio.Event() self.migration_in_progress = False self._queue_watcher = None + self._db_supports_row_number = True self.enabled = True @@ -972,6 +973,7 @@ class Recorder(threading.Thread): def setup_recorder_connection(dbapi_connection, connection_record): """Dbapi specific connection settings.""" setup_connection_for_dialect( + self, self.engine.dialect.name, dbapi_connection, not self._completed_first_database_setup, diff --git a/homeassistant/components/recorder/statistics.py b/homeassistant/components/recorder/statistics.py index d253d1e2275..200da8d192d 100644 --- a/homeassistant/components/recorder/statistics.py +++ b/homeassistant/components/recorder/statistics.py @@ -89,6 +89,13 @@ QUERY_STATISTICS_SUMMARY_SUM = [ .label("rownum"), ] +QUERY_STATISTICS_SUMMARY_SUM_LEGACY = [ + StatisticsShortTerm.metadata_id, + StatisticsShortTerm.last_reset, + StatisticsShortTerm.state, + StatisticsShortTerm.sum, +] + QUERY_STATISTIC_META = [ StatisticsMeta.id, StatisticsMeta.statistic_id, @@ -275,37 +282,81 @@ def compile_hourly_statistics( } # Get last hour's last sum - subquery = ( - session.query(*QUERY_STATISTICS_SUMMARY_SUM) - .filter(StatisticsShortTerm.start >= bindparam("start_time")) - .filter(StatisticsShortTerm.start < bindparam("end_time")) - .subquery() - ) - query = ( - session.query(subquery) - .filter(subquery.c.rownum == 1) - .order_by(subquery.c.metadata_id) - ) - stats = execute(query.params(start_time=start_time, end_time=end_time)) + if instance._db_supports_row_number: # pylint: disable=[protected-access] + subquery = ( + session.query(*QUERY_STATISTICS_SUMMARY_SUM) + .filter(StatisticsShortTerm.start >= bindparam("start_time")) + .filter(StatisticsShortTerm.start < bindparam("end_time")) + .subquery() + ) + query = ( + session.query(subquery) + .filter(subquery.c.rownum == 1) + .order_by(subquery.c.metadata_id) + ) + stats = execute(query.params(start_time=start_time, end_time=end_time)) - if stats: - for stat in stats: - metadata_id, start, last_reset, state, _sum, _ = stat - if metadata_id in summary: - summary[metadata_id].update( - { + if stats: + for stat in stats: + metadata_id, start, last_reset, state, _sum, _ = stat + if metadata_id in summary: + summary[metadata_id].update( + { + "last_reset": process_timestamp(last_reset), + "state": state, + "sum": _sum, + } + ) + else: + summary[metadata_id] = { + "start": start_time, + "last_reset": process_timestamp(last_reset), + "state": state, + "sum": _sum, + } + else: + baked_query = instance.hass.data[STATISTICS_SHORT_TERM_BAKERY]( + lambda session: session.query(*QUERY_STATISTICS_SUMMARY_SUM_LEGACY) + ) + + baked_query += lambda q: q.filter( + StatisticsShortTerm.start >= bindparam("start_time") + ) + baked_query += lambda q: q.filter( + StatisticsShortTerm.start < bindparam("end_time") + ) + baked_query += lambda q: q.order_by( + StatisticsShortTerm.metadata_id, StatisticsShortTerm.start.desc() + ) + + stats = execute( + baked_query(session).params(start_time=start_time, end_time=end_time) + ) + + if stats: + for metadata_id, group in groupby(stats, lambda stat: 
stat["metadata_id"]): # type: ignore + ( + metadata_id, + last_reset, + state, + _sum, + ) = next(group) + if metadata_id in summary: + summary[metadata_id].update( + { + "start": start_time, + "last_reset": process_timestamp(last_reset), + "state": state, + "sum": _sum, + } + ) + else: + summary[metadata_id] = { + "start": start_time, "last_reset": process_timestamp(last_reset), "state": state, "sum": _sum, } - ) - else: - summary[metadata_id] = { - "start": start_time, - "last_reset": process_timestamp(last_reset), - "state": state, - "sum": _sum, - } # Insert compiled hourly statistics in the database for metadata_id, stat in summary.items(): diff --git a/homeassistant/components/recorder/util.py b/homeassistant/components/recorder/util.py index 101915c7117..567164d4325 100644 --- a/homeassistant/components/recorder/util.py +++ b/homeassistant/components/recorder/util.py @@ -266,7 +266,18 @@ def execute_on_connection(dbapi_connection, statement): cursor.close() -def setup_connection_for_dialect(dialect_name, dbapi_connection, first_connection): +def query_on_connection(dbapi_connection, statement): + """Execute a single statement with a dbapi connection and return the result.""" + cursor = dbapi_connection.cursor() + cursor.execute(statement) + result = cursor.fetchall() + cursor.close() + return result + + +def setup_connection_for_dialect( + instance, dialect_name, dbapi_connection, first_connection +): """Execute statements needed for dialect connection.""" # Returns False if the the connection needs to be setup # on the next connection, returns True if the connection @@ -280,6 +291,13 @@ def setup_connection_for_dialect(dialect_name, dbapi_connection, first_connectio # WAL mode only needs to be setup once # instead of every time we open the sqlite connection # as its persistent and isn't free to call every time. 
+ result = query_on_connection(dbapi_connection, "SELECT sqlite_version()") + version = result[0][0] + major, minor, _patch = version.split(".", 2) + if int(major) == 3 and int(minor) < 25: + instance._db_supports_row_number = ( # pylint: disable=[protected-access] + False + ) # approximately 8MiB of memory execute_on_connection(dbapi_connection, "PRAGMA cache_size = -8192") @@ -289,6 +307,14 @@ def setup_connection_for_dialect(dialect_name, dbapi_connection, first_connectio if dialect_name == "mysql": execute_on_connection(dbapi_connection, "SET session wait_timeout=28800") + if first_connection: + result = query_on_connection(dbapi_connection, "SELECT VERSION()") + version = result[0][0] + major, minor, _patch = version.split(".", 2) + if int(major) == 5 and int(minor) < 8: + instance._db_supports_row_number = ( # pylint: disable=[protected-access] + False + ) def end_incomplete_runs(session, start_time): diff --git a/homeassistant/components/upnp/__init__.py b/homeassistant/components/upnp/__init__.py index 6db8b087378..d2d59d78c0e 100644 --- a/homeassistant/components/upnp/__init__.py +++ b/homeassistant/components/upnp/__init__.py @@ -198,6 +198,7 @@ class UpnpBinarySensorEntityDescription(BinarySensorEntityDescription): """A class that describes UPnP entities.""" format: str = "s" + unique_id: str | None = None @dataclass @@ -205,6 +206,7 @@ class UpnpSensorEntityDescription(SensorEntityDescription): """A class that describes a sensor UPnP entities.""" format: str = "s" + unique_id: str | None = None class UpnpDataUpdateCoordinator(DataUpdateCoordinator): @@ -250,7 +252,7 @@ class UpnpEntity(CoordinatorEntity): self._device = coordinator.device self.entity_description = entity_description self._attr_name = f"{coordinator.device.name} {entity_description.name}" - self._attr_unique_id = f"{coordinator.device.udn}_{entity_description.key}" + self._attr_unique_id = f"{coordinator.device.udn}_{entity_description.unique_id or entity_description.key}" self._attr_device_info = { "connections": {(dr.CONNECTION_UPNP, coordinator.device.udn)}, "name": coordinator.device.name, diff --git a/homeassistant/components/upnp/binary_sensor.py b/homeassistant/components/upnp/binary_sensor.py index 3bf9635c78b..c4e7264c34b 100644 --- a/homeassistant/components/upnp/binary_sensor.py +++ b/homeassistant/components/upnp/binary_sensor.py @@ -30,14 +30,16 @@ async def async_setup_entry( LOGGER.debug("Adding binary sensor") - async_add_entities( + entities = [ UpnpStatusBinarySensor( coordinator=coordinator, entity_description=entity_description, ) for entity_description in BINARYSENSOR_ENTITY_DESCRIPTIONS if coordinator.data.get(entity_description.key) is not None - ) + ] + LOGGER.debug("Adding entities: %s", entities) + async_add_entities(entities) class UpnpStatusBinarySensor(UpnpEntity, BinarySensorEntity): diff --git a/homeassistant/components/upnp/sensor.py b/homeassistant/components/upnp/sensor.py index 8ad8677b647..334dc9e8c22 100644 --- a/homeassistant/components/upnp/sensor.py +++ b/homeassistant/components/upnp/sensor.py @@ -15,6 +15,7 @@ from .const import ( DATA_RATE_PACKETS_PER_SECOND, DOMAIN, KIBIBYTE, + LOGGER, PACKETS_RECEIVED, PACKETS_SENT, ROUTER_IP, @@ -74,28 +75,32 @@ RAW_SENSORS: tuple[UpnpSensorEntityDescription, ...] = ( DERIVED_SENSORS: tuple[UpnpSensorEntityDescription, ...] 
= ( UpnpSensorEntityDescription( - key="KiB/sec_received", + key=BYTES_RECEIVED, + unique_id="KiB/sec_received", name=f"{DATA_RATE_KIBIBYTES_PER_SECOND} received", icon="mdi:server-network", native_unit_of_measurement=DATA_RATE_KIBIBYTES_PER_SECOND, format=".1f", ), UpnpSensorEntityDescription( - key="KiB/sent", + key=BYTES_SENT, + unique_id="KiB/sent", name=f"{DATA_RATE_KIBIBYTES_PER_SECOND} sent", icon="mdi:server-network", native_unit_of_measurement=DATA_RATE_KIBIBYTES_PER_SECOND, format=".1f", ), UpnpSensorEntityDescription( - key="packets/sec_received", + key=PACKETS_RECEIVED, + unique_id="packets/sec_received", name=f"{DATA_RATE_PACKETS_PER_SECOND} received", icon="mdi:server-network", native_unit_of_measurement=DATA_RATE_PACKETS_PER_SECOND, format=".1f", ), UpnpSensorEntityDescription( - key="packets/sent", + key=PACKETS_SENT, + unique_id="packets/sent", name=f"{DATA_RATE_PACKETS_PER_SECOND} sent", icon="mdi:server-network", native_unit_of_measurement=DATA_RATE_PACKETS_PER_SECOND, @@ -131,6 +136,7 @@ async def async_setup_entry( ] ) + LOGGER.debug("Adding entities: %s", entities) async_add_entities(entities) diff --git a/homeassistant/components/yamaha_musiccast/manifest.json b/homeassistant/components/yamaha_musiccast/manifest.json index be52b8a4558..f7751dfe859 100644 --- a/homeassistant/components/yamaha_musiccast/manifest.json +++ b/homeassistant/components/yamaha_musiccast/manifest.json @@ -4,7 +4,7 @@ "config_flow": true, "documentation": "https://www.home-assistant.io/integrations/yamaha_musiccast", "requirements": [ - "aiomusiccast==0.9.2" + "aiomusiccast==0.10.0" ], "ssdp": [ { diff --git a/homeassistant/components/yeelight/__init__.py b/homeassistant/components/yeelight/__init__.py index fb908775d1b..64fa7b01f28 100644 --- a/homeassistant/components/yeelight/__init__.py +++ b/homeassistant/components/yeelight/__init__.py @@ -26,10 +26,7 @@ from homeassistant.const import ( ) from homeassistant.core import HomeAssistant, callback import homeassistant.helpers.config_validation as cv -from homeassistant.helpers.dispatcher import ( - async_dispatcher_connect, - async_dispatcher_send, -) +from homeassistant.helpers.dispatcher import async_dispatcher_send from homeassistant.helpers.entity import DeviceInfo, Entity from homeassistant.helpers.event import async_call_later, async_track_time_interval from homeassistant.helpers.typing import ConfigType @@ -42,7 +39,6 @@ POWER_STATE_CHANGE_TIME = 1 # seconds DOMAIN = "yeelight" DATA_YEELIGHT = DOMAIN DATA_UPDATED = "yeelight_{}_data_updated" -DEVICE_INITIALIZED = "yeelight_{}_device_initialized" DEFAULT_NAME = "Yeelight" DEFAULT_TRANSITION = 350 @@ -203,24 +199,9 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: async def _async_initialize( hass: HomeAssistant, entry: ConfigEntry, - host: str, - device: YeelightDevice | None = None, + device: YeelightDevice, ) -> None: - entry_data = hass.data[DOMAIN][DATA_CONFIG_ENTRIES][entry.entry_id] = { - DATA_PLATFORMS_LOADED: False - } - - @callback - def _async_load_platforms(): - if entry_data[DATA_PLATFORMS_LOADED]: - return - entry_data[DATA_PLATFORMS_LOADED] = True - hass.config_entries.async_setup_platforms(entry, PLATFORMS) - - if not device: - # get device and start listening for local pushes - device = await _async_get_device(hass, host, entry) - + entry_data = hass.data[DOMAIN][DATA_CONFIG_ENTRIES][entry.entry_id] = {} await device.async_setup() entry_data[DATA_DEVICE] = device @@ -232,15 +213,9 @@ async def _async_initialize( entry, options={**entry.options, 
CONF_MODEL: device.capabilities["model"]} ) - entry.async_on_unload(entry.add_update_listener(_async_update_listener)) - entry.async_on_unload( - async_dispatcher_connect( - hass, DEVICE_INITIALIZED.format(host), _async_load_platforms - ) - ) - # fetch initial state - asyncio.create_task(device.async_update()) + await device.async_update() + entry.async_on_unload(entry.add_update_listener(_async_update_listener)) @callback @@ -256,7 +231,7 @@ def _async_normalize_config_entry(hass: HomeAssistant, entry: ConfigEntry) -> No entry, data={ CONF_HOST: entry.data.get(CONF_HOST), - CONF_ID: entry.data.get(CONF_ID, entry.unique_id), + CONF_ID: entry.data.get(CONF_ID) or entry.unique_id, }, options={ CONF_NAME: entry.data.get(CONF_NAME, ""), @@ -270,68 +245,44 @@ def _async_normalize_config_entry(hass: HomeAssistant, entry: ConfigEntry) -> No CONF_NIGHTLIGHT_SWITCH, DEFAULT_NIGHTLIGHT_SWITCH ), }, + unique_id=entry.unique_id or entry.data.get(CONF_ID), ) elif entry.unique_id and not entry.data.get(CONF_ID): hass.config_entries.async_update_entry( entry, data={CONF_HOST: entry.data.get(CONF_HOST), CONF_ID: entry.unique_id}, ) + elif entry.data.get(CONF_ID) and not entry.unique_id: + hass.config_entries.async_update_entry( + entry, + unique_id=entry.data[CONF_ID], + ) async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Set up Yeelight from a config entry.""" _async_normalize_config_entry(hass, entry) - if entry.data.get(CONF_HOST): - try: - device = await _async_get_device(hass, entry.data[CONF_HOST], entry) - except BULB_EXCEPTIONS as ex: - # Always retry later since bulbs can stop responding to SSDP - # sometimes even though they are online. If it has changed - # IP we will update it via discovery to the config flow - raise ConfigEntryNotReady from ex - else: - # Since device is passed this cannot throw an exception anymore - await _async_initialize(hass, entry, entry.data[CONF_HOST], device=device) - return True + if not entry.data.get(CONF_HOST): + bulb_id = async_format_id(entry.data.get(CONF_ID, entry.unique_id)) + raise ConfigEntryNotReady(f"Waiting for {bulb_id} to be discovered") - async def _async_from_discovery(capabilities: dict[str, str]) -> None: - host = urlparse(capabilities["location"]).hostname - try: - await _async_initialize(hass, entry, host) - except BULB_EXCEPTIONS: - _LOGGER.exception("Failed to connect to bulb at %s", host) + try: + device = await _async_get_device(hass, entry.data[CONF_HOST], entry) + await _async_initialize(hass, entry, device) + except BULB_EXCEPTIONS as ex: + raise ConfigEntryNotReady from ex + + hass.config_entries.async_setup_platforms(entry, PLATFORMS) - scanner = YeelightScanner.async_get(hass) - await scanner.async_register_callback(entry.data[CONF_ID], _async_from_discovery) return True async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Unload a config entry.""" - if entry.data.get(CONF_ID): - # discovery - scanner = YeelightScanner.async_get(hass) - scanner.async_unregister_callback(entry.data[CONF_ID]) - data_config_entries = hass.data[DOMAIN][DATA_CONFIG_ENTRIES] - if entry.entry_id not in data_config_entries: - # Device not online - return True - - entry_data = data_config_entries[entry.entry_id] - unload_ok = True - if entry_data[DATA_PLATFORMS_LOADED]: - unload_ok = await hass.config_entries.async_unload_platforms(entry, PLATFORMS) - - if DATA_DEVICE in entry_data: - device = entry_data[DATA_DEVICE] - _LOGGER.debug("Shutting down Yeelight Listener") - await 
device.bulb.async_stop_listening() - _LOGGER.debug("Yeelight Listener stopped") - data_config_entries.pop(entry.entry_id) - return unload_ok + return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) @callback @@ -380,7 +331,6 @@ class YeelightScanner: def __init__(self, hass: HomeAssistant) -> None: """Initialize class.""" self._hass = hass - self._callbacks = {} self._host_discovered_events = {} self._unique_id_capabilities = {} self._host_capabilities = {} @@ -391,7 +341,7 @@ class YeelightScanner: async def async_setup(self): """Set up the scanner.""" if self._connected_events: - await asyncio.gather(*(event.wait() for event in self._connected_events)) + await self._async_wait_connected() return for idx, source_ip in enumerate(await self._async_build_source_set()): @@ -434,9 +384,16 @@ class YeelightScanner: for listener in failed_listeners: self._listeners.remove(listener) - await asyncio.gather(*(event.wait() for event in self._connected_events)) + await self._async_wait_connected() + self._track_interval = async_track_time_interval( + self._hass, self.async_scan, DISCOVERY_INTERVAL + ) self.async_scan() + async def _async_wait_connected(self): + """Wait for the listeners to be up and connected.""" + await asyncio.gather(*(event.wait() for event in self._connected_events)) + async def _async_build_source_set(self) -> set[IPv4Address]: """Build the list of ssdp sources.""" adapters = await network.async_get_adapters(self._hass) @@ -453,6 +410,7 @@ class YeelightScanner: async def async_discover(self): """Discover bulbs.""" + _LOGGER.debug("Yeelight discover with interval %s", DISCOVERY_SEARCH_INTERVAL) await self.async_setup() for _ in range(DISCOVERY_ATTEMPTS): self.async_scan() @@ -513,45 +471,6 @@ class YeelightScanner: self._unique_id_capabilities[unique_id] = response for event in self._host_discovered_events.get(host, []): event.set() - if unique_id in self._callbacks: - self._hass.async_create_task(self._callbacks[unique_id](response)) - self._callbacks.pop(unique_id) - if not self._callbacks: - self._async_stop_scan() - - async def _async_start_scan(self): - """Start scanning for Yeelight devices.""" - _LOGGER.debug("Start scanning") - await self.async_setup() - if not self._track_interval: - self._track_interval = async_track_time_interval( - self._hass, self.async_scan, DISCOVERY_INTERVAL - ) - self.async_scan() - - @callback - def _async_stop_scan(self): - """Stop scanning.""" - if self._track_interval is None: - return - _LOGGER.debug("Stop scanning interval") - self._track_interval() - self._track_interval = None - - async def async_register_callback(self, unique_id, callback_func): - """Register callback function.""" - if capabilities := self._unique_id_capabilities.get(unique_id): - self._hass.async_create_task(callback_func(capabilities)) - return - self._callbacks[unique_id] = callback_func - await self._async_start_scan() - - @callback - def async_unregister_callback(self, unique_id): - """Unregister callback function.""" - self._callbacks.pop(unique_id, None) - if not self._callbacks: - self._async_stop_scan() def update_needs_bg_power_workaround(data): @@ -675,7 +594,6 @@ class YeelightDevice: self._available = True if not self._initialized: self._initialized = True - async_dispatcher_send(self._hass, DEVICE_INITIALIZED.format(self._host)) except BULB_NETWORK_EXCEPTIONS as ex: if self._available: # just inform once _LOGGER.error( @@ -725,9 +643,6 @@ class YeelightDevice: ): # On reconnect the properties may be out of sync # - # We need to make sure 
the DEVICE_INITIALIZED dispatcher is setup - # before we can update on reconnect by checking self._did_first_update - # # If the device drops the connection right away, we do not want to # do a property resync via async_update since its about # to be called when async_setup_entry reaches the end of the @@ -743,10 +658,7 @@ class YeelightEntity(Entity): def __init__(self, device: YeelightDevice, entry: ConfigEntry) -> None: """Initialize the entity.""" self._device = device - self._unique_id = entry.entry_id - if entry.unique_id is not None: - # Use entry unique id (device id) whenever possible - self._unique_id = entry.unique_id + self._unique_id = entry.unique_id or entry.entry_id @property def unique_id(self) -> str: @@ -794,12 +706,19 @@ async def _async_get_device( # register stop callback to shutdown listening for local pushes async def async_stop_listen_task(event): - """Stop listen thread.""" - _LOGGER.debug("Shutting down Yeelight Listener") + """Stop listen task.""" + _LOGGER.debug("Shutting down Yeelight Listener (stop event)") await device.bulb.async_stop_listening() + @callback + def _async_stop_listen_on_unload(): + """Stop listen task.""" + _LOGGER.debug("Shutting down Yeelight Listener (unload)") + hass.async_create_task(device.bulb.async_stop_listening()) + entry.async_on_unload( hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, async_stop_listen_task) ) + entry.async_on_unload(_async_stop_listen_on_unload) return device diff --git a/homeassistant/components/youless/manifest.json b/homeassistant/components/youless/manifest.json index 04a66d507ef..514c73fbd2c 100644 --- a/homeassistant/components/youless/manifest.json +++ b/homeassistant/components/youless/manifest.json @@ -3,7 +3,7 @@ "name": "YouLess", "config_flow": true, "documentation": "https://www.home-assistant.io/integrations/youless", - "requirements": ["youless-api==0.13"], + "requirements": ["youless-api==0.14"], "codeowners": ["@gjong"], "iot_class": "local_polling" } diff --git a/homeassistant/const.py b/homeassistant/const.py index 17e7abfdf53..2bd8a2e7719 100644 --- a/homeassistant/const.py +++ b/homeassistant/const.py @@ -5,7 +5,7 @@ from typing import Final MAJOR_VERSION: Final = 2021 MINOR_VERSION: Final = 10 -PATCH_VERSION: Final = "3" +PATCH_VERSION: Final = "4" __short_version__: Final = f"{MAJOR_VERSION}.{MINOR_VERSION}" __version__: Final = f"{__short_version__}.{PATCH_VERSION}" REQUIRED_PYTHON_VER: Final[tuple[int, int, int]] = (3, 8, 0) diff --git a/requirements_all.txt b/requirements_all.txt index dabaa995c00..bd074b3c2c0 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -216,7 +216,7 @@ aiolyric==1.0.7 aiomodernforms==0.1.8 # homeassistant.components.yamaha_musiccast -aiomusiccast==0.9.2 +aiomusiccast==0.10.0 # homeassistant.components.nanoleaf aionanoleaf==0.0.3 @@ -2465,10 +2465,10 @@ yeelight==0.7.7 yeelightsunflower==0.0.10 # homeassistant.components.youless -youless-api==0.13 +youless-api==0.14 # homeassistant.components.media_extractor -youtube_dl==2021.04.26 +youtube_dl==2021.06.06 # homeassistant.components.zengge zengge==0.2 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 84eba6ada81..d6e24b75288 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -143,7 +143,7 @@ aiolyric==1.0.7 aiomodernforms==0.1.8 # homeassistant.components.yamaha_musiccast -aiomusiccast==0.9.2 +aiomusiccast==0.10.0 # homeassistant.components.nanoleaf aionanoleaf==0.0.3 @@ -1406,7 +1406,7 @@ yalexs==1.1.13 yeelight==0.7.7 # 
homeassistant.components.youless -youless-api==0.13 +youless-api==0.14 # homeassistant.components.zeroconf zeroconf==0.36.8 diff --git a/tests/components/netgear/test_config_flow.py b/tests/components/netgear/test_config_flow.py index de4f4fba510..ad060b60d36 100644 --- a/tests/components/netgear/test_config_flow.py +++ b/tests/components/netgear/test_config_flow.py @@ -7,7 +7,7 @@ import pytest from homeassistant import data_entry_flow from homeassistant.components import ssdp -from homeassistant.components.netgear.const import CONF_CONSIDER_HOME, DOMAIN +from homeassistant.components.netgear.const import CONF_CONSIDER_HOME, DOMAIN, ORBI_PORT from homeassistant.config_entries import SOURCE_IMPORT, SOURCE_SSDP, SOURCE_USER from homeassistant.const import ( CONF_HOST, @@ -247,7 +247,7 @@ async def test_ssdp(hass, service): assert result["result"].unique_id == SERIAL assert result["title"] == TITLE assert result["data"].get(CONF_HOST) == HOST - assert result["data"].get(CONF_PORT) == PORT + assert result["data"].get(CONF_PORT) == ORBI_PORT assert result["data"].get(CONF_SSL) == SSL assert result["data"].get(CONF_USERNAME) == DEFAULT_USER assert result["data"][CONF_PASSWORD] == PASSWORD diff --git a/tests/components/recorder/test_util.py b/tests/components/recorder/test_util.py index f193993ffe5..8b5de5cff16 100644 --- a/tests/components/recorder/test_util.py +++ b/tests/components/recorder/test_util.py @@ -122,44 +122,88 @@ async def test_last_run_was_recently_clean(hass): ) -def test_setup_connection_for_dialect_mysql(): +@pytest.mark.parametrize( + "mysql_version, db_supports_row_number", + [ + ("10.0.0", True), + ("5.8.0", True), + ("5.7.0", False), + ], +) +def test_setup_connection_for_dialect_mysql(mysql_version, db_supports_row_number): """Test setting up the connection for a mysql dialect.""" - execute_mock = MagicMock() + instance_mock = MagicMock(_db_supports_row_number=True) + execute_args = [] close_mock = MagicMock() + def execute_mock(statement): + nonlocal execute_args + execute_args.append(statement) + + def fetchall_mock(): + nonlocal execute_args + if execute_args[-1] == "SELECT VERSION()": + return [[mysql_version]] + return None + def _make_cursor_mock(*_): - return MagicMock(execute=execute_mock, close=close_mock) + return MagicMock(execute=execute_mock, close=close_mock, fetchall=fetchall_mock) dbapi_connection = MagicMock(cursor=_make_cursor_mock) - util.setup_connection_for_dialect("mysql", dbapi_connection, True) + util.setup_connection_for_dialect(instance_mock, "mysql", dbapi_connection, True) - assert execute_mock.call_args[0][0] == "SET session wait_timeout=28800" + assert len(execute_args) == 2 + assert execute_args[0] == "SET session wait_timeout=28800" + assert execute_args[1] == "SELECT VERSION()" + + assert instance_mock._db_supports_row_number == db_supports_row_number -def test_setup_connection_for_dialect_sqlite(): +@pytest.mark.parametrize( + "sqlite_version, db_supports_row_number", + [ + ("3.25.0", True), + ("3.24.0", False), + ], +) +def test_setup_connection_for_dialect_sqlite(sqlite_version, db_supports_row_number): """Test setting up the connection for a sqlite dialect.""" - execute_mock = MagicMock() + instance_mock = MagicMock(_db_supports_row_number=True) + execute_args = [] close_mock = MagicMock() + def execute_mock(statement): + nonlocal execute_args + execute_args.append(statement) + + def fetchall_mock(): + nonlocal execute_args + if execute_args[-1] == "SELECT sqlite_version()": + return [[sqlite_version]] + return None + def 
_make_cursor_mock(*_): - return MagicMock(execute=execute_mock, close=close_mock) + return MagicMock(execute=execute_mock, close=close_mock, fetchall=fetchall_mock) dbapi_connection = MagicMock(cursor=_make_cursor_mock) - util.setup_connection_for_dialect("sqlite", dbapi_connection, True) + util.setup_connection_for_dialect(instance_mock, "sqlite", dbapi_connection, True) - assert len(execute_mock.call_args_list) == 3 - assert execute_mock.call_args_list[0][0][0] == "PRAGMA journal_mode=WAL" - assert execute_mock.call_args_list[1][0][0] == "PRAGMA cache_size = -8192" - assert execute_mock.call_args_list[2][0][0] == "PRAGMA foreign_keys=ON" + assert len(execute_args) == 4 + assert execute_args[0] == "PRAGMA journal_mode=WAL" + assert execute_args[1] == "SELECT sqlite_version()" + assert execute_args[2] == "PRAGMA cache_size = -8192" + assert execute_args[3] == "PRAGMA foreign_keys=ON" - execute_mock.reset_mock() - util.setup_connection_for_dialect("sqlite", dbapi_connection, False) + execute_args = [] + util.setup_connection_for_dialect(instance_mock, "sqlite", dbapi_connection, False) - assert len(execute_mock.call_args_list) == 2 - assert execute_mock.call_args_list[0][0][0] == "PRAGMA cache_size = -8192" - assert execute_mock.call_args_list[1][0][0] == "PRAGMA foreign_keys=ON" + assert len(execute_args) == 2 + assert execute_args[0] == "PRAGMA cache_size = -8192" + assert execute_args[1] == "PRAGMA foreign_keys=ON" + + assert instance_mock._db_supports_row_number == db_supports_row_number def test_basic_sanity_check(hass_recorder): diff --git a/tests/components/sensor/test_recorder.py b/tests/components/sensor/test_recorder.py index 9ae4b467da5..8a0da39cde3 100644 --- a/tests/components/sensor/test_recorder.py +++ b/tests/components/sensor/test_recorder.py @@ -1806,7 +1806,13 @@ def test_compile_hourly_statistics_changing_statistics( assert "Error while processing event StatisticsTask" not in caplog.text -def test_compile_statistics_hourly_summary(hass_recorder, caplog): +@pytest.mark.parametrize( + "db_supports_row_number,in_log,not_in_log", + [(True, "row_number", None), (False, None, "row_number")], +) +def test_compile_statistics_hourly_summary( + hass_recorder, caplog, db_supports_row_number, in_log, not_in_log +): """Test compiling hourly statistics.""" zero = dt_util.utcnow() zero = zero.replace(minute=0, second=0, microsecond=0) @@ -1815,6 +1821,7 @@ def test_compile_statistics_hourly_summary(hass_recorder, caplog): zero += timedelta(hours=1) hass = hass_recorder() recorder = hass.data[DATA_INSTANCE] + recorder._db_supports_row_number = db_supports_row_number setup_component(hass, "sensor", {}) attributes = { "device_class": None, @@ -2052,6 +2059,10 @@ def test_compile_statistics_hourly_summary(hass_recorder, caplog): end += timedelta(hours=1) assert stats == expected_stats assert "Error while processing event StatisticsTask" not in caplog.text + if in_log: + assert in_log in caplog.text + if not_in_log: + assert not_in_log not in caplog.text def record_states(hass, zero, entity_id, attributes, seq=None): diff --git a/tests/components/yeelight/test_init.py b/tests/components/yeelight/test_init.py index 3ad99fa34ac..7ddb2845ac8 100644 --- a/tests/components/yeelight/test_init.py +++ b/tests/components/yeelight/test_init.py @@ -111,7 +111,9 @@ async def test_ip_changes_id_missing_cannot_fallback(hass: HomeAssistant): async def test_setup_discovery(hass: HomeAssistant): """Test setting up Yeelight by discovery.""" - config_entry = MockConfigEntry(domain=DOMAIN, 
data=CONFIG_ENTRY_DATA) + config_entry = MockConfigEntry( + domain=DOMAIN, data={CONF_HOST: IP_ADDRESS, **CONFIG_ENTRY_DATA} + ) config_entry.add_to_hass(hass) mocked_bulb = _mocked_bulb() @@ -151,7 +153,9 @@ async def test_setup_discovery_with_manually_configured_network_adapter( hass: HomeAssistant, ): """Test setting up Yeelight by discovery with a manually configured network adapter.""" - config_entry = MockConfigEntry(domain=DOMAIN, data=CONFIG_ENTRY_DATA) + config_entry = MockConfigEntry( + domain=DOMAIN, data={CONF_HOST: IP_ADDRESS, **CONFIG_ENTRY_DATA} + ) config_entry.add_to_hass(hass) mocked_bulb = _mocked_bulb() @@ -205,7 +209,9 @@ async def test_setup_discovery_with_manually_configured_network_adapter_one_fail hass: HomeAssistant, caplog ): """Test setting up Yeelight by discovery with a manually configured network adapter with one that fails to bind.""" - config_entry = MockConfigEntry(domain=DOMAIN, data=CONFIG_ENTRY_DATA) + config_entry = MockConfigEntry( + domain=DOMAIN, data={CONF_HOST: IP_ADDRESS, **CONFIG_ENTRY_DATA} + ) config_entry.add_to_hass(hass) mocked_bulb = _mocked_bulb() @@ -268,7 +274,7 @@ async def test_unique_ids_device(hass: HomeAssistant): """Test Yeelight unique IDs from yeelight device IDs.""" config_entry = MockConfigEntry( domain=DOMAIN, - data={**CONFIG_ENTRY_DATA, CONF_NIGHTLIGHT_SWITCH: True}, + data={CONF_HOST: IP_ADDRESS, **CONFIG_ENTRY_DATA, CONF_NIGHTLIGHT_SWITCH: True}, unique_id=ID, ) config_entry.add_to_hass(hass) @@ -292,7 +298,8 @@ async def test_unique_ids_device(hass: HomeAssistant): async def test_unique_ids_entry(hass: HomeAssistant): """Test Yeelight unique IDs from entry IDs.""" config_entry = MockConfigEntry( - domain=DOMAIN, data={**CONFIG_ENTRY_DATA, CONF_NIGHTLIGHT_SWITCH: True} + domain=DOMAIN, + data={CONF_HOST: IP_ADDRESS, CONF_NIGHTLIGHT_SWITCH: True}, ) config_entry.add_to_hass(hass) @@ -357,18 +364,16 @@ async def test_async_listen_error_late_discovery(hass, caplog): await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() - assert config_entry.state is ConfigEntryState.LOADED - assert "Failed to connect to bulb at" in caplog.text - await hass.config_entries.async_unload(config_entry.entry_id) + assert config_entry.state is ConfigEntryState.SETUP_RETRY await hass.async_block_till_done() - - caplog.clear() + assert "Waiting for 0x15243f to be discovered" in caplog.text with _patch_discovery(), patch(f"{MODULE}.AsyncBulb", return_value=_mocked_bulb()): - await hass.config_entries.async_setup(config_entry.entry_id) + async_fire_time_changed(hass, dt_util.utcnow() + timedelta(minutes=5)) + await hass.async_block_till_done() + async_fire_time_changed(hass, dt_util.utcnow() + timedelta(minutes=10)) await hass.async_block_till_done() - assert "Failed to connect to bulb at" not in caplog.text assert config_entry.state is ConfigEntryState.LOADED assert config_entry.options[CONF_MODEL] == MODEL @@ -386,7 +391,7 @@ async def test_unload_before_discovery(hass, caplog): await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() - assert config_entry.state is ConfigEntryState.LOADED + assert config_entry.state is ConfigEntryState.SETUP_RETRY await hass.config_entries.async_unload(config_entry.entry_id) await hass.async_block_till_done() @@ -451,6 +456,31 @@ async def test_async_setup_with_missing_id(hass: HomeAssistant): assert config_entry.state is ConfigEntryState.LOADED +async def test_async_setup_with_missing_unique_id(hass: HomeAssistant): + """Test that 
setting adds the missing unique_id from CONF_ID.""" + config_entry = MockConfigEntry( + domain=DOMAIN, + data={CONF_HOST: "127.0.0.1", CONF_ID: ID}, + options={CONF_NAME: "Test name"}, + ) + config_entry.add_to_hass(hass) + + with _patch_discovery(), _patch_discovery_timeout(), _patch_discovery_interval(), patch( + f"{MODULE}.AsyncBulb", return_value=_mocked_bulb(cannot_connect=True) + ): + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + assert config_entry.state is ConfigEntryState.SETUP_RETRY + assert config_entry.unique_id == ID + + with _patch_discovery(), _patch_discovery_timeout(), _patch_discovery_interval(), patch( + f"{MODULE}.AsyncBulb", return_value=_mocked_bulb() + ): + async_fire_time_changed(hass, dt_util.utcnow() + timedelta(minutes=2)) + await hass.async_block_till_done() + assert config_entry.state is ConfigEntryState.LOADED + + async def test_connection_dropped_resyncs_properties(hass: HomeAssistant): """Test handling a connection drop results in a property resync.""" config_entry = MockConfigEntry(
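Review note on the recorder portion of this patch: the new `_db_supports_row_number` flag is set on the first database connection by probing the server version (`SELECT sqlite_version()` for SQLite, `SELECT VERSION()` for MySQL-style servers). SQLite gained window functions in 3.25.0, and the patch treats MySQL-style versions below 5.8 as lacking ROW_NUMBER(), which is what the parametrized tests exercise. The helper below is a minimal standalone sketch of that decision only; the function name and the asserts are illustrative, not part of the integration.

# Minimal standalone sketch (not the integration's helper) of the version
# probe the recorder changes rely on.  Name and asserts are illustrative only.
def _supports_row_number(dialect_name: str, version: str) -> bool:
    """Return True if ROW_NUMBER() window functions can be used.

    SQLite gained window functions in 3.25.0; the patch treats MySQL-style
    versions below 5.8 as unsupported and everything else as supported.
    """
    major, minor, _patch = version.split(".", 2)
    if dialect_name == "sqlite":
        return not (int(major) == 3 and int(minor) < 25)
    if dialect_name == "mysql":
        return not (int(major) == 5 and int(minor) < 8)
    return True


# Mirrors the parametrized cases in the updated recorder tests.
assert _supports_row_number("sqlite", "3.25.0") is True
assert _supports_row_number("sqlite", "3.24.0") is False
assert _supports_row_number("mysql", "5.7.0") is False
assert _supports_row_number("mysql", "10.0.0") is True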
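When that flag is False, compile_hourly_statistics falls back to the baked query added above: instead of a ROW_NUMBER() subquery it orders the short-term rows by (metadata_id, start DESC) and uses itertools.groupby, taking the first row of each group as the latest statistic per metadata_id. The snippet below is a standalone illustration of that pattern with synthetic data; the tuples and timestamps are made up and not taken from the integration.

# Standalone illustration (synthetic data, not Home Assistant code) of the
# legacy fallback: with rows already ordered by (metadata_id, start DESC),
# the first row of each itertools.groupby group is the latest entry.
from itertools import groupby
from operator import itemgetter

rows = [
    # (metadata_id, start, state, sum), ordered by metadata_id, then start DESC
    (1, "2021-10-26T02:55", 10.0, 100.0),
    (1, "2021-10-26T02:50", 9.5, 95.0),
    (2, "2021-10-26T02:55", 3.0, 30.0),
]

latest = {
    metadata_id: next(group)
    for metadata_id, group in groupby(rows, key=itemgetter(0))
}
assert latest[1] == (1, "2021-10-26T02:55", 10.0, 100.0)
assert latest[2] == (2, "2021-10-26T02:55", 3.0, 30.0)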