Merge pull request #66103 from home-assistant/rc

Authored by Paulus Schoutsen on 2022-02-08 14:48:49 -08:00; committed by GitHub
commit a869c1bc88
33 changed files with 542 additions and 185 deletions

View File

@@ -45,7 +45,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
raise ConfigEntryAuthFailed from err
except asyncio.TimeoutError as err:
raise ConfigEntryNotReady("Timed out connecting to august api") from err
except (ClientResponseError, CannotConnect) as err:
except (AugustApiAIOHTTPError, ClientResponseError, CannotConnect) as err:
raise ConfigEntryNotReady from err
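The pattern in this hunk is the standard config-entry setup contract: raising ConfigEntryNotReady makes Home Assistant retry setup later with backoff, while ConfigEntryAuthFailed starts a re-auth flow. A minimal sketch of that contract (TransientError and AuthError are hypothetical placeholders for a library's exceptions, client is a hypothetical API object):

    from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady

    async def async_setup_entry(hass, entry):
        try:
            await client.async_connect()  # hypothetical connection call
        except AuthError as err:  # placeholder: credentials rejected
            raise ConfigEntryAuthFailed from err  # triggers the re-auth flow
        except TransientError as err:  # placeholder: network/API hiccup
            raise ConfigEntryNotReady from err  # HA retries setup with backoff
        return True

Broadening the tuple to include AugustApiAIOHTTPError keeps that class of error on the retry path instead of failing setup with a traceback.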

View File

@@ -50,6 +50,7 @@ from homeassistant.const import (
CAST_APP_ID_HOMEASSISTANT_LOVELACE,
EVENT_HOMEASSISTANT_STOP,
STATE_IDLE,
STATE_OFF,
STATE_PAUSED,
STATE_PLAYING,
)
@@ -636,7 +637,7 @@ class CastDevice(MediaPlayerEntity):
return STATE_PLAYING
return STATE_IDLE
if self._chromecast is not None and self._chromecast.is_idle:
return STATE_IDLE
return STATE_OFF
return None
@property

View File

@@ -4,7 +4,7 @@
"config_flow": true,
"dependencies": ["network"],
"documentation": "https://www.home-assistant.io/integrations/flux_led",
"requirements": ["flux_led==0.28.21"],
"requirements": ["flux_led==0.28.22"],
"quality_scale": "platinum",
"codeowners": ["@icemanch", "@bdraco"],
"iot_class": "local_push",

View File

@@ -35,7 +35,7 @@ from homeassistant.const import (
Platform,
)
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity import ENTITY_CATEGORIES_SCHEMA
from homeassistant.helpers.entity import validate_entity_category
from .const import (
CONF_INVERT,
@@ -320,7 +320,7 @@ class BinarySensorSchema(KNXPlatformSchema):
),
vol.Optional(CONF_DEVICE_CLASS): BINARY_SENSOR_DEVICE_CLASSES_SCHEMA,
vol.Optional(CONF_RESET_AFTER): cv.positive_float,
vol.Optional(CONF_ENTITY_CATEGORY): ENTITY_CATEGORIES_SCHEMA,
vol.Optional(CONF_ENTITY_CATEGORY): validate_entity_category,
}
),
)
@@ -356,7 +356,7 @@ class ButtonSchema(KNXPlatformSchema):
vol.Exclusive(
CONF_TYPE, "length_or_type", msg=length_or_type_msg
): object,
vol.Optional(CONF_ENTITY_CATEGORY): ENTITY_CATEGORIES_SCHEMA,
vol.Optional(CONF_ENTITY_CATEGORY): validate_entity_category,
}
),
vol.Any(
@@ -500,7 +500,7 @@ class ClimateSchema(KNXPlatformSchema):
): vol.In(HVAC_MODES),
vol.Optional(CONF_MIN_TEMP): vol.Coerce(float),
vol.Optional(CONF_MAX_TEMP): vol.Coerce(float),
vol.Optional(CONF_ENTITY_CATEGORY): ENTITY_CATEGORIES_SCHEMA,
vol.Optional(CONF_ENTITY_CATEGORY): validate_entity_category,
}
),
)
@@ -555,7 +555,7 @@ class CoverSchema(KNXPlatformSchema):
vol.Optional(CONF_INVERT_POSITION, default=False): cv.boolean,
vol.Optional(CONF_INVERT_ANGLE, default=False): cv.boolean,
vol.Optional(CONF_DEVICE_CLASS): COVER_DEVICE_CLASSES_SCHEMA,
vol.Optional(CONF_ENTITY_CATEGORY): ENTITY_CATEGORIES_SCHEMA,
vol.Optional(CONF_ENTITY_CATEGORY): validate_entity_category,
}
),
)
@@ -618,7 +618,7 @@ class FanSchema(KNXPlatformSchema):
vol.Optional(CONF_OSCILLATION_ADDRESS): ga_list_validator,
vol.Optional(CONF_OSCILLATION_STATE_ADDRESS): ga_list_validator,
vol.Optional(CONF_MAX_STEP): cv.byte,
vol.Optional(CONF_ENTITY_CATEGORY): ENTITY_CATEGORIES_SCHEMA,
vol.Optional(CONF_ENTITY_CATEGORY): validate_entity_category,
}
)
@@ -722,7 +722,7 @@ class LightSchema(KNXPlatformSchema):
vol.Optional(CONF_MAX_KELVIN, default=DEFAULT_MAX_KELVIN): vol.All(
vol.Coerce(int), vol.Range(min=1)
),
vol.Optional(CONF_ENTITY_CATEGORY): ENTITY_CATEGORIES_SCHEMA,
vol.Optional(CONF_ENTITY_CATEGORY): validate_entity_category,
}
),
vol.Any(
@@ -802,7 +802,7 @@ class NumberSchema(KNXPlatformSchema):
vol.Optional(CONF_MAX): vol.Coerce(float),
vol.Optional(CONF_MIN): vol.Coerce(float),
vol.Optional(CONF_STEP): cv.positive_float,
vol.Optional(CONF_ENTITY_CATEGORY): ENTITY_CATEGORIES_SCHEMA,
vol.Optional(CONF_ENTITY_CATEGORY): validate_entity_category,
}
),
number_limit_sub_validator,
@@ -824,7 +824,7 @@ class SceneSchema(KNXPlatformSchema):
vol.Required(CONF_SCENE_NUMBER): vol.All(
vol.Coerce(int), vol.Range(min=1, max=64)
),
vol.Optional(CONF_ENTITY_CATEGORY): ENTITY_CATEGORIES_SCHEMA,
vol.Optional(CONF_ENTITY_CATEGORY): validate_entity_category,
}
)
@@ -855,7 +855,7 @@ class SelectSchema(KNXPlatformSchema):
],
vol.Required(KNX_ADDRESS): ga_list_validator,
vol.Optional(CONF_STATE_ADDRESS): ga_list_validator,
vol.Optional(CONF_ENTITY_CATEGORY): ENTITY_CATEGORIES_SCHEMA,
vol.Optional(CONF_ENTITY_CATEGORY): validate_entity_category,
}
),
select_options_sub_validator,
@@ -880,7 +880,7 @@ class SensorSchema(KNXPlatformSchema):
vol.Optional(CONF_STATE_CLASS): STATE_CLASSES_SCHEMA,
vol.Required(CONF_TYPE): sensor_type_validator,
vol.Required(CONF_STATE_ADDRESS): ga_list_validator,
vol.Optional(CONF_ENTITY_CATEGORY): ENTITY_CATEGORIES_SCHEMA,
vol.Optional(CONF_ENTITY_CATEGORY): validate_entity_category,
}
)
@@ -901,7 +901,7 @@ class SwitchSchema(KNXPlatformSchema):
vol.Optional(CONF_RESPOND_TO_READ, default=False): cv.boolean,
vol.Required(KNX_ADDRESS): ga_list_validator,
vol.Optional(CONF_STATE_ADDRESS): ga_list_validator,
vol.Optional(CONF_ENTITY_CATEGORY): ENTITY_CATEGORIES_SCHEMA,
vol.Optional(CONF_ENTITY_CATEGORY): validate_entity_category,
}
)
@@ -948,7 +948,7 @@ class WeatherSchema(KNXPlatformSchema):
vol.Optional(CONF_KNX_DAY_NIGHT_ADDRESS): ga_list_validator,
vol.Optional(CONF_KNX_AIR_PRESSURE_ADDRESS): ga_list_validator,
vol.Optional(CONF_KNX_HUMIDITY_ADDRESS): ga_list_validator,
vol.Optional(CONF_ENTITY_CATEGORY): ENTITY_CATEGORIES_SCHEMA,
vol.Optional(CONF_ENTITY_CATEGORY): validate_entity_category,
}
),
)

View File

@@ -44,7 +44,7 @@ from homeassistant.helpers import (
template,
)
from homeassistant.helpers.dispatcher import async_dispatcher_send
from homeassistant.helpers.entity import ENTITY_CATEGORIES_SCHEMA
from homeassistant.helpers.entity import validate_entity_category
from homeassistant.util.decorator import Registry
from .const import (
@@ -423,7 +423,7 @@ def _validate_state_class_sensor(value: dict):
vol.Optional(ATTR_SENSOR_STATE, default=None): vol.Any(
None, bool, str, int, float
),
vol.Optional(ATTR_SENSOR_ENTITY_CATEGORY): ENTITY_CATEGORIES_SCHEMA,
vol.Optional(ATTR_SENSOR_ENTITY_CATEGORY): validate_entity_category,
vol.Optional(ATTR_SENSOR_ICON, default="mdi:cellphone"): cv.icon,
vol.Optional(ATTR_SENSOR_STATE_CLASS): vol.In(SENSOSR_STATE_CLASSES),
},

View File

@@ -595,6 +595,8 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
)
)
debug_info.initialize(hass)
return True

View File

@@ -133,6 +133,10 @@ class MqttBinarySensor(MqttEntity, BinarySensorEntity, RestoreEntity):
self._expired = False
self._state = last_state.state
if self._expiration_trigger:
# We might have set up a trigger already after subscribing from
# super().async_added_to_hass()
self._expiration_trigger()
self._expiration_trigger = async_track_point_in_utc_time(
self.hass, self._value_is_expired, expiration_at
)
@@ -189,7 +193,6 @@ class MqttBinarySensor(MqttEntity, BinarySensorEntity, RestoreEntity):
# Reset old trigger
if self._expiration_trigger:
self._expiration_trigger()
self._expiration_trigger = None
# Set new trigger
expiration_at = dt_util.utcnow() + timedelta(seconds=expire_after)
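Both this hunk and the matching one in the MQTT sensor below rely on the same detail: async_track_point_in_utc_time returns a callable that cancels the scheduled listener, so any old trigger must be cancelled before a new one is stored, otherwise the orphaned trigger still fires and expires a freshly restored value. The pattern, as a minimal sketch (self._value_is_expired being the expiry callback):

    from homeassistant.helpers.event import async_track_point_in_utc_time

    if self._expiration_trigger:
        self._expiration_trigger()  # cancel the previously scheduled expiry
    self._expiration_trigger = async_track_point_in_utc_time(
        self.hass, self._value_is_expired, expiration_at
    )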

View File

@@ -15,6 +15,11 @@ DATA_MQTT_DEBUG_INFO = "mqtt_debug_info"
STORED_MESSAGES = 10
def initialize(hass: HomeAssistant):
"""Initialize MQTT debug info."""
hass.data[DATA_MQTT_DEBUG_INFO] = {"entities": {}, "triggers": {}}
def log_messages(
hass: HomeAssistant, entity_id: str
) -> Callable[[MessageCallbackType], MessageCallbackType]:
@@ -45,9 +50,7 @@ def log_messages(
def add_subscription(hass, message_callback, subscription):
"""Prepare debug data for subscription."""
if entity_id := getattr(message_callback, "__entity_id", None):
debug_info = hass.data.setdefault(
DATA_MQTT_DEBUG_INFO, {"entities": {}, "triggers": {}}
)
debug_info = hass.data[DATA_MQTT_DEBUG_INFO]
entity_info = debug_info["entities"].setdefault(
entity_id, {"subscriptions": {}, "discovery_data": {}}
)
@@ -76,9 +79,7 @@ def remove_subscription(hass, message_callback, subscription):
def add_entity_discovery_data(hass, discovery_data, entity_id):
"""Add discovery data."""
debug_info = hass.data.setdefault(
DATA_MQTT_DEBUG_INFO, {"entities": {}, "triggers": {}}
)
debug_info = hass.data[DATA_MQTT_DEBUG_INFO]
entity_info = debug_info["entities"].setdefault(
entity_id, {"subscriptions": {}, "discovery_data": {}}
)
@@ -93,14 +94,13 @@ def update_entity_discovery_data(hass, discovery_payload, entity_id):
def remove_entity_data(hass, entity_id):
"""Remove discovery data."""
hass.data[DATA_MQTT_DEBUG_INFO]["entities"].pop(entity_id)
if entity_id in hass.data[DATA_MQTT_DEBUG_INFO]["entities"]:
hass.data[DATA_MQTT_DEBUG_INFO]["entities"].pop(entity_id)
def add_trigger_discovery_data(hass, discovery_hash, discovery_data, device_id):
"""Add discovery data."""
debug_info = hass.data.setdefault(
DATA_MQTT_DEBUG_INFO, {"entities": {}, "triggers": {}}
)
debug_info = hass.data[DATA_MQTT_DEBUG_INFO]
debug_info["triggers"][discovery_hash] = {
"device_id": device_id,
"discovery_data": discovery_data,
@@ -126,9 +126,7 @@ async def info_for_device(hass, device_id):
entries = hass.helpers.entity_registry.async_entries_for_device(
entity_registry, device_id, include_disabled_entities=True
)
mqtt_debug_info = hass.data.setdefault(
DATA_MQTT_DEBUG_INFO, {"entities": {}, "triggers": {}}
)
mqtt_debug_info = hass.data[DATA_MQTT_DEBUG_INFO]
for entry in entries:
if entry.entity_id not in mqtt_debug_info["entities"]:
continue
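Taken together, these hunks replace lazy hass.data.setdefault(...) initialization with a single initialize() call during MQTT setup (the async_setup hunk above adds debug_info.initialize(hass)). Once the structure is guaranteed to exist, every accessor becomes a plain lookup; only removal needs a guard, because an entity may never have registered debug data. A sketch of the idiom, with DATA_KEY as a hypothetical stand-in for DATA_MQTT_DEBUG_INFO:

    DATA_KEY = "example_debug_info"  # hypothetical key for this sketch

    def initialize(hass):
        # Create the shared structure exactly once, at component setup
        hass.data[DATA_KEY] = {"entities": {}, "triggers": {}}

    def remove_entity_data(hass, entity_id):
        # Removal can race with discovery cleanup, so pop defensively;
        # the hunk spells this as an explicit membership check, and
        # dict.pop with a default is the equivalent one-liner
        hass.data[DATA_KEY]["entities"].pop(entity_id, None)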

View File

@@ -30,11 +30,11 @@ from homeassistant.helpers.dispatcher import (
async_dispatcher_send,
)
from homeassistant.helpers.entity import (
ENTITY_CATEGORIES_SCHEMA,
DeviceInfo,
Entity,
EntityCategory,
async_generate_entity_id,
validate_entity_category,
)
from homeassistant.helpers.typing import ConfigType
@@ -191,7 +191,7 @@ MQTT_ENTITY_COMMON_SCHEMA = MQTT_AVAILABILITY_SCHEMA.extend(
{
vol.Optional(CONF_DEVICE): MQTT_ENTITY_DEVICE_INFO_SCHEMA,
vol.Optional(CONF_ENABLED_BY_DEFAULT, default=True): cv.boolean,
vol.Optional(CONF_ENTITY_CATEGORY): ENTITY_CATEGORIES_SCHEMA,
vol.Optional(CONF_ENTITY_CATEGORY): validate_entity_category,
vol.Optional(CONF_ICON): cv.icon,
vol.Optional(CONF_JSON_ATTRS_TOPIC): valid_subscribe_topic,
vol.Optional(CONF_JSON_ATTRS_TEMPLATE): cv.template,
@@ -549,7 +549,6 @@ class MqttDiscoveryUpdate(Entity):
def _cleanup_discovery_on_remove(self) -> None:
"""Stop listening to signal and cleanup discovery data."""
if self._discovery_data and not self._removed_from_hass:
debug_info.remove_entity_data(self.hass, self.entity_id)
clear_discovery_hash(self.hass, self._discovery_data[ATTR_DISCOVERY_HASH])
self._removed_from_hass = True
@@ -677,6 +676,7 @@ class MqttEntity(
await MqttAttributes.async_will_remove_from_hass(self)
await MqttAvailability.async_will_remove_from_hass(self)
await MqttDiscoveryUpdate.async_will_remove_from_hass(self)
debug_info.remove_entity_data(self.hass, self.entity_id)
@staticmethod
@abstractmethod

View File

@@ -180,6 +180,10 @@ class MqttSensor(MqttEntity, SensorEntity, RestoreEntity):
self._expired = False
self._state = last_state.state
if self._expiration_trigger:
# We might have set up a trigger already after subscribing from
# super().async_added_to_hass()
self._expiration_trigger()
self._expiration_trigger = async_track_point_in_utc_time(
self.hass, self._value_is_expired, expiration_at
)
@@ -227,7 +231,6 @@ class MqttSensor(MqttEntity, SensorEntity, RestoreEntity):
# Reset old trigger
if self._expiration_trigger:
self._expiration_trigger()
self._expiration_trigger = None
# Set new trigger
expiration_at = dt_util.utcnow() + timedelta(seconds=expire_after)

View File

@@ -4,7 +4,7 @@
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/plex",
"requirements": [
"plexapi==4.9.1",
"plexapi==4.9.2",
"plexauth==0.0.6",
"plexwebsocket==0.0.13"
],

View File

@@ -101,15 +101,15 @@ def _create_index(instance, table_name, index_name):
"be patient!",
index_name,
)
try:
with session_scope(session=instance.get_session()) as session:
with session_scope(session=instance.get_session()) as session:
try:
connection = session.connection()
index.create(connection)
except (InternalError, OperationalError, ProgrammingError) as err:
raise_if_exception_missing_str(err, ["already exists", "duplicate"])
_LOGGER.warning(
"Index %s already exists on %s, continuing", index_name, table_name
)
except (InternalError, OperationalError, ProgrammingError) as err:
raise_if_exception_missing_str(err, ["already exists", "duplicate"])
_LOGGER.warning(
"Index %s already exists on %s, continuing", index_name, table_name
)
_LOGGER.debug("Finished creating %s", index_name)
@@ -129,19 +129,19 @@ def _drop_index(instance, table_name, index_name):
success = False
# Engines like DB2/Oracle
try:
with session_scope(session=instance.get_session()) as session:
with session_scope(session=instance.get_session()) as session:
try:
connection = session.connection()
connection.execute(text(f"DROP INDEX {index_name}"))
except SQLAlchemyError:
pass
else:
success = True
except SQLAlchemyError:
pass
else:
success = True
# Engines like SQLite, SQL Server
if not success:
try:
with session_scope(session=instance.get_session()) as session:
with session_scope(session=instance.get_session()) as session:
try:
connection = session.connection()
connection.execute(
text(
@@ -150,15 +150,15 @@ def _drop_index(instance, table_name, index_name):
)
)
)
except SQLAlchemyError:
pass
else:
success = True
except SQLAlchemyError:
pass
else:
success = True
if not success:
# Engines like MySQL, MS Access
try:
with session_scope(session=instance.get_session()) as session:
with session_scope(session=instance.get_session()) as session:
try:
connection = session.connection()
connection.execute(
text(
@@ -167,10 +167,10 @@ def _drop_index(instance, table_name, index_name):
)
)
)
except SQLAlchemyError:
pass
else:
success = True
except SQLAlchemyError:
pass
else:
success = True
if success:
_LOGGER.debug(
@@ -203,8 +203,8 @@ def _add_columns(instance, table_name, columns_def):
columns_def = [f"ADD {col_def}" for col_def in columns_def]
try:
with session_scope(session=instance.get_session()) as session:
with session_scope(session=instance.get_session()) as session:
try:
connection = session.connection()
connection.execute(
text(
@@ -214,14 +214,14 @@ def _add_columns(instance, table_name, columns_def):
)
)
return
except (InternalError, OperationalError, ProgrammingError):
# Some engines support adding all columns at once,
# this error is when they don't
_LOGGER.info("Unable to use quick column add. Adding 1 by 1")
except (InternalError, OperationalError, ProgrammingError):
# Some engines support adding all columns at once,
# this error is when they don't
_LOGGER.info("Unable to use quick column add. Adding 1 by 1")
for column_def in columns_def:
try:
with session_scope(session=instance.get_session()) as session:
with session_scope(session=instance.get_session()) as session:
try:
connection = session.connection()
connection.execute(
text(
@@ -230,13 +230,13 @@ def _add_columns(instance, table_name, columns_def):
)
)
)
except (InternalError, OperationalError, ProgrammingError) as err:
raise_if_exception_missing_str(err, ["already exists", "duplicate"])
_LOGGER.warning(
"Column %s already exists on %s, continuing",
column_def.split(" ")[1],
table_name,
)
except (InternalError, OperationalError, ProgrammingError) as err:
raise_if_exception_missing_str(err, ["already exists", "duplicate"])
_LOGGER.warning(
"Column %s already exists on %s, continuing",
column_def.split(" ")[1],
table_name,
)
def _modify_columns(instance, engine, table_name, columns_def):
@@ -271,8 +271,8 @@ def _modify_columns(instance, engine, table_name, columns_def):
else:
columns_def = [f"MODIFY {col_def}" for col_def in columns_def]
try:
with session_scope(session=instance.get_session()) as session:
with session_scope(session=instance.get_session()) as session:
try:
connection = session.connection()
connection.execute(
text(
@@ -282,12 +282,12 @@ def _modify_columns(instance, engine, table_name, columns_def):
)
)
return
except (InternalError, OperationalError):
_LOGGER.info("Unable to use quick column modify. Modifying 1 by 1")
except (InternalError, OperationalError):
_LOGGER.info("Unable to use quick column modify. Modifying 1 by 1")
for column_def in columns_def:
try:
with session_scope(session=instance.get_session()) as session:
with session_scope(session=instance.get_session()) as session:
try:
connection = session.connection()
connection.execute(
text(
@@ -296,10 +296,10 @@ def _modify_columns(instance, engine, table_name, columns_def):
)
)
)
except (InternalError, OperationalError):
_LOGGER.exception(
"Could not modify column %s in table %s", column_def, table_name
)
except (InternalError, OperationalError):
_LOGGER.exception(
"Could not modify column %s in table %s", column_def, table_name
)
def _update_states_table_with_foreign_key_options(instance, engine):
@@ -330,17 +330,17 @@ def _update_states_table_with_foreign_key_options(instance, engine):
)
for alter in alters:
try:
with session_scope(session=instance.get_session()) as session:
with session_scope(session=instance.get_session()) as session:
try:
connection = session.connection()
connection.execute(DropConstraint(alter["old_fk"]))
for fkc in states_key_constraints:
if fkc.column_keys == alter["columns"]:
connection.execute(AddConstraint(fkc))
except (InternalError, OperationalError):
_LOGGER.exception(
"Could not update foreign options in %s table", TABLE_STATES
)
except (InternalError, OperationalError):
_LOGGER.exception(
"Could not update foreign options in %s table", TABLE_STATES
)
def _drop_foreign_key_constraints(instance, engine, table, columns):
@ -361,16 +361,16 @@ def _drop_foreign_key_constraints(instance, engine, table, columns):
)
for drop in drops:
try:
with session_scope(session=instance.get_session()) as session:
with session_scope(session=instance.get_session()) as session:
try:
connection = session.connection()
connection.execute(DropConstraint(drop))
except (InternalError, OperationalError):
_LOGGER.exception(
"Could not drop foreign constraints in %s table on %s",
TABLE_STATES,
columns,
)
except (InternalError, OperationalError):
_LOGGER.exception(
"Could not drop foreign constraints in %s table on %s",
TABLE_STATES,
columns,
)
def _apply_update(instance, new_version, old_version): # noqa: C901
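Every hunk in this file makes the same move: the try/except shifts from wrapping the session_scope context manager to sitting inside it. The reason follows from the usual shape of such a helper; the recorder's session_scope is roughly this (a sketch under that assumption, not the verbatim helper):

    import logging
    from contextlib import contextmanager

    _LOGGER = logging.getLogger(__name__)

    @contextmanager
    def session_scope(*, session):
        try:
            yield session
            session.commit()
        except Exception as err:
            # any exception escaping the block is logged and rolled back
            _LOGGER.error("Error executing query: %s", err)
            session.rollback()
            raise
        finally:
            session.close()

With the try on the outside, a benign, expected failure such as "index already exists" escapes the with block first, so the scope logs it at error level and rolls the session back before the caller's except can downgrade it to a warning. Catching inside the block handles the error before the context manager ever sees it.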

View File

@@ -290,7 +290,7 @@ def _find_duplicates(
)
.filter(subquery.c.is_duplicate == 1)
.order_by(table.metadata_id, table.start, table.id.desc())
.limit(MAX_ROWS_TO_PURGE)
.limit(1000 * MAX_ROWS_TO_PURGE)
)
duplicates = execute(query)
original_as_dict = {}
@@ -343,12 +343,13 @@ def _delete_duplicates_from_table(
if not duplicate_ids:
break
all_non_identical_duplicates.extend(non_identical_duplicates)
deleted_rows = (
session.query(table)
.filter(table.id.in_(duplicate_ids))
.delete(synchronize_session=False)
)
total_deleted_rows += deleted_rows
for i in range(0, len(duplicate_ids), MAX_ROWS_TO_PURGE):
deleted_rows = (
session.query(table)
.filter(table.id.in_(duplicate_ids[i : i + MAX_ROWS_TO_PURGE]))
.delete(synchronize_session=False)
)
total_deleted_rows += deleted_rows
return (total_deleted_rows, all_non_identical_duplicates)
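These two hunks work together: _find_duplicates may now return many purge batches worth of rows (hence the raised query limit), and the delete runs in slices because a single IN () clause with thousands of ids can exceed the database's bind-parameter limit (SQLite's default cap has historically been 999 parameters, which MAX_ROWS_TO_PURGE stays under). The slicing idiom as a standalone sketch, assuming a SQLAlchemy session and mapped table:

    MAX_ROWS_TO_PURGE = 998  # assumption: just below SQLite's 999-parameter default

    def delete_in_chunks(session, table, ids):
        """Delete rows by id without overflowing the IN () parameter limit."""
        total = 0
        for i in range(0, len(ids), MAX_ROWS_TO_PURGE):
            total += (
                session.query(table)
                .filter(table.id.in_(ids[i : i + MAX_ROWS_TO_PURGE]))
                .delete(synchronize_session=False)
            )
        return total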

View File

@@ -399,13 +399,20 @@ class SonosSpeaker:
return_exceptions=True,
)
for result in results:
if isinstance(result, Exception):
_LOGGER.debug(
"Unsubscribe failed for %s: %s",
self.zone_name,
result,
exc_info=result,
)
if isinstance(result, asyncio.exceptions.TimeoutError):
message = "Request timed out"
exc_info = None
elif isinstance(result, Exception):
message = result
exc_info = result if not str(result) else None
else:
continue
_LOGGER.debug(
"Unsubscribe failed for %s: %s",
self.zone_name,
message,
exc_info=exc_info,
)
self._subscriptions = []
@callback
@@ -422,19 +429,18 @@ class SonosSpeaker:
if not self.available:
return
if getattr(exception, "status", None) == 412:
_LOGGER.warning(
"Subscriptions for %s failed, speaker may have lost power",
self.zone_name,
)
if isinstance(exception, asyncio.exceptions.TimeoutError):
message = "Request timed out"
exc_info = None
else:
exc_info = exception if _LOGGER.isEnabledFor(logging.DEBUG) else None
_LOGGER.error(
"Subscription renewals for %s failed: %s",
self.zone_name,
exception,
exc_info=exc_info,
)
message = exception
exc_info = exception if not str(exception) else None
_LOGGER.warning(
"Subscription renewals for %s failed, marking unavailable: %s",
self.zone_name,
message,
exc_info=exc_info,
)
await self.async_offline()
@callback
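Both hunks apply the same logging refinement: asyncio.TimeoutError usually stringifies to an empty string, so logging the exception directly produces messages like "Unsubscribe failed for Kitchen: ". The fix substitutes readable text for timeouts and attaches exc_info only when the exception carries no message of its own. As a reusable sketch of that idea:

    import asyncio

    def describe_exception(exc):
        """Return (message, exc_info) for logging an async failure."""
        if isinstance(exc, asyncio.TimeoutError):
            return "Request timed out", None  # str(TimeoutError()) is ""
        # keep the traceback only when the message alone says nothing
        return exc, exc if not str(exc) else None

    # usage sketch:
    # message, exc_info = describe_exception(result)
    # _LOGGER.debug("Unsubscribe failed for %s: %s", zone, message, exc_info=exc_info)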

View File

@@ -161,6 +161,21 @@ class TodSensor(BinarySensorEntity):
self._time_before = before_event_date
# We are calculating the _time_after value assuming that it will happen today
# But that is not always true, e.g. after 23:00, before 12:00 and now is 10:00
# If _time_before and _time_after are ahead of nowutc:
# _time_before is set to 12:00 next day
# _time_after is set to 23:00 today
# nowutc is set to 10:00 today
if (
not is_sun_event(self._after)
and self._time_after > nowutc
and self._time_before > nowutc + timedelta(days=1)
):
# remove one day from _time_before and _time_after
self._time_after -= timedelta(days=1)
self._time_before -= timedelta(days=1)
# Add offset to utc boundaries according to the configuration
self._time_after += self._after_offset
self._time_before += self._before_offset
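The guard can be checked with plain datetime arithmetic. Using the comment's example (after 23:00, before 12:00, now 10:00), the naive calculation puts both boundaries in the future, and both must shift back one day for "now" to fall inside the window:

    from datetime import datetime, timedelta

    now = datetime(2019, 1, 10, 10, 0)          # 10:00 today
    time_after = datetime(2019, 1, 10, 23, 0)   # computed as 23:00 today
    time_before = datetime(2019, 1, 11, 12, 0)  # rolls over to 12:00 tomorrow

    if time_after > now and time_before > now + timedelta(days=1):
        time_after -= timedelta(days=1)   # 23:00 yesterday
        time_before -= timedelta(days=1)  # 12:00 today

    assert time_after <= now < time_before  # the sensor reports ON at 10:00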

View File

@@ -12,7 +12,7 @@ from async_upnp_client.exceptions import UpnpError
from async_upnp_client.profiles.igd import IgdDevice
from homeassistant.components import ssdp
from homeassistant.components.ssdp import SsdpChange
from homeassistant.components.ssdp import SsdpChange, SsdpServiceInfo
from homeassistant.core import HomeAssistant
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator
@@ -71,19 +71,22 @@ class Device:
return device
async def async_ssdp_callback(
self, headers: Mapping[str, Any], change: SsdpChange
self, service_info: SsdpServiceInfo, change: SsdpChange
) -> None:
"""SSDP callback, update if needed."""
_LOGGER.debug("SSDP Callback, change: %s, headers: %s", change, headers)
if ssdp.ATTR_SSDP_LOCATION not in headers:
_LOGGER.debug(
"SSDP Callback, change: %s, headers: %s", change, service_info.ssdp_headers
)
if service_info.ssdp_location is None:
return
location = headers[ssdp.ATTR_SSDP_LOCATION]
device = self._igd_device.device
if location == device.device_url:
if service_info.ssdp_location == device.device_url:
return
new_upnp_device = await async_create_upnp_device(self.hass, location)
new_upnp_device = await async_create_upnp_device(
self.hass, service_info.ssdp_location
)
device.reinit(new_upnp_device)
@property
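The callback now receives a typed SsdpServiceInfo instead of a raw header mapping, so the location check becomes an attribute access with None signalling absence. A sketch of constructing and consuming one, mirroring the updated test near the end of this commit:

    from homeassistant.components.ssdp import SsdpServiceInfo

    service_info = SsdpServiceInfo(
        ssdp_usn="mock_usn",
        ssdp_st="mock_st",
        ssdp_location="http://192.168.1.1:12345/desc.xml",
        upnp={},
    )

    if service_info.ssdp_location is None:
        ...  # advertisement without a location: nothing to reconnect to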

View File

@@ -3,7 +3,6 @@ from __future__ import annotations
from typing import Any
from pyhaversion.consts import HaVersionChannel, HaVersionSource
import voluptuous as vol
from homeassistant import config_entries
@@ -75,8 +74,8 @@ class ConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):
self._entry_data.update(user_input)
if not self.show_advanced_options or user_input[CONF_SOURCE] in (
HaVersionSource.LOCAL,
HaVersionSource.HAIO,
"local",
"haio",
):
return self.async_create_entry(
title=self._config_entry_name,
@@ -92,8 +91,8 @@ class ConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):
"""Handle the version_source step."""
if user_input is None:
if self._entry_data[CONF_SOURCE] in (
HaVersionSource.SUPERVISOR,
HaVersionSource.CONTAINER,
"supervisor",
"container",
):
data_schema = vol.Schema(
{
@@ -102,7 +101,7 @@ class ConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):
): vol.In(VALID_CHANNELS),
}
)
if self._entry_data[CONF_SOURCE] == HaVersionSource.SUPERVISOR:
if self._entry_data[CONF_SOURCE] == "supervisor":
data_schema = data_schema.extend(
{
vol.Required(CONF_IMAGE, default=DEFAULT_IMAGE): vol.In(
@@ -151,7 +150,7 @@ class ConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):
@property
def _config_entry_name(self) -> str:
"""Return the name of the config entry."""
if self._entry_data[CONF_SOURCE] == HaVersionSource.LOCAL:
if self._entry_data[CONF_SOURCE] == "local":
return DEFAULT_NAME_CURRENT
name = self._entry_data[CONF_VERSION_SOURCE]
@@ -166,21 +165,21 @@ def _convert_imported_configuration(config: dict[str, Any]) -> Any:
"""Convert a key from the imported configuration."""
data = DEFAULT_CONFIGURATION.copy()
if config.get(CONF_BETA):
data[CONF_CHANNEL] = HaVersionChannel.BETA
data[CONF_CHANNEL] = "beta"
if (source := config.get(CONF_SOURCE)) and source != DEFAULT_SOURCE:
if source == SOURCE_HASSIO:
data[CONF_SOURCE] = HaVersionSource.SUPERVISOR
data[CONF_SOURCE] = "supervisor"
data[CONF_VERSION_SOURCE] = VERSION_SOURCE_VERSIONS
elif source == SOURCE_DOKCER:
data[CONF_SOURCE] = HaVersionSource.CONTAINER
data[CONF_SOURCE] = "container"
data[CONF_VERSION_SOURCE] = VERSION_SOURCE_DOCKER_HUB
else:
data[CONF_SOURCE] = source
data[CONF_VERSION_SOURCE] = VERSION_SOURCE_MAP_INVERTED[source]
if (image := config.get(CONF_IMAGE)) and image != DEFAULT_IMAGE:
if data[CONF_SOURCE] == HaVersionSource.CONTAINER:
if data[CONF_SOURCE] == "container":
data[CONF_IMAGE] = f"{config[CONF_IMAGE]}{POSTFIX_CONTAINER_NAME}"
else:
data[CONF_IMAGE] = config[CONF_IMAGE]
@@ -188,7 +187,7 @@ def _convert_imported_configuration(config: dict[str, Any]) -> Any:
if (name := config.get(CONF_NAME)) and name != DEFAULT_NAME:
data[CONF_NAME] = config[CONF_NAME]
else:
if data[CONF_SOURCE] == HaVersionSource.LOCAL:
if data[CONF_SOURCE] == "local":
data[CONF_NAME] = DEFAULT_NAME_CURRENT
else:
data[CONF_NAME] = DEFAULT_NAME_LATEST
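Config entry data round-trips through JSON and comes back as plain strings, so comparing stored values against enum members is only safe when the enum subclasses str; replacing the HaVersionSource members with their literal values makes these comparisons independent of how pyhaversion defines the enum. The pitfall in miniature (Source and PlainSource are hypothetical stand-ins):

    from enum import Enum

    class Source(str, Enum):   # str mixin: string comparison works
        LOCAL = "local"

    class PlainSource(Enum):   # no mixin: string comparison silently fails
        LOCAL = "local"

    assert "local" == Source.LOCAL
    assert "local" != PlainSource.LOCAL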

View File

@@ -41,12 +41,12 @@ VERSION_SOURCE_VERSIONS: Final = "Home Assistant Versions"
DEFAULT_BETA: Final = False
DEFAULT_BOARD: Final = "OVA"
DEFAULT_CHANNEL: Final[HaVersionChannel] = HaVersionChannel.STABLE
DEFAULT_CHANNEL: Final = "stable"
DEFAULT_IMAGE: Final = "default"
DEFAULT_NAME_CURRENT: Final = "Current Version"
DEFAULT_NAME_LATEST: Final = "Latest Version"
DEFAULT_NAME: Final = ""
DEFAULT_SOURCE: Final[HaVersionSource] = HaVersionSource.LOCAL
DEFAULT_SOURCE: Final = "local"
DEFAULT_CONFIGURATION: Final[dict[str, Any]] = {
CONF_NAME: DEFAULT_NAME,
CONF_CHANNEL: DEFAULT_CHANNEL,
@@ -81,22 +81,22 @@ BOARD_MAP: Final[dict[str, str]] = {
VALID_BOARDS: Final[list[str]] = list(BOARD_MAP)
VERSION_SOURCE_MAP: Final[dict[str, HaVersionSource]] = {
VERSION_SOURCE_LOCAL: HaVersionSource.LOCAL,
VERSION_SOURCE_VERSIONS: HaVersionSource.SUPERVISOR,
VERSION_SOURCE_HAIO: HaVersionSource.HAIO,
VERSION_SOURCE_DOCKER_HUB: HaVersionSource.CONTAINER,
VERSION_SOURCE_PYPI: HaVersionSource.PYPI,
VERSION_SOURCE_MAP: Final[dict[str, str]] = {
VERSION_SOURCE_LOCAL: "local",
VERSION_SOURCE_VERSIONS: "supervisor",
VERSION_SOURCE_HAIO: "haio",
VERSION_SOURCE_DOCKER_HUB: "container",
VERSION_SOURCE_PYPI: "pypi",
}
VERSION_SOURCE_MAP_INVERTED: Final[dict[HaVersionSource, str]] = {
VERSION_SOURCE_MAP_INVERTED: Final[dict[str, str]] = {
value: key for key, value in VERSION_SOURCE_MAP.items()
}
VALID_SOURCES: Final[list[str]] = HA_VERSION_SOURCES + [
SOURCE_HASSIO, # Kept to not break existing configurations
SOURCE_DOKCER, # Kept to not break existing configurations
"hassio", # Kept to not break existing configurations
"docker", # Kept to not break existing configurations
]
VALID_IMAGES: Final = [

View File

@@ -0,0 +1,56 @@
"""Provides diagnostics for Version."""
from __future__ import annotations
from typing import Any
from attr import asdict
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.helpers import device_registry as dr, entity_registry as er
from .const import DOMAIN
async def async_get_config_entry_diagnostics(
hass: HomeAssistant,
config_entry: ConfigEntry,
) -> dict[str, Any]:
"""Return diagnostics for a config entry."""
coordinator = hass.data[DOMAIN][config_entry.entry_id]
device_registry = dr.async_get(hass)
entity_registry = er.async_get(hass)
devices = []
registry_devices = dr.async_entries_for_config_entry(
device_registry, config_entry.entry_id
)
for device in registry_devices:
entities = []
registry_entities = er.async_entries_for_device(
entity_registry,
device_id=device.id,
include_disabled_entities=True,
)
for entity in registry_entities:
state_dict = None
if state := hass.states.get(entity.entity_id):
state_dict = dict(state.as_dict())
state_dict.pop("context", None)
entities.append({"entry": asdict(entity), "state": state_dict})
devices.append({"device": asdict(device), "entities": entities})
return {
"entry": config_entry.as_dict(),
"coordinator_data": {
"version": coordinator.version,
"version_data": coordinator.version_data,
},
"devices": devices,
}
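This new file is a diagnostics platform: Home Assistant looks for a diagnostics.py module exposing async_get_config_entry_diagnostics and calls it when a user downloads diagnostics for the entry, so the returned structure must be JSON-serializable. The test added later in this commit exercises it through the shared helper:

    # from the test suite below: fetch the payload over the diagnostics
    # HTTP API for a set-up config entry
    diagnostics = await get_diagnostics_for_config_entry(hass, hass_client, config_entry)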

View File

@@ -133,6 +133,7 @@ APPLICATION_VERSION = "application_version"
MAX_INCLUSION_REQUEST_INTERVAL = "max_inclusion_request_interval"
UUID = "uuid"
SUPPORTED_PROTOCOLS = "supported_protocols"
ADDITIONAL_PROPERTIES = "additional_properties"
FEATURE = "feature"
UNPROVISION = "unprovision"
@@ -170,6 +171,7 @@ def convert_qr_provisioning_information(info: dict) -> QRProvisioningInformation
max_inclusion_request_interval=info.get(MAX_INCLUSION_REQUEST_INTERVAL),
uuid=info.get(UUID),
supported_protocols=protocols if protocols else None,
additional_properties=info.get(ADDITIONAL_PROPERTIES, {}),
)
return info
@@ -212,6 +214,7 @@ QR_PROVISIONING_INFORMATION_SCHEMA = vol.All(
cv.ensure_list,
[vol.Coerce(Protocols)],
),
vol.Optional(ADDITIONAL_PROPERTIES): dict,
}
),
convert_qr_provisioning_information,
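The schema change and the converter move together: vol.Optional(ADDITIONAL_PROPERTIES): dict lets a websocket payload carry arbitrary extra QR-code fields, and info.get(ADDITIONAL_PROPERTIES, {}) defaults them so QRProvisioningInformation always receives a dict. The .get default is the key detail; a minimal illustration:

    info = {"version": 0}                        # payload without extras
    extras = info.get("additional_properties", {})
    assert extras == {}                          # constructor gets {} rather than None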

View File

@@ -7,7 +7,7 @@ from .backports.enum import StrEnum
MAJOR_VERSION: Final = 2022
MINOR_VERSION: Final = 2
PATCH_VERSION: Final = "3"
PATCH_VERSION: Final = "4"
__short_version__: Final = f"{MAJOR_VERSION}.{MINOR_VERSION}"
__version__: Final = f"{__short_version__}.{PATCH_VERSION}"
REQUIRED_PYTHON_VER: Final[tuple[int, int, int]] = (3, 9, 0)

View File

@@ -11,7 +11,7 @@ import logging
import math
import sys
from timeit import default_timer as timer
from typing import Any, Final, Literal, TypedDict, final
from typing import Any, Literal, TypedDict, final
import voluptuous as vol
@@ -58,7 +58,13 @@ SOURCE_PLATFORM_CONFIG = "platform_config"
FLOAT_PRECISION = abs(int(math.floor(math.log10(abs(sys.float_info.epsilon))))) - 1
ENTITY_CATEGORIES_SCHEMA: Final = vol.In(ENTITY_CATEGORIES)
def validate_entity_category(value: Any | None) -> EntityCategory:
"""Validate entity category configuration."""
value = vol.In(ENTITY_CATEGORIES)(value)
return EntityCategory(value)
ENTITY_CATEGORIES_SCHEMA = validate_entity_category
@callback
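This hunk is the root of the ENTITY_CATEGORIES_SCHEMA churn across the knx, mobile_app and mqtt files above: the old schema validated the raw string, while the new callable validates and converts to the EntityCategory enum, with the old name kept as an alias for compatibility. Expected behavior, assuming the module layout shown in the hunk:

    from homeassistant.helpers.entity import EntityCategory, validate_entity_category

    assert validate_entity_category("config") is EntityCategory.CONFIG
    # invalid values still raise voluptuous' vol.Invalid, as the tests at
    # the end of this commit assert for None and "non_existing"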

View File

@@ -681,7 +681,7 @@ fjaraskupan==1.0.2
flipr-api==1.4.1
# homeassistant.components.flux_led
flux_led==0.28.21
flux_led==0.28.22
# homeassistant.components.homekit
fnvhash==0.1.0
@@ -1255,7 +1255,7 @@ pillow==9.0.1
pizzapi==0.0.3
# homeassistant.components.plex
plexapi==4.9.1
plexapi==4.9.2
# homeassistant.components.plex
plexauth==0.0.6

View File

@@ -427,7 +427,7 @@ fjaraskupan==1.0.2
flipr-api==1.4.1
# homeassistant.components.flux_led
flux_led==0.28.21
flux_led==0.28.22
# homeassistant.components.homekit
fnvhash==0.1.0
@@ -774,7 +774,7 @@ pilight==0.1.1
pillow==9.0.1
# homeassistant.components.plex
plexapi==4.9.1
plexapi==4.9.2
# homeassistant.components.plex
plexauth==0.0.6

View File

@@ -1,6 +1,6 @@
[metadata]
name = homeassistant
version = 2022.2.3
version = 2022.2.4
author = The Home Assistant Authors
author_email = hello@home-assistant.io
license = Apache-2.0

View File

@@ -595,7 +595,7 @@ async def test_entity_availability(hass: HomeAssistant):
conn_status_cb(connection_status)
await hass.async_block_till_done()
state = hass.states.get(entity_id)
assert state.state == "idle"
assert state.state == "off"
connection_status = MagicMock()
connection_status.status = "DISCONNECTED"
@@ -624,7 +624,7 @@ async def test_entity_cast_status(hass: HomeAssistant):
state = hass.states.get(entity_id)
assert state is not None
assert state.name == "Speaker"
assert state.state == "idle"
assert state.state == "off"
assert entity_id == reg.async_get_entity_id("media_player", "cast", str(info.uuid))
# No media status, pause, play, stop not supported
@@ -642,8 +642,8 @@ async def test_entity_cast_status(hass: HomeAssistant):
cast_status_cb(cast_status)
await hass.async_block_till_done()
state = hass.states.get(entity_id)
# Volume not hidden even if no app is active
assert state.attributes.get("volume_level") == 0.5
# Volume hidden if no app is active
assert state.attributes.get("volume_level") is None
assert not state.attributes.get("is_volume_muted")
chromecast.app_id = "1234"
@@ -747,7 +747,7 @@ async def test_supported_features(
state = hass.states.get(entity_id)
assert state is not None
assert state.name == "Speaker"
assert state.state == "idle"
assert state.state == "off"
assert state.attributes.get("supported_features") == supported_features_no_media
media_status = MagicMock(images=None)
@@ -882,7 +882,7 @@ async def test_entity_play_media(hass: HomeAssistant, quick_play_mock):
state = hass.states.get(entity_id)
assert state is not None
assert state.name == "Speaker"
assert state.state == "idle"
assert state.state == "off"
assert entity_id == reg.async_get_entity_id("media_player", "cast", str(info.uuid))
# Play_media
@@ -928,7 +928,7 @@ async def test_entity_play_media_cast(hass: HomeAssistant, quick_play_mock):
state = hass.states.get(entity_id)
assert state is not None
assert state.name == "Speaker"
assert state.state == "idle"
assert state.state == "off"
assert entity_id == reg.async_get_entity_id("media_player", "cast", str(info.uuid))
# Play_media - cast with app ID
@@ -970,7 +970,7 @@ async def test_entity_play_media_cast_invalid(hass, caplog, quick_play_mock):
state = hass.states.get(entity_id)
assert state is not None
assert state.name == "Speaker"
assert state.state == "idle"
assert state.state == "off"
assert entity_id == reg.async_get_entity_id("media_player", "cast", str(info.uuid))
# play_media - media_type cast with invalid JSON
@@ -1042,7 +1042,7 @@ async def test_entity_media_content_type(hass: HomeAssistant):
state = hass.states.get(entity_id)
assert state is not None
assert state.name == "Speaker"
assert state.state == "idle"
assert state.state == "off"
assert entity_id == reg.async_get_entity_id("media_player", "cast", str(info.uuid))
media_status = MagicMock(images=None)
@@ -1213,7 +1213,7 @@ async def test_entity_media_states(hass: HomeAssistant, app_id, state_no_media):
state = hass.states.get(entity_id)
assert state is not None
assert state.name == "Speaker"
assert state.state == "idle"
assert state.state == "off"
assert entity_id == reg.async_get_entity_id("media_player", "cast", str(info.uuid))
# App id updated, but no media status
@@ -1258,7 +1258,7 @@ async def test_entity_media_states(hass: HomeAssistant, app_id, state_no_media):
cast_status_cb(cast_status)
await hass.async_block_till_done()
state = hass.states.get(entity_id)
assert state.state == "idle"
assert state.state == "off"
# No cast status
chromecast.is_idle = False
@@ -1286,7 +1286,7 @@ async def test_entity_media_states_lovelace_app(hass: HomeAssistant):
state = hass.states.get(entity_id)
assert state is not None
assert state.name == "Speaker"
assert state.state == "idle"
assert state.state == "off"
assert entity_id == reg.async_get_entity_id("media_player", "cast", str(info.uuid))
chromecast.app_id = CAST_APP_ID_HOMEASSISTANT_LOVELACE
@@ -1326,7 +1326,7 @@ async def test_entity_media_states_lovelace_app(hass: HomeAssistant):
media_status_cb(media_status)
await hass.async_block_till_done()
state = hass.states.get(entity_id)
assert state.state == "idle"
assert state.state == "off"
chromecast.is_idle = False
media_status_cb(media_status)
@@ -1355,7 +1355,7 @@ async def test_group_media_states(hass, mz_mock):
state = hass.states.get(entity_id)
assert state is not None
assert state.name == "Speaker"
assert state.state == "idle"
assert state.state == "off"
assert entity_id == reg.async_get_entity_id("media_player", "cast", str(info.uuid))
group_media_status = MagicMock(images=None)
@@ -1406,7 +1406,7 @@ async def test_group_media_control(hass, mz_mock, quick_play_mock):
state = hass.states.get(entity_id)
assert state is not None
assert state.name == "Speaker"
assert state.state == "idle"
assert state.state == "off"
assert entity_id == reg.async_get_entity_id("media_player", "cast", str(info.uuid))
group_media_status = MagicMock(images=None)

View File

@@ -1764,7 +1764,7 @@ async def test_debug_info_multiple_entities_triggers(hass, mqtt_mock):
} in discovery_data
async def test_debug_info_non_mqtt(hass, device_reg, entity_reg):
async def test_debug_info_non_mqtt(hass, device_reg, entity_reg, mqtt_mock):
"""Test we get empty debug_info for a device with non MQTT entities."""
DOMAIN = "sensor"
platform = getattr(hass.components, f"test.{DOMAIN}")

View File

@@ -855,6 +855,179 @@ def test_delete_duplicates(caplog, tmpdir):
assert "Found duplicated" not in caplog.text
def test_delete_duplicates_many(caplog, tmpdir):
"""Test removal of duplicated statistics."""
test_db_file = tmpdir.mkdir("sqlite").join("test_run_info.db")
dburl = f"{SQLITE_URL_PREFIX}//{test_db_file}"
module = "tests.components.recorder.models_schema_23"
importlib.import_module(module)
old_models = sys.modules[module]
period1 = dt_util.as_utc(dt_util.parse_datetime("2021-09-01 00:00:00"))
period2 = dt_util.as_utc(dt_util.parse_datetime("2021-09-30 23:00:00"))
period3 = dt_util.as_utc(dt_util.parse_datetime("2021-10-01 00:00:00"))
period4 = dt_util.as_utc(dt_util.parse_datetime("2021-10-31 23:00:00"))
external_energy_statistics_1 = (
{
"start": period1,
"last_reset": None,
"state": 0,
"sum": 2,
},
{
"start": period2,
"last_reset": None,
"state": 1,
"sum": 3,
},
{
"start": period3,
"last_reset": None,
"state": 2,
"sum": 4,
},
{
"start": period4,
"last_reset": None,
"state": 3,
"sum": 5,
},
{
"start": period4,
"last_reset": None,
"state": 3,
"sum": 5,
},
)
external_energy_metadata_1 = {
"has_mean": False,
"has_sum": True,
"name": "Total imported energy",
"source": "test",
"statistic_id": "test:total_energy_import_tariff_1",
"unit_of_measurement": "kWh",
}
external_energy_statistics_2 = (
{
"start": period1,
"last_reset": None,
"state": 0,
"sum": 20,
},
{
"start": period2,
"last_reset": None,
"state": 1,
"sum": 30,
},
{
"start": period3,
"last_reset": None,
"state": 2,
"sum": 40,
},
{
"start": period4,
"last_reset": None,
"state": 3,
"sum": 50,
},
{
"start": period4,
"last_reset": None,
"state": 3,
"sum": 50,
},
)
external_energy_metadata_2 = {
"has_mean": False,
"has_sum": True,
"name": "Total imported energy",
"source": "test",
"statistic_id": "test:total_energy_import_tariff_2",
"unit_of_measurement": "kWh",
}
external_co2_statistics = (
{
"start": period1,
"last_reset": None,
"mean": 10,
},
{
"start": period2,
"last_reset": None,
"mean": 30,
},
{
"start": period3,
"last_reset": None,
"mean": 60,
},
{
"start": period4,
"last_reset": None,
"mean": 90,
},
)
external_co2_metadata = {
"has_mean": True,
"has_sum": False,
"name": "Fossil percentage",
"source": "test",
"statistic_id": "test:fossil_percentage",
"unit_of_measurement": "%",
}
# Create some duplicated statistics with schema version 23
with patch.object(recorder, "models", old_models), patch.object(
recorder.migration, "SCHEMA_VERSION", old_models.SCHEMA_VERSION
), patch(
"homeassistant.components.recorder.create_engine", new=_create_engine_test
):
hass = get_test_home_assistant()
setup_component(hass, "recorder", {"recorder": {"db_url": dburl}})
wait_recording_done(hass)
wait_recording_done(hass)
with session_scope(hass=hass) as session:
session.add(
recorder.models.StatisticsMeta.from_meta(external_energy_metadata_1)
)
session.add(
recorder.models.StatisticsMeta.from_meta(external_energy_metadata_2)
)
session.add(recorder.models.StatisticsMeta.from_meta(external_co2_metadata))
with session_scope(hass=hass) as session:
for stat in external_energy_statistics_1:
session.add(recorder.models.Statistics.from_stats(1, stat))
for _ in range(3000):
session.add(
recorder.models.Statistics.from_stats(
1, external_energy_statistics_1[-1]
)
)
for stat in external_energy_statistics_2:
session.add(recorder.models.Statistics.from_stats(2, stat))
for stat in external_co2_statistics:
session.add(recorder.models.Statistics.from_stats(3, stat))
hass.stop()
# Test that the duplicates are removed during migration from schema 23
hass = get_test_home_assistant()
setup_component(hass, "recorder", {"recorder": {"db_url": dburl}})
hass.start()
wait_recording_done(hass)
wait_recording_done(hass)
hass.stop()
assert "Deleted 3002 duplicated statistics rows" in caplog.text
assert "Found non identical" not in caplog.text
assert "Found duplicated" not in caplog.text
@pytest.mark.freeze_time("2021-08-01 00:00:00+00:00")
def test_delete_duplicates_non_identical(caplog, tmpdir):
"""Test removal of duplicated statistics."""

View File

@@ -163,6 +163,25 @@ async def test_midnight_turnover_before_midnight_outside_period(hass):
assert state.state == STATE_OFF
async def test_after_happens_tomorrow(hass):
"""Test when both before and after are in the future, and after is later than before."""
test_time = datetime(2019, 1, 10, 10, 00, 0, tzinfo=dt_util.UTC)
config = {
"binary_sensor": [
{"platform": "tod", "name": "Night", "after": "23:00", "before": "12:00"}
]
}
with patch(
"homeassistant.components.tod.binary_sensor.dt_util.utcnow",
return_value=test_time,
):
await async_setup_component(hass, "binary_sensor", config)
await hass.async_block_till_done()
state = hass.states.get("binary_sensor.night")
assert state.state == STATE_ON
async def test_midnight_turnover_after_midnight_outside_period(hass):
"""Test midnight turnover setting before midnight inside period ."""
test_time = datetime(2019, 1, 10, 20, 0, 0, tzinfo=dt_util.UTC)

View File

@@ -45,8 +45,13 @@ async def test_reinitialize_device(
# Reinit.
new_location = "http://192.168.1.1:12345/desc.xml"
headers = {
ssdp.ATTR_SSDP_LOCATION: new_location,
}
await device.async_ssdp_callback(headers, ...)
await device.async_ssdp_callback(
ssdp.SsdpServiceInfo(
ssdp_usn="mock_usn",
ssdp_st="mock_st",
ssdp_location="http://192.168.1.1:12345/desc.xml",
upnp={},
),
...,
)
assert device._igd_device.device.device_url == new_location

View File

@@ -0,0 +1,36 @@
"""Test version diagnostics."""
from aioaseko import ClientSession
from homeassistant.core import HomeAssistant
from .common import MOCK_VERSION, setup_version_integration
from tests.components.diagnostics import get_diagnostics_for_config_entry
async def test_diagnostics(
hass: HomeAssistant,
hass_client: ClientSession,
) -> None:
"""Test diagnostic information."""
config_entry = await setup_version_integration(hass)
diagnostics = await get_diagnostics_for_config_entry(
hass, hass_client, config_entry
)
assert diagnostics["entry"]["data"] == {
"name": "",
"channel": "stable",
"image": "default",
"board": "OVA",
"version_source": "Local installation",
"source": "local",
}
assert diagnostics["coordinator_data"] == {
"version": MOCK_VERSION,
"version_data": None,
}
assert len(diagnostics["devices"]) == 1

View File

@@ -29,6 +29,7 @@ from zwave_js_server.model.node import Node
from homeassistant.components.websocket_api.const import ERR_NOT_FOUND
from homeassistant.components.zwave_js.api import (
ADDITIONAL_PROPERTIES,
APPLICATION_VERSION,
CLIENT_SIDE_AUTH,
COMMAND_CLASS_ID,
@@ -837,6 +838,7 @@ async def test_provision_smart_start_node(hass, integration, client, hass_ws_cli
PRODUCT_TYPE: 1,
PRODUCT_ID: 1,
APPLICATION_VERSION: "test",
ADDITIONAL_PROPERTIES: {"name": "test"},
},
}
)
@@ -861,6 +863,7 @@ async def test_provision_smart_start_node(hass, integration, client, hass_ws_cli
max_inclusion_request_interval=None,
uuid=None,
supported_protocols=None,
additional_properties={"name": "test"},
).to_dict(),
}

View File

@@ -6,6 +6,7 @@ import threading
from unittest.mock import MagicMock, PropertyMock, patch
import pytest
import voluptuous as vol
from homeassistant.const import (
ATTR_ATTRIBUTION,
@@ -829,3 +830,27 @@ async def test_entity_category_property(hass):
)
mock_entity2.entity_id = "hello.world"
assert mock_entity2.entity_category == "config"
@pytest.mark.parametrize(
"value,expected",
(
("config", entity.EntityCategory.CONFIG),
("diagnostic", entity.EntityCategory.DIAGNOSTIC),
("system", entity.EntityCategory.SYSTEM),
),
)
def test_entity_category_schema(value, expected):
"""Test entity category schema."""
schema = vol.Schema(entity.ENTITY_CATEGORIES_SCHEMA)
result = schema(value)
assert result == expected
assert isinstance(result, entity.EntityCategory)
@pytest.mark.parametrize("value", (None, "non_existing"))
def test_entity_category_schema_error(value):
"""Test entity category schema."""
schema = vol.Schema(entity.ENTITY_CATEGORIES_SCHEMA)
with pytest.raises(vol.Invalid):
schema(value)