Mirror of https://github.com/home-assistant/core.git (synced 2025-07-15 09:17:10 +00:00)

Merge pull request #66103 from home-assistant/rc
Commit a869c1bc88
@@ -45,7 +45,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
         raise ConfigEntryAuthFailed from err
     except asyncio.TimeoutError as err:
         raise ConfigEntryNotReady("Timed out connecting to august api") from err
-    except (ClientResponseError, CannotConnect) as err:
+    except (AugustApiAIOHTTPError, ClientResponseError, CannotConnect) as err:
         raise ConfigEntryNotReady from err

@@ -50,6 +50,7 @@ from homeassistant.const import (
     CAST_APP_ID_HOMEASSISTANT_LOVELACE,
     EVENT_HOMEASSISTANT_STOP,
     STATE_IDLE,
+    STATE_OFF,
     STATE_PAUSED,
     STATE_PLAYING,
 )
@@ -636,7 +637,7 @@ class CastDevice(MediaPlayerEntity):
                 return STATE_PLAYING
             return STATE_IDLE
         if self._chromecast is not None and self._chromecast.is_idle:
-            return STATE_IDLE
+            return STATE_OFF
         return None

     @property
@@ -4,7 +4,7 @@
   "config_flow": true,
   "dependencies": ["network"],
   "documentation": "https://www.home-assistant.io/integrations/flux_led",
-  "requirements": ["flux_led==0.28.21"],
+  "requirements": ["flux_led==0.28.22"],
   "quality_scale": "platinum",
   "codeowners": ["@icemanch", "@bdraco"],
   "iot_class": "local_push",
@@ -35,7 +35,7 @@ from homeassistant.const import (
     Platform,
 )
 import homeassistant.helpers.config_validation as cv
-from homeassistant.helpers.entity import ENTITY_CATEGORIES_SCHEMA
+from homeassistant.helpers.entity import validate_entity_category

 from .const import (
     CONF_INVERT,
@@ -320,7 +320,7 @@ class BinarySensorSchema(KNXPlatformSchema):
             ),
             vol.Optional(CONF_DEVICE_CLASS): BINARY_SENSOR_DEVICE_CLASSES_SCHEMA,
             vol.Optional(CONF_RESET_AFTER): cv.positive_float,
-            vol.Optional(CONF_ENTITY_CATEGORY): ENTITY_CATEGORIES_SCHEMA,
+            vol.Optional(CONF_ENTITY_CATEGORY): validate_entity_category,
         }
     ),
 )
@@ -356,7 +356,7 @@ class ButtonSchema(KNXPlatformSchema):
             vol.Exclusive(
                 CONF_TYPE, "length_or_type", msg=length_or_type_msg
             ): object,
-            vol.Optional(CONF_ENTITY_CATEGORY): ENTITY_CATEGORIES_SCHEMA,
+            vol.Optional(CONF_ENTITY_CATEGORY): validate_entity_category,
         }
     ),
     vol.Any(
@@ -500,7 +500,7 @@ class ClimateSchema(KNXPlatformSchema):
             ): vol.In(HVAC_MODES),
             vol.Optional(CONF_MIN_TEMP): vol.Coerce(float),
             vol.Optional(CONF_MAX_TEMP): vol.Coerce(float),
-            vol.Optional(CONF_ENTITY_CATEGORY): ENTITY_CATEGORIES_SCHEMA,
+            vol.Optional(CONF_ENTITY_CATEGORY): validate_entity_category,
         }
     ),
 )
@@ -555,7 +555,7 @@ class CoverSchema(KNXPlatformSchema):
             vol.Optional(CONF_INVERT_POSITION, default=False): cv.boolean,
             vol.Optional(CONF_INVERT_ANGLE, default=False): cv.boolean,
             vol.Optional(CONF_DEVICE_CLASS): COVER_DEVICE_CLASSES_SCHEMA,
-            vol.Optional(CONF_ENTITY_CATEGORY): ENTITY_CATEGORIES_SCHEMA,
+            vol.Optional(CONF_ENTITY_CATEGORY): validate_entity_category,
         }
     ),
 )
@@ -618,7 +618,7 @@ class FanSchema(KNXPlatformSchema):
             vol.Optional(CONF_OSCILLATION_ADDRESS): ga_list_validator,
             vol.Optional(CONF_OSCILLATION_STATE_ADDRESS): ga_list_validator,
             vol.Optional(CONF_MAX_STEP): cv.byte,
-            vol.Optional(CONF_ENTITY_CATEGORY): ENTITY_CATEGORIES_SCHEMA,
+            vol.Optional(CONF_ENTITY_CATEGORY): validate_entity_category,
         }
     )

@@ -722,7 +722,7 @@ class LightSchema(KNXPlatformSchema):
             vol.Optional(CONF_MAX_KELVIN, default=DEFAULT_MAX_KELVIN): vol.All(
                 vol.Coerce(int), vol.Range(min=1)
             ),
-            vol.Optional(CONF_ENTITY_CATEGORY): ENTITY_CATEGORIES_SCHEMA,
+            vol.Optional(CONF_ENTITY_CATEGORY): validate_entity_category,
         }
     ),
     vol.Any(
@@ -802,7 +802,7 @@ class NumberSchema(KNXPlatformSchema):
             vol.Optional(CONF_MAX): vol.Coerce(float),
             vol.Optional(CONF_MIN): vol.Coerce(float),
             vol.Optional(CONF_STEP): cv.positive_float,
-            vol.Optional(CONF_ENTITY_CATEGORY): ENTITY_CATEGORIES_SCHEMA,
+            vol.Optional(CONF_ENTITY_CATEGORY): validate_entity_category,
         }
     ),
     number_limit_sub_validator,
@@ -824,7 +824,7 @@ class SceneSchema(KNXPlatformSchema):
             vol.Required(CONF_SCENE_NUMBER): vol.All(
                 vol.Coerce(int), vol.Range(min=1, max=64)
             ),
-            vol.Optional(CONF_ENTITY_CATEGORY): ENTITY_CATEGORIES_SCHEMA,
+            vol.Optional(CONF_ENTITY_CATEGORY): validate_entity_category,
         }
     )

@@ -855,7 +855,7 @@ class SelectSchema(KNXPlatformSchema):
             ],
             vol.Required(KNX_ADDRESS): ga_list_validator,
             vol.Optional(CONF_STATE_ADDRESS): ga_list_validator,
-            vol.Optional(CONF_ENTITY_CATEGORY): ENTITY_CATEGORIES_SCHEMA,
+            vol.Optional(CONF_ENTITY_CATEGORY): validate_entity_category,
         }
     ),
     select_options_sub_validator,
@@ -880,7 +880,7 @@ class SensorSchema(KNXPlatformSchema):
             vol.Optional(CONF_STATE_CLASS): STATE_CLASSES_SCHEMA,
             vol.Required(CONF_TYPE): sensor_type_validator,
             vol.Required(CONF_STATE_ADDRESS): ga_list_validator,
-            vol.Optional(CONF_ENTITY_CATEGORY): ENTITY_CATEGORIES_SCHEMA,
+            vol.Optional(CONF_ENTITY_CATEGORY): validate_entity_category,
         }
     )

@@ -901,7 +901,7 @@ class SwitchSchema(KNXPlatformSchema):
             vol.Optional(CONF_RESPOND_TO_READ, default=False): cv.boolean,
             vol.Required(KNX_ADDRESS): ga_list_validator,
             vol.Optional(CONF_STATE_ADDRESS): ga_list_validator,
-            vol.Optional(CONF_ENTITY_CATEGORY): ENTITY_CATEGORIES_SCHEMA,
+            vol.Optional(CONF_ENTITY_CATEGORY): validate_entity_category,
         }
     )

@@ -948,7 +948,7 @@ class WeatherSchema(KNXPlatformSchema):
             vol.Optional(CONF_KNX_DAY_NIGHT_ADDRESS): ga_list_validator,
             vol.Optional(CONF_KNX_AIR_PRESSURE_ADDRESS): ga_list_validator,
             vol.Optional(CONF_KNX_HUMIDITY_ADDRESS): ga_list_validator,
-            vol.Optional(CONF_ENTITY_CATEGORY): ENTITY_CATEGORIES_SCHEMA,
+            vol.Optional(CONF_ENTITY_CATEGORY): validate_entity_category,
         }
     ),
 )
@@ -44,7 +44,7 @@ from homeassistant.helpers import (
     template,
 )
 from homeassistant.helpers.dispatcher import async_dispatcher_send
-from homeassistant.helpers.entity import ENTITY_CATEGORIES_SCHEMA
+from homeassistant.helpers.entity import validate_entity_category
 from homeassistant.util.decorator import Registry

 from .const import (
@@ -423,7 +423,7 @@ def _validate_state_class_sensor(value: dict):
         vol.Optional(ATTR_SENSOR_STATE, default=None): vol.Any(
             None, bool, str, int, float
         ),
-        vol.Optional(ATTR_SENSOR_ENTITY_CATEGORY): ENTITY_CATEGORIES_SCHEMA,
+        vol.Optional(ATTR_SENSOR_ENTITY_CATEGORY): validate_entity_category,
         vol.Optional(ATTR_SENSOR_ICON, default="mdi:cellphone"): cv.icon,
         vol.Optional(ATTR_SENSOR_STATE_CLASS): vol.In(SENSOSR_STATE_CLASSES),
     },
@@ -595,6 +595,8 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
         )
     )

+    debug_info.initialize(hass)
+
     return True

@@ -133,6 +133,10 @@ class MqttBinarySensor(MqttEntity, BinarySensorEntity, RestoreEntity):
             self._expired = False
             self._state = last_state.state

+            if self._expiration_trigger:
+                # We might have set up a trigger already after subscribing from
+                # super().async_added_to_hass()
+                self._expiration_trigger()
             self._expiration_trigger = async_track_point_in_utc_time(
                 self.hass, self._value_is_expired, expiration_at
             )
@@ -189,7 +193,6 @@ class MqttBinarySensor(MqttEntity, BinarySensorEntity, RestoreEntity):
             # Reset old trigger
             if self._expiration_trigger:
                 self._expiration_trigger()
-                self._expiration_trigger = None

             # Set new trigger
             expiration_at = dt_util.utcnow() + timedelta(seconds=expire_after)
@@ -15,6 +15,11 @@ DATA_MQTT_DEBUG_INFO = "mqtt_debug_info"
 STORED_MESSAGES = 10


+def initialize(hass: HomeAssistant):
+    """Initialize MQTT debug info."""
+    hass.data[DATA_MQTT_DEBUG_INFO] = {"entities": {}, "triggers": {}}
+
+
 def log_messages(
     hass: HomeAssistant, entity_id: str
 ) -> Callable[[MessageCallbackType], MessageCallbackType]:
@@ -45,9 +50,7 @@ def log_messages(
 def add_subscription(hass, message_callback, subscription):
     """Prepare debug data for subscription."""
     if entity_id := getattr(message_callback, "__entity_id", None):
-        debug_info = hass.data.setdefault(
-            DATA_MQTT_DEBUG_INFO, {"entities": {}, "triggers": {}}
-        )
+        debug_info = hass.data[DATA_MQTT_DEBUG_INFO]
         entity_info = debug_info["entities"].setdefault(
             entity_id, {"subscriptions": {}, "discovery_data": {}}
         )
@@ -76,9 +79,7 @@ def remove_subscription(hass, message_callback, subscription):

 def add_entity_discovery_data(hass, discovery_data, entity_id):
     """Add discovery data."""
-    debug_info = hass.data.setdefault(
-        DATA_MQTT_DEBUG_INFO, {"entities": {}, "triggers": {}}
-    )
+    debug_info = hass.data[DATA_MQTT_DEBUG_INFO]
     entity_info = debug_info["entities"].setdefault(
         entity_id, {"subscriptions": {}, "discovery_data": {}}
     )
@@ -93,14 +94,13 @@ def update_entity_discovery_data(hass, discovery_payload, entity_id):

 def remove_entity_data(hass, entity_id):
     """Remove discovery data."""
+    if entity_id in hass.data[DATA_MQTT_DEBUG_INFO]["entities"]:
         hass.data[DATA_MQTT_DEBUG_INFO]["entities"].pop(entity_id)


 def add_trigger_discovery_data(hass, discovery_hash, discovery_data, device_id):
     """Add discovery data."""
-    debug_info = hass.data.setdefault(
-        DATA_MQTT_DEBUG_INFO, {"entities": {}, "triggers": {}}
-    )
+    debug_info = hass.data[DATA_MQTT_DEBUG_INFO]
     debug_info["triggers"][discovery_hash] = {
         "device_id": device_id,
         "discovery_data": discovery_data,
@@ -126,9 +126,7 @@ async def info_for_device(hass, device_id):
     entries = hass.helpers.entity_registry.async_entries_for_device(
         entity_registry, device_id, include_disabled_entities=True
     )
-    mqtt_debug_info = hass.data.setdefault(
-        DATA_MQTT_DEBUG_INFO, {"entities": {}, "triggers": {}}
-    )
+    mqtt_debug_info = hass.data[DATA_MQTT_DEBUG_INFO]
     for entry in entries:
         if entry.entity_id not in mqtt_debug_info["entities"]:
             continue
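Note: the debug_info hunks above replace the lazy hass.data.setdefault(DATA_MQTT_DEBUG_INFO, ...) calls with a single eager initialize(hass) invoked from async_setup, so every helper can index hass.data[DATA_MQTT_DEBUG_INFO] directly. A minimal standalone sketch of the same pattern, not part of the diff; the key name and dict layout below are illustrative only:

    DATA_KEY = "example_debug_info"  # illustrative key, not the integration's real name

    def initialize(hass):
        # Create the shared structure once, at integration setup time.
        hass.data[DATA_KEY] = {"entities": {}, "triggers": {}}

    def add_entity_info(hass, entity_id):
        # Helpers may now assume the structure exists instead of calling setdefault.
        debug_info = hass.data[DATA_KEY]
        return debug_info["entities"].setdefault(entity_id, {"subscriptions": {}})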
@@ -30,11 +30,11 @@ from homeassistant.helpers.dispatcher import (
     async_dispatcher_send,
 )
 from homeassistant.helpers.entity import (
-    ENTITY_CATEGORIES_SCHEMA,
     DeviceInfo,
     Entity,
     EntityCategory,
     async_generate_entity_id,
+    validate_entity_category,
 )
 from homeassistant.helpers.typing import ConfigType

@@ -191,7 +191,7 @@ MQTT_ENTITY_COMMON_SCHEMA = MQTT_AVAILABILITY_SCHEMA.extend(
     {
         vol.Optional(CONF_DEVICE): MQTT_ENTITY_DEVICE_INFO_SCHEMA,
         vol.Optional(CONF_ENABLED_BY_DEFAULT, default=True): cv.boolean,
-        vol.Optional(CONF_ENTITY_CATEGORY): ENTITY_CATEGORIES_SCHEMA,
+        vol.Optional(CONF_ENTITY_CATEGORY): validate_entity_category,
         vol.Optional(CONF_ICON): cv.icon,
         vol.Optional(CONF_JSON_ATTRS_TOPIC): valid_subscribe_topic,
         vol.Optional(CONF_JSON_ATTRS_TEMPLATE): cv.template,
@@ -549,7 +549,6 @@ class MqttDiscoveryUpdate(Entity):
     def _cleanup_discovery_on_remove(self) -> None:
         """Stop listening to signal and cleanup discovery data."""
         if self._discovery_data and not self._removed_from_hass:
-            debug_info.remove_entity_data(self.hass, self.entity_id)
             clear_discovery_hash(self.hass, self._discovery_data[ATTR_DISCOVERY_HASH])
             self._removed_from_hass = True

@@ -677,6 +676,7 @@ class MqttEntity(
         await MqttAttributes.async_will_remove_from_hass(self)
         await MqttAvailability.async_will_remove_from_hass(self)
         await MqttDiscoveryUpdate.async_will_remove_from_hass(self)
+        debug_info.remove_entity_data(self.hass, self.entity_id)

     @staticmethod
     @abstractmethod
@@ -180,6 +180,10 @@ class MqttSensor(MqttEntity, SensorEntity, RestoreEntity):
             self._expired = False
             self._state = last_state.state

+            if self._expiration_trigger:
+                # We might have set up a trigger already after subscribing from
+                # super().async_added_to_hass()
+                self._expiration_trigger()
             self._expiration_trigger = async_track_point_in_utc_time(
                 self.hass, self._value_is_expired, expiration_at
             )
@@ -227,7 +231,6 @@ class MqttSensor(MqttEntity, SensorEntity, RestoreEntity):
             # Reset old trigger
             if self._expiration_trigger:
                 self._expiration_trigger()
-                self._expiration_trigger = None

             # Set new trigger
             expiration_at = dt_util.utcnow() + timedelta(seconds=expire_after)
@@ -4,7 +4,7 @@
   "config_flow": true,
   "documentation": "https://www.home-assistant.io/integrations/plex",
   "requirements": [
-    "plexapi==4.9.1",
+    "plexapi==4.9.2",
     "plexauth==0.0.6",
     "plexwebsocket==0.0.13"
   ],
@@ -101,8 +101,8 @@ def _create_index(instance, table_name, index_name):
         "be patient!",
         index_name,
     )
-    try:
-        with session_scope(session=instance.get_session()) as session:
+    with session_scope(session=instance.get_session()) as session:
+        try:
             connection = session.connection()
             index.create(connection)
         except (InternalError, OperationalError, ProgrammingError) as err:
@@ -129,8 +129,8 @@ def _drop_index(instance, table_name, index_name):
     success = False

     # Engines like DB2/Oracle
-    try:
-        with session_scope(session=instance.get_session()) as session:
+    with session_scope(session=instance.get_session()) as session:
+        try:
             connection = session.connection()
             connection.execute(text(f"DROP INDEX {index_name}"))
         except SQLAlchemyError:
@@ -140,8 +140,8 @@ def _drop_index(instance, table_name, index_name):

     # Engines like SQLite, SQL Server
     if not success:
-        try:
-            with session_scope(session=instance.get_session()) as session:
+        with session_scope(session=instance.get_session()) as session:
+            try:
                 connection = session.connection()
                 connection.execute(
                     text(
@@ -157,8 +157,8 @@ def _drop_index(instance, table_name, index_name):

     if not success:
         # Engines like MySQL, MS Access
-        try:
-            with session_scope(session=instance.get_session()) as session:
+        with session_scope(session=instance.get_session()) as session:
+            try:
                 connection = session.connection()
                 connection.execute(
                     text(
@@ -203,8 +203,8 @@ def _add_columns(instance, table_name, columns_def):

     columns_def = [f"ADD {col_def}" for col_def in columns_def]

-    try:
-        with session_scope(session=instance.get_session()) as session:
+    with session_scope(session=instance.get_session()) as session:
+        try:
             connection = session.connection()
             connection.execute(
                 text(
@@ -220,8 +220,8 @@ def _add_columns(instance, table_name, columns_def):
     _LOGGER.info("Unable to use quick column add. Adding 1 by 1")

     for column_def in columns_def:
-        try:
-            with session_scope(session=instance.get_session()) as session:
+        with session_scope(session=instance.get_session()) as session:
+            try:
                 connection = session.connection()
                 connection.execute(
                     text(
@@ -271,8 +271,8 @@ def _modify_columns(instance, engine, table_name, columns_def):
     else:
         columns_def = [f"MODIFY {col_def}" for col_def in columns_def]

-    try:
-        with session_scope(session=instance.get_session()) as session:
+    with session_scope(session=instance.get_session()) as session:
+        try:
             connection = session.connection()
             connection.execute(
                 text(
@@ -286,8 +286,8 @@ def _modify_columns(instance, engine, table_name, columns_def):
     _LOGGER.info("Unable to use quick column modify. Modifying 1 by 1")

     for column_def in columns_def:
-        try:
-            with session_scope(session=instance.get_session()) as session:
+        with session_scope(session=instance.get_session()) as session:
+            try:
                 connection = session.connection()
                 connection.execute(
                     text(
@@ -330,8 +330,8 @@ def _update_states_table_with_foreign_key_options(instance, engine):
     )

     for alter in alters:
-        try:
-            with session_scope(session=instance.get_session()) as session:
+        with session_scope(session=instance.get_session()) as session:
+            try:
                 connection = session.connection()
                 connection.execute(DropConstraint(alter["old_fk"]))
                 for fkc in states_key_constraints:
@@ -361,8 +361,8 @@ def _drop_foreign_key_constraints(instance, engine, table, columns):
     )

     for drop in drops:
-        try:
-            with session_scope(session=instance.get_session()) as session:
+        with session_scope(session=instance.get_session()) as session:
+            try:
                 connection = session.connection()
                 connection.execute(DropConstraint(drop))
             except (InternalError, OperationalError):
@@ -290,7 +290,7 @@ def _find_duplicates(
         )
         .filter(subquery.c.is_duplicate == 1)
         .order_by(table.metadata_id, table.start, table.id.desc())
-        .limit(MAX_ROWS_TO_PURGE)
+        .limit(1000 * MAX_ROWS_TO_PURGE)
     )
     duplicates = execute(query)
     original_as_dict = {}
@@ -343,9 +343,10 @@ def _delete_duplicates_from_table(
         if not duplicate_ids:
             break
         all_non_identical_duplicates.extend(non_identical_duplicates)
+        for i in range(0, len(duplicate_ids), MAX_ROWS_TO_PURGE):
             deleted_rows = (
                 session.query(table)
-                .filter(table.id.in_(duplicate_ids))
+                .filter(table.id.in_(duplicate_ids[i : i + MAX_ROWS_TO_PURGE]))
                 .delete(synchronize_session=False)
             )
             total_deleted_rows += deleted_rows
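Note: the statistics hunks above widen the duplicate lookup to 1000 * MAX_ROWS_TO_PURGE rows and then delete the collected ids in slices of MAX_ROWS_TO_PURGE, which keeps each IN (...) clause at a bounded size for the database. A rough standalone illustration of the same slicing, not part of the diff; the function name and the chunk size below are assumptions:

    MAX_ROWS_TO_PURGE = 998  # assumed chunk size; keeps each IN (...) list bounded

    def delete_in_chunks(session, table, duplicate_ids):
        # Delete matching rows a fixed-size slice at a time.
        total_deleted = 0
        for i in range(0, len(duplicate_ids), MAX_ROWS_TO_PURGE):
            chunk = duplicate_ids[i : i + MAX_ROWS_TO_PURGE]
            total_deleted += (
                session.query(table)
                .filter(table.id.in_(chunk))
                .delete(synchronize_session=False)
            )
        return total_deleted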
@@ -399,12 +399,19 @@ class SonosSpeaker:
             return_exceptions=True,
         )
         for result in results:
-            if isinstance(result, Exception):
+            if isinstance(result, asyncio.exceptions.TimeoutError):
+                message = "Request timed out"
+                exc_info = None
+            elif isinstance(result, Exception):
+                message = result
+                exc_info = result if not str(result) else None
+            else:
+                continue
             _LOGGER.debug(
                 "Unsubscribe failed for %s: %s",
                 self.zone_name,
-                result,
-                exc_info=result,
+                message,
+                exc_info=exc_info,
             )
         self._subscriptions = []

@@ -422,17 +429,16 @@ class SonosSpeaker:
         if not self.available:
             return

-        if getattr(exception, "status", None) == 412:
-            _LOGGER.warning(
-                "Subscriptions for %s failed, speaker may have lost power",
-                self.zone_name,
-            )
+        if isinstance(exception, asyncio.exceptions.TimeoutError):
+            message = "Request timed out"
+            exc_info = None
         else:
-            exc_info = exception if _LOGGER.isEnabledFor(logging.DEBUG) else None
-            _LOGGER.error(
-                "Subscription renewals for %s failed: %s",
-                self.zone_name,
-                exception,
-                exc_info=exc_info,
-            )
+            message = exception
+            exc_info = exception if not str(exception) else None
+        _LOGGER.warning(
+            "Subscription renewals for %s failed, marking unavailable: %s",
+            self.zone_name,
+            message,
+            exc_info=exc_info,
+        )
         await self.async_offline()
@@ -161,6 +161,21 @@ class TodSensor(BinarySensorEntity):

         self._time_before = before_event_date

+        # We are calculating the _time_after value assuming that it will happen today
+        # But that is not always true, e.g. after 23:00, before 12:00 and now is 10:00
+        # If _time_before and _time_after are ahead of nowutc:
+        # _time_before is set to 12:00 next day
+        # _time_after is set to 23:00 today
+        # nowutc is set to 10:00 today
+        if (
+            not is_sun_event(self._after)
+            and self._time_after > nowutc
+            and self._time_before > nowutc + timedelta(days=1)
+        ):
+            # remove one day from _time_before and _time_after
+            self._time_after -= timedelta(days=1)
+            self._time_before -= timedelta(days=1)
+
         # Add offset to utc boundaries according to the configuration
         self._time_after += self._after_offset
         self._time_before += self._before_offset
@@ -12,7 +12,7 @@ from async_upnp_client.exceptions import UpnpError
 from async_upnp_client.profiles.igd import IgdDevice

 from homeassistant.components import ssdp
-from homeassistant.components.ssdp import SsdpChange
+from homeassistant.components.ssdp import SsdpChange, SsdpServiceInfo
 from homeassistant.core import HomeAssistant
 from homeassistant.helpers.aiohttp_client import async_get_clientsession
 from homeassistant.helpers.update_coordinator import DataUpdateCoordinator
@@ -71,19 +71,22 @@ class Device:
         return device

     async def async_ssdp_callback(
-        self, headers: Mapping[str, Any], change: SsdpChange
+        self, service_info: SsdpServiceInfo, change: SsdpChange
     ) -> None:
         """SSDP callback, update if needed."""
-        _LOGGER.debug("SSDP Callback, change: %s, headers: %s", change, headers)
-        if ssdp.ATTR_SSDP_LOCATION not in headers:
+        _LOGGER.debug(
+            "SSDP Callback, change: %s, headers: %s", change, service_info.ssdp_headers
+        )
+        if service_info.ssdp_location is None:
             return

-        location = headers[ssdp.ATTR_SSDP_LOCATION]
         device = self._igd_device.device
-        if location == device.device_url:
+        if service_info.ssdp_location == device.device_url:
             return

-        new_upnp_device = await async_create_upnp_device(self.hass, location)
+        new_upnp_device = await async_create_upnp_device(
+            self.hass, service_info.ssdp_location
+        )
         device.reinit(new_upnp_device)

     @property
@@ -3,7 +3,6 @@ from __future__ import annotations

 from typing import Any

-from pyhaversion.consts import HaVersionChannel, HaVersionSource
 import voluptuous as vol

 from homeassistant import config_entries
@@ -75,8 +74,8 @@ class ConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):
         self._entry_data.update(user_input)

         if not self.show_advanced_options or user_input[CONF_SOURCE] in (
-            HaVersionSource.LOCAL,
-            HaVersionSource.HAIO,
+            "local",
+            "haio",
         ):
             return self.async_create_entry(
                 title=self._config_entry_name,
@@ -92,8 +91,8 @@ class ConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):
         """Handle the version_source step."""
         if user_input is None:
             if self._entry_data[CONF_SOURCE] in (
-                HaVersionSource.SUPERVISOR,
-                HaVersionSource.CONTAINER,
+                "supervisor",
+                "container",
             ):
                 data_schema = vol.Schema(
                     {
@@ -102,7 +101,7 @@ class ConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):
                         ): vol.In(VALID_CHANNELS),
                     }
                 )
-                if self._entry_data[CONF_SOURCE] == HaVersionSource.SUPERVISOR:
+                if self._entry_data[CONF_SOURCE] == "supervisor":
                     data_schema = data_schema.extend(
                         {
                             vol.Required(CONF_IMAGE, default=DEFAULT_IMAGE): vol.In(
@@ -151,7 +150,7 @@ class ConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):
     @property
     def _config_entry_name(self) -> str:
         """Return the name of the config entry."""
-        if self._entry_data[CONF_SOURCE] == HaVersionSource.LOCAL:
+        if self._entry_data[CONF_SOURCE] == "local":
            return DEFAULT_NAME_CURRENT

        name = self._entry_data[CONF_VERSION_SOURCE]
@@ -166,21 +165,21 @@ def _convert_imported_configuration(config: dict[str, Any]) -> Any:
     """Convert a key from the imported configuration."""
     data = DEFAULT_CONFIGURATION.copy()
     if config.get(CONF_BETA):
-        data[CONF_CHANNEL] = HaVersionChannel.BETA
+        data[CONF_CHANNEL] = "beta"

     if (source := config.get(CONF_SOURCE)) and source != DEFAULT_SOURCE:
         if source == SOURCE_HASSIO:
-            data[CONF_SOURCE] = HaVersionSource.SUPERVISOR
+            data[CONF_SOURCE] = "supervisor"
             data[CONF_VERSION_SOURCE] = VERSION_SOURCE_VERSIONS
         elif source == SOURCE_DOKCER:
-            data[CONF_SOURCE] = HaVersionSource.CONTAINER
+            data[CONF_SOURCE] = "container"
             data[CONF_VERSION_SOURCE] = VERSION_SOURCE_DOCKER_HUB
         else:
             data[CONF_SOURCE] = source
             data[CONF_VERSION_SOURCE] = VERSION_SOURCE_MAP_INVERTED[source]

     if (image := config.get(CONF_IMAGE)) and image != DEFAULT_IMAGE:
-        if data[CONF_SOURCE] == HaVersionSource.CONTAINER:
+        if data[CONF_SOURCE] == "container":
             data[CONF_IMAGE] = f"{config[CONF_IMAGE]}{POSTFIX_CONTAINER_NAME}"
         else:
             data[CONF_IMAGE] = config[CONF_IMAGE]
@@ -188,7 +187,7 @@ def _convert_imported_configuration(config: dict[str, Any]) -> Any:
     if (name := config.get(CONF_NAME)) and name != DEFAULT_NAME:
         data[CONF_NAME] = config[CONF_NAME]
     else:
-        if data[CONF_SOURCE] == HaVersionSource.LOCAL:
+        if data[CONF_SOURCE] == "local":
             data[CONF_NAME] = DEFAULT_NAME_CURRENT
         else:
             data[CONF_NAME] = DEFAULT_NAME_LATEST
@@ -41,12 +41,12 @@ VERSION_SOURCE_VERSIONS: Final = "Home Assistant Versions"

 DEFAULT_BETA: Final = False
 DEFAULT_BOARD: Final = "OVA"
-DEFAULT_CHANNEL: Final[HaVersionChannel] = HaVersionChannel.STABLE
+DEFAULT_CHANNEL: Final = "stable"
 DEFAULT_IMAGE: Final = "default"
 DEFAULT_NAME_CURRENT: Final = "Current Version"
 DEFAULT_NAME_LATEST: Final = "Latest Version"
 DEFAULT_NAME: Final = ""
-DEFAULT_SOURCE: Final[HaVersionSource] = HaVersionSource.LOCAL
+DEFAULT_SOURCE: Final = "local"
 DEFAULT_CONFIGURATION: Final[dict[str, Any]] = {
     CONF_NAME: DEFAULT_NAME,
     CONF_CHANNEL: DEFAULT_CHANNEL,
@@ -81,22 +81,22 @@ BOARD_MAP: Final[dict[str, str]] = {

 VALID_BOARDS: Final[list[str]] = list(BOARD_MAP)

-VERSION_SOURCE_MAP: Final[dict[str, HaVersionSource]] = {
-    VERSION_SOURCE_LOCAL: HaVersionSource.LOCAL,
-    VERSION_SOURCE_VERSIONS: HaVersionSource.SUPERVISOR,
-    VERSION_SOURCE_HAIO: HaVersionSource.HAIO,
-    VERSION_SOURCE_DOCKER_HUB: HaVersionSource.CONTAINER,
-    VERSION_SOURCE_PYPI: HaVersionSource.PYPI,
+VERSION_SOURCE_MAP: Final[dict[str, str]] = {
+    VERSION_SOURCE_LOCAL: "local",
+    VERSION_SOURCE_VERSIONS: "supervisor",
+    VERSION_SOURCE_HAIO: "haio",
+    VERSION_SOURCE_DOCKER_HUB: "container",
+    VERSION_SOURCE_PYPI: "pypi",
 }

-VERSION_SOURCE_MAP_INVERTED: Final[dict[HaVersionSource, str]] = {
+VERSION_SOURCE_MAP_INVERTED: Final[dict[str, str]] = {
     value: key for key, value in VERSION_SOURCE_MAP.items()
 }


 VALID_SOURCES: Final[list[str]] = HA_VERSION_SOURCES + [
-    SOURCE_HASSIO,  # Kept to not break existing configurations
-    SOURCE_DOKCER,  # Kept to not break existing configurations
+    "hassio",  # Kept to not break existing configurations
+    "docker",  # Kept to not break existing configurations
 ]

 VALID_IMAGES: Final = [
homeassistant/components/version/diagnostics.py (new file, 56 lines)
@@ -0,0 +1,56 @@
+"""Provides diagnostics for Version."""
+from __future__ import annotations
+
+from typing import Any
+
+from attr import asdict
+
+from homeassistant.config_entries import ConfigEntry
+from homeassistant.core import HomeAssistant
+from homeassistant.helpers import device_registry as dr, entity_registry as er
+
+from .const import DOMAIN
+
+
+async def async_get_config_entry_diagnostics(
+    hass: HomeAssistant,
+    config_entry: ConfigEntry,
+) -> dict[str, Any]:
+    """Return diagnostics for a config entry."""
+    coordinator = hass.data[DOMAIN][config_entry.entry_id]
+    device_registry = dr.async_get(hass)
+    entity_registry = er.async_get(hass)
+
+    devices = []
+
+    registry_devices = dr.async_entries_for_config_entry(
+        device_registry, config_entry.entry_id
+    )
+
+    for device in registry_devices:
+        entities = []
+
+        registry_entities = er.async_entries_for_device(
+            entity_registry,
+            device_id=device.id,
+            include_disabled_entities=True,
+        )
+
+        for entity in registry_entities:
+            state_dict = None
+            if state := hass.states.get(entity.entity_id):
+                state_dict = dict(state.as_dict())
+                state_dict.pop("context", None)
+
+            entities.append({"entry": asdict(entity), "state": state_dict})
+
+        devices.append({"device": asdict(device), "entities": entities})
+
+    return {
+        "entry": config_entry.as_dict(),
+        "coordinator_data": {
+            "version": coordinator.version,
+            "version_data": coordinator.version_data,
+        },
+        "devices": devices,
+    }
@@ -133,6 +133,7 @@ APPLICATION_VERSION = "application_version"
 MAX_INCLUSION_REQUEST_INTERVAL = "max_inclusion_request_interval"
 UUID = "uuid"
 SUPPORTED_PROTOCOLS = "supported_protocols"
+ADDITIONAL_PROPERTIES = "additional_properties"

 FEATURE = "feature"
 UNPROVISION = "unprovision"
@@ -170,6 +171,7 @@ def convert_qr_provisioning_information(info: dict) -> QRProvisioningInformation
             max_inclusion_request_interval=info.get(MAX_INCLUSION_REQUEST_INTERVAL),
             uuid=info.get(UUID),
             supported_protocols=protocols if protocols else None,
+            additional_properties=info.get(ADDITIONAL_PROPERTIES, {}),
         )
     return info

@@ -212,6 +214,7 @@ QR_PROVISIONING_INFORMATION_SCHEMA = vol.All(
                 cv.ensure_list,
                 [vol.Coerce(Protocols)],
             ),
+            vol.Optional(ADDITIONAL_PROPERTIES): dict,
         }
     ),
     convert_qr_provisioning_information,
|
@ -7,7 +7,7 @@ from .backports.enum import StrEnum
|
|||||||
|
|
||||||
MAJOR_VERSION: Final = 2022
|
MAJOR_VERSION: Final = 2022
|
||||||
MINOR_VERSION: Final = 2
|
MINOR_VERSION: Final = 2
|
||||||
PATCH_VERSION: Final = "3"
|
PATCH_VERSION: Final = "4"
|
||||||
__short_version__: Final = f"{MAJOR_VERSION}.{MINOR_VERSION}"
|
__short_version__: Final = f"{MAJOR_VERSION}.{MINOR_VERSION}"
|
||||||
__version__: Final = f"{__short_version__}.{PATCH_VERSION}"
|
__version__: Final = f"{__short_version__}.{PATCH_VERSION}"
|
||||||
REQUIRED_PYTHON_VER: Final[tuple[int, int, int]] = (3, 9, 0)
|
REQUIRED_PYTHON_VER: Final[tuple[int, int, int]] = (3, 9, 0)
|
||||||
|
@ -11,7 +11,7 @@ import logging
|
|||||||
import math
|
import math
|
||||||
import sys
|
import sys
|
||||||
from timeit import default_timer as timer
|
from timeit import default_timer as timer
|
||||||
from typing import Any, Final, Literal, TypedDict, final
|
from typing import Any, Literal, TypedDict, final
|
||||||
|
|
||||||
import voluptuous as vol
|
import voluptuous as vol
|
||||||
|
|
||||||
@ -58,7 +58,13 @@ SOURCE_PLATFORM_CONFIG = "platform_config"
|
|||||||
FLOAT_PRECISION = abs(int(math.floor(math.log10(abs(sys.float_info.epsilon))))) - 1
|
FLOAT_PRECISION = abs(int(math.floor(math.log10(abs(sys.float_info.epsilon))))) - 1
|
||||||
|
|
||||||
|
|
||||||
ENTITY_CATEGORIES_SCHEMA: Final = vol.In(ENTITY_CATEGORIES)
|
def validate_entity_category(value: Any | None) -> EntityCategory:
|
||||||
|
"""Validate entity category configuration."""
|
||||||
|
value = vol.In(ENTITY_CATEGORIES)(value)
|
||||||
|
return EntityCategory(value)
|
||||||
|
|
||||||
|
|
||||||
|
ENTITY_CATEGORIES_SCHEMA = validate_entity_category
|
||||||
|
|
||||||
|
|
||||||
@callback
|
@callback
|
||||||
|
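Note: the helpers/entity.py hunk above turns the plain vol.In membership check into validate_entity_category, which also coerces the validated string into the EntityCategory enum, and it keeps ENTITY_CATEGORIES_SCHEMA as an alias so existing callers keep working; that is why the KNX, MQTT, and mobile_app schemas earlier in this diff can switch to the new callable without further changes. A standalone sketch of the behavioral difference, not part of the diff; EntityCategory and ENTITY_CATEGORIES below are stand-ins, not Home Assistant's real definitions:

    from enum import Enum

    import voluptuous as vol

    class EntityCategory(str, Enum):
        CONFIG = "config"
        DIAGNOSTIC = "diagnostic"

    ENTITY_CATEGORIES = [category.value for category in EntityCategory]

    # Old behavior: membership check only, the validated value stays a plain str.
    old_schema = vol.In(ENTITY_CATEGORIES)

    # New behavior: validate, then coerce so downstream code receives the enum member.
    def validate_entity_category(value):
        value = vol.In(ENTITY_CATEGORIES)(value)
        return EntityCategory(value)

    assert old_schema("config") == "config"
    assert validate_entity_category("config") is EntityCategory.CONFIG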
@@ -681,7 +681,7 @@ fjaraskupan==1.0.2
 flipr-api==1.4.1

 # homeassistant.components.flux_led
-flux_led==0.28.21
+flux_led==0.28.22

 # homeassistant.components.homekit
 fnvhash==0.1.0
@@ -1255,7 +1255,7 @@ pillow==9.0.1
 pizzapi==0.0.3

 # homeassistant.components.plex
-plexapi==4.9.1
+plexapi==4.9.2

 # homeassistant.components.plex
 plexauth==0.0.6
@@ -427,7 +427,7 @@ fjaraskupan==1.0.2
 flipr-api==1.4.1

 # homeassistant.components.flux_led
-flux_led==0.28.21
+flux_led==0.28.22

 # homeassistant.components.homekit
 fnvhash==0.1.0
@@ -774,7 +774,7 @@ pilight==0.1.1
 pillow==9.0.1

 # homeassistant.components.plex
-plexapi==4.9.1
+plexapi==4.9.2

 # homeassistant.components.plex
 plexauth==0.0.6
@@ -1,6 +1,6 @@
 [metadata]
 name = homeassistant
-version = 2022.2.3
+version = 2022.2.4
 author = The Home Assistant Authors
 author_email = hello@home-assistant.io
 license = Apache-2.0
@ -595,7 +595,7 @@ async def test_entity_availability(hass: HomeAssistant):
|
|||||||
conn_status_cb(connection_status)
|
conn_status_cb(connection_status)
|
||||||
await hass.async_block_till_done()
|
await hass.async_block_till_done()
|
||||||
state = hass.states.get(entity_id)
|
state = hass.states.get(entity_id)
|
||||||
assert state.state == "idle"
|
assert state.state == "off"
|
||||||
|
|
||||||
connection_status = MagicMock()
|
connection_status = MagicMock()
|
||||||
connection_status.status = "DISCONNECTED"
|
connection_status.status = "DISCONNECTED"
|
||||||
@ -624,7 +624,7 @@ async def test_entity_cast_status(hass: HomeAssistant):
|
|||||||
state = hass.states.get(entity_id)
|
state = hass.states.get(entity_id)
|
||||||
assert state is not None
|
assert state is not None
|
||||||
assert state.name == "Speaker"
|
assert state.name == "Speaker"
|
||||||
assert state.state == "idle"
|
assert state.state == "off"
|
||||||
assert entity_id == reg.async_get_entity_id("media_player", "cast", str(info.uuid))
|
assert entity_id == reg.async_get_entity_id("media_player", "cast", str(info.uuid))
|
||||||
|
|
||||||
# No media status, pause, play, stop not supported
|
# No media status, pause, play, stop not supported
|
||||||
@ -642,8 +642,8 @@ async def test_entity_cast_status(hass: HomeAssistant):
|
|||||||
cast_status_cb(cast_status)
|
cast_status_cb(cast_status)
|
||||||
await hass.async_block_till_done()
|
await hass.async_block_till_done()
|
||||||
state = hass.states.get(entity_id)
|
state = hass.states.get(entity_id)
|
||||||
# Volume not hidden even if no app is active
|
# Volume hidden if no app is active
|
||||||
assert state.attributes.get("volume_level") == 0.5
|
assert state.attributes.get("volume_level") is None
|
||||||
assert not state.attributes.get("is_volume_muted")
|
assert not state.attributes.get("is_volume_muted")
|
||||||
|
|
||||||
chromecast.app_id = "1234"
|
chromecast.app_id = "1234"
|
||||||
@ -747,7 +747,7 @@ async def test_supported_features(
|
|||||||
state = hass.states.get(entity_id)
|
state = hass.states.get(entity_id)
|
||||||
assert state is not None
|
assert state is not None
|
||||||
assert state.name == "Speaker"
|
assert state.name == "Speaker"
|
||||||
assert state.state == "idle"
|
assert state.state == "off"
|
||||||
assert state.attributes.get("supported_features") == supported_features_no_media
|
assert state.attributes.get("supported_features") == supported_features_no_media
|
||||||
|
|
||||||
media_status = MagicMock(images=None)
|
media_status = MagicMock(images=None)
|
||||||
@ -882,7 +882,7 @@ async def test_entity_play_media(hass: HomeAssistant, quick_play_mock):
|
|||||||
state = hass.states.get(entity_id)
|
state = hass.states.get(entity_id)
|
||||||
assert state is not None
|
assert state is not None
|
||||||
assert state.name == "Speaker"
|
assert state.name == "Speaker"
|
||||||
assert state.state == "idle"
|
assert state.state == "off"
|
||||||
assert entity_id == reg.async_get_entity_id("media_player", "cast", str(info.uuid))
|
assert entity_id == reg.async_get_entity_id("media_player", "cast", str(info.uuid))
|
||||||
|
|
||||||
# Play_media
|
# Play_media
|
||||||
@ -928,7 +928,7 @@ async def test_entity_play_media_cast(hass: HomeAssistant, quick_play_mock):
|
|||||||
state = hass.states.get(entity_id)
|
state = hass.states.get(entity_id)
|
||||||
assert state is not None
|
assert state is not None
|
||||||
assert state.name == "Speaker"
|
assert state.name == "Speaker"
|
||||||
assert state.state == "idle"
|
assert state.state == "off"
|
||||||
assert entity_id == reg.async_get_entity_id("media_player", "cast", str(info.uuid))
|
assert entity_id == reg.async_get_entity_id("media_player", "cast", str(info.uuid))
|
||||||
|
|
||||||
# Play_media - cast with app ID
|
# Play_media - cast with app ID
|
||||||
@ -970,7 +970,7 @@ async def test_entity_play_media_cast_invalid(hass, caplog, quick_play_mock):
|
|||||||
state = hass.states.get(entity_id)
|
state = hass.states.get(entity_id)
|
||||||
assert state is not None
|
assert state is not None
|
||||||
assert state.name == "Speaker"
|
assert state.name == "Speaker"
|
||||||
assert state.state == "idle"
|
assert state.state == "off"
|
||||||
assert entity_id == reg.async_get_entity_id("media_player", "cast", str(info.uuid))
|
assert entity_id == reg.async_get_entity_id("media_player", "cast", str(info.uuid))
|
||||||
|
|
||||||
# play_media - media_type cast with invalid JSON
|
# play_media - media_type cast with invalid JSON
|
||||||
@ -1042,7 +1042,7 @@ async def test_entity_media_content_type(hass: HomeAssistant):
|
|||||||
state = hass.states.get(entity_id)
|
state = hass.states.get(entity_id)
|
||||||
assert state is not None
|
assert state is not None
|
||||||
assert state.name == "Speaker"
|
assert state.name == "Speaker"
|
||||||
assert state.state == "idle"
|
assert state.state == "off"
|
||||||
assert entity_id == reg.async_get_entity_id("media_player", "cast", str(info.uuid))
|
assert entity_id == reg.async_get_entity_id("media_player", "cast", str(info.uuid))
|
||||||
|
|
||||||
media_status = MagicMock(images=None)
|
media_status = MagicMock(images=None)
|
||||||
@ -1213,7 +1213,7 @@ async def test_entity_media_states(hass: HomeAssistant, app_id, state_no_media):
|
|||||||
state = hass.states.get(entity_id)
|
state = hass.states.get(entity_id)
|
||||||
     assert state is not None
     assert state.name == "Speaker"
-    assert state.state == "idle"
+    assert state.state == "off"
     assert entity_id == reg.async_get_entity_id("media_player", "cast", str(info.uuid))

     # App id updated, but no media status
@@ -1258,7 +1258,7 @@ async def test_entity_media_states(hass: HomeAssistant, app_id, state_no_media):
     cast_status_cb(cast_status)
     await hass.async_block_till_done()
     state = hass.states.get(entity_id)
-    assert state.state == "idle"
+    assert state.state == "off"

     # No cast status
     chromecast.is_idle = False
@@ -1286,7 +1286,7 @@ async def test_entity_media_states_lovelace_app(hass: HomeAssistant):
     state = hass.states.get(entity_id)
     assert state is not None
     assert state.name == "Speaker"
-    assert state.state == "idle"
+    assert state.state == "off"
     assert entity_id == reg.async_get_entity_id("media_player", "cast", str(info.uuid))

     chromecast.app_id = CAST_APP_ID_HOMEASSISTANT_LOVELACE
@@ -1326,7 +1326,7 @@ async def test_entity_media_states_lovelace_app(hass: HomeAssistant):
     media_status_cb(media_status)
     await hass.async_block_till_done()
     state = hass.states.get(entity_id)
-    assert state.state == "idle"
+    assert state.state == "off"

     chromecast.is_idle = False
     media_status_cb(media_status)
@@ -1355,7 +1355,7 @@ async def test_group_media_states(hass, mz_mock):
     state = hass.states.get(entity_id)
     assert state is not None
     assert state.name == "Speaker"
-    assert state.state == "idle"
+    assert state.state == "off"
     assert entity_id == reg.async_get_entity_id("media_player", "cast", str(info.uuid))

     group_media_status = MagicMock(images=None)
@@ -1406,7 +1406,7 @@ async def test_group_media_control(hass, mz_mock, quick_play_mock):
     state = hass.states.get(entity_id)
     assert state is not None
     assert state.name == "Speaker"
-    assert state.state == "idle"
+    assert state.state == "off"
     assert entity_id == reg.async_get_entity_id("media_player", "cast", str(info.uuid))

     group_media_status = MagicMock(images=None)
@@ -1764,7 +1764,7 @@ async def test_debug_info_multiple_entities_triggers(hass, mqtt_mock):
     } in discovery_data


-async def test_debug_info_non_mqtt(hass, device_reg, entity_reg):
+async def test_debug_info_non_mqtt(hass, device_reg, entity_reg, mqtt_mock):
     """Test we get empty debug_info for a device with non MQTT entities."""
     DOMAIN = "sensor"
     platform = getattr(hass.components, f"test.{DOMAIN}")
@@ -855,6 +855,179 @@ def test_delete_duplicates(caplog, tmpdir):
     assert "Found duplicated" not in caplog.text


+def test_delete_duplicates_many(caplog, tmpdir):
+    """Test removal of duplicated statistics."""
+    test_db_file = tmpdir.mkdir("sqlite").join("test_run_info.db")
+    dburl = f"{SQLITE_URL_PREFIX}//{test_db_file}"
+
+    module = "tests.components.recorder.models_schema_23"
+    importlib.import_module(module)
+    old_models = sys.modules[module]
+
+    period1 = dt_util.as_utc(dt_util.parse_datetime("2021-09-01 00:00:00"))
+    period2 = dt_util.as_utc(dt_util.parse_datetime("2021-09-30 23:00:00"))
+    period3 = dt_util.as_utc(dt_util.parse_datetime("2021-10-01 00:00:00"))
+    period4 = dt_util.as_utc(dt_util.parse_datetime("2021-10-31 23:00:00"))
+
+    external_energy_statistics_1 = (
+        {
+            "start": period1,
+            "last_reset": None,
+            "state": 0,
+            "sum": 2,
+        },
+        {
+            "start": period2,
+            "last_reset": None,
+            "state": 1,
+            "sum": 3,
+        },
+        {
+            "start": period3,
+            "last_reset": None,
+            "state": 2,
+            "sum": 4,
+        },
+        {
+            "start": period4,
+            "last_reset": None,
+            "state": 3,
+            "sum": 5,
+        },
+        {
+            "start": period4,
+            "last_reset": None,
+            "state": 3,
+            "sum": 5,
+        },
+    )
+    external_energy_metadata_1 = {
+        "has_mean": False,
+        "has_sum": True,
+        "name": "Total imported energy",
+        "source": "test",
+        "statistic_id": "test:total_energy_import_tariff_1",
+        "unit_of_measurement": "kWh",
+    }
+    external_energy_statistics_2 = (
+        {
+            "start": period1,
+            "last_reset": None,
+            "state": 0,
+            "sum": 20,
+        },
+        {
+            "start": period2,
+            "last_reset": None,
+            "state": 1,
+            "sum": 30,
+        },
+        {
+            "start": period3,
+            "last_reset": None,
+            "state": 2,
+            "sum": 40,
+        },
+        {
+            "start": period4,
+            "last_reset": None,
+            "state": 3,
+            "sum": 50,
+        },
+        {
+            "start": period4,
+            "last_reset": None,
+            "state": 3,
+            "sum": 50,
+        },
+    )
+    external_energy_metadata_2 = {
+        "has_mean": False,
+        "has_sum": True,
+        "name": "Total imported energy",
+        "source": "test",
+        "statistic_id": "test:total_energy_import_tariff_2",
+        "unit_of_measurement": "kWh",
+    }
+    external_co2_statistics = (
+        {
+            "start": period1,
+            "last_reset": None,
+            "mean": 10,
+        },
+        {
+            "start": period2,
+            "last_reset": None,
+            "mean": 30,
+        },
+        {
+            "start": period3,
+            "last_reset": None,
+            "mean": 60,
+        },
+        {
+            "start": period4,
+            "last_reset": None,
+            "mean": 90,
+        },
+    )
+    external_co2_metadata = {
+        "has_mean": True,
+        "has_sum": False,
+        "name": "Fossil percentage",
+        "source": "test",
+        "statistic_id": "test:fossil_percentage",
+        "unit_of_measurement": "%",
+    }
+
+    # Create some duplicated statistics with schema version 23
+    with patch.object(recorder, "models", old_models), patch.object(
+        recorder.migration, "SCHEMA_VERSION", old_models.SCHEMA_VERSION
+    ), patch(
+        "homeassistant.components.recorder.create_engine", new=_create_engine_test
+    ):
+        hass = get_test_home_assistant()
+        setup_component(hass, "recorder", {"recorder": {"db_url": dburl}})
+        wait_recording_done(hass)
+        wait_recording_done(hass)
+
+        with session_scope(hass=hass) as session:
+            session.add(
+                recorder.models.StatisticsMeta.from_meta(external_energy_metadata_1)
+            )
+            session.add(
+                recorder.models.StatisticsMeta.from_meta(external_energy_metadata_2)
+            )
+            session.add(recorder.models.StatisticsMeta.from_meta(external_co2_metadata))
+        with session_scope(hass=hass) as session:
+            for stat in external_energy_statistics_1:
+                session.add(recorder.models.Statistics.from_stats(1, stat))
+            for _ in range(3000):
+                session.add(
+                    recorder.models.Statistics.from_stats(
+                        1, external_energy_statistics_1[-1]
+                    )
+                )
+            for stat in external_energy_statistics_2:
+                session.add(recorder.models.Statistics.from_stats(2, stat))
+            for stat in external_co2_statistics:
+                session.add(recorder.models.Statistics.from_stats(3, stat))
+
+        hass.stop()
+
+    # Test that the duplicates are removed during migration from schema 23
+    hass = get_test_home_assistant()
+    setup_component(hass, "recorder", {"recorder": {"db_url": dburl}})
+    hass.start()
+    wait_recording_done(hass)
+    wait_recording_done(hass)
+    hass.stop()
+
+    assert "Deleted 3002 duplicated statistics rows" in caplog.text
+    assert "Found non identical" not in caplog.text
+    assert "Found duplicated" not in caplog.text
+
+
 @pytest.mark.freeze_time("2021-08-01 00:00:00+00:00")
 def test_delete_duplicates_non_identical(caplog, tmpdir):
     """Test removal of duplicated statistics."""
@@ -163,6 +163,25 @@ async def test_midnight_turnover_before_midnight_outside_period(hass):
     assert state.state == STATE_OFF


+async def test_after_happens_tomorrow(hass):
+    """Test when both before and after are in the future, and after is later than before."""
+    test_time = datetime(2019, 1, 10, 10, 00, 0, tzinfo=dt_util.UTC)
+    config = {
+        "binary_sensor": [
+            {"platform": "tod", "name": "Night", "after": "23:00", "before": "12:00"}
+        ]
+    }
+    with patch(
+        "homeassistant.components.tod.binary_sensor.dt_util.utcnow",
+        return_value=test_time,
+    ):
+        await async_setup_component(hass, "binary_sensor", config)
+        await hass.async_block_till_done()
+
+    state = hass.states.get("binary_sensor.night")
+    assert state.state == STATE_ON
+
+
 async def test_midnight_turnover_after_midnight_outside_period(hass):
     """Test midnight turnover setting before midnight inside period ."""
     test_time = datetime(2019, 1, 10, 20, 0, 0, tzinfo=dt_util.UTC)
@@ -45,8 +45,13 @@ async def test_reinitialize_device(

     # Reinit.
     new_location = "http://192.168.1.1:12345/desc.xml"
-    headers = {
-        ssdp.ATTR_SSDP_LOCATION: new_location,
-    }
-    await device.async_ssdp_callback(headers, ...)
+    await device.async_ssdp_callback(
+        ssdp.SsdpServiceInfo(
+            ssdp_usn="mock_usn",
+            ssdp_st="mock_st",
+            ssdp_location="http://192.168.1.1:12345/desc.xml",
+            upnp={},
+        ),
+        ...,
+    )
     assert device._igd_device.device.device_url == new_location
tests/components/version/test_diagnostics.py (new file, 36 lines)
@@ -0,0 +1,36 @@
+"""Test version diagnostics."""
+
+
+from aioaseko import ClientSession
+
+from homeassistant.core import HomeAssistant
+
+from .common import MOCK_VERSION, setup_version_integration
+
+from tests.components.diagnostics import get_diagnostics_for_config_entry
+
+
+async def test_diagnostics(
+    hass: HomeAssistant,
+    hass_client: ClientSession,
+) -> None:
+    """Test diagnostic information."""
+    config_entry = await setup_version_integration(hass)
+
+    diagnostics = await get_diagnostics_for_config_entry(
+        hass, hass_client, config_entry
+    )
+    assert diagnostics["entry"]["data"] == {
+        "name": "",
+        "channel": "stable",
+        "image": "default",
+        "board": "OVA",
+        "version_source": "Local installation",
+        "source": "local",
+    }
+
+    assert diagnostics["coordinator_data"] == {
+        "version": MOCK_VERSION,
+        "version_data": None,
+    }
+    assert len(diagnostics["devices"]) == 1
@@ -29,6 +29,7 @@ from zwave_js_server.model.node import Node

 from homeassistant.components.websocket_api.const import ERR_NOT_FOUND
 from homeassistant.components.zwave_js.api import (
+    ADDITIONAL_PROPERTIES,
     APPLICATION_VERSION,
     CLIENT_SIDE_AUTH,
     COMMAND_CLASS_ID,
@@ -837,6 +838,7 @@ async def test_provision_smart_start_node(hass, integration, client, hass_ws_cli
                 PRODUCT_TYPE: 1,
                 PRODUCT_ID: 1,
                 APPLICATION_VERSION: "test",
+                ADDITIONAL_PROPERTIES: {"name": "test"},
             },
         }
     )
@@ -861,6 +863,7 @@ async def test_provision_smart_start_node(hass, integration, client, hass_ws_cli
             max_inclusion_request_interval=None,
             uuid=None,
             supported_protocols=None,
+            additional_properties={"name": "test"},
         ).to_dict(),
     }

@@ -6,6 +6,7 @@ import threading
 from unittest.mock import MagicMock, PropertyMock, patch

 import pytest
+import voluptuous as vol

 from homeassistant.const import (
     ATTR_ATTRIBUTION,
@@ -829,3 +830,27 @@ async def test_entity_category_property(hass):
     )
     mock_entity2.entity_id = "hello.world"
     assert mock_entity2.entity_category == "config"
+
+
+@pytest.mark.parametrize(
+    "value,expected",
+    (
+        ("config", entity.EntityCategory.CONFIG),
+        ("diagnostic", entity.EntityCategory.DIAGNOSTIC),
+        ("system", entity.EntityCategory.SYSTEM),
+    ),
+)
+def test_entity_category_schema(value, expected):
+    """Test entity category schema."""
+    schema = vol.Schema(entity.ENTITY_CATEGORIES_SCHEMA)
+    result = schema(value)
+    assert result == expected
+    assert isinstance(result, entity.EntityCategory)
+
+
+@pytest.mark.parametrize("value", (None, "non_existing"))
+def test_entity_category_schema_error(value):
+    """Test entity category schema."""
+    schema = vol.Schema(entity.ENTITY_CATEGORIES_SCHEMA)
+    with pytest.raises(vol.Invalid):
+        schema(value)