Mirror of https://github.com/home-assistant/core.git

Merge pull request #37586 from home-assistant/rc

Commit a773534809
CODEOWNERS
@@ -59,7 +59,7 @@ homeassistant/components/blink/* @fronzbot
 homeassistant/components/bmp280/* @belidzs
 homeassistant/components/bmw_connected_drive/* @gerard33 @rikroe
 homeassistant/components/bom/* @maddenp
-homeassistant/components/braviatv/* @robbiet480 @bieniu
+homeassistant/components/braviatv/* @bieniu
 homeassistant/components/broadlink/* @danielhiversen @felipediel
 homeassistant/components/brother/* @bieniu
 homeassistant/components/brunt/* @eavanvalkenburg
homeassistant/components/braviatv/config_flow.py
@@ -55,7 +55,8 @@ class BraviaTVConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):
             self.braviarc.connect, pin, CLIENTID_PREFIX, NICKNAME
         )

-        if not self.braviarc.is_connected():
+        connected = await self.hass.async_add_executor_job(self.braviarc.is_connected)
+        if not connected:
             raise CannotConnect()

         system_info = await self.hass.async_add_executor_job(
@@ -161,7 +162,8 @@ class BraviaTVOptionsFlowHandler(config_entries.OptionsFlow):
    async def async_step_init(self, user_input=None):
        """Manage the options."""
        self.braviarc = self.hass.data[DOMAIN][self.config_entry.entry_id][BRAVIARC]
-        if not self.braviarc.is_connected():
+        connected = await self.hass.async_add_executor_job(self.braviarc.is_connected)
+        if not connected:
            await self.hass.async_add_executor_job(
                self.braviarc.connect, self.pin, CLIENTID_PREFIX, NICKNAME
            )
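Both hunks above replace a direct call to the blocking `braviarc` client with `hass.async_add_executor_job`, so the network I/O runs in a worker thread instead of on the event loop. A minimal stand-alone sketch of the same pattern using plain `asyncio` (the primitive underlying Home Assistant's wrapper); the `is_connected` stub here is illustrative, not the real library call:

    import asyncio

    def is_connected() -> bool:
        """Stand-in for a blocking client call such as braviarc.is_connected()."""
        return True

    async def check_connection() -> None:
        loop = asyncio.get_running_loop()
        # Run the blocking call in the default thread-pool executor, which is
        # essentially what hass.async_add_executor_job does internally.
        connected = await loop.run_in_executor(None, is_connected)
        print("connected:", connected)

    asyncio.run(check_connection())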
homeassistant/components/braviatv/manifest.json
@@ -2,7 +2,7 @@
   "domain": "braviatv",
   "name": "Sony Bravia TV",
   "documentation": "https://www.home-assistant.io/integrations/braviatv",
-  "requirements": ["bravia-tv==1.0.5"],
-  "codeowners": ["@robbiet480", "@bieniu"],
+  "requirements": ["bravia-tv==1.0.6"],
+  "codeowners": ["@bieniu"],
   "config_flow": true
 }
homeassistant/components/braviatv/media_player.py
@@ -148,33 +148,31 @@ class BraviaTVDevice(MediaPlayerEntity):
         self._device_info = device_info
         self._ignored_sources = ignored_sources
         self._state_lock = asyncio.Lock()
-        self._need_refresh = True

     async def async_update(self):
         """Update TV info."""
         if self._state_lock.locked():
             return

-        if self._state == STATE_OFF:
-            self._need_refresh = True
-
         power_status = await self.hass.async_add_executor_job(
             self._braviarc.get_power_status
         )
-        if power_status == "active":
-            if self._need_refresh:
+
+        if power_status != "off":
+            connected = await self.hass.async_add_executor_job(
+                self._braviarc.is_connected
+            )
+            if not connected:
                 try:
                     connected = await self.hass.async_add_executor_job(
                         self._braviarc.connect, self._pin, CLIENTID_PREFIX, NICKNAME
                     )
                 except NoIPControl:
                     _LOGGER.error("IP Control is disabled in the TV settings")
-                self._need_refresh = False
-            else:
-                connected = self._braviarc.is_connected()
             if not connected:
-                return
+                power_status = "off"

         if power_status == "active":
             self._state = STATE_ON
             if (
                 await self._async_refresh_volume()
homeassistant/components/denonavr/config_flow.py
@@ -20,6 +20,7 @@ _LOGGER = logging.getLogger(__name__)
 DOMAIN = "denonavr"

 SUPPORTED_MANUFACTURERS = ["Denon", "DENON", "Marantz"]
+IGNORED_MODELS = ["HEOS 1", "HEOS 3", "HEOS 5", "HEOS 7"]

 CONF_SHOW_ALL_SOURCES = "show_all_sources"
 CONF_ZONE2 = "zone2"
@@ -217,6 +218,9 @@ class DenonAvrFlowHandler(config_entries.ConfigFlow, domain=DOMAIN):
         self.serial_number = discovery_info[ssdp.ATTR_UPNP_SERIAL]
         self.host = urlparse(discovery_info[ssdp.ATTR_SSDP_LOCATION]).hostname

+        if self.model_name in IGNORED_MODELS:
+            return self.async_abort(reason="not_denonavr_manufacturer")
+
         unique_id = self.construct_unique_id(self.model_name, self.serial_number)
         await self.async_set_unique_id(unique_id)
         self._abort_if_unique_id_configured({CONF_HOST: self.host})
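The guard added above is a plain membership test on the SSDP-announced model name, aborting discovery for HEOS speakers before a unique ID is ever set. In isolation (the non-HEOS model string below is made up for the example):

    IGNORED_MODELS = ["HEOS 1", "HEOS 3", "HEOS 5", "HEOS 7"]

    def should_ignore(model_name: str) -> bool:
        """Return True when SSDP discovery should be aborted for this model."""
        return model_name in IGNORED_MODELS

    print(should_ignore("HEOS 7"))      # True -> abort: "not_denonavr_manufacturer"
    print(should_ignore("AVR-X1600H"))  # False -> continue the config flow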
homeassistant/components/frontend/manifest.json
@@ -2,7 +2,7 @@
   "domain": "frontend",
   "name": "Home Assistant Frontend",
   "documentation": "https://www.home-assistant.io/integrations/frontend",
-  "requirements": ["home-assistant-frontend==20200702.0"],
+  "requirements": ["home-assistant-frontend==20200702.1"],
   "dependencies": [
     "api",
     "auth",
homeassistant/components/history/__init__.py
@@ -394,16 +394,9 @@ def get_state(hass, utc_point_in_time, entity_id, run=None):

 async def async_setup(hass, config):
     """Set up the history hooks."""
-    filters = Filters()
     conf = config.get(DOMAIN, {})
-    exclude = conf.get(CONF_EXCLUDE)
-    if exclude:
-        filters.excluded_entities = exclude.get(CONF_ENTITIES, [])
-        filters.excluded_domains = exclude.get(CONF_DOMAINS, [])
-    include = conf.get(CONF_INCLUDE)
-    if include:
-        filters.included_entities = include.get(CONF_ENTITIES, [])
-        filters.included_domains = include.get(CONF_DOMAINS, [])

+    filters = sqlalchemy_filter_from_include_exclude_conf(conf)
     use_include_order = conf.get(CONF_ORDER)

     hass.http.register_view(HistoryPeriodView(filters, use_include_order))
@@ -530,6 +523,20 @@ class HistoryPeriodView(HomeAssistantView):
         return self.json(result)


+def sqlalchemy_filter_from_include_exclude_conf(conf):
+    """Build a sql filter from config."""
+    filters = Filters()
+    exclude = conf.get(CONF_EXCLUDE)
+    if exclude:
+        filters.excluded_entities = exclude.get(CONF_ENTITIES, [])
+        filters.excluded_domains = exclude.get(CONF_DOMAINS, [])
+    include = conf.get(CONF_INCLUDE)
+    if include:
+        filters.included_entities = include.get(CONF_ENTITIES, [])
+        filters.included_domains = include.get(CONF_DOMAINS, [])
+    return filters
+
+
 class Filters:
     """Container for the configured include and exclude filters."""

@@ -556,26 +563,34 @@
             return query.filter(States.entity_id.in_(entity_ids))
         query = query.filter(~States.domain.in_(IGNORE_DOMAINS))

-        filter_query = None
+        entity_filter = self.entity_filter()
+        if entity_filter is not None:
+            query = query.filter(entity_filter)
+
+        return query
+
+    def entity_filter(self):
+        """Generate the entity filter query."""
+        entity_filter = None
         # filter if only excluded domain is configured
         if self.excluded_domains and not self.included_domains:
-            filter_query = ~States.domain.in_(self.excluded_domains)
+            entity_filter = ~States.domain.in_(self.excluded_domains)
             if self.included_entities:
-                filter_query &= States.entity_id.in_(self.included_entities)
+                entity_filter &= States.entity_id.in_(self.included_entities)
         # filter if only included domain is configured
         elif not self.excluded_domains and self.included_domains:
-            filter_query = States.domain.in_(self.included_domains)
+            entity_filter = States.domain.in_(self.included_domains)
             if self.included_entities:
-                filter_query |= States.entity_id.in_(self.included_entities)
+                entity_filter |= States.entity_id.in_(self.included_entities)
         # filter if included and excluded domain is configured
         elif self.excluded_domains and self.included_domains:
-            filter_query = ~States.domain.in_(self.excluded_domains)
+            entity_filter = ~States.domain.in_(self.excluded_domains)
             if self.included_entities:
-                filter_query &= States.domain.in_(
+                entity_filter &= States.domain.in_(
                     self.included_domains
                 ) | States.entity_id.in_(self.included_entities)
             else:
-                filter_query &= States.domain.in_(
+                entity_filter &= States.domain.in_(
                     self.included_domains
                 ) & ~States.domain.in_(self.excluded_domains)
         # no domain filter just included entities
@@ -584,13 +599,17 @@
             and not self.included_domains
             and self.included_entities
         ):
-            filter_query = States.entity_id.in_(self.included_entities)
-        if filter_query is not None:
-            query = query.filter(filter_query)
+            entity_filter = States.entity_id.in_(self.included_entities)
         # finally apply excluded entities filter if configured
         if self.excluded_entities:
-            query = query.filter(~States.entity_id.in_(self.excluded_entities))
-        return query
+            if entity_filter is not None:
+                entity_filter = (entity_filter) & ~States.entity_id.in_(
+                    self.excluded_entities
+                )
+            else:
+                entity_filter = ~States.entity_id.in_(self.excluded_entities)
+
+        return entity_filter


 class LazyState(State):
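The key move above is that `entity_filter()` now returns a composable SQLAlchemy boolean expression instead of mutating the query directly, which is what lets the logbook reuse the same filter. A rough stand-alone illustration of how such an expression is built and rendered (the table and config values are stand-ins, not the recorder's real schema):

    import sqlalchemy as sa

    states = sa.table("states", sa.column("domain"), sa.column("entity_id"))

    included_domains = ["light", "switch"]
    included_entities = ["sensor.outdoor_temp"]

    # The "only included domain is configured" branch from the diff:
    entity_filter = states.c.domain.in_(included_domains)
    entity_filter |= states.c.entity_id.in_(included_entities)

    # Renders roughly as:
    # states.domain IN (...) OR states.entity_id IN (...)
    print(entity_filter)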
homeassistant/components/homekit/type_media_players.py
@@ -280,7 +280,6 @@ class TelevisionMediaPlayer(HomeAccessory):

         serv_tv = self.add_preload_service(SERV_TELEVISION, self.chars_tv)
-        self.set_primary_service(serv_tv)
         serv_tv.configure_char(CHAR_CONFIGURED_NAME, value=self.display_name)
         serv_tv.configure_char(CHAR_SLEEP_DISCOVER_MODE, value=True)
         self.char_active = serv_tv.configure_char(
             CHAR_ACTIVE, setter_callback=self.set_on_off
@@ -431,7 +430,7 @@ class TelevisionMediaPlayer(HomeAccessory):
             index = self.sources.index(source_name)
             if self.char_input_source.value != index:
                 self.char_input_source.set_value(index)
-        else:
+        elif hk_state:
             _LOGGER.warning(
                 "%s: Sources out of sync. Restart Home Assistant", self.entity_id,
             )
homeassistant/components/logbook/__init__.py
@@ -3,13 +3,13 @@ from datetime import timedelta
 from itertools import groupby
 import json
 import logging
-import time

-from sqlalchemy.exc import SQLAlchemyError
+import sqlalchemy
 from sqlalchemy.orm import aliased
 import voluptuous as vol

 from homeassistant.components import sun
+from homeassistant.components.history import sqlalchemy_filter_from_include_exclude_conf
 from homeassistant.components.http import HomeAssistantView
 from homeassistant.components.recorder.models import (
     Events,
@@ -17,20 +17,13 @@ from homeassistant.components.recorder.models import (
     process_timestamp,
     process_timestamp_to_utc_isoformat,
 )
-from homeassistant.components.recorder.util import (
-    QUERY_RETRY_WAIT,
-    RETRIES,
-    session_scope,
-)
+from homeassistant.components.recorder.util import session_scope
 from homeassistant.const import (
     ATTR_DEVICE_CLASS,
     ATTR_DOMAIN,
     ATTR_ENTITY_ID,
     ATTR_FRIENDLY_NAME,
     ATTR_NAME,
-    ATTR_UNIT_OF_MEASUREMENT,
-    CONF_EXCLUDE,
-    CONF_INCLUDE,
     EVENT_HOMEASSISTANT_START,
     EVENT_HOMEASSISTANT_STOP,
     EVENT_LOGBOOK_ENTRY,
@@ -66,6 +59,8 @@ DOMAIN = "logbook"
 GROUP_BY_MINUTES = 15

 EMPTY_JSON_OBJECT = "{}"
+UNIT_OF_MEASUREMENT_JSON = '"unit_of_measurement":'

 CONFIG_SCHEMA = vol.Schema(
     {DOMAIN: INCLUDE_EXCLUDE_BASE_FILTER_SCHEMA}, extra=vol.ALLOW_EXTRA
 )
@@ -127,12 +122,21 @@ async def async_setup(hass, config):
             message = message.async_render()
             async_log_entry(hass, name, message, domain, entity_id)

-    hass.http.register_view(LogbookView(config.get(DOMAIN, {})))
-
     hass.components.frontend.async_register_built_in_panel(
         "logbook", "logbook", "hass:format-list-bulleted-type"
     )

+    conf = config.get(DOMAIN, {})
+
+    if conf:
+        filters = sqlalchemy_filter_from_include_exclude_conf(conf)
+        entities_filter = convert_include_exclude_filter(conf)
+    else:
+        filters = None
+        entities_filter = None
+
+    hass.http.register_view(LogbookView(conf, filters, entities_filter))
+
     hass.services.async_register(DOMAIN, "log", log_message, schema=LOG_MESSAGE_SCHEMA)

     await async_process_integration_platforms(hass, DOMAIN, _process_logbook_platform)
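`async_setup` now builds two cooperating filters from the same config: a SQL-level `Filters` object that narrows rows in the database, and a Python `entities_filter` predicate for the final per-event decision. A toy version of such a predicate, assuming simplified include/exclude semantics (the real `convert_include_exclude_filter` also handles globs):

    def make_entities_filter(included, excluded):
        def entities_filter(entity_id: str) -> bool:
            domain = entity_id.split(".", 1)[0]
            if entity_id in excluded["entities"] or domain in excluded["domains"]:
                return False
            if not included["entities"] and not included["domains"]:
                return True  # nothing explicitly included -> allow the rest
            return entity_id in included["entities"] or domain in included["domains"]
        return entities_filter

    keep = make_entities_filter(
        included={"domains": {"light"}, "entities": {"sensor.outdoor_temp"}},
        excluded={"domains": {"switch"}, "entities": set()},
    )
    assert keep("light.kitchen") and keep("sensor.outdoor_temp")
    assert not keep("switch.garage")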
@@ -158,9 +162,11 @@ class LogbookView(HomeAssistantView):
     name = "api:logbook"
     extra_urls = ["/api/logbook/{datetime}"]

-    def __init__(self, config):
+    def __init__(self, config, filters, entities_filter):
         """Initialize the logbook view."""
         self.config = config
+        self.filters = filters
+        self.entities_filter = entities_filter

     async def get(self, request, datetime=None):
         """Retrieve logbook entries."""
@@ -195,7 +201,15 @@ class LogbookView(HomeAssistantView):
         def json_events():
             """Fetch events and generate JSON."""
             return self.json(
-                _get_events(hass, self.config, start_day, end_day, entity_id)
+                _get_events(
+                    hass,
+                    self.config,
+                    start_day,
+                    end_day,
+                    entity_id,
+                    self.filters,
+                    self.entities_filter,
+                )
             )

         return await hass.async_add_job(json_events)
@@ -331,38 +345,9 @@ def humanify(hass, events, entity_attr_cache, prev_states=None):
     }


-def _get_related_entity_ids(session, entity_filter):
-    timer_start = time.perf_counter()
-
-    query = session.query(States).with_entities(States.entity_id).distinct()
-
-    for tryno in range(RETRIES):
-        try:
-            result = [row.entity_id for row in query if entity_filter(row.entity_id)]
-
-            if _LOGGER.isEnabledFor(logging.DEBUG):
-                elapsed = time.perf_counter() - timer_start
-                _LOGGER.debug(
-                    "fetching %d distinct domain/entity_id pairs took %fs",
-                    len(result),
-                    elapsed,
-                )
-
-            return result
-        except SQLAlchemyError as err:
-            _LOGGER.error("Error executing query: %s", err)
-
-            if tryno == RETRIES - 1:
-                raise
-            time.sleep(QUERY_RETRY_WAIT)
-
-
-def _all_entities_filter(_):
-    """Filter that accepts all entities."""
-    return True
-
-
-def _get_events(hass, config, start_day, end_day, entity_id=None):
+def _get_events(
+    hass, config, start_day, end_day, entity_id=None, filters=None, entities_filter=None
+):
     """Get events for a period of time."""
     entity_attr_cache = EntityAttributeCache(hass)

@@ -370,19 +355,17 @@ def _get_events(hass, config, start_day, end_day, entity_id=None):
         """Yield Events that are not filtered away."""
         for row in query.yield_per(1000):
             event = LazyEventPartialState(row)
-            if _keep_event(hass, event, entities_filter, entity_attr_cache):
+            if _keep_event(hass, event, entities_filter):
                 yield event

     with session_scope(hass=hass) as session:
         if entity_id is not None:
             entity_ids = [entity_id.lower()]
             entities_filter = generate_filter([], entity_ids, [], [])
-        elif config.get(CONF_EXCLUDE) or config.get(CONF_INCLUDE):
-            entities_filter = convert_include_exclude_filter(config)
-            entity_ids = _get_related_entity_ids(session, entities_filter)
+            apply_sql_entities_filter = False
         else:
-            entities_filter = _all_entities_filter
             entity_ids = None
+            apply_sql_entities_filter = True

         old_state = aliased(States, name="old_state")

@@ -392,12 +375,10 @@ def _get_events(hass, config, start_day, end_day, entity_id=None):
                 Events.event_data,
                 Events.time_fired,
                 Events.context_user_id,
-                States.state_id,
                 States.state,
                 States.entity_id,
                 States.domain,
                 States.attributes,
-                old_state.state_id.label("old_state_id"),
             )
             .order_by(Events.time_fired)
             .outerjoin(States, (Events.event_id == States.event_id))
@@ -417,9 +398,19 @@ def _get_events(hass, config, start_day, end_day, entity_id=None):
                 | (
                     (States.state_id.isnot(None))
                     & (old_state.state_id.isnot(None))
                     & (States.state.isnot(None))
                     & (States.state != old_state.state)
                 )
             )
+            #
+            # Prefilter out continuous domains that have
+            # ATTR_UNIT_OF_MEASUREMENT as its much faster in sql.
+            #
+            .filter(
+                (Events.event_type != EVENT_STATE_CHANGED)
+                | sqlalchemy.not_(States.domain.in_(CONTINUOUS_DOMAINS))
+                | sqlalchemy.not_(States.attributes.contains(UNIT_OF_MEASUREMENT_JSON))
+            )
             .filter(
                 Events.event_type.in_(ALL_EVENT_TYPES + list(hass.data.get(DOMAIN, {})))
             )
@@ -440,27 +431,25 @@ def _get_events(hass, config, start_day, end_day, entity_id=None):
                 | (States.state_id.is_(None))
             )

+        if apply_sql_entities_filter and filters:
+            entity_filter = filters.entity_filter()
+            if entity_filter is not None:
+                query = query.filter(
+                    entity_filter | (Events.event_type != EVENT_STATE_CHANGED)
+                )
+
         # When all data is schema v8 or later, prev_states can be removed
         prev_states = {}
         return list(humanify(hass, yield_events(query), entity_attr_cache, prev_states))


-def _keep_event(hass, event, entities_filter, entity_attr_cache):
+def _keep_event(hass, event, entities_filter):
     if event.event_type == EVENT_STATE_CHANGED:
         entity_id = event.entity_id
         if entity_id is None:
             return False

         # Do not report on new entities
+        # Do not report on entity removal
         if not event.has_old_and_new_state:
             return False
-
-        if event.domain in CONTINUOUS_DOMAINS and entity_attr_cache.get(
-            entity_id, ATTR_UNIT_OF_MEASUREMENT, event
-        ):
-            # Don't show continuous sensor value changes in the logbook
-            return False
     elif event.event_type in HOMEASSISTANT_EVENTS:
         entity_id = f"{HA_DOMAIN}."
     elif event.event_type in hass.data[DOMAIN] and ATTR_ENTITY_ID not in event.data:
@@ -479,7 +468,7 @@ def _keep_event(hass, event, entities_filter, entity_attr_cache):
             return False
         entity_id = f"{domain}."

-    return entities_filter(entity_id)
+    return entities_filter is None or entities_filter(entity_id)


 def _entry_message_from_event(hass, entity_id, domain, event, entity_attr_cache):
@@ -657,9 +646,12 @@ class LazyEventPartialState:
         # Delete this check once all states are saved in the v8 schema
         # format or later (they have the old_state_id column).

-        # New events in v8 schema format
+        # New events in v8+ schema format
         if self._row.event_data == EMPTY_JSON_OBJECT:
-            return self._row.state_id is not None and self._row.old_state_id is not None
+            # Events are already pre-filtered in sql
+            # to exclude missing old and new state
+            # if they are in v8+ format
+            return True

         # Old events not in v8 schema format
         return (
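The net effect of the logbook hunks above: continuous-sensor rows are now dropped in SQL by substring-matching the stored attributes JSON against '"unit_of_measurement":' instead of decoding attributes per event in Python. The string test itself is trivial to see in isolation (the attribute payloads below are made up):

    UNIT_OF_MEASUREMENT_JSON = '"unit_of_measurement":'

    rows = [
        '{"friendly_name": "Front Door", "device_class": "door"}',
        '{"unit_of_measurement": "W", "friendly_name": "Power"}',
    ]
    # Mirrors sqlalchemy.not_(States.attributes.contains(UNIT_OF_MEASUREMENT_JSON)):
    kept = [attrs for attrs in rows if UNIT_OF_MEASUREMENT_JSON not in attrs]
    print(kept)  # only the row without a unit_of_measurement survives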
homeassistant/components/mqtt/discovery.py
@@ -104,9 +104,9 @@ async def async_start(
             base = payload.pop(TOPIC_BASE)
             for key, value in payload.items():
                 if isinstance(value, str) and value:
-                    if value[0] == TOPIC_BASE and key.endswith("_topic"):
+                    if value[0] == TOPIC_BASE and key.endswith("topic"):
                         payload[key] = f"{base}{value[1:]}"
-                    if value[-1] == TOPIC_BASE and key.endswith("_topic"):
+                    if value[-1] == TOPIC_BASE and key.endswith("topic"):
                         payload[key] = f"{value[:-1]}{base}"

         # If present, the node_id will be included in the discovered object id
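The one-character change above ("_topic" -> "topic") makes base-topic expansion also apply to keys named exactly "topic", not just keys like "command_topic". The surrounding logic, reduced to a runnable stand-alone function with TOPIC_BASE = "~":

    TOPIC_BASE = "~"

    def expand_base_topic(payload: dict) -> dict:
        """Expand a leading or trailing "~" in topic values against the base."""
        base = payload.pop(TOPIC_BASE)
        for key, value in payload.items():
            if isinstance(value, str) and value and key.endswith("topic"):
                if value[0] == TOPIC_BASE:
                    payload[key] = f"{base}{value[1:]}"
                elif value[-1] == TOPIC_BASE:
                    payload[key] = f"{value[:-1]}{base}"
        return payload

    print(expand_base_topic({"~": "home/lamp", "topic": "~/state", "command_topic": "~/set"}))
    # {'topic': 'home/lamp/state', 'command_topic': 'home/lamp/set'}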
homeassistant/components/plex/server.py
@@ -326,6 +326,8 @@ class PlexServer:
                 _LOGGER.debug("plex.tv resource connection successful: %s", client)
             except NotFound:
                 _LOGGER.error("plex.tv resource connection failed: %s", resource.name)
+            else:
+                client.proxyThroughServer(value=False, server=self._plex_server)

         self._plextv_device_cache[client_id] = client
         return client
homeassistant/components/recorder/__init__.py
@@ -386,11 +386,14 @@ class Recorder(threading.Thread):
                 if dbevent and event.event_type == EVENT_STATE_CHANGED:
                     try:
                         dbstate = States.from_event(event)
+                        has_new_state = event.data.get("new_state")
                         dbstate.old_state_id = self._old_state_ids.get(dbstate.entity_id)
+                        if not has_new_state:
+                            dbstate.state = None
                         dbstate.event_id = dbevent.event_id
                         self.event_session.add(dbstate)
                         self.event_session.flush()
-                        if "new_state" in event.data:
+                        if has_new_state:
                             self._old_state_ids[dbstate.entity_id] = dbstate.state_id
                         elif dbstate.entity_id in self._old_state_ids:
                             del self._old_state_ids[dbstate.entity_id]
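With this change the recorder writes a row whose state is None when an entity is removed (the event carries no "new_state"), and drops the entity from its old-state map so a later re-add starts a fresh chain. The bookkeeping in miniature (a plain-dict sketch, not the real States model):

    old_state_ids = {}

    def record_state_change(entity_id, new_state, next_state_id):
        row = {
            "state_id": next_state_id,
            "entity_id": entity_id,
            "state": new_state,                       # None marks entity removal
            "old_state_id": old_state_ids.get(entity_id),
        }
        if new_state is not None:
            old_state_ids[entity_id] = next_state_id
        else:
            old_state_ids.pop(entity_id, None)
        return row

    print(record_state_change("lock.mine", "locked", 1))    # old_state_id: None
    print(record_state_change("lock.mine", "unlocked", 2))  # old_state_id: 1
    print(record_state_change("lock.mine", None, 3))        # removal row, state None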
homeassistant/components/speedtestdotnet/__init__.py
@@ -70,9 +70,10 @@ async def async_setup_entry(hass, config_entry):
     coordinator = SpeedTestDataCoordinator(hass, config_entry)
     await coordinator.async_setup()

-    await coordinator.async_refresh()
-    if not coordinator.last_update_success:
-        raise ConfigEntryNotReady
+    if not config_entry.options[CONF_MANUAL]:
+        await coordinator.async_refresh()
+        if not coordinator.last_update_success:
+            raise ConfigEntryNotReady

     hass.data[DOMAIN] = coordinator

@@ -115,9 +116,12 @@ class SpeedTestDataCoordinator(DataUpdateCoordinator):
             ),
         )

-    def update_data(self):
-        """Get the latest data from speedtest.net."""
-        server_list = self.api.get_servers()
+    def update_servers(self):
+        """Update list of test servers."""
+        try:
+            server_list = self.api.get_servers()
+        except speedtest.ConfigRetrievalError:
+            return
+
         self.servers[DEFAULT_SERVER] = {}
         for server in sorted(
@@ -125,14 +129,20 @@ class SpeedTestDataCoordinator(DataUpdateCoordinator):
         ):
             self.servers[f"{server[0]['country']} - {server[0]['sponsor']}"] = server[0]

+    def update_data(self):
+        """Get the latest data from speedtest.net."""
+        self.update_servers()
+
+        self.api.closest.clear()
         if self.config_entry.options.get(CONF_SERVER_ID):
             server_id = self.config_entry.options.get(CONF_SERVER_ID)
-            self.api.closest.clear()
             self.api.get_servers(servers=[server_id])

-        self.api.get_best_server()
         _LOGGER.debug(
             "Executing speedtest.net speed test with server_id: %s", self.api.best["id"]
         )
+        self.api.get_best_server()
+
         self.api.download()
         self.api.upload()
         return self.api.results.dict()
@@ -170,6 +180,8 @@ class SpeedTestDataCoordinator(DataUpdateCoordinator):

         await self.async_set_options()

+        await self.hass.async_add_executor_job(self.update_servers)
+
         self.hass.services.async_register(DOMAIN, SPEED_TEST_SERVICE, request_update)

         self.config_entry.add_update_listener(options_updated_listener)
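update_servers() above treats a failed server-list fetch as non-fatal: it returns early and keeps whatever list it already had. The same keep-the-cache-on-failure pattern in isolation (OSError stands in for speedtest.ConfigRetrievalError; all names here are illustrative):

    def refresh_servers(fetch, cache):
        """Refresh `cache` in place; keep the previous entries if the fetch fails."""
        try:
            servers = fetch()
        except OSError:  # stand-in for speedtest.ConfigRetrievalError
            return cache
        cache.clear()
        cache.update(servers)
        return cache

    def flaky_fetch():
        raise OSError("speedtest.net configuration could not be retrieved")

    cache = {"Default": {}}
    print(refresh_servers(flaky_fetch, cache))  # {'Default': {}} -- old data kept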
homeassistant/components/speedtestdotnet/config_flow.py
@@ -85,7 +85,7 @@ class SpeedTestOptionsFlowHandler(config_entries.OptionsFlow):

         self._servers = self.hass.data[DOMAIN].servers

-        server_name = DEFAULT_SERVER
+        server = []
         if self.config_entry.options.get(
             CONF_SERVER_ID
         ) and not self.config_entry.options.get(CONF_SERVER_NAME):
@@ -94,7 +94,7 @@ class SpeedTestOptionsFlowHandler(config_entries.OptionsFlow):
                 for (key, value) in self._servers.items()
                 if value.get("id") == self.config_entry.options[CONF_SERVER_ID]
             ]
-            server_name = server[0] if server else ""
+            server_name = server[0] if server else DEFAULT_SERVER

         options = {
             vol.Optional(
homeassistant/components/speedtestdotnet/sensor.py
@@ -2,7 +2,8 @@
 import logging

 from homeassistant.const import ATTR_ATTRIBUTION
-from homeassistant.helpers.entity import Entity
+from homeassistant.core import callback
+from homeassistant.helpers.restore_state import RestoreEntity

 from .const import (
     ATTR_BYTES_RECEIVED,
@@ -11,6 +12,7 @@ from .const import (
     ATTR_SERVER_ID,
     ATTR_SERVER_NAME,
     ATTRIBUTION,
+    CONF_MANUAL,
     DEFAULT_NAME,
     DOMAIN,
     ICON,
@@ -32,7 +34,7 @@ async def async_setup_entry(hass, config_entry, async_add_entities):
     async_add_entities(entities)


-class SpeedtestSensor(Entity):
+class SpeedtestSensor(RestoreEntity):
     """Implementation of a speedtest.net sensor."""

     def __init__(self, coordinator, sensor_type):
@@ -41,6 +43,7 @@ class SpeedtestSensor(Entity):
         self.coordinator = coordinator
         self.type = sensor_type
         self._unit_of_measurement = SENSOR_TYPES[self.type][1]
+        self._state = None

     @property
     def name(self):
@@ -55,14 +58,7 @@ class SpeedtestSensor(Entity):
     @property
     def state(self):
         """Return the state of the device."""
-        state = None
-        if self.type == "ping":
-            state = self.coordinator.data["ping"]
-        elif self.type == "download":
-            state = round(self.coordinator.data["download"] / 10 ** 6, 2)
-        elif self.type == "upload":
-            state = round(self.coordinator.data["upload"] / 10 ** 6, 2)
-        return state
+        return self._state

     @property
     def unit_of_measurement(self):
@@ -82,6 +78,8 @@ class SpeedtestSensor(Entity):
     @property
     def device_state_attributes(self):
         """Return the state attributes."""
+        if not self.coordinator.data:
+            return None
         attributes = {
             ATTR_ATTRIBUTION: ATTRIBUTION,
             ATTR_SERVER_NAME: self.coordinator.data["server"]["name"],
@@ -98,10 +96,30 @@ class SpeedtestSensor(Entity):

     async def async_added_to_hass(self):
         """Handle entity which will be added."""
-        self.async_on_remove(
-            self.coordinator.async_add_listener(self.async_write_ha_state)
-        )
+        await super().async_added_to_hass()
+        if self.coordinator.config_entry.options[CONF_MANUAL]:
+            state = await self.async_get_last_state()
+            if state:
+                self._state = state.state
+
+        @callback
+        def update():
+            """Update state."""
+            self._update_state()
+            self.async_write_ha_state()
+
+        self.async_on_remove(self.coordinator.async_add_listener(update))
+        self._update_state()
+
+    def _update_state(self):
+        """Update sensors state."""
+        if self.coordinator.data:
+            if self.type == "ping":
+                self._state = self.coordinator.data["ping"]
+            elif self.type == "download":
+                self._state = round(self.coordinator.data["download"] / 10 ** 6, 2)
+            elif self.type == "upload":
+                self._state = round(self.coordinator.data["upload"] / 10 ** 6, 2)

     async def async_update(self):
         """Request coordinator to update data."""
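The sensor now inherits RestoreEntity, caches its value in self._state, and in manual mode seeds that cache from the last stored state so a restart does not blank the sensor. Outside Home Assistant, the restore-then-listen flow looks roughly like this (all names below are illustrative):

    class RestoringSensor:
        def __init__(self, coordinator, restore=None):
            self._state = None
            self._coordinator = coordinator
            if restore is not None:  # e.g. a value persisted before restart
                self._state = restore

        def _update_state(self):
            data = self._coordinator.get("download")
            if data is not None:
                self._state = round(data / 10 ** 6, 2)  # bit/s -> Mbit/s

    sensor = RestoringSensor({"download": 87_654_321}, restore="83.1")
    print(sensor._state)  # '83.1' -- restored value shown until fresh data arrives
    sensor._update_state()
    print(sensor._state)  # 87.65 -- after a coordinator refresh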
homeassistant/components/vicare/binary_sensor.py
@@ -5,7 +5,7 @@ import requests

 from homeassistant.components.binary_sensor import (
     DEVICE_CLASS_POWER,
-    BinarySensorDevice,
+    BinarySensorEntity,
 )
 from homeassistant.const import CONF_DEVICE_CLASS, CONF_NAME

@@ -77,7 +77,7 @@ def setup_platform(hass, config, add_entities, discovery_info=None):
     )


-class ViCareBinarySensor(BinarySensorDevice):
+class ViCareBinarySensor(BinarySensorEntity):
     """Representation of a ViCare sensor."""

     def __init__(self, name, api, sensor_type):
homeassistant/components/withings/common.py
@@ -450,7 +450,7 @@ WITHINGS_ATTRIBUTES = [
         NotifyAppli.BED_IN,
         "In bed",
         "",
-        "mdi:bed",
+        "mdi:hotel",
         BINARY_SENSOR_DOMAIN,
         True,
         UpdateType.WEBHOOK,
homeassistant/const.py
@@ -1,7 +1,7 @@
 """Constants used by Home Assistant components."""
 MAJOR_VERSION = 0
 MINOR_VERSION = 112
-PATCH_VERSION = "2"
+PATCH_VERSION = "3"
 __short_version__ = f"{MAJOR_VERSION}.{MINOR_VERSION}"
 __version__ = f"{__short_version__}.{PATCH_VERSION}"
 REQUIRED_PYTHON_VER = (3, 7, 0)
homeassistant/package_constraints.txt
@@ -13,7 +13,7 @@ defusedxml==0.6.0
 distro==1.5.0
 emoji==0.5.4
 hass-nabucasa==0.34.7
-home-assistant-frontend==20200702.0
+home-assistant-frontend==20200702.1
 importlib-metadata==1.6.0;python_version<'3.8'
 jinja2>=2.11.1
 netdisco==2.7.1
homeassistant/scripts/benchmark/__init__.py
@@ -186,7 +186,7 @@ async def _logbook_filtering(hass, last_changed, last_updated):
     def yield_events(event):
         for _ in range(10 ** 5):
             # pylint: disable=protected-access
-            if logbook._keep_event(hass, event, entities_filter, entity_attr_cache):
+            if logbook._keep_event(hass, event, entities_filter):
                 yield event

     start = timer()
requirements_all.txt
@@ -374,7 +374,7 @@ bomradarloop==0.1.4
 boto3==1.9.252

 # homeassistant.components.braviatv
-bravia-tv==1.0.5
+bravia-tv==1.0.6

 # homeassistant.components.broadlink
 broadlink==0.14.0
@@ -738,7 +738,7 @@ hole==0.5.1
 holidays==0.10.2

 # homeassistant.components.frontend
-home-assistant-frontend==20200702.0
+home-assistant-frontend==20200702.1

 # homeassistant.components.zwave
 homeassistant-pyozw==0.1.10
requirements_test_all.txt
@@ -177,7 +177,7 @@ blinkpy==0.15.0
 bomradarloop==0.1.4

 # homeassistant.components.braviatv
-bravia-tv==1.0.5
+bravia-tv==1.0.6

 # homeassistant.components.broadlink
 broadlink==0.14.0
@@ -343,7 +343,7 @@ hole==0.5.1
 holidays==0.10.2

 # homeassistant.components.frontend
-home-assistant-frontend==20200702.0
+home-assistant-frontend==20200702.1

 # homeassistant.components.zwave
 homeassistant-pyozw==0.1.10
tests/components/denonavr/test_config_flow.py
@@ -23,6 +23,7 @@ TEST_MAC = "ab:cd:ef:gh"
 TEST_HOST2 = "5.6.7.8"
 TEST_NAME = "Test_Receiver"
 TEST_MODEL = "model5"
+TEST_IGNORED_MODEL = "HEOS 7"
 TEST_RECEIVER_TYPE = "avr-x"
 TEST_SERIALNUMBER = "123456789"
 TEST_MANUFACTURER = "Denon"
@@ -470,6 +471,27 @@ async def test_config_flow_ssdp_missing_info(hass):
     assert result["reason"] == "not_denonavr_missing"


+async def test_config_flow_ssdp_ignored_model(hass):
+    """
+    Failed flow initialized by ssdp discovery.
+
+    Model in the ignored models list.
+    """
+    result = await hass.config_entries.flow.async_init(
+        DOMAIN,
+        context={"source": config_entries.SOURCE_SSDP},
+        data={
+            ssdp.ATTR_UPNP_MANUFACTURER: TEST_MANUFACTURER,
+            ssdp.ATTR_UPNP_MODEL_NAME: TEST_IGNORED_MODEL,
+            ssdp.ATTR_UPNP_SERIAL: TEST_SERIALNUMBER,
+            ssdp.ATTR_SSDP_LOCATION: TEST_SSDP_LOCATION,
+        },
+    )
+
+    assert result["type"] == "abort"
+    assert result["reason"] == "not_denonavr_manufacturer"
+
+
 async def test_options_flow(hass):
     """Test specifying non default settings using options flow."""
     config_entry = MockConfigEntry(
tests/components/logbook/test_init.py
@@ -20,6 +20,8 @@ from homeassistant.const import (
     ATTR_NAME,
     CONF_DOMAINS,
     CONF_ENTITIES,
+    CONF_EXCLUDE,
+    CONF_INCLUDE,
     EVENT_HOMEASSISTANT_START,
     EVENT_HOMEASSISTANT_STOP,
     EVENT_STATE_CHANGED,
@@ -163,99 +165,6 @@ class TestComponentLogbook(unittest.TestCase):
             entries[1], pointC, "bla", domain="sensor", entity_id=entity_id
         )

-    def test_filter_continuous_sensor_values(self):
-        """Test remove continuous sensor events from logbook."""
-        entity_id = "sensor.bla"
-        pointA = dt_util.utcnow()
-        entity_attr_cache = logbook.EntityAttributeCache(self.hass)
-        attributes = {"unit_of_measurement": "foo"}
-        eventA = self.create_state_changed_event(pointA, entity_id, 10, attributes)
-
-        entities_filter = convert_include_exclude_filter(
-            logbook.CONFIG_SCHEMA({logbook.DOMAIN: {}})[logbook.DOMAIN]
-        )
-        assert (
-            logbook._keep_event(self.hass, eventA, entities_filter, entity_attr_cache)
-            is False
-        )
-
-    def test_exclude_new_entities(self):
-        """Test if events are excluded on first update."""
-        entity_id = "sensor.bla"
-        entity_id2 = "sensor.blu"
-        pointA = dt_util.utcnow()
-        pointB = pointA + timedelta(minutes=logbook.GROUP_BY_MINUTES)
-        entity_attr_cache = logbook.EntityAttributeCache(self.hass)
-
-        state_on = ha.State(
-            entity_id, "on", {"brightness": 200}, pointA, pointA
-        ).as_dict()
-
-        eventA = self.create_state_changed_event_from_old_new(
-            entity_id, pointA, None, state_on
-        )
-        eventB = self.create_state_changed_event(pointB, entity_id2, 20)
-
-        entities_filter = convert_include_exclude_filter(
-            logbook.CONFIG_SCHEMA({logbook.DOMAIN: {}})[logbook.DOMAIN]
-        )
-        events = [
-            e
-            for e in (
-                MockLazyEventPartialState(EVENT_HOMEASSISTANT_STOP),
-                eventA,
-                eventB,
-            )
-            if logbook._keep_event(self.hass, e, entities_filter, entity_attr_cache)
-        ]
-        entries = list(logbook.humanify(self.hass, events, entity_attr_cache))
-
-        assert len(entries) == 2
-        self.assert_entry(
-            entries[0], name="Home Assistant", message="stopped", domain=ha.DOMAIN
-        )
-        self.assert_entry(
-            entries[1], pointB, "blu", domain="sensor", entity_id=entity_id2
-        )
-
-    def test_exclude_removed_entities(self):
-        """Test if events are excluded on last update."""
-        entity_id = "sensor.bla"
-        entity_id2 = "sensor.blu"
-        pointA = dt_util.utcnow()
-        pointB = pointA + timedelta(minutes=logbook.GROUP_BY_MINUTES)
-        entity_attr_cache = logbook.EntityAttributeCache(self.hass)
-
-        state_on = ha.State(
-            entity_id, "on", {"brightness": 200}, pointA, pointA
-        ).as_dict()
-        eventA = self.create_state_changed_event_from_old_new(
-            None, pointA, state_on, None,
-        )
-        eventB = self.create_state_changed_event(pointB, entity_id2, 20)
-
-        entities_filter = convert_include_exclude_filter(
-            logbook.CONFIG_SCHEMA({logbook.DOMAIN: {}})[logbook.DOMAIN]
-        )
-        events = [
-            e
-            for e in (
-                MockLazyEventPartialState(EVENT_HOMEASSISTANT_STOP),
-                eventA,
-                eventB,
-            )
-            if logbook._keep_event(self.hass, e, entities_filter, entity_attr_cache)
-        ]
-        entries = list(logbook.humanify(self.hass, events, entity_attr_cache))
-
-        assert len(entries) == 2
-        self.assert_entry(
-            entries[0], name="Home Assistant", message="stopped", domain=ha.DOMAIN
-        )
-        self.assert_entry(
-            entries[1], pointB, "blu", domain="sensor", entity_id=entity_id2
-        )
-
     def test_exclude_events_entity(self):
         """Test if events are filtered if entity is excluded in config."""
         entity_id = "sensor.bla"
|
||||
config = logbook.CONFIG_SCHEMA(
|
||||
{
|
||||
ha.DOMAIN: {},
|
||||
logbook.DOMAIN: {logbook.CONF_EXCLUDE: {CONF_ENTITIES: [entity_id]}},
|
||||
logbook.DOMAIN: {CONF_EXCLUDE: {CONF_ENTITIES: [entity_id]}},
|
||||
}
|
||||
)
|
||||
entities_filter = convert_include_exclude_filter(config[logbook.DOMAIN])
|
||||
@ -281,7 +190,7 @@ class TestComponentLogbook(unittest.TestCase):
|
||||
eventA,
|
||||
eventB,
|
||||
)
|
||||
if logbook._keep_event(self.hass, e, entities_filter, entity_attr_cache)
|
||||
if logbook._keep_event(self.hass, e, entities_filter)
|
||||
]
|
||||
entries = list(logbook.humanify(self.hass, events, entity_attr_cache))
|
||||
|
||||
@@ -307,9 +216,7 @@ class TestComponentLogbook(unittest.TestCase):
         config = logbook.CONFIG_SCHEMA(
             {
                 ha.DOMAIN: {},
-                logbook.DOMAIN: {
-                    logbook.CONF_EXCLUDE: {CONF_DOMAINS: ["switch", "alexa"]}
-                },
+                logbook.DOMAIN: {CONF_EXCLUDE: {CONF_DOMAINS: ["switch", "alexa"]}},
             }
         )
         entities_filter = convert_include_exclude_filter(config[logbook.DOMAIN])
@@ -321,7 +228,7 @@ class TestComponentLogbook(unittest.TestCase):
                 eventA,
                 eventB,
             )
-            if logbook._keep_event(self.hass, e, entities_filter, entity_attr_cache)
+            if logbook._keep_event(self.hass, e, entities_filter)
         ]
         entries = list(logbook.humanify(self.hass, events, entity_attr_cache))

@@ -351,7 +258,7 @@ class TestComponentLogbook(unittest.TestCase):
             {
                 ha.DOMAIN: {},
                 logbook.DOMAIN: {
-                    logbook.CONF_EXCLUDE: {
+                    CONF_EXCLUDE: {
                         CONF_DOMAINS: ["switch", "alexa"],
                         CONF_ENTITY_GLOBS: "*.excluded",
                     }
@@ -368,7 +275,7 @@ class TestComponentLogbook(unittest.TestCase):
                 eventB,
                 eventC,
             )
-            if logbook._keep_event(self.hass, e, entities_filter, entity_attr_cache)
+            if logbook._keep_event(self.hass, e, entities_filter)
         ]
         entries = list(logbook.humanify(self.hass, events, entity_attr_cache))

@@ -395,7 +302,7 @@ class TestComponentLogbook(unittest.TestCase):
             {
                 ha.DOMAIN: {},
                 logbook.DOMAIN: {
-                    logbook.CONF_INCLUDE: {
+                    CONF_INCLUDE: {
                         CONF_DOMAINS: ["homeassistant"],
                         CONF_ENTITIES: [entity_id2],
                     }
@@ -410,7 +317,7 @@ class TestComponentLogbook(unittest.TestCase):
                 eventA,
                 eventB,
             )
-            if logbook._keep_event(self.hass, e, entities_filter, entity_attr_cache)
+            if logbook._keep_event(self.hass, e, entities_filter)
         ]
         entries = list(logbook.humanify(self.hass, events, entity_attr_cache))

@@ -443,9 +350,7 @@ class TestComponentLogbook(unittest.TestCase):
             {
                 ha.DOMAIN: {},
                 logbook.DOMAIN: {
-                    logbook.CONF_INCLUDE: {
-                        CONF_DOMAINS: ["homeassistant", "sensor", "alexa"]
-                    }
+                    CONF_INCLUDE: {CONF_DOMAINS: ["homeassistant", "sensor", "alexa"]}
                 },
             }
         )
@@ -458,7 +363,7 @@ class TestComponentLogbook(unittest.TestCase):
                 eventA,
                 eventB,
             )
-            if logbook._keep_event(self.hass, e, entities_filter, entity_attr_cache)
+            if logbook._keep_event(self.hass, e, entities_filter)
         ]
         entries = list(logbook.humanify(self.hass, events, entity_attr_cache))

@@ -495,7 +400,7 @@ class TestComponentLogbook(unittest.TestCase):
             {
                 ha.DOMAIN: {},
                 logbook.DOMAIN: {
-                    logbook.CONF_INCLUDE: {
+                    CONF_INCLUDE: {
                         CONF_DOMAINS: ["homeassistant", "sensor", "alexa"],
                         CONF_ENTITY_GLOBS: ["*.included"],
                     }
@@ -512,7 +417,7 @@ class TestComponentLogbook(unittest.TestCase):
                 eventB,
                 eventC,
             )
-            if logbook._keep_event(self.hass, e, entities_filter, entity_attr_cache)
+            if logbook._keep_event(self.hass, e, entities_filter)
         ]
         entries = list(logbook.humanify(self.hass, events, entity_attr_cache))

@@ -547,11 +452,11 @@ class TestComponentLogbook(unittest.TestCase):
             {
                 ha.DOMAIN: {},
                 logbook.DOMAIN: {
-                    logbook.CONF_INCLUDE: {
+                    CONF_INCLUDE: {
                         CONF_DOMAINS: ["sensor", "homeassistant"],
                         CONF_ENTITIES: ["switch.bla"],
                     },
-                    logbook.CONF_EXCLUDE: {
+                    CONF_EXCLUDE: {
                         CONF_DOMAINS: ["switch"],
                         CONF_ENTITIES: ["sensor.bli"],
                     },
@@ -569,7 +474,7 @@ class TestComponentLogbook(unittest.TestCase):
                 eventB1,
                 eventB2,
             )
-            if logbook._keep_event(self.hass, e, entities_filter, entity_attr_cache)
+            if logbook._keep_event(self.hass, e, entities_filter)
         ]
         entries = list(logbook.humanify(self.hass, events, entity_attr_cache))

@@ -616,12 +521,12 @@ class TestComponentLogbook(unittest.TestCase):
             {
                 ha.DOMAIN: {},
                 logbook.DOMAIN: {
-                    logbook.CONF_INCLUDE: {
+                    CONF_INCLUDE: {
                         CONF_DOMAINS: ["sensor", "homeassistant"],
                         CONF_ENTITIES: ["switch.bla"],
                         CONF_ENTITY_GLOBS: ["*.included"],
                     },
-                    logbook.CONF_EXCLUDE: {
+                    CONF_EXCLUDE: {
                         CONF_DOMAINS: ["switch"],
                         CONF_ENTITY_GLOBS: ["*.excluded"],
                         CONF_ENTITIES: ["sensor.bli"],
@@ -643,7 +548,7 @@ class TestComponentLogbook(unittest.TestCase):
                 eventC2,
                 eventC3,
             )
-            if logbook._keep_event(self.hass, e, entities_filter, entity_attr_cache)
+            if logbook._keep_event(self.hass, e, entities_filter)
         ]
         entries = list(logbook.humanify(self.hass, events, entity_attr_cache))

@@ -695,7 +600,7 @@ class TestComponentLogbook(unittest.TestCase):
         events = [
             e
             for e in (eventA, eventB)
-            if logbook._keep_event(self.hass, e, entities_filter, entity_attr_cache)
+            if logbook._keep_event(self.hass, e, entities_filter)
         ]
         entries = list(logbook.humanify(self.hass, events, entity_attr_cache))

@@ -1647,10 +1552,7 @@ async def test_exclude_described_event(hass, hass_client):
         logbook.DOMAIN,
         {
             logbook.DOMAIN: {
-                logbook.CONF_EXCLUDE: {
-                    CONF_DOMAINS: ["sensor"],
-                    CONF_ENTITIES: [entity_id],
-                }
+                CONF_EXCLUDE: {CONF_DOMAINS: ["sensor"], CONF_ENTITIES: [entity_id]}
             }
         },
     )
@@ -1820,6 +1722,119 @@ async def test_logbook_entity_filter_with_automations(hass, hass_client):
     assert json_dict[0]["entity_id"] == entity_id_second


+async def test_filter_continuous_sensor_values(hass, hass_client):
+    """Test remove continuous sensor events from logbook."""
+    await hass.async_add_executor_job(init_recorder_component, hass)
+    await async_setup_component(hass, "logbook", {})
+    await hass.async_add_job(hass.data[recorder.DATA_INSTANCE].block_till_done)
+
+    entity_id_test = "switch.test"
+    hass.states.async_set(entity_id_test, STATE_OFF)
+    hass.states.async_set(entity_id_test, STATE_ON)
+    entity_id_second = "sensor.bla"
+    hass.states.async_set(entity_id_second, STATE_OFF, {"unit_of_measurement": "foo"})
+    hass.states.async_set(entity_id_second, STATE_ON, {"unit_of_measurement": "foo"})
+    entity_id_third = "light.bla"
+    hass.states.async_set(entity_id_third, STATE_OFF, {"unit_of_measurement": "foo"})
+    hass.states.async_set(entity_id_third, STATE_ON, {"unit_of_measurement": "foo"})
+
+    await hass.async_add_job(partial(trigger_db_commit, hass))
+    await hass.async_block_till_done()
+    await hass.async_add_job(hass.data[recorder.DATA_INSTANCE].block_till_done)
+
+    client = await hass_client()
+
+    # Today time 00:00:00
+    start = dt_util.utcnow().date()
+    start_date = datetime(start.year, start.month, start.day)
+
+    # Test today entries without filters
+    response = await client.get(f"/api/logbook/{start_date.isoformat()}")
+    assert response.status == 200
+    response_json = await response.json()
+
+    assert len(response_json) == 2
+    assert response_json[0]["entity_id"] == entity_id_test
+    assert response_json[1]["entity_id"] == entity_id_third
+
+
+async def test_exclude_new_entities(hass, hass_client):
+    """Test if events are excluded on first update."""
+    await hass.async_add_executor_job(init_recorder_component, hass)
+    await async_setup_component(hass, "logbook", {})
+    await hass.async_add_job(hass.data[recorder.DATA_INSTANCE].block_till_done)
+
+    entity_id = "climate.bla"
+    entity_id2 = "climate.blu"
+
+    hass.states.async_set(entity_id, STATE_OFF)
+    hass.states.async_set(entity_id2, STATE_ON)
+    hass.states.async_set(entity_id2, STATE_OFF)
+    hass.bus.async_fire(EVENT_HOMEASSISTANT_START)
+
+    await hass.async_add_job(partial(trigger_db_commit, hass))
+    await hass.async_block_till_done()
+    await hass.async_add_job(hass.data[recorder.DATA_INSTANCE].block_till_done)
+
+    client = await hass_client()
+
+    # Today time 00:00:00
+    start = dt_util.utcnow().date()
+    start_date = datetime(start.year, start.month, start.day)
+
+    # Test today entries without filters
+    response = await client.get(f"/api/logbook/{start_date.isoformat()}")
+    assert response.status == 200
+    response_json = await response.json()
+
+    assert len(response_json) == 2
+    assert response_json[0]["entity_id"] == entity_id2
+    assert response_json[1]["domain"] == "homeassistant"
+    assert response_json[1]["message"] == "started"
+
+
+async def test_exclude_removed_entities(hass, hass_client):
+    """Test if events are excluded on last update."""
+    await hass.async_add_executor_job(init_recorder_component, hass)
+    await async_setup_component(hass, "logbook", {})
+    await hass.async_add_job(hass.data[recorder.DATA_INSTANCE].block_till_done)
+
+    entity_id = "climate.bla"
+    entity_id2 = "climate.blu"
+
+    hass.states.async_set(entity_id, STATE_ON)
+    hass.states.async_set(entity_id, STATE_OFF)
+
+    hass.bus.async_fire(EVENT_HOMEASSISTANT_START)
+
+    hass.states.async_set(entity_id2, STATE_ON)
+    hass.states.async_set(entity_id2, STATE_OFF)
+
+    hass.states.async_remove(entity_id)
+    hass.states.async_remove(entity_id2)
+
+    await hass.async_add_job(partial(trigger_db_commit, hass))
+    await hass.async_block_till_done()
+    await hass.async_add_job(hass.data[recorder.DATA_INSTANCE].block_till_done)
+
+    client = await hass_client()
+
+    # Today time 00:00:00
+    start = dt_util.utcnow().date()
+    start_date = datetime(start.year, start.month, start.day)
+
+    # Test today entries without filters
+    response = await client.get(f"/api/logbook/{start_date.isoformat()}")
+    assert response.status == 200
+    response_json = await response.json()
+
+    assert len(response_json) == 3
+    assert response_json[0]["entity_id"] == entity_id
+    assert response_json[1]["domain"] == "homeassistant"
+    assert response_json[1]["message"] == "started"
+    assert response_json[2]["entity_id"] == entity_id2
+
+
 class MockLazyEventPartialState(ha.Event):
     """Minimal mock of a Lazy event."""

tests/components/plex/mock_classes.py
@@ -229,6 +229,10 @@ class MockPlexClient:
         """Mock the version attribute."""
         return "1.0"

+    def proxyThroughServer(self, value=True, server=None):
+        """Mock the proxyThroughServer method."""
+        pass
+
     def playMedia(self, item):
         """Mock the playMedia method."""
         pass
tests/components/recorder/test_init.py
@@ -16,7 +16,7 @@ from homeassistant.components.recorder import (
 from homeassistant.components.recorder.const import DATA_INSTANCE
 from homeassistant.components.recorder.models import Events, RecorderRuns, States
 from homeassistant.components.recorder.util import session_scope
-from homeassistant.const import MATCH_ALL
+from homeassistant.const import MATCH_ALL, STATE_LOCKED, STATE_UNLOCKED
 from homeassistant.core import ATTR_NOW, EVENT_TIME_CHANGED, Context, callback
 from homeassistant.setup import async_setup_component
 from homeassistant.util import dt as dt_util
@@ -261,6 +261,27 @@ def test_saving_state_include_domain_glob_exclude_entity(hass_recorder):
     assert _state_empty_context(hass, "test.ok").state == "state2"


+def test_saving_state_and_removing_entity(hass, hass_recorder):
+    """Test saving the state of a removed entity."""
+    hass = hass_recorder()
+    entity_id = "lock.mine"
+    hass.states.set(entity_id, STATE_LOCKED)
+    hass.states.set(entity_id, STATE_UNLOCKED)
+    hass.states.async_remove(entity_id)
+
+    wait_recording_done(hass)
+
+    with session_scope(hass=hass) as session:
+        states = list(session.query(States))
+        assert len(states) == 3
+        assert states[0].entity_id == entity_id
+        assert states[0].state == STATE_LOCKED
+        assert states[1].entity_id == entity_id
+        assert states[1].state == STATE_UNLOCKED
+        assert states[2].entity_id == entity_id
+        assert states[2].state is None
+
+
 def test_recorder_setup_failure():
     """Test some exceptions."""
     hass = get_test_home_assistant()