Compare commits

..

24 Commits

Author SHA1 Message Date
Paulus Schoutsen
a773534809 Merge pull request #37586 from home-assistant/rc 2020-07-06 16:42:07 -07:00
Paulus Schoutsen
e5a2b0d2d8 Bumped version to 0.112.3 2020-07-06 22:36:05 +00:00
Paulus Schoutsen
7ba9bcebed Fix CODEOWNERS 2020-07-06 22:35:38 +00:00
starkillerOG
3bd5d83c9c Ignore HEOS 1, 3, 5 and 7 for DenonAvr ssdp discovery (#37579) 2020-07-06 22:21:13 +00:00
jjlawren
0f3b7b73af Fix Plex client controls when connected via plex.tv resource (#37572) 2020-07-06 22:21:13 +00:00
J. Nick Koston
38d2410156 Ensure homekit tv names can be saved (#37571) 2020-07-06 22:21:12 +00:00
J. Nick Koston
82693d9dca Suppress spurious homekit warning about media player sources when the device is off (#37567) 2020-07-06 22:21:11 +00:00
Bram Kragten
a88ac1e1fc Update frontend to 20200702.1 (#37566) 2020-07-06 22:21:11 +00:00
Sean Mooney
37bef01f67 Fix default icon for Withings sleep sensor (#37502)
The icon for Withings sleep sensor was using `mdi:bed` (which is correct for MDI v5.0.45 and later). However Home Assistant still uses an older version of Material Design Icons (4.9.95), so this `mdi:bed` icon was not displaying at all. It should be `mdi:hotel` instead, which you can see here: https://cdn.materialdesignicons.com/4.9.95/
2020-07-06 22:21:10 +00:00
David Nielsen
7c3e64673f Fix braviatv authentication refresh (#37482)
- Bumps bravia-tv lib to 1.0.6 which fixes is_connected() to actually
      return True only when API is connected, instead of just returning whether
      or not cookies are cached (regardless of whether they actually worked).
    - Wrap is_connected() because it now performs io.
    - Remove unnecessary logic to refresh cookies. Now that
      is_connected() works, the bravia instance only needs to be
      reconnected when is_connected is False and TV is not off.
2020-07-06 22:20:57 +00:00
Martin
16b59220f3 Fix base class for ViCare binary sensor to remove warning (#37478) 2020-07-06 22:19:58 +00:00
Erik Montnemery
050a558243 Fix base topic for 'topic' (#37475) 2020-07-06 22:19:57 +00:00
Rami Mosleh
ddb049e884 Stop Speedtest sensors update on startup if manual option is enabled (#37403)
Co-authored-by: Paulus Schoutsen <paulus@home-assistant.io>
2020-07-06 22:19:57 +00:00
J. Nick Koston
891640972b Ensure removed entities are not displayed in logbook (#37395) 2020-07-06 22:19:56 +00:00
J. Nick Koston
2e824f3fa5 Ensure logbook performs well when filtering is configured (#37292) 2020-07-06 22:19:55 +00:00
J. Nick Koston
4ca643342e Move logbook continuous domain filtering to sql (#37115)
* Move logbook continuous domain filtering to sql

sensors tend to generate a significant amount of states
that are filtered out by logbook.  In testing 75% of
states can be filtered away in sql to avoid the
sqlalchemy ORM overhead of creating objects that will
be discarded.

* remove un-needed nesting
2020-07-06 22:19:54 +00:00
Paulus Schoutsen
b76d7edf74 Merge pull request #37443 from home-assistant/rc 2020-07-03 22:14:46 -07:00
Paulus Schoutsen
18c16c464e Bumped version to 0.112.2 2020-07-03 22:43:02 +00:00
Alan Tse
30e980d389 Bump teslajsonpy to 0.9.2 (#37434)
* Bump teslajsonpy to 0.9.1
closes #37340

* Bump teslajsonpy to 0.9.2
2020-07-03 22:42:54 +00:00
Teemu R
e3d3b87f2e Bump python-miio to 0.5.2.1 (#37422) 2020-07-03 22:42:53 +00:00
Aaron Bach
032a6f3143 Bump pytile to 4.0.0 (#37398) 2020-07-03 22:42:52 +00:00
J. Nick Koston
d7ecbb8ebe Ensure logbook entries appear when the logbook.log (#37388)
service is called without a domain or entity_id
2020-07-03 22:42:51 +00:00
J. Nick Koston
ed086e5200 Handle index already existing on db migration with MySQLdb backend (#37384)
_create_index needed the same check as _add_columns since
the MySQLdb backend throws OperationalError instead
of InternalError in this case
2020-07-03 22:42:51 +00:00
Erik Montnemery
08ebc4ce62 Don't print MQTT credentials to log (#37364) 2020-07-03 22:42:50 +00:00
31 changed files with 419 additions and 282 deletions

View File

@@ -59,7 +59,7 @@ homeassistant/components/blink/* @fronzbot
homeassistant/components/bmp280/* @belidzs
homeassistant/components/bmw_connected_drive/* @gerard33 @rikroe
homeassistant/components/bom/* @maddenp
homeassistant/components/braviatv/* @robbiet480 @bieniu
homeassistant/components/braviatv/* @bieniu
homeassistant/components/broadlink/* @danielhiversen @felipediel
homeassistant/components/brother/* @bieniu
homeassistant/components/brunt/* @eavanvalkenburg

View File

@@ -55,7 +55,8 @@ class BraviaTVConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):
self.braviarc.connect, pin, CLIENTID_PREFIX, NICKNAME
)
if not self.braviarc.is_connected():
connected = await self.hass.async_add_executor_job(self.braviarc.is_connected)
if not connected:
raise CannotConnect()
system_info = await self.hass.async_add_executor_job(
@@ -161,7 +162,8 @@ class BraviaTVOptionsFlowHandler(config_entries.OptionsFlow):
async def async_step_init(self, user_input=None):
"""Manage the options."""
self.braviarc = self.hass.data[DOMAIN][self.config_entry.entry_id][BRAVIARC]
if not self.braviarc.is_connected():
connected = await self.hass.async_add_executor_job(self.braviarc.is_connected)
if not connected:
await self.hass.async_add_executor_job(
self.braviarc.connect, self.pin, CLIENTID_PREFIX, NICKNAME
)

View File

@@ -2,7 +2,7 @@
"domain": "braviatv",
"name": "Sony Bravia TV",
"documentation": "https://www.home-assistant.io/integrations/braviatv",
"requirements": ["bravia-tv==1.0.5"],
"codeowners": ["@robbiet480", "@bieniu"],
"requirements": ["bravia-tv==1.0.6"],
"codeowners": ["@bieniu"],
"config_flow": true
}

View File

@@ -148,33 +148,31 @@ class BraviaTVDevice(MediaPlayerEntity):
self._device_info = device_info
self._ignored_sources = ignored_sources
self._state_lock = asyncio.Lock()
self._need_refresh = True
async def async_update(self):
"""Update TV info."""
if self._state_lock.locked():
return
if self._state == STATE_OFF:
self._need_refresh = True
power_status = await self.hass.async_add_executor_job(
self._braviarc.get_power_status
)
if power_status == "active":
if self._need_refresh:
if power_status != "off":
connected = await self.hass.async_add_executor_job(
self._braviarc.is_connected
)
if not connected:
try:
connected = await self.hass.async_add_executor_job(
self._braviarc.connect, self._pin, CLIENTID_PREFIX, NICKNAME
)
except NoIPControl:
_LOGGER.error("IP Control is disabled in the TV settings")
self._need_refresh = False
else:
connected = self._braviarc.is_connected()
if not connected:
return
power_status = "off"
if power_status == "active":
self._state = STATE_ON
if (
await self._async_refresh_volume()

View File

@@ -20,6 +20,7 @@ _LOGGER = logging.getLogger(__name__)
DOMAIN = "denonavr"
SUPPORTED_MANUFACTURERS = ["Denon", "DENON", "Marantz"]
IGNORED_MODELS = ["HEOS 1", "HEOS 3", "HEOS 5", "HEOS 7"]
CONF_SHOW_ALL_SOURCES = "show_all_sources"
CONF_ZONE2 = "zone2"
@@ -217,6 +218,9 @@ class DenonAvrFlowHandler(config_entries.ConfigFlow, domain=DOMAIN):
self.serial_number = discovery_info[ssdp.ATTR_UPNP_SERIAL]
self.host = urlparse(discovery_info[ssdp.ATTR_SSDP_LOCATION]).hostname
if self.model_name in IGNORED_MODELS:
return self.async_abort(reason="not_denonavr_manufacturer")
unique_id = self.construct_unique_id(self.model_name, self.serial_number)
await self.async_set_unique_id(unique_id)
self._abort_if_unique_id_configured({CONF_HOST: self.host})

View File

@@ -2,7 +2,7 @@
"domain": "frontend",
"name": "Home Assistant Frontend",
"documentation": "https://www.home-assistant.io/integrations/frontend",
"requirements": ["home-assistant-frontend==20200702.0"],
"requirements": ["home-assistant-frontend==20200702.1"],
"dependencies": [
"api",
"auth",

View File

@@ -394,16 +394,9 @@ def get_state(hass, utc_point_in_time, entity_id, run=None):
async def async_setup(hass, config):
"""Set up the history hooks."""
filters = Filters()
conf = config.get(DOMAIN, {})
exclude = conf.get(CONF_EXCLUDE)
if exclude:
filters.excluded_entities = exclude.get(CONF_ENTITIES, [])
filters.excluded_domains = exclude.get(CONF_DOMAINS, [])
include = conf.get(CONF_INCLUDE)
if include:
filters.included_entities = include.get(CONF_ENTITIES, [])
filters.included_domains = include.get(CONF_DOMAINS, [])
filters = sqlalchemy_filter_from_include_exclude_conf(conf)
use_include_order = conf.get(CONF_ORDER)
hass.http.register_view(HistoryPeriodView(filters, use_include_order))
@@ -530,6 +523,20 @@ class HistoryPeriodView(HomeAssistantView):
return self.json(result)
def sqlalchemy_filter_from_include_exclude_conf(conf):
"""Build a sql filter from config."""
filters = Filters()
exclude = conf.get(CONF_EXCLUDE)
if exclude:
filters.excluded_entities = exclude.get(CONF_ENTITIES, [])
filters.excluded_domains = exclude.get(CONF_DOMAINS, [])
include = conf.get(CONF_INCLUDE)
if include:
filters.included_entities = include.get(CONF_ENTITIES, [])
filters.included_domains = include.get(CONF_DOMAINS, [])
return filters
class Filters:
"""Container for the configured include and exclude filters."""
@@ -556,26 +563,34 @@ class Filters:
return query.filter(States.entity_id.in_(entity_ids))
query = query.filter(~States.domain.in_(IGNORE_DOMAINS))
filter_query = None
entity_filter = self.entity_filter()
if entity_filter is not None:
query = query.filter(entity_filter)
return query
def entity_filter(self):
"""Generate the entity filter query."""
entity_filter = None
# filter if only excluded domain is configured
if self.excluded_domains and not self.included_domains:
filter_query = ~States.domain.in_(self.excluded_domains)
entity_filter = ~States.domain.in_(self.excluded_domains)
if self.included_entities:
filter_query &= States.entity_id.in_(self.included_entities)
entity_filter &= States.entity_id.in_(self.included_entities)
# filter if only included domain is configured
elif not self.excluded_domains and self.included_domains:
filter_query = States.domain.in_(self.included_domains)
entity_filter = States.domain.in_(self.included_domains)
if self.included_entities:
filter_query |= States.entity_id.in_(self.included_entities)
entity_filter |= States.entity_id.in_(self.included_entities)
# filter if included and excluded domain is configured
elif self.excluded_domains and self.included_domains:
filter_query = ~States.domain.in_(self.excluded_domains)
entity_filter = ~States.domain.in_(self.excluded_domains)
if self.included_entities:
filter_query &= States.domain.in_(
entity_filter &= States.domain.in_(
self.included_domains
) | States.entity_id.in_(self.included_entities)
else:
filter_query &= States.domain.in_(
entity_filter &= States.domain.in_(
self.included_domains
) & ~States.domain.in_(self.excluded_domains)
# no domain filter just included entities
@@ -584,13 +599,17 @@ class Filters:
and not self.included_domains
and self.included_entities
):
filter_query = States.entity_id.in_(self.included_entities)
if filter_query is not None:
query = query.filter(filter_query)
entity_filter = States.entity_id.in_(self.included_entities)
# finally apply excluded entities filter if configured
if self.excluded_entities:
query = query.filter(~States.entity_id.in_(self.excluded_entities))
return query
if entity_filter is not None:
entity_filter = (entity_filter) & ~States.entity_id.in_(
self.excluded_entities
)
else:
entity_filter = ~States.entity_id.in_(self.excluded_entities)
return entity_filter
class LazyState(State):

View File

@@ -280,7 +280,6 @@ class TelevisionMediaPlayer(HomeAccessory):
serv_tv = self.add_preload_service(SERV_TELEVISION, self.chars_tv)
self.set_primary_service(serv_tv)
serv_tv.configure_char(CHAR_CONFIGURED_NAME, value=self.display_name)
serv_tv.configure_char(CHAR_SLEEP_DISCOVER_MODE, value=True)
self.char_active = serv_tv.configure_char(
CHAR_ACTIVE, setter_callback=self.set_on_off
@@ -431,7 +430,7 @@ class TelevisionMediaPlayer(HomeAccessory):
index = self.sources.index(source_name)
if self.char_input_source.value != index:
self.char_input_source.set_value(index)
else:
elif hk_state:
_LOGGER.warning(
"%s: Sources out of sync. Restart Home Assistant", self.entity_id,
)

View File

@@ -3,13 +3,13 @@ from datetime import timedelta
from itertools import groupby
import json
import logging
import time
from sqlalchemy.exc import SQLAlchemyError
import sqlalchemy
from sqlalchemy.orm import aliased
import voluptuous as vol
from homeassistant.components import sun
from homeassistant.components.history import sqlalchemy_filter_from_include_exclude_conf
from homeassistant.components.http import HomeAssistantView
from homeassistant.components.recorder.models import (
Events,
@@ -17,20 +17,13 @@ from homeassistant.components.recorder.models import (
process_timestamp,
process_timestamp_to_utc_isoformat,
)
from homeassistant.components.recorder.util import (
QUERY_RETRY_WAIT,
RETRIES,
session_scope,
)
from homeassistant.components.recorder.util import session_scope
from homeassistant.const import (
ATTR_DEVICE_CLASS,
ATTR_DOMAIN,
ATTR_ENTITY_ID,
ATTR_FRIENDLY_NAME,
ATTR_NAME,
ATTR_UNIT_OF_MEASUREMENT,
CONF_EXCLUDE,
CONF_INCLUDE,
EVENT_HOMEASSISTANT_START,
EVENT_HOMEASSISTANT_STOP,
EVENT_LOGBOOK_ENTRY,
@@ -66,6 +59,8 @@ DOMAIN = "logbook"
GROUP_BY_MINUTES = 15
EMPTY_JSON_OBJECT = "{}"
UNIT_OF_MEASUREMENT_JSON = '"unit_of_measurement":'
CONFIG_SCHEMA = vol.Schema(
{DOMAIN: INCLUDE_EXCLUDE_BASE_FILTER_SCHEMA}, extra=vol.ALLOW_EXTRA
)
@@ -117,16 +112,31 @@ async def async_setup(hass, config):
domain = service.data.get(ATTR_DOMAIN)
entity_id = service.data.get(ATTR_ENTITY_ID)
if entity_id is None and domain is None:
# If there is no entity_id or
# domain, the event will get filtered
# away so we use the "logbook" domain
domain = DOMAIN
message.hass = hass
message = message.async_render()
async_log_entry(hass, name, message, domain, entity_id)
hass.http.register_view(LogbookView(config.get(DOMAIN, {})))
hass.components.frontend.async_register_built_in_panel(
"logbook", "logbook", "hass:format-list-bulleted-type"
)
conf = config.get(DOMAIN, {})
if conf:
filters = sqlalchemy_filter_from_include_exclude_conf(conf)
entities_filter = convert_include_exclude_filter(conf)
else:
filters = None
entities_filter = None
hass.http.register_view(LogbookView(conf, filters, entities_filter))
hass.services.async_register(DOMAIN, "log", log_message, schema=LOG_MESSAGE_SCHEMA)
await async_process_integration_platforms(hass, DOMAIN, _process_logbook_platform)
@@ -152,9 +162,11 @@ class LogbookView(HomeAssistantView):
name = "api:logbook"
extra_urls = ["/api/logbook/{datetime}"]
def __init__(self, config):
def __init__(self, config, filters, entities_filter):
"""Initialize the logbook view."""
self.config = config
self.filters = filters
self.entities_filter = entities_filter
async def get(self, request, datetime=None):
"""Retrieve logbook entries."""
@@ -189,7 +201,15 @@ class LogbookView(HomeAssistantView):
def json_events():
"""Fetch events and generate JSON."""
return self.json(
_get_events(hass, self.config, start_day, end_day, entity_id)
_get_events(
hass,
self.config,
start_day,
end_day,
entity_id,
self.filters,
self.entities_filter,
)
)
return await hass.async_add_job(json_events)
@@ -325,38 +345,9 @@ def humanify(hass, events, entity_attr_cache, prev_states=None):
}
def _get_related_entity_ids(session, entity_filter):
timer_start = time.perf_counter()
query = session.query(States).with_entities(States.entity_id).distinct()
for tryno in range(RETRIES):
try:
result = [row.entity_id for row in query if entity_filter(row.entity_id)]
if _LOGGER.isEnabledFor(logging.DEBUG):
elapsed = time.perf_counter() - timer_start
_LOGGER.debug(
"fetching %d distinct domain/entity_id pairs took %fs",
len(result),
elapsed,
)
return result
except SQLAlchemyError as err:
_LOGGER.error("Error executing query: %s", err)
if tryno == RETRIES - 1:
raise
time.sleep(QUERY_RETRY_WAIT)
def _all_entities_filter(_):
"""Filter that accepts all entities."""
return True
def _get_events(hass, config, start_day, end_day, entity_id=None):
def _get_events(
hass, config, start_day, end_day, entity_id=None, filters=None, entities_filter=None
):
"""Get events for a period of time."""
entity_attr_cache = EntityAttributeCache(hass)
@@ -364,19 +355,17 @@ def _get_events(hass, config, start_day, end_day, entity_id=None):
"""Yield Events that are not filtered away."""
for row in query.yield_per(1000):
event = LazyEventPartialState(row)
if _keep_event(hass, event, entities_filter, entity_attr_cache):
if _keep_event(hass, event, entities_filter):
yield event
with session_scope(hass=hass) as session:
if entity_id is not None:
entity_ids = [entity_id.lower()]
entities_filter = generate_filter([], entity_ids, [], [])
elif config.get(CONF_EXCLUDE) or config.get(CONF_INCLUDE):
entities_filter = convert_include_exclude_filter(config)
entity_ids = _get_related_entity_ids(session, entities_filter)
apply_sql_entities_filter = False
else:
entities_filter = _all_entities_filter
entity_ids = None
apply_sql_entities_filter = True
old_state = aliased(States, name="old_state")
@@ -386,12 +375,10 @@ def _get_events(hass, config, start_day, end_day, entity_id=None):
Events.event_data,
Events.time_fired,
Events.context_user_id,
States.state_id,
States.state,
States.entity_id,
States.domain,
States.attributes,
old_state.state_id.label("old_state_id"),
)
.order_by(Events.time_fired)
.outerjoin(States, (Events.event_id == States.event_id))
@@ -411,9 +398,19 @@ def _get_events(hass, config, start_day, end_day, entity_id=None):
| (
(States.state_id.isnot(None))
& (old_state.state_id.isnot(None))
& (States.state.isnot(None))
& (States.state != old_state.state)
)
)
#
# Prefilter out continuous domains that have
# ATTR_UNIT_OF_MEASUREMENT as its much faster in sql.
#
.filter(
(Events.event_type != EVENT_STATE_CHANGED)
| sqlalchemy.not_(States.domain.in_(CONTINUOUS_DOMAINS))
| sqlalchemy.not_(States.attributes.contains(UNIT_OF_MEASUREMENT_JSON))
)
.filter(
Events.event_type.in_(ALL_EVENT_TYPES + list(hass.data.get(DOMAIN, {})))
)
@@ -434,27 +431,25 @@ def _get_events(hass, config, start_day, end_day, entity_id=None):
| (States.state_id.is_(None))
)
if apply_sql_entities_filter and filters:
entity_filter = filters.entity_filter()
if entity_filter is not None:
query = query.filter(
entity_filter | (Events.event_type != EVENT_STATE_CHANGED)
)
# When all data is schema v8 or later, prev_states can be removed
prev_states = {}
return list(humanify(hass, yield_events(query), entity_attr_cache, prev_states))
def _keep_event(hass, event, entities_filter, entity_attr_cache):
def _keep_event(hass, event, entities_filter):
if event.event_type == EVENT_STATE_CHANGED:
entity_id = event.entity_id
if entity_id is None:
return False
# Do not report on new entities
# Do not report on entity removal
if not event.has_old_and_new_state:
return False
if event.domain in CONTINUOUS_DOMAINS and entity_attr_cache.get(
entity_id, ATTR_UNIT_OF_MEASUREMENT, event
):
# Don't show continuous sensor value changes in the logbook
return False
elif event.event_type in HOMEASSISTANT_EVENTS:
entity_id = f"{HA_DOMAIN}."
elif event.event_type in hass.data[DOMAIN] and ATTR_ENTITY_ID not in event.data:
@@ -473,7 +468,7 @@ def _keep_event(hass, event, entities_filter, entity_attr_cache):
return False
entity_id = f"{domain}."
return entities_filter(entity_id)
return entities_filter is None or entities_filter(entity_id)
def _entry_message_from_event(hass, entity_id, domain, event, entity_attr_cache):
@@ -651,9 +646,12 @@ class LazyEventPartialState:
# Delete this check once all states are saved in the v8 schema
# format or later (they have the old_state_id column).
# New events in v8 schema format
# New events in v8+ schema format
if self._row.event_data == EMPTY_JSON_OBJECT:
return self._row.state_id is not None and self._row.old_state_id is not None
# Events are already pre-filtered in sql
# to exclude missing old and new state
# if they are in v8+ format
return True
# Old events not in v8 schema format
return (

View File

@@ -476,10 +476,14 @@ async def async_setup_entry(hass, entry):
if conf is None:
conf = CONFIG_SCHEMA({DOMAIN: dict(entry.data)})[DOMAIN]
elif any(key in conf for key in entry.data):
_LOGGER.warning(
shared_keys = conf.keys() & entry.data.keys()
override = {k: entry.data[k] for k in shared_keys}
if CONF_PASSWORD in override:
override[CONF_PASSWORD] = "********"
_LOGGER.info(
"Data in your configuration entry is going to override your "
"configuration.yaml: %s",
entry.data,
override,
)
conf = _merge_config(entry, conf)

View File

@@ -104,9 +104,9 @@ async def async_start(
base = payload.pop(TOPIC_BASE)
for key, value in payload.items():
if isinstance(value, str) and value:
if value[0] == TOPIC_BASE and key.endswith("_topic"):
if value[0] == TOPIC_BASE and key.endswith("topic"):
payload[key] = f"{base}{value[1:]}"
if value[-1] == TOPIC_BASE and key.endswith("_topic"):
if value[-1] == TOPIC_BASE and key.endswith("topic"):
payload[key] = f"{value[:-1]}{base}"
# If present, the node_id will be included in the discovered object id

View File

@@ -326,6 +326,8 @@ class PlexServer:
_LOGGER.debug("plex.tv resource connection successful: %s", client)
except NotFound:
_LOGGER.error("plex.tv resource connection failed: %s", resource.name)
else:
client.proxyThroughServer(value=False, server=self._plex_server)
self._plextv_device_cache[client_id] = client
return client

View File

@@ -386,11 +386,14 @@ class Recorder(threading.Thread):
if dbevent and event.event_type == EVENT_STATE_CHANGED:
try:
dbstate = States.from_event(event)
has_new_state = event.data.get("new_state")
dbstate.old_state_id = self._old_state_ids.get(dbstate.entity_id)
if not has_new_state:
dbstate.state = None
dbstate.event_id = dbevent.event_id
self.event_session.add(dbstate)
self.event_session.flush()
if "new_state" in event.data:
if has_new_state:
self._old_state_ids[dbstate.entity_id] = dbstate.state_id
elif dbstate.entity_id in self._old_state_ids:
del self._old_state_ids[dbstate.entity_id]

View File

@@ -81,7 +81,9 @@ def _create_index(engine, table_name, index_name):
try:
index.create(engine)
except OperationalError as err:
if "already exists" not in str(err).lower():
lower_err_str = str(err).lower()
if "already exists" not in lower_err_str and "duplicate" not in lower_err_str:
raise
_LOGGER.warning(

View File

@@ -70,9 +70,10 @@ async def async_setup_entry(hass, config_entry):
coordinator = SpeedTestDataCoordinator(hass, config_entry)
await coordinator.async_setup()
await coordinator.async_refresh()
if not coordinator.last_update_success:
raise ConfigEntryNotReady
if not config_entry.options[CONF_MANUAL]:
await coordinator.async_refresh()
if not coordinator.last_update_success:
raise ConfigEntryNotReady
hass.data[DOMAIN] = coordinator
@@ -115,9 +116,12 @@ class SpeedTestDataCoordinator(DataUpdateCoordinator):
),
)
def update_data(self):
"""Get the latest data from speedtest.net."""
server_list = self.api.get_servers()
def update_servers(self):
"""Update list of test servers."""
try:
server_list = self.api.get_servers()
except speedtest.ConfigRetrievalError:
return
self.servers[DEFAULT_SERVER] = {}
for server in sorted(
@@ -125,14 +129,20 @@ class SpeedTestDataCoordinator(DataUpdateCoordinator):
):
self.servers[f"{server[0]['country']} - {server[0]['sponsor']}"] = server[0]
def update_data(self):
"""Get the latest data from speedtest.net."""
self.update_servers()
self.api.closest.clear()
if self.config_entry.options.get(CONF_SERVER_ID):
server_id = self.config_entry.options.get(CONF_SERVER_ID)
self.api.closest.clear()
self.api.get_servers(servers=[server_id])
self.api.get_best_server()
_LOGGER.debug(
"Executing speedtest.net speed test with server_id: %s", self.api.best["id"]
)
self.api.get_best_server()
self.api.download()
self.api.upload()
return self.api.results.dict()
@@ -170,6 +180,8 @@ class SpeedTestDataCoordinator(DataUpdateCoordinator):
await self.async_set_options()
await self.hass.async_add_executor_job(self.update_servers)
self.hass.services.async_register(DOMAIN, SPEED_TEST_SERVICE, request_update)
self.config_entry.add_update_listener(options_updated_listener)

View File

@@ -85,7 +85,7 @@ class SpeedTestOptionsFlowHandler(config_entries.OptionsFlow):
self._servers = self.hass.data[DOMAIN].servers
server_name = DEFAULT_SERVER
server = []
if self.config_entry.options.get(
CONF_SERVER_ID
) and not self.config_entry.options.get(CONF_SERVER_NAME):
@@ -94,7 +94,7 @@ class SpeedTestOptionsFlowHandler(config_entries.OptionsFlow):
for (key, value) in self._servers.items()
if value.get("id") == self.config_entry.options[CONF_SERVER_ID]
]
server_name = server[0] if server else ""
server_name = server[0] if server else DEFAULT_SERVER
options = {
vol.Optional(

View File

@@ -2,7 +2,8 @@
import logging
from homeassistant.const import ATTR_ATTRIBUTION
from homeassistant.helpers.entity import Entity
from homeassistant.core import callback
from homeassistant.helpers.restore_state import RestoreEntity
from .const import (
ATTR_BYTES_RECEIVED,
@@ -11,6 +12,7 @@ from .const import (
ATTR_SERVER_ID,
ATTR_SERVER_NAME,
ATTRIBUTION,
CONF_MANUAL,
DEFAULT_NAME,
DOMAIN,
ICON,
@@ -32,7 +34,7 @@ async def async_setup_entry(hass, config_entry, async_add_entities):
async_add_entities(entities)
class SpeedtestSensor(Entity):
class SpeedtestSensor(RestoreEntity):
"""Implementation of a speedtest.net sensor."""
def __init__(self, coordinator, sensor_type):
@@ -41,6 +43,7 @@ class SpeedtestSensor(Entity):
self.coordinator = coordinator
self.type = sensor_type
self._unit_of_measurement = SENSOR_TYPES[self.type][1]
self._state = None
@property
def name(self):
@@ -55,14 +58,7 @@ class SpeedtestSensor(Entity):
@property
def state(self):
"""Return the state of the device."""
state = None
if self.type == "ping":
state = self.coordinator.data["ping"]
elif self.type == "download":
state = round(self.coordinator.data["download"] / 10 ** 6, 2)
elif self.type == "upload":
state = round(self.coordinator.data["upload"] / 10 ** 6, 2)
return state
return self._state
@property
def unit_of_measurement(self):
@@ -82,6 +78,8 @@ class SpeedtestSensor(Entity):
@property
def device_state_attributes(self):
"""Return the state attributes."""
if not self.coordinator.data:
return None
attributes = {
ATTR_ATTRIBUTION: ATTRIBUTION,
ATTR_SERVER_NAME: self.coordinator.data["server"]["name"],
@@ -98,10 +96,30 @@ class SpeedtestSensor(Entity):
async def async_added_to_hass(self):
"""Handle entity which will be added."""
await super().async_added_to_hass()
if self.coordinator.config_entry.options[CONF_MANUAL]:
state = await self.async_get_last_state()
if state:
self._state = state.state
self.async_on_remove(
self.coordinator.async_add_listener(self.async_write_ha_state)
)
@callback
def update():
"""Update state."""
self._update_state()
self.async_write_ha_state()
self.async_on_remove(self.coordinator.async_add_listener(update))
self._update_state()
def _update_state(self):
"""Update sensors state."""
if self.coordinator.data:
if self.type == "ping":
self._state = self.coordinator.data["ping"]
elif self.type == "download":
self._state = round(self.coordinator.data["download"] / 10 ** 6, 2)
elif self.type == "upload":
self._state = round(self.coordinator.data["upload"] / 10 ** 6, 2)
async def async_update(self):
"""Request coordinator to update data."""

View File

@@ -3,6 +3,6 @@
"name": "Tesla",
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/tesla",
"requirements": ["teslajsonpy==0.9.0"],
"requirements": ["teslajsonpy==0.9.2"],
"codeowners": ["@zabuldon", "@alandtse"]
}

View File

@@ -3,6 +3,6 @@
"name": "Tile",
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/tile",
"requirements": ["pytile==3.0.6"],
"requirements": ["pytile==4.0.0"],
"codeowners": ["@bachya"]
}

View File

@@ -5,7 +5,7 @@ import requests
from homeassistant.components.binary_sensor import (
DEVICE_CLASS_POWER,
BinarySensorDevice,
BinarySensorEntity,
)
from homeassistant.const import CONF_DEVICE_CLASS, CONF_NAME
@@ -77,7 +77,7 @@ def setup_platform(hass, config, add_entities, discovery_info=None):
)
class ViCareBinarySensor(BinarySensorDevice):
class ViCareBinarySensor(BinarySensorEntity):
"""Representation of a ViCare sensor."""
def __init__(self, name, api, sensor_type):

View File

@@ -450,7 +450,7 @@ WITHINGS_ATTRIBUTES = [
NotifyAppli.BED_IN,
"In bed",
"",
"mdi:bed",
"mdi:hotel",
BINARY_SENSOR_DOMAIN,
True,
UpdateType.WEBHOOK,

View File

@@ -3,7 +3,7 @@
"name": "Xiaomi Miio",
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/xiaomi_miio",
"requirements": ["construct==2.9.45", "python-miio==0.5.1"],
"requirements": ["construct==2.9.45", "python-miio==0.5.2.1"],
"codeowners": ["@rytilahti", "@syssi"],
"zeroconf": ["_miio._udp.local."]
}

View File

@@ -1,7 +1,7 @@
"""Constants used by Home Assistant components."""
MAJOR_VERSION = 0
MINOR_VERSION = 112
PATCH_VERSION = "1"
PATCH_VERSION = "3"
__short_version__ = f"{MAJOR_VERSION}.{MINOR_VERSION}"
__version__ = f"{__short_version__}.{PATCH_VERSION}"
REQUIRED_PYTHON_VER = (3, 7, 0)

View File

@@ -13,7 +13,7 @@ defusedxml==0.6.0
distro==1.5.0
emoji==0.5.4
hass-nabucasa==0.34.7
home-assistant-frontend==20200702.0
home-assistant-frontend==20200702.1
importlib-metadata==1.6.0;python_version<'3.8'
jinja2>=2.11.1
netdisco==2.7.1

View File

@@ -186,7 +186,7 @@ async def _logbook_filtering(hass, last_changed, last_updated):
def yield_events(event):
for _ in range(10 ** 5):
# pylint: disable=protected-access
if logbook._keep_event(hass, event, entities_filter, entity_attr_cache):
if logbook._keep_event(hass, event, entities_filter):
yield event
start = timer()

View File

@@ -374,7 +374,7 @@ bomradarloop==0.1.4
boto3==1.9.252
# homeassistant.components.braviatv
bravia-tv==1.0.5
bravia-tv==1.0.6
# homeassistant.components.broadlink
broadlink==0.14.0
@@ -738,7 +738,7 @@ hole==0.5.1
holidays==0.10.2
# homeassistant.components.frontend
home-assistant-frontend==20200702.0
home-assistant-frontend==20200702.1
# homeassistant.components.zwave
homeassistant-pyozw==0.1.10
@@ -1712,7 +1712,7 @@ python-juicenet==1.0.1
# python-lirc==1.2.3
# homeassistant.components.xiaomi_miio
python-miio==0.5.1
python-miio==0.5.2.1
# homeassistant.components.mpd
python-mpd2==1.0.0
@@ -1778,7 +1778,7 @@ python_opendata_transport==0.2.1
pythonegardia==1.0.40
# homeassistant.components.tile
pytile==3.0.6
pytile==4.0.0
# homeassistant.components.touchline
pytouchline==0.7
@@ -2094,7 +2094,7 @@ temperusb==1.5.3
tesla-powerwall==0.2.11
# homeassistant.components.tesla
teslajsonpy==0.9.0
teslajsonpy==0.9.2
# homeassistant.components.thermoworks_smoke
thermoworks_smoke==0.1.8

View File

@@ -177,7 +177,7 @@ blinkpy==0.15.0
bomradarloop==0.1.4
# homeassistant.components.braviatv
bravia-tv==1.0.5
bravia-tv==1.0.6
# homeassistant.components.broadlink
broadlink==0.14.0
@@ -343,7 +343,7 @@ hole==0.5.1
holidays==0.10.2
# homeassistant.components.frontend
home-assistant-frontend==20200702.0
home-assistant-frontend==20200702.1
# homeassistant.components.zwave
homeassistant-pyozw==0.1.10
@@ -748,7 +748,7 @@ python-izone==1.1.2
python-juicenet==1.0.1
# homeassistant.components.xiaomi_miio
python-miio==0.5.1
python-miio==0.5.2.1
# homeassistant.components.nest
python-nest==4.1.0
@@ -775,7 +775,7 @@ python-velbus==2.0.43
python_awair==0.1.1
# homeassistant.components.tile
pytile==3.0.6
pytile==4.0.0
# homeassistant.components.traccar
pytraccar==0.9.0
@@ -890,7 +890,7 @@ tellduslive==0.10.11
tesla-powerwall==0.2.11
# homeassistant.components.tesla
teslajsonpy==0.9.0
teslajsonpy==0.9.2
# homeassistant.components.toon
toonapi==0.1.0

View File

@@ -23,6 +23,7 @@ TEST_MAC = "ab:cd:ef:gh"
TEST_HOST2 = "5.6.7.8"
TEST_NAME = "Test_Receiver"
TEST_MODEL = "model5"
TEST_IGNORED_MODEL = "HEOS 7"
TEST_RECEIVER_TYPE = "avr-x"
TEST_SERIALNUMBER = "123456789"
TEST_MANUFACTURER = "Denon"
@@ -470,6 +471,27 @@ async def test_config_flow_ssdp_missing_info(hass):
assert result["reason"] == "not_denonavr_missing"
async def test_config_flow_ssdp_ignored_model(hass):
"""
Failed flow initialized by ssdp discovery.
Model in the ignored models list.
"""
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": config_entries.SOURCE_SSDP},
data={
ssdp.ATTR_UPNP_MANUFACTURER: TEST_MANUFACTURER,
ssdp.ATTR_UPNP_MODEL_NAME: TEST_IGNORED_MODEL,
ssdp.ATTR_UPNP_SERIAL: TEST_SERIALNUMBER,
ssdp.ATTR_SSDP_LOCATION: TEST_SSDP_LOCATION,
},
)
assert result["type"] == "abort"
assert result["reason"] == "not_denonavr_manufacturer"
async def test_options_flow(hass):
"""Test specifying non default settings using options flow."""
config_entry = MockConfigEntry(

View File

@@ -20,6 +20,8 @@ from homeassistant.const import (
ATTR_NAME,
CONF_DOMAINS,
CONF_ENTITIES,
CONF_EXCLUDE,
CONF_INCLUDE,
EVENT_HOMEASSISTANT_START,
EVENT_HOMEASSISTANT_STOP,
EVENT_STATE_CHANGED,
@@ -77,7 +79,15 @@ class TestComponentLogbook(unittest.TestCase):
},
True,
)
self.hass.services.call(
logbook.DOMAIN,
"log",
{
logbook.ATTR_NAME: "This entry",
logbook.ATTR_MESSAGE: "has no domain or entity_id",
},
True,
)
# Logbook entry service call results in firing an event.
# Our service call will unblock when the event listeners have been
# scheduled. This means that they may not have been processed yet.
@@ -92,15 +102,21 @@ class TestComponentLogbook(unittest.TestCase):
dt_util.utcnow() + timedelta(hours=1),
)
)
assert len(events) == 1
assert len(events) == 2
assert len(calls) == 2
first_call = calls[-2]
assert first_call.data.get(logbook.ATTR_NAME) == "Alarm"
assert first_call.data.get(logbook.ATTR_MESSAGE) == "is triggered"
assert first_call.data.get(logbook.ATTR_DOMAIN) == "switch"
assert first_call.data.get(logbook.ATTR_ENTITY_ID) == "switch.test_switch"
assert len(calls) == 1
last_call = calls[-1]
assert last_call.data.get(logbook.ATTR_NAME) == "Alarm"
assert last_call.data.get(logbook.ATTR_MESSAGE) == "is triggered"
assert last_call.data.get(logbook.ATTR_DOMAIN) == "switch"
assert last_call.data.get(logbook.ATTR_ENTITY_ID) == "switch.test_switch"
assert last_call.data.get(logbook.ATTR_NAME) == "This entry"
assert last_call.data.get(logbook.ATTR_MESSAGE) == "has no domain or entity_id"
assert last_call.data.get(logbook.ATTR_DOMAIN) == "logbook"
def test_service_call_create_log_book_entry_no_message(self):
"""Test if service call create log book entry without message."""
@@ -149,99 +165,6 @@ class TestComponentLogbook(unittest.TestCase):
entries[1], pointC, "bla", domain="sensor", entity_id=entity_id
)
def test_filter_continuous_sensor_values(self):
"""Test remove continuous sensor events from logbook."""
entity_id = "sensor.bla"
pointA = dt_util.utcnow()
entity_attr_cache = logbook.EntityAttributeCache(self.hass)
attributes = {"unit_of_measurement": "foo"}
eventA = self.create_state_changed_event(pointA, entity_id, 10, attributes)
entities_filter = convert_include_exclude_filter(
logbook.CONFIG_SCHEMA({logbook.DOMAIN: {}})[logbook.DOMAIN]
)
assert (
logbook._keep_event(self.hass, eventA, entities_filter, entity_attr_cache)
is False
)
def test_exclude_new_entities(self):
"""Test if events are excluded on first update."""
entity_id = "sensor.bla"
entity_id2 = "sensor.blu"
pointA = dt_util.utcnow()
pointB = pointA + timedelta(minutes=logbook.GROUP_BY_MINUTES)
entity_attr_cache = logbook.EntityAttributeCache(self.hass)
state_on = ha.State(
entity_id, "on", {"brightness": 200}, pointA, pointA
).as_dict()
eventA = self.create_state_changed_event_from_old_new(
entity_id, pointA, None, state_on
)
eventB = self.create_state_changed_event(pointB, entity_id2, 20)
entities_filter = convert_include_exclude_filter(
logbook.CONFIG_SCHEMA({logbook.DOMAIN: {}})[logbook.DOMAIN]
)
events = [
e
for e in (
MockLazyEventPartialState(EVENT_HOMEASSISTANT_STOP),
eventA,
eventB,
)
if logbook._keep_event(self.hass, e, entities_filter, entity_attr_cache)
]
entries = list(logbook.humanify(self.hass, events, entity_attr_cache))
assert len(entries) == 2
self.assert_entry(
entries[0], name="Home Assistant", message="stopped", domain=ha.DOMAIN
)
self.assert_entry(
entries[1], pointB, "blu", domain="sensor", entity_id=entity_id2
)
def test_exclude_removed_entities(self):
"""Test if events are excluded on last update."""
entity_id = "sensor.bla"
entity_id2 = "sensor.blu"
pointA = dt_util.utcnow()
pointB = pointA + timedelta(minutes=logbook.GROUP_BY_MINUTES)
entity_attr_cache = logbook.EntityAttributeCache(self.hass)
state_on = ha.State(
entity_id, "on", {"brightness": 200}, pointA, pointA
).as_dict()
eventA = self.create_state_changed_event_from_old_new(
None, pointA, state_on, None,
)
eventB = self.create_state_changed_event(pointB, entity_id2, 20)
entities_filter = convert_include_exclude_filter(
logbook.CONFIG_SCHEMA({logbook.DOMAIN: {}})[logbook.DOMAIN]
)
events = [
e
for e in (
MockLazyEventPartialState(EVENT_HOMEASSISTANT_STOP),
eventA,
eventB,
)
if logbook._keep_event(self.hass, e, entities_filter, entity_attr_cache)
]
entries = list(logbook.humanify(self.hass, events, entity_attr_cache))
assert len(entries) == 2
self.assert_entry(
entries[0], name="Home Assistant", message="stopped", domain=ha.DOMAIN
)
self.assert_entry(
entries[1], pointB, "blu", domain="sensor", entity_id=entity_id2
)
def test_exclude_events_entity(self):
"""Test if events are filtered if entity is excluded in config."""
entity_id = "sensor.bla"
@@ -256,7 +179,7 @@ class TestComponentLogbook(unittest.TestCase):
config = logbook.CONFIG_SCHEMA(
{
ha.DOMAIN: {},
logbook.DOMAIN: {logbook.CONF_EXCLUDE: {CONF_ENTITIES: [entity_id]}},
logbook.DOMAIN: {CONF_EXCLUDE: {CONF_ENTITIES: [entity_id]}},
}
)
entities_filter = convert_include_exclude_filter(config[logbook.DOMAIN])
@@ -267,7 +190,7 @@ class TestComponentLogbook(unittest.TestCase):
eventA,
eventB,
)
if logbook._keep_event(self.hass, e, entities_filter, entity_attr_cache)
if logbook._keep_event(self.hass, e, entities_filter)
]
entries = list(logbook.humanify(self.hass, events, entity_attr_cache))
@@ -293,9 +216,7 @@ class TestComponentLogbook(unittest.TestCase):
config = logbook.CONFIG_SCHEMA(
{
ha.DOMAIN: {},
logbook.DOMAIN: {
logbook.CONF_EXCLUDE: {CONF_DOMAINS: ["switch", "alexa"]}
},
logbook.DOMAIN: {CONF_EXCLUDE: {CONF_DOMAINS: ["switch", "alexa"]}},
}
)
entities_filter = convert_include_exclude_filter(config[logbook.DOMAIN])
@@ -307,7 +228,7 @@ class TestComponentLogbook(unittest.TestCase):
eventA,
eventB,
)
if logbook._keep_event(self.hass, e, entities_filter, entity_attr_cache)
if logbook._keep_event(self.hass, e, entities_filter)
]
entries = list(logbook.humanify(self.hass, events, entity_attr_cache))
@@ -337,7 +258,7 @@ class TestComponentLogbook(unittest.TestCase):
{
ha.DOMAIN: {},
logbook.DOMAIN: {
logbook.CONF_EXCLUDE: {
CONF_EXCLUDE: {
CONF_DOMAINS: ["switch", "alexa"],
CONF_ENTITY_GLOBS: "*.excluded",
}
@@ -354,7 +275,7 @@ class TestComponentLogbook(unittest.TestCase):
eventB,
eventC,
)
if logbook._keep_event(self.hass, e, entities_filter, entity_attr_cache)
if logbook._keep_event(self.hass, e, entities_filter)
]
entries = list(logbook.humanify(self.hass, events, entity_attr_cache))
@@ -381,7 +302,7 @@ class TestComponentLogbook(unittest.TestCase):
{
ha.DOMAIN: {},
logbook.DOMAIN: {
logbook.CONF_INCLUDE: {
CONF_INCLUDE: {
CONF_DOMAINS: ["homeassistant"],
CONF_ENTITIES: [entity_id2],
}
@@ -396,7 +317,7 @@ class TestComponentLogbook(unittest.TestCase):
eventA,
eventB,
)
if logbook._keep_event(self.hass, e, entities_filter, entity_attr_cache)
if logbook._keep_event(self.hass, e, entities_filter)
]
entries = list(logbook.humanify(self.hass, events, entity_attr_cache))
@@ -429,9 +350,7 @@ class TestComponentLogbook(unittest.TestCase):
{
ha.DOMAIN: {},
logbook.DOMAIN: {
logbook.CONF_INCLUDE: {
CONF_DOMAINS: ["homeassistant", "sensor", "alexa"]
}
CONF_INCLUDE: {CONF_DOMAINS: ["homeassistant", "sensor", "alexa"]}
},
}
)
@@ -444,7 +363,7 @@ class TestComponentLogbook(unittest.TestCase):
eventA,
eventB,
)
if logbook._keep_event(self.hass, e, entities_filter, entity_attr_cache)
if logbook._keep_event(self.hass, e, entities_filter)
]
entries = list(logbook.humanify(self.hass, events, entity_attr_cache))
@@ -481,7 +400,7 @@ class TestComponentLogbook(unittest.TestCase):
{
ha.DOMAIN: {},
logbook.DOMAIN: {
logbook.CONF_INCLUDE: {
CONF_INCLUDE: {
CONF_DOMAINS: ["homeassistant", "sensor", "alexa"],
CONF_ENTITY_GLOBS: ["*.included"],
}
@@ -498,7 +417,7 @@ class TestComponentLogbook(unittest.TestCase):
eventB,
eventC,
)
if logbook._keep_event(self.hass, e, entities_filter, entity_attr_cache)
if logbook._keep_event(self.hass, e, entities_filter)
]
entries = list(logbook.humanify(self.hass, events, entity_attr_cache))
@@ -533,11 +452,11 @@ class TestComponentLogbook(unittest.TestCase):
{
ha.DOMAIN: {},
logbook.DOMAIN: {
logbook.CONF_INCLUDE: {
CONF_INCLUDE: {
CONF_DOMAINS: ["sensor", "homeassistant"],
CONF_ENTITIES: ["switch.bla"],
},
logbook.CONF_EXCLUDE: {
CONF_EXCLUDE: {
CONF_DOMAINS: ["switch"],
CONF_ENTITIES: ["sensor.bli"],
},
@@ -555,7 +474,7 @@ class TestComponentLogbook(unittest.TestCase):
eventB1,
eventB2,
)
if logbook._keep_event(self.hass, e, entities_filter, entity_attr_cache)
if logbook._keep_event(self.hass, e, entities_filter)
]
entries = list(logbook.humanify(self.hass, events, entity_attr_cache))
@@ -602,12 +521,12 @@ class TestComponentLogbook(unittest.TestCase):
{
ha.DOMAIN: {},
logbook.DOMAIN: {
logbook.CONF_INCLUDE: {
CONF_INCLUDE: {
CONF_DOMAINS: ["sensor", "homeassistant"],
CONF_ENTITIES: ["switch.bla"],
CONF_ENTITY_GLOBS: ["*.included"],
},
logbook.CONF_EXCLUDE: {
CONF_EXCLUDE: {
CONF_DOMAINS: ["switch"],
CONF_ENTITY_GLOBS: ["*.excluded"],
CONF_ENTITIES: ["sensor.bli"],
@@ -629,7 +548,7 @@ class TestComponentLogbook(unittest.TestCase):
eventC2,
eventC3,
)
if logbook._keep_event(self.hass, e, entities_filter, entity_attr_cache)
if logbook._keep_event(self.hass, e, entities_filter)
]
entries = list(logbook.humanify(self.hass, events, entity_attr_cache))
@@ -681,7 +600,7 @@ class TestComponentLogbook(unittest.TestCase):
events = [
e
for e in (eventA, eventB)
if logbook._keep_event(self.hass, e, entities_filter, entity_attr_cache)
if logbook._keep_event(self.hass, e, entities_filter)
]
entries = list(logbook.humanify(self.hass, events, entity_attr_cache))
@@ -1633,10 +1552,7 @@ async def test_exclude_described_event(hass, hass_client):
logbook.DOMAIN,
{
logbook.DOMAIN: {
logbook.CONF_EXCLUDE: {
CONF_DOMAINS: ["sensor"],
CONF_ENTITIES: [entity_id],
}
CONF_EXCLUDE: {CONF_DOMAINS: ["sensor"], CONF_ENTITIES: [entity_id]}
}
},
)
@@ -1806,6 +1722,119 @@ async def test_logbook_entity_filter_with_automations(hass, hass_client):
assert json_dict[0]["entity_id"] == entity_id_second
async def test_filter_continuous_sensor_values(hass, hass_client):
"""Test remove continuous sensor events from logbook."""
await hass.async_add_executor_job(init_recorder_component, hass)
await async_setup_component(hass, "logbook", {})
await hass.async_add_job(hass.data[recorder.DATA_INSTANCE].block_till_done)
entity_id_test = "switch.test"
hass.states.async_set(entity_id_test, STATE_OFF)
hass.states.async_set(entity_id_test, STATE_ON)
entity_id_second = "sensor.bla"
hass.states.async_set(entity_id_second, STATE_OFF, {"unit_of_measurement": "foo"})
hass.states.async_set(entity_id_second, STATE_ON, {"unit_of_measurement": "foo"})
entity_id_third = "light.bla"
hass.states.async_set(entity_id_third, STATE_OFF, {"unit_of_measurement": "foo"})
hass.states.async_set(entity_id_third, STATE_ON, {"unit_of_measurement": "foo"})
await hass.async_add_job(partial(trigger_db_commit, hass))
await hass.async_block_till_done()
await hass.async_add_job(hass.data[recorder.DATA_INSTANCE].block_till_done)
client = await hass_client()
# Today time 00:00:00
start = dt_util.utcnow().date()
start_date = datetime(start.year, start.month, start.day)
# Test today entries without filters
response = await client.get(f"/api/logbook/{start_date.isoformat()}")
assert response.status == 200
response_json = await response.json()
assert len(response_json) == 2
assert response_json[0]["entity_id"] == entity_id_test
assert response_json[1]["entity_id"] == entity_id_third
async def test_exclude_new_entities(hass, hass_client):
"""Test if events are excluded on first update."""
await hass.async_add_executor_job(init_recorder_component, hass)
await async_setup_component(hass, "logbook", {})
await hass.async_add_job(hass.data[recorder.DATA_INSTANCE].block_till_done)
entity_id = "climate.bla"
entity_id2 = "climate.blu"
hass.states.async_set(entity_id, STATE_OFF)
hass.states.async_set(entity_id2, STATE_ON)
hass.states.async_set(entity_id2, STATE_OFF)
hass.bus.async_fire(EVENT_HOMEASSISTANT_START)
await hass.async_add_job(partial(trigger_db_commit, hass))
await hass.async_block_till_done()
await hass.async_add_job(hass.data[recorder.DATA_INSTANCE].block_till_done)
client = await hass_client()
# Today time 00:00:00
start = dt_util.utcnow().date()
start_date = datetime(start.year, start.month, start.day)
# Test today entries without filters
response = await client.get(f"/api/logbook/{start_date.isoformat()}")
assert response.status == 200
response_json = await response.json()
assert len(response_json) == 2
assert response_json[0]["entity_id"] == entity_id2
assert response_json[1]["domain"] == "homeassistant"
assert response_json[1]["message"] == "started"
async def test_exclude_removed_entities(hass, hass_client):
"""Test if events are excluded on last update."""
await hass.async_add_executor_job(init_recorder_component, hass)
await async_setup_component(hass, "logbook", {})
await hass.async_add_job(hass.data[recorder.DATA_INSTANCE].block_till_done)
entity_id = "climate.bla"
entity_id2 = "climate.blu"
hass.states.async_set(entity_id, STATE_ON)
hass.states.async_set(entity_id, STATE_OFF)
hass.bus.async_fire(EVENT_HOMEASSISTANT_START)
hass.states.async_set(entity_id2, STATE_ON)
hass.states.async_set(entity_id2, STATE_OFF)
hass.states.async_remove(entity_id)
hass.states.async_remove(entity_id2)
await hass.async_add_job(partial(trigger_db_commit, hass))
await hass.async_block_till_done()
await hass.async_add_job(hass.data[recorder.DATA_INSTANCE].block_till_done)
client = await hass_client()
# Today time 00:00:00
start = dt_util.utcnow().date()
start_date = datetime(start.year, start.month, start.day)
# Test today entries without filters
response = await client.get(f"/api/logbook/{start_date.isoformat()}")
assert response.status == 200
response_json = await response.json()
assert len(response_json) == 3
assert response_json[0]["entity_id"] == entity_id
assert response_json[1]["domain"] == "homeassistant"
assert response_json[1]["message"] == "started"
assert response_json[2]["entity_id"] == entity_id2
class MockLazyEventPartialState(ha.Event):
"""Minimal mock of a Lazy event."""

View File

@@ -229,6 +229,10 @@ class MockPlexClient:
"""Mock the version attribute."""
return "1.0"
def proxyThroughServer(self, value=True, server=None):
"""Mock the proxyThroughServer method."""
pass
def playMedia(self, item):
"""Mock the playMedia method."""
pass

View File

@@ -16,7 +16,7 @@ from homeassistant.components.recorder import (
from homeassistant.components.recorder.const import DATA_INSTANCE
from homeassistant.components.recorder.models import Events, RecorderRuns, States
from homeassistant.components.recorder.util import session_scope
from homeassistant.const import MATCH_ALL
from homeassistant.const import MATCH_ALL, STATE_LOCKED, STATE_UNLOCKED
from homeassistant.core import ATTR_NOW, EVENT_TIME_CHANGED, Context, callback
from homeassistant.setup import async_setup_component
from homeassistant.util import dt as dt_util
@@ -261,6 +261,27 @@ def test_saving_state_include_domain_glob_exclude_entity(hass_recorder):
assert _state_empty_context(hass, "test.ok").state == "state2"
def test_saving_state_and_removing_entity(hass, hass_recorder):
"""Test saving the state of a removed entity."""
hass = hass_recorder()
entity_id = "lock.mine"
hass.states.set(entity_id, STATE_LOCKED)
hass.states.set(entity_id, STATE_UNLOCKED)
hass.states.async_remove(entity_id)
wait_recording_done(hass)
with session_scope(hass=hass) as session:
states = list(session.query(States))
assert len(states) == 3
assert states[0].entity_id == entity_id
assert states[0].state == STATE_LOCKED
assert states[1].entity_id == entity_id
assert states[1].state == STATE_UNLOCKED
assert states[2].entity_id == entity_id
assert states[2].state is None
def test_recorder_setup_failure():
"""Test some exceptions."""
hass = get_test_home_assistant()