Compare commits


18 Commits

Author SHA1 Message Date
Paulus Schoutsen
4c052643ca Bumped version to 0.112.0b3 2020-06-30 00:25:44 +00:00
Paulus Schoutsen
b7a071b23f Updated frontend to 20200629.0 (#37240) 2020-06-30 00:25:37 +00:00
David F. Mulcahey
f0a8e8ea04 Bump ZHA Quirks to 0.0.41 (#37235) 2020-06-30 00:25:36 +00:00
Aaron Bach
caf306799b Fix Tile location accuracy bug (#37233)
Co-authored-by: Paulus Schoutsen <paulus@home-assistant.io>
2020-06-30 00:25:35 +00:00
definitio
d9a2cc93ba Fixes after PR #36479 (#37230) 2020-06-30 00:25:34 +00:00
Franck Nijhof
dbdd4f0e39 Ensure recorder data integrity and MySQL lock error handling (#37228) 2020-06-30 00:25:34 +00:00
MatthewFlamm
edc44230b4 Fix wind speed change in NWS (#37222) 2020-06-30 00:25:33 +00:00
Rami Mosleh
4d7a468c0e Fix updating ping sensor (#37220) 2020-06-30 00:25:32 +00:00
Aaron Bach
a06595c08d Fix bug where Tile session would expire (#37185) 2020-06-30 00:25:32 +00:00
Alan Tse
ff13b4c6b3 Bump teslajsonpy to 0.9.0 (#37162) 2020-06-30 00:25:31 +00:00
Tom Harris
8a755e790f Fix issue with Insteon devices not responding to device changes (#37160) 2020-06-30 00:25:30 +00:00
Paulus Schoutsen
6a6dfdff4d Bumped version to 0.112.0b2 2020-06-26 21:26:42 +00:00
Paulus Schoutsen
b9c233f013 Fix OwnTracks race condition (#37152) 2020-06-26 21:26:37 +00:00
Paulus Schoutsen
7418011d6d Fix speedtest blowing up (#37151) 2020-06-26 21:26:36 +00:00
Paulus Schoutsen
3a6a439c02 Updated frontend to 20200626.1 (#37150) 2020-06-26 21:26:35 +00:00
jjlawren
34c4dc2e80 Catch additional exception for Plex account login failures (#37143) 2020-06-26 21:26:34 +00:00
J. Nick Koston
56853787e7 Fix repack when using pymysql (#37142) 2020-06-26 21:26:34 +00:00
Franck Nijhof
976cbdd2aa Fix recorder purging by batch processing purges (#37140) 2020-06-26 21:26:33 +00:00
23 changed files with 154 additions and 74 deletions

View File

@@ -2,7 +2,9 @@
   "domain": "frontend",
   "name": "Home Assistant Frontend",
   "documentation": "https://www.home-assistant.io/integrations/frontend",
-  "requirements": ["home-assistant-frontend==20200626.0"],
+  "requirements": [
+    "home-assistant-frontend==20200629.0"
+  ],
   "dependencies": [
     "api",
     "auth",
@@ -15,6 +17,8 @@
     "system_log",
     "websocket_api"
   ],
-  "codeowners": ["@home-assistant/frontend"],
+  "codeowners": [
+    "@home-assistant/frontend"
+  ],
   "quality_scale": "internal"
 }

View File

@@ -2,6 +2,6 @@
   "domain": "insteon",
   "name": "Insteon",
   "documentation": "https://www.home-assistant.io/integrations/insteon",
-  "requirements": ["pyinsteon==1.0.4"],
+  "requirements": ["pyinsteon==1.0.5"],
   "codeowners": ["@teharris1"]
 }

View File

@@ -994,6 +994,9 @@ class MqttAvailability(Entity):
         await self._availability_subscribe_topics()
         async_dispatcher_connect(self.hass, MQTT_CONNECTED, self.async_mqtt_connect)
         async_dispatcher_connect(self.hass, MQTT_DISCONNECTED, self.async_mqtt_connect)
+        self.async_on_remove(
+            async_dispatcher_connect(self.hass, MQTT_CONNECTED, self.async_mqtt_connect)
+        )
 
     async def availability_discovery_update(self, config: dict):
         """Handle updated discovery message."""
@@ -1029,7 +1032,8 @@ class MqttAvailability(Entity):
     @callback
     def async_mqtt_connect(self):
         """Update state on connection/disconnection to MQTT broker."""
-        self.async_write_ha_state()
+        if self.hass.is_running:
+            self.async_write_ha_state()
 
     async def async_will_remove_from_hass(self):
         """Unsubscribe when removed."""

View File

@@ -190,17 +190,16 @@ class NWSWeather(WeatherEntity):
     @property
     def wind_speed(self):
         """Return the current windspeed."""
-        wind_m_s = None
+        wind_km_hr = None
         if self.observation:
-            wind_m_s = self.observation.get("windSpeed")
-        if wind_m_s is None:
+            wind_km_hr = self.observation.get("windSpeed")
+        if wind_km_hr is None:
             return None
-        wind_m_hr = wind_m_s * 3600
         if self.is_metric:
-            wind = convert_distance(wind_m_hr, LENGTH_METERS, LENGTH_KILOMETERS)
+            wind = wind_km_hr
         else:
-            wind = convert_distance(wind_m_hr, LENGTH_METERS, LENGTH_MILES)
+            wind = convert_distance(wind_km_hr, LENGTH_KILOMETERS, LENGTH_MILES)
         return round(wind)
 
     @property
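The bug was a unit mix-up: the observation's windSpeed field is already km/h, but the old code treated it as m/s, scaled it by 3600, and converted meters to miles, inflating readings by a factor of 3.6. A quick arithmetic check of the two paths, with the km-to-mile factor hard-coded for the example:

    # 10 km/h as reported by the API
    wrong_mph = (10 * 3600) / 1609.344  # old path: treats 10 km/h as 10 m/s
    right_mph = 10 / 1.609344           # new path: km/h converted directly
    print(round(wrong_mph), round(right_mph))  # 22 6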

View File

@@ -24,6 +24,19 @@ _LOGGER = logging.getLogger(__name__)
 
 async def async_setup_entry(hass, entry, async_add_entities):
     """Set up OwnTracks based off an entry."""
+    # Restore previously loaded devices
+    dev_reg = await device_registry.async_get_registry(hass)
+    dev_ids = {
+        identifier[1]
+        for device in dev_reg.devices.values()
+        for identifier in device.identifiers
+        if identifier[0] == OT_DOMAIN
+    }
+
+    entities = []
+    for dev_id in dev_ids:
+        entity = hass.data[OT_DOMAIN]["devices"][dev_id] = OwnTracksEntity(dev_id)
+        entities.append(entity)
+
     @callback
     def _receive_data(dev_id, **data):
@@ -39,24 +52,8 @@ async def async_setup_entry(hass, entry, async_add_entities):
 
     hass.data[OT_DOMAIN]["context"].set_async_see(_receive_data)
 
-    # Restore previously loaded devices
-    dev_reg = await device_registry.async_get_registry(hass)
-    dev_ids = {
-        identifier[1]
-        for device in dev_reg.devices.values()
-        for identifier in device.identifiers
-        if identifier[0] == OT_DOMAIN
-    }
-    if not dev_ids:
-        return True
-
-    entities = []
-    for dev_id in dev_ids:
-        entity = hass.data[OT_DOMAIN]["devices"][dev_id] = OwnTracksEntity(dev_id)
-        entities.append(entity)
-
-    async_add_entities(entities)
+    if entities:
+        async_add_entities(entities)
 
     return True
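The race: devices from the registry used to be restored only after the _receive_data hook was wired up, so a message arriving in that window could create a second entity for an already-known dev_id. Moving the restore ahead of the hook closes the gap. A stripped-down illustration of the ordering, with plain dicts standing in for the registry and entity objects:

    devices = {}

    def receive_data(dev_id):
        """Message hook: creates an entity only for unknown device ids."""
        if dev_id not in devices:
            devices[dev_id] = f"new-entity-{dev_id}"

    # Correct order: restore known devices first...
    for dev_id in ("phone_a", "phone_b"):  # as read from the device registry
        devices[dev_id] = f"restored-entity-{dev_id}"

    # ...then start accepting messages; a known id no longer spawns a duplicate.
    receive_data("phone_a")
    assert devices["phone_a"] == "restored-entity-phone_a"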

View File

@@ -164,7 +164,7 @@ async def async_setup_entry(hass, entry):
     def get_plex_account(plex_server):
         try:
             return plex_server.account
-        except plexapi.exceptions.Unauthorized:
+        except (plexapi.exceptions.BadRequest, plexapi.exceptions.Unauthorized):
             return None
 
     plex_account = await hass.async_add_executor_job(get_plex_account, plex_server)

View File

@@ -4,7 +4,7 @@ import ssl
 import time
 from urllib.parse import urlparse
 
-from plexapi.exceptions import NotFound, Unauthorized
+from plexapi.exceptions import BadRequest, NotFound, Unauthorized
 import plexapi.myplex
 import plexapi.playqueue
 import plexapi.server
@@ -98,7 +98,7 @@ class PlexServer:
         if not self._plex_account and self._use_plex_tv:
             try:
                 self._plex_account = plexapi.myplex.MyPlexAccount(token=self._token)
-            except Unauthorized:
+            except (BadRequest, Unauthorized):
                 self._use_plex_tv = False
                 _LOGGER.error("Not authorized to access plex.tv with provided token")
                 raise
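Both Plex hunks widen the same except clause: plexapi can surface a failed plex.tv login as BadRequest rather than Unauthorized, and the uncaught variant previously broke setup. The shape of the fix in isolation (the exception classes here are stand-ins mirroring plexapi.exceptions):

    class BadRequest(Exception): ...
    class Unauthorized(Exception): ...

    def login(token):
        # Simulate a login failure surfaced as a 400 instead of a 401
        raise BadRequest("token rejected")

    try:
        account = login("abc123")
    except (BadRequest, Unauthorized):
        account = None  # both now mean "no plex.tv account available"
    print(account)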

View File

@@ -335,7 +335,7 @@ class Recorder(threading.Thread):
         self.event_session = self.get_session()
         # Use a session for the event read loop
         # with a commit every time the event time
         # has changed. This reduces the disk io.
         while True:
             event = self.queue.get()
             if event is None:
@@ -344,7 +344,9 @@ class Recorder(threading.Thread):
                 self.queue.task_done()
                 return
             if isinstance(event, PurgeTask):
-                purge.purge_old_data(self, event.keep_days, event.repack)
+                # Schedule a new purge task if this one didn't finish
+                if not purge.purge_old_data(self, event.keep_days, event.repack):
+                    self.queue.put(PurgeTask(event.keep_days, event.repack))
                 self.queue.task_done()
                 continue
             if event.event_type == EVENT_TIME_CHANGED:
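Rather than purging everything in one long transaction, the recorder now asks purge_old_data for a bounded chunk and requeues the task until it reports completion. A self-contained sketch of that requeue pattern; the task and queue shapes only loosely mirror the recorder loop:

    from queue import Queue

    def purge_step(task):
        """Do one bounded chunk of work; return True when nothing is left."""
        task["remaining"] -= 1
        return task["remaining"] <= 0

    queue = Queue()
    queue.put({"remaining": 3})

    while not queue.empty():
        task = queue.get()
        if not purge_step(task):
            queue.put(task)  # not finished: schedule another pass
        queue.task_done()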

View File

@@ -64,7 +64,7 @@ class Events(Base):  # type: ignore
             context_parent_id=event.context.parent_id,
         )
 
-    def to_native(self):
+    def to_native(self, validate_entity_id=True):
         """Convert to a natve HA Event."""
         context = Context(
             id=self.context_id,
@@ -183,7 +183,7 @@ class RecorderRuns(Base):  # type: ignore
         return [row[0] for row in query]
 
-    def to_native(self):
+    def to_native(self, validate_entity_id=True):
         """Return self, native format is this model."""
         return self

View File

@@ -1,38 +1,68 @@
 """Purge old data helper."""
 from datetime import timedelta
 import logging
+import time
 
-from sqlalchemy.exc import SQLAlchemyError
+from sqlalchemy.exc import OperationalError, SQLAlchemyError
 
 import homeassistant.util.dt as dt_util
 
 from .models import Events, RecorderRuns, States
-from .util import session_scope
+from .util import execute, session_scope
 
 _LOGGER = logging.getLogger(__name__)
 
 
-def purge_old_data(instance, purge_days, repack):
-    """Purge events and states older than purge_days ago."""
+def purge_old_data(instance, purge_days: int, repack: bool) -> bool:
+    """Purge events and states older than purge_days ago.
+
+    Cleans up an timeframe of an hour, based on the oldest record.
+    """
     purge_before = dt_util.utcnow() - timedelta(days=purge_days)
-    _LOGGER.debug("Purging events before %s", purge_before)
+    _LOGGER.debug("Purging states and events before target %s", purge_before)
 
     try:
         with session_scope(session=instance.get_session()) as session:
+            # Purge a max of 1 hour, based on the oldest states or events record
+            batch_purge_before = purge_before
+
+            query = session.query(States).order_by(States.last_updated.asc()).limit(1)
+            states = execute(query, to_native=True, validate_entity_ids=False)
+            if states:
+                batch_purge_before = min(
+                    batch_purge_before, states[0].last_updated + timedelta(hours=1),
+                )
+
+            query = session.query(Events).order_by(Events.time_fired.asc()).limit(1)
+            events = execute(query, to_native=True)
+            if events:
+                batch_purge_before = min(
+                    batch_purge_before, events[0].time_fired + timedelta(hours=1),
+                )
+
+            _LOGGER.debug("Purging states and events before %s", batch_purge_before)
+
             deleted_rows = (
                 session.query(States)
-                .filter(States.last_updated < purge_before)
+                .filter(States.last_updated < batch_purge_before)
                 .delete(synchronize_session=False)
             )
             _LOGGER.debug("Deleted %s states", deleted_rows)
 
             deleted_rows = (
                 session.query(Events)
-                .filter(Events.time_fired < purge_before)
+                .filter(Events.time_fired < batch_purge_before)
                 .delete(synchronize_session=False)
             )
             _LOGGER.debug("Deleted %s events", deleted_rows)
 
+            # If states or events purging isn't processing the purge_before yet,
+            # return false, as we are not done yet.
+            if batch_purge_before != purge_before:
+                _LOGGER.debug("Purging hasn't fully completed yet.")
+                return False
+
+            # Recorder runs is small, no need to batch run it
             deleted_rows = (
                 session.query(RecorderRuns)
                 .filter(RecorderRuns.start < purge_before)
@@ -46,9 +76,25 @@ def purge_old_data(instance, purge_days, repack):
             _LOGGER.debug("Vacuuming SQL DB to free space")
             instance.engine.execute("VACUUM")
         # Optimize mysql / mariadb tables to free up space on disk
-        elif instance.engine.driver == "mysqldb":
+        elif instance.engine.driver in ("mysqldb", "pymysql"):
             _LOGGER.debug("Optimizing SQL DB to free space")
             instance.engine.execute("OPTIMIZE TABLE states, events, recorder_runs")
+    except OperationalError as err:
+        # Retry when one of the following MySQL errors occurred:
+        # 1205: Lock wait timeout exceeded; try restarting transaction
+        # 1206: The total number of locks exceeds the lock table size
+        # 1213: Deadlock found when trying to get lock; try restarting transaction
+        if instance.engine.driver in ("mysqldb", "pymysql") and err.orig.args[0] in (
+            1205,
+            1206,
+            1213,
+        ):
+            _LOGGER.info("%s; purge not completed, retrying", err.orig.args[1])
+            time.sleep(instance.db_retry_wait)
+            return False
+
+        _LOGGER.warning("Error purging history: %s.", err)
     except SQLAlchemyError as err:
         _LOGGER.warning("Error purging history: %s.", err)
+
+    return True
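The heart of the change is the batch window: the delete cutoff is clamped to at most one hour past the oldest stored row, so a single pass never deletes months of history in one transaction, and the function returns False until the cutoff reaches the real target. The window computation reduced to a pure function, with plain datetimes standing in for the States/Events queries:

    from datetime import datetime, timedelta

    def batch_cutoff(purge_before, oldest_state, oldest_event):
        """Clamp the cutoff to at most 1 hour past the oldest stored row."""
        cutoff = purge_before
        for oldest in (oldest_state, oldest_event):
            if oldest is not None:
                cutoff = min(cutoff, oldest + timedelta(hours=1))
        return cutoff

    now = datetime(2020, 6, 30)
    target = now - timedelta(days=10)
    oldest = now - timedelta(days=90)
    # The first pass only covers one hour of 90-day-old rows; the caller keeps
    # requeueing (on a False return) until the cutoff reaches the target.
    assert batch_cutoff(target, oldest, None) == oldest + timedelta(hours=1)
    assert batch_cutoff(target, None, None) == target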

View File

@@ -129,11 +129,10 @@ class SpeedTestDataCoordinator(DataUpdateCoordinator):
             server_id = self.config_entry.options.get(CONF_SERVER_ID)
             self.api.closest.clear()
             self.api.get_servers(servers=[server_id])
-            self.api.get_best_server()
 
         _LOGGER.debug(
             "Executing speedtest.net speed test with server_id: %s", self.api.best["id"]
         )
+        self.api.get_best_server()
         self.api.download()
         self.api.upload()
         return self.api.results.dict()
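The fix hoists get_best_server() out of the fixed-server branch so it runs on every update, whether or not a server_id is configured. The overall flow against the speedtest-cli package the integration wraps, sketched from its public API (a real network test if actually run):

    import speedtest  # pip install speedtest-cli

    api = speedtest.Speedtest()
    api.get_servers()      # or get_servers(servers=[server_id]) for a fixed server
    api.get_best_server()  # always choose a server to test against
    print("testing against server", api.best["id"])
    api.download()
    api.upload()
    print(api.results.dict()["download"])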

View File

@@ -94,7 +94,7 @@ class SpeedTestOptionsFlowHandler(config_entries.OptionsFlow):
             for (key, value) in self._servers.items()
             if value.get("id") == self.config_entry.options[CONF_SERVER_ID]
         ]
-        server_name = server[0]
+        server_name = server[0] if server else ""
 
         options = {
             vol.Optional(
View File

@@ -3,6 +3,6 @@
   "name": "Tesla",
   "config_flow": true,
   "documentation": "https://www.home-assistant.io/integrations/tesla",
-  "requirements": ["teslajsonpy==0.8.1"],
+  "requirements": ["teslajsonpy==0.9.0"],
   "codeowners": ["@zabuldon", "@alandtse"]
 }

View File

@@ -3,7 +3,7 @@ import asyncio
 from datetime import timedelta
 
 from pytile import async_login
-from pytile.errors import TileError
+from pytile.errors import SessionExpiredError, TileError
 
 from homeassistant.const import ATTR_ATTRIBUTION, CONF_PASSWORD, CONF_USERNAME
 from homeassistant.core import callback
@@ -44,6 +44,9 @@ async def async_setup_entry(hass, config_entry):
         """Get new data from the API."""
         try:
             return await client.tiles.all()
+        except SessionExpiredError:
+            LOGGER.info("Tile session expired; creating a new one")
+            await client.async_init()
+        except TileError as err:
-        except TileError as err:
             raise UpdateFailed(f"Error while retrieving data: {err}")
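The handler treats an expired session as recoverable: re-initialize the client and let the next scheduled update fetch data normally, instead of failing the update. The recover-and-continue shape of that handler; the exceptions and client below are stand-ins, not the real pytile API:

    import asyncio

    class SessionExpiredError(Exception): ...
    class TileError(Exception): ...

    class FakeClient:
        def __init__(self):
            self.expired = True

        async def async_init(self):
            self.expired = False  # pretend we re-authenticated

        async def tiles_all(self):
            if self.expired:
                raise SessionExpiredError
            return {"tile-1": {}}

    async def fetch(client):
        try:
            return await client.tiles_all()
        except SessionExpiredError:
            await client.async_init()  # renewed; the next poll succeeds
        except TileError as err:
            raise RuntimeError(f"Error while retrieving data: {err}")

    client = FakeClient()
    print(asyncio.run(fetch(client)))  # None: session renewed, no data this cycle
    print(asyncio.run(fetch(client)))  # {'tile-1': {}}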

View File

@@ -84,13 +84,26 @@ class TileDeviceTracker(TileEntity, TrackerEntity):
 
         Value in meters.
         """
-        return round(
-            (
-                self._tile["last_tile_state"]["h_accuracy"]
-                + self._tile["last_tile_state"]["v_accuracy"]
-            )
-            / 2
-        )
+        state = self._tile["last_tile_state"]
+        h_accuracy = state.get("h_accuracy")
+        v_accuracy = state.get("v_accuracy")
+
+        if h_accuracy is not None and v_accuracy is not None:
+            return round(
+                (
+                    self._tile["last_tile_state"]["h_accuracy"]
+                    + self._tile["last_tile_state"]["v_accuracy"]
+                )
+                / 2
+            )
+
+        if h_accuracy is not None:
+            return h_accuracy
+
+        if v_accuracy is not None:
+            return v_accuracy
+
+        return None
 
     @property
     def latitude(self) -> float:
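The accuracy fallback distilled to a pure function: average when both readings exist, otherwise use whichever is present, and return None only when both are missing:

    def location_accuracy(h_accuracy, v_accuracy):
        """Average both accuracies when present, else use whichever exists."""
        if h_accuracy is not None and v_accuracy is not None:
            return round((h_accuracy + v_accuracy) / 2)
        if h_accuracy is not None:
            return h_accuracy
        if v_accuracy is not None:
            return v_accuracy
        return None

    assert location_accuracy(10, 20) == 15
    assert location_accuracy(10, None) == 10
    assert location_accuracy(None, None) is None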

View File

@@ -6,7 +6,7 @@
   "requirements": [
     "bellows==0.17.0",
     "pyserial==3.4",
-    "zha-quirks==0.0.40",
+    "zha-quirks==0.0.41",
     "zigpy-cc==0.4.4",
     "zigpy-deconz==0.9.2",
     "zigpy==0.21.0",

View File

@@ -1,7 +1,7 @@
 """Constants used by Home Assistant components."""
 MAJOR_VERSION = 0
 MINOR_VERSION = 112
-PATCH_VERSION = "0b1"
+PATCH_VERSION = "0b3"
 __short_version__ = f"{MAJOR_VERSION}.{MINOR_VERSION}"
 __version__ = f"{__short_version__}.{PATCH_VERSION}"
 REQUIRED_PYTHON_VER = (3, 7, 0)

View File

@@ -13,7 +13,7 @@ defusedxml==0.6.0
 distro==1.5.0
 emoji==0.5.4
 hass-nabucasa==0.34.7
-home-assistant-frontend==20200626.0
+home-assistant-frontend==20200629.0
 importlib-metadata==1.6.0;python_version<'3.8'
 jinja2>=2.11.1
 netdisco==2.7.1

View File

@@ -738,7 +738,7 @@ hole==0.5.1
 holidays==0.10.2
 
 # homeassistant.components.frontend
-home-assistant-frontend==20200626.0
+home-assistant-frontend==20200629.0
 
 # homeassistant.components.zwave
 homeassistant-pyozw==0.1.10
@@ -1388,7 +1388,7 @@ pyialarm==0.3
 pyicloud==0.9.7
 
 # homeassistant.components.insteon
-pyinsteon==1.0.4
+pyinsteon==1.0.5
 
 # homeassistant.components.intesishome
 pyintesishome==1.7.5
@@ -2094,7 +2094,7 @@ temperusb==1.5.3
 tesla-powerwall==0.2.11
 
 # homeassistant.components.tesla
-teslajsonpy==0.8.1
+teslajsonpy==0.9.0
 
 # homeassistant.components.thermoworks_smoke
 thermoworks_smoke==0.1.8
@@ -2255,7 +2255,7 @@ zengge==0.2
 zeroconf==0.27.1
 
 # homeassistant.components.zha
-zha-quirks==0.0.40
+zha-quirks==0.0.41
 
 # homeassistant.components.zhong_hong
 zhong_hong_hvac==1.0.9

View File

@@ -343,7 +343,7 @@ hole==0.5.1
 holidays==0.10.2
 
 # homeassistant.components.frontend
-home-assistant-frontend==20200626.0
+home-assistant-frontend==20200629.0
 
 # homeassistant.components.zwave
 homeassistant-pyozw==0.1.10
@@ -890,7 +890,7 @@ tellduslive==0.10.11
 tesla-powerwall==0.2.11
 
 # homeassistant.components.tesla
-teslajsonpy==0.8.1
+teslajsonpy==0.9.0
 
 # homeassistant.components.toon
 toonapi==0.1.0
@@ -961,7 +961,7 @@ ya_ma==0.3.8
 zeroconf==0.27.1
 
 # homeassistant.components.zha
-zha-quirks==0.0.40
+zha-quirks==0.0.41
 
 # homeassistant.components.zha
 zigpy-cc==0.4.4

View File

@@ -60,7 +60,7 @@ EXPECTED_OBSERVATION_IMPERIAL = {
     ),
     ATTR_WEATHER_WIND_BEARING: 180,
     ATTR_WEATHER_WIND_SPEED: round(
-        convert_distance(10, LENGTH_METERS, LENGTH_MILES) * 3600
+        convert_distance(10, LENGTH_KILOMETERS, LENGTH_MILES)
     ),
     ATTR_WEATHER_PRESSURE: round(
         convert_pressure(100000, PRESSURE_PA, PRESSURE_INHG), 2
@@ -74,9 +74,7 @@ EXPECTED_OBSERVATION_IMPERIAL = {
 EXPECTED_OBSERVATION_METRIC = {
     ATTR_WEATHER_TEMPERATURE: 10,
     ATTR_WEATHER_WIND_BEARING: 180,
-    ATTR_WEATHER_WIND_SPEED: round(
-        convert_distance(10, LENGTH_METERS, LENGTH_KILOMETERS) * 3600
-    ),
+    ATTR_WEATHER_WIND_SPEED: 10,
     ATTR_WEATHER_PRESSURE: round(convert_pressure(100000, PRESSURE_PA, PRESSURE_HPA)),
     ATTR_WEATHER_VISIBILITY: round(
         convert_distance(10000, LENGTH_METERS, LENGTH_KILOMETERS)

View File

@@ -317,7 +317,7 @@ def test_auto_purge(hass_recorder):
     test_time = tz.localize(datetime(2020, 1, 1, 4, 12, 0))
 
     with patch(
-        "homeassistant.components.recorder.purge.purge_old_data"
+        "homeassistant.components.recorder.purge.purge_old_data", return_value=True
     ) as purge_old_data:
         for delta in (-1, 0, 1):
             hass.bus.fire(

View File

@@ -130,9 +130,16 @@ class TestRecorderPurge(unittest.TestCase):
             assert states.count() == 6
 
         # run purge_old_data()
-        purge_old_data(self.hass.data[DATA_INSTANCE], 4, repack=False)
+        finished = purge_old_data(self.hass.data[DATA_INSTANCE], 4, repack=False)
+        assert not finished
+        assert states.count() == 4
 
-        # we should only have 2 states left after purging
+        finished = purge_old_data(self.hass.data[DATA_INSTANCE], 4, repack=False)
+        assert not finished
+        assert states.count() == 2
+
+        finished = purge_old_data(self.hass.data[DATA_INSTANCE], 4, repack=False)
+        assert finished
         assert states.count() == 2
 
     def test_purge_old_events(self):
@@ -144,9 +151,17 @@ class TestRecorderPurge(unittest.TestCase):
             assert events.count() == 6
 
         # run purge_old_data()
-        purge_old_data(self.hass.data[DATA_INSTANCE], 4, repack=False)
+        finished = purge_old_data(self.hass.data[DATA_INSTANCE], 4, repack=False)
+        assert not finished
+        assert events.count() == 4
+
+        finished = purge_old_data(self.hass.data[DATA_INSTANCE], 4, repack=False)
+        assert not finished
+        assert events.count() == 2
 
         # we should only have 2 events left
+        finished = purge_old_data(self.hass.data[DATA_INSTANCE], 4, repack=False)
+        assert finished
         assert events.count() == 2
 
     def test_purge_method(self):
@@ -209,6 +224,6 @@ class TestRecorderPurge(unittest.TestCase):
             self.hass.block_till_done()
             self.hass.data[DATA_INSTANCE].block_till_done()
             assert (
-                mock_logger.debug.mock_calls[4][1][0]
+                mock_logger.debug.mock_calls[5][1][0]
                 == "Vacuuming SQL DB to free space"
            )