Fix bugs with AirVisual auto-leveling API (#36097)

* Fix bugs with AirVisual auto-leveling API

* Code review

* Code review
Aaron Bach 2020-05-26 01:00:05 -06:00 committed by GitHub
parent 67a9622209
commit 599d3ae930
2 changed files with 52 additions and 29 deletions

homeassistant/components/airvisual/__init__.py

@@ -38,7 +38,7 @@ from .const import (
 PLATFORMS = ["air_quality", "sensor"]
 
 DEFAULT_ATTRIBUTION = "Data provided by AirVisual"
-DEFAULT_NODE_PRO_SCAN_INTERVAL = timedelta(minutes=1)
+DEFAULT_NODE_PRO_UPDATE_INTERVAL = timedelta(minutes=1)
 DEFAULT_OPTIONS = {CONF_SHOW_ON_MAP: True}
 
 GEOGRAPHY_COORDINATES_SCHEMA = vol.Schema(
@@ -95,27 +95,43 @@ def async_get_cloud_api_update_interval(hass, api_key):
     This will shift based on the number of active consumers, thus keeping the user
     under the monthly API limit.
     """
-    num_consumers = len(
-        {
-            config_entry
-            for config_entry in hass.config_entries.async_entries(DOMAIN)
-            if config_entry.data.get(CONF_API_KEY) == api_key
-        }
-    )
+    num_consumers = len(async_get_cloud_coordinators_by_api_key(hass, api_key))
 
     # Assuming 10,000 calls per month and a "smallest possible month" of 28 days; note
     # that we give a buffer of 1500 API calls for any drift, restarts, etc.:
     minutes_between_api_calls = ceil(1 / (8500 / 28 / 24 / 60 / num_consumers))
+
+    LOGGER.debug(
+        "Leveling API key usage (%s): %s consumers, %s minutes between updates",
+        api_key,
+        num_consumers,
+        minutes_between_api_calls,
+    )
+
     return timedelta(minutes=minutes_between_api_calls)
 
 
 @callback
-def async_reset_coordinator_update_intervals(hass, update_interval):
-    """Update any existing data coordinators with a new update interval."""
-    if not hass.data[DOMAIN][DATA_COORDINATOR]:
-        return
-
-    for coordinator in hass.data[DOMAIN][DATA_COORDINATOR].values():
+def async_get_cloud_coordinators_by_api_key(hass, api_key):
+    """Get all DataUpdateCoordinator objects related to a particular API key."""
+    coordinators = []
+    for entry_id, coordinator in hass.data[DOMAIN][DATA_COORDINATOR].items():
+        config_entry = hass.config_entries.async_get_entry(entry_id)
+        if config_entry.data.get(CONF_API_KEY) == api_key:
+            coordinators.append(coordinator)
+    return coordinators
+
+
+@callback
+def async_sync_geo_coordinator_update_intervals(hass, api_key):
+    """Sync the update interval for geography-based data coordinators (by API key)."""
+    update_interval = async_get_cloud_api_update_interval(hass, api_key)
+
+    for coordinator in async_get_cloud_coordinators_by_api_key(hass, api_key):
+        LOGGER.debug(
+            "Updating interval for coordinator: %s, %s",
+            coordinator.name,
+            update_interval,
+        )
         coordinator.update_interval = update_interval
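
Note: to make the leveling comment concrete, here is a minimal standalone sketch of the same arithmetic (plain Python, no Home Assistant imports; the helper name is made up for illustration):

from math import ceil

# 8,500 usable calls per "smallest possible month" of 28 days, split evenly
# across every config entry (consumer) sharing one API key:
def minutes_between_updates(num_consumers: int) -> int:
    calls_per_minute = 8500 / 28 / 24 / 60 / num_consumers
    return ceil(1 / calls_per_minute)

for consumers in (1, 2, 3, 4):
    print(consumers, minutes_between_updates(consumers))
# Prints: 1 -> 5, 2 -> 10, 3 -> 15, 4 -> 19 minutes between updates.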
@@ -194,10 +210,6 @@ async def async_setup_entry(hass, config_entry):
         client = Client(api_key=config_entry.data[CONF_API_KEY], session=websession)
 
-        update_interval = async_get_cloud_api_update_interval(
-            hass, config_entry.data[CONF_API_KEY]
-        )
-
         async def async_update_data():
             """Get new data from the API."""
             if CONF_CITY in config_entry.data:
@@ -219,14 +231,19 @@ async def async_setup_entry(hass, config_entry):
         coordinator = DataUpdateCoordinator(
             hass,
             LOGGER,
-            name="geography data",
-            update_interval=update_interval,
+            name=async_get_geography_id(config_entry.data),
+            # We give a placeholder update interval in order to create the coordinator;
+            # then, below, we use the coordinator's presence (along with any other
+            # coordinators using the same API key) to calculate an actual, leveled
+            # update interval:
+            update_interval=timedelta(minutes=5),
             update_method=async_update_data,
         )
 
-        # Ensure any other, existing config entries that use this API key are updated
-        # with the new scan interval:
-        async_reset_coordinator_update_intervals(hass, update_interval)
+        hass.data[DOMAIN][DATA_COORDINATOR][config_entry.entry_id] = coordinator
+        async_sync_geo_coordinator_update_intervals(
+            hass, config_entry.data[CONF_API_KEY]
+        )
 
         # Only geography-based entries have options:
         config_entry.add_update_listener(async_update_options)
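
Note on the ordering above: the coordinator is stored in hass.data before the sync helper runs, so the new entry counts itself as a consumer of its own API key. A rough, self-contained simulation of that register-then-sync flow (all names here are illustrative stand-ins, not Home Assistant APIs):

from datetime import timedelta
from math import ceil

coordinators = {}  # entry_id -> update interval, mimicking the hass.data store

def leveled_interval(num_consumers: int) -> timedelta:
    # Same arithmetic as async_get_cloud_api_update_interval:
    return timedelta(minutes=ceil(num_consumers * 28 * 24 * 60 / 8500))

def sync_intervals() -> None:
    interval = leveled_interval(len(coordinators))
    for entry_id in coordinators:
        coordinators[entry_id] = interval

def add_entry(entry_id: str) -> None:
    coordinators[entry_id] = timedelta(minutes=5)  # placeholder, as in the diff
    sync_intervals()  # registering first means the new entry is counted too

add_entry("home")    # home polls every 5 minutes
add_entry("office")  # both entries re-leveled to every 10 minutes
print(coordinators)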
@@ -251,14 +268,14 @@ async def async_setup_entry(hass, config_entry):
             hass,
             LOGGER,
             name="Node/Pro data",
-            update_interval=DEFAULT_NODE_PRO_SCAN_INTERVAL,
+            update_interval=DEFAULT_NODE_PRO_UPDATE_INTERVAL,
             update_method=async_update_data,
         )
 
-    await coordinator.async_refresh()
-
     hass.data[DOMAIN][DATA_COORDINATOR][config_entry.entry_id] = coordinator
 
+    await coordinator.async_refresh()
+
     for component in PLATFORMS:
         hass.async_create_task(
             hass.config_entries.async_forward_entry_setup(config_entry, component)
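
Swapping the refresh and the store likely matters because the first refresh is an await point: anything that runs while it is in flight (including the interval sync for other entries sharing the key) should already see this coordinator registered. A toy illustration of the principle, not AirVisual code:

import asyncio

registry = {}  # stand-in for hass.data[DOMAIN][DATA_COORDINATOR]

async def first_refresh(entry_id: str) -> None:
    await asyncio.sleep(0)  # yields control, like a real network request
    # Code that ran during the await could rely on the registration:
    assert entry_id in registry

async def setup_entry(entry_id: str) -> None:
    registry[entry_id] = object()  # store first...
    await first_refresh(entry_id)  # ...then perform the first refresh

asyncio.run(setup_entry("abc123"))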
@@ -317,6 +334,12 @@ async def async_unload_entry(hass, config_entry):
     )
     if unload_ok:
         hass.data[DOMAIN][DATA_COORDINATOR].pop(config_entry.entry_id)
+        if config_entry.data[CONF_INTEGRATION_TYPE] == INTEGRATION_TYPE_GEOGRAPHY:
+            # Re-calculate the update interval for any remaining consumers of
+            # this API key:
+            async_sync_geo_coordinator_update_intervals(
+                hass, config_entry.data[CONF_API_KEY]
+            )
 
     return unload_ok
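
The unload hook is the other half of the leveling: when a geography entry is removed, the remaining consumers of the key are re-synced and can poll faster again. Continuing the illustrative arithmetic (stand-in names, not Home Assistant APIs):

from math import ceil

def leveled_minutes(num_consumers: int) -> int:
    return ceil(num_consumers * 28 * 24 * 60 / 8500)

print(leveled_minutes(3))  # 15 minutes while three entries share the key
print(leveled_minutes(2))  # 10 minutes after one entry unloads and re-syncs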

homeassistant/components/airvisual/air_quality.py

@@ -8,7 +8,7 @@ from .const import (
     CONF_INTEGRATION_TYPE,
     DATA_COORDINATOR,
     DOMAIN,
-    INTEGRATION_TYPE_GEOGRAPHY,
+    INTEGRATION_TYPE_NODE_PRO,
 )
 
 ATTR_HUMIDITY = "humidity"
@@ -18,12 +18,12 @@ ATTR_VOC = "voc"
 async def async_setup_entry(hass, config_entry, async_add_entities):
     """Set up AirVisual air quality entities based on a config entry."""
-    coordinator = hass.data[DOMAIN][DATA_COORDINATOR][config_entry.entry_id]
-
     # Geography-based AirVisual integrations don't utilize this platform:
-    if config_entry.data[CONF_INTEGRATION_TYPE] == INTEGRATION_TYPE_GEOGRAPHY:
+    if config_entry.data[CONF_INTEGRATION_TYPE] != INTEGRATION_TYPE_NODE_PRO:
         return
 
+    coordinator = hass.data[DOMAIN][DATA_COORDINATOR][config_entry.entry_id]
+
     async_add_entities([AirVisualNodeProSensor(coordinator)], True)
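
The rewritten guard is an allow-list: rather than returning early for the one geography type, it returns early for anything that is not a Node/Pro entry, and only touches the coordinator store past the guard. A minimal sketch of the pattern with hypothetical names:

DATA = {"node_pro_entry": "coordinator"}

def setup(entry_id: str, integration_type: str):
    # Allow-list the single supported type; future integration types are
    # excluded automatically, and shared state is only read past the guard:
    if integration_type != "node_pro":
        return None
    return DATA[entry_id]

assert setup("node_pro_entry", "node_pro") == "coordinator"
assert setup("geo_entry", "geography") is None  # guard runs before any lookup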