Migrate geo_json_events to async library (#68236)

* use async integration library

* migrating to async library

* migrating tests to async library

* renamed method and fixed comment

* removed callback annotation

* use async dispatcher
This commit is contained in:
Malte Franken 2022-03-18 06:40:43 +11:00 committed by GitHub
parent 0adc7042dc
commit a8dae97917
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
5 changed files with 73 additions and 48 deletions

View File

@@ -4,7 +4,7 @@ from __future__ import annotations
from datetime import timedelta from datetime import timedelta
import logging import logging
from geojson_client.generic_feed import GenericFeedManager from aio_geojson_generic_client import GenericFeedManager
import voluptuous as vol import voluptuous as vol
from homeassistant.components.geo_location import PLATFORM_SCHEMA, GeolocationEvent from homeassistant.components.geo_location import PLATFORM_SCHEMA, GeolocationEvent
@@ -18,10 +18,14 @@ from homeassistant.const import (
LENGTH_KILOMETERS, LENGTH_KILOMETERS,
) )
from homeassistant.core import HomeAssistant, callback from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers import aiohttp_client
import homeassistant.helpers.config_validation as cv import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.dispatcher import async_dispatcher_connect, dispatcher_send from homeassistant.helpers.dispatcher import (
async_dispatcher_connect,
async_dispatcher_send,
)
from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.entity_platform import AddEntitiesCallback
from homeassistant.helpers.event import track_time_interval from homeassistant.helpers.event import async_track_time_interval
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
_LOGGER = logging.getLogger(__name__) _LOGGER = logging.getLogger(__name__)
@@ -44,10 +48,10 @@ PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
) )
def setup_platform( async def async_setup_platform(
hass: HomeAssistant, hass: HomeAssistant,
config: ConfigType, config: ConfigType,
add_entities: AddEntitiesCallback, async_add_entities: AddEntitiesCallback,
discovery_info: DiscoveryInfoType | None = None, discovery_info: DiscoveryInfoType | None = None,
) -> None: ) -> None:
"""Set up the GeoJSON Events platform.""" """Set up the GeoJSON Events platform."""
@@ -59,27 +63,30 @@ def setup_platform(
) )
radius_in_km = config[CONF_RADIUS] radius_in_km = config[CONF_RADIUS]
# Initialize the entity manager. # Initialize the entity manager.
feed = GeoJsonFeedEntityManager( manager = GeoJsonFeedEntityManager(
hass, add_entities, scan_interval, coordinates, url, radius_in_km hass, async_add_entities, scan_interval, coordinates, url, radius_in_km
) )
await manager.async_init()
def start_feed_manager(event): async def start_feed_manager(event=None):
"""Start feed manager.""" """Start feed manager."""
feed.startup() await manager.async_update()
hass.bus.listen_once(EVENT_HOMEASSISTANT_START, start_feed_manager) hass.bus.async_listen_once(EVENT_HOMEASSISTANT_START, start_feed_manager)
class GeoJsonFeedEntityManager: class GeoJsonFeedEntityManager:
"""Feed Entity Manager for GeoJSON feeds.""" """Feed Entity Manager for GeoJSON feeds."""
def __init__( def __init__(
self, hass, add_entities, scan_interval, coordinates, url, radius_in_km self, hass, async_add_entities, scan_interval, coordinates, url, radius_in_km
): ):
"""Initialize the GeoJSON Feed Manager.""" """Initialize the GeoJSON Feed Manager."""
self._hass = hass self._hass = hass
websession = aiohttp_client.async_get_clientsession(hass)
self._feed_manager = GenericFeedManager( self._feed_manager = GenericFeedManager(
websession,
self._generate_entity, self._generate_entity,
self._update_entity, self._update_entity,
self._remove_entity, self._remove_entity,
@@ -87,37 +94,42 @@ class GeoJsonFeedEntityManager:
url, url,
filter_radius=radius_in_km, filter_radius=radius_in_km,
) )
self._add_entities = add_entities self._async_add_entities = async_add_entities
self._scan_interval = scan_interval self._scan_interval = scan_interval
def startup(self): async def async_init(self):
"""Start up this manager.""" """Schedule initial and regular updates based on configured time interval."""
self._feed_manager.update()
self._init_regular_updates()
def _init_regular_updates(self): async def update(event_time):
"""Schedule regular updates at the specified interval.""" """Update."""
track_time_interval( await self.async_update()
self._hass, lambda now: self._feed_manager.update(), self._scan_interval
) # Trigger updates at regular intervals.
async_track_time_interval(self._hass, update, self._scan_interval)
_LOGGER.debug("Feed entity manager initialized")
async def async_update(self):
"""Refresh data."""
await self._feed_manager.update()
_LOGGER.debug("Feed entity manager updated")
def get_entry(self, external_id): def get_entry(self, external_id):
"""Get feed entry by external id.""" """Get feed entry by external id."""
return self._feed_manager.feed_entries.get(external_id) return self._feed_manager.feed_entries.get(external_id)
def _generate_entity(self, external_id): async def _generate_entity(self, external_id):
"""Generate new entity.""" """Generate new entity."""
new_entity = GeoJsonLocationEvent(self, external_id) new_entity = GeoJsonLocationEvent(self, external_id)
# Add new entities to HA. # Add new entities to HA.
self._add_entities([new_entity], True) self._async_add_entities([new_entity], True)
def _update_entity(self, external_id): async def _update_entity(self, external_id):
"""Update entity.""" """Update entity."""
dispatcher_send(self._hass, f"geo_json_events_update_{external_id}") async_dispatcher_send(self._hass, f"geo_json_events_update_{external_id}")
def _remove_entity(self, external_id): async def _remove_entity(self, external_id):
"""Remove entity.""" """Remove entity."""
dispatcher_send(self._hass, f"geo_json_events_delete_{external_id}") async_dispatcher_send(self._hass, f"geo_json_events_delete_{external_id}")
class GeoJsonLocationEvent(GeolocationEvent): class GeoJsonLocationEvent(GeolocationEvent):

View File

@@ -2,8 +2,8 @@
"domain": "geo_json_events", "domain": "geo_json_events",
"name": "GeoJSON", "name": "GeoJSON",
"documentation": "https://www.home-assistant.io/integrations/geo_json_events", "documentation": "https://www.home-assistant.io/integrations/geo_json_events",
"requirements": ["geojson_client==0.6"], "requirements": ["aio_geojson_generic_client==0.1"],
"codeowners": ["@exxamalte"], "codeowners": ["@exxamalte"],
"iot_class": "cloud_polling", "iot_class": "cloud_polling",
"loggers": ["geojson_client"] "loggers": ["aio_geojson_generic_client"]
} }

View File

@@ -94,6 +94,9 @@ afsapi==0.0.4
# homeassistant.components.agent_dvr # homeassistant.components.agent_dvr
agent-py==0.0.23 agent-py==0.0.23
# homeassistant.components.geo_json_events
aio_geojson_generic_client==0.1
# homeassistant.components.geonetnz_quakes # homeassistant.components.geonetnz_quakes
aio_geojson_geonetnz_quakes==0.13 aio_geojson_geonetnz_quakes==0.13
@@ -678,7 +681,6 @@ garages-amsterdam==3.0.0
# homeassistant.components.geniushub # homeassistant.components.geniushub
geniushub-client==0.6.30 geniushub-client==0.6.30
# homeassistant.components.geo_json_events
# homeassistant.components.usgs_earthquakes_feed # homeassistant.components.usgs_earthquakes_feed
geojson_client==0.6 geojson_client==0.6

View File

@@ -75,6 +75,9 @@ advantage_air==0.3.1
# homeassistant.components.agent_dvr # homeassistant.components.agent_dvr
agent-py==0.0.23 agent-py==0.0.23
# homeassistant.components.geo_json_events
aio_geojson_generic_client==0.1
# homeassistant.components.geonetnz_quakes # homeassistant.components.geonetnz_quakes
aio_geojson_geonetnz_quakes==0.13 aio_geojson_geonetnz_quakes==0.13
@@ -461,7 +464,6 @@ gTTS==2.2.4
# homeassistant.components.garages_amsterdam # homeassistant.components.garages_amsterdam
garages-amsterdam==3.0.0 garages-amsterdam==3.0.0
# homeassistant.components.geo_json_events
# homeassistant.components.usgs_earthquakes_feed # homeassistant.components.usgs_earthquakes_feed
geojson_client==0.6 geojson_client==0.6

View File

@@ -1,5 +1,7 @@
"""The tests for the geojson platform.""" """The tests for the geojson platform."""
from unittest.mock import MagicMock, call, patch from unittest.mock import ANY, MagicMock, call, patch
from aio_geojson_generic_client import GenericFeed
from homeassistant.components import geo_location from homeassistant.components import geo_location
from homeassistant.components.geo_json_events.geo_location import ( from homeassistant.components.geo_json_events.geo_location import (
@@ -66,9 +68,9 @@ async def test_setup(hass, legacy_patchable_time):
# Patching 'utcnow' to gain more control over the timed update. # Patching 'utcnow' to gain more control over the timed update.
utcnow = dt_util.utcnow() utcnow = dt_util.utcnow()
with patch("homeassistant.util.dt.utcnow", return_value=utcnow), patch( with patch("homeassistant.util.dt.utcnow", return_value=utcnow), patch(
"geojson_client.generic_feed.GenericFeed" "aio_geojson_client.feed.GeoJsonFeed.update"
) as mock_feed: ) as mock_feed_update:
mock_feed.return_value.update.return_value = ( mock_feed_update.return_value = (
"OK", "OK",
[mock_entry_1, mock_entry_2, mock_entry_3], [mock_entry_1, mock_entry_2, mock_entry_3],
) )
@@ -124,7 +126,7 @@ async def test_setup(hass, legacy_patchable_time):
# Simulate an update - one existing, one new entry, # Simulate an update - one existing, one new entry,
# one outdated entry # one outdated entry
mock_feed.return_value.update.return_value = ( mock_feed_update.return_value = (
"OK", "OK",
[mock_entry_1, mock_entry_4, mock_entry_3], [mock_entry_1, mock_entry_4, mock_entry_3],
) )
@@ -136,7 +138,7 @@ async def test_setup(hass, legacy_patchable_time):
# Simulate an update - empty data, but successful update, # Simulate an update - empty data, but successful update,
# so no changes to entities. # so no changes to entities.
mock_feed.return_value.update.return_value = "OK_NO_DATA", None mock_feed_update.return_value = "OK_NO_DATA", None
async_fire_time_changed(hass, utcnow + 2 * SCAN_INTERVAL) async_fire_time_changed(hass, utcnow + 2 * SCAN_INTERVAL)
await hass.async_block_till_done() await hass.async_block_till_done()
@@ -144,7 +146,7 @@ async def test_setup(hass, legacy_patchable_time):
assert len(all_states) == 3 assert len(all_states) == 3
# Simulate an update - empty data, removes all entities # Simulate an update - empty data, removes all entities
mock_feed.return_value.update.return_value = "ERROR", None mock_feed_update.return_value = "ERROR", None
async_fire_time_changed(hass, utcnow + 3 * SCAN_INTERVAL) async_fire_time_changed(hass, utcnow + 3 * SCAN_INTERVAL)
await hass.async_block_till_done() await hass.async_block_till_done()
@@ -157,8 +159,13 @@ async def test_setup_with_custom_location(hass):
# Set up some mock feed entries for this test. # Set up some mock feed entries for this test.
mock_entry_1 = _generate_mock_feed_entry("1234", "Title 1", 2000.5, (-31.1, 150.1)) mock_entry_1 = _generate_mock_feed_entry("1234", "Title 1", 2000.5, (-31.1, 150.1))
with patch("geojson_client.generic_feed.GenericFeed") as mock_feed: with patch(
mock_feed.return_value.update.return_value = "OK", [mock_entry_1] "aio_geojson_generic_client.feed_manager.GenericFeed",
wraps=GenericFeed,
) as mock_feed, patch(
"aio_geojson_client.feed.GeoJsonFeed.update"
) as mock_feed_update:
mock_feed_update.return_value = "OK", [mock_entry_1]
with assert_setup_component(1, geo_location.DOMAIN): with assert_setup_component(1, geo_location.DOMAIN):
assert await async_setup_component( assert await async_setup_component(
@@ -174,7 +181,9 @@ async def test_setup_with_custom_location(hass):
all_states = hass.states.async_all() all_states = hass.states.async_all()
assert len(all_states) == 1 assert len(all_states) == 1
assert mock_feed.call_args == call((15.1, 25.2), URL, filter_radius=200.0) assert mock_feed.call_args == call(
ANY, (15.1, 25.2), URL, filter_radius=200.0
)
async def test_setup_race_condition(hass, legacy_patchable_time): async def test_setup_race_condition(hass, legacy_patchable_time):
@@ -197,12 +206,12 @@ async def test_setup_race_condition(hass, legacy_patchable_time):
# Patching 'utcnow' to gain more control over the timed update. # Patching 'utcnow' to gain more control over the timed update.
utcnow = dt_util.utcnow() utcnow = dt_util.utcnow()
with patch("homeassistant.util.dt.utcnow", return_value=utcnow), patch( with patch("homeassistant.util.dt.utcnow", return_value=utcnow), patch(
"geojson_client.generic_feed.GenericFeed" "aio_geojson_client.feed.GeoJsonFeed.update"
) as mock_feed, assert_setup_component(1, geo_location.DOMAIN): ) as mock_feed_update, assert_setup_component(1, geo_location.DOMAIN):
assert await async_setup_component(hass, geo_location.DOMAIN, CONFIG) assert await async_setup_component(hass, geo_location.DOMAIN, CONFIG)
await hass.async_block_till_done() await hass.async_block_till_done()
mock_feed.return_value.update.return_value = "OK", [mock_entry_1] mock_feed_update.return_value = "OK", [mock_entry_1]
# Artificially trigger update. # Artificially trigger update.
hass.bus.async_fire(EVENT_HOMEASSISTANT_START) hass.bus.async_fire(EVENT_HOMEASSISTANT_START)
@@ -215,7 +224,7 @@ async def test_setup_race_condition(hass, legacy_patchable_time):
assert len(hass.data[DATA_DISPATCHER][update_signal]) == 1 assert len(hass.data[DATA_DISPATCHER][update_signal]) == 1
# Simulate an update - empty data, removes all entities # Simulate an update - empty data, removes all entities
mock_feed.return_value.update.return_value = "ERROR", None mock_feed_update.return_value = "ERROR", None
async_fire_time_changed(hass, utcnow + SCAN_INTERVAL) async_fire_time_changed(hass, utcnow + SCAN_INTERVAL)
await hass.async_block_till_done() await hass.async_block_till_done()
@@ -225,7 +234,7 @@ async def test_setup_race_condition(hass, legacy_patchable_time):
assert len(hass.data[DATA_DISPATCHER][update_signal]) == 0 assert len(hass.data[DATA_DISPATCHER][update_signal]) == 0
# Simulate an update - 1 entry # Simulate an update - 1 entry
mock_feed.return_value.update.return_value = "OK", [mock_entry_1] mock_feed_update.return_value = "OK", [mock_entry_1]
async_fire_time_changed(hass, utcnow + 2 * SCAN_INTERVAL) async_fire_time_changed(hass, utcnow + 2 * SCAN_INTERVAL)
await hass.async_block_till_done() await hass.async_block_till_done()
@@ -235,7 +244,7 @@ async def test_setup_race_condition(hass, legacy_patchable_time):
assert len(hass.data[DATA_DISPATCHER][update_signal]) == 1 assert len(hass.data[DATA_DISPATCHER][update_signal]) == 1
# Simulate an update - 1 entry # Simulate an update - 1 entry
mock_feed.return_value.update.return_value = "OK", [mock_entry_1] mock_feed_update.return_value = "OK", [mock_entry_1]
async_fire_time_changed(hass, utcnow + 3 * SCAN_INTERVAL) async_fire_time_changed(hass, utcnow + 3 * SCAN_INTERVAL)
await hass.async_block_till_done() await hass.async_block_till_done()
@@ -245,7 +254,7 @@ async def test_setup_race_condition(hass, legacy_patchable_time):
assert len(hass.data[DATA_DISPATCHER][update_signal]) == 1 assert len(hass.data[DATA_DISPATCHER][update_signal]) == 1
# Simulate an update - empty data, removes all entities # Simulate an update - empty data, removes all entities
mock_feed.return_value.update.return_value = "ERROR", None mock_feed_update.return_value = "ERROR", None
async_fire_time_changed(hass, utcnow + 4 * SCAN_INTERVAL) async_fire_time_changed(hass, utcnow + 4 * SCAN_INTERVAL)
await hass.async_block_till_done() await hass.async_block_till_done()