diff --git a/homeassistant/components/feedreader/__init__.py b/homeassistant/components/feedreader/__init__.py
index 36ffe545996..b9f0b006e2a 100644
--- a/homeassistant/components/feedreader/__init__.py
+++ b/homeassistant/components/feedreader/__init__.py
@@ -5,7 +5,7 @@ from __future__ import annotations
 import voluptuous as vol
 
 from homeassistant.config_entries import SOURCE_IMPORT, ConfigEntry
-from homeassistant.const import CONF_SCAN_INTERVAL, CONF_URL
+from homeassistant.const import CONF_SCAN_INTERVAL, CONF_URL, Platform
 from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant
 import homeassistant.helpers.config_validation as cv
 from homeassistant.helpers.issue_registry import IssueSeverity, async_create_issue
@@ -89,14 +89,16 @@ async def async_setup_entry(hass: HomeAssistant, entry: FeedReaderConfigEntry) -> bool:
         storage,
     )
 
-    await coordinator.async_config_entry_first_refresh()
-
-    # workaround because coordinators without listeners won't update
-    # can be removed when we have entities to update
-    coordinator.async_add_listener(lambda: None)
+    await coordinator.async_setup()
 
     entry.runtime_data = coordinator
 
+    # we need to set up the event entities before the first coordinator data fetch
+    # so that they can already receive the events from the first fetch
+    await hass.config_entries.async_forward_entry_setups(entry, [Platform.EVENT])
+
+    await coordinator.async_config_entry_first_refresh()
+
     entry.async_on_unload(entry.add_update_listener(_async_update_listener))
 
     return True
@@ -110,7 +112,7 @@ async def async_unload_entry(hass: HomeAssistant, entry: FeedReaderConfigEntry) -> bool:
     # if this is the last entry, remove the storage
     if len(entries) == 1:
         hass.data.pop(MY_KEY)
-    return True
+    return await hass.config_entries.async_unload_platforms(entry, [Platform.EVENT])
 
 
 async def _async_update_listener(
diff --git a/homeassistant/components/feedreader/const.py b/homeassistant/components/feedreader/const.py
index c0aa6633669..efaa0e9d972 100644
--- a/homeassistant/components/feedreader/const.py
+++ b/homeassistant/components/feedreader/const.py
@@ -1,9 +1,12 @@
 """Constants for RSS/Atom feeds."""
 
 from datetime import timedelta
+from typing import Final
 
-DOMAIN = "feedreader"
+DOMAIN: Final[str] = "feedreader"
 
-CONF_MAX_ENTRIES = "max_entries"
-DEFAULT_MAX_ENTRIES = 20
-DEFAULT_SCAN_INTERVAL = timedelta(hours=1)
+CONF_MAX_ENTRIES: Final[str] = "max_entries"
+DEFAULT_MAX_ENTRIES: Final[int] = 20
+DEFAULT_SCAN_INTERVAL: Final[timedelta] = timedelta(hours=1)
+
+EVENT_FEEDREADER: Final[str] = "feedreader"
diff --git a/homeassistant/components/feedreader/coordinator.py b/homeassistant/components/feedreader/coordinator.py
index e116d804b3d..e429979b1da 100644
--- a/homeassistant/components/feedreader/coordinator.py
+++ b/homeassistant/components/feedreader/coordinator.py
@@ -10,24 +10,28 @@
 from urllib.error import URLError
 
 import feedparser
 
+from homeassistant.config_entries import ConfigEntry
 from homeassistant.core import HomeAssistant, callback
 from homeassistant.helpers.storage import Store
 from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
 from homeassistant.util import dt as dt_util
 
-from .const import DEFAULT_SCAN_INTERVAL, DOMAIN
+from .const import DEFAULT_SCAN_INTERVAL, DOMAIN, EVENT_FEEDREADER
 
 DELAY_SAVE = 30
-EVENT_FEEDREADER = "feedreader"
 STORAGE_VERSION = 1
 
 _LOGGER = getLogger(__name__)
 
 
-class FeedReaderCoordinator(DataUpdateCoordinator[None]):
+class FeedReaderCoordinator(
+    DataUpdateCoordinator[list[feedparser.FeedParserDict] | None]
+):
     """Abstraction over Feedparser module."""
 
+    config_entry: ConfigEntry
+
     def __init__(
         self,
         hass: HomeAssistant,
@@ -42,34 +46,36 @@ class FeedReaderCoordinator(DataUpdateCoordinator[None]):
             name=f"{DOMAIN} {url}",
             update_interval=DEFAULT_SCAN_INTERVAL,
         )
-        self._url = url
+        self.url = url
+        self.feed_author: str | None = None
+        self.feed_version: str | None = None
         self._max_entries = max_entries
-        self._feed: feedparser.FeedParserDict | None = None
         self._storage = storage
         self._last_entry_timestamp: struct_time | None = None
         self._event_type = EVENT_FEEDREADER
+        self._feed: feedparser.FeedParserDict | None = None
         self._feed_id = url
 
     @callback
     def _log_no_entries(self) -> None:
         """Send no entries log at debug level."""
-        _LOGGER.debug("No new entries to be published in feed %s", self._url)
+        _LOGGER.debug("No new entries to be published in feed %s", self.url)
 
-    def _fetch_feed(self) -> feedparser.FeedParserDict:
+    async def _async_fetch_feed(self) -> feedparser.FeedParserDict:
         """Fetch the feed data."""
-        return feedparser.parse(
-            self._url,
-            etag=None if not self._feed else self._feed.get("etag"),
-            modified=None if not self._feed else self._feed.get("modified"),
-        )
+        _LOGGER.debug("Fetching new data from feed %s", self.url)
 
-    async def _async_update_data(self) -> None:
-        """Update the feed and publish new entries to the event bus."""
-        _LOGGER.debug("Fetching new data from feed %s", self._url)
-        self._feed = await self.hass.async_add_executor_job(self._fetch_feed)
+        def _parse_feed() -> feedparser.FeedParserDict:
+            return feedparser.parse(
+                self.url,
+                etag=None if not self._feed else self._feed.get("etag"),
+                modified=None if not self._feed else self._feed.get("modified"),
+            )
 
-        if not self._feed:
-            raise UpdateFailed(f"Error fetching feed data from {self._url}")
+        feed = await self.hass.async_add_executor_job(_parse_feed)
+
+        if not feed:
+            raise UpdateFailed(f"Error fetching feed data from {self.url}")
 
         # The 'bozo' flag really only indicates that there was an issue
         # during the initial parsing of the XML, but it doesn't indicate
@@ -77,37 +83,57 @@ class FeedReaderCoordinator(DataUpdateCoordinator[None]):
         # feedparser lib is trying a less strict parsing approach.
         # If an error is detected here, log warning message but continue
         # processing the feed entries if present.
-        if self._feed.bozo != 0:
-            if isinstance(self._feed.bozo_exception, URLError):
+        if feed.bozo != 0:
+            if isinstance(feed.bozo_exception, URLError):
                 raise UpdateFailed(
-                    f"Error fetching feed data from {self._url}: {self._feed.bozo_exception}"
+                    f"Error fetching feed data from {self.url} : {feed.bozo_exception}"
                 )
             # no connection issue, but parsing issue
             _LOGGER.warning(
                 "Possible issue parsing feed %s: %s",
-                self._url,
-                self._feed.bozo_exception,
+                self.url,
+                feed.bozo_exception,
             )
+        return feed
+
+    async def async_setup(self) -> None:
+        """Set up the feed manager."""
+        feed = await self._async_fetch_feed()
+        self.logger.debug("Feed data fetched from %s : %s", self.url, feed["feed"])
+        self.feed_author = feed["feed"].get("author")
+        self.feed_version = feedparser.api.SUPPORTED_VERSIONS.get(feed["version"])
+        self._feed = feed
+
+    async def _async_update_data(self) -> list[feedparser.FeedParserDict] | None:
+        """Update the feed and publish new entries to the event bus."""
+        assert self._feed is not None
+        # _last_entry_timestamp is not set during async_setup, but we have already
+        # fetched data, so we can use it instead of fetching again
+        if self._last_entry_timestamp:
+            self._feed = await self._async_fetch_feed()
+
         # Using etag and modified, if there's no new data available,
         # the entries list will be empty
         _LOGGER.debug(
             "%s entri(es) available in feed %s",
             len(self._feed.entries),
-            self._url,
+            self.url,
         )
 
-        if not self._feed.entries:
+        if not isinstance(self._feed.entries, list):
             self._log_no_entries()
             return None
 
         self._filter_entries()
         self._publish_new_entries()
 
-        _LOGGER.debug("Fetch from feed %s completed", self._url)
+        _LOGGER.debug("Fetch from feed %s completed", self.url)
 
         if self._last_entry_timestamp:
             self._storage.async_put_timestamp(self._feed_id, self._last_entry_timestamp)
 
+        return self._feed.entries
+
     @callback
     def _filter_entries(self) -> None:
         """Filter the entries provided and return the ones to keep."""
@@ -116,7 +142,7 @@ class FeedReaderCoordinator(DataUpdateCoordinator[None]):
             _LOGGER.debug(
                 "Processing only the first %s entries in feed %s",
                 self._max_entries,
-                self._url,
+                self.url,
             )
             self._feed.entries = self._feed.entries[0 : self._max_entries]
 
@@ -132,7 +158,7 @@ class FeedReaderCoordinator(DataUpdateCoordinator[None]):
                 "No updated_parsed or published_parsed info available for entry %s",
                 entry,
             )
-        entry["feed_url"] = self._url
+        entry["feed_url"] = self.url
         self.hass.bus.async_fire(self._event_type, entry)
         _LOGGER.debug("New event fired for entry %s", entry.get("link"))
 
@@ -164,7 +190,7 @@ class FeedReaderCoordinator(DataUpdateCoordinator[None]):
         if new_entry_count == 0:
             self._log_no_entries()
         else:
-            _LOGGER.debug("%d entries published in feed %s", new_entry_count, self._url)
+            _LOGGER.debug("%d entries published in feed %s", new_entry_count, self.url)
 
 
 class StoredData:
diff --git a/homeassistant/components/feedreader/event.py b/homeassistant/components/feedreader/event.py
new file mode 100644
index 00000000000..c9bf39e83ca
--- /dev/null
+++ b/homeassistant/components/feedreader/event.py
@@ -0,0 +1,90 @@
+"""Event entities for RSS/Atom feeds."""
+
+from __future__ import annotations
+
+import logging
+
+from feedparser import FeedParserDict
+
+from homeassistant.components.event import EventEntity
+from homeassistant.core import HomeAssistant, callback
+from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo
+from homeassistant.helpers.entity_platform import AddEntitiesCallback
+from homeassistant.helpers.update_coordinator import CoordinatorEntity
+
+from . import FeedReaderConfigEntry
+from .const import DOMAIN, EVENT_FEEDREADER
+from .coordinator import FeedReaderCoordinator
+
+LOGGER = logging.getLogger(__name__)
+
+ATTR_CONTENT = "content"
+ATTR_LINK = "link"
+ATTR_TITLE = "title"
+
+
+async def async_setup_entry(
+    hass: HomeAssistant,
+    entry: FeedReaderConfigEntry,
+    async_add_entities: AddEntitiesCallback,
+) -> None:
+    """Set up event entities for feedreader."""
+    coordinator: FeedReaderCoordinator = entry.runtime_data
+
+    async_add_entities([FeedReaderEvent(coordinator)])
+
+
+class FeedReaderEvent(CoordinatorEntity[FeedReaderCoordinator], EventEntity):
+    """Representation of a feedreader event."""
+
+    _attr_event_types = [EVENT_FEEDREADER]
+    _attr_name = None
+    _attr_has_entity_name = True
+    _unrecorded_attributes = frozenset({ATTR_CONTENT, ATTR_TITLE, ATTR_LINK})
+    coordinator: FeedReaderCoordinator
+
+    def __init__(self, coordinator: FeedReaderCoordinator) -> None:
+        """Initialize the feedreader event."""
+        super().__init__(coordinator)
+        self._attr_unique_id = f"{coordinator.config_entry.entry_id}_latest_feed"
+        self._attr_translation_key = "latest_feed"
+        self._attr_device_info = DeviceInfo(
+            identifiers={(DOMAIN, coordinator.config_entry.entry_id)},
+            name=coordinator.config_entry.title,
+            configuration_url=coordinator.url,
+            manufacturer=coordinator.feed_author,
+            sw_version=coordinator.feed_version,
+            entry_type=DeviceEntryType.SERVICE,
+        )
+
+    async def async_added_to_hass(self) -> None:
+        """Entity added to hass."""
+        await super().async_added_to_hass()
+        self.async_on_remove(
+            self.coordinator.async_add_listener(self._async_handle_update)
+        )
+
+    @callback
+    def _async_handle_update(self) -> None:
+        if (data := self.coordinator.data) is None or not data:
+            return
+
+        # RSS feeds are normally sorted reverse chronologically by published date
+        # so we always take the first entry in the list, since we only care about the latest entry
+        feed_data: FeedParserDict = data[0]
+
+        if content := feed_data.get("content"):
+            if isinstance(content, list) and isinstance(content[0], dict):
+                content = content[0].get("value")
+        else:
+            content = feed_data.get("summary")
+
+        self._trigger_event(
+            EVENT_FEEDREADER,
+            {
+                ATTR_TITLE: feed_data.get("title"),
+                ATTR_LINK: feed_data.get("link"),
+                ATTR_CONTENT: content,
+            },
+        )
+        self.async_write_ha_state()
diff --git a/homeassistant/components/feedreader/icons.json b/homeassistant/components/feedreader/icons.json
new file mode 100644
index 00000000000..0cad13cd141
--- /dev/null
+++ b/homeassistant/components/feedreader/icons.json
@@ -0,0 +1,9 @@
+{
+  "entity": {
+    "event": {
+      "latest_feed": {
+        "default": "mdi:rss"
+      }
+    }
+  }
+}
diff --git a/tests/components/feedreader/fixtures/feedreader.xml b/tests/components/feedreader/fixtures/feedreader.xml
index 8c85a4975ee..17402cad081 100644
--- a/tests/components/feedreader/fixtures/feedreader.xml
+++ b/tests/components/feedreader/fixtures/feedreader.xml
@@ -14,6 +14,7 @@
       <link>http://www.example.com/link/1</link>
       <guid>GUID 1</guid>
       <pubDate>Mon, 30 Apr 2018 15:10:00 +1000</pubDate>
+      <content:encoded>Content 1</content:encoded>
     </item>
   </channel>
 </rss>
diff --git a/tests/components/feedreader/fixtures/feedreader1.xml b/tests/components/feedreader/fixtures/feedreader1.xml
index ff856125779..c71507c15b7 100644
--- a/tests/components/feedreader/fixtures/feedreader1.xml
+++ b/tests/components/feedreader/fixtures/feedreader1.xml
@@ -8,19 +8,21 @@
     <pubDate>Mon, 30 Apr 2018 15:00:00 +1000</pubDate>
     <ttl>1800</ttl>
 
-    <item>
-      <title>Title 1</title>
-      <description>Description 1</description>
-      <link>http://www.example.com/link/1</link>
-      <guid>GUID 1</guid>
-      <pubDate>Mon, 30 Apr 2018 15:10:00 +1000</pubDate>
-    </item>
     <item>
       <title>Title 2</title>
       <description>Description 2</description>
       <link>http://www.example.com/link/2</link>
       <guid>GUID 2</guid>
       <pubDate>Mon, 30 Apr 2018 15:11:00 +1000</pubDate>
+      <content:encoded>Content 2</content:encoded>
+    </item>
+    <item>
+      <title>Title 1</title>
+      <description>Description 1</description>
+      <link>http://www.example.com/link/1</link>
+      <guid>GUID 1</guid>
+      <pubDate>Mon, 30 Apr 2018 15:10:00 +1000</pubDate>
+      <content:encoded>Content 1</content:encoded>
     </item>
   </channel>
 </rss>
diff --git a/tests/components/feedreader/fixtures/feedreader2.xml b/tests/components/feedreader/fixtures/feedreader2.xml
index 653a16e4561..2471d70edcb 100644
--- a/tests/components/feedreader/fixtures/feedreader2.xml
+++ b/tests/components/feedreader/fixtures/feedreader2.xml
@@ -9,88 +9,109 @@
     <ttl>1800</ttl>
 
     <item>
-      <title>Title 1</title>
-      <pubDate>Mon, 30 Apr 2018 15:00:00 +1000</pubDate>
-    </item>
-    <item>
-      <title>Title 2</title>
-      <pubDate>Mon, 30 Apr 2018 15:01:00 +1000</pubDate>
-    </item>
-    <item>
-      <title>Title 3</title>
-      <pubDate>Mon, 30 Apr 2018 15:02:00 +1000</pubDate>
-    </item>
-    <item>
-      <title>Title 4</title>
-      <pubDate>Mon, 30 Apr 2018 15:03:00 +1000</pubDate>
-    </item>
-    <item>
-      <title>Title 5</title>
-      <pubDate>Mon, 30 Apr 2018 15:04:00 +1000</pubDate>
-    </item>
-    <item>
-      <title>Title 6</title>
-      <pubDate>Mon, 30 Apr 2018 15:05:00 +1000</pubDate>
-    </item>
-    <item>
-      <title>Title 7</title>
-      <pubDate>Mon, 30 Apr 2018 15:06:00 +1000</pubDate>
-    </item>
-    <item>
-      <title>Title 8</title>
-      <pubDate>Mon, 30 Apr 2018 15:07:00 +1000</pubDate>
-    </item>
-    <item>
-      <title>Title 9</title>
-      <pubDate>Mon, 30 Apr 2018 15:08:00 +1000</pubDate>
-    </item>
-    <item>
-      <title>Title 10</title>
-      <pubDate>Mon, 30 Apr 2018 15:09:00 +1000</pubDate>
-    </item>
-    <item>
-      <title>Title 11</title>
-      <pubDate>Mon, 30 Apr 2018 15:10:00 +1000</pubDate>
-    </item>
-    <item>
-      <title>Title 12</title>
-      <pubDate>Mon, 30 Apr 2018 15:11:00 +1000</pubDate>
-    </item>
-    <item>
-      <title>Title 13</title>
-      <pubDate>Mon, 30 Apr 2018 15:12:00 +1000</pubDate>
-    </item>
-    <item>
-      <title>Title 14</title>
-      <pubDate>Mon, 30 Apr 2018 15:13:00 +1000</pubDate>
-    </item>
-    <item>
-      <title>Title 15</title>
-      <pubDate>Mon, 30 Apr 2018 15:14:00 +1000</pubDate>
-    </item>
-    <item>
-      <title>Title 16</title>
-      <pubDate>Mon, 30 Apr 2018 15:15:00 +1000</pubDate>
-    </item>
-    <item>
-      <title>Title 17</title>
-      <pubDate>Mon, 30 Apr 2018 15:16:00 +1000</pubDate>
-    </item>
-    <item>
-      <title>Title 18</title>
-      <pubDate>Mon, 30 Apr 2018 15:17:00 +1000</pubDate>
-    </item>
-    <item>
-      <title>Title 19</title>
-      <pubDate>Mon, 30 Apr 2018 15:18:00 +1000</pubDate>
+      <title>Title 21</title>
+      <pubDate>Mon, 30 Apr 2018 15:20:00 +1000</pubDate>
+      <content:encoded>Content 21</content:encoded>
     </item>
     <item>
       <title>Title 20</title>
       <pubDate>Mon, 30 Apr 2018 15:19:00 +1000</pubDate>
+      <content:encoded>Content 20</content:encoded>
     </item>
     <item>
-      <title>Title 21</title>
-      <pubDate>Mon, 30 Apr 2018 15:20:00 +1000</pubDate>
+      <title>Title 19</title>
+      <pubDate>Mon, 30 Apr 2018 15:18:00 +1000</pubDate>
+      <content:encoded>Content 19</content:encoded>
+    </item>
+    <item>
+      <title>Title 18</title>
+      <pubDate>Mon, 30 Apr 2018 15:17:00 +1000</pubDate>
+      <content:encoded>Content 18</content:encoded>
+    </item>
+    <item>
+      <title>Title 17</title>
+      <pubDate>Mon, 30 Apr 2018 15:16:00 +1000</pubDate>
+      <content:encoded>Content 17</content:encoded>
+    </item>
+    <item>
+      <title>Title 16</title>
+      <pubDate>Mon, 30 Apr 2018 15:15:00 +1000</pubDate>
+      <content:encoded>Content 16</content:encoded>
+    </item>
+    <item>
+      <title>Title 15</title>
+      <pubDate>Mon, 30 Apr 2018 15:14:00 +1000</pubDate>
+      <content:encoded>Content 15</content:encoded>
+    </item>
+    <item>
+      <title>Title 14</title>
+      <pubDate>Mon, 30 Apr 2018 15:13:00 +1000</pubDate>
+      <content:encoded>Content 14</content:encoded>
+    </item>
+    <item>
+      <title>Title 13</title>
+      <pubDate>Mon, 30 Apr 2018 15:12:00 +1000</pubDate>
+      <content:encoded>Content 13</content:encoded>
+    </item>
+    <item>
+      <title>Title 12</title>
+      <pubDate>Mon, 30 Apr 2018 15:11:00 +1000</pubDate>
+      <content:encoded>Content 12</content:encoded>
+    </item>
+    <item>
+      <title>Title 11</title>
+      <pubDate>Mon, 30 Apr 2018 15:10:00 +1000</pubDate>
+      <content:encoded>Content 11</content:encoded>
+    </item>
+    <item>
+      <title>Title 10</title>
+      <pubDate>Mon, 30 Apr 2018 15:09:00 +1000</pubDate>
+      <content:encoded>Content 10</content:encoded>
+    </item>
+    <item>
+      <title>Title 9</title>
+      <pubDate>Mon, 30 Apr 2018 15:08:00 +1000</pubDate>
+      <content:encoded>Content 9</content:encoded>
+    </item>
+    <item>
+      <title>Title 8</title>
+      <pubDate>Mon, 30 Apr 2018 15:07:00 +1000</pubDate>
+      <content:encoded>Content 8</content:encoded>
+    </item>
+    <item>
+      <title>Title 7</title>
+      <pubDate>Mon, 30 Apr 2018 15:06:00 +1000</pubDate>
+      <content:encoded>Content 7</content:encoded>
+    </item>
+    <item>
+      <title>Title 6</title>
+      <pubDate>Mon, 30 Apr 2018 15:05:00 +1000</pubDate>
+      <content:encoded>Content 6</content:encoded>
+    </item>
+    <item>
+      <title>Title 5</title>
+      <pubDate>Mon, 30 Apr 2018 15:04:00 +1000</pubDate>
+      <content:encoded>Content 5</content:encoded>
+    </item>
+    <item>
+      <title>Title 4</title>
+      <pubDate>Mon, 30 Apr 2018 15:03:00 +1000</pubDate>
+      <content:encoded>Content 4</content:encoded>
+    </item>
+    <item>
+      <title>Title 3</title>
+      <pubDate>Mon, 30 Apr 2018 15:02:00 +1000</pubDate>
+      <content:encoded>Content 3</content:encoded>
+    </item>
+    <item>
+      <title>Title 1</title>
+      <pubDate>Mon, 30 Apr 2018 15:00:00 +1000</pubDate>
+      <content:encoded>Content 1</content:encoded>
+    </item>
+    <item>
+      <title>Title 2</title>
+      <pubDate>Mon, 30 Apr 2018 15:01:00 +1000</pubDate>
+      <content:encoded>Content 2</content:encoded>
     </item>
   </channel>
 </rss>
diff --git a/tests/components/feedreader/fixtures/feedreader3.xml b/tests/components/feedreader/fixtures/feedreader3.xml
index d8ccd119306..67daef20fe8 100644
--- a/tests/components/feedreader/fixtures/feedreader3.xml
+++ b/tests/components/feedreader/fixtures/feedreader3.xml
@@ -14,17 +14,20 @@
       <link>http://www.example.com/link/1</link>
       <guid>GUID 1</guid>
       <pubDate>Mon, 30 Apr 2018 15:10:00 +1000</pubDate>
+      <content:encoded>Content 1</content:encoded>
     </item>
     <item>
       <title>Title 2</title>
       <description>Description 2</description>
       <link>http://www.example.com/link/2</link>
       <guid>GUID 2</guid>
+      <content:encoded>Content 2</content:encoded>
     </item>
     <item>
       <description>Description 3</description>
       <link>http://www.example.com/link/3</link>
       <guid>GUID 3</guid>
+      <content:encoded>Content 3</content:encoded>
     </item>
   </channel>
 </rss>
diff --git a/tests/components/feedreader/fixtures/feedreader4.xml b/tests/components/feedreader/fixtures/feedreader4.xml
index 81828ccb6e2..11c8d501395 100644
--- a/tests/components/feedreader/fixtures/feedreader4.xml
+++ b/tests/components/feedreader/fixtures/feedreader4.xml
@@ -14,6 +14,7 @@
       <link>http://www.example.com/link/1</link>
       <guid>GUID 1</guid>
       <pubDate>26.10.2019 - 12:06:24</pubDate>
+      <content:encoded>Content 1</content:encoded>
     </item>
   </channel>
 </rss>
diff --git a/tests/components/feedreader/fixtures/feedreader5.xml b/tests/components/feedreader/fixtures/feedreader5.xml
index d9b1dda1ad2..562fd45ea93 100644
--- a/tests/components/feedreader/fixtures/feedreader5.xml
+++ b/tests/components/feedreader/fixtures/feedreader5.xml
@@ -14,5 +14,6 @@
     <id>urn:uuid:1225c695-cfb8-4ebb-aaaa-80da344efa6a</id>
     <updated>2003-12-13T18:30:02Z</updated>
     <summary>Some text.</summary>
+    <content>Content 1</content>
   </entry>
 </feed>
diff --git a/tests/components/feedreader/fixtures/feedreader6.xml b/tests/components/feedreader/fixtures/feedreader6.xml
index 621c89787e8..48abd06b95b 100644
--- a/tests/components/feedreader/fixtures/feedreader6.xml
+++ b/tests/components/feedreader/fixtures/feedreader6.xml
@@ -14,6 +14,7 @@
       <link>http://www.example.com/link/1</link>
       <guid>GUID 1</guid>
       <pubDate>Mon, 30 Apr 2018 15:10:00 +0000</pubDate>
+      <content:encoded>Content 1</content:encoded>
     </item>
     <item>
       <title>Title 2</title>
@@ -21,6 +22,7 @@
       <link>http://www.example.com/link/2</link>
       <guid>GUID 2</guid>
       <pubDate>Mon, 30 Apr 2018 15:10:00 +0000</pubDate>
+      <content:encoded>Content 2</content:encoded>
     </item>
   </channel>
 </rss>
diff --git a/tests/components/feedreader/test_event.py b/tests/components/feedreader/test_event.py
new file mode 100644
index 00000000000..23fec371860
--- /dev/null
+++ b/tests/components/feedreader/test_event.py
@@ -0,0 +1,47 @@
+"""The tests for the feedreader event entity."""
+
+from datetime import timedelta
+from unittest.mock import patch
+
+from homeassistant.components.feedreader.event import (
+    ATTR_CONTENT,
+    ATTR_LINK,
+    ATTR_TITLE,
+)
+from homeassistant.core import HomeAssistant
+import homeassistant.util.dt as dt_util
+
+from . import create_mock_entry
+from .const import VALID_CONFIG_DEFAULT
+
+from tests.common import async_fire_time_changed
+
+
+async def test_event_entity(
+    hass: HomeAssistant, feed_one_event, feed_two_event
+) -> None:
+    """Test feed event entity."""
+    entry = create_mock_entry(VALID_CONFIG_DEFAULT)
+    entry.add_to_hass(hass)
+    with patch(
+        "homeassistant.components.feedreader.coordinator.feedparser.http.get",
+        side_effect=[feed_one_event, feed_two_event],
+    ):
+        assert await hass.config_entries.async_setup(entry.entry_id)
+        await hass.async_block_till_done()
+
+        state = hass.states.get("event.mock_title")
+        assert state
+        assert state.attributes[ATTR_TITLE] == "Title 1"
+        assert state.attributes[ATTR_LINK] == "http://www.example.com/link/1"
+        assert state.attributes[ATTR_CONTENT] == "Content 1"
+
+        future = dt_util.utcnow() + timedelta(hours=1, seconds=1)
+        async_fire_time_changed(hass, future)
+        await hass.async_block_till_done(wait_background_tasks=True)
+
+        state = hass.states.get("event.mock_title")
+        assert state
+        assert state.attributes[ATTR_TITLE] == "Title 2"
+        assert state.attributes[ATTR_LINK] == "http://www.example.com/link/2"
+        assert state.attributes[ATTR_CONTENT] == "Content 2"
diff --git a/tests/components/feedreader/test_init.py b/tests/components/feedreader/test_init.py
index 1dcbf5ba45d..61e3f13ced7 100644
--- a/tests/components/feedreader/test_init.py
+++ b/tests/components/feedreader/test_init.py
@@ -296,7 +296,7 @@ async def test_feed_errors(
     async_fire_time_changed(hass)
     await hass.async_block_till_done(wait_background_tasks=True)
     assert (
-        "Error fetching feed data from http://some.rss.local/rss_feed.xml: "
+        "Error fetching feed data from http://some.rss.local/rss_feed.xml : "
         in caplog.text
    )
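
Note on the `content` unwrapping in `FeedReaderEvent._async_handle_update`: feedparser exposes an RSS `<content:encoded>` payload as `entry.content`, a list of dicts keyed by `"value"`, while `<description>` lands on `entry.summary`. The standalone sketch below (not part of the patch; the inline feed XML is an illustrative sample, not one of the test fixtures) demonstrates that mapping and mirrors the entity's fallback logic:

```python
# Minimal sketch of feedparser's content mapping, assuming feedparser is installed.
import feedparser

SAMPLE_FEED = """<?xml version="1.0" encoding="UTF-8"?>
<rss version="2.0" xmlns:content="http://purl.org/rss/1.0/modules/content/">
  <channel>
    <title>Sample</title>
    <item>
      <title>Title 1</title>
      <description>Description 1</description>
      <content:encoded>Content 1</content:encoded>
    </item>
  </channel>
</rss>"""

entry = feedparser.parse(SAMPLE_FEED).entries[0]

# entry.content is a list of dicts with a "value" key, which is why the event
# entity picks content[0].get("value"); entries without <content:encoded>
# fall back to the summary (populated from <description>).
if content := entry.get("content"):
    if isinstance(content, list) and isinstance(content[0], dict):
        content = content[0].get("value")
else:
    content = entry.get("summary")

print(content)  # -> Content 1
```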