Add event entity to Feedreader (#118147)

Michael 2024-07-07 21:44:41 +02:00 committed by GitHub
parent f126360c67
commit 0558e480ce
14 changed files with 332 additions and 124 deletions

View File

@@ -5,7 +5,7 @@ from __future__ import annotations
import voluptuous as vol
from homeassistant.config_entries import SOURCE_IMPORT, ConfigEntry
from homeassistant.const import CONF_SCAN_INTERVAL, CONF_URL
from homeassistant.const import CONF_SCAN_INTERVAL, CONF_URL, Platform
from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.issue_registry import IssueSeverity, async_create_issue
@@ -89,14 +89,16 @@ async def async_setup_entry(hass: HomeAssistant, entry: FeedReaderConfigEntry) -
storage,
)
await coordinator.async_config_entry_first_refresh()
# workaround because coordinators without listeners won't update
# can be removed when we have entities to update
coordinator.async_add_listener(lambda: None)
await coordinator.async_setup()
entry.runtime_data = coordinator
# we need to set up the event entities before the first coordinator data fetch
# so that they can already receive the events from the first fetch
await hass.config_entries.async_forward_entry_setups(entry, [Platform.EVENT])
await coordinator.async_config_entry_first_refresh()
entry.async_on_unload(entry.add_update_listener(_async_update_listener))
return True
@@ -110,7 +112,7 @@ async def async_unload_entry(hass: HomeAssistant, entry: FeedReaderConfigEntry)
# if this is the last entry, remove the storage
if len(entries) == 1:
hass.data.pop(MY_KEY)
return True
return await hass.config_entries.async_unload_platforms(entry, Platform.EVENT)
async def _async_update_listener(
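
For context, the change above follows the standard Home Assistant pairing: every platform forwarded in async_setup_entry must be unloaded again in async_unload_entry. A minimal sketch of that pattern, assuming a module-level PLATFORMS constant (this commit passes [Platform.EVENT] inline instead):

from homeassistant.config_entries import ConfigEntry
from homeassistant.const import Platform
from homeassistant.core import HomeAssistant

PLATFORMS: list[Platform] = [Platform.EVENT]  # assumed constant, for illustration only

async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
    # Forward the config entry to its entity platforms during setup
    await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
    return True

async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
    # Unload exactly the platforms that were forwarded during setup
    return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)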

View File

@@ -1,9 +1,12 @@
"""Constants for RSS/Atom feeds."""
from datetime import timedelta
from typing import Final
DOMAIN = "feedreader"
DOMAIN: Final[str] = "feedreader"
CONF_MAX_ENTRIES = "max_entries"
DEFAULT_MAX_ENTRIES = 20
DEFAULT_SCAN_INTERVAL = timedelta(hours=1)
CONF_MAX_ENTRIES: Final[str] = "max_entries"
DEFAULT_MAX_ENTRIES: Final[int] = 20
DEFAULT_SCAN_INTERVAL: Final[timedelta] = timedelta(hours=1)
EVENT_FEEDREADER: Final[str] = "feedreader"

View File

@@ -10,24 +10,28 @@ from urllib.error import URLError
import feedparser
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.storage import Store
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
from homeassistant.util import dt as dt_util
from .const import DEFAULT_SCAN_INTERVAL, DOMAIN
from .const import DEFAULT_SCAN_INTERVAL, DOMAIN, EVENT_FEEDREADER
DELAY_SAVE = 30
EVENT_FEEDREADER = "feedreader"
STORAGE_VERSION = 1
_LOGGER = getLogger(__name__)
class FeedReaderCoordinator(DataUpdateCoordinator[None]):
class FeedReaderCoordinator(
DataUpdateCoordinator[list[feedparser.FeedParserDict] | None]
):
"""Abstraction over Feedparser module."""
config_entry: ConfigEntry
def __init__(
self,
hass: HomeAssistant,
@@ -42,34 +46,36 @@ class FeedReaderCoordinator(DataUpdateCoordinator[None]):
name=f"{DOMAIN} {url}",
update_interval=DEFAULT_SCAN_INTERVAL,
)
self._url = url
self.url = url
self.feed_author: str | None = None
self.feed_version: str | None = None
self._max_entries = max_entries
self._feed: feedparser.FeedParserDict | None = None
self._storage = storage
self._last_entry_timestamp: struct_time | None = None
self._event_type = EVENT_FEEDREADER
self._feed: feedparser.FeedParserDict | None = None
self._feed_id = url
@callback
def _log_no_entries(self) -> None:
"""Send no entries log at debug level."""
_LOGGER.debug("No new entries to be published in feed %s", self._url)
_LOGGER.debug("No new entries to be published in feed %s", self.url)
def _fetch_feed(self) -> feedparser.FeedParserDict:
async def _async_fetch_feed(self) -> feedparser.FeedParserDict:
"""Fetch the feed data."""
return feedparser.parse(
self._url,
etag=None if not self._feed else self._feed.get("etag"),
modified=None if not self._feed else self._feed.get("modified"),
)
_LOGGER.debug("Fetching new data from feed %s", self.url)
async def _async_update_data(self) -> None:
"""Update the feed and publish new entries to the event bus."""
_LOGGER.debug("Fetching new data from feed %s", self._url)
self._feed = await self.hass.async_add_executor_job(self._fetch_feed)
def _parse_feed() -> feedparser.FeedParserDict:
return feedparser.parse(
self.url,
etag=None if not self._feed else self._feed.get("etag"),
modified=None if not self._feed else self._feed.get("modified"),
)
if not self._feed:
raise UpdateFailed(f"Error fetching feed data from {self._url}")
feed = await self.hass.async_add_executor_job(_parse_feed)
if not feed:
raise UpdateFailed(f"Error fetching feed data from {self.url}")
# The 'bozo' flag really only indicates that there was an issue
# during the initial parsing of the XML, but it doesn't indicate
@@ -77,37 +83,57 @@ class FeedReaderCoordinator(DataUpdateCoordinator[None]):
# feedparser lib is trying a less strict parsing approach.
# If an error is detected here, log warning message but continue
# processing the feed entries if present.
if self._feed.bozo != 0:
if isinstance(self._feed.bozo_exception, URLError):
if feed.bozo != 0:
if isinstance(feed.bozo_exception, URLError):
raise UpdateFailed(
f"Error fetching feed data from {self._url}: {self._feed.bozo_exception}"
f"Error fetching feed data from {self.url} : {feed.bozo_exception}"
)
# no connection issue, but parsing issue
_LOGGER.warning(
"Possible issue parsing feed %s: %s",
self._url,
self._feed.bozo_exception,
self.url,
feed.bozo_exception,
)
return feed
async def async_setup(self) -> None:
"""Set up the feed manager."""
feed = await self._async_fetch_feed()
self.logger.debug("Feed data fetched from %s : %s", self.url, feed["feed"])
self.feed_author = feed["feed"].get("author")
self.feed_version = feedparser.api.SUPPORTED_VERSIONS.get(feed["version"])
self._feed = feed
async def _async_update_data(self) -> list[feedparser.FeedParserDict] | None:
"""Update the feed and publish new entries to the event bus."""
assert self._feed is not None
# _last_entry_timestamp is not set during async_setup, but we have already
# fetched data there, so we can use it instead of fetching again
if self._last_entry_timestamp:
self._feed = await self._async_fetch_feed()
# Using etag and modified, if there's no new data available,
# the entries list will be empty
_LOGGER.debug(
"%s entri(es) available in feed %s",
len(self._feed.entries),
self._url,
self.url,
)
if not self._feed.entries:
if not isinstance(self._feed.entries, list):
self._log_no_entries()
return None
self._filter_entries()
self._publish_new_entries()
_LOGGER.debug("Fetch from feed %s completed", self._url)
_LOGGER.debug("Fetch from feed %s completed", self.url)
if self._last_entry_timestamp:
self._storage.async_put_timestamp(self._feed_id, self._last_entry_timestamp)
return self._feed.entries
@callback
def _filter_entries(self) -> None:
"""Filter the entries provided and return the ones to keep."""
@@ -116,7 +142,7 @@ class FeedReaderCoordinator(DataUpdateCoordinator[None]):
_LOGGER.debug(
"Processing only the first %s entries in feed %s",
self._max_entries,
self._url,
self.url,
)
self._feed.entries = self._feed.entries[0 : self._max_entries]
@@ -132,7 +158,7 @@ class FeedReaderCoordinator(DataUpdateCoordinator[None]):
"No updated_parsed or published_parsed info available for entry %s",
entry,
)
entry["feed_url"] = self._url
entry["feed_url"] = self.url
self.hass.bus.async_fire(self._event_type, entry)
_LOGGER.debug("New event fired for entry %s", entry.get("link"))
@@ -164,7 +190,7 @@ class FeedReaderCoordinator(DataUpdateCoordinator[None]):
if new_entry_count == 0:
self._log_no_entries()
else:
_LOGGER.debug("%d entries published in feed %s", new_entry_count, self._url)
_LOGGER.debug("%d entries published in feed %s", new_entry_count, self.url)
class StoredData:
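
The bozo handling above relies on feedparser's lenient parsing: bozo only flags that something went wrong while parsing the XML, not that the result is unusable. A minimal sketch of that behavior (the malformed XML string is a made-up example):

import feedparser

# bozo is set when the document is not well-formed, but feedparser still
# returns whatever it could recover; bozo_exception holds the underlying error.
feed = feedparser.parse("<rss><channel><title>Broken feed")  # malformed on purpose
print(feed.bozo)            # 1 -> a parsing problem was detected
print(feed.bozo_exception)  # e.g. a SAXParseException describing the issue
print(feed.entries)         # entries recovered on a best-effort basis (may be empty)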

View File

@@ -0,0 +1,90 @@
"""Event entities for RSS/Atom feeds."""
from __future__ import annotations
import logging
from feedparser import FeedParserDict
from homeassistant.components.event import EventEntity
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from homeassistant.helpers.update_coordinator import CoordinatorEntity
from . import FeedReaderConfigEntry
from .const import DOMAIN, EVENT_FEEDREADER
from .coordinator import FeedReaderCoordinator
LOGGER = logging.getLogger(__name__)
ATTR_CONTENT = "content"
ATTR_LINK = "link"
ATTR_TITLE = "title"
async def async_setup_entry(
hass: HomeAssistant,
entry: FeedReaderConfigEntry,
async_add_entities: AddEntitiesCallback,
) -> None:
"""Set up event entities for feedreader."""
coordinator: FeedReaderCoordinator = entry.runtime_data
async_add_entities([FeedReaderEvent(coordinator)])
class FeedReaderEvent(CoordinatorEntity[FeedReaderCoordinator], EventEntity):
"""Representation of a feedreader event."""
_attr_event_types = [EVENT_FEEDREADER]
_attr_name = None
_attr_has_entity_name = True
_unrecorded_attributes = frozenset({ATTR_CONTENT, ATTR_TITLE, ATTR_LINK})
coordinator: FeedReaderCoordinator
def __init__(self, coordinator: FeedReaderCoordinator) -> None:
"""Initialize the feedreader event."""
super().__init__(coordinator)
self._attr_unique_id = f"{coordinator.config_entry.entry_id}_latest_feed"
self._attr_translation_key = "latest_feed"
self._attr_device_info = DeviceInfo(
identifiers={(DOMAIN, coordinator.config_entry.entry_id)},
name=coordinator.config_entry.title,
configuration_url=coordinator.url,
manufacturer=coordinator.feed_author,
sw_version=coordinator.feed_version,
entry_type=DeviceEntryType.SERVICE,
)
async def async_added_to_hass(self) -> None:
"""Entity added to hass."""
await super().async_added_to_hass()
self.async_on_remove(
self.coordinator.async_add_listener(self._async_handle_update)
)
@callback
def _async_handle_update(self) -> None:
if (data := self.coordinator.data) is None or not data:
return
# RSS feeds are normally sorted reverse chronologically by published date,
# so we always take the first entry in the list, since we only care about the latest one
feed_data: FeedParserDict = data[0]
if content := feed_data.get("content"):
if isinstance(content, list) and isinstance(content[0], dict):
content = content[0].get("value")
else:
content = feed_data.get("summary")
self._trigger_event(
EVENT_FEEDREADER,
{
ATTR_TITLE: feed_data.get("title"),
ATTR_LINK: feed_data.get("link"),
ATTR_CONTENT: content,
},
)
self.async_write_ha_state()
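
Note that the coordinator still fires the legacy "feedreader" event on the Home Assistant bus for every new entry (see _publish_new_entries above), independent of the new entity. A minimal sketch of subscribing to those bus events, assuming a hass instance is in scope:

from homeassistant.core import Event, HomeAssistant, callback

@callback
def _handle_feed_entry(event: Event) -> None:
    # event.data is a single feedparser entry, including the added feed_url key
    print(event.data.get("title"), event.data.get("link"))

def subscribe(hass: HomeAssistant):
    # async_listen returns an unsubscribe callable
    return hass.bus.async_listen("feedreader", _handle_feed_entry)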

View File

@@ -0,0 +1,9 @@
{
"entity": {
"event": {
"latest_feed": {
"default": "mdi:rss"
}
}
}
}

View File

@@ -14,6 +14,7 @@
<link>http://www.example.com/link/1</link>
<guid isPermaLink="false">GUID 1</guid>
<pubDate>Mon, 30 Apr 2018 15:10:00 +1000</pubDate>
<content>Content 1</content>
</item>
</channel>

View File

@@ -8,19 +8,21 @@
<pubDate>Mon, 30 Apr 2018 15:00:00 +1000</pubDate>
<ttl>1800</ttl>
<item>
<title>Title 1</title>
<description>Description 1</description>
<link>http://www.example.com/link/1</link>
<guid isPermaLink="false">GUID 1</guid>
<pubDate>Mon, 30 Apr 2018 15:10:00 +1000</pubDate>
</item>
<item>
<title>Title 2</title>
<description>Description 2</description>
<link>http://www.example.com/link/2</link>
<guid isPermaLink="false">GUID 2</guid>
<pubDate>Mon, 30 Apr 2018 15:11:00 +1000</pubDate>
<content>Content 2</content>
</item>
<item>
<title>Title 1</title>
<description>Description 1</description>
<link>http://www.example.com/link/1</link>
<guid isPermaLink="false">GUID 1</guid>
<pubDate>Mon, 30 Apr 2018 15:10:00 +1000</pubDate>
<content>Content 1</content>
</item>
</channel>

View File

@@ -9,88 +9,109 @@
<ttl>1800</ttl>
<item>
<title>Title 1</title>
<pubDate>Mon, 30 Apr 2018 15:00:00 +1000</pubDate>
</item>
<item>
<title>Title 2</title>
<pubDate>Mon, 30 Apr 2018 15:01:00 +1000</pubDate>
</item>
<item>
<title>Title 3</title>
<pubDate>Mon, 30 Apr 2018 15:02:00 +1000</pubDate>
</item>
<item>
<title>Title 4</title>
<pubDate>Mon, 30 Apr 2018 15:03:00 +1000</pubDate>
</item>
<item>
<title>Title 5</title>
<pubDate>Mon, 30 Apr 2018 15:04:00 +1000</pubDate>
</item>
<item>
<title>Title 6</title>
<pubDate>Mon, 30 Apr 2018 15:05:00 +1000</pubDate>
</item>
<item>
<title>Title 7</title>
<pubDate>Mon, 30 Apr 2018 15:06:00 +1000</pubDate>
</item>
<item>
<title>Title 8</title>
<pubDate>Mon, 30 Apr 2018 15:07:00 +1000</pubDate>
</item>
<item>
<title>Title 9</title>
<pubDate>Mon, 30 Apr 2018 15:08:00 +1000</pubDate>
</item>
<item>
<title>Title 10</title>
<pubDate>Mon, 30 Apr 2018 15:09:00 +1000</pubDate>
</item>
<item>
<title>Title 11</title>
<pubDate>Mon, 30 Apr 2018 15:10:00 +1000</pubDate>
</item>
<item>
<title>Title 12</title>
<pubDate>Mon, 30 Apr 2018 15:11:00 +1000</pubDate>
</item>
<item>
<title>Title 13</title>
<pubDate>Mon, 30 Apr 2018 15:12:00 +1000</pubDate>
</item>
<item>
<title>Title 14</title>
<pubDate>Mon, 30 Apr 2018 15:13:00 +1000</pubDate>
</item>
<item>
<title>Title 15</title>
<pubDate>Mon, 30 Apr 2018 15:14:00 +1000</pubDate>
</item>
<item>
<title>Title 16</title>
<pubDate>Mon, 30 Apr 2018 15:15:00 +1000</pubDate>
</item>
<item>
<title>Title 17</title>
<pubDate>Mon, 30 Apr 2018 15:16:00 +1000</pubDate>
</item>
<item>
<title>Title 18</title>
<pubDate>Mon, 30 Apr 2018 15:17:00 +1000</pubDate>
</item>
<item>
<title>Title 19</title>
<pubDate>Mon, 30 Apr 2018 15:18:00 +1000</pubDate>
<title>Title 21</title>
<pubDate>Mon, 30 Apr 2018 15:20:00 +1000</pubDate>
<content>Content 21</content>
</item>
<item>
<title>Title 20</title>
<pubDate>Mon, 30 Apr 2018 15:19:00 +1000</pubDate>
<content>Content 20</content>
</item>
<item>
<title>Title 21</title>
<pubDate>Mon, 30 Apr 2018 15:20:00 +1000</pubDate>
<title>Title 19</title>
<pubDate>Mon, 30 Apr 2018 15:18:00 +1000</pubDate>
<content>Content 19</content>
</item>
<item>
<title>Title 18</title>
<pubDate>Mon, 30 Apr 2018 15:17:00 +1000</pubDate>
<content>Content 18</content>
</item>
<item>
<title>Title 17</title>
<pubDate>Mon, 30 Apr 2018 15:16:00 +1000</pubDate>
<content>Content 17</content>
</item>
<item>
<title>Title 16</title>
<pubDate>Mon, 30 Apr 2018 15:15:00 +1000</pubDate>
<content>Content 16</content>
</item>
<item>
<title>Title 15</title>
<pubDate>Mon, 30 Apr 2018 15:14:00 +1000</pubDate>
<content>Content 15</content>
</item>
<item>
<title>Title 14</title>
<pubDate>Mon, 30 Apr 2018 15:13:00 +1000</pubDate>
<content>Content 14</content>
</item>
<item>
<title>Title 13</title>
<pubDate>Mon, 30 Apr 2018 15:12:00 +1000</pubDate>
<content>Content 13</content>
</item>
<item>
<title>Title 12</title>
<pubDate>Mon, 30 Apr 2018 15:11:00 +1000</pubDate>
<content>Content 12</content>
</item>
<item>
<title>Title 11</title>
<pubDate>Mon, 30 Apr 2018 15:10:00 +1000</pubDate>
<content>Content 11</content>
</item>
<item>
<title>Title 10</title>
<pubDate>Mon, 30 Apr 2018 15:09:00 +1000</pubDate>
<content>Content 10</content>
</item>
<item>
<title>Title 9</title>
<pubDate>Mon, 30 Apr 2018 15:08:00 +1000</pubDate>
<content>Content 9</content>
</item>
<item>
<title>Title 8</title>
<pubDate>Mon, 30 Apr 2018 15:07:00 +1000</pubDate>
<content>Content 8</content>
</item>
<item>
<title>Title 7</title>
<pubDate>Mon, 30 Apr 2018 15:06:00 +1000</pubDate>
<content>Content 7</content>
</item>
<item>
<title>Title 6</title>
<pubDate>Mon, 30 Apr 2018 15:05:00 +1000</pubDate>
<content>Content 6</content>
</item>
<item>
<title>Title 5</title>
<pubDate>Mon, 30 Apr 2018 15:04:00 +1000</pubDate>
<content>Content 5</content>
</item>
<item>
<title>Title 4</title>
<pubDate>Mon, 30 Apr 2018 15:03:00 +1000</pubDate>
<content>Content 4</content>
</item>
<item>
<title>Title 3</title>
<pubDate>Mon, 30 Apr 2018 15:02:00 +1000</pubDate>
<content>Content 3</content>
</item>
<item>
<title>Title 1</title>
<pubDate>Mon, 30 Apr 2018 15:00:00 +1000</pubDate>
<content>Content 1</content>
</item>
<item>
<title>Title 2</title>
<pubDate>Mon, 30 Apr 2018 15:01:00 +1000</pubDate>
<content>Content 2</content>
</item>
</channel>

View File

@@ -14,17 +14,20 @@
<link>http://www.example.com/link/1</link>
<guid isPermaLink="false">GUID 1</guid>
<pubDate>Mon, 30 Apr 2018 15:10:00 +1000</pubDate>
<content>Content 1</content>
</item>
<item>
<title>Title 2</title>
<description>Description 2</description>
<link>http://www.example.com/link/2</link>
<guid isPermaLink="false">GUID 2</guid>
<content>Content 2</content>
</item>
<item>
<description>Description 3</description>
<link>http://www.example.com/link/3</link>
<guid isPermaLink="false">GUID 3</guid>
<content>Content 3</content>
</item>
</channel>

View File

@@ -14,6 +14,7 @@
<link>http://www.example.com/link/1</link>
<guid isPermaLink="false">GUID 1</guid>
<pubDate>26.10.2019 - 12:06:24</pubDate>
<content>Content 1</content>
</item>
</channel>

View File

@@ -14,5 +14,6 @@
<id>urn:uuid:1225c695-cfb8-4ebb-aaaa-80da344efa6a</id>
<updated>2003-12-13T18:30:02Z</updated>
<summary>Some text.</summary>
<content>Content 1</content>
</entry>
</feed>

View File

@@ -14,6 +14,7 @@
<link>http://www.example.com/link/1</link>
<guid isPermaLink="false">GUID 1</guid>
<pubDate>Mon, 30 Apr 2018 15:10:00 +0000</pubDate>
<content>Content 1</content>
</item>
<item>
<title>Title 2</title>
@@ -21,6 +22,7 @@
<link>http://www.example.com/link/2</link>
<guid isPermaLink="false">GUID 2</guid>
<pubDate>Mon, 30 Apr 2018 15:10:00 +0000</pubDate>
<content>Content 2</content>
</item>
</channel>

View File

@@ -0,0 +1,47 @@
"""The tests for the feedreader event entity."""
from datetime import timedelta
from unittest.mock import patch
from homeassistant.components.feedreader.event import (
ATTR_CONTENT,
ATTR_LINK,
ATTR_TITLE,
)
from homeassistant.core import HomeAssistant
import homeassistant.util.dt as dt_util
from . import create_mock_entry
from .const import VALID_CONFIG_DEFAULT
from tests.common import async_fire_time_changed
async def test_event_entity(
hass: HomeAssistant, feed_one_event, feed_two_event
) -> None:
"""Test feed event entity."""
entry = create_mock_entry(VALID_CONFIG_DEFAULT)
entry.add_to_hass(hass)
with patch(
"homeassistant.components.feedreader.coordinator.feedparser.http.get",
side_effect=[feed_one_event, feed_two_event],
):
assert await hass.config_entries.async_setup(entry.entry_id)
await hass.async_block_till_done()
state = hass.states.get("event.mock_title")
assert state
assert state.attributes[ATTR_TITLE] == "Title 1"
assert state.attributes[ATTR_LINK] == "http://www.example.com/link/1"
assert state.attributes[ATTR_CONTENT] == "Content 1"
future = dt_util.utcnow() + timedelta(hours=1, seconds=1)
async_fire_time_changed(hass, future)
await hass.async_block_till_done(wait_background_tasks=True)
state = hass.states.get("event.mock_title")
assert state
assert state.attributes[ATTR_TITLE] == "Title 2"
assert state.attributes[ATTR_LINK] == "http://www.example.com/link/2"
assert state.attributes[ATTR_CONTENT] == "Content 2"
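
The feed_one_event and feed_two_event fixtures are defined outside this diff; a hypothetical sketch of how such a fixture could serve the XML documents above as the mocked HTTP body (the fixture file name and encoding are assumptions):

import pytest

from tests.common import load_fixture

@pytest.fixture
def feed_one_event() -> bytes:
    # Hypothetical: raw RSS bytes returned by the patched feedparser.http.get
    return load_fixture("feedreader/feed_one_event.xml").encode("utf-8")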

View File

@@ -296,7 +296,7 @@ async def test_feed_errors(
async_fire_time_changed(hass)
await hass.async_block_till_done(wait_background_tasks=True)
assert (
"Error fetching feed data from http://some.rss.local/rss_feed.xml: <urlopen error Test>"
"Error fetching feed data from http://some.rss.local/rss_feed.xml : <urlopen error Test>"
in caplog.text
)