diff --git a/.strict-typing b/.strict-typing
index a215c2187ab..1e6a47f508b 100644
--- a/.strict-typing
+++ b/.strict-typing
@@ -98,6 +98,7 @@ homeassistant.components.energy.*
 homeassistant.components.evil_genius_labs.*
 homeassistant.components.fan.*
 homeassistant.components.fastdotcom.*
+homeassistant.components.feedreader.*
 homeassistant.components.file_upload.*
 homeassistant.components.filesize.*
 homeassistant.components.fitbit.*
diff --git a/homeassistant/components/feedreader/__init__.py b/homeassistant/components/feedreader/__init__.py
index b3f1a916012..0ee4d3c39f3 100644
--- a/homeassistant/components/feedreader/__init__.py
+++ b/homeassistant/components/feedreader/__init__.py
@@ -1,9 +1,13 @@
 """Support for RSS/Atom feeds."""
+from __future__ import annotations
+
 from datetime import datetime, timedelta
 from logging import getLogger
 from os.path import exists
 import pickle
 from threading import Lock
+from time import struct_time
+from typing import cast
 
 import feedparser
 import voluptuous as vol
@@ -44,9 +48,9 @@ CONFIG_SCHEMA = vol.Schema(
 
 def setup(hass: HomeAssistant, config: ConfigType) -> bool:
     """Set up the Feedreader component."""
-    urls = config[DOMAIN][CONF_URLS]
-    scan_interval = config[DOMAIN].get(CONF_SCAN_INTERVAL)
-    max_entries = config[DOMAIN].get(CONF_MAX_ENTRIES)
+    urls: list[str] = config[DOMAIN][CONF_URLS]
+    scan_interval: timedelta = config[DOMAIN][CONF_SCAN_INTERVAL]
+    max_entries: int = config[DOMAIN][CONF_MAX_ENTRIES]
     data_file = hass.config.path(f"{DOMAIN}.pickle")
     storage = StoredData(data_file)
     feeds = [
@@ -58,16 +62,23 @@ def setup(hass: HomeAssistant, config: ConfigType) -> bool:
 class FeedManager:
     """Abstraction over Feedparser module."""
 
-    def __init__(self, url, scan_interval, max_entries, hass, storage):
+    def __init__(
+        self,
+        url: str,
+        scan_interval: timedelta,
+        max_entries: int,
+        hass: HomeAssistant,
+        storage: StoredData,
+    ) -> None:
         """Initialize the FeedManager object, poll as per scan interval."""
         self._url = url
         self._scan_interval = scan_interval
         self._max_entries = max_entries
-        self._feed = None
+        self._feed: feedparser.FeedParserDict | None = None
         self._hass = hass
         self._firstrun = True
         self._storage = storage
-        self._last_entry_timestamp = None
+        self._last_entry_timestamp: struct_time | None = None
         self._last_update_successful = False
         self._has_published_parsed = False
         self._has_updated_parsed = False
@@ -76,23 +87,23 @@ class FeedManager:
         hass.bus.listen_once(EVENT_HOMEASSISTANT_START, lambda _: self._update())
         self._init_regular_updates(hass)
 
-    def _log_no_entries(self):
+    def _log_no_entries(self) -> None:
         """Send no entries log at debug level."""
         _LOGGER.debug("No new entries to be published in feed %s", self._url)
 
-    def _init_regular_updates(self, hass):
+    def _init_regular_updates(self, hass: HomeAssistant) -> None:
         """Schedule regular updates at the top of the clock."""
         track_time_interval(hass, lambda now: self._update(), self._scan_interval)
 
     @property
-    def last_update_successful(self):
+    def last_update_successful(self) -> bool:
         """Return True if the last feed update was successful."""
         return self._last_update_successful
 
-    def _update(self):
+    def _update(self) -> None:
         """Update the feed and publish new entries to the event bus."""
         _LOGGER.info("Fetching new data from feed %s", self._url)
-        self._feed = feedparser.parse(
+        self._feed: feedparser.FeedParserDict = feedparser.parse(  # type: ignore[no-redef]
             self._url,
             etag=None if not self._feed else self._feed.get("etag"),
             modified=None if not self._feed else self._feed.get("modified"),
@@ -125,15 +136,16 @@ class FeedManager:
                 self._publish_new_entries()
                 if self._has_published_parsed or self._has_updated_parsed:
                     self._storage.put_timestamp(
-                        self._feed_id, self._last_entry_timestamp
+                        self._feed_id, cast(struct_time, self._last_entry_timestamp)
                     )
             else:
                 self._log_no_entries()
             self._last_update_successful = True
         _LOGGER.info("Fetch from feed %s completed", self._url)
 
-    def _filter_entries(self):
+    def _filter_entries(self) -> None:
         """Filter the entries provided and return the ones to keep."""
+        assert self._feed is not None
         if len(self._feed.entries) > self._max_entries:
             _LOGGER.debug(
                 "Processing only the first %s entries in feed %s",
@@ -142,7 +154,7 @@ class FeedManager:
             )
             self._feed.entries = self._feed.entries[0 : self._max_entries]
 
-    def _update_and_fire_entry(self, entry):
+    def _update_and_fire_entry(self, entry: feedparser.FeedParserDict) -> None:
         """Update last_entry_timestamp and fire entry."""
         # Check if the entry has a published or updated date.
         if "published_parsed" in entry and entry.published_parsed:
@@ -169,8 +181,9 @@ class FeedManager:
         entry.update({"feed_url": self._url})
         self._hass.bus.fire(self._event_type, entry)
 
-    def _publish_new_entries(self):
+    def _publish_new_entries(self) -> None:
         """Publish new entries to the event bus."""
+        assert self._feed is not None
         new_entries = False
         self._last_entry_timestamp = self._storage.get_timestamp(self._feed_id)
         if self._last_entry_timestamp:
@@ -202,15 +215,15 @@ class FeedManager:
 class StoredData:
     """Abstraction over pickle data storage."""
 
-    def __init__(self, data_file):
+    def __init__(self, data_file: str) -> None:
         """Initialize pickle data storage."""
         self._data_file = data_file
         self._lock = Lock()
         self._cache_outdated = True
-        self._data = {}
+        self._data: dict[str, struct_time] = {}
         self._fetch_data()
 
-    def _fetch_data(self):
+    def _fetch_data(self) -> None:
         """Fetch data stored into pickle file."""
         if self._cache_outdated and exists(self._data_file):
             try:
@@ -223,20 +236,21 @@ class StoredData:
                     "Error loading data from pickled file %s", self._data_file
                 )
 
-    def get_timestamp(self, feed_id):
+    def get_timestamp(self, feed_id: str) -> struct_time | None:
         """Return stored timestamp for given feed id (usually the url)."""
         self._fetch_data()
         return self._data.get(feed_id)
 
-    def put_timestamp(self, feed_id, timestamp):
+    def put_timestamp(self, feed_id: str, timestamp: struct_time) -> None:
         """Update timestamp for given feed id (usually the url)."""
         self._fetch_data()
         with self._lock, open(self._data_file, "wb") as myfile:
             self._data.update({feed_id: timestamp})
             _LOGGER.debug(
-                "Overwriting feed %s timestamp in storage file %s",
+                "Overwriting feed %s timestamp in storage file %s: %s",
                 feed_id,
                 self._data_file,
+                timestamp,
             )
             try:
                 pickle.dump(self._data, myfile)
diff --git a/mypy.ini b/mypy.ini
index fd609d8099b..863e673401c 100644
--- a/mypy.ini
+++ b/mypy.ini
@@ -739,6 +739,16 @@ disallow_untyped_defs = true
 warn_return_any = true
 warn_unreachable = true
 
+[mypy-homeassistant.components.feedreader.*]
+check_untyped_defs = true
+disallow_incomplete_defs = true
+disallow_subclassing_any = true
+disallow_untyped_calls = true
+disallow_untyped_decorators = true
+disallow_untyped_defs = true
+warn_return_any = true
+warn_unreachable = true
+
 [mypy-homeassistant.components.file_upload.*]
 check_untyped_defs = true
 disallow_incomplete_defs = true