2024.11.3 (#131248)
This commit is contained in commit 0644d782cd.
@@ -7,5 +7,5 @@
   "integration_type": "hub",
   "iot_class": "local_polling",
   "loggers": ["aioairq"],
-  "requirements": ["aioairq==0.3.2"]
+  "requirements": ["aioairq==0.4.3"]
 }
@@ -11,5 +11,5 @@
   "documentation": "https://www.home-assistant.io/integrations/airzone",
   "iot_class": "local_polling",
   "loggers": ["aioairzone"],
-  "requirements": ["aioairzone==0.9.5"]
+  "requirements": ["aioairzone==0.9.7"]
 }
@@ -38,6 +38,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ApSystemsConfigEntry) ->
         ip_address=entry.data[CONF_IP_ADDRESS],
         port=entry.data.get(CONF_PORT, DEFAULT_PORT),
         timeout=8,
+        enable_debounce=True,
     )
     coordinator = ApSystemsDataCoordinator(hass, api)
     await coordinator.async_config_entry_first_refresh()
@@ -6,5 +6,5 @@
   "documentation": "https://www.home-assistant.io/integrations/apsystems",
   "integration_type": "device",
   "iot_class": "local_polling",
-  "requirements": ["apsystems-ez1==2.2.1"]
+  "requirements": ["apsystems-ez1==2.4.0"]
 }
@@ -5,6 +5,7 @@ from __future__ import annotations
 from typing import Any

 from aiohttp.client_exceptions import ClientConnectionError
+from APsystemsEZ1 import InverterReturnedError

 from homeassistant.components.switch import SwitchDeviceClass, SwitchEntity
 from homeassistant.core import HomeAssistant
@@ -40,7 +41,7 @@ class ApSystemsInverterSwitch(ApSystemsEntity, SwitchEntity):
         """Update switch status and availability."""
         try:
             status = await self._api.get_device_power_status()
-        except (TimeoutError, ClientConnectionError):
+        except (TimeoutError, ClientConnectionError, InverterReturnedError):
             self._attr_available = False
         else:
             self._attr_available = True
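A note on the APsystems hunks above: the bump to apsystems-ez1 2.4.0 comes with the switch treating the library's `InverterReturnedError` like a transport failure, marking the entity unavailable instead of letting the exception escape. A minimal, self-contained sketch of that try/except/else availability pattern (the error class and `fetch_status` here are stand-ins, not the real library API):

```python
import asyncio


class InverterReturnedError(Exception):
    """Stand-in for the library error newly caught above."""


async def fetch_status() -> int:
    """Hypothetical API call that may raise on transport or device errors."""
    raise InverterReturnedError


class SketchSwitch:
    """Minimal sketch of the availability pattern from the hunk above."""

    _attr_available = True
    _attr_is_on = False

    async def async_update(self) -> None:
        try:
            status = await fetch_status()
        except (TimeoutError, ConnectionError, InverterReturnedError):
            # Expected failures flip availability instead of raising.
            self._attr_available = False
        else:
            self._attr_available = True
            self._attr_is_on = status == 1


asyncio.run(SketchSwitch().async_update())
```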
@@ -16,7 +16,7 @@
   "requirements": [
     "bleak==0.22.3",
     "bleak-retry-connector==3.6.0",
-    "bluetooth-adapters==0.20.0",
+    "bluetooth-adapters==0.20.2",
     "bluetooth-auto-recovery==1.4.2",
     "bluetooth-data-tools==1.20.0",
     "dbus-fast==2.24.3",
@@ -53,7 +53,7 @@
     },
     "view_path": {
       "name": "View path",
-      "description": "The path of the dashboard view to show."
+      "description": "The URL path of the dashboard view to show."
     }
   }
 }
@@ -68,7 +68,7 @@
       }
     },
     "alarm_arm_home_instant": {
-      "name": "Alarm are home instant",
+      "name": "Alarm arm home instant",
       "description": "Arms the ElkM1 in home instant mode.",
       "fields": {
         "code": {
@@ -6,7 +6,7 @@
   "documentation": "https://www.home-assistant.io/integrations/elmax",
   "iot_class": "cloud_polling",
   "loggers": ["elmax_api"],
-  "requirements": ["elmax-api==0.0.5"],
+  "requirements": ["elmax-api==0.0.6.1"],
   "zeroconf": [
     {
       "type": "_elmax-ssl._tcp.local."
@@ -179,6 +179,9 @@ class FFmpegConvertResponse(web.StreamResponse):
         # Remove metadata and cover art
         command_args.extend(["-map_metadata", "-1", "-vn"])

+        # disable progress stats on stderr
+        command_args.append("-nostats")
+
         # Output to stdout
         command_args.append("pipe:")

@@ -118,7 +118,7 @@
     },
     "service_calls_not_allowed": {
       "title": "{name} is not permitted to perform Home Assistant actions",
-      "description": "The ESPHome device attempted to perform a Home Assistant action, but this functionality is not enabled.\n\nIf you trust this device and want to allow it to perfom Home Assistant action, you can enable this functionality in the options flow."
+      "description": "The ESPHome device attempted to perform a Home Assistant action, but this functionality is not enabled.\n\nIf you trust this device and want to allow it to perform Home Assistant action, you can enable this functionality in the options flow."
     }
   }
 }
@@ -2,6 +2,7 @@

 from __future__ import annotations

+import html
 import logging
 from typing import Any
 import urllib.error
@@ -106,7 +107,7 @@ class FeedReaderConfigFlow(ConfigFlow, domain=DOMAIN):
                 return self.abort_on_import_error(user_input[CONF_URL], "url_error")
             return self.show_user_form(user_input, {"base": "url_error"})

-        feed_title = feed["feed"]["title"]
+        feed_title = html.unescape(feed["feed"]["title"])

         return self.async_create_entry(
             title=feed_title,
@@ -4,6 +4,7 @@ from __future__ import annotations

 from calendar import timegm
 from datetime import datetime
+import html
 from logging import getLogger
 from time import gmtime, struct_time
 from typing import TYPE_CHECKING
@@ -102,7 +103,8 @@ class FeedReaderCoordinator(
         """Set up the feed manager."""
         feed = await self._async_fetch_feed()
         self.logger.debug("Feed data fetched from %s : %s", self.url, feed["feed"])
-        self.feed_author = feed["feed"].get("author")
+        if feed_author := feed["feed"].get("author"):
+            self.feed_author = html.unescape(feed_author)
         self.feed_version = feedparser.api.SUPPORTED_VERSIONS.get(feed["version"])
         self._feed = feed

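The feedreader hunks above (and the event hunk that follows) all route user-visible feed strings through the standard library's `html.unescape`, which turns named and numeric character references back into Unicode and leaves plain text alone. For example:

```python
import html

# Named and numeric character references are resolved to Unicode.
assert html.unescape("Juan P&eacute;rez") == "Juan Pérez"
assert html.unescape("T&#237;tulo") == "Título"
# Text without references passes through unchanged.
assert html.unescape("Título") == "Título"
```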
@@ -2,6 +2,7 @@

 from __future__ import annotations

+import html
 import logging

 from feedparser import FeedParserDict
@@ -76,15 +77,22 @@ class FeedReaderEvent(CoordinatorEntity[FeedReaderCoordinator], EventEntity):
         # so we always take the first entry in list, since we only care about the latest entry
         feed_data: FeedParserDict = data[0]

+        if description := feed_data.get("description"):
+            description = html.unescape(description)
+
+        if title := feed_data.get("title"):
+            title = html.unescape(title)
+
         if content := feed_data.get("content"):
             if isinstance(content, list) and isinstance(content[0], dict):
                 content = content[0].get("value")
+            content = html.unescape(content)

         self._trigger_event(
             EVENT_FEEDREADER,
             {
-                ATTR_DESCRIPTION: feed_data.get("description"),
-                ATTR_TITLE: feed_data.get("title"),
+                ATTR_DESCRIPTION: description,
+                ATTR_TITLE: title,
                 ATTR_LINK: feed_data.get("link"),
                 ATTR_CONTENT: content,
             },
@@ -69,37 +69,29 @@ class FibaroCover(FibaroEntity, CoverEntity):
         # so if it is missing we have a device which supports open / close only
         return not self.fibaro_device.value.has_value

-    @property
-    def current_cover_position(self) -> int | None:
-        """Return current position of cover. 0 is closed, 100 is open."""
-        return self.bound(self.level)
+    def update(self) -> None:
+        """Update the state."""
+        super().update()

-    @property
-    def current_cover_tilt_position(self) -> int | None:
-        """Return the current tilt position for venetian blinds."""
-        return self.bound(self.level2)
+        self._attr_current_cover_position = self.bound(self.level)
+        self._attr_current_cover_tilt_position = self.bound(self.level2)

-    @property
-    def is_opening(self) -> bool | None:
-        """Return if the cover is opening or not.
+        device_state = self.fibaro_device.state

-        Be aware that this property is only available for some modern devices.
-        For example the Fibaro Roller Shutter 4 reports this correctly.
-        """
-        if self.fibaro_device.state.has_value:
-            return self.fibaro_device.state.str_value().lower() == "opening"
-        return None
+        # Be aware that opening and closing is only available for some modern
+        # devices.
+        # For example the Fibaro Roller Shutter 4 reports this correctly.
+        if device_state.has_value:
+            self._attr_is_opening = device_state.str_value().lower() == "opening"
+            self._attr_is_closing = device_state.str_value().lower() == "closing"

-    @property
-    def is_closing(self) -> bool | None:
-        """Return if the cover is closing or not.
-
-        Be aware that this property is only available for some modern devices.
-        For example the Fibaro Roller Shutter 4 reports this correctly.
-        """
-        if self.fibaro_device.state.has_value:
-            return self.fibaro_device.state.str_value().lower() == "closing"
-        return None
+        closed: bool | None = None
+        if self._is_open_close_only():
+            if device_state.has_value and device_state.str_value().lower() != "unknown":
+                closed = device_state.str_value().lower() == "closed"
+        elif self.current_cover_position is not None:
+            closed = self.current_cover_position == 0
+        self._attr_is_closed = closed

     def set_cover_position(self, **kwargs: Any) -> None:
         """Move the cover to a specific position."""
@@ -109,19 +101,6 @@ class FibaroCover(FibaroEntity, CoverEntity):
         """Move the cover to a specific position."""
         self.set_level2(cast(int, kwargs.get(ATTR_TILT_POSITION)))

-    @property
-    def is_closed(self) -> bool | None:
-        """Return if the cover is closed."""
-        if self._is_open_close_only():
-            state = self.fibaro_device.state
-            if not state.has_value or state.str_value().lower() == "unknown":
-                return None
-            return state.str_value().lower() == "closed"
-
-        if self.current_cover_position is None:
-            return None
-        return self.current_cover_position == 0
-
     def open_cover(self, **kwargs: Any) -> None:
         """Open the cover."""
         self.action("open")
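The Fibaro cover change above swaps several cached properties for a single `update()` that fills in the `_attr_*` fields, so the device is read once per poll and the open/close-only special case lives in one place. A reduced sketch of that pattern, with `_read_level()` as a hypothetical stand-in for the device read:

```python
class SketchCover:
    """Entity that caches state in _attr_* fields during update()."""

    _attr_current_cover_position: int | None = None
    _attr_is_closed: bool | None = None

    def _read_level(self) -> int:
        # Hypothetical stand-in for the hardware read (0..100).
        return 0

    def update(self) -> None:
        # Compute everything once per poll; the framework then reads the
        # cached _attr_* values instead of invoking properties repeatedly.
        level = self._read_level()
        self._attr_current_cover_position = level
        self._attr_is_closed = level == 0


cover = SketchCover()
cover.update()
assert cover._attr_is_closed is True
```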
@@ -274,7 +274,7 @@
       "fields": {
         "addon": {
           "name": "Add-on",
-          "description": "The add-on slug."
+          "description": "The add-on to start."
         }
       }
     },
@@ -284,17 +284,17 @@
       "fields": {
         "addon": {
           "name": "[%key:component::hassio::services::addon_start::fields::addon::name%]",
-          "description": "[%key:component::hassio::services::addon_start::fields::addon::description%]"
+          "description": "The add-on to restart."
         }
       }
     },
     "addon_stdin": {
       "name": "Write data to add-on stdin.",
-      "description": "Writes data to add-on stdin.",
+      "description": "Writes data to the add-on's standard input.",
       "fields": {
         "addon": {
           "name": "[%key:component::hassio::services::addon_start::fields::addon::name%]",
-          "description": "[%key:component::hassio::services::addon_start::fields::addon::description%]"
+          "description": "The add-on to write to."
         }
       }
     },
@@ -304,7 +304,7 @@
       "fields": {
         "addon": {
           "name": "[%key:component::hassio::services::addon_start::fields::addon::name%]",
-          "description": "[%key:component::hassio::services::addon_start::fields::addon::description%]"
+          "description": "The add-on to stop."
         }
       }
     },
@@ -314,7 +314,7 @@
       "fields": {
         "addon": {
           "name": "[%key:component::hassio::services::addon_start::fields::addon::name%]",
-          "description": "[%key:component::hassio::services::addon_start::fields::addon::description%]"
+          "description": "The add-on to update."
         }
       }
     },
@@ -5,5 +5,5 @@
   "config_flow": true,
   "documentation": "https://www.home-assistant.io/integrations/holiday",
   "iot_class": "local_polling",
-  "requirements": ["holidays==0.60", "babel==2.15.0"]
+  "requirements": ["holidays==0.61", "babel==2.15.0"]
 }
@@ -7,5 +7,5 @@
   "iot_class": "cloud_push",
   "loggers": ["homematicip"],
   "quality_scale": "silver",
-  "requirements": ["homematicip==1.1.2"]
+  "requirements": ["homematicip==1.1.3"]
 }
@@ -33,7 +33,7 @@ from homeassistant.const import (
     CONF_PROTOCOL,
     CONF_USERNAME,
 )
-from homeassistant.core import callback
+from homeassistant.core import HomeAssistant, callback
 from homeassistant.data_entry_flow import AbortFlow
 from homeassistant.helpers import config_validation as cv
 from homeassistant.helpers.hassio import is_hassio
@@ -737,6 +737,16 @@ class MQTTOptionsFlowHandler(OptionsFlow):
         )


+async def _get_uploaded_file(hass: HomeAssistant, id: str) -> str:
+    """Get file content from uploaded file."""
+
+    def _proces_uploaded_file() -> str:
+        with process_uploaded_file(hass, id) as file_path:
+            return file_path.read_text(encoding=DEFAULT_ENCODING)
+
+    return await hass.async_add_executor_job(_proces_uploaded_file)
+
+
 async def async_get_broker_settings(
     flow: ConfigFlow | OptionsFlow,
     fields: OrderedDict[Any, Any],
@@ -795,8 +805,7 @@ async def async_get_broker_settings(
             return False
         certificate_id: str | None = user_input.get(CONF_CERTIFICATE)
         if certificate_id:
-            with process_uploaded_file(hass, certificate_id) as certificate_file:
-                certificate = certificate_file.read_text(encoding=DEFAULT_ENCODING)
+            certificate = await _get_uploaded_file(hass, certificate_id)

         # Return to form for file upload CA cert or client cert and key
         if (
|
||||
return False
|
||||
|
||||
if client_certificate_id:
|
||||
with process_uploaded_file(
|
||||
hass, client_certificate_id
|
||||
) as client_certificate_file:
|
||||
client_certificate = client_certificate_file.read_text(
|
||||
encoding=DEFAULT_ENCODING
|
||||
)
|
||||
client_certificate = await _get_uploaded_file(hass, client_certificate_id)
|
||||
if client_key_id:
|
||||
with process_uploaded_file(hass, client_key_id) as key_file:
|
||||
client_key = key_file.read_text(encoding=DEFAULT_ENCODING)
|
||||
client_key = await _get_uploaded_file(hass, client_key_id)
|
||||
|
||||
certificate_data: dict[str, Any] = {}
|
||||
if certificate:
|
||||
|
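The three MQTT hunks above deduplicate the certificate/key upload handling into `_get_uploaded_file`, which also moves the blocking `read_text` call into a worker thread via `hass.async_add_executor_job`. That helper is essentially a wrapper over asyncio's executor machinery; the underlying pattern, in plain asyncio:

```python
import asyncio
from pathlib import Path


async def read_file_async(path: Path) -> str:
    """Run a blocking file read in the default thread pool executor."""

    def _read() -> str:
        return path.read_text(encoding="utf-8")

    loop = asyncio.get_running_loop()
    return await loop.run_in_executor(None, _read)


async def main() -> None:
    content = await read_file_async(Path(__file__))
    print(len(content))


asyncio.run(main())
```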
@@ -1,6 +1,6 @@
 {
   "application_credentials": {
-    "description": "Follow the [instructions]({more_info_url}) to give Home Assistant access to your myUplink account. You also need to create application credentials linked to your account:\n1. Go to [Applications at myUplink developer site]({create_creds_url}) and get credentials from an existing application or select **Create New Application**.\n1. Set appropriate Application name and Description\n2. Enter `{callback_url}` as Callback Url"
+    "description": "Follow the [instructions]({more_info_url}) to give Home Assistant access to your myUplink account. You also need to create application credentials linked to your account:\n1. Go to [Applications at myUplink developer site]({create_creds_url}) and get credentials from an existing application or select **Create New Application**.\n1. Set appropriate Application name and Description\n1. Enter `{callback_url}` as Callback URL"
   },
   "config": {
     "step": {
@@ -27,6 +27,12 @@ from .const import CONF_PING_COUNT, DEFAULT_PING_COUNT, DOMAIN
 _LOGGER = logging.getLogger(__name__)


+def _clean_user_input(user_input: dict[str, Any]) -> dict[str, Any]:
+    """Clean up the user input."""
+    user_input[CONF_HOST] = user_input[CONF_HOST].strip()
+    return user_input
+
+
 class PingConfigFlow(ConfigFlow, domain=DOMAIN):
     """Handle a config flow for Ping."""

@@ -46,6 +52,7 @@ class PingConfigFlow(ConfigFlow, domain=DOMAIN):
             ),
         )

+        user_input = _clean_user_input(user_input)
         if not is_ip_address(user_input[CONF_HOST]):
             self.async_abort(reason="invalid_ip_address")

@@ -81,7 +88,7 @@ class OptionsFlowHandler(OptionsFlow):
     ) -> ConfigFlowResult:
         """Manage the options."""
         if user_input is not None:
-            return self.async_create_entry(title="", data=user_input)
+            return self.async_create_entry(title="", data=_clean_user_input(user_input))

         return self.async_show_form(
             step_id="init",
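Both the Ping user step and the options flow now normalize input through `_clean_user_input`, so hosts pasted with surrounding whitespace (exercised by the new test cases near the end of this diff) are stored stripped. The normalization is just `str.strip` applied in one shared place:

```python
# Minimal sketch of the normalization added above.
def clean_host(user_input: dict[str, str]) -> dict[str, str]:
    user_input["host"] = user_input["host"].strip()
    return user_input


assert clean_host({"host": " demo.host "}) == {"host": "demo.host"}
```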
@@ -8,6 +8,7 @@ from typing import Any
 from rachiopy import Rachio
 from requests.exceptions import Timeout

+from homeassistant.config_entries import ConfigEntry
 from homeassistant.core import HomeAssistant
 from homeassistant.helpers.debounce import Debouncer
 from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
@@ -38,6 +39,7 @@ class RachioUpdateCoordinator(DataUpdateCoordinator[dict[str, Any]]):
         self,
         hass: HomeAssistant,
         rachio: Rachio,
+        config_entry: ConfigEntry,
         base_station,
         base_count: int,
     ) -> None:
@@ -48,6 +50,7 @@ class RachioUpdateCoordinator(DataUpdateCoordinator[dict[str, Any]]):
         super().__init__(
             hass,
             _LOGGER,
+            config_entry=config_entry,
             name=f"{DOMAIN} update coordinator",
             # To avoid exceeding the rate limit, increase polling interval for
             # each additional base station on the account
@@ -76,6 +79,7 @@ class RachioScheduleUpdateCoordinator(DataUpdateCoordinator[list[dict[str, Any]]
         self,
         hass: HomeAssistant,
         rachio: Rachio,
+        config_entry: ConfigEntry,
         base_station,
     ) -> None:
         """Initialize a Rachio schedule coordinator."""
@@ -85,6 +89,7 @@ class RachioScheduleUpdateCoordinator(DataUpdateCoordinator[list[dict[str, Any]]
         super().__init__(
             hass,
             _LOGGER,
+            config_entry=config_entry,
             name=f"{DOMAIN} schedule update coordinator",
             update_interval=timedelta(minutes=30),
         )
@@ -189,8 +189,10 @@ class RachioPerson:
                 RachioBaseStation(
                     rachio,
                     base,
-                    RachioUpdateCoordinator(hass, rachio, base, base_count),
-                    RachioScheduleUpdateCoordinator(hass, rachio, base),
+                    RachioUpdateCoordinator(
+                        hass, rachio, self.config_entry, base, base_count
+                    ),
+                    RachioScheduleUpdateCoordinator(hass, rachio, self.config_entry, base),
                 )
                 for base in base_stations
             )
@@ -110,7 +110,7 @@ def purge_old_data(
         _LOGGER.debug("Purging hasn't fully completed yet")
         return False

-    if apply_filter and _purge_filtered_data(instance, session) is False:
+    if apply_filter and not _purge_filtered_data(instance, session):
         _LOGGER.debug("Cleanup filtered data hasn't fully completed yet")
         return False

@@ -631,7 +631,10 @@ def _purge_old_entity_ids(instance: Recorder, session: Session) -> None:


 def _purge_filtered_data(instance: Recorder, session: Session) -> bool:
-    """Remove filtered states and events that shouldn't be in the database."""
+    """Remove filtered states and events that shouldn't be in the database.
+
+    Returns true if all states and events are purged.
+    """
     _LOGGER.debug("Cleanup filtered data")
     database_engine = instance.database_engine
     assert database_engine is not None
@@ -639,7 +642,7 @@ def _purge_filtered_data(instance: Recorder, session: Session) -> bool:

     # Check if excluded entity_ids are in database
     entity_filter = instance.entity_filter
-    has_more_states_to_purge = False
+    has_more_to_purge = False
     excluded_metadata_ids: list[str] = [
         metadata_id
         for (metadata_id, entity_id) in session.query(
@@ -648,12 +651,11 @@ def _purge_filtered_data(instance: Recorder, session: Session) -> bool:
         if entity_filter and not entity_filter(entity_id)
     ]
     if excluded_metadata_ids:
-        has_more_states_to_purge = _purge_filtered_states(
+        has_more_to_purge |= not _purge_filtered_states(
             instance, session, excluded_metadata_ids, database_engine, now_timestamp
         )

     # Check if excluded event_types are in database
-    has_more_events_to_purge = False
     if (
         event_type_to_event_type_ids := instance.event_type_manager.get_many(
             instance.exclude_event_types, session
@@ -665,12 +667,12 @@ def _purge_filtered_data(instance: Recorder, session: Session) -> bool:
             if event_type_id is not None
         ]
     ):
-        has_more_events_to_purge = _purge_filtered_events(
+        has_more_to_purge |= not _purge_filtered_events(
             instance, session, excluded_event_type_ids, now_timestamp
         )

     # Purge has completed if there are not more state or events to purge
-    return not (has_more_states_to_purge or has_more_events_to_purge)
+    return not has_more_to_purge


 def _purge_filtered_states(
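The recorder hunks above are a semantics fix: per the new docstring, `_purge_filtered_states` and `_purge_filtered_events` return True once their category is fully purged, but the old code stored those "done" values in variables named `has_more_*_to_purge`, inverting their meaning. The rewrite accumulates `not done` across both helpers into one flag. The corrected logic in isolation:

```python
# done_states / done_events mimic the helpers' return values
# (True means that category is fully purged).
def purge_complete(done_states: bool, done_events: bool) -> bool:
    has_more_to_purge = False
    has_more_to_purge |= not done_states
    has_more_to_purge |= not done_events
    return not has_more_to_purge


assert purge_complete(True, True) is True    # everything purged
assert purge_complete(True, False) is False  # events need another round
assert purge_complete(False, True) is False  # states need another round
```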
@@ -326,7 +326,19 @@ def migrate_entity_ids(
             else:
                 new_device_id = f"{device_uid[0]}_{host.api.camera_uid(ch)}"
             new_identifiers = {(DOMAIN, new_device_id)}
-            device_reg.async_update_device(device.id, new_identifiers=new_identifiers)
+            existing_device = device_reg.async_get_device(identifiers=new_identifiers)
+            if existing_device is None:
+                device_reg.async_update_device(
+                    device.id, new_identifiers=new_identifiers
+                )
+            else:
+                _LOGGER.warning(
+                    "Reolink device with uid %s already exists, "
+                    "removing device with uid %s",
+                    new_device_id,
+                    device_uid,
+                )
+                device_reg.async_remove_device(device.id)

     entity_reg = er.async_get(hass)
     entities = er.async_entries_for_config_entry(entity_reg, config_entry_id)
@@ -352,4 +364,18 @@ def migrate_entity_ids(
         id_parts = entity.unique_id.split("_", 2)
         if host.api.supported(ch, "UID") and id_parts[1] != host.api.camera_uid(ch):
             new_id = f"{host.unique_id}_{host.api.camera_uid(ch)}_{id_parts[2]}"
-            entity_reg.async_update_entity(entity.entity_id, new_unique_id=new_id)
+            existing_entity = entity_reg.async_get_entity_id(
+                entity.domain, entity.platform, new_id
+            )
+            if existing_entity is None:
+                entity_reg.async_update_entity(
+                    entity.entity_id, new_unique_id=new_id
+                )
+            else:
+                _LOGGER.warning(
+                    "Reolink entity with unique_id %s already exists, "
+                    "removing device with unique_id %s",
+                    new_id,
+                    entity.unique_id,
+                )
+                entity_reg.async_remove(entity.entity_id)
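Both Reolink migration hunks apply the same collision guard: before renaming a registry entry to its new unique ID, look the new ID up; if another entry already owns it, remove the stale entry instead of renaming into a conflict. Sketched over a plain dict standing in for the registry:

```python
# registry maps unique_id -> entry; a stand-in for the real registries.
def migrate_id(registry: dict[str, str], old_id: str, new_id: str) -> None:
    if new_id not in registry:
        # Safe to rename: move the entry to its new unique ID.
        registry[new_id] = registry.pop(old_id)
    else:
        # The new ID is already taken: drop the stale duplicate instead,
        # mirroring the warning-and-remove branch above.
        registry.pop(old_id)


reg = {"mac_ch0": "camera A", "uid_cam": "camera A"}
migrate_id(reg, "mac_ch0", "uid_cam")
assert reg == {"uid_cam": "camera A"}
```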
@@ -110,6 +110,7 @@ class ReolinkHost:
         self._cancel_onvif_check: CALLBACK_TYPE | None = None
         self._cancel_long_poll_check: CALLBACK_TYPE | None = None
         self._poll_job = HassJob(self._async_poll_all_motion, cancel_on_shutdown=True)
+        self._fast_poll_error: bool = False
         self._long_poll_task: asyncio.Task | None = None
         self._lost_subscription: bool = False

@@ -699,14 +700,20 @@ class ReolinkHost:
             return

         try:
-            await self._api.get_motion_state_all_ch()
+            if self._api.session_active:
+                await self._api.get_motion_state_all_ch()
         except ReolinkError as err:
-            _LOGGER.error(
-                "Reolink error while polling motion state for host %s:%s: %s",
-                self._api.host,
-                self._api.port,
-                err,
-            )
+            if not self._fast_poll_error:
+                _LOGGER.error(
+                    "Reolink error while polling motion state for host %s:%s: %s",
+                    self._api.host,
+                    self._api.port,
+                    err,
+                )
+            self._fast_poll_error = True
+        else:
+            if self._api.session_active:
+                self._fast_poll_error = False
         finally:
             # schedule next poll
             if not self._hass.is_stopping:
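The host polling hunk is a log-throttling fix: the new `_fast_poll_error` flag makes a recurring poll failure log once per failure streak instead of on every poll, and a successful poll resets the flag. The flag dance in isolation:

```python
import logging

_LOGGER = logging.getLogger(__name__)


class SketchPoller:
    """Log a recurring poll failure once; reset after the next success."""

    _fast_poll_error = False

    def poll_result(self, ok: bool) -> None:
        if not ok:
            if not self._fast_poll_error:
                # Only the first failure of a streak is logged.
                _LOGGER.error("Polling failed")
            self._fast_poll_error = True
        else:
            self._fast_poll_error = False


poller = SketchPoller()
for ok in (False, False, True, False):
    poller.poll_result(ok)  # logs twice: once per failure streak
```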
@@ -18,5 +18,5 @@
   "documentation": "https://www.home-assistant.io/integrations/reolink",
   "iot_class": "local_push",
   "loggers": ["reolink_aio"],
-  "requirements": ["reolink-aio==0.11.1"]
+  "requirements": ["reolink-aio==0.11.2"]
 }
@@ -178,6 +178,7 @@ AIRQ_SENSOR_TYPES: tuple[SensiboDeviceSensorEntityDescription, ...] = (
         value_fn=lambda data: data.co2,
         extra_fn=None,
     ),
+    *DEVICE_SENSOR_TYPES,
 )

 ELEMENT_SENSOR_TYPES: tuple[SensiboDeviceSensorEntityDescription, ...] = (
@@ -8,5 +8,5 @@
   "iot_class": "cloud_polling",
   "loggers": ["twentemilieu"],
   "quality_scale": "platinum",
-  "requirements": ["twentemilieu==2.0.1"]
+  "requirements": ["twentemilieu==2.1.0"]
 }
@@ -1,5 +1,7 @@
 """Support the UPB PIM."""

+import logging
+
 import upb_lib

 from homeassistant.config_entries import ConfigEntry
|
||||
EVENT_UPB_SCENE_CHANGED,
|
||||
)
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
PLATFORMS = [Platform.LIGHT, Platform.SCENE]
|
||||
|
||||
|
||||
@@ -63,3 +66,21 @@ async def async_unload_entry(hass: HomeAssistant, config_entry: ConfigEntry) ->
     upb.disconnect()
     hass.data[DOMAIN].pop(config_entry.entry_id)
     return unload_ok
+
+
+async def async_migrate_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
+    """Migrate entry."""
+
+    _LOGGER.debug("Migrating from version %s", entry.version)
+
+    if entry.version == 1:
+        # 1 -> 2: Unique ID from integer to string
+        if entry.minor_version == 1:
+            minor_version = 2
+            hass.config_entries.async_update_entry(
+                entry, unique_id=str(entry.unique_id), minor_version=minor_version
+            )
+
+    _LOGGER.debug("Migration successful")
+
+    return True
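Config entry unique IDs are expected to be strings, and the UPB entry previously stored an integer network ID; the migration above (paired with `MINOR_VERSION = 2` in the flow below and the new test at the end of this diff) converts existing entries in place. The version gate reduces to:

```python
# Sketch of the minor-version gate used above: only 1.1 entries change.
def migrate(version: int, minor_version: int, unique_id: object) -> tuple:
    if version == 1 and minor_version == 1:
        # 1 -> 2: unique ID from integer to string
        return (version, 2, str(unique_id))
    return (version, minor_version, unique_id)


assert migrate(1, 1, 123456) == (1, 2, "123456")
assert migrate(1, 2, "123456") == (1, 2, "123456")
```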
@@ -78,6 +78,7 @@ class UPBConfigFlow(ConfigFlow, domain=DOMAIN):
     """Handle a config flow for UPB PIM."""

     VERSION = 1
+    MINOR_VERSION = 2

     async def async_step_user(
         self, user_input: dict[str, Any] | None = None
@@ -98,7 +99,7 @@ class UPBConfigFlow(ConfigFlow, domain=DOMAIN):
             errors["base"] = "unknown"

         if "base" not in errors:
-            await self.async_set_unique_id(network_id)
+            await self.async_set_unique_id(str(network_id))
             self._abort_if_unique_id_configured()

             return self.async_create_entry(
@@ -7,5 +7,5 @@
   "iot_class": "local_polling",
   "loggers": ["holidays"],
   "quality_scale": "internal",
-  "requirements": ["holidays==0.60"]
+  "requirements": ["holidays==0.61"]
 }
@@ -86,18 +86,19 @@
           "options": {
             "armed_forces": "Armed forces",
             "bank": "Bank",
+            "catholic": "Catholic",
+            "chinese": "Chinese",
+            "christian": "Christian",
             "government": "Government",
             "half_day": "Half day",
+            "hebrew": "Hebrew",
+            "hindu": "Hindu",
+            "islamic": "Islamic",
             "optional": "Optional",
             "public": "Public",
             "school": "School",
             "unofficial": "Unofficial",
-            "workday": "Workday",
-            "chinese": "Chinese",
-            "christian": "Christian",
-            "hebrew": "Hebrew",
-            "hindu": "Hindu",
-            "islamic": "Islamic"
+            "workday": "Workday"
           }
         },
         "days": {
@@ -297,7 +297,7 @@
     },
     "reconfigure_device": {
       "name": "Reconfigure device",
-      "description": "Reconfigures a ZHA device (heal device). Use this if you are having issues with the device. If the device in question is a battery-powered device, ensure it is awake and accepting commands when you use this service.",
+      "description": "Reconfigures a ZHA device (heal device). Use this if you are having issues with the device. If the device in question is a battery-powered device, ensure it is awake and accepting commands when you use this action.",
       "fields": {
         "ieee": {
           "name": "[%key:component::zha::services::permit::fields::ieee::name%]",
@@ -25,7 +25,7 @@ if TYPE_CHECKING:
 APPLICATION_NAME: Final = "HomeAssistant"
 MAJOR_VERSION: Final = 2024
 MINOR_VERSION: Final = 11
-PATCH_VERSION: Final = "2"
+PATCH_VERSION: Final = "3"
 __short_version__: Final = f"{MAJOR_VERSION}.{MINOR_VERSION}"
 __version__: Final = f"{__short_version__}.{PATCH_VERSION}"
 REQUIRED_PYTHON_VER: Final[tuple[int, int, int]] = (3, 12, 0)
@@ -17,7 +17,7 @@ awesomeversion==24.6.0
 bcrypt==4.2.0
 bleak-retry-connector==3.6.0
 bleak==0.22.3
-bluetooth-adapters==0.20.0
+bluetooth-adapters==0.20.2
 bluetooth-auto-recovery==1.4.2
 bluetooth-data-tools==1.20.0
 cached-ipaddress==0.8.0
@@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta"

 [project]
 name = "homeassistant"
-version = "2024.11.2"
+version = "2024.11.3"
 license = {text = "Apache-2.0"}
 description = "Open-source home automation platform running on Python 3."
 readme = "README.rst"
@@ -173,13 +173,13 @@ aio-geojson-usgs-earthquakes==0.3
 aio-georss-gdacs==0.10

 # homeassistant.components.airq
-aioairq==0.3.2
+aioairq==0.4.3

 # homeassistant.components.airzone_cloud
 aioairzone-cloud==0.6.10

 # homeassistant.components.airzone
-aioairzone==0.9.5
+aioairzone==0.9.7

 # homeassistant.components.ambient_network
 # homeassistant.components.ambient_station
@@ -474,7 +474,7 @@ apprise==1.9.0
 aprslib==0.7.2

 # homeassistant.components.apsystems
-apsystems-ez1==2.2.1
+apsystems-ez1==2.4.0

 # homeassistant.components.aqualogic
 aqualogic==2.6
@@ -607,7 +607,7 @@ bluemaestro-ble==0.2.3
 # bluepy==1.3.0

 # homeassistant.components.bluetooth
-bluetooth-adapters==0.20.0
+bluetooth-adapters==0.20.2

 # homeassistant.components.bluetooth
 bluetooth-auto-recovery==1.4.2
@@ -814,7 +814,7 @@ eliqonline==1.2.2
 elkm1-lib==2.2.10

 # homeassistant.components.elmax
-elmax-api==0.0.5
+elmax-api==0.0.6.1

 # homeassistant.components.elvia
 elvia==0.1.0
@@ -1121,7 +1121,7 @@ hole==0.8.0

 # homeassistant.components.holiday
 # homeassistant.components.workday
-holidays==0.60
+holidays==0.61

 # homeassistant.components.frontend
 home-assistant-frontend==20241106.2
@@ -1133,7 +1133,7 @@ home-assistant-intents==2024.11.6
 homeconnect==0.8.0

 # homeassistant.components.homematicip_cloud
-homematicip==1.1.2
+homematicip==1.1.3

 # homeassistant.components.horizon
 horimote==0.4.1
@@ -2547,7 +2547,7 @@ renault-api==0.2.7
 renson-endura-delta==1.7.1

 # homeassistant.components.reolink
-reolink-aio==0.11.1
+reolink-aio==0.11.2

 # homeassistant.components.idteck_prox
 rfk101py==0.0.1
@@ -2870,7 +2870,7 @@ ttn_client==1.2.0
 tuya-device-sharing-sdk==0.1.9

 # homeassistant.components.twentemilieu
-twentemilieu==2.0.1
+twentemilieu==2.1.0

 # homeassistant.components.twilio
 twilio==6.32.0
@@ -161,13 +161,13 @@ aio-geojson-usgs-earthquakes==0.3
 aio-georss-gdacs==0.10

 # homeassistant.components.airq
-aioairq==0.3.2
+aioairq==0.4.3

 # homeassistant.components.airzone_cloud
 aioairzone-cloud==0.6.10

 # homeassistant.components.airzone
-aioairzone==0.9.5
+aioairzone==0.9.7

 # homeassistant.components.ambient_network
 # homeassistant.components.ambient_station
@@ -447,7 +447,7 @@ apprise==1.9.0
 aprslib==0.7.2

 # homeassistant.components.apsystems
-apsystems-ez1==2.2.1
+apsystems-ez1==2.4.0

 # homeassistant.components.aranet
 aranet4==2.4.0
@@ -531,7 +531,7 @@ bluecurrent-api==1.2.3
 bluemaestro-ble==0.2.3

 # homeassistant.components.bluetooth
-bluetooth-adapters==0.20.0
+bluetooth-adapters==0.20.2

 # homeassistant.components.bluetooth
 bluetooth-auto-recovery==1.4.2
@@ -689,7 +689,7 @@ elgato==5.1.2
 elkm1-lib==2.2.10

 # homeassistant.components.elmax
-elmax-api==0.0.5
+elmax-api==0.0.6.1

 # homeassistant.components.elvia
 elvia==0.1.0
@@ -947,7 +947,7 @@ hole==0.8.0

 # homeassistant.components.holiday
 # homeassistant.components.workday
-holidays==0.60
+holidays==0.61

 # homeassistant.components.frontend
 home-assistant-frontend==20241106.2
@@ -959,7 +959,7 @@ home-assistant-intents==2024.11.6
 homeconnect==0.8.0

 # homeassistant.components.homematicip_cloud
-homematicip==1.1.2
+homematicip==1.1.3

 # homeassistant.components.remember_the_milk
 httplib2==0.20.4
@@ -2038,7 +2038,7 @@ renault-api==0.2.7
 renson-endura-delta==1.7.1

 # homeassistant.components.reolink
-reolink-aio==0.11.1
+reolink-aio==0.11.2

 # homeassistant.components.rflink
 rflink==0.0.66
@@ -2283,7 +2283,7 @@ ttn_client==1.2.0
 tuya-device-sharing-sdk==0.1.9

 # homeassistant.components.twentemilieu
-twentemilieu==2.0.1
+twentemilieu==2.1.0

 # homeassistant.components.twilio
 twilio==6.32.0
@@ -64,6 +64,18 @@ def fixture_feed_only_summary(hass: HomeAssistant) -> bytes:
     return load_fixture_bytes("feedreader8.xml")


+@pytest.fixture(name="feed_htmlentities")
+def fixture_feed_htmlentities(hass: HomeAssistant) -> bytes:
+    """Load test feed data with HTML Entities."""
+    return load_fixture_bytes("feedreader9.xml")
+
+
+@pytest.fixture(name="feed_atom_htmlentities")
+def fixture_feed_atom_htmlentities(hass: HomeAssistant) -> bytes:
+    """Load test ATOM feed data with HTML Entities."""
+    return load_fixture_bytes("feedreader10.xml")
+
+
 @pytest.fixture(name="events")
 async def fixture_events(hass: HomeAssistant) -> list[Event]:
     """Fixture that catches alexa events."""
tests/components/feedreader/fixtures/feedreader10.xml (new file, 19 lines)
@@ -0,0 +1,19 @@
+<?xml version="1.0" encoding="utf-8"?>
+<feed
+  xmlns="http://www.w3.org/2005/Atom">
+  <title><![CDATA[ATOM RSS en español]]></title>
+  <link href="http://example.org/"/>
+  <updated>2024-11-18T14:00:00Z</updated>
+  <author>
+    <name><![CDATA[Juan Pérez]]></name>
+  </author>
+  <id>urn:uuid:60a76c80-d399-11d9-b93C-0003939e0af6</id>
+  <entry>
+    <title><![CDATA[Título]]></title>
+    <link href="http://example.org/2003/12/13/atom03"/>
+    <id>urn:uuid:1225c695-cfb8-4ebb-aaaa-80da344efa6a</id>
+    <updated>2024-11-18T14:00:00Z</updated>
+    <summary><![CDATA[Resumen en español]]></summary>
+    <content><![CDATA[Contenido en español]]></content>
+  </entry>
+</feed>
tests/components/feedreader/fixtures/feedreader9.xml (new file, 21 lines)
@@ -0,0 +1,21 @@
+<?xml version="1.0" encoding="UTF-8" ?>
+<rss version="2.0">
+  <channel>
+    <title><![CDATA[RSS en español]]></title>
+    <description><![CDATA[Esto es un ejemplo de un feed RSS en español]]></description>
+    <link>http://www.example.com/main.html</link>
+    <lastBuildDate>Mon, 18 Nov 2024 15:00:00 +1000</lastBuildDate>
+    <pubDate>Mon, 18 Nov 2024 15:00:00 +1000</pubDate>
+    <ttl>1800</ttl>
+
+    <item>
+      <title><![CDATA[Título 1]]></title>
+      <description><![CDATA[Descripción 1]]></description>
+      <link>http://www.example.com/link/1</link>
+      <guid isPermaLink="false">GUID 1</guid>
+      <pubDate>Mon, 18 Nov 2024 15:00:00 +1000</pubDate>
+      <content><![CDATA[Contenido 1 en español]]></content>
+    </item>
+
+  </channel>
+</rss>
tests/components/feedreader/snapshots/test_event.ambr (new file, 27 lines)
@@ -0,0 +1,27 @@
+# serializer version: 1
+# name: test_event_htmlentities[feed_atom_htmlentities]
+  ReadOnlyDict({
+    'content': 'Contenido en español',
+    'description': 'Resumen en español',
+    'event_type': 'feedreader',
+    'event_types': list([
+      'feedreader',
+    ]),
+    'friendly_name': 'Mock Title',
+    'link': 'http://example.org/2003/12/13/atom03',
+    'title': 'Título',
+  })
+# ---
+# name: test_event_htmlentities[feed_htmlentities]
+  ReadOnlyDict({
+    'content': 'Contenido 1 en español',
+    'description': 'Descripción 1',
+    'event_type': 'feedreader',
+    'event_types': list([
+      'feedreader',
+    ]),
+    'friendly_name': 'Mock Title',
+    'link': 'http://www.example.com/link/1',
+    'title': 'Título 1',
+  })
+# ---
@@ -246,3 +246,38 @@ async def test_options_flow(hass: HomeAssistant) -> None:
     assert result["data"] == {
         CONF_MAX_ENTRIES: 10,
     }
+
+
+@pytest.mark.parametrize(
+    ("fixture_name", "expected_title"),
+    [
+        ("feed_htmlentities", "RSS en español"),
+        ("feed_atom_htmlentities", "ATOM RSS en español"),
+    ],
+)
+async def test_feed_htmlentities(
+    hass: HomeAssistant,
+    feedparser,
+    setup_entry,
+    fixture_name,
+    expected_title,
+    request: pytest.FixtureRequest,
+) -> None:
+    """Test starting a flow by user from a feed with HTML Entities in the title."""
+    with patch(
+        "homeassistant.components.feedreader.config_flow.feedparser.http.get",
+        side_effect=[request.getfixturevalue(fixture_name)],
+    ):
+        # init user flow
+        result = await hass.config_entries.flow.async_init(
+            DOMAIN, context={"source": SOURCE_USER}
+        )
+        assert result["type"] is FlowResultType.FORM
+        assert result["step_id"] == "user"
+
+        # success
+        result = await hass.config_entries.flow.async_configure(
+            result["flow_id"], user_input={CONF_URL: URL}
+        )
+        assert result["type"] is FlowResultType.CREATE_ENTRY
+        assert result["title"] == expected_title
@@ -3,6 +3,9 @@
 from datetime import timedelta
 from unittest.mock import patch

+import pytest
+from syrupy.assertion import SnapshotAssertion
+
 from homeassistant.components.feedreader.event import (
     ATTR_CONTENT,
     ATTR_DESCRIPTION,
@@ -59,3 +62,31 @@ async def test_event_entity(
     assert state.attributes[ATTR_LINK] == "http://www.example.com/link/1"
     assert state.attributes[ATTR_CONTENT] == "This is a summary"
     assert state.attributes[ATTR_DESCRIPTION] == "Description 1"
+
+
+@pytest.mark.parametrize(
+    ("fixture_name"),
+    [
+        ("feed_htmlentities"),
+        ("feed_atom_htmlentities"),
+    ],
+)
+async def test_event_htmlentities(
+    hass: HomeAssistant,
+    snapshot: SnapshotAssertion,
+    fixture_name,
+    request: pytest.FixtureRequest,
+) -> None:
+    """Test feed event entity with HTML Entities."""
+    entry = create_mock_entry(VALID_CONFIG_DEFAULT)
+    entry.add_to_hass(hass)
+    with patch(
+        "homeassistant.components.feedreader.coordinator.feedparser.http.get",
+        side_effect=[request.getfixturevalue(fixture_name)],
+    ):
+        assert await hass.config_entries.async_setup(entry.entry_id)
+        await hass.async_block_till_done()
+
+    state = hass.states.get("event.mock_title")
+    assert state
+    assert state.attributes == snapshot
@@ -12,6 +12,7 @@ import pytest

 from homeassistant.components.feedreader.const import DOMAIN
 from homeassistant.core import Event, HomeAssistant
+from homeassistant.helpers import device_registry as dr
 import homeassistant.util.dt as dt_util

 from . import async_setup_config_entry, create_mock_entry
@@ -357,3 +358,23 @@ async def test_feed_errors(
         freezer.tick(timedelta(hours=1, seconds=1))
         async_fire_time_changed(hass)
         await hass.async_block_till_done(wait_background_tasks=True)
+
+
+async def test_feed_atom_htmlentities(
+    hass: HomeAssistant, feed_atom_htmlentities, device_registry: dr.DeviceRegistry
+) -> None:
+    """Test ATOM feed author with HTML Entities."""
+
+    entry = create_mock_entry(VALID_CONFIG_DEFAULT)
+    entry.add_to_hass(hass)
+    with patch(
+        "homeassistant.components.feedreader.coordinator.feedparser.http.get",
+        side_effect=[feed_atom_htmlentities],
+    ):
+        assert await hass.config_entries.async_setup(entry.entry_id)
+        await hass.async_block_till_done()
+
+        device_entry = device_registry.async_get_device(
+            identifiers={(DOMAIN, entry.entry_id)}
+        )
+        assert device_entry.manufacturer == "Juan Pérez"
@@ -13,11 +13,15 @@ from tests.common import MockConfigEntry


 @pytest.mark.parametrize(
-    ("host", "expected_title"),
-    [("192.618.178.1", "192.618.178.1")],
+    ("host", "expected"),
+    [
+        ("192.618.178.1", "192.618.178.1"),
+        (" 192.618.178.1 ", "192.618.178.1"),
+        (" demo.host ", "demo.host"),
+    ],
 )
 @pytest.mark.usefixtures("patch_setup")
-async def test_form(hass: HomeAssistant, host, expected_title) -> None:
+async def test_form(hass: HomeAssistant, host, expected) -> None:
     """Test we get the form."""

     result = await hass.config_entries.flow.async_init(
|
||||
await hass.async_block_till_done()
|
||||
|
||||
assert result["type"] is FlowResultType.CREATE_ENTRY
|
||||
assert result["title"] == expected_title
|
||||
assert result["title"] == expected
|
||||
assert result["data"] == {}
|
||||
assert result["options"] == {
|
||||
"count": 5,
|
||||
"host": host,
|
||||
"host": expected,
|
||||
"consider_home": 180,
|
||||
}
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
("host", "count", "expected_title"),
|
||||
[("192.618.178.1", 10, "192.618.178.1")],
|
||||
("host", "expected_host"),
|
||||
[
|
||||
("192.618.178.1", "192.618.178.1"),
|
||||
(" 192.618.178.1 ", "192.618.178.1"),
|
||||
(" demo.host ", "demo.host"),
|
||||
],
|
||||
)
|
||||
@pytest.mark.usefixtures("patch_setup")
|
||||
async def test_options(hass: HomeAssistant, host, count, expected_title) -> None:
|
||||
async def test_options(hass: HomeAssistant, host: str, expected_host: str) -> None:
|
||||
"""Test options flow."""
|
||||
|
||||
config_entry = MockConfigEntry(
|
||||
@ -57,8 +65,8 @@ async def test_options(hass: HomeAssistant, host, count, expected_title) -> None
|
||||
source=config_entries.SOURCE_USER,
|
||||
data={},
|
||||
domain=DOMAIN,
|
||||
options={"count": count, "host": host, "consider_home": 180},
|
||||
title=expected_title,
|
||||
options={"count": 1, "host": "192.168.1.1", "consider_home": 180},
|
||||
title="192.168.1.1",
|
||||
)
|
||||
config_entry.add_to_hass(hass)
|
||||
|
||||
@@ -72,15 +80,15 @@ async def test_options(hass: HomeAssistant, host, count, expected_title) -> None
     result = await hass.config_entries.options.async_configure(
         result["flow_id"],
         {
-            "host": "10.10.10.1",
-            "count": count,
+            "host": host,
+            "count": 10,
         },
     )
     await hass.async_block_till_done()

     assert result["type"] is FlowResultType.CREATE_ENTRY
     assert result["data"] == {
-        "count": count,
-        "host": "10.10.10.1",
+        "count": 10,
+        "host": expected_host,
         "consider_home": 180,
     }
@@ -964,6 +964,171 @@ async def test_purge_filtered_states(
         assert session.query(StateAttributes).count() == 0


+@pytest.mark.parametrize(
+    "recorder_config", [{"exclude": {"entities": ["sensor.excluded"]}}]
+)
+async def test_purge_filtered_states_multiple_rounds(
+    hass: HomeAssistant,
+    recorder_mock: Recorder,
+    caplog: pytest.LogCaptureFixture,
+) -> None:
+    """Test filtered states are purged when there are multiple rounds to purge."""
+    assert recorder_mock.entity_filter("sensor.excluded") is False
+
+    def _add_db_entries(hass: HomeAssistant) -> None:
+        with session_scope(hass=hass) as session:
+            # Add states and state_changed events that should be purged
+            for days in range(1, 4):
+                timestamp = dt_util.utcnow() - timedelta(days=days)
+                for event_id in range(1000, 1020):
+                    _add_state_with_state_attributes(
+                        session,
+                        "sensor.excluded",
+                        "purgeme",
+                        timestamp,
+                        event_id * days,
+                    )
+            # Add state **without** state_changed event that should be purged
+            timestamp = dt_util.utcnow() - timedelta(days=1)
+            session.add(
+                States(
+                    entity_id="sensor.excluded",
+                    state="purgeme",
+                    attributes="{}",
+                    last_changed_ts=dt_util.utc_to_timestamp(timestamp),
+                    last_updated_ts=dt_util.utc_to_timestamp(timestamp),
+                )
+            )
+            # Add states and state_changed events that should be keeped
+            timestamp = dt_util.utcnow() - timedelta(days=2)
+            for event_id in range(200, 210):
+                _add_state_with_state_attributes(
+                    session,
+                    "sensor.keep",
+                    "keep",
+                    timestamp,
+                    event_id,
+                )
+            # Add states with linked old_state_ids that need to be handled
+            timestamp = dt_util.utcnow() - timedelta(days=0)
+            state_attrs = StateAttributes(
+                hash=0,
+                shared_attrs=json.dumps(
+                    {"sensor.linked_old_state_id": "sensor.linked_old_state_id"}
+                ),
+            )
+            state_1 = States(
+                entity_id="sensor.linked_old_state_id",
+                state="keep",
+                attributes="{}",
+                last_changed_ts=dt_util.utc_to_timestamp(timestamp),
+                last_updated_ts=dt_util.utc_to_timestamp(timestamp),
+                old_state_id=1,
+                state_attributes=state_attrs,
+            )
+            timestamp = dt_util.utcnow() - timedelta(days=4)
+            state_2 = States(
+                entity_id="sensor.linked_old_state_id",
+                state="keep",
+                attributes="{}",
+                last_changed_ts=dt_util.utc_to_timestamp(timestamp),
+                last_updated_ts=dt_util.utc_to_timestamp(timestamp),
+                old_state_id=2,
+                state_attributes=state_attrs,
+            )
+            state_3 = States(
+                entity_id="sensor.linked_old_state_id",
+                state="keep",
+                attributes="{}",
+                last_changed_ts=dt_util.utc_to_timestamp(timestamp),
+                last_updated_ts=dt_util.utc_to_timestamp(timestamp),
+                old_state_id=62,  # keep
+                state_attributes=state_attrs,
+            )
+            session.add_all((state_attrs, state_1, state_2, state_3))
+            # Add event that should be keeped
+            session.add(
+                Events(
+                    event_id=100,
+                    event_type="EVENT_KEEP",
+                    event_data="{}",
+                    origin="LOCAL",
+                    time_fired_ts=dt_util.utc_to_timestamp(timestamp),
+                )
+            )
+            convert_pending_states_to_meta(recorder_mock, session)
+            convert_pending_events_to_event_types(recorder_mock, session)
+
+    service_data = {"keep_days": 10, "apply_filter": True}
+    _add_db_entries(hass)
+
+    with session_scope(hass=hass) as session:
+        states = session.query(States)
+        assert states.count() == 74
+        events_keep = session.query(Events).filter(
+            Events.event_type_id.in_(select_event_type_ids(("EVENT_KEEP",)))
+        )
+        assert events_keep.count() == 1
+
+    await hass.services.async_call(
+        RECORDER_DOMAIN, SERVICE_PURGE, service_data, blocking=True
+    )
+
+    for _ in range(2):
+        # Make sure the second round of purging runs
+        await async_recorder_block_till_done(hass)
+        await async_wait_purge_done(hass)
+
+    assert "Cleanup filtered data hasn't fully completed yet" in caplog.text
+    caplog.clear()
+
+    with session_scope(hass=hass) as session:
+        states = session.query(States)
+        assert states.count() == 13
+        events_keep = session.query(Events).filter(
+            Events.event_type_id.in_(select_event_type_ids(("EVENT_KEEP",)))
+        )
+        assert events_keep.count() == 1
+
+        states_sensor_excluded = (
+            session.query(States)
+            .outerjoin(StatesMeta, States.metadata_id == StatesMeta.metadata_id)
+            .filter(StatesMeta.entity_id == "sensor.excluded")
+        )
+        assert states_sensor_excluded.count() == 0
+        query = session.query(States)
+
+        assert query.filter(States.state_id == 72).first().old_state_id is None
+        assert query.filter(States.state_id == 72).first().attributes_id == 71
+        assert query.filter(States.state_id == 73).first().old_state_id is None
+        assert query.filter(States.state_id == 73).first().attributes_id == 71
+
+        final_keep_state = session.query(States).filter(States.state_id == 74).first()
+        assert final_keep_state.old_state_id == 62  # should have been kept
+        assert final_keep_state.attributes_id == 71
+
+        assert session.query(StateAttributes).count() == 11
+
+    # Do it again to make sure nothing changes
+    await hass.services.async_call(RECORDER_DOMAIN, SERVICE_PURGE, service_data)
+    await async_recorder_block_till_done(hass)
+    await async_wait_purge_done(hass)
+
+    with session_scope(hass=hass) as session:
+        final_keep_state = session.query(States).filter(States.state_id == 74).first()
+        assert final_keep_state.old_state_id == 62  # should have been kept
+        assert final_keep_state.attributes_id == 71
+
+        assert session.query(StateAttributes).count() == 11
+
+    for _ in range(2):
+        # Make sure the second round of purging runs
+        await async_recorder_block_till_done(hass)
+        await async_wait_purge_done(hass)
+
+    assert "Cleanup filtered data hasn't fully completed yet" not in caplog.text
+
+
 @pytest.mark.parametrize("use_sqlite", [True, False], indirect=True)
 @pytest.mark.parametrize(
     "recorder_config", [{"exclude": {"entities": ["sensor.excluded"]}}]
@@ -469,6 +469,116 @@ async def test_migrate_entity_ids(
     assert device_registry.async_get_device(identifiers={(DOMAIN, new_dev_id)})


+async def test_migrate_with_already_existing_device(
+    hass: HomeAssistant,
+    config_entry: MockConfigEntry,
+    reolink_connect: MagicMock,
+    entity_registry: er.EntityRegistry,
+    device_registry: dr.DeviceRegistry,
+) -> None:
+    """Test device ids that need to be migrated while the new ids already exist."""
+    original_dev_id = f"{TEST_MAC}_ch0"
+    new_dev_id = f"{TEST_UID}_{TEST_UID_CAM}"
+    domain = Platform.SWITCH
+
+    def mock_supported(ch, capability):
+        if capability == "UID" and ch is None:
+            return True
+        if capability == "UID":
+            return True
+        return True
+
+    reolink_connect.channels = [0]
+    reolink_connect.supported = mock_supported
+
+    device_registry.async_get_or_create(
+        identifiers={(DOMAIN, new_dev_id)},
+        config_entry_id=config_entry.entry_id,
+        disabled_by=None,
+    )
+
+    device_registry.async_get_or_create(
+        identifiers={(DOMAIN, original_dev_id)},
+        config_entry_id=config_entry.entry_id,
+        disabled_by=None,
+    )
+
+    assert device_registry.async_get_device(identifiers={(DOMAIN, original_dev_id)})
+    assert device_registry.async_get_device(identifiers={(DOMAIN, new_dev_id)})
+
+    # setup CH 0 and host entities/device
+    with patch("homeassistant.components.reolink.PLATFORMS", [domain]):
+        assert await hass.config_entries.async_setup(config_entry.entry_id)
+        await hass.async_block_till_done()
+
+    assert (
+        device_registry.async_get_device(identifiers={(DOMAIN, original_dev_id)})
+        is None
+    )
+    assert device_registry.async_get_device(identifiers={(DOMAIN, new_dev_id)})
+
+
+async def test_migrate_with_already_existing_entity(
+    hass: HomeAssistant,
+    config_entry: MockConfigEntry,
+    reolink_connect: MagicMock,
+    entity_registry: er.EntityRegistry,
+    device_registry: dr.DeviceRegistry,
+) -> None:
+    """Test entity ids that need to be migrated while the new ids already exist."""
+    original_id = f"{TEST_UID}_0_record_audio"
+    new_id = f"{TEST_UID}_{TEST_UID_CAM}_record_audio"
+    dev_id = f"{TEST_UID}_{TEST_UID_CAM}"
+    domain = Platform.SWITCH
+
+    def mock_supported(ch, capability):
+        if capability == "UID" and ch is None:
+            return True
+        if capability == "UID":
+            return True
+        return True
+
+    reolink_connect.channels = [0]
+    reolink_connect.supported = mock_supported
+
+    dev_entry = device_registry.async_get_or_create(
+        identifiers={(DOMAIN, dev_id)},
+        config_entry_id=config_entry.entry_id,
+        disabled_by=None,
+    )
+
+    entity_registry.async_get_or_create(
+        domain=domain,
+        platform=DOMAIN,
+        unique_id=new_id,
+        config_entry=config_entry,
+        suggested_object_id=new_id,
+        disabled_by=None,
+        device_id=dev_entry.id,
+    )
+
+    entity_registry.async_get_or_create(
+        domain=domain,
+        platform=DOMAIN,
+        unique_id=original_id,
+        config_entry=config_entry,
+        suggested_object_id=original_id,
+        disabled_by=None,
+        device_id=dev_entry.id,
+    )
+
+    assert entity_registry.async_get_entity_id(domain, DOMAIN, original_id)
+    assert entity_registry.async_get_entity_id(domain, DOMAIN, new_id)
+
+    # setup CH 0 and host entities/device
+    with patch("homeassistant.components.reolink.PLATFORMS", [domain]):
+        assert await hass.config_entries.async_setup(config_entry.entry_id)
+        await hass.async_block_till_done()
+
+    assert entity_registry.async_get_entity_id(domain, DOMAIN, original_id) is None
+    assert entity_registry.async_get_entity_id(domain, DOMAIN, new_id)
+
+
 async def test_no_repair_issue(
     hass: HomeAssistant, config_entry: MockConfigEntry, issue_registry: ir.IssueRegistry
 ) -> None:
tests/components/upb/test_init.py (new file, 25 lines)
@@ -0,0 +1,25 @@
+"""The init tests for the UPB platform."""
+
+from unittest.mock import patch
+
+from homeassistant.components.upb.const import DOMAIN
+from homeassistant.core import HomeAssistant
+
+from tests.common import MockConfigEntry
+
+
+async def test_migrate_entry_minor_version_1_2(hass: HomeAssistant) -> None:
+    """Test migrating a 1.1 config entry to 1.2."""
+    with patch("homeassistant.components.upb.async_setup_entry", return_value=True):
+        entry = MockConfigEntry(
+            domain=DOMAIN,
+            data={"protocol": "TCP", "address": "1.2.3.4", "file_path": "upb.upe"},
+            version=1,
+            minor_version=1,
+            unique_id=123456,
+        )
+        entry.add_to_hass(hass)
+        assert await hass.config_entries.async_setup(entry.entry_id)
+        assert entry.version == 1
+        assert entry.minor_version == 2
+        assert entry.unique_id == "123456"