commit 0c15c75781
Paulus Schoutsen 2023-04-06 17:52:14 -04:00 committed by GitHub
39 changed files with 396 additions and 107 deletions

View File

@@ -7,5 +7,5 @@
"integration_type": "hub",
"iot_class": "cloud_push",
"loggers": ["aioambient"],
"requirements": ["aioambient==2021.11.0"]
"requirements": ["aioambient==2022.10.0"]
}

View File

@@ -10,9 +10,10 @@ from .wrappers import HaBleakClientWrapper, HaBleakScannerWrapper
ORIGINAL_BLEAK_SCANNER = bleak.BleakScanner
ORIGINAL_BLEAK_CLIENT = bleak.BleakClient
ORIGINAL_BLEAK_RETRY_CONNECTOR_CLIENT = (
ORIGINAL_BLEAK_RETRY_CONNECTOR_CLIENT_WITH_SERVICE_CACHE = (
bleak_retry_connector.BleakClientWithServiceCache
)
ORIGINAL_BLEAK_RETRY_CONNECTOR_CLIENT = bleak_retry_connector.BleakClient
def install_multiple_bleak_catcher() -> None:
@@ -23,6 +24,7 @@ def install_multiple_bleak_catcher() -> None:
bleak.BleakScanner = HaBleakScannerWrapper # type: ignore[misc, assignment]
bleak.BleakClient = HaBleakClientWrapper # type: ignore[misc]
bleak_retry_connector.BleakClientWithServiceCache = HaBleakClientWithServiceCache # type: ignore[misc,assignment] # noqa: E501
bleak_retry_connector.BleakClient = HaBleakClientWrapper # type: ignore[misc] # noqa: E501
def uninstall_multiple_bleak_catcher() -> None:
@@ -30,6 +32,9 @@ def uninstall_multiple_bleak_catcher() -> None:
bleak.BleakScanner = ORIGINAL_BLEAK_SCANNER # type: ignore[misc]
bleak.BleakClient = ORIGINAL_BLEAK_CLIENT # type: ignore[misc]
bleak_retry_connector.BleakClientWithServiceCache = ( # type: ignore[misc]
ORIGINAL_BLEAK_RETRY_CONNECTOR_CLIENT_WITH_SERVICE_CACHE
)
bleak_retry_connector.BleakClient = ( # type: ignore[misc]
ORIGINAL_BLEAK_RETRY_CONNECTOR_CLIENT
)
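
The pattern in this file is save-patch-restore on module attributes: capture each original class before installing the Home Assistant wrappers, then put the originals back on uninstall. A minimal self-contained sketch of that pattern (SimpleNamespace stands in for the real bleak module; names are illustrative only):

from types import SimpleNamespace

fake_bleak = SimpleNamespace(BleakClient=object)  # stand-in for the bleak module

ORIGINAL_CLIENT = fake_bleak.BleakClient  # capture before patching


class WrapperClient:
    """Stand-in for HaBleakClientWrapper."""


def install() -> None:
    fake_bleak.BleakClient = WrapperClient  # patch the module attribute


def uninstall() -> None:
    fake_bleak.BleakClient = ORIGINAL_CLIENT  # restore the saved original


install()
assert fake_bleak.BleakClient is WrapperClient
uninstall()
assert fake_bleak.BleakClient is ORIGINAL_CLIENT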

View File

@@ -137,8 +137,11 @@ class CommandSensor(SensorEntity):
_LOGGER.warning("Unable to parse output as JSON: %s", value)
else:
_LOGGER.warning("Empty reply found when expecting JSON data")
if self._value_template is None:
self._attr_native_value = None
return
elif self._value_template is not None:
if self._value_template is not None:
self._attr_native_value = (
self._value_template.async_render_with_possible_json_value(
value,

View File

@@ -7,5 +7,5 @@
"iot_class": "local_push",
"loggers": ["aiodiscover", "dnspython", "pyroute2", "scapy"],
"quality_scale": "internal",
"requirements": ["scapy==2.5.0", "aiodiscover==1.4.15"]
"requirements": ["scapy==2.5.0", "aiodiscover==1.4.16"]
}

View File

@@ -345,11 +345,19 @@ async def async_setup_entry( # noqa: C901
disconnect_cb()
entry_data.disconnect_callbacks = []
entry_data.available = False
# Clear out the states so that we will always dispatch
# Mark state as stale so that we will always dispatch
# the next state update of that type when the device reconnects
for state_keys in entry_data.state.values():
state_keys.clear()
entry_data.async_update_device_state(hass)
entry_data.stale_state = {
(type(entity_state), key)
for state_dict in entry_data.state.values()
for key, entity_state in state_dict.items()
}
if not hass.is_stopping:
# Avoid marking every esphome entity as unavailable on shutdown
# since it generates a lot of state changed events and database
# writes when we already know we're shutting down and the state
# will be cleared anyway.
entry_data.async_update_device_state(hass)
async def on_connect_error(err: Exception) -> None:
"""Start reauth flow if appropriate connect error type."""

View File

@@ -70,6 +70,10 @@ class RuntimeEntryData:
client: APIClient
store: Store
state: dict[type[EntityState], dict[int, EntityState]] = field(default_factory=dict)
# When the disconnect callback is called, we mark all states
# as stale so we will always dispatch a state update when the
# device reconnects. This is the same format as state_subscriptions.
stale_state: set[tuple[type[EntityState], int]] = field(default_factory=set)
info: dict[str, dict[int, EntityInfo]] = field(default_factory=dict)
# A second list of EntityInfo objects
@@ -206,9 +210,11 @@ class RuntimeEntryData:
"""Distribute an update of state information to the target."""
key = state.key
state_type = type(state)
stale_state = self.stale_state
current_state_by_type = self.state[state_type]
current_state = current_state_by_type.get(key, _SENTINEL)
if current_state == state:
subscription_key = (state_type, key)
if current_state == state and subscription_key not in stale_state:
_LOGGER.debug(
"%s: ignoring duplicate update with and key %s: %s",
self.name,
@@ -222,8 +228,8 @@
key,
state,
)
stale_state.discard(subscription_key)
current_state_by_type[key] = state
subscription_key = (state_type, key)
if subscription_key in self.state_subscriptions:
self.state_subscriptions[subscription_key]()
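
Putting the two ESPHome hunks together: on disconnect every (state_type, key) pair is recorded in stale_state, and the duplicate-update short-circuit is skipped for stale pairs, so subscribers are guaranteed one dispatch per entity after a reconnect. A condensed sketch of that bookkeeping (module-level names simplify the real RuntimeEntryData attributes):

state: dict[type, dict[int, object]] = {}
stale_state: set[tuple[type, int]] = set()


def mark_all_stale() -> None:
    """Disconnect callback: flag every known state as stale."""
    stale_state.update(
        (type(entity_state), key)
        for state_dict in state.values()
        for key, entity_state in state_dict.items()
    )


def should_dispatch(new_state: object, key: int) -> bool:
    """Mirror of the duplicate check in async_update_state."""
    subscription_key = (type(new_state), key)
    current = state.setdefault(type(new_state), {}).get(key)
    if current == new_state and subscription_key not in stale_state:
        return False  # genuine duplicate while connected: drop it
    stale_state.discard(subscription_key)  # consume the stale marker
    state[type(new_state)][key] = new_state
    return True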

View File

@@ -7,7 +7,7 @@
"documentation": "https://www.home-assistant.io/integrations/fritz",
"iot_class": "local_polling",
"loggers": ["fritzconnection"],
"requirements": ["fritzconnection==1.11.0", "xmltodict==0.13.0"],
"requirements": ["fritzconnection==1.12.0", "xmltodict==0.13.0"],
"ssdp": [
{
"st": "urn:schemas-upnp-org:device:fritzbox:1"

View File

@@ -7,5 +7,5 @@
"integration_type": "device",
"iot_class": "local_polling",
"loggers": ["fritzconnection"],
"requirements": ["fritzconnection==1.11.0"]
"requirements": ["fritzconnection==1.12.0"]
}

View File

@@ -20,5 +20,5 @@
"documentation": "https://www.home-assistant.io/integrations/frontend",
"integration_type": "system",
"quality_scale": "internal",
"requirements": ["home-assistant-frontend==20230405.0"]
"requirements": ["home-assistant-frontend==20230406.1"]
}

View File

@@ -7,5 +7,5 @@
"documentation": "https://www.home-assistant.io/integrations/calendar.google/",
"iot_class": "cloud_polling",
"loggers": ["googleapiclient"],
"requirements": ["gcal-sync==4.1.2", "oauth2client==4.1.3"]
"requirements": ["gcal-sync==4.1.3", "oauth2client==4.1.3"]
}

View File

@@ -151,7 +151,7 @@ class DeviceWithPrograms(HomeConnectDevice):
programs_available = self.appliance.get_programs_available()
except (HomeConnectError, ValueError):
_LOGGER.debug("Unable to fetch available programs. Probably offline")
programs_available = None
programs_available = []
return programs_available
def get_program_switches(self):
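
Swapping the offline fallback from None to an empty list keeps iterating callers safe without extra guards; a one-line illustration (the caller shape is an assumption, not code from this diff):

programs_available = []  # new fallback when the appliance is offline
assert [p for p in programs_available] == []  # iterates cleanly
# With the old fallback of None, the same comprehension would raise
# TypeError: 'NoneType' object is not iterable.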

View File

@@ -1,9 +1,11 @@
"""Support for Honeywell (US) Total Connect Comfort climate systems."""
from __future__ import annotations
import asyncio
import datetime
from typing import Any
from aiohttp import ClientConnectionError
import aiosomecomfort
from homeassistant.components.climate import (
@@ -421,10 +423,7 @@ class HoneywellUSThermostat(ClimateEntity):
try:
await self._device.refresh()
self._attr_available = True
except (
aiosomecomfort.SomeComfortError,
OSError,
):
except aiosomecomfort.SomeComfortError:
try:
await self._data.client.login()
@@ -433,5 +432,12 @@
await self.hass.async_create_task(
self.hass.config_entries.async_reload(self._data.entry_id)
)
except aiosomecomfort.SomeComfortError:
except (
aiosomecomfort.SomeComfortError,
ClientConnectionError,
asyncio.TimeoutError,
):
self._attr_available = False
except (ClientConnectionError, asyncio.TimeoutError):
self._attr_available = False
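
Net effect of the two Honeywell hunks: an API-level SomeComfortError first triggers a re-login, while transport failures (ClientConnectionError, asyncio.TimeoutError) either fail that re-login or are caught directly, and both paths mark the entity unavailable. A condensed, hedged sketch of the resulting flow (the exception class and the device/client objects are stand-ins for the real aiosomecomfort types):

import asyncio

from aiohttp import ClientConnectionError


class SomeComfortError(Exception):
    """Stand-in for aiosomecomfort.SomeComfortError."""


async def refresh_with_relogin(device, client) -> bool:
    """Return availability after a refresh attempt."""
    try:
        await device.refresh()
        return True
    except SomeComfortError:
        try:
            await client.login()  # API error: a fresh login may recover the session
            return True
        except (SomeComfortError, ClientConnectionError, asyncio.TimeoutError):
            return False
    except (ClientConnectionError, asyncio.TimeoutError):
        return False  # transport failure: no point attempting a login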

View File

@@ -194,7 +194,11 @@ class ImapDataUpdateCoordinator(DataUpdateCoordinator[int | None]):
if count
else None
)
if count and last_message_id is not None:
if (
count
and last_message_id is not None
and self._last_message_id != last_message_id
):
self._last_message_id = last_message_id
await self._async_process_event(last_message_id)
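
The widened condition deduplicates events: a poll or push cycle can report the same last message id twice, and the coordinator now only fires when the id actually changes. A small sketch of the guard:

class Dedup:
    """Fire only when the last message id changes."""

    def __init__(self) -> None:
        self._last_message_id: str | None = None

    def should_process(self, count: int, last_message_id: str | None) -> bool:
        if (
            count
            and last_message_id is not None
            and self._last_message_id != last_message_id
        ):
            self._last_message_id = last_message_id
            return True
        return False


dedup = Dedup()
assert dedup.should_process(1, "uid-1") is True
assert dedup.should_process(1, "uid-1") is False  # duplicate: skipped
assert dedup.should_process(2, "uid-2") is True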
@@ -235,18 +239,18 @@ class ImapPollingDataUpdateCoordinator(ImapDataUpdateCoordinator):
UpdateFailed,
asyncio.TimeoutError,
) as ex:
self.async_set_update_error(ex)
await self._cleanup()
self.async_set_update_error(ex)
raise UpdateFailed() from ex
except InvalidFolder as ex:
_LOGGER.warning("Selected mailbox folder is invalid")
self.async_set_update_error(ex)
await self._cleanup()
self.async_set_update_error(ex)
raise ConfigEntryError("Selected mailbox folder is invalid.") from ex
except InvalidAuth as ex:
_LOGGER.warning("Username or password incorrect, starting reauthentication")
self.async_set_update_error(ex)
await self._cleanup()
self.async_set_update_error(ex)
raise ConfigEntryAuthFailed() from ex
@@ -316,6 +320,7 @@ class ImapPushDataUpdateCoordinator(ImapDataUpdateCoordinator):
self.config_entry.data[CONF_SERVER],
BACKOFF_TIME,
)
await self._cleanup()
await asyncio.sleep(BACKOFF_TIME)
async def shutdown(self, *_) -> None:

View File

@@ -17,7 +17,7 @@
"iot_class": "local_push",
"loggers": ["pyinsteon", "pypubsub"],
"requirements": [
"pyinsteon==1.4.0",
"pyinsteon==1.4.1",
"insteon-frontend-home-assistant==0.3.4"
],
"usb": [

View File

@@ -2,7 +2,7 @@
from __future__ import annotations
from datetime import datetime
from datetime import date, datetime, timedelta
import logging
from typing import Any
@@ -186,14 +186,23 @@ def _parse_event(event: dict[str, Any]) -> Event:
def _get_calendar_event(event: Event) -> CalendarEvent:
"""Return a CalendarEvent from an API event."""
start: datetime | date
end: datetime | date
if isinstance(event.start, datetime) and isinstance(event.end, datetime):
start = dt_util.as_local(event.start)
end = dt_util.as_local(event.end)
if (end - start) <= timedelta(seconds=0):
end = start + timedelta(minutes=30)
else:
start = event.start
end = event.end
if (end - start) <= timedelta(days=0):
end = start + timedelta(days=1)
return CalendarEvent(
summary=event.summary,
start=dt_util.as_local(event.start)
if isinstance(event.start, datetime)
else event.start,
end=dt_util.as_local(event.end)
if isinstance(event.end, datetime)
else event.end,
start=start,
end=end,
description=event.description,
uid=event.uid,
rrule=event.rrule.as_rrule_str() if event.rrule else None,
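
The coercion above pads zero-or-negative durations so the calendar never renders an event that ends before it starts: timed events get 30 minutes, all-day events get one day. A quick worked check of both branches (naive datetimes, dt_util omitted for brevity):

from datetime import date, datetime, timedelta

# Timed event with no duration -> padded to 30 minutes
start = datetime(1997, 7, 14, 11, 0)
end = datetime(1997, 7, 14, 11, 0)
if (end - start) <= timedelta(seconds=0):
    end = start + timedelta(minutes=30)
assert end == datetime(1997, 7, 14, 11, 30)

# All-day event ending on or before its start date -> padded to one day
start_d = date(1997, 7, 14)
end_d = date(1997, 7, 14)
if (end_d - start_d) <= timedelta(days=0):
    end_d = start_d + timedelta(days=1)
assert end_d == date(1997, 7, 15)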

View File

@@ -31,7 +31,7 @@ from homeassistant.helpers import (
entity_registry as er,
issue_registry as ir,
)
from homeassistant.helpers.network import get_url
from homeassistant.helpers.network import NoURLAvailableError, get_url
from homeassistant.helpers.update_coordinator import (
CoordinatorEntity,
DataUpdateCoordinator,
@@ -152,9 +152,22 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
)
webhook_url = webhook.async_generate_path(entry.entry_id)
hass_url = get_url(
hass, allow_cloud=False, allow_external=False, allow_ip=True, require_ssl=False
)
try:
hass_url = get_url(
hass,
allow_cloud=False,
allow_external=False,
allow_ip=True,
require_ssl=False,
)
except NoURLAvailableError:
webhook.async_unregister(hass, entry.entry_id)
raise ConfigEntryNotReady(
f"Error registering URL for webhook {entry.entry_id}: "
"HomeAssistant URL is not available"
) from None
url = f"{hass_url}{webhook_url}"
if hass_url.startswith("https"):
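
Raising ConfigEntryNotReady tells Home Assistant to retry this entry's setup later with backoff instead of failing outright, and unregistering the webhook first avoids leaking the registration across retries; "from None" drops the chained traceback since NoURLAvailableError adds no extra detail. A minimal sketch of the guard in a generic setup entry (hedged; only the shape is taken from this diff):

from homeassistant.exceptions import ConfigEntryNotReady
from homeassistant.helpers.network import NoURLAvailableError, get_url


async def async_setup_entry(hass, entry) -> bool:
    try:
        hass_url = get_url(hass, allow_ip=True, require_ssl=False)
    except NoURLAvailableError:
        raise ConfigEntryNotReady("HomeAssistant URL is not available") from None
    return True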

View File

@@ -16,9 +16,9 @@ from homeassistant.const import (
CONF_VERIFY_SSL,
Platform,
)
from homeassistant.core import HomeAssistant
from homeassistant.core import HomeAssistant, callback
from homeassistant.exceptions import ConfigEntryAuthFailed
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers import config_validation as cv, entity_registry as er
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.entity import DeviceInfo
from homeassistant.helpers.update_coordinator import (
@@ -64,6 +64,38 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
_LOGGER.debug("Setting up %s integration with host %s", DOMAIN, host)
name_to_key = {
"Core Update Available": "core_update_available",
"Web Update Available": "web_update_available",
"FTL Update Available": "ftl_update_available",
"Status": "status",
"Ads Blocked Today": "ads_blocked_today",
"Ads Percentage Blocked Today": "ads_percentage_today",
"Seen Clients": "clients_ever_seen",
"DNS Queries Today": "dns_queries_today",
"Domains Blocked": "domains_being_blocked",
"DNS Queries Cached": "queries_cached",
"DNS Queries Forwarded": "queries_forwarded",
"DNS Unique Clients": "unique_clients",
"DNS Unique Domains": "unique_domains",
}
@callback
def update_unique_id(
entity_entry: er.RegistryEntry,
) -> dict[str, str] | None:
"""Update unique ID of entity entry."""
unique_id_parts = entity_entry.unique_id.split("/")
if len(unique_id_parts) == 2 and unique_id_parts[1] in name_to_key:
name = unique_id_parts[1]
new_unique_id = entity_entry.unique_id.replace(name, name_to_key[name])
_LOGGER.debug("Migrate %s to %s", entity_entry.unique_id, new_unique_id)
return {"new_unique_id": new_unique_id}
return None
await er.async_migrate_entries(hass, entry.entry_id, update_unique_id)
session = async_get_clientsession(hass, verify_tls)
api = Hole(
host,
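
The migration rewrites legacy unique IDs from the human-readable sensor name to a stable API key, leaving anything unrecognized alone. A worked example of the callback's behavior (the two-part "<entry>/<name>" format is inferred from the split on "/"):

name_to_key = {"Ads Blocked Today": "ads_blocked_today"}


def migrate(unique_id: str) -> str | None:
    parts = unique_id.split("/")
    if len(parts) == 2 and parts[1] in name_to_key:
        return unique_id.replace(parts[1], name_to_key[parts[1]])
    return None  # None means: leave this entry untouched


assert migrate("abc123/Ads Blocked Today") == "abc123/ads_blocked_today"
assert migrate("abc123/unrelated") is None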

View File

@@ -164,11 +164,12 @@ async def async_setup_entry( # noqa: C901
obj_type = call.data[CONF_TYPE]
_LOGGER.critical(
"%s objects in memory: %s",
obj_type,
[_safe_repr(obj) for obj in objgraph.by_type(obj_type)],
)
for obj in objgraph.by_type(obj_type):
_LOGGER.critical(
"%s object in memory: %s",
obj_type,
_safe_repr(obj),
)
persistent_notification.create(
hass,

View File

@@ -913,7 +913,7 @@ def _apply_update( # noqa: C901
_create_index(session_maker, "events", "ix_events_event_type_time_fired_ts")
_create_index(session_maker, "states", "ix_states_entity_id_last_updated_ts")
_create_index(session_maker, "states", "ix_states_last_updated_ts")
_migrate_columns_to_timestamp(session_maker, engine)
_migrate_columns_to_timestamp(instance, session_maker, engine)
elif new_version == 32:
# Migration is done in two steps to ensure we can start using
# the new columns before we wipe the old ones.
@@ -966,7 +966,7 @@ def _apply_update( # noqa: C901
"ix_statistics_short_term_statistic_id_start_ts",
)
try:
_migrate_statistics_columns_to_timestamp(session_maker, engine)
_migrate_statistics_columns_to_timestamp(instance, session_maker, engine)
except IntegrityError as ex:
_LOGGER.error(
"Statistics table contains duplicate entries: %s; "
@@ -979,7 +979,7 @@ def _apply_update( # noqa: C901
# and try again
with session_scope(session=session_maker()) as session:
delete_statistics_duplicates(instance, hass, session)
_migrate_statistics_columns_to_timestamp(session_maker, engine)
_migrate_statistics_columns_to_timestamp(instance, session_maker, engine)
# Log at error level to ensure the user sees this message in the log
# since we logged the error above.
_LOGGER.error(
@@ -1195,8 +1195,9 @@ def _wipe_old_string_time_columns(
session.commit()
@database_job_retry_wrapper("Migrate columns to timestamp", 3)
def _migrate_columns_to_timestamp(
session_maker: Callable[[], Session], engine: Engine
instance: Recorder, session_maker: Callable[[], Session], engine: Engine
) -> None:
"""Migrate columns to use timestamp."""
# Migrate all data in Events.time_fired to Events.time_fired_ts
@@ -1283,8 +1284,9 @@ def _migrate_columns_to_timestamp(
)
@database_job_retry_wrapper("Migrate statistics columns to timestamp", 3)
def _migrate_statistics_columns_to_timestamp(
session_maker: Callable[[], Session], engine: Engine
instance: Recorder, session_maker: Callable[[], Session], engine: Engine
) -> None:
"""Migrate statistics columns to use timestamp."""
# Migrate all data in statistics.start to statistics.start_ts
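
Likely rationale for threading instance through (an inference from the database_job_retry_wrapper decorator added above, not stated in the diff): a retry wrapper needs the Recorder instance as the first argument so it can decide whether a database error is transient. A hedged sketch of a decorator with that calling convention (an illustration, not the recorder's actual implementation):

from collections.abc import Callable
from functools import wraps


def job_retry_wrapper(description: str, attempts: int) -> Callable:
    """Retry a recorder job; the wrapped job takes the instance first."""

    def decorator(job: Callable) -> Callable:
        @wraps(job)
        def wrapper(instance, *args, **kwargs):
            for attempt in range(attempts):
                try:
                    return job(instance, *args, **kwargs)
                except Exception:  # the real code narrows to retryable DB errors
                    if attempt == attempts - 1:
                        raise
        return wrapper

    return decorator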

View File

@@ -3,23 +3,36 @@ from __future__ import annotations
from contextlib import suppress
from functools import lru_cache
import logging
from uuid import UUID
from homeassistant.util.ulid import bytes_to_ulid, ulid_to_bytes
_LOGGER = logging.getLogger(__name__)
def ulid_to_bytes_or_none(ulid: str | None) -> bytes | None:
"""Convert an ulid to bytes."""
if ulid is None:
return None
return ulid_to_bytes(ulid)
try:
return ulid_to_bytes(ulid)
except ValueError as ex:
_LOGGER.error("Error converting ulid %s to bytes: %s", ulid, ex, exc_info=True)
return None
def bytes_to_ulid_or_none(_bytes: bytes | None) -> str | None:
"""Convert bytes to a ulid."""
if _bytes is None:
return None
return bytes_to_ulid(_bytes)
try:
return bytes_to_ulid(_bytes)
except ValueError as ex:
_LOGGER.error(
"Error converting bytes %s to ulid: %s", _bytes, ex, exc_info=True
)
return None
@lru_cache(maxsize=16)

View File

@@ -730,7 +730,8 @@ def batch_cleanup_entity_ids() -> StatementLambdaElement:
lambda: update(States)
.where(
States.state_id.in_(
select(States.state_id).join(
select(States.state_id)
.join(
states_with_entity_ids := select(
States.state_id.label("state_id_with_entity_id")
)
@@ -739,6 +740,8 @@ def batch_cleanup_entity_ids() -> StatementLambdaElement:
.subquery(),
States.state_id == states_with_entity_ids.c.state_id_with_entity_id,
)
.alias("states_with_entity_ids")
.select()
)
)
.values(entity_id=None)

View File

@@ -87,7 +87,7 @@ BINARY_SENSORS = (
icon="mdi:bell-ring-outline",
icon_off="mdi:doorbell",
value=lambda api, ch: api.visitor_detected(ch),
supported=lambda api, ch: api.is_doorbell_enabled(ch),
supported=lambda api, ch: api.is_doorbell(ch),
),
)

View File

@@ -18,5 +18,5 @@
"documentation": "https://www.home-assistant.io/integrations/reolink",
"iot_class": "local_push",
"loggers": ["reolink_aio"],
"requirements": ["reolink-aio==0.5.9"]
"requirements": ["reolink-aio==0.5.10"]
}

View File

@@ -13,5 +13,5 @@
"integration_type": "hub",
"iot_class": "cloud_polling",
"loggers": ["simplipy"],
"requirements": ["simplisafe-python==2022.12.0"]
"requirements": ["simplisafe-python==2023.04.0"]
}

View File

@@ -188,9 +188,10 @@ class VerisureDoorlock(CoordinatorEntity[VerisureDataUpdateCoordinator], LockEnt
def disable_autolock(self) -> None:
"""Disable autolock on a doorlock."""
try:
self.coordinator.verisure.set_lock_config(
command = self.coordinator.verisure.set_autolock_enabled(
self.serial_number, auto_lock_enabled=False
)
self.coordinator.verisure.request(command)
LOGGER.debug("Disabling autolock on %s", self.serial_number)
except VerisureError as ex:
LOGGER.error("Could not disable autolock, %s", ex)
@@ -198,9 +199,10 @@ class VerisureDoorlock(CoordinatorEntity[VerisureDataUpdateCoordinator], LockEnt
def enable_autolock(self) -> None:
"""Enable autolock on a doorlock."""
try:
self.coordinator.verisure.set_lock_config(
command = self.coordinator.verisure.set_autolock_enabled(
self.serial_number, auto_lock_enabled=True
)
self.coordinator.verisure.request(command)
LOGGER.debug("Enabling autolock on %s", self.serial_number)
except VerisureError as ex:
LOGGER.error("Could not enable autolock, %s", ex)

View File

@@ -8,7 +8,7 @@ from .backports.enum import StrEnum
APPLICATION_NAME: Final = "HomeAssistant"
MAJOR_VERSION: Final = 2023
MINOR_VERSION: Final = 4
PATCH_VERSION: Final = "0"
PATCH_VERSION: Final = "1"
__short_version__: Final = f"{MAJOR_VERSION}.{MINOR_VERSION}"
__version__: Final = f"{__short_version__}.{PATCH_VERSION}"
REQUIRED_PYTHON_VER: Final[tuple[int, int, int]] = (3, 10, 0)

View File

@@ -1,7 +1,7 @@
PyJWT==2.6.0
PyNaCl==1.5.0
PyTurboJPEG==1.6.7
aiodiscover==1.4.15
aiodiscover==1.4.16
aiohttp==3.8.4
aiohttp_cors==0.7.0
astral==2.2
@@ -25,7 +25,7 @@ ha-av==10.0.0
hass-nabucasa==0.63.1
hassil==1.0.6
home-assistant-bluetooth==1.9.3
home-assistant-frontend==20230405.0
home-assistant-frontend==20230406.1
home-assistant-intents==2023.3.29
httpx==0.23.3
ifaddr==0.1.7
@@ -46,7 +46,7 @@ requests==2.28.2
scapy==2.5.0
sqlalchemy==2.0.7
typing-extensions>=4.5.0,<5.0
ulid-transform==0.5.1
ulid-transform==0.6.0
voluptuous-serialize==2.6.0
voluptuous==0.13.1
yarl==1.8.1
@@ -157,3 +157,8 @@ uamqp==1.6.0;python_version<'3.11'
# faust-cchardet: Ensure we have a version we can build wheels
# 2.1.18 is the first version that works with our wheel builder
faust-cchardet>=2.1.18
# websockets 11.0 is missing files in the source distribution
# which break wheel builds
# https://github.com/aaugustin/websockets/issues/1329
websockets<11.0

View File

@@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta"
[project]
name = "homeassistant"
version = "2023.4.0"
version = "2023.4.1"
license = {text = "Apache-2.0"}
description = "Open-source home automation platform running on Python 3."
readme = "README.rst"
@@ -50,7 +50,7 @@ dependencies = [
"pyyaml==6.0",
"requests==2.28.2",
"typing-extensions>=4.5.0,<5.0",
"ulid-transform==0.5.1",
"ulid-transform==0.6.0",
"voluptuous==0.13.1",
"voluptuous-serialize==2.6.0",
"yarl==1.8.1",

View File

@@ -24,7 +24,7 @@ python-slugify==4.0.1
pyyaml==6.0
requests==2.28.2
typing-extensions>=4.5.0,<5.0
ulid-transform==0.5.1
ulid-transform==0.6.0
voluptuous==0.13.1
voluptuous-serialize==2.6.0
yarl==1.8.1

View File

@@ -119,7 +119,7 @@ aioairq==0.2.4
aioairzone==0.5.2
# homeassistant.components.ambient_station
aioambient==2021.11.0
aioambient==2022.10.0
# homeassistant.components.aseko_pool_live
aioaseko==0.0.2
@@ -137,7 +137,7 @@ aiobafi6==0.8.0
aiobotocore==2.1.0
# homeassistant.components.dhcp
aiodiscover==1.4.15
aiodiscover==1.4.16
# homeassistant.components.dnsip
# homeassistant.components.minecraft_server
@@ -748,7 +748,7 @@ freesms==0.2.0
# homeassistant.components.fritz
# homeassistant.components.fritzbox_callmonitor
fritzconnection==1.11.0
fritzconnection==1.12.0
# homeassistant.components.google_translate
gTTS==2.2.4
@@ -757,7 +757,7 @@ gTTS==2.2.4
gassist-text==0.0.10
# homeassistant.components.google
gcal-sync==4.1.2
gcal-sync==4.1.3
# homeassistant.components.geniushub
geniushub-client==0.7.0
@@ -907,7 +907,7 @@ hole==0.8.0
holidays==0.21.13
# homeassistant.components.frontend
home-assistant-frontend==20230405.0
home-assistant-frontend==20230406.1
# homeassistant.components.conversation
home-assistant-intents==2023.3.29
@@ -1684,7 +1684,7 @@ pyialarm==2.2.0
pyicloud==1.0.0
# homeassistant.components.insteon
pyinsteon==1.4.0
pyinsteon==1.4.1
# homeassistant.components.intesishome
pyintesishome==1.8.0
@@ -2231,7 +2231,7 @@ regenmaschine==2022.11.0
renault-api==0.1.12
# homeassistant.components.reolink
reolink-aio==0.5.9
reolink-aio==0.5.10
# homeassistant.components.python_script
restrictedpython==6.0
@@ -2343,7 +2343,7 @@ simplehound==0.3
simplepush==2.1.1
# homeassistant.components.simplisafe
simplisafe-python==2022.12.0
simplisafe-python==2023.04.0
# homeassistant.components.sisyphus
sisyphus-control==3.1.2

View File

@@ -109,7 +109,7 @@ aioairq==0.2.4
aioairzone==0.5.2
# homeassistant.components.ambient_station
aioambient==2021.11.0
aioambient==2022.10.0
# homeassistant.components.aseko_pool_live
aioaseko==0.0.2
@@ -127,7 +127,7 @@ aiobafi6==0.8.0
aiobotocore==2.1.0
# homeassistant.components.dhcp
aiodiscover==1.4.15
aiodiscover==1.4.16
# homeassistant.components.dnsip
# homeassistant.components.minecraft_server
@@ -570,7 +570,7 @@ freebox-api==1.1.0
# homeassistant.components.fritz
# homeassistant.components.fritzbox_callmonitor
fritzconnection==1.11.0
fritzconnection==1.12.0
# homeassistant.components.google_translate
gTTS==2.2.4
@@ -579,7 +579,7 @@ gTTS==2.2.4
gassist-text==0.0.10
# homeassistant.components.google
gcal-sync==4.1.2
gcal-sync==4.1.3
# homeassistant.components.geocaching
geocachingapi==0.2.1
@@ -693,7 +693,7 @@ hole==0.8.0
holidays==0.21.13
# homeassistant.components.frontend
home-assistant-frontend==20230405.0
home-assistant-frontend==20230406.1
# homeassistant.components.conversation
home-assistant-intents==2023.3.29
@@ -1218,7 +1218,7 @@ pyialarm==2.2.0
pyicloud==1.0.0
# homeassistant.components.insteon
pyinsteon==1.4.0
pyinsteon==1.4.1
# homeassistant.components.ipma
pyipma==3.0.6
@@ -1594,7 +1594,7 @@ regenmaschine==2022.11.0
renault-api==0.1.12
# homeassistant.components.reolink
reolink-aio==0.5.9
reolink-aio==0.5.10
# homeassistant.components.python_script
restrictedpython==6.0
@@ -1670,7 +1670,7 @@ simplehound==0.3
simplepush==2.1.1
# homeassistant.components.simplisafe
simplisafe-python==2022.12.0
simplisafe-python==2023.04.0
# homeassistant.components.slack
slackclient==2.5.0

View File

@@ -162,6 +162,11 @@ uamqp==1.6.0;python_version<'3.11'
# faust-cchardet: Ensure we have a version we can build wheels
# 2.1.18 is the first version that works with our wheel builder
faust-cchardet>=2.1.18
# websockets 11.0 is missing files in the source distribution
# which break wheel builds
# https://github.com/aaugustin/websockets/issues/1329
websockets<11.0
"""
IGNORE_PRE_COMMIT_HOOK_ID = (

View File

@@ -169,6 +169,28 @@ async def test_update_with_json_attrs(hass: HomeAssistant) -> None:
)
entity_state = hass.states.get("sensor.test")
assert entity_state
assert entity_state.state == "unknown"
assert entity_state.attributes["key"] == "some_json_value"
assert entity_state.attributes["another_key"] == "another_json_value"
assert entity_state.attributes["key_three"] == "value_three"
async def test_update_with_json_attrs_and_value_template(hass: HomeAssistant) -> None:
"""Test json_attributes can be used together with value_template."""
await setup_test_entities(
hass,
{
"command": (
'echo { \\"key\\": \\"some_json_value\\", \\"another_key\\": '
'\\"another_json_value\\", \\"key_three\\": \\"value_three\\" }'
),
"json_attributes": ["key", "another_key", "key_three"],
"value_template": '{{ value_json["key"] }}',
},
)
entity_state = hass.states.get("sensor.test")
assert entity_state
assert entity_state.state == "some_json_value"
assert entity_state.attributes["key"] == "some_json_value"
assert entity_state.attributes["another_key"] == "another_json_value"
assert entity_state.attributes["key_three"] == "value_three"

View File

@@ -26,10 +26,10 @@ TEST_ENTITY = "calendar.light_schedule"
class FakeStore(LocalCalendarStore):
"""Mock storage implementation."""
def __init__(self, hass: HomeAssistant, path: Path) -> None:
def __init__(self, hass: HomeAssistant, path: Path, ics_content: str) -> None:
"""Initialize FakeStore."""
super().__init__(hass, path)
self._content = ""
self._content = ics_content
def _load(self) -> str:
"""Read from calendar storage."""
@@ -40,15 +40,21 @@
self._content = ics_content
@pytest.fixture(name="ics_content", autouse=True)
def mock_ics_content() -> str:
"""Fixture to allow tests to set initial ics content for the calendar store."""
return ""
@pytest.fixture(name="store", autouse=True)
def mock_store() -> Generator[None, None, None]:
def mock_store(ics_content: str) -> Generator[None, None, None]:
"""Test cleanup, remove any media storage persisted during the test."""
stores: dict[Path, FakeStore] = {}
def new_store(hass: HomeAssistant, path: Path) -> FakeStore:
if path not in stores:
stores[path] = FakeStore(hass, path)
stores[path] = FakeStore(hass, path, ics_content)
return stores[path]
with patch(
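
The new ics_content fixture enables a standard pytest idiom: parametrizing a test over a fixture name overrides that fixture's value for that test, and the override also flows into dependent fixtures such as mock_store. A minimal sketch of the mechanism:

import pytest


@pytest.fixture(name="ics_content")
def mock_ics_content() -> str:
    """Default calendar content; tests may override via parametrize."""
    return ""


@pytest.mark.parametrize("ics_content", ["BEGIN:VCALENDAR\nEND:VCALENDAR\n"])
def test_override(ics_content: str) -> None:
    # The parametrized value wins over the fixture default.
    assert ics_content.startswith("BEGIN:VCALENDAR")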

View File

@@ -1,6 +1,7 @@
"""Tests for calendar platform of local calendar."""
import datetime
import textwrap
import pytest
@@ -940,3 +941,91 @@ async def test_create_event_service(
"location": "Test Location",
}
]
@pytest.mark.parametrize(
"ics_content",
[
textwrap.dedent(
"""\
BEGIN:VCALENDAR
BEGIN:VEVENT
SUMMARY:Bastille Day Party
DTSTART:19970714
DTEND:19970714
END:VEVENT
END:VCALENDAR
"""
),
textwrap.dedent(
"""\
BEGIN:VCALENDAR
BEGIN:VEVENT
SUMMARY:Bastille Day Party
DTSTART:19970714
DTEND:19970710
END:VEVENT
END:VCALENDAR
"""
),
],
ids=["no_duration", "negative"],
)
async def test_invalid_all_day_event(
ws_client: ClientFixture,
setup_integration: None,
get_events: GetEventsFn,
) -> None:
"""Test all day events with invalid durations, which are coerced to be valid."""
events = await get_events("1997-07-14T00:00:00Z", "1997-07-16T00:00:00Z")
assert list(map(event_fields, events)) == [
{
"summary": "Bastille Day Party",
"start": {"date": "1997-07-14"},
"end": {"date": "1997-07-15"},
}
]
@pytest.mark.parametrize(
"ics_content",
[
textwrap.dedent(
"""\
BEGIN:VCALENDAR
BEGIN:VEVENT
SUMMARY:Bastille Day Party
DTSTART:19970714T110000
DTEND:19970714T110000
END:VEVENT
END:VCALENDAR
"""
),
textwrap.dedent(
"""\
BEGIN:VCALENDAR
BEGIN:VEVENT
SUMMARY:Bastille Day Party
DTSTART:19970714T110000
DTEND:19970710T100000
END:VEVENT
END:VCALENDAR
"""
),
],
ids=["no_duration", "negative"],
)
async def test_invalid_event_duration(
ws_client: ClientFixture,
setup_integration: None,
get_events: GetEventsFn,
) -> None:
"""Test events with invalid durations, which are coerced to be valid."""
events = await get_events("1997-07-14T00:00:00Z", "1997-07-16T00:00:00Z")
assert list(map(event_fields, events)) == [
{
"summary": "Bastille Day Party",
"start": {"dateTime": "1997-07-14T11:00:00-06:00"},
"end": {"dateTime": "1997-07-14T11:30:00-06:00"},
}
]

View File

@@ -66,7 +66,7 @@ from homeassistant.const import (
STATE_LOCKED,
STATE_UNLOCKED,
)
from homeassistant.core import CoreState, Event, HomeAssistant, callback
from homeassistant.core import Context, CoreState, Event, HomeAssistant, callback
from homeassistant.helpers import entity_registry as er, recorder as recorder_helper
from homeassistant.setup import async_setup_component, setup_component
from homeassistant.util import dt as dt_util
@@ -854,6 +854,31 @@
assert json_loads(events["test_event_too_big"]) == {}
def test_saving_event_invalid_context_ulid(
hass_recorder: Callable[..., HomeAssistant], caplog: pytest.LogCaptureFixture
) -> None:
"""Test we handle invalid manually injected context ids."""
hass = hass_recorder()
event_data = {"test_attr": 5, "test_attr_10": "nice"}
hass.bus.fire("test_event", event_data, context=Context(id="invalid"))
wait_recording_done(hass)
events = {}
with session_scope(hass=hass) as session:
for _, data, event_type in (
session.query(Events.event_id, EventData.shared_data, EventTypes.event_type)
.outerjoin(EventData, Events.data_id == EventData.data_id)
.outerjoin(EventTypes, Events.event_type_id == EventTypes.event_type_id)
.where(EventTypes.event_type.in_(["test_event"]))
):
events[event_type] = data
assert "invalid" in caplog.text
assert len(events) == 1
assert json_loads(events["test_event"]) == event_data
def test_recorder_setup_failure(hass: HomeAssistant) -> None:
"""Test some exceptions."""
recorder_helper.async_initialize_recorder(hass)

View File

@@ -14,9 +14,11 @@ from homeassistant.components.recorder.db_schema import (
)
from homeassistant.components.recorder.models import (
LazyState,
bytes_to_ulid_or_none,
process_datetime_to_timestamp,
process_timestamp,
process_timestamp_to_utc_isoformat,
ulid_to_bytes_or_none,
)
from homeassistant.const import EVENT_STATE_CHANGED
import homeassistant.core as ha
@@ -415,3 +417,27 @@ async def test_process_datetime_to_timestamp_mirrors_utc_isoformat_behavior(
process_datetime_to_timestamp(datetime_hst_timezone)
== dt_util.parse_datetime("2016-07-09T21:00:00+00:00").timestamp()
)
def test_ulid_to_bytes_or_none(caplog: pytest.LogCaptureFixture) -> None:
"""Test ulid_to_bytes_or_none."""
assert (
ulid_to_bytes_or_none("01EYQZJXZ5Z1Z1Z1Z1Z1Z1Z1Z1")
== b"\x01w\xaf\xf9w\xe5\xf8~\x1f\x87\xe1\xf8~\x1f\x87\xe1"
)
assert ulid_to_bytes_or_none("invalid") is None
assert "invalid" in caplog.text
assert ulid_to_bytes_or_none(None) is None
def test_bytes_to_ulid_or_none(caplog: pytest.LogCaptureFixture) -> None:
"""Test bytes_to_ulid_or_none."""
assert (
bytes_to_ulid_or_none(b"\x01w\xaf\xf9w\xe5\xf8~\x1f\x87\xe1\xf8~\x1f\x87\xe1")
== "01EYQZJXZ5Z1Z1Z1Z1Z1Z1Z1Z1"
)
assert bytes_to_ulid_or_none(b"invalid") is None
assert "invalid" in caplog.text
assert bytes_to_ulid_or_none(None) is None

View File

@@ -209,6 +209,29 @@
}),
'unit_of_measurement': None,
}),
dict({
'device_class': None,
'disabled': False,
'disabled_by': None,
'domain': 'sensor',
'entity_category': None,
'entity_id': 'sensor.fan_air_quality',
'icon': None,
'name': None,
'original_device_class': None,
'original_icon': None,
'original_name': 'Fan Air Quality',
'state': dict({
'attributes': dict({
'friendly_name': 'Fan Air Quality',
}),
'entity_id': 'sensor.fan_air_quality',
'last_changed': str,
'last_updated': str,
'state': 'unavailable',
}),
'unit_of_measurement': None,
}),
dict({
'device_class': None,
'disabled': False,
@@ -234,29 +257,6 @@
}),
'unit_of_measurement': '%',
}),
dict({
'device_class': None,
'disabled': False,
'disabled_by': None,
'domain': 'sensor',
'entity_category': None,
'entity_id': 'sensor.fan_air_quality',
'icon': None,
'name': None,
'original_device_class': None,
'original_icon': None,
'original_name': 'Fan Air Quality',
'state': dict({
'attributes': dict({
'friendly_name': 'Fan Air Quality',
}),
'entity_id': 'sensor.fan_air_quality',
'last_changed': str,
'last_updated': str,
'state': 'unavailable',
}),
'unit_of_measurement': None,
}),
]),
'name': 'Fan',
'name_by_user': None,

View File

@@ -85,6 +85,9 @@ async def test_async_get_device_diagnostics__single_fan(
diag = await get_diagnostics_for_device(hass, hass_client, config_entry, device)
assert isinstance(diag, dict)
diag["home_assistant"]["entities"] = sorted(
diag["home_assistant"]["entities"], key=lambda ent: ent["entity_id"]
)
assert diag == snapshot(
matcher=path_type(
{
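
Sorting the live entity list by entity_id (paired with the reordered snapshot above) makes the comparison independent of registry enumeration order; a toy illustration:

entities = [
    {"entity_id": "sensor.fan_humidity"},
    {"entity_id": "sensor.fan_air_quality"},
]
ordered = sorted(entities, key=lambda ent: ent["entity_id"])
assert [e["entity_id"] for e in ordered] == [
    "sensor.fan_air_quality",
    "sensor.fan_humidity",
]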