2024.7.2 (#121671)

commit 058b012e6c
@@ -9,5 +9,5 @@
   },
   "iot_class": "cloud_push",
   "loggers": ["jaraco.abode", "lomond"],
-  "requirements": ["jaraco.abode==5.1.2"]
+  "requirements": ["jaraco.abode==5.2.1"]
 }
@@ -6,5 +6,5 @@
   "documentation": "https://www.home-assistant.io/integrations/anova",
   "iot_class": "cloud_push",
   "loggers": ["anova_wifi"],
-  "requirements": ["anova-wifi==0.15.0"]
+  "requirements": ["anova-wifi==0.17.0"]
 }
@@ -28,5 +28,5 @@
   "documentation": "https://www.home-assistant.io/integrations/august",
   "iot_class": "cloud_push",
   "loggers": ["pubnub", "yalexs"],
-  "requirements": ["yalexs==6.4.1", "yalexs-ble==2.4.3"]
+  "requirements": ["yalexs==6.4.2", "yalexs-ble==2.4.3"]
 }
@@ -377,6 +377,14 @@ class ClimateEntity(Entity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_):
             # Return if integration has migrated already
             return
 
+        supported_features = self.supported_features
+        if supported_features & (
+            ClimateEntityFeature.TURN_ON | ClimateEntityFeature.TURN_OFF
+        ):
+            # The entity supports both turn_on and turn_off, the backwards compatibility
+            # checks are not needed
+            return
+
         supported_features = self.supported_features
         if not supported_features & ClimateEntityFeature.TURN_OFF and (
             type(self).async_turn_off is not ClimateEntity.async_turn_off
@@ -6,7 +6,7 @@
   "documentation": "https://www.home-assistant.io/integrations/enphase_envoy",
   "iot_class": "local_polling",
   "loggers": ["pyenphase"],
-  "requirements": ["pyenphase==1.20.3"],
+  "requirements": ["pyenphase==1.20.6"],
   "zeroconf": [
     {
       "type": "_enphase-envoy._tcp.local."
@@ -107,13 +107,6 @@ class FeedReaderConfigFlow(ConfigFlow, domain=DOMAIN):
                 return self.abort_on_import_error(user_input[CONF_URL], "url_error")
             return self.show_user_form(user_input, {"base": "url_error"})
 
-        if not feed.entries:
-            if self.context["source"] == SOURCE_IMPORT:
-                return self.abort_on_import_error(
-                    user_input[CONF_URL], "no_feed_entries"
-                )
-            return self.show_user_form(user_input, {"base": "no_feed_entries"})
-
         feed_title = feed["feed"]["title"]
 
         return self.async_create_entry(
@@ -161,13 +154,6 @@ class FeedReaderConfigFlow(ConfigFlow, domain=DOMAIN):
                 step_id="reconfigure_confirm",
                 errors={"base": "url_error"},
             )
-        if not feed.entries:
-            return self.show_user_form(
-                user_input=user_input,
-                description_placeholders={"name": self._config_entry.title},
-                step_id="reconfigure_confirm",
-                errors={"base": "no_feed_entries"},
-            )
 
         self.hass.config_entries.async_update_entry(self._config_entry, data=user_input)
         return self.async_abort(reason="reconfigure_successful")
@@ -18,8 +18,7 @@
       "reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]"
     },
     "error": {
-      "url_error": "The URL could not be opened.",
-      "no_feed_entries": "The URL seems not to serve any feed entries."
+      "url_error": "The URL could not be opened."
     }
   },
   "options": {
@@ -38,10 +37,6 @@
     "import_yaml_error_url_error": {
       "title": "The Feedreader YAML configuration import failed",
       "description": "Configuring the Feedreader using YAML is being removed but there was a connection error when trying to import the YAML configuration for `{url}`.\n\nPlease verify that url is reachable and accessable for Home Assistant and restart Home Assistant to try again or remove the Feedreader YAML configuration from your configuration.yaml file and continue to set up the integration manually."
-    },
-    "import_yaml_error_no_feed_entries": {
-      "title": "[%key:component::feedreader::issues::import_yaml_error_url_error::title%]",
-      "description": "Configuring the Feedreader using YAML is being removed but when trying to import the YAML configuration for `{url}` no feed entries were found.\n\nPlease verify that url serves any feed entries and restart Home Assistant to try again or remove the Feedreader YAML configuration from your configuration.yaml file and continue to set up the integration manually."
     }
   }
 }
@@ -20,5 +20,5 @@
   "documentation": "https://www.home-assistant.io/integrations/frontend",
   "integration_type": "system",
   "quality_scale": "internal",
-  "requirements": ["home-assistant-frontend==20240705.0"]
+  "requirements": ["home-assistant-frontend==20240710.0"]
 }
@@ -2,9 +2,12 @@
 
 from __future__ import annotations
 
+from fullykiosk import FullyKioskError
+
 from homeassistant.components.camera import Camera, CameraEntityFeature
 from homeassistant.config_entries import ConfigEntry
 from homeassistant.core import HomeAssistant, callback
+from homeassistant.exceptions import HomeAssistantError
 from homeassistant.helpers.entity_platform import AddEntitiesCallback
 
 from .const import DOMAIN
@@ -36,7 +39,11 @@ class FullyCameraEntity(FullyKioskEntity, Camera):
         self, width: int | None = None, height: int | None = None
     ) -> bytes | None:
         """Return bytes of camera image."""
-        image_bytes: bytes = await self.coordinator.fully.getCamshot()
-        return image_bytes
+        try:
+            image_bytes: bytes = await self.coordinator.fully.getCamshot()
+        except FullyKioskError as err:
+            raise HomeAssistantError(err) from err
+        else:
+            return image_bytes
 
     async def async_turn_on(self) -> None:
@@ -156,7 +156,6 @@ SENSOR_DESCRIPTIONS: dict[str, SensorEntityDescription] = {
         key="GAS_POWER",
         native_unit_of_measurement=UnitOfVolume.CUBIC_METERS,
         device_class=SensorDeviceClass.GAS,
-        state_class=SensorStateClass.MEASUREMENT,
     ),
     "GAS_ENERGY_COUNTER": SensorEntityDescription(
         key="GAS_ENERGY_COUNTER",
@@ -6,7 +6,7 @@
       "description": "Enter your credentials",
       "data": {
         "username": "[%key:common::config_flow::data::email%]",
-        "password": "[%key:common::config_flow::data::password%]",
+        "password": "App-specific password",
         "with_family": "With family"
       }
     },
@@ -14,7 +14,7 @@
       "title": "[%key:common::config_flow::title::reauth%]",
       "description": "Your previously entered password for {username} is no longer working. Update your password to keep using this integration.",
       "data": {
-        "password": "[%key:common::config_flow::data::password%]"
+        "password": "App-specific password"
       }
     },
     "trusted_device": {
@@ -641,12 +641,10 @@ class KodiEntity(MediaPlayerEntity):
         if self.state == MediaPlayerState.OFF:
             return state_attr
 
-        hdr_type = (
-            self._item.get("streamdetails", {}).get("video", [{}])[0].get("hdrtype")
-        )
-        if hdr_type == "":
-            state_attr["dynamic_range"] = "sdr"
-        else:
+        state_attr["dynamic_range"] = "sdr"
+        if (video_details := self._item.get("streamdetails", {}).get("video")) and (
+            hdr_type := video_details[0].get("hdrtype")
+        ):
             state_attr["dynamic_range"] = hdr_type
 
         return state_attr
@@ -145,4 +145,20 @@ DISCOVERY_SCHEMAS = [
         required_attributes=(clusters.BooleanState.Attributes.StateValue,),
         device_type=(device_types.RainSensor,),
     ),
+    MatterDiscoverySchema(
+        platform=Platform.BINARY_SENSOR,
+        entity_description=MatterBinarySensorEntityDescription(
+            key="LockDoorStateSensor",
+            device_class=BinarySensorDeviceClass.DOOR,
+            # pylint: disable=unnecessary-lambda
+            measurement_to_ha=lambda x: {
+                clusters.DoorLock.Enums.DoorStateEnum.kDoorOpen: True,
+                clusters.DoorLock.Enums.DoorStateEnum.kDoorJammed: True,
+                clusters.DoorLock.Enums.DoorStateEnum.kDoorForcedOpen: True,
+                clusters.DoorLock.Enums.DoorStateEnum.kDoorClosed: False,
+            }.get(x),
+        ),
+        entity_class=MatterBinarySensor,
+        required_attributes=(clusters.DoorLock.Attributes.DoorState,),
+    ),
 ]
@@ -2,6 +2,7 @@
 
 from __future__ import annotations
 
+import asyncio
 from typing import Any
 
 from chip.clusters import Objects as clusters
@@ -38,6 +39,7 @@ class MatterLock(MatterEntity, LockEntity):
     """Representation of a Matter lock."""
 
     features: int | None = None
+    _optimistic_timer: asyncio.TimerHandle | None = None
 
     @property
    def code_format(self) -> str | None:
@@ -90,9 +92,15 @@ class MatterLock(MatterEntity, LockEntity):
 
     async def async_lock(self, **kwargs: Any) -> None:
         """Lock the lock with pin if needed."""
-        # optimistically signal locking to state machine
-        self._attr_is_locking = True
-        self.async_write_ha_state()
+        if not self._attr_is_locked:
+            # optimistically signal locking to state machine
+            self._attr_is_locking = True
+            self.async_write_ha_state()
+            # the lock should acknowledge the command with an attribute update
+            # but bad things may happen, so guard against it with a timer.
+            self._optimistic_timer = self.hass.loop.call_later(
+                30, self._reset_optimistic_state
+            )
         code: str | None = kwargs.get(ATTR_CODE)
         code_bytes = code.encode() if code else None
         await self.send_device_command(
@@ -101,9 +109,15 @@ class MatterLock(MatterEntity, LockEntity):
 
     async def async_unlock(self, **kwargs: Any) -> None:
         """Unlock the lock with pin if needed."""
-        # optimistically signal unlocking to state machine
-        self._attr_is_unlocking = True
-        self.async_write_ha_state()
+        if self._attr_is_locked:
+            # optimistically signal unlocking to state machine
+            self._attr_is_unlocking = True
+            self.async_write_ha_state()
+            # the lock should acknowledge the command with an attribute update
+            # but bad things may happen, so guard against it with a timer.
+            self._optimistic_timer = self.hass.loop.call_later(
+                30, self._reset_optimistic_state
+            )
         code: str | None = kwargs.get(ATTR_CODE)
         code_bytes = code.encode() if code else None
         if self.supports_unbolt:
@@ -120,9 +134,14 @@ class MatterLock(MatterEntity, LockEntity):
 
     async def async_open(self, **kwargs: Any) -> None:
         """Open the door latch."""
-        # optimistically signal unlocking to state machine
-        self._attr_is_unlocking = True
+        # optimistically signal opening to state machine
+        self._attr_is_opening = True
         self.async_write_ha_state()
+        # the lock should acknowledge the command with an attribute update
+        # but bad things may happen, so guard against it with a timer.
+        self._optimistic_timer = self.hass.loop.call_later(
+            30 if self._attr_is_locked else 5, self._reset_optimistic_state
+        )
         code: str | None = kwargs.get(ATTR_CODE)
         code_bytes = code.encode() if code else None
         await self.send_device_command(
@@ -145,38 +164,38 @@ class MatterLock(MatterEntity, LockEntity):
         )
 
         # always reset the optimisically (un)locking state on state update
-        self._attr_is_locking = False
-        self._attr_is_unlocking = False
+        self._reset_optimistic_state(write_state=False)
 
         LOGGER.debug("Lock state: %s for %s", lock_state, self.entity_id)
 
+        if lock_state is clusters.DoorLock.Enums.DlLockState.kUnlatched:
+            self._attr_is_locked = False
+            self._attr_is_open = True
         if lock_state is clusters.DoorLock.Enums.DlLockState.kLocked:
             self._attr_is_locked = True
+            self._attr_is_open = False
         elif lock_state in (
             clusters.DoorLock.Enums.DlLockState.kUnlocked,
-            clusters.DoorLock.Enums.DlLockState.kUnlatched,
             clusters.DoorLock.Enums.DlLockState.kNotFullyLocked,
         ):
             self._attr_is_locked = False
+            self._attr_is_open = False
         else:
-            # According to the matter docs a null state can happen during device startup.
+            # Treat any other state as unknown.
+            # NOTE: A null state can happen during device startup.
             self._attr_is_locked = None
+            self._attr_is_open = None
 
-        if self.supports_door_position_sensor:
-            door_state = self.get_matter_attribute_value(
-                clusters.DoorLock.Attributes.DoorState
-            )
-
-            assert door_state is not None
-
-            LOGGER.debug("Door state: %s for %s", door_state, self.entity_id)
-
-            self._attr_is_jammed = (
-                door_state is clusters.DoorLock.Enums.DoorStateEnum.kDoorJammed
-            )
-            self._attr_is_open = (
-                door_state is clusters.DoorLock.Enums.DoorStateEnum.kDoorOpen
-            )
+    @callback
+    def _reset_optimistic_state(self, write_state: bool = True) -> None:
+        if self._optimistic_timer and not self._optimistic_timer.cancelled():
+            self._optimistic_timer.cancel()
+        self._optimistic_timer = None
+        self._attr_is_locking = False
+        self._attr_is_unlocking = False
+        self._attr_is_opening = False
+        if write_state:
+            self.async_write_ha_state()
 
 
 DISCOVERY_SCHEMAS = [
@@ -60,7 +60,8 @@ class MealieMealplanCalendarEntity(MealieEntity, CalendarEntity):
         mealplans = self.coordinator.data[self._entry_type]
         if not mealplans:
             return None
-        return _get_event_from_mealplan(mealplans[0])
+        sorted_mealplans = sorted(mealplans, key=lambda x: x.mealplan_date)
+        return _get_event_from_mealplan(sorted_mealplans[0])
 
     async def async_get_events(
         self, hass: HomeAssistant, start_date: datetime, end_date: datetime
@@ -3,8 +3,11 @@
     "step": {
       "user": {
         "data": {
-          "host": "[%key:common::config_flow::data::host%]",
+          "host": "[%key:common::config_flow::data::url%]",
           "api_token": "[%key:common::config_flow::data::api_token%]"
+        },
+        "data_description": {
+          "host": "The URL of your Mealie instance."
         }
       }
     },
@@ -721,10 +721,15 @@ async def webhook_get_config(
     """Handle a get config webhook."""
     hass_config = hass.config.as_dict()
 
+    device: dr.DeviceEntry = hass.data[DOMAIN][DATA_DEVICES][
+        config_entry.data[CONF_WEBHOOK_ID]
+    ]
+
     resp = {
         "latitude": hass_config["latitude"],
         "longitude": hass_config["longitude"],
         "elevation": hass_config["elevation"],
+        "hass_device_id": device.id,
         "unit_system": hass_config["unit_system"],
         "location_name": hass_config["location_name"],
         "time_zone": hass_config["time_zone"],
@@ -6,5 +6,5 @@
   "iot_class": "local_polling",
   "loggers": ["pymodbus"],
   "quality_scale": "platinum",
-  "requirements": ["pymodbus==3.6.8"]
+  "requirements": ["pymodbus==3.6.9"]
 }
@@ -2,6 +2,7 @@
   "domain": "mpd",
   "name": "Music Player Daemon (MPD)",
   "codeowners": [],
+  "config_flow": true,
   "documentation": "https://www.home-assistant.io/integrations/mpd",
   "iot_class": "local_polling",
   "loggers": ["mpd"],
@@ -73,7 +73,7 @@ CONFIG_SCHEMA = vol.Schema(
                     vol.Optional(CONF_SSL, default=DEFAULT_SSL): cv.boolean,
                 }
             ),
-            cv.has_at_least_one_key("auth"),
+            cv.has_at_least_one_key(CONF_API_KEY, CONF_PASSWORD),
         )
     },
     extra=vol.ALLOW_EXTRA,
@@ -385,6 +385,6 @@ class PhilipsTVLightEntity(PhilipsJsEntity, LightEntity):
         """Return true if entity is available."""
         if not super().available:
             return False
-        if not self.coordinator.api.on:
+        if not self._tv.on:
             return False
-        return self.coordinator.api.powerstate == "On"
+        return True
@@ -7,5 +7,5 @@
   "integration_type": "service",
   "iot_class": "cloud_polling",
   "quality_scale": "platinum",
-  "requirements": ["vehicle==2.2.1"]
+  "requirements": ["vehicle==2.2.2"]
 }
@@ -24,7 +24,7 @@ from sqlalchemy.exc import (
     SQLAlchemyError,
 )
 from sqlalchemy.orm.session import Session
-from sqlalchemy.schema import AddConstraint, DropConstraint
+from sqlalchemy.schema import AddConstraint, CreateTable, DropConstraint
 from sqlalchemy.sql.expression import true
 from sqlalchemy.sql.lambdas import StatementLambdaElement
 
@@ -1738,10 +1738,11 @@ def cleanup_legacy_states_event_ids(instance: Recorder) -> bool:
         # Only drop the index if there are no more event_ids in the states table
         # ex all NULL
         assert instance.engine is not None, "engine should never be None"
-        if instance.dialect_name != SupportedDialect.SQLITE:
+        if instance.dialect_name == SupportedDialect.SQLITE:
             # SQLite does not support dropping foreign key constraints
-            # so we can't drop the index at this time but we can avoid
-            # looking for legacy rows during purge
+            # so we have to rebuild the table
+            rebuild_sqlite_table(session_maker, instance.engine, States)
+        else:
             _drop_foreign_key_constraints(
                 session_maker, instance.engine, TABLE_STATES, ["event_id"]
             )
@@ -1894,3 +1895,68 @@ def _mark_migration_done(
             migration_id=migration.migration_id, version=migration.migration_version
         )
     )
+
+
+def rebuild_sqlite_table(
+    session_maker: Callable[[], Session], engine: Engine, table: type[Base]
+) -> None:
+    """Rebuild an SQLite table.
+
+    This must only be called after all migrations are complete
+    and the database is in a consistent state.
+
+    If the table is not migrated to the current schema this
+    will likely fail.
+    """
+    table_table = cast(Table, table.__table__)
+    orig_name = table_table.name
+    temp_name = f"{table_table.name}_temp_{int(time())}"
+
+    _LOGGER.warning(
+        "Rebuilding SQLite table %s; This will take a while; Please be patient!",
+        orig_name,
+    )
+
+    try:
+        # 12 step SQLite table rebuild
+        # https://www.sqlite.org/lang_altertable.html
+        with session_scope(session=session_maker()) as session:
+            # Step 1 - Disable foreign keys
+            session.connection().execute(text("PRAGMA foreign_keys=OFF"))
+        # Step 2 - create a transaction
+        with session_scope(session=session_maker()) as session:
+            # Step 3 - we know all the indexes, triggers, and views associated with table X
+            new_sql = str(CreateTable(table_table).compile(engine)).strip("\n") + ";"
+            source_sql = f"CREATE TABLE {orig_name}"
+            replacement_sql = f"CREATE TABLE {temp_name}"
+            assert source_sql in new_sql, f"{source_sql} should be in new_sql"
+            new_sql = new_sql.replace(source_sql, replacement_sql)
+            # Step 4 - Create temp table
+            session.execute(text(new_sql))
+            column_names = ",".join([column.name for column in table_table.columns])
+            # Step 5 - Transfer content
+            sql = f"INSERT INTO {temp_name} SELECT {column_names} FROM {orig_name};"  # noqa: S608
+            session.execute(text(sql))
+            # Step 6 - Drop the original table
+            session.execute(text(f"DROP TABLE {orig_name}"))
+            # Step 7 - Rename the temp table
+            session.execute(text(f"ALTER TABLE {temp_name} RENAME TO {orig_name}"))
+            # Step 8 - Recreate indexes
+            for index in table_table.indexes:
+                index.create(session.connection())
+            # Step 9 - Recreate views (there are none)
+            # Step 10 - Check foreign keys
+            session.execute(text("PRAGMA foreign_key_check"))
+            # Step 11 - Commit transaction
+            session.commit()
+    except SQLAlchemyError:
+        _LOGGER.exception("Error recreating SQLite table %s", table_table.name)
+        # Swallow the exception since we do not want to ever raise
+        # an integrity error as it would cause the database
+        # to be discarded and recreated from scratch
+    else:
+        _LOGGER.warning("Rebuilding SQLite table %s finished", orig_name)
+    finally:
+        with session_scope(session=session_maker()) as session:
+            # Step 12 - Re-enable foreign keys
+            session.connection().execute(text("PRAGMA foreign_keys=ON"))
@@ -218,7 +218,9 @@ class SmhiWeather(WeatherEntity):
 
             data.append(
                 {
-                    ATTR_FORECAST_TIME: forecast.valid_time.isoformat(),
+                    ATTR_FORECAST_TIME: forecast.valid_time.replace(
+                        tzinfo=dt_util.UTC
+                    ).isoformat(),
                     ATTR_FORECAST_NATIVE_TEMP: forecast.temperature_max,
                     ATTR_FORECAST_NATIVE_TEMP_LOW: forecast.temperature_min,
                     ATTR_FORECAST_NATIVE_PRECIPITATION: forecast.total_precipitation,
@@ -6,5 +6,5 @@
   "documentation": "https://www.home-assistant.io/integrations/systemmonitor",
   "iot_class": "local_push",
   "loggers": ["psutil"],
-  "requirements": ["psutil-home-assistant==0.0.1", "psutil==5.9.8"]
+  "requirements": ["psutil-home-assistant==0.0.1", "psutil==6.0.0"]
 }
@@ -7,5 +7,5 @@
   "integration_type": "hub",
   "iot_class": "cloud_polling",
   "quality_scale": "platinum",
-  "requirements": ["tailscale==0.6.0"]
+  "requirements": ["tailscale==0.6.1"]
 }
@@ -42,6 +42,7 @@ class TessieBaseEntity(
         self.key = key
         self._attr_translation_key = key
         super().__init__(coordinator)
+        self._async_update_attrs()
 
     @property
     def _value(self) -> Any:
@@ -132,7 +133,6 @@ class TessieEnergyEntity(TessieBaseEntity):
         self._attr_device_info = data.device
 
         super().__init__(coordinator, key)
-        self._async_update_attrs()
 
 
 class TessieWallConnectorEntity(TessieBaseEntity):
@@ -284,7 +284,7 @@ CAMERA_SENSORS: tuple[ProtectBinaryEntityDescription, ...] = (
         name="Tracking: person",
         icon="mdi:walk",
         entity_category=EntityCategory.DIAGNOSTIC,
-        ufp_required_field="is_ptz",
+        ufp_required_field="feature_flags.is_ptz",
         ufp_value="is_person_tracking_enabled",
         ufp_perm=PermRequired.NO_WRITE,
     ),
@@ -319,7 +319,7 @@ CAMERA_SWITCHES: tuple[ProtectSwitchEntityDescription, ...] = (
         name="Tracking: person",
         icon="mdi:walk",
         entity_category=EntityCategory.CONFIG,
-        ufp_required_field="is_ptz",
+        ufp_required_field="feature_flags.is_ptz",
         ufp_value="is_person_tracking_enabled",
         ufp_set_method="set_person_track",
         ufp_perm=PermRequired.WRITE,
@@ -39,12 +39,13 @@ async def _validate_input(data):
     url = _make_url_from_data(data)
 
     upb = upb_lib.UpbPim({"url": url, "UPStartExportFile": file_path})
+
+    upb.connect(_connected_callback)
+
     if not upb.config_ok:
         _LOGGER.error("Missing or invalid UPB file: %s", file_path)
         raise InvalidUpbFile
 
-    upb.connect(_connected_callback)
-
     with suppress(TimeoutError):
         async with asyncio.timeout(VALIDATE_TIMEOUT):
             await connected_event.wait()
@@ -66,12 +66,16 @@ class VelbusCover(VelbusEntity, CoverEntity):
     @property
     def is_opening(self) -> bool:
        """Return if the cover is opening."""
-        return self._channel.is_opening()
+        if opening := self._channel.is_opening():
+            self._assumed_closed = False
+        return opening
 
     @property
     def is_closing(self) -> bool:
         """Return if the cover is closing."""
-        return self._channel.is_closing()
+        if closing := self._channel.is_closing():
+            self._assumed_closed = True
+        return closing
 
     @property
     def current_cover_position(self) -> int | None:
@@ -89,13 +93,11 @@ class VelbusCover(VelbusEntity, CoverEntity):
     async def async_open_cover(self, **kwargs: Any) -> None:
         """Open the cover."""
         await self._channel.open()
-        self._assumed_closed = False
 
     @api_call
     async def async_close_cover(self, **kwargs: Any) -> None:
         """Close the cover."""
         await self._channel.close()
-        self._assumed_closed = True
 
     @api_call
     async def async_stop_cover(self, **kwargs: Any) -> None:
@@ -24,5 +24,5 @@
   "dependencies": ["bluetooth_adapters"],
   "documentation": "https://www.home-assistant.io/integrations/xiaomi_ble",
   "iot_class": "local_push",
-  "requirements": ["xiaomi-ble==0.30.0"]
+  "requirements": ["xiaomi-ble==0.30.2"]
 }
@@ -24,7 +24,7 @@ if TYPE_CHECKING:
 APPLICATION_NAME: Final = "HomeAssistant"
 MAJOR_VERSION: Final = 2024
 MINOR_VERSION: Final = 7
-PATCH_VERSION: Final = "1"
+PATCH_VERSION: Final = "2"
 __short_version__: Final = f"{MAJOR_VERSION}.{MINOR_VERSION}"
 __version__: Final = f"{__short_version__}.{PATCH_VERSION}"
 REQUIRED_PYTHON_VER: Final[tuple[int, int, int]] = (3, 12, 0)
@@ -352,6 +352,7 @@ FLOWS = {
         "motionblinds_ble",
         "motioneye",
         "motionmount",
+        "mpd",
         "mqtt",
         "mullvad",
         "mutesync",
@@ -3814,7 +3814,7 @@
     "mpd": {
       "name": "Music Player Daemon (MPD)",
       "integration_type": "hub",
-      "config_flow": false,
+      "config_flow": true,
       "iot_class": "local_polling"
     },
     "mqtt": {
@@ -483,7 +483,7 @@ def _get_exposed_entities(
 
         if attributes := {
             attr_name: str(attr_value)
-            if isinstance(attr_value, (Enum, Decimal))
+            if isinstance(attr_value, (Enum, Decimal, int))
            else attr_value
             for attr_name, attr_value in state.attributes.items()
             if attr_name in interesting_attributes
@@ -102,6 +102,23 @@ BLOCKED_CUSTOM_INTEGRATIONS: dict[str, BlockedIntegration] = {
     "mydolphin_plus": BlockedIntegration(
         AwesomeVersion("1.0.13"), "crashes Home Assistant"
     ),
+    # Added in 2024.7.2 because of
+    # https://github.com/gcobb321/icloud3/issues/349
+    # Note: Current version 3.0.5.2, the fixed version is a guesstimate,
+    # as no solution is available at time of writing.
+    "icloud3": BlockedIntegration(
+        AwesomeVersion("3.0.5.3"), "prevents recorder from working"
+    ),
+    # Added in 2024.7.2 because of
+    # https://github.com/custom-components/places/issues/289
+    "places": BlockedIntegration(
+        AwesomeVersion("2.7.1"), "prevents recorder from working"
+    ),
+    # Added in 2024.7.2 because of
+    # https://github.com/enkama/hass-variables/issues/120
+    "variable": BlockedIntegration(
+        AwesomeVersion("3.4.4"), "prevents recorder from working"
+    ),
 }
 
 DATA_COMPONENTS: HassKey[dict[str, ModuleType | ComponentProtocol]] = HassKey(
@@ -32,7 +32,7 @@ habluetooth==3.1.3
 hass-nabucasa==0.81.1
 hassil==1.7.1
 home-assistant-bluetooth==1.12.2
-home-assistant-frontend==20240705.0
+home-assistant-frontend==20240710.0
 home-assistant-intents==2024.7.3
 httpx==0.27.0
 ifaddr==0.2.0
@@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta"
 
 [project]
 name = "homeassistant"
-version = "2024.7.1"
+version = "2024.7.2"
 license = {text = "Apache-2.0"}
 description = "Open-source home automation platform running on Python 3."
 readme = "README.rst"
@@ -449,7 +449,7 @@ androidtvremote2==0.1.1
 anel-pwrctrl-homeassistant==0.0.1.dev2
 
 # homeassistant.components.anova
-anova-wifi==0.15.0
+anova-wifi==0.17.0
 
 # homeassistant.components.anthemav
 anthemav==1.4.1
@@ -1090,7 +1090,7 @@ hole==0.8.0
 holidays==0.52
 
 # homeassistant.components.frontend
-home-assistant-frontend==20240705.0
+home-assistant-frontend==20240710.0
 
 # homeassistant.components.conversation
 home-assistant-intents==2024.7.3
@@ -1179,7 +1179,7 @@ isal==1.6.1
 ismartgate==5.0.1
 
 # homeassistant.components.abode
-jaraco.abode==5.1.2
+jaraco.abode==5.2.1
 
 # homeassistant.components.jellyfin
 jellyfin-apiclient-python==1.9.2
@@ -1598,7 +1598,7 @@ proxmoxer==2.0.1
 psutil-home-assistant==0.0.1
 
 # homeassistant.components.systemmonitor
-psutil==5.9.8
+psutil==6.0.0
 
 # homeassistant.components.pulseaudio_loopback
 pulsectl==23.5.2
@@ -1827,7 +1827,7 @@ pyeiscp==0.0.7
 pyemoncms==0.0.7
 
 # homeassistant.components.enphase_envoy
-pyenphase==1.20.3
+pyenphase==1.20.6
 
 # homeassistant.components.envisalink
 pyenvisalink==4.7
@@ -2004,7 +2004,7 @@ pymitv==1.4.3
 pymochad==0.2.0
 
 # homeassistant.components.modbus
-pymodbus==3.6.8
+pymodbus==3.6.9
 
 # homeassistant.components.monoprice
 pymonoprice==0.4
@@ -2683,7 +2683,7 @@ systembridgeconnector==4.0.3
 systembridgemodels==4.0.4
 
 # homeassistant.components.tailscale
-tailscale==0.6.0
+tailscale==0.6.1
 
 # homeassistant.components.tank_utility
 tank-utility==1.5.0
@@ -2827,7 +2827,7 @@ vacuum-map-parser-roborock==0.1.2
 vallox-websocket-api==5.3.0
 
 # homeassistant.components.rdw
-vehicle==2.2.1
+vehicle==2.2.2
 
 # homeassistant.components.velbus
 velbus-aio==2024.7.5
@@ -2906,7 +2906,7 @@ wyoming==1.5.4
 xbox-webapi==2.0.11
 
 # homeassistant.components.xiaomi_ble
-xiaomi-ble==0.30.0
+xiaomi-ble==0.30.2
 
 # homeassistant.components.knx
 xknx==2.12.2
@@ -2933,7 +2933,7 @@ yalesmartalarmclient==0.3.9
 yalexs-ble==2.4.3
 
 # homeassistant.components.august
-yalexs==6.4.1
+yalexs==6.4.2
 
 # homeassistant.components.yeelight
 yeelight==0.7.14
@@ -413,7 +413,7 @@ androidtv[async]==0.0.73
 androidtvremote2==0.1.1
 
 # homeassistant.components.anova
-anova-wifi==0.15.0
+anova-wifi==0.17.0
 
 # homeassistant.components.anthemav
 anthemav==1.4.1
@@ -895,7 +895,7 @@ hole==0.8.0
 holidays==0.52
 
 # homeassistant.components.frontend
-home-assistant-frontend==20240705.0
+home-assistant-frontend==20240710.0
 
 # homeassistant.components.conversation
 home-assistant-intents==2024.7.3
@@ -966,7 +966,7 @@ isal==1.6.1
 ismartgate==5.0.1
 
 # homeassistant.components.abode
-jaraco.abode==5.1.2
+jaraco.abode==5.2.1
 
 # homeassistant.components.jellyfin
 jellyfin-apiclient-python==1.9.2
@@ -1275,7 +1275,7 @@ prometheus-client==0.17.1
 psutil-home-assistant==0.0.1
 
 # homeassistant.components.systemmonitor
-psutil==5.9.8
+psutil==6.0.0
 
 # homeassistant.components.androidtv
 pure-python-adb[async]==0.3.0.dev0
@@ -1435,7 +1435,7 @@ pyefergy==22.5.0
 pyegps==0.2.5
 
 # homeassistant.components.enphase_envoy
-pyenphase==1.20.3
+pyenphase==1.20.6
 
 # homeassistant.components.everlights
 pyeverlights==0.1.0
@@ -1576,7 +1576,7 @@ pymeteoclimatic==0.1.0
 pymochad==0.2.0
 
 # homeassistant.components.modbus
-pymodbus==3.6.8
+pymodbus==3.6.9
 
 # homeassistant.components.monoprice
 pymonoprice==0.4
@@ -2096,7 +2096,7 @@ systembridgeconnector==4.0.3
 systembridgemodels==4.0.4
 
 # homeassistant.components.tailscale
-tailscale==0.6.0
+tailscale==0.6.1
 
 # homeassistant.components.tellduslive
 tellduslive==0.10.11
@@ -2201,7 +2201,7 @@ vacuum-map-parser-roborock==0.1.2
 vallox-websocket-api==5.3.0
 
 # homeassistant.components.rdw
-vehicle==2.2.1
+vehicle==2.2.2
 
 # homeassistant.components.velbus
 velbus-aio==2024.7.5
@@ -2268,7 +2268,7 @@ wyoming==1.5.4
 xbox-webapi==2.0.11
 
 # homeassistant.components.xiaomi_ble
-xiaomi-ble==0.30.0
+xiaomi-ble==0.30.2
 
 # homeassistant.components.knx
 xknx==2.12.2
@@ -2292,7 +2292,7 @@ yalesmartalarmclient==0.3.9
 yalexs-ble==2.4.3
 
 # homeassistant.components.august
-yalexs==6.4.1
+yalexs==6.4.2
 
 # homeassistant.components.yeelight
 yeelight==0.7.14
@@ -36,7 +36,7 @@
     "currentFirmwareVersion": "2.27.0",
     "battery": {},
     "batteryLevel": "Low",
-    "batteryRaw": 170
+    "batteryRaw": 128
   },
   "OfflineKeys": {
     "created": [],
@@ -88,7 +88,7 @@ async def test_create_lock_with_linked_keypad(
     assert entry.unique_id == "A6697750D607098BAE8D6BAA11EF8063_device_battery"
 
     state = hass.states.get("sensor.front_door_lock_keypad_battery")
-    assert state.state == "60"
+    assert state.state == "62"
     assert state.attributes[ATTR_UNIT_OF_MEASUREMENT] == PERCENTAGE
     entry = entity_registry.async_get("sensor.front_door_lock_keypad_battery")
     assert entry
@@ -709,6 +709,68 @@ async def test_no_warning_integration_has_migrated(
     )
 
 
+async def test_no_warning_integration_implement_feature_flags(
+    hass: HomeAssistant, caplog: pytest.LogCaptureFixture, config_flow_fixture: None
+) -> None:
+    """Test no warning when integration uses the correct feature flags."""
+
+    class MockClimateEntityTest(MockClimateEntity):
+        """Mock Climate device."""
+
+        _attr_supported_features = (
+            ClimateEntityFeature.FAN_MODE
+            | ClimateEntityFeature.PRESET_MODE
+            | ClimateEntityFeature.SWING_MODE
+            | ClimateEntityFeature.TURN_OFF
+            | ClimateEntityFeature.TURN_ON
+        )
+
+    async def async_setup_entry_init(
+        hass: HomeAssistant, config_entry: ConfigEntry
+    ) -> bool:
+        """Set up test config entry."""
+        await hass.config_entries.async_forward_entry_setups(config_entry, [DOMAIN])
+        return True
+
+    async def async_setup_entry_climate_platform(
+        hass: HomeAssistant,
+        config_entry: ConfigEntry,
+        async_add_entities: AddEntitiesCallback,
+    ) -> None:
+        """Set up test climate platform via config entry."""
+        async_add_entities(
+            [MockClimateEntityTest(name="test", entity_id="climate.test")]
+        )
+
+    mock_integration(
+        hass,
+        MockModule(
+            "test",
+            async_setup_entry=async_setup_entry_init,
+        ),
+        built_in=False,
+    )
+    mock_platform(
+        hass,
+        "test.climate",
+        MockPlatform(async_setup_entry=async_setup_entry_climate_platform),
+    )
+
+    with patch.object(
+        MockClimateEntityTest, "__module__", "tests.custom_components.climate.test_init"
+    ):
+        config_entry = MockConfigEntry(domain="test")
+        config_entry.add_to_hass(hass)
+        assert await hass.config_entries.async_setup(config_entry.entry_id)
+        await hass.async_block_till_done()
+
+    state = hass.states.get("climate.test")
+    assert state is not None
+
+    assert "does not set ClimateEntityFeature" not in caplog.text
+    assert "implements HVACMode(s):" not in caplog.text
+
+
 async def test_turn_on_off_toggle(hass: HomeAssistant) -> None:
     """Test turn_on/turn_off/toggle methods."""
 
@@ -83,16 +83,6 @@ async def test_user_errors(
     assert result["step_id"] == "user"
     assert result["errors"] == {"base": "url_error"}
 
-    # no feed entries returned
-    feedparser.side_effect = None
-    feedparser.return_value = None
-    result = await hass.config_entries.flow.async_configure(
-        result["flow_id"], user_input={CONF_URL: URL}
-    )
-    assert result["type"] is FlowResultType.FORM
-    assert result["step_id"] == "user"
-    assert result["errors"] == {"base": "no_feed_entries"}
-
     # success
     feedparser.side_effect = None
     feedparser.return_value = feed_one_event
@@ -141,40 +131,25 @@ async def test_import(
     assert issue_registry.async_get_issue(HA_DOMAIN, "deprecated_yaml_feedreader")
 
 
-@pytest.mark.parametrize(
-    ("side_effect", "return_value", "expected_issue_id"),
-    [
-        (
-            urllib.error.URLError("Test"),
-            None,
-            "import_yaml_error_feedreader_url_error_http_some_rss_local_rss_feed_xml",
-        ),
-        (
-            None,
-            None,
-            "import_yaml_error_feedreader_no_feed_entries_http_some_rss_local_rss_feed_xml",
-        ),
-    ],
-)
 async def test_import_errors(
     hass: HomeAssistant,
     issue_registry: ir.IssueRegistry,
     feedparser,
     setup_entry,
     feed_one_event,
-    side_effect,
-    return_value,
-    expected_issue_id,
 ) -> None:
     """Test starting an import flow which results in an URL error."""
     config_entries = hass.config_entries.async_entries(DOMAIN)
     assert not config_entries
 
     # raise URLError
-    feedparser.side_effect = side_effect
-    feedparser.return_value = return_value
+    feedparser.side_effect = urllib.error.URLError("Test")
+    feedparser.return_value = None
     assert await async_setup_component(hass, DOMAIN, {DOMAIN: {CONF_URLS: [URL]}})
-    assert issue_registry.async_get_issue(DOMAIN, expected_issue_id)
+    assert issue_registry.async_get_issue(
+        DOMAIN,
+        "import_yaml_error_feedreader_url_error_http_some_rss_local_rss_feed_xml",
+    )
 
 
 async def test_reconfigure(hass: HomeAssistant, feedparser) -> None:
@@ -248,19 +223,6 @@ async def test_reconfigure_errors(
     assert result["step_id"] == "reconfigure_confirm"
     assert result["errors"] == {"base": "url_error"}
 
-    # no feed entries returned
-    feedparser.side_effect = None
-    feedparser.return_value = None
-    result = await hass.config_entries.flow.async_configure(
-        result["flow_id"],
-        user_input={
-            CONF_URL: "http://other.rss.local/rss_feed.xml",
-        },
-    )
-    assert result["type"] is FlowResultType.FORM
-    assert result["step_id"] == "reconfigure_confirm"
-    assert result["errors"] == {"base": "no_feed_entries"}
-
     # success
     feedparser.side_effect = None
     feedparser.return_value = feed_one_event
@@ -2,6 +2,7 @@
 
 from unittest.mock import MagicMock
 
+from fullykiosk import FullyKioskError
 import pytest
 
 from homeassistant.components.camera import async_get_image
@@ -41,6 +42,12 @@ async def test_camera(
     assert mock_fully_kiosk.getCamshot.call_count == 1
     assert image.content == b"image_bytes"
 
+    fully_kiosk_error = FullyKioskError("error", "status")
+    mock_fully_kiosk.getCamshot.side_effect = fully_kiosk_error
+    with pytest.raises(HomeAssistantError) as error:
+        await async_get_image(hass, entity_camera)
+    assert error.value.args[0] == fully_kiosk_error
+
     mock_fully_kiosk.getSettings.return_value = {"motionDetection": False}
     await hass.services.async_call(
         "camera",
@ -469,7 +469,7 @@
|
|||||||
"1/47/65531": [
|
"1/47/65531": [
|
||||||
0, 1, 2, 14, 15, 16, 19, 65528, 65529, 65530, 65531, 65532, 65533
|
0, 1, 2, 14, 15, 16, 19, 65528, 65529, 65530, 65531, 65532, 65533
|
||||||
],
|
],
|
||||||
"1/257/0": 1,
|
"1/257/0": 0,
|
||||||
"1/257/1": 0,
|
"1/257/1": 0,
|
||||||
"1/257/2": true,
|
"1/257/2": true,
|
||||||
"1/257/3": 1,
|
"1/257/3": 1,
|
||||||
@@ -8,11 +8,10 @@ import pytest

 from homeassistant.components.lock import (
     STATE_LOCKED,
-    STATE_OPEN,
     STATE_UNLOCKED,
     LockEntityFeature,
 )
-from homeassistant.const import ATTR_CODE, STATE_LOCKING, STATE_UNKNOWN
+from homeassistant.const import ATTR_CODE, STATE_LOCKING, STATE_OPENING, STATE_UNKNOWN
 from homeassistant.core import HomeAssistant
 from homeassistant.exceptions import ServiceValidationError
 import homeassistant.helpers.entity_registry as er
@@ -64,6 +63,7 @@ async def test_lock(
     )
     matter_client.send_device_command.reset_mock()

+    await hass.async_block_till_done()
     state = hass.states.get("lock.mock_door_lock_lock")
     assert state
     assert state.state == STATE_LOCKING
@@ -208,9 +208,14 @@ async def test_lock_with_unbolt(
         timed_request_timeout_ms=1000,
     )

+    await hass.async_block_till_done()
+    state = hass.states.get("lock.mock_door_lock_lock")
+    assert state
+    assert state.state == STATE_OPENING
+
     set_node_attribute(door_lock_with_unbolt, 1, 257, 3, 0)
     await trigger_subscription_callback(hass, matter_client)

     state = hass.states.get("lock.mock_door_lock_lock")
     assert state
-    assert state.state == STATE_OPEN
+    assert state.state == STATE_LOCKED
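
For context on the fixture tweak above ("1/257/0": 1 -> 0): endpoint 1,
cluster 257 (0x0101, Door Lock), attribute 0 is LockState. A hedged sketch of
the values involved; the enum below is transcribed from the Matter
specification for illustration, not imported from the test's libraries:

    from enum import IntEnum

    class DlLockState(IntEnum):
        """Matter Door Lock cluster LockState values, per the Matter spec."""

        NOT_FULLY_LOCKED = 0
        LOCKED = 1
        UNLOCKED = 2
        UNLATCHED = 3

    # The fixture now starts the mock lock in NotFullyLocked instead of
    # Locked before the lock/unbolt commands run.
    assert DlLockState(0) is DlLockState.NOT_FULLY_LOCKED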
@@ -252,12 +252,12 @@
   StateSnapshot({
     'attributes': ReadOnlyDict({
       'all_day': True,
-      'description': "Een traybake is eigenlijk altijd een goed idee. Deze zoete aardappel curry traybake dus ook. Waarom? Omdat je alleen maar wat groenten - en in dit geval kip - op een bakplaat (traybake dus) legt, hier wat kruiden aan toevoegt en deze in de oven schuift. Ideaal dus als je geen zin hebt om lang in de keuken te staan. Maar gewoon lekker op de bank wil ploffen om te wachten tot de oven klaar is. Joe! That\\'s what we like. Deze zoete aardappel curry traybake bevat behalve zoete aardappel en curry ook kikkererwten, kippendijfilet en bloemkoolroosjes. Je gebruikt yoghurt en limoen als een soort dressing. En je serveert deze heerlijke traybake met naanbrood. Je kunt natuurljk ook voor deze traybake met chipolataworstjes gaan. Wil je graag meer ovengerechten? Dan moet je eigenlijk even kijken naar onze Ovenbijbel. Onmisbaar in je keuken! We willen je deze zoete aardappelstamppot met prei ook niet onthouden. Megalekker bordje comfortfood als je \\'t ons vraagt.",
-      'end_time': '2024-01-23 00:00:00',
+      'description': 'Dineren met de boys',
+      'end_time': '2024-01-22 00:00:00',
       'friendly_name': 'Mealie Dinner',
       'location': '',
-      'message': 'Zoete aardappel curry traybake',
-      'start_time': '2024-01-22 00:00:00',
+      'message': 'Aquavite',
+      'start_time': '2024-01-21 00:00:00',
     }),
     'context': <ANY>,
     'entity_id': 'calendar.mealie_dinner',
@@ -304,12 +304,12 @@
   StateSnapshot({
     'attributes': ReadOnlyDict({
       'all_day': True,
-      'description': 'Te explicamos paso a paso, de manera sencilla, la elaboración de la receta de pollo al curry con leche de coco en 10 minutos. Ingredientes, tiempo de...',
-      'end_time': '2024-01-24 00:00:00',
+      'description': 'This All-American beef stew recipe includes tender beef coated in a rich, intense sauce and vegetables that bring complementary texture and flavor.',
+      'end_time': '2024-01-23 00:00:00',
       'friendly_name': 'Mealie Lunch',
       'location': '',
-      'message': 'Receta de pollo al curry en 10 minutos (con vídeo incluido)',
-      'start_time': '2024-01-23 00:00:00',
+      'message': 'All-American Beef Stew Recipe',
+      'start_time': '2024-01-22 00:00:00',
     }),
     'context': <ANY>,
     'entity_id': 'calendar.mealie_lunch',
@@ -10,7 +10,7 @@ from nacl.secret import SecretBox
 import pytest

 from homeassistant.components.camera import CameraEntityFeature
-from homeassistant.components.mobile_app.const import CONF_SECRET, DOMAIN
+from homeassistant.components.mobile_app.const import CONF_SECRET, DATA_DEVICES, DOMAIN
 from homeassistant.components.tag import EVENT_TAG_SCANNED
 from homeassistant.components.zone import DOMAIN as ZONE_DOMAIN
 from homeassistant.const import (
@@ -243,6 +243,7 @@ async def test_webhook_handle_get_config(
     """Test that we can get config properly."""
     webhook_id = create_registrations[1]["webhook_id"]
     webhook_url = f"/api/webhook/{webhook_id}"
+    device: dr.DeviceEntry = hass.data[DOMAIN][DATA_DEVICES][webhook_id]

     # Create two entities
     for sensor in (
@@ -280,6 +281,7 @@ async def test_webhook_handle_get_config(
         "latitude": hass_config["latitude"],
         "longitude": hass_config["longitude"],
         "elevation": hass_config["elevation"],
+        "hass_device_id": device.id,
         "unit_system": hass_config["unit_system"],
         "location_name": hass_config["location_name"],
         "time_zone": hass_config["time_zone"],
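
The get_config webhook response now carries the device registry id of the
registration. A hedged sketch of how a companion-app client might read it;
the base URL and webhook id are placeholders:

    import asyncio

    import aiohttp

    async def fetch_config(base_url: str, webhook_id: str) -> dict:
        """POST a get_config message to the mobile_app webhook."""
        async with aiohttp.ClientSession() as session:
            async with session.post(
                f"{base_url}/api/webhook/{webhook_id}",
                json={"type": "get_config"},
            ) as resp:
                return await resp.json()

    # config = asyncio.run(fetch_config("http://localhost:8123", "<webhook_id>"))
    # config["hass_device_id"] identifies this registration's device in HA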
@@ -16,7 +16,7 @@ from sqlalchemy.exc import (
     ProgrammingError,
     SQLAlchemyError,
 )
-from sqlalchemy.orm import Session
+from sqlalchemy.orm import Session, scoped_session, sessionmaker
 from sqlalchemy.pool import StaticPool

 from homeassistant.bootstrap import async_setup_component
@@ -24,6 +24,7 @@ from homeassistant.components import persistent_notification as pn, recorder
 from homeassistant.components.recorder import db_schema, migration
 from homeassistant.components.recorder.db_schema import (
     SCHEMA_VERSION,
+    Events,
     RecorderRuns,
     States,
 )
@@ -633,3 +634,89 @@ def test_raise_if_exception_missing_empty_cause_str() -> None:

     with pytest.raises(ProgrammingError):
         migration.raise_if_exception_missing_str(programming_exc, ["not present"])
+
+
+def test_rebuild_sqlite_states_table(recorder_db_url: str) -> None:
+    """Test that we can rebuild the states table in SQLite."""
+    if not recorder_db_url.startswith("sqlite://"):
+        # This test is specific for SQLite
+        return
+
+    engine = create_engine(recorder_db_url)
+    session_maker = scoped_session(sessionmaker(bind=engine, future=True))
+    with session_scope(session=session_maker()) as session:
+        db_schema.Base.metadata.create_all(engine)
+    with session_scope(session=session_maker()) as session:
+        session.add(States(state="on"))
+        session.commit()
+
+    migration.rebuild_sqlite_table(session_maker, engine, States)
+
+    with session_scope(session=session_maker()) as session:
+        assert session.query(States).count() == 1
+        assert session.query(States).first().state == "on"
+
+    engine.dispose()
+
+
+def test_rebuild_sqlite_states_table_missing_fails(
+    recorder_db_url: str, caplog: pytest.LogCaptureFixture
+) -> None:
+    """Test handling missing states table when attempting rebuild."""
+    if not recorder_db_url.startswith("sqlite://"):
+        # This test is specific for SQLite
+        return
+
+    engine = create_engine(recorder_db_url)
+    session_maker = scoped_session(sessionmaker(bind=engine, future=True))
+    with session_scope(session=session_maker()) as session:
+        db_schema.Base.metadata.create_all(engine)
+
+    with session_scope(session=session_maker()) as session:
+        session.add(Events(event_type="state_changed", event_data="{}"))
+        session.connection().execute(text("DROP TABLE states"))
+        session.commit()
+
+    migration.rebuild_sqlite_table(session_maker, engine, States)
+    assert "Error recreating SQLite table states" in caplog.text
+    caplog.clear()
+
+    # Now rebuild the events table to make sure the database did not
+    # get corrupted
+    migration.rebuild_sqlite_table(session_maker, engine, Events)
+
+    with session_scope(session=session_maker()) as session:
+        assert session.query(Events).count() == 1
+        assert session.query(Events).first().event_type == "state_changed"
+        assert session.query(Events).first().event_data == "{}"
+
+    engine.dispose()
+
+
+def test_rebuild_sqlite_states_table_extra_columns(
+    recorder_db_url: str, caplog: pytest.LogCaptureFixture
+) -> None:
+    """Test handling extra columns when rebuilding the states table."""
+    if not recorder_db_url.startswith("sqlite://"):
+        # This test is specific for SQLite
+        return
+
+    engine = create_engine(recorder_db_url)
+    session_maker = scoped_session(sessionmaker(bind=engine, future=True))
+    with session_scope(session=session_maker()) as session:
+        db_schema.Base.metadata.create_all(engine)
+    with session_scope(session=session_maker()) as session:
+        session.add(States(state="on"))
+        session.commit()
+        session.connection().execute(
+            text("ALTER TABLE states ADD COLUMN extra_column TEXT")
+        )
+
+    migration.rebuild_sqlite_table(session_maker, engine, States)
+    assert "Error recreating SQLite table states" not in caplog.text
+
+    with session_scope(session=session_maker()) as session:
+        assert session.query(States).count() == 1
+        assert session.query(States).first().state == "on"
+
+    engine.dispose()
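
These tests exercise migration.rebuild_sqlite_table. A hedged, self-contained
Python sketch of the underlying strategy (SQLite cannot drop a foreign key in
place, so the table is recreated and the rows copied over); the table and
column names are illustrative, not the migration's actual SQL:

    import sqlite3

    con = sqlite3.connect(":memory:")
    con.executescript(
        """
        CREATE TABLE states (
            state TEXT,
            event_id INTEGER REFERENCES events (event_id)
        );
        INSERT INTO states (state) VALUES ('on');
        """
    )
    # Recreate without the foreign key, copy the rows, then swap the names.
    con.executescript(
        """
        CREATE TABLE states_new (state TEXT, event_id INTEGER);
        INSERT INTO states_new SELECT * FROM states;
        DROP TABLE states;
        ALTER TABLE states_new RENAME TO states;
        """
    )
    assert con.execute("SELECT state FROM states").fetchone() == ("on",)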
@@ -211,10 +211,9 @@ async def test_migrate_times(caplog: pytest.LogCaptureFixture, tmp_path: Path) -
     )
     states_index_names = {index["name"] for index in states_indexes}

-    # sqlite does not support dropping foreign keys so the
-    # ix_states_event_id index is not dropped in this case
-    # but use_legacy_events_index is still False
-    assert "ix_states_event_id" in states_index_names
+    # sqlite does not support dropping foreign keys so we had to
+    # create a new table and copy the data over
+    assert "ix_states_event_id" not in states_index_names

     assert recorder.get_instance(hass).use_legacy_events_index is False

@@ -342,8 +341,6 @@ async def test_migrate_can_resume_entity_id_post_migration(
     await hass.async_stop()
     await hass.async_block_till_done()

-    assert "ix_states_entity_id_last_updated_ts" in states_index_names
-
     async with async_test_home_assistant() as hass:
         recorder_helper.async_initialize_recorder(hass)
         assert await async_setup_component(
@@ -6,7 +6,7 @@
     dict({
       'cloud_coverage': 100,
      'condition': 'clear-night',
-      'datetime': '2023-08-08T00:00:00',
+      'datetime': '2023-08-08T00:00:00+00:00',
       'humidity': 100,
       'precipitation': 0.0,
       'pressure': 992.0,
@@ -19,7 +19,7 @@
     dict({
       'cloud_coverage': 100,
       'condition': 'clear-night',
-      'datetime': '2023-08-08T01:00:00',
+      'datetime': '2023-08-08T01:00:00+00:00',
       'humidity': 100,
       'precipitation': 0.0,
       'pressure': 992.0,
@@ -32,7 +32,7 @@
     dict({
       'cloud_coverage': 100,
       'condition': 'clear-night',
-      'datetime': '2023-08-08T02:00:00',
+      'datetime': '2023-08-08T02:00:00+00:00',
       'humidity': 97,
       'precipitation': 0.0,
       'pressure': 992.0,
@@ -45,7 +45,7 @@
     dict({
       'cloud_coverage': 100,
       'condition': 'sunny',
-      'datetime': '2023-08-08T03:00:00',
+      'datetime': '2023-08-08T03:00:00+00:00',
       'humidity': 96,
       'precipitation': 0.0,
       'pressure': 991.0,
@@ -223,7 +223,7 @@
     dict({
       'cloud_coverage': 100,
       'condition': 'cloudy',
-      'datetime': '2023-08-07T12:00:00',
+      'datetime': '2023-08-07T12:00:00+00:00',
       'humidity': 96,
       'precipitation': 0.0,
       'pressure': 991.0,
@@ -236,7 +236,7 @@
     dict({
       'cloud_coverage': 100,
       'condition': 'rainy',
-      'datetime': '2023-08-08T12:00:00',
+      'datetime': '2023-08-08T12:00:00+00:00',
       'humidity': 97,
       'precipitation': 10.6,
       'pressure': 984.0,
@@ -249,7 +249,7 @@
     dict({
       'cloud_coverage': 100,
       'condition': 'rainy',
-      'datetime': '2023-08-09T12:00:00',
+      'datetime': '2023-08-09T12:00:00+00:00',
       'humidity': 95,
       'precipitation': 6.3,
       'pressure': 1001.0,
@@ -262,7 +262,7 @@
     dict({
       'cloud_coverage': 100,
       'condition': 'cloudy',
-      'datetime': '2023-08-10T12:00:00',
+      'datetime': '2023-08-10T12:00:00+00:00',
       'humidity': 75,
       'precipitation': 4.8,
       'pressure': 1011.0,
@@ -275,7 +275,7 @@
     dict({
       'cloud_coverage': 100,
       'condition': 'cloudy',
-      'datetime': '2023-08-11T12:00:00',
+      'datetime': '2023-08-11T12:00:00+00:00',
       'humidity': 69,
       'precipitation': 0.6,
       'pressure': 1015.0,
@@ -288,7 +288,7 @@
     dict({
       'cloud_coverage': 100,
       'condition': 'cloudy',
-      'datetime': '2023-08-12T12:00:00',
+      'datetime': '2023-08-12T12:00:00+00:00',
       'humidity': 82,
       'precipitation': 0.0,
       'pressure': 1014.0,
@@ -301,7 +301,7 @@
     dict({
      'cloud_coverage': 75,
       'condition': 'partlycloudy',
-      'datetime': '2023-08-13T12:00:00',
+      'datetime': '2023-08-13T12:00:00+00:00',
       'humidity': 59,
       'precipitation': 0.0,
       'pressure': 1013.0,
@@ -314,7 +314,7 @@
     dict({
       'cloud_coverage': 100,
       'condition': 'partlycloudy',
-      'datetime': '2023-08-14T12:00:00',
+      'datetime': '2023-08-14T12:00:00+00:00',
       'humidity': 56,
       'precipitation': 0.0,
       'pressure': 1015.0,
@@ -327,7 +327,7 @@
     dict({
       'cloud_coverage': 88,
       'condition': 'partlycloudy',
-      'datetime': '2023-08-15T12:00:00',
+      'datetime': '2023-08-15T12:00:00+00:00',
       'humidity': 64,
       'precipitation': 3.6,
       'pressure': 1014.0,
@@ -340,7 +340,7 @@
     dict({
       'cloud_coverage': 75,
       'condition': 'partlycloudy',
-      'datetime': '2023-08-16T12:00:00',
+      'datetime': '2023-08-16T12:00:00+00:00',
       'humidity': 61,
       'precipitation': 2.4,
       'pressure': 1014.0,
@@ -358,7 +358,7 @@
     dict({
       'cloud_coverage': 100,
       'condition': 'cloudy',
-      'datetime': '2023-08-07T12:00:00',
+      'datetime': '2023-08-07T12:00:00+00:00',
       'humidity': 96,
       'precipitation': 0.0,
       'pressure': 991.0,
@@ -373,7 +373,7 @@
     dict({
       'cloud_coverage': 75,
       'condition': 'partlycloudy',
-      'datetime': '2023-08-13T12:00:00',
+      'datetime': '2023-08-13T12:00:00+00:00',
       'humidity': 59,
       'precipitation': 0.0,
       'pressure': 1013.0,
@@ -388,7 +388,7 @@
     dict({
       'cloud_coverage': 100,
       'condition': 'fog',
-      'datetime': '2023-08-07T09:00:00',
+      'datetime': '2023-08-07T09:00:00+00:00',
       'humidity': 100,
       'precipitation': 0.0,
       'pressure': 992.0,
@@ -403,7 +403,7 @@
     dict({
       'cloud_coverage': 100,
       'condition': 'cloudy',
-      'datetime': '2023-08-07T15:00:00',
+      'datetime': '2023-08-07T15:00:00+00:00',
       'humidity': 89,
       'precipitation': 0.0,
       'pressure': 991.0,
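
Every hunk above is the same mechanical change: the forecast datetimes in
these snapshots are now timezone-aware. A minimal illustration of why the
rendered strings gain the +00:00 suffix:

    from datetime import datetime, timezone

    naive = datetime(2023, 8, 8, 0, 0, 0)
    aware = naive.replace(tzinfo=timezone.utc)

    print(naive.isoformat())  # 2023-08-08T00:00:00
    print(aware.isoformat())  # 2023-08-08T00:00:00+00:00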
@@ -174,11 +174,11 @@ def mock_psutil(mock_process: list[MockProcess]) -> Generator:
         "cpu0-thermal": [shwtemp("cpu0-thermal", 50.0, 60.0, 70.0)]
     }
     mock_psutil.disk_partitions.return_value = [
-        sdiskpart("test", "/", "ext4", "", 1, 1),
-        sdiskpart("test2", "/media/share", "ext4", "", 1, 1),
-        sdiskpart("test3", "/incorrect", "", "", 1, 1),
-        sdiskpart("hosts", "/etc/hosts", "bind", "", 1, 1),
-        sdiskpart("proc", "/proc/run", "proc", "", 1, 1),
+        sdiskpart("test", "/", "ext4", ""),
+        sdiskpart("test2", "/media/share", "ext4", ""),
+        sdiskpart("test3", "/incorrect", "", ""),
+        sdiskpart("hosts", "/etc/hosts", "bind", ""),
+        sdiskpart("proc", "/proc/run", "proc", ""),
     ]
     mock_psutil.boot_time.return_value = 1708786800.0
     mock_psutil.NoSuchProcess = NoSuchProcess
@@ -50,21 +50,19 @@ async def test_disk_util(
     """Test the disk failures."""

     mock_psutil.psutil.disk_partitions.return_value = [
-        sdiskpart("test", "/", "ext4", "", 1, 1),  # Should be ok
-        sdiskpart("test2", "/media/share", "ext4", "", 1, 1),  # Should be ok
-        sdiskpart("test3", "/incorrect", "", "", 1, 1),  # Should be skipped as no type
+        sdiskpart("test", "/", "ext4", ""),  # Should be ok
+        sdiskpart("test2", "/media/share", "ext4", ""),  # Should be ok
+        sdiskpart("test3", "/incorrect", "", ""),  # Should be skipped as no type
         sdiskpart(
-            "proc", "/proc/run", "proc", "", 1, 1
+            "proc", "/proc/run", "proc", ""
         ),  # Should be skipped as in skipped disk types
         sdiskpart(
             "test4",
             "/tmpfs/",  # noqa: S108
             "tmpfs",
             "",
-            1,
-            1,
         ),  # Should be skipped as in skipped disk types
-        sdiskpart("test5", "E:", "cd", "cdrom", 1, 1),  # Should be skipped as cdrom
+        sdiskpart("test5", "E:", "cd", "cdrom"),  # Should be skipped as cdrom
     ]

     mock_config_entry.add_to_hass(hass)
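
The dropped trailing "1, 1" arguments track a change in psutil's sdiskpart
named tuple, which appears to be down to four fields without the
maxfile/maxpath values. A hedged sketch using a plain namedtuple so the
example runs without psutil installed:

    from collections import namedtuple

    # Mirrors the newer psutil shape: sdiskpart(device, mountpoint, fstype, opts)
    sdiskpart = namedtuple("sdiskpart", ["device", "mountpoint", "fstype", "opts"])

    part = sdiskpart("test", "/", "ext4", "")
    print(part.mountpoint, part.fstype or "<no type>")  # / ext4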
@@ -2120,7 +2120,7 @@
     'last_changed': <ANY>,
     'last_reported': <ANY>,
     'last_updated': <ANY>,
-    'state': 'unknown',
+    'state': '0.0',
   })
 # ---
 # name: test_sensors[sensor.wall_connector_power_2-entry]
@@ -2177,7 +2177,7 @@
     'last_changed': <ANY>,
     'last_reported': <ANY>,
     'last_updated': <ANY>,
-    'state': 'unknown',
+    'state': '0.0',
   })
 # ---
 # name: test_sensors[sensor.wall_connector_state-entry]
@@ -2249,7 +2249,7 @@
     'last_changed': <ANY>,
     'last_reported': <ANY>,
     'last_updated': <ANY>,
-    'state': 'unknown',
+    'state': 'disconnected',
   })
 # ---
 # name: test_sensors[sensor.wall_connector_state_2-entry]
@@ -2321,7 +2321,7 @@
     'last_changed': <ANY>,
     'last_reported': <ANY>,
     'last_updated': <ANY>,
-    'state': 'unknown',
+    'state': 'disconnected',
   })
 # ---
 # name: test_sensors[sensor.wall_connector_vehicle-entry]
@@ -408,7 +408,7 @@ async def test_assist_api_prompt(
     hass.states.async_set(
         entry1.entity_id,
         "on",
-        {"friendly_name": "Kitchen", "temperature": Decimal("0.9")},
+        {"friendly_name": "Kitchen", "temperature": Decimal("0.9"), "humidity": 65},
     )
     hass.states.async_set(entry2.entity_id, "on", {"friendly_name": "Living Room"})

@@ -517,9 +517,7 @@ async def test_assist_api_prompt(
         entry1.entity_id: {
             "names": "Kitchen",
             "state": "on",
-            "attributes": {
-                "temperature": "0.9",
-            },
+            "attributes": {"temperature": "0.9", "humidity": "65"},
         },
         entry2.entity_id: {
             "areas": "Test Area, Alternative name",
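
The updated expectation renders exposed attributes as a single dict of
string values. A minimal illustration of that serialization step (the dict
comprehension is illustrative, not the helper's actual code):

    from decimal import Decimal

    attributes = {"temperature": Decimal("0.9"), "humidity": 65}
    serialized = {k: str(v) for k, v in attributes.items()}
    assert serialized == {"temperature": "0.9", "humidity": "65"}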