Mirror of https://github.com/home-assistant/core.git (synced 2025-07-30 00:27:19 +00:00)

commit d539acf2ff
Merge pull request #74689 from home-assistant/rc

@@ -102,7 +102,6 @@ class KafkaManager:
         self._hass = hass
         ssl_context = ssl_util.client_context()
         self._producer = AIOKafkaProducer(
-            loop=hass.loop,
             bootstrap_servers=f"{ip_address}:{port}",
             compression_type="gzip",
             security_protocol=security_protocol,
@@ -2,7 +2,7 @@
   "domain": "apache_kafka",
   "name": "Apache Kafka",
   "documentation": "https://www.home-assistant.io/integrations/apache_kafka",
-  "requirements": ["aiokafka==0.6.0"],
+  "requirements": ["aiokafka==0.7.2"],
   "codeowners": ["@bachya"],
   "iot_class": "local_push",
   "loggers": ["aiokafka", "kafka_python"]
@@ -3,7 +3,6 @@ from datetime import timedelta
 from http import HTTPStatus
 import logging
 
-from aiohttp.hdrs import AUTHORIZATION
 import requests
 import voluptuous as vol
 
@@ -67,7 +66,7 @@ class BloomSky:
         _LOGGER.debug("Fetching BloomSky update")
         response = requests.get(
             f"{self.API_URL}?{self._endpoint_argument}",
-            headers={AUTHORIZATION: self._api_key},
+            headers={"Authorization": self._api_key},
             timeout=10,
         )
         if response.status_code == HTTPStatus.UNAUTHORIZED:
@@ -37,7 +37,7 @@ class ChromecastInfo:
 
     @property
     def friendly_name(self) -> str:
-        """Return the UUID."""
+        """Return the Friendly Name."""
        return self.cast_info.friendly_name
 
     @property
@@ -441,6 +441,19 @@ class CastMediaPlayerEntity(CastDevice, MediaPlayerEntity):
             connection_status.status,
         )
         self._attr_available = new_available
+        if new_available and not self._cast_info.is_audio_group:
+            # Poll current group status
+            for group_uuid in self.mz_mgr.get_multizone_memberships(
+                self._cast_info.uuid
+            ):
+                group_media_controller = self.mz_mgr.get_multizone_mediacontroller(
+                    group_uuid
+                )
+                if not group_media_controller:
+                    continue
+                self.multizone_new_media_status(
+                    group_uuid, group_media_controller.status
+                )
         self.schedule_update_ha_state()
 
     def multizone_new_media_status(self, group_uuid, media_status):
@@ -113,11 +113,7 @@ async def async_setup_entry(
 
             first = True
             for light_id in group.lights:
-                if (
-                    (light := gateway.api.lights.lights.get(light_id))
-                    and light.ZHATYPE == Light.ZHATYPE
-                    and light.reachable
-                ):
+                if (light := gateway.api.lights.lights.get(light_id)) and light.reachable:
                     group.update_color_state(light, update_all_attributes=first)
                     first = False
 
@@ -3,7 +3,7 @@
   "name": "deCONZ",
   "config_flow": true,
   "documentation": "https://www.home-assistant.io/integrations/deconz",
-  "requirements": ["pydeconz==95"],
+  "requirements": ["pydeconz==96"],
   "ssdp": [
     {
       "manufacturer": "Royal Philips Electronics",
@@ -1,4 +1,7 @@
 """Describe logbook events."""
+from __future__ import annotations
+
+from typing import Any
 
 from homeassistant.components.logbook.const import (
     LOGBOOK_ENTRY_ENTITY_ID,
@@ -28,12 +31,13 @@ def async_describe_events(hass, async_describe_event):
             ].get(doorbird_event, event.data.get(ATTR_ENTITY_ID)),
         }
 
-    domain_data = hass.data[DOMAIN]
+    domain_data: dict[str, Any] = hass.data[DOMAIN]
 
-    for config_entry_id in domain_data:
-        door_station = domain_data[config_entry_id][DOOR_STATION]
-        for event in door_station.doorstation_events:
+    for data in domain_data.values():
+        if DOOR_STATION not in data:
+            # We need to skip door_station_event_entity_ids
+            continue
+        for event in data[DOOR_STATION].doorstation_events:
             async_describe_event(
                 DOMAIN, f"{DOMAIN}_{event}", async_describe_logbook_event
             )
@@ -2,7 +2,7 @@
   "domain": "elkm1",
   "name": "Elk-M1 Control",
   "documentation": "https://www.home-assistant.io/integrations/elkm1",
-  "requirements": ["elkm1-lib==2.0.0"],
+  "requirements": ["elkm1-lib==2.0.2"],
   "dhcp": [{ "registered_devices": true }, { "macaddress": "00409D*" }],
   "codeowners": ["@gwww", "@bdraco"],
   "dependencies": ["network"],
@@ -2,7 +2,7 @@
   "domain": "envisalink",
   "name": "Envisalink",
   "documentation": "https://www.home-assistant.io/integrations/envisalink",
-  "requirements": ["pyenvisalink==4.5"],
+  "requirements": ["pyenvisalink==4.6"],
   "codeowners": ["@ufodone"],
   "iot_class": "local_push",
   "loggers": ["pyenvisalink"]
@@ -12,8 +12,8 @@ from bleak.backends.scanner import AdvertisementData
 from fjaraskupan import Device, State, device_filter
 
 from homeassistant.config_entries import ConfigEntry
-from homeassistant.const import Platform
-from homeassistant.core import HomeAssistant, callback
+from homeassistant.const import EVENT_HOMEASSISTANT_STOP, Platform
+from homeassistant.core import Event, HomeAssistant, callback
 from homeassistant.helpers.dispatcher import (
     async_dispatcher_connect,
     async_dispatcher_send,
@@ -131,6 +131,13 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
     scanner.register_detection_callback(detection_callback)
     await scanner.start()
 
+    async def on_hass_stop(event: Event) -> None:
+        await scanner.stop()
+
+    entry.async_on_unload(
+        hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, on_hass_stop)
+    )
+
     hass.config_entries.async_setup_platforms(entry, PLATFORMS)
     return True
 
@@ -2,7 +2,7 @@
   "domain": "frontend",
   "name": "Home Assistant Frontend",
   "documentation": "https://www.home-assistant.io/integrations/frontend",
-  "requirements": ["home-assistant-frontend==20220706.0"],
+  "requirements": ["home-assistant-frontend==20220707.0"],
   "dependencies": [
     "api",
     "auth",
@@ -2,7 +2,7 @@
   "domain": "kaiterra",
   "name": "Kaiterra",
   "documentation": "https://www.home-assistant.io/integrations/kaiterra",
-  "requirements": ["kaiterra-async-client==0.0.2"],
+  "requirements": ["kaiterra-async-client==1.0.0"],
   "codeowners": ["@Michsior14"],
   "iot_class": "cloud_polling",
   "loggers": ["kaiterra_async_client"]
@@ -143,7 +143,11 @@ class OpenWeatherMapWeather(WeatherEntity):
         """Return the forecast array."""
         api_forecasts = self._weather_coordinator.data[ATTR_API_FORECAST]
         forecasts = [
-            {ha_key: forecast[api_key] for api_key, ha_key in FORECAST_MAP.items()}
+            {
+                ha_key: forecast[api_key]
+                for api_key, ha_key in FORECAST_MAP.items()
+                if api_key in forecast
+            }
             for forecast in api_forecasts
         ]
         return cast(list[Forecast], forecasts)
@@ -2,7 +2,7 @@
   "domain": "satel_integra",
   "name": "Satel Integra",
   "documentation": "https://www.home-assistant.io/integrations/satel_integra",
-  "requirements": ["satel_integra==0.3.4"],
+  "requirements": ["satel_integra==0.3.7"],
   "codeowners": [],
   "iot_class": "local_push",
   "loggers": ["satel_integra"]
@@ -3,7 +3,7 @@
   "name": "SkyBell",
   "config_flow": true,
   "documentation": "https://www.home-assistant.io/integrations/skybell",
-  "requirements": ["aioskybell==22.6.1"],
+  "requirements": ["aioskybell==22.7.0"],
   "dependencies": ["ffmpeg"],
   "codeowners": ["@tkdrob"],
   "iot_class": "cloud_polling",
@@ -69,6 +69,7 @@ from .speaker import SonosMedia, SonosSpeaker
 _LOGGER = logging.getLogger(__name__)
 
 LONG_SERVICE_TIMEOUT = 30.0
+UNJOIN_SERVICE_TIMEOUT = 0.1
 VOLUME_INCREMENT = 2
 
 REPEAT_TO_SONOS = {
@@ -775,7 +776,7 @@ class SonosMediaPlayerEntity(SonosEntity, MediaPlayerEntity):
     async def async_unjoin_player(self):
         """Remove this player from any group.
 
-        Coalesces all calls within 0.5s to allow use of SonosSpeaker.unjoin_multi()
+        Coalesces all calls within UNJOIN_SERVICE_TIMEOUT to allow use of SonosSpeaker.unjoin_multi()
         which optimizes the order in which speakers are removed from their groups.
         Removing coordinators last better preserves playqueues on the speakers.
         """
@@ -785,6 +786,9 @@ class SonosMediaPlayerEntity(SonosEntity, MediaPlayerEntity):
         async def async_process_unjoin(now: datetime.datetime) -> None:
             """Process the unjoin with all remove requests within the coalescing period."""
             unjoin_data = sonos_data.unjoin_data.pop(household_id)
+            _LOGGER.debug(
+                "Processing unjoins for %s", [x.zone_name for x in unjoin_data.speakers]
+            )
             await SonosSpeaker.unjoin_multi(self.hass, unjoin_data.speakers)
             unjoin_data.event.set()
 
@@ -794,6 +798,7 @@ class SonosMediaPlayerEntity(SonosEntity, MediaPlayerEntity):
         unjoin_data = sonos_data.unjoin_data[household_id] = UnjoinData(
             speakers=[self.speaker]
         )
-        async_call_later(self.hass, 0.5, async_process_unjoin)
+        async_call_later(self.hass, UNJOIN_SERVICE_TIMEOUT, async_process_unjoin)
 
+        _LOGGER.debug("Requesting unjoin for %s", self.speaker.zone_name)
         await unjoin_data.event.wait()
@@ -10,7 +10,6 @@ import logging
 import os
 from typing import Union
 
-from aiohttp.hdrs import USER_AGENT
 import requests
 import voluptuous as vol
 
@@ -275,7 +274,7 @@ class ZamgData:
     """The class for handling the data retrieval."""
 
     API_URL = "http://www.zamg.ac.at/ogd/"
-    API_HEADERS = {USER_AGENT: f"home-assistant.zamg/ {__version__}"}
+    API_HEADERS = {"User-Agent": f"home-assistant.zamg/ {__version__}"}
 
     def __init__(self, station_id):
         """Initialize the probe."""
@@ -188,7 +188,7 @@ class FrostLock(BinarySensor, id_suffix="frost_lock"):
     _attr_device_class: BinarySensorDeviceClass = BinarySensorDeviceClass.LOCK
 
 
-@MULTI_MATCH(channel_names="ikea_airpurifier", models={"STARKVIND Air purifier"})
+@MULTI_MATCH(channel_names="ikea_airpurifier")
 class ReplaceFilter(BinarySensor, id_suffix="replace_filter"):
     """ZHA BinarySensor."""
 
@@ -525,9 +525,7 @@ class TimerDurationMinutes(ZHANumberConfigurationEntity, id_suffix="timer_durati
     _zcl_attribute: str = "timer_duration"
 
 
-@CONFIG_DIAGNOSTIC_MATCH(
-    channel_names="ikea_airpurifier", models={"STARKVIND Air purifier"}
-)
+@CONFIG_DIAGNOSTIC_MATCH(channel_names="ikea_airpurifier")
 class FilterLifeTime(ZHANumberConfigurationEntity, id_suffix="filter_life_time"):
     """Representation of a ZHA timer duration configuration entity."""
 
@@ -472,25 +472,8 @@ class SmartEnergyMetering(Sensor):
 
 @MULTI_MATCH(
     channel_names=CHANNEL_SMARTENERGY_METERING,
-    models={"TS011F"},
     stop_on_match_group=CHANNEL_SMARTENERGY_METERING,
 )
-class PolledSmartEnergyMetering(SmartEnergyMetering):
-    """Polled metering sensor."""
-
-    @property
-    def should_poll(self) -> bool:
-        """Poll the entity for current state."""
-        return True
-
-    async def async_update(self) -> None:
-        """Retrieve latest state."""
-        if not self.available:
-            return
-        await self._channel.async_force_update()
-
-
-@MULTI_MATCH(channel_names=CHANNEL_SMARTENERGY_METERING)
 class SmartEnergySummation(SmartEnergyMetering, id_suffix="summation_delivered"):
     """Smart Energy Metering summation sensor."""
 
@@ -523,6 +506,26 @@ class SmartEnergySummation(SmartEnergyMetering, id_suffix="summation_delivered")
         return round(cooked, 3)
 
 
+@MULTI_MATCH(
+    channel_names=CHANNEL_SMARTENERGY_METERING,
+    models={"TS011F"},
+    stop_on_match_group=CHANNEL_SMARTENERGY_METERING,
+)
+class PolledSmartEnergySummation(SmartEnergySummation):
+    """Polled Smart Energy Metering summation sensor."""
+
+    @property
+    def should_poll(self) -> bool:
+        """Poll the entity for current state."""
+        return True
+
+    async def async_update(self) -> None:
+        """Retrieve latest state."""
+        if not self.available:
+            return
+        await self._channel.async_force_update()
+
+
 @MULTI_MATCH(channel_names=CHANNEL_PRESSURE)
 class Pressure(Sensor):
     """Pressure sensor."""
@@ -810,7 +813,7 @@ class TimeLeft(Sensor, id_suffix="time_left"):
     _unit = TIME_MINUTES
 
 
-@MULTI_MATCH(channel_names="ikea_airpurifier", models={"STARKVIND Air purifier"})
+@MULTI_MATCH(channel_names="ikea_airpurifier")
 class IkeaDeviceRunTime(Sensor, id_suffix="device_run_time"):
     """Sensor that displays device run time (in minutes)."""
 
@@ -820,7 +823,7 @@ class IkeaDeviceRunTime(Sensor, id_suffix="device_run_time"):
     _unit = TIME_MINUTES
 
 
-@MULTI_MATCH(channel_names="ikea_airpurifier", models={"STARKVIND Air purifier"})
+@MULTI_MATCH(channel_names="ikea_airpurifier")
 class IkeaFilterRunTime(Sensor, id_suffix="filter_run_time"):
     """Sensor that displays run time of the current filter (in minutes)."""
 
@@ -7,7 +7,7 @@ from .backports.enum import StrEnum
 
 MAJOR_VERSION: Final = 2022
 MINOR_VERSION: Final = 7
-PATCH_VERSION: Final = "0"
+PATCH_VERSION: Final = "1"
 __short_version__: Final = f"{MAJOR_VERSION}.{MINOR_VERSION}"
 __version__: Final = f"{__short_version__}.{PATCH_VERSION}"
 REQUIRED_PYTHON_VER: Final[tuple[int, int, int]] = (3, 9, 0)
@@ -15,7 +15,7 @@ ciso8601==2.2.0
 cryptography==36.0.2
 fnvhash==0.1.0
 hass-nabucasa==0.54.0
-home-assistant-frontend==20220706.0
+home-assistant-frontend==20220707.0
 httpx==0.23.0
 ifaddr==0.1.7
 jinja2==3.1.2
@@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta"
 
 [project]
 name = "homeassistant"
-version = "2022.7.0"
+version = "2022.7.1"
 license = {text = "Apache-2.0"}
 description = "Open-source home automation platform running on Python 3."
 readme = "README.rst"
@@ -181,7 +181,7 @@ aiohue==4.4.2
 aioimaplib==1.0.0
 
 # homeassistant.components.apache_kafka
-aiokafka==0.6.0
+aiokafka==0.7.2
 
 # homeassistant.components.kef
 aiokef==0.2.16
@@ -247,7 +247,7 @@ aiosenz==1.0.0
 aioshelly==2.0.0
 
 # homeassistant.components.skybell
-aioskybell==22.6.1
+aioskybell==22.7.0
 
 # homeassistant.components.slimproto
 aioslimproto==2.1.1
@@ -586,7 +586,7 @@ elgato==3.0.0
 eliqonline==1.2.2
 
 # homeassistant.components.elkm1
-elkm1-lib==2.0.0
+elkm1-lib==2.0.2
 
 # homeassistant.components.elmax
 elmax_api==0.0.2
@@ -828,7 +828,7 @@ hole==0.7.0
 holidays==0.14.2
 
 # homeassistant.components.frontend
-home-assistant-frontend==20220706.0
+home-assistant-frontend==20220707.0
 
 # homeassistant.components.home_connect
 homeconnect==0.7.1
@@ -912,7 +912,7 @@ jellyfin-apiclient-python==1.8.1
 jsonpath==0.82
 
 # homeassistant.components.kaiterra
-kaiterra-async-client==0.0.2
+kaiterra-async-client==1.0.0
 
 # homeassistant.components.keba
 keba-kecontact==1.1.0
@@ -1444,7 +1444,7 @@ pydaikin==2.7.0
 pydanfossair==0.1.0
 
 # homeassistant.components.deconz
-pydeconz==95
+pydeconz==96
 
 # homeassistant.components.delijn
 pydelijn==1.0.0
@@ -1477,7 +1477,7 @@ pyeight==0.3.0
 pyemby==1.8
 
 # homeassistant.components.envisalink
-pyenvisalink==4.5
+pyenvisalink==4.6
 
 # homeassistant.components.ephember
 pyephember==0.3.1
@@ -2125,7 +2125,7 @@ samsungctl[websocket]==0.7.1
 samsungtvws[async,encrypted]==2.5.0
 
 # homeassistant.components.satel_integra
-satel_integra==0.3.4
+satel_integra==0.3.7
 
 # homeassistant.components.dhcp
 scapy==2.4.5
@@ -162,7 +162,7 @@ aiohttp_cors==0.7.0
 aiohue==4.4.2
 
 # homeassistant.components.apache_kafka
-aiokafka==0.6.0
+aiokafka==0.7.2
 
 # homeassistant.components.lookin
 aiolookin==0.1.1
@@ -216,7 +216,7 @@ aiosenz==1.0.0
 aioshelly==2.0.0
 
 # homeassistant.components.skybell
-aioskybell==22.6.1
+aioskybell==22.7.0
 
 # homeassistant.components.slimproto
 aioslimproto==2.1.1
@@ -426,7 +426,7 @@ eagle100==0.1.1
 elgato==3.0.0
 
 # homeassistant.components.elkm1
-elkm1-lib==2.0.0
+elkm1-lib==2.0.2
 
 # homeassistant.components.elmax
 elmax_api==0.0.2
@@ -595,7 +595,7 @@ hole==0.7.0
 holidays==0.14.2
 
 # homeassistant.components.frontend
-home-assistant-frontend==20220706.0
+home-assistant-frontend==20220707.0
 
 # homeassistant.components.home_connect
 homeconnect==0.7.1
@@ -974,7 +974,7 @@ pycoolmasternet-async==0.1.2
 pydaikin==2.7.0
 
 # homeassistant.components.deconz
-pydeconz==95
+pydeconz==96
 
 # homeassistant.components.dexcom
 pydexcom==0.2.3
@@ -731,6 +731,20 @@ async def test_entity_availability(hass: HomeAssistant):
     state = hass.states.get(entity_id)
     assert state.state == "off"
 
+    connection_status = MagicMock()
+    connection_status.status = "LOST"
+    conn_status_cb(connection_status)
+    await hass.async_block_till_done()
+    state = hass.states.get(entity_id)
+    assert state.state == "unavailable"
+
+    connection_status = MagicMock()
+    connection_status.status = "CONNECTED"
+    conn_status_cb(connection_status)
+    await hass.async_block_till_done()
+    state = hass.states.get(entity_id)
+    assert state.state == "off"
+
     connection_status = MagicMock()
     connection_status.status = "DISCONNECTED"
     conn_status_cb(connection_status)
@@ -738,6 +752,14 @@
     state = hass.states.get(entity_id)
     assert state.state == "unavailable"
 
+    # Can't reconnect after receiving DISCONNECTED
+    connection_status = MagicMock()
+    connection_status.status = "CONNECTED"
+    conn_status_cb(connection_status)
+    await hass.async_block_till_done()
+    state = hass.states.get(entity_id)
+    assert state.state == "unavailable"
+
 
 @pytest.mark.parametrize("port,entry_type", ((8009, None), (12345, None)))
 async def test_device_registry(hass: HomeAssistant, hass_ws_client, port, entry_type):
@@ -1675,6 +1697,59 @@ async def test_group_media_states(hass, mz_mock):
     assert state.state == "playing"
 
 
+async def test_group_media_states_early(hass, mz_mock):
+    """Test media states are read from group if entity has no state.
+
+    This tests case asserts group state is polled when the player is created.
+    """
+    entity_id = "media_player.speaker"
+    reg = er.async_get(hass)
+
+    info = get_fake_chromecast_info()
+
+    mz_mock.get_multizone_memberships = MagicMock(return_value=[str(FakeGroupUUID)])
+    mz_mock.get_multizone_mediacontroller = MagicMock(
+        return_value=MagicMock(status=MagicMock(images=None, player_state="BUFFERING"))
+    )
+
+    chromecast, _ = await async_setup_media_player_cast(hass, info)
+    _, conn_status_cb, _, _ = get_status_callbacks(chromecast, mz_mock)
+
+    state = hass.states.get(entity_id)
+    assert state is not None
+    assert state.name == "Speaker"
+    assert state.state == "unavailable"
+    assert entity_id == reg.async_get_entity_id("media_player", "cast", str(info.uuid))
+
+    # Check group state is polled when player is first created
+    connection_status = MagicMock()
+    connection_status.status = "CONNECTED"
+    conn_status_cb(connection_status)
+    await hass.async_block_till_done()
+
+    assert hass.states.get(entity_id).state == "buffering"
+
+    connection_status = MagicMock()
+    connection_status.status = "LOST"
+    conn_status_cb(connection_status)
+    await hass.async_block_till_done()
+
+    assert hass.states.get(entity_id).state == "unavailable"
+
+    # Check group state is polled when player reconnects
+    mz_mock.get_multizone_mediacontroller = MagicMock(
+        return_value=MagicMock(status=MagicMock(images=None, player_state="PLAYING"))
+    )
+
+    connection_status = MagicMock()
+    connection_status.status = "CONNECTED"
+    conn_status_cb(connection_status)
+    await hass.async_block_till_done()
+    await hass.async_block_till_done()
+
+    assert hass.states.get(entity_id).state == "playing"
+
+
 async def test_group_media_control(hass, mz_mock, quick_play_mock):
     """Test media controls are handled by group if entity has no state."""
     entity_id = "media_player.speaker"