2024.7.3 (#122194)
commit 267dfac737 (https://github.com/home-assistant/core.git)
@@ -777,6 +777,8 @@ build.json @home-assistant/supervisor
 /tests/components/lg_netcast/ @Drafteed @splinter98
 /homeassistant/components/lidarr/ @tkdrob
 /tests/components/lidarr/ @tkdrob
+/homeassistant/components/lifx/ @Djelibeybi
+/tests/components/lifx/ @Djelibeybi
 /homeassistant/components/light/ @home-assistant/core
 /tests/components/light/ @home-assistant/core
 /homeassistant/components/linear_garage_door/ @IceBotYT
@@ -1497,7 +1497,7 @@ async def async_api_adjust_range(
     if instance == f"{cover.DOMAIN}.{cover.ATTR_POSITION}":
         range_delta = int(range_delta * 20) if range_delta_default else int(range_delta)
         service = SERVICE_SET_COVER_POSITION
-        if not (current := entity.attributes.get(cover.ATTR_POSITION)):
+        if not (current := entity.attributes.get(cover.ATTR_CURRENT_POSITION)):
             msg = f"Unable to determine {entity.entity_id} current position"
             raise AlexaInvalidValueError(msg)
         position = response_value = min(100, max(0, range_delta + current))
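
A minimal standalone sketch of the clamping logic used above, with made-up values and no Home Assistant imports:

    def adjust_position(current: int, range_delta: float, is_default_delta: bool) -> int:
        """Clamp a relative position change to the 0-100 range, as the handler above does."""
        delta = int(range_delta * 20) if is_default_delta else int(range_delta)
        return min(100, max(0, delta + current))

    # Example: a default "raise it a little" request of +1 becomes +20 percent.
    assert adjust_position(current=75, range_delta=1, is_default_delta=True) == 95
    assert adjust_position(current=95, range_delta=1, is_default_delta=True) == 100  # clamped
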
@@ -58,7 +58,7 @@ class BringConfigFlow(ConfigFlow, domain=DOMAIN):
         ):
             self._abort_if_unique_id_configured()
             return self.async_create_entry(
-                title=self.info["name"] or user_input[CONF_EMAIL], data=user_input
+                title=self.info.get("name") or user_input[CONF_EMAIL], data=user_input
             )

         return self.async_show_form(
@@ -16,6 +16,8 @@ from homeassistant.const import (
 from homeassistant.core import HomeAssistant
 from homeassistant.helpers.aiohttp_client import async_create_clientsession

+from .const import CONF_SOURCE_BOUQUET
+
 type Enigma2ConfigEntry = ConfigEntry[OpenWebIfDevice]

 PLATFORMS = [Platform.MEDIA_PLAYER]

@@ -35,7 +37,9 @@ async def async_setup_entry(hass: HomeAssistant, entry: Enigma2ConfigEntry) -> b
         hass, verify_ssl=entry.data[CONF_VERIFY_SSL], base_url=base_url
     )

-    entry.runtime_data = OpenWebIfDevice(session)
+    entry.runtime_data = OpenWebIfDevice(
+        session, source_bouquet=entry.options.get(CONF_SOURCE_BOUQUET)
+    )
     await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
     return True

@@ -7,5 +7,5 @@
   "integration_type": "device",
   "iot_class": "local_polling",
   "loggers": ["openwebif"],
-  "requirements": ["openwebifpy==4.2.4"]
+  "requirements": ["openwebifpy==4.2.5"]
 }
@@ -199,7 +199,8 @@ class Enigma2Device(MediaPlayerEntity):

     async def async_mute_volume(self, mute: bool) -> None:
         """Mute or unmute."""
-        await self._device.toggle_mute()
+        if mute != self._device.status.muted:
+            await self._device.toggle_mute()

     async def async_select_source(self, source: str) -> None:
         """Select input source."""
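
The guard added above avoids flipping the mute state when the requested state already matches; a small illustrative sketch with a hypothetical Device class (not the real openwebif API):

    class Device:
        """Hypothetical device whose API can only toggle mute, not set it."""

        def __init__(self) -> None:
            self.muted = False

        def toggle_mute(self) -> None:
            self.muted = not self.muted


    def mute_volume(device: Device, mute: bool) -> None:
        # Only toggle when the requested state differs from the current one,
        # so repeated "mute" commands do not accidentally unmute the device.
        if mute != device.muted:
            device.toggle_mute()


    device = Device()
    mute_volume(device, True)
    mute_volume(device, True)  # second call is a no-op instead of unmuting
    assert device.muted is True
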
@@ -142,10 +142,10 @@ class HiveClimateEntity(HiveEntity, ClimateEntity):
         self.device = await self.hive.heating.getClimate(self.device)
         self._attr_available = self.device["deviceData"].get("online")
         if self._attr_available:
-            self._attr_hvac_mode = HIVE_TO_HASS_STATE[self.device["status"]["mode"]]
-            self._attr_hvac_action = HIVE_TO_HASS_HVAC_ACTION[
+            self._attr_hvac_mode = HIVE_TO_HASS_STATE.get(self.device["status"]["mode"])
+            self._attr_hvac_action = HIVE_TO_HASS_HVAC_ACTION.get(
                 self.device["status"]["action"]
-            ]
+            )
             self._attr_current_temperature = self.device["status"][
                 "current_temperature"
             ]
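
A minimal sketch of the difference this hunk relies on, using a made-up state map rather than the real Hive constants:

    HIVE_TO_HASS_STATE = {"SCHEDULE": "auto", "MANUAL": "heat", "OFF": "off"}  # illustrative only

    # Indexing raises KeyError for a state the map does not know about ...
    try:
        HIVE_TO_HASS_STATE["BOOST"]
    except KeyError:
        pass

    # ... while .get() degrades gracefully to None, so one unexpected value
    # from the cloud API no longer aborts the whole state update.
    assert HIVE_TO_HASS_STATE.get("BOOST") is None
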
@@ -154,5 +154,6 @@ class HiveClimateEntity(HiveEntity, ClimateEntity):
         self._attr_max_temp = self.device["max_temp"]
         if self.device["status"]["boost"] == "ON":
             self._attr_preset_mode = PRESET_BOOST
+            self._attr_hvac_mode = HVACMode.HEAT
         else:
             self._attr_preset_mode = PRESET_NONE

@@ -5,5 +5,5 @@
   "config_flow": true,
   "documentation": "https://www.home-assistant.io/integrations/holiday",
   "iot_class": "local_polling",
-  "requirements": ["holidays==0.52", "babel==2.15.0"]
+  "requirements": ["holidays==0.53", "babel==2.15.0"]
 }

@@ -12,5 +12,5 @@
   "documentation": "https://www.home-assistant.io/integrations/idasen_desk",
   "iot_class": "local_push",
   "quality_scale": "silver",
-  "requirements": ["idasen-ha==2.6.1"]
+  "requirements": ["idasen-ha==2.6.2"]
 }

@@ -8,6 +8,7 @@ from typing import Any

 from pyecotrend_ista import KeycloakError, LoginError, PyEcotrendIsta, ServerError

+from homeassistant.config_entries import ConfigEntry
 from homeassistant.const import CONF_EMAIL
 from homeassistant.core import HomeAssistant
 from homeassistant.exceptions import ConfigEntryAuthFailed

@@ -21,6 +22,8 @@ _LOGGER = logging.getLogger(__name__)
 class IstaCoordinator(DataUpdateCoordinator[dict[str, Any]]):
     """Ista EcoTrend data update coordinator."""

+    config_entry: ConfigEntry
+
     def __init__(self, hass: HomeAssistant, ista: PyEcotrendIsta) -> None:
         """Initialize ista EcoTrend data update coordinator."""
         super().__init__(
@@ -35,11 +38,14 @@ class IstaCoordinator(DataUpdateCoordinator[dict[str, Any]]):
     async def _async_update_data(self):
         """Fetch ista EcoTrend data."""

-        if not self.details:
-            self.details = await self.async_get_details()
-
         try:
+            await self.hass.async_add_executor_job(self.ista.login)
+
+            if not self.details:
+                self.details = await self.async_get_details()
+
             return await self.hass.async_add_executor_job(self.get_consumption_data)
+
         except ServerError as e:
             raise UpdateFailed(
                 "Unable to connect and retrieve data from ista EcoTrend, try again later"

@@ -48,7 +54,9 @@ class IstaCoordinator(DataUpdateCoordinator[dict[str, Any]]):
             raise ConfigEntryAuthFailed(
                 translation_domain=DOMAIN,
                 translation_key="authentication_exception",
-                translation_placeholders={CONF_EMAIL: self.ista._email},  # noqa: SLF001
+                translation_placeholders={
+                    CONF_EMAIL: self.config_entry.data[CONF_EMAIL]
+                },
             ) from e

     def get_consumption_data(self) -> dict[str, Any]:

@@ -61,26 +69,16 @@ class IstaCoordinator(DataUpdateCoordinator[dict[str, Any]]):

     async def async_get_details(self) -> dict[str, Any]:
         """Retrieve details of consumption units."""
-        try:
-            result = await self.hass.async_add_executor_job(
-                self.ista.get_consumption_unit_details
-            )
-        except ServerError as e:
-            raise UpdateFailed(
-                "Unable to connect and retrieve data from ista EcoTrend, try again later"
-            ) from e
-        except (LoginError, KeycloakError) as e:
-            raise ConfigEntryAuthFailed(
-                translation_domain=DOMAIN,
-                translation_key="authentication_exception",
-                translation_placeholders={CONF_EMAIL: self.ista._email},  # noqa: SLF001
-            ) from e
-        else:
-            return {
-                consumption_unit: next(
-                    details
-                    for details in result["consumptionUnits"]
-                    if details["id"] == consumption_unit
-                )
-                for consumption_unit in self.ista.get_uuids()
-            }
+        result = await self.hass.async_add_executor_job(
+            self.ista.get_consumption_unit_details
+        )
+
+        return {
+            consumption_unit: next(
+                details
+                for details in result["consumptionUnits"]
+                if details["id"] == consumption_unit
+            )
+            for consumption_unit in self.ista.get_uuids()
+        }
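
A standalone sketch of the lookup pattern kept in async_get_details, using plain dictionaries instead of the real ista API response:

    result = {
        "consumptionUnits": [
            {"id": "unit-1", "address": "Main St 1"},
            {"id": "unit-2", "address": "Main St 2"},
        ]
    }
    uuids = ["unit-1", "unit-2"]

    # For each known unit id, pick the first matching details record.
    # Note that next() raises StopIteration if an id has no match.
    details_by_unit = {
        unit: next(d for d in result["consumptionUnits"] if d["id"] == unit)
        for unit in uuids
    }
    assert details_by_unit["unit-2"]["address"] == "Main St 2"
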
@@ -97,7 +97,11 @@ class JellyfinConfigFlow(ConfigFlow, domain=DOMAIN):
         )

         return self.async_show_form(
-            step_id="user", data_schema=STEP_USER_DATA_SCHEMA, errors=errors
+            step_id="user",
+            data_schema=self.add_suggested_values_to_schema(
+                STEP_USER_DATA_SCHEMA, user_input
+            ),
+            errors=errors,
         )

     async def async_step_reauth(

@@ -4,7 +4,7 @@ from __future__ import annotations

 from typing import Any

-from knocki import KnockiClient, KnockiConnectionError
+from knocki import KnockiClient, KnockiConnectionError, KnockiInvalidAuthError
 import voluptuous as vol

 from homeassistant.config_entries import ConfigFlow, ConfigFlowResult

@@ -45,6 +45,8 @@ class KnockiConfigFlow(ConfigFlow, domain=DOMAIN):
                 raise
             except KnockiConnectionError:
                 errors["base"] = "cannot_connect"
+            except KnockiInvalidAuthError:
+                errors["base"] = "invalid_auth"
             except Exception:  # noqa: BLE001
                 LOGGER.exception("Error logging into the Knocki API")
                 errors["base"] = "unknown"

@@ -7,5 +7,5 @@
   "integration_type": "device",
   "iot_class": "cloud_push",
   "loggers": ["knocki"],
-  "requirements": ["knocki==0.2.0"]
+  "requirements": ["knocki==0.3.1"]
 }

@@ -1,7 +1,7 @@
 {
   "domain": "lifx",
   "name": "LIFX",
-  "codeowners": [],
+  "codeowners": ["@Djelibeybi"],
   "config_flow": true,
   "dependencies": ["network"],
   "dhcp": [

@@ -48,7 +48,7 @@
   "iot_class": "local_polling",
   "loggers": ["aiolifx", "aiolifx_effects", "bitstring"],
   "requirements": [
-    "aiolifx==1.0.2",
+    "aiolifx==1.0.5",
     "aiolifx-effects==0.3.2",
     "aiolifx-themes==0.4.15"
   ]

@@ -8,6 +8,6 @@
   "iot_class": "calculated",
   "loggers": ["yt_dlp"],
   "quality_scale": "internal",
-  "requirements": ["yt-dlp==2024.07.01"],
+  "requirements": ["yt-dlp==2024.07.16"],
   "single_config_entry": true
 }

@@ -470,3 +470,8 @@ class OpenThermGatewayDevice:
             async_dispatcher_send(self.hass, self.update_signal, status)

         self.gateway.subscribe(handle_report)
+
+    @property
+    def connected(self):
+        """Report whether or not we are connected to the gateway."""
+        return self.gateway.connection.connected

@@ -48,6 +48,7 @@ class OpenThermBinarySensor(BinarySensorEntity):

     _attr_should_poll = False
     _attr_entity_registry_enabled_default = False
+    _attr_available = False

     def __init__(self, gw_dev, var, source, device_class, friendly_name_format):
         """Initialize the binary sensor."""

@@ -85,14 +86,10 @@ class OpenThermBinarySensor(BinarySensorEntity):
         _LOGGER.debug("Removing OpenTherm Gateway binary sensor %s", self._attr_name)
         self._unsub_updates()

-    @property
-    def available(self):
-        """Return availability of the sensor."""
-        return self._attr_is_on is not None
-
     @callback
     def receive_report(self, status):
         """Handle status updates from the component."""
+        self._attr_available = self._gateway.connected
         state = status[self._source].get(self._var)
         self._attr_is_on = None if state is None else bool(state)
         self.async_write_ha_state()

@@ -138,7 +138,7 @@ class OpenThermClimate(ClimateEntity):
     @callback
     def receive_report(self, status):
         """Receive and handle a new report from the Gateway."""
-        self._attr_available = status != gw_vars.DEFAULT_STATUS
+        self._attr_available = self._gateway.connected
         ch_active = status[gw_vars.BOILER].get(gw_vars.DATA_SLAVE_CH_ACTIVE)
         flame_on = status[gw_vars.BOILER].get(gw_vars.DATA_SLAVE_FLAME_ON)
         cooling_active = status[gw_vars.BOILER].get(gw_vars.DATA_SLAVE_COOLING_ACTIVE)

@@ -45,6 +45,7 @@ class OpenThermSensor(SensorEntity):

     _attr_should_poll = False
     _attr_entity_registry_enabled_default = False
+    _attr_available = False

     def __init__(
         self,

@@ -94,14 +95,10 @@ class OpenThermSensor(SensorEntity):
         _LOGGER.debug("Removing OpenTherm Gateway sensor %s", self._attr_name)
         self._unsub_updates()

-    @property
-    def available(self):
-        """Return availability of the sensor."""
-        return self._attr_native_value is not None
-
     @callback
     def receive_report(self, status):
         """Handle status updates from the component."""
+        self._attr_available = self._gateway.connected
         value = status[self._source].get(self._var)
         self._attr_native_value = value
         self.async_write_ha_state()

@@ -7,5 +7,5 @@
   "documentation": "https://www.home-assistant.io/integrations/opower",
   "iot_class": "cloud_polling",
   "loggers": ["opower"],
-  "requirements": ["opower==0.4.7"]
+  "requirements": ["opower==0.5.2"]
 }

@@ -30,7 +30,7 @@ class PyLoadData:
     speed: float
     download: bool
     reconnect: bool
-    captcha: bool
+    captcha: bool | None = None
     free_space: int


@@ -8,5 +8,5 @@
   "iot_class": "local_polling",
   "loggers": ["pyloadapi"],
   "quality_scale": "platinum",
-  "requirements": ["PyLoadAPI==1.2.0"]
+  "requirements": ["PyLoadAPI==1.3.2"]
 }

@@ -167,7 +167,7 @@ class RAVEnDataCoordinator(DataUpdateCoordinator):
             await device.synchronize()
             self._device_info = await device.get_device_info()
         except:
-            await device.close()
+            await device.abort()
             raise

         self._raven_device = device

@@ -6,7 +6,7 @@
   "dependencies": ["usb"],
   "documentation": "https://www.home-assistant.io/integrations/rainforest_raven",
   "iot_class": "local_polling",
-  "requirements": ["aioraven==0.6.0"],
+  "requirements": ["aioraven==0.7.0"],
   "usb": [
     {
       "vid": "0403",

@@ -1178,7 +1178,15 @@ class Recorder(threading.Thread):

     def _handle_database_error(self, err: Exception) -> bool:
         """Handle a database error that may result in moving away the corrupt db."""
-        if isinstance(err.__cause__, sqlite3.DatabaseError):
+        if (
+            (cause := err.__cause__)
+            and isinstance(cause, sqlite3.DatabaseError)
+            and (cause_str := str(cause))
+            # Make sure we do not move away a database when its only locked
+            # externally by another process. sqlite does not give us a named
+            # exception for this so we have to check the error message.
+            and ("malformed" in cause_str or "not a database" in cause_str)
+        ):
             _LOGGER.exception(
                 "Unrecoverable sqlite3 database corruption detected: %s", err
             )
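
A self-contained sketch of the corruption check added above, using only the standard library; the wrapping exception and messages are made up for illustration:

    import sqlite3


    def looks_like_corruption(err: Exception) -> bool:
        """Return True only for sqlite errors whose message indicates real corruption."""
        return bool(
            (cause := err.__cause__)
            and isinstance(cause, sqlite3.DatabaseError)
            and (cause_str := str(cause))
            and ("malformed" in cause_str or "not a database" in cause_str)
        )


    try:
        raise RuntimeError("query failed") from sqlite3.DatabaseError(
            "database disk image is malformed"
        )
    except RuntimeError as err:
        assert looks_like_corruption(err)

    try:
        raise RuntimeError("query failed") from sqlite3.OperationalError("database is locked")
    except RuntimeError as err:
        assert not looks_like_corruption(err)  # an externally locked DB is not moved away
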
@@ -313,11 +313,9 @@ def _create_index(
     index = index_list[0]
     _LOGGER.debug("Creating %s index", index_name)
     _LOGGER.warning(
-        (
-            "Adding index `%s` to table `%s`. Note: this can take several "
-            "minutes on large databases and slow computers. Please "
-            "be patient!"
-        ),
+        "Adding index `%s` to table `%s`. Note: this can take several "
+        "minutes on large databases and slow computers. Please "
+        "be patient!",
         index_name,
         table_name,
     )

@@ -331,7 +329,7 @@ def _create_index(
             "Index %s already exists on %s, continuing", index_name, table_name
         )

-    _LOGGER.debug("Finished creating %s", index_name)
+    _LOGGER.warning("Finished adding index `%s` to table `%s`", index_name, table_name)


 def _execute_or_collect_error(

@@ -364,11 +362,9 @@ def _drop_index(
     DO NOT USE THIS FUNCTION IN ANY OPERATION THAT TAKES USER INPUT.
     """
     _LOGGER.warning(
-        (
-            "Dropping index `%s` from table `%s`. Note: this can take several "
-            "minutes on large databases and slow computers. Please "
-            "be patient!"
-        ),
+        "Dropping index `%s` from table `%s`. Note: this can take several "
+        "minutes on large databases and slow computers. Please "
+        "be patient!",
         index_name,
         table_name,
     )

@@ -377,8 +373,8 @@ def _drop_index(
     index_to_drop = get_index_by_name(session, table_name, index_name)

     if index_to_drop is None:
-        _LOGGER.debug(
-            "The index %s on table %s no longer exists", index_name, table_name
+        _LOGGER.warning(
+            "The index `%s` on table `%s` no longer exists", index_name, table_name
         )
         return

@@ -395,18 +391,16 @@ def _drop_index(
         f"DROP INDEX {index_to_drop}",
     ):
         if _execute_or_collect_error(session_maker, query, errors):
-            _LOGGER.debug(
-                "Finished dropping index %s from table %s", index_name, table_name
+            _LOGGER.warning(
+                "Finished dropping index `%s` from table `%s`", index_name, table_name
             )
             return

     if not quiet:
         _LOGGER.warning(
-            (
-                "Failed to drop index `%s` from table `%s`. Schema "
-                "Migration will continue; this is not a "
-                "critical operation: %s"
-            ),
+            "Failed to drop index `%s` from table `%s`. Schema "
+            "Migration will continue; this is not a "
+            "critical operation: %s",
             index_name,
             table_name,
             errors,
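
These hunks keep the logger's lazy %-style formatting while changing levels and wording; a small standalone sketch of that pattern (logger name and values are illustrative):

    import logging

    _LOGGER = logging.getLogger("recorder.migration.example")
    logging.basicConfig(level=logging.INFO)

    index_name, table_name = "ix_states_entity_id", "states"

    # Implicit string concatenation keeps the long message readable, and the
    # %s placeholders are only interpolated if the record is actually emitted.
    _LOGGER.warning(
        "Adding index `%s` to table `%s`. Note: this can take several "
        "minutes on large databases and slow computers. Please "
        "be patient!",
        index_name,
        table_name,
    )
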
@@ -5,6 +5,7 @@ from __future__ import annotations
 import datetime as dt
 import logging

+from reolink_aio.api import DUAL_LENS_MODELS
 from reolink_aio.enums import VodRequestType

 from homeassistant.components.camera import DOMAIN as CAM_DOMAIN, DynamicStreamSettings

@@ -184,6 +185,9 @@ class ReolinkVODMediaSource(MediaSource):
             if device.name_by_user is not None:
                 device_name = device.name_by_user

+            if host.api.model in DUAL_LENS_MODELS:
+                device_name = f"{device_name} lens {ch}"
+
             children.append(
                 BrowseMediaSource(
                     domain=DOMAIN,

@@ -214,7 +214,8 @@
             "unknown": "Unknown",
             "locked": "Locked",
             "air_drying_stopping": "Air drying stopping",
-            "egg_attack": "Cupid mode"
+            "egg_attack": "Cupid mode",
+            "mapping": "Mapping"
           }
         },
         "total_cleaning_time": {

@@ -282,7 +283,8 @@
             "deep": "Deep",
             "deep_plus": "Deep+",
             "custom": "Custom",
-            "fast": "Fast"
+            "fast": "Fast",
+            "smart_mode": "SmartPlan"
           }
         },
         "mop_intensity": {

@@ -293,10 +295,12 @@
             "mild": "Mild",
             "medium": "Medium",
             "moderate": "Moderate",
+            "max": "Max",
             "high": "High",
             "intense": "Intense",
             "custom": "[%key:component::roborock::entity::select::mop_mode::state::custom%]",
-            "custom_water_flow": "Custom water flow"
+            "custom_water_flow": "Custom water flow",
+            "smart_mode": "[%key:component::roborock::entity::select::mop_mode::state::smart_mode%]"
           }
         }
       },

@@ -338,13 +342,14 @@
             "custom": "[%key:component::roborock::entity::select::mop_mode::state::custom%]",
             "gentle": "Gentle",
             "off": "[%key:common::state::off%]",
-            "max": "Max",
+            "max": "[%key:component::roborock::entity::select::mop_intensity::state::max%]",
             "max_plus": "Max plus",
             "medium": "Medium",
             "quiet": "Quiet",
             "silent": "Silent",
             "standard": "[%key:component::roborock::entity::select::mop_mode::state::standard%]",
-            "turbo": "Turbo"
+            "turbo": "Turbo",
+            "smart_mode": "[%key:component::roborock::entity::select::mop_mode::state::smart_mode%]"
           }
         }
       }

@@ -61,6 +61,7 @@ from .utils import (
     async_create_issue_unsupported_firmware,
     get_block_device_sleep_period,
     get_device_entry_gen,
+    get_host,
     get_http_port,
     get_rpc_device_wakeup_period,
     update_device_fw_info,

@@ -147,7 +148,7 @@ class ShellyCoordinatorBase[_DeviceT: BlockDevice | RpcDevice](
             model=MODEL_NAMES.get(self.model, self.model),
             sw_version=self.sw_version,
             hw_version=f"gen{get_device_entry_gen(self.entry)} ({self.model})",
-            configuration_url=f"http://{self.entry.data[CONF_HOST]}:{get_http_port(self.entry.data)}",
+            configuration_url=f"http://{get_host(self.entry.data[CONF_HOST])}:{get_http_port(self.entry.data)}",
         )
         self.device_id = device_entry.id

@@ -667,6 +668,9 @@ class ShellyRpcCoordinator(ShellyCoordinatorBase[RpcDevice]):
         """Handle device update."""
         LOGGER.debug("Shelly %s handle update, type: %s", self.name, update_type)
         if update_type is RpcUpdateType.ONLINE:
+            if self.device.connected:
+                LOGGER.debug("Device %s already connected", self.name)
+                return
             self.entry.async_create_background_task(
                 self.hass,
                 self._async_device_connect_task(),

@@ -3,7 +3,7 @@
 from __future__ import annotations

 from datetime import datetime, timedelta
-from ipaddress import IPv4Address
+from ipaddress import IPv4Address, IPv6Address, ip_address
 from types import MappingProxyType
 from typing import Any, cast

@@ -482,6 +482,20 @@ def get_http_port(data: MappingProxyType[str, Any]) -> int:
     return cast(int, data.get(CONF_PORT, DEFAULT_HTTP_PORT))


+def get_host(host: str) -> str:
+    """Get the device IP address or hostname."""
+    try:
+        ip_object = ip_address(host)
+    except ValueError:
+        # host contains hostname
+        return host
+
+    if isinstance(ip_object, IPv6Address):
+        return f"[{host}]"
+
+    return host
+
+
 @callback
 def async_remove_shelly_rpc_entities(
     hass: HomeAssistant, domain: str, mac: str, keys: list[str]
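
A standalone sketch of why get_host wraps IPv6 literals in brackets before they end up in the configuration URL (standard library only; the addresses are examples):

    from ipaddress import IPv6Address, ip_address


    def get_host(host: str) -> str:
        """Return the host formatted for use inside an http://host:port URL."""
        try:
            ip_object = ip_address(host)
        except ValueError:
            return host  # plain hostname, use as-is
        if isinstance(ip_object, IPv6Address):
            return f"[{host}]"  # IPv6 literals must be bracketed in URLs
        return host


    assert get_host("192.168.1.10") == "192.168.1.10"
    assert get_host("shelly-plug.local") == "shelly-plug.local"
    assert get_host("fd00::1") == "[fd00::1]"
    print(f"http://{get_host('fd00::1')}:80")  # -> http://[fd00::1]:80
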
@@ -6,5 +6,5 @@
   "documentation": "https://www.home-assistant.io/integrations/sunweg/",
   "iot_class": "cloud_polling",
   "loggers": ["sunweg"],
-  "requirements": ["sunweg==3.0.1"]
+  "requirements": ["sunweg==3.0.2"]
 }

@@ -71,7 +71,9 @@ class SuplaCoverEntity(SuplaEntity, CoverEntity):

     async def async_set_cover_position(self, **kwargs: Any) -> None:
         """Move the cover to a specific position."""
-        await self.async_action("REVEAL", percentage=kwargs.get(ATTR_POSITION))
+        await self.async_action(
+            "REVEAL_PARTIALLY", percentage=kwargs.get(ATTR_POSITION)
+        )

     @property
     def is_closed(self) -> bool | None:

@@ -39,5 +39,5 @@
   "documentation": "https://www.home-assistant.io/integrations/switchbot",
   "iot_class": "local_push",
   "loggers": ["switchbot"],
-  "requirements": ["PySwitchbot==0.48.0"]
+  "requirements": ["PySwitchbot==0.48.1"]
 }

@@ -8,5 +8,5 @@
   "iot_class": "local_push",
   "loggers": ["pytedee_async"],
   "quality_scale": "platinum",
-  "requirements": ["pytedee-async==0.2.17"]
+  "requirements": ["pytedee-async==0.2.20"]
 }

@@ -3,6 +3,7 @@
 from __future__ import annotations

 from collections.abc import Mapping
+import logging
 from typing import Any

 from kasa import (

@@ -52,6 +53,8 @@ from .const import (
     DOMAIN,
 )

+_LOGGER = logging.getLogger(__name__)
+
 STEP_AUTH_DATA_SCHEMA = vol.Schema(
     {vol.Required(CONF_USERNAME): str, vol.Required(CONF_PASSWORD): str}
 )

@@ -88,15 +91,10 @@ class TPLinkConfigFlow(ConfigFlow, domain=DOMAIN):
         )

     @callback
-    def _update_config_if_entry_in_setup_error(
+    def _get_config_updates(
         self, entry: ConfigEntry, host: str, config: dict
-    ) -> ConfigFlowResult | None:
-        """If discovery encounters a device that is in SETUP_ERROR or SETUP_RETRY update the device config."""
-        if entry.state not in (
-            ConfigEntryState.SETUP_ERROR,
-            ConfigEntryState.SETUP_RETRY,
-        ):
-            return None
+    ) -> dict | None:
+        """Return updates if the host or device config has changed."""
         entry_data = entry.data
         entry_config_dict = entry_data.get(CONF_DEVICE_CONFIG)
         if entry_config_dict == config and entry_data[CONF_HOST] == host:

@@ -110,11 +108,31 @@ class TPLinkConfigFlow(ConfigFlow, domain=DOMAIN):
             != config.get(CONF_CONNECTION_TYPE)
         ):
             updates.pop(CONF_CREDENTIALS_HASH, None)
-        return self.async_update_reload_and_abort(
-            entry,
-            data=updates,
-            reason="already_configured",
-        )
+            _LOGGER.debug(
+                "Connection type changed for %s from %s to: %s",
+                host,
+                entry_config_dict.get(CONF_CONNECTION_TYPE),
+                config.get(CONF_CONNECTION_TYPE),
+            )
+        return updates
+
+    @callback
+    def _update_config_if_entry_in_setup_error(
+        self, entry: ConfigEntry, host: str, config: dict
+    ) -> ConfigFlowResult | None:
+        """If discovery encounters a device that is in SETUP_ERROR or SETUP_RETRY update the device config."""
+        if entry.state not in (
+            ConfigEntryState.SETUP_ERROR,
+            ConfigEntryState.SETUP_RETRY,
+        ):
+            return None
+        if updates := self._get_config_updates(entry, host, config):
+            return self.async_update_reload_and_abort(
+                entry,
+                data=updates,
+                reason="already_configured",
+            )
+        return None

     async def _async_handle_discovery(
         self, host: str, formatted_mac: str, config: dict | None = None

@@ -454,7 +472,7 @@ class TPLinkConfigFlow(ConfigFlow, domain=DOMAIN):
         password = user_input[CONF_PASSWORD]
         credentials = Credentials(username, password)
         try:
-            await self._async_try_discover_and_update(
+            device = await self._async_try_discover_and_update(
                 host,
                 credentials=credentials,
                 raise_on_progress=True,

@@ -467,6 +485,11 @@ class TPLinkConfigFlow(ConfigFlow, domain=DOMAIN):
             placeholders["error"] = str(ex)
         else:
             await set_credentials(self.hass, username, password)
+            config = device.config.to_dict(exclude_credentials=True)
+            if updates := self._get_config_updates(reauth_entry, host, config):
+                self.hass.config_entries.async_update_entry(
+                    reauth_entry, data=updates
+                )
             self.hass.async_create_task(
                 self._async_reload_requires_auth_entries(), eager_start=False
             )

@@ -392,11 +392,11 @@ class TPLinkLightEffectEntity(TPLinkLightEntity):
                 kwargs[ATTR_EFFECT], brightness=brightness, transition=transition
             )
         elif ATTR_COLOR_TEMP_KELVIN in kwargs:
-            if self.effect:
+            if self.effect and self.effect != EFFECT_OFF:
                 # If there is an effect in progress
                 # we have to clear the effect
                 # before we can set a color temp
-                await self._light_module.set_hsv(0, 0, brightness)
+                await self._effect_module.set_effect(LightEffect.LIGHT_EFFECTS_OFF)
             await self._async_set_color_temp(
                 kwargs[ATTR_COLOR_TEMP_KELVIN], brightness, transition
             )

@@ -181,7 +181,7 @@
       "macaddress": "1C61B4*"
     },
     {
-      "hostname": "l5*",
+      "hostname": "l[59]*",
       "macaddress": "5CE931*"
     },
     {

@@ -189,9 +189,13 @@
       "macaddress": "3C52A1*"
     },
     {
-      "hostname": "l5*",
+      "hostname": "l[59]*",
       "macaddress": "5C628B*"
     },
+    {
+      "hostname": "l[59]*",
+      "macaddress": "14EBB6*"
+    },
     {
       "hostname": "tp*",
       "macaddress": "5C628B*"

@@ -297,5 +301,5 @@
   "iot_class": "local_polling",
   "loggers": ["kasa"],
   "quality_scale": "platinum",
-  "requirements": ["python-kasa[speedups]==0.7.0.3"]
+  "requirements": ["python-kasa[speedups]==0.7.0.5"]
 }

@@ -8,7 +8,7 @@ from __future__ import annotations
 from collections.abc import Callable, Coroutine
 from dataclasses import dataclass
 import secrets
-from typing import Any
+from typing import TYPE_CHECKING, Any

 import aiounifi
 from aiounifi.interfaces.api_handlers import ItemEvent

@@ -44,6 +44,17 @@ from .entity import (
     async_wlan_device_info_fn,
 )

+if TYPE_CHECKING:
+    from .hub import UnifiHub
+
+
+@callback
+def async_port_power_cycle_available_fn(hub: UnifiHub, obj_id: str) -> bool:
+    """Check if port allows power cycle action."""
+    if not async_device_available_fn(hub, obj_id):
+        return False
+    return bool(hub.api.ports[obj_id].poe_enable)
+
+
 async def async_restart_device_control_fn(
     api: aiounifi.Controller, obj_id: str

@@ -96,7 +107,7 @@ ENTITY_DESCRIPTIONS: tuple[UnifiButtonEntityDescription, ...] = (
         entity_category=EntityCategory.CONFIG,
         device_class=ButtonDeviceClass.RESTART,
         api_handler_fn=lambda api: api.ports,
-        available_fn=async_device_available_fn,
+        available_fn=async_port_power_cycle_available_fn,
         control_fn=async_power_cycle_port_control_fn,
         device_info_fn=async_device_device_info_fn,
         name_fn=lambda port: f"{port.name} Power Cycle",

@@ -164,13 +164,12 @@ class UnifiFlowHandler(ConfigFlow, domain=UNIFI_DOMAIN):
         config_entry = self.reauth_config_entry
         abort_reason = "reauth_successful"

-        if (
-            config_entry is not None
-            and config_entry.state is not ConfigEntryState.NOT_LOADED
-        ):
-            hub = config_entry.runtime_data
-
-            if hub and hub.available:
+        if config_entry:
+            if (
+                config_entry.state is ConfigEntryState.LOADED
+                and (hub := config_entry.runtime_data)
+                and hub.available
+            ):
                 return self.async_abort(reason="already_configured")

             return self.async_update_reload_and_abort(

@@ -189,7 +189,6 @@ class BaseProtectEntity(Entity):
         self._async_get_ufp_enabled = description.get_ufp_enabled

         self._async_set_device_info()
-        self._async_update_device_from_protect(device)
         self._state_getters = tuple(
             partial(attrgetter(attr), self) for attr in self._state_attrs
         )

@@ -264,6 +263,7 @@ class BaseProtectEntity(Entity):
         self.async_on_remove(
             self.data.async_subscribe(self.device.mac, self._async_updated_event)
         )
+        self._async_update_device_from_protect(self.device)


 class ProtectDeviceEntity(BaseProtectEntity):

@@ -26,7 +26,7 @@ async def async_setup_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> b
     file = config_entry.data[CONF_FILE_PATH]

     upb = upb_lib.UpbPim({"url": url, "UPStartExportFile": file})
-    upb.connect()
+    await upb.async_connect()
     hass.data.setdefault(DOMAIN, {})
     hass.data[DOMAIN][config_entry.entry_id] = {"upb": upb}

@@ -40,7 +40,7 @@ async def _validate_input(data):

     upb = upb_lib.UpbPim({"url": url, "UPStartExportFile": file_path})

-    upb.connect(_connected_callback)
+    await upb.async_connect(_connected_callback)

     if not upb.config_ok:
         _LOGGER.error("Missing or invalid UPB file: %s", file_path)

@@ -6,5 +6,5 @@
   "documentation": "https://www.home-assistant.io/integrations/upb",
   "iot_class": "local_push",
   "loggers": ["upb_lib"],
-  "requirements": ["upb-lib==0.5.7"]
+  "requirements": ["upb-lib==0.5.8"]
 }

@@ -7,5 +7,5 @@
   "iot_class": "local_polling",
   "loggers": ["holidays"],
   "quality_scale": "internal",
-  "requirements": ["holidays==0.52"]
+  "requirements": ["holidays==0.53"]
 }

@@ -24,7 +24,7 @@ if TYPE_CHECKING:
 APPLICATION_NAME: Final = "HomeAssistant"
 MAJOR_VERSION: Final = 2024
 MINOR_VERSION: Final = 7
-PATCH_VERSION: Final = "2"
+PATCH_VERSION: Final = "3"
 __short_version__: Final = f"{MAJOR_VERSION}.{MINOR_VERSION}"
 __version__: Final = f"{__short_version__}.{PATCH_VERSION}"
 REQUIRED_PYTHON_VER: Final[tuple[int, int, int]] = (3, 12, 0)

@@ -827,7 +827,7 @@ DHCP: Final[list[dict[str, str | bool]]] = [
     },
     {
         "domain": "tplink",
-        "hostname": "l5*",
+        "hostname": "l[59]*",
         "macaddress": "5CE931*",
     },
     {

@@ -837,9 +837,14 @@ DHCP: Final[list[dict[str, str | bool]]] = [
     },
     {
         "domain": "tplink",
-        "hostname": "l5*",
+        "hostname": "l[59]*",
         "macaddress": "5C628B*",
     },
+    {
+        "domain": "tplink",
+        "hostname": "l[59]*",
+        "macaddress": "14EBB6*",
+    },
     {
         "domain": "tplink",
         "hostname": "tp*",
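
These hostname patterns are shell-style globs, so the new l[59]* entry covers hostnames starting with l5 or l9; a quick standalone check with Python's fnmatch and made-up hostnames:

    from fnmatch import fnmatch

    pattern = "l[59]*"
    assert fnmatch("l530-bedroom", pattern)
    assert fnmatch("l920-strip", pattern)
    assert not fnmatch("p110-plug", pattern)
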
@ -136,6 +136,9 @@ backoff>=2.0
|
|||||||
# v2 has breaking changes (#99218).
|
# v2 has breaking changes (#99218).
|
||||||
pydantic==1.10.17
|
pydantic==1.10.17
|
||||||
|
|
||||||
|
# Required for Python 3.12.4 compatibility (#119223).
|
||||||
|
mashumaro>=3.13.1
|
||||||
|
|
||||||
# Breaks asyncio
|
# Breaks asyncio
|
||||||
# https://github.com/pubnub/python/issues/130
|
# https://github.com/pubnub/python/issues/130
|
||||||
pubnub!=6.4.0
|
pubnub!=6.4.0
|
||||||
|
@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta"
|
|||||||
|
|
||||||
[project]
|
[project]
|
||||||
name = "homeassistant"
|
name = "homeassistant"
|
||||||
version = "2024.7.2"
|
version = "2024.7.3"
|
||||||
license = {text = "Apache-2.0"}
|
license = {text = "Apache-2.0"}
|
||||||
description = "Open-source home automation platform running on Python 3."
|
description = "Open-source home automation platform running on Python 3."
|
||||||
readme = "README.rst"
|
readme = "README.rst"
|
||||||
|
@ -60,7 +60,7 @@ PyFlume==0.6.5
|
|||||||
PyFronius==0.7.3
|
PyFronius==0.7.3
|
||||||
|
|
||||||
# homeassistant.components.pyload
|
# homeassistant.components.pyload
|
||||||
PyLoadAPI==1.2.0
|
PyLoadAPI==1.3.2
|
||||||
|
|
||||||
# homeassistant.components.mvglive
|
# homeassistant.components.mvglive
|
||||||
PyMVGLive==1.1.4
|
PyMVGLive==1.1.4
|
||||||
@ -90,7 +90,7 @@ PyQRCode==1.2.1
|
|||||||
PyRMVtransport==0.3.3
|
PyRMVtransport==0.3.3
|
||||||
|
|
||||||
# homeassistant.components.switchbot
|
# homeassistant.components.switchbot
|
||||||
PySwitchbot==0.48.0
|
PySwitchbot==0.48.1
|
||||||
|
|
||||||
# homeassistant.components.switchmate
|
# homeassistant.components.switchmate
|
||||||
PySwitchmate==0.5.1
|
PySwitchmate==0.5.1
|
||||||
@ -282,7 +282,7 @@ aiolifx-effects==0.3.2
|
|||||||
aiolifx-themes==0.4.15
|
aiolifx-themes==0.4.15
|
||||||
|
|
||||||
# homeassistant.components.lifx
|
# homeassistant.components.lifx
|
||||||
aiolifx==1.0.2
|
aiolifx==1.0.5
|
||||||
|
|
||||||
# homeassistant.components.livisi
|
# homeassistant.components.livisi
|
||||||
aiolivisi==0.0.19
|
aiolivisi==0.0.19
|
||||||
@ -344,7 +344,7 @@ aiopyarr==23.4.0
|
|||||||
aioqsw==0.3.5
|
aioqsw==0.3.5
|
||||||
|
|
||||||
# homeassistant.components.rainforest_raven
|
# homeassistant.components.rainforest_raven
|
||||||
aioraven==0.6.0
|
aioraven==0.7.0
|
||||||
|
|
||||||
# homeassistant.components.recollect_waste
|
# homeassistant.components.recollect_waste
|
||||||
aiorecollect==2023.09.0
|
aiorecollect==2023.09.0
|
||||||
@ -1087,7 +1087,7 @@ hole==0.8.0
|
|||||||
|
|
||||||
# homeassistant.components.holiday
|
# homeassistant.components.holiday
|
||||||
# homeassistant.components.workday
|
# homeassistant.components.workday
|
||||||
holidays==0.52
|
holidays==0.53
|
||||||
|
|
||||||
# homeassistant.components.frontend
|
# homeassistant.components.frontend
|
||||||
home-assistant-frontend==20240710.0
|
home-assistant-frontend==20240710.0
|
||||||
@ -1137,7 +1137,7 @@ ical==8.1.1
|
|||||||
icmplib==3.0
|
icmplib==3.0
|
||||||
|
|
||||||
# homeassistant.components.idasen_desk
|
# homeassistant.components.idasen_desk
|
||||||
idasen-ha==2.6.1
|
idasen-ha==2.6.2
|
||||||
|
|
||||||
# homeassistant.components.network
|
# homeassistant.components.network
|
||||||
ifaddr==0.2.0
|
ifaddr==0.2.0
|
||||||
@ -1203,7 +1203,7 @@ kegtron-ble==0.4.0
|
|||||||
kiwiki-client==0.1.1
|
kiwiki-client==0.1.1
|
||||||
|
|
||||||
# homeassistant.components.knocki
|
# homeassistant.components.knocki
|
||||||
knocki==0.2.0
|
knocki==0.3.1
|
||||||
|
|
||||||
# homeassistant.components.knx
|
# homeassistant.components.knx
|
||||||
knx-frontend==2024.1.20.105944
|
knx-frontend==2024.1.20.105944
|
||||||
@ -1492,7 +1492,7 @@ openhomedevice==2.2.0
|
|||||||
opensensemap-api==0.2.0
|
opensensemap-api==0.2.0
|
||||||
|
|
||||||
# homeassistant.components.enigma2
|
# homeassistant.components.enigma2
|
||||||
openwebifpy==4.2.4
|
openwebifpy==4.2.5
|
||||||
|
|
||||||
# homeassistant.components.luci
|
# homeassistant.components.luci
|
||||||
openwrt-luci-rpc==1.1.17
|
openwrt-luci-rpc==1.1.17
|
||||||
@ -1501,7 +1501,7 @@ openwrt-luci-rpc==1.1.17
|
|||||||
openwrt-ubus-rpc==0.0.2
|
openwrt-ubus-rpc==0.0.2
|
||||||
|
|
||||||
# homeassistant.components.opower
|
# homeassistant.components.opower
|
||||||
opower==0.4.7
|
opower==0.5.2
|
||||||
|
|
||||||
# homeassistant.components.oralb
|
# homeassistant.components.oralb
|
||||||
oralb-ble==0.17.6
|
oralb-ble==0.17.6
|
||||||
@ -2209,7 +2209,7 @@ pyswitchbee==1.8.0
|
|||||||
pytautulli==23.1.1
|
pytautulli==23.1.1
|
||||||
|
|
||||||
# homeassistant.components.tedee
|
# homeassistant.components.tedee
|
||||||
pytedee-async==0.2.17
|
pytedee-async==0.2.20
|
||||||
|
|
||||||
# homeassistant.components.tfiac
|
# homeassistant.components.tfiac
|
||||||
pytfiac==0.4
|
pytfiac==0.4
|
||||||
@ -2275,7 +2275,7 @@ python-join-api==0.0.9
|
|||||||
python-juicenet==1.1.0
|
python-juicenet==1.1.0
|
||||||
|
|
||||||
# homeassistant.components.tplink
|
# homeassistant.components.tplink
|
||||||
python-kasa[speedups]==0.7.0.3
|
python-kasa[speedups]==0.7.0.5
|
||||||
|
|
||||||
# homeassistant.components.lirc
|
# homeassistant.components.lirc
|
||||||
# python-lirc==1.2.3
|
# python-lirc==1.2.3
|
||||||
@ -2662,7 +2662,7 @@ stringcase==1.2.0
|
|||||||
subarulink==0.7.11
|
subarulink==0.7.11
|
||||||
|
|
||||||
# homeassistant.components.sunweg
|
# homeassistant.components.sunweg
|
||||||
sunweg==3.0.1
|
sunweg==3.0.2
|
||||||
|
|
||||||
# homeassistant.components.surepetcare
|
# homeassistant.components.surepetcare
|
||||||
surepy==0.9.0
|
surepy==0.9.0
|
||||||
@ -2807,7 +2807,7 @@ unifiled==0.11
|
|||||||
universal-silabs-flasher==0.0.20
|
universal-silabs-flasher==0.0.20
|
||||||
|
|
||||||
# homeassistant.components.upb
|
# homeassistant.components.upb
|
||||||
upb-lib==0.5.7
|
upb-lib==0.5.8
|
||||||
|
|
||||||
# homeassistant.components.upcloud
|
# homeassistant.components.upcloud
|
||||||
upcloud-api==2.5.1
|
upcloud-api==2.5.1
|
||||||
@ -2951,7 +2951,7 @@ youless-api==2.1.2
|
|||||||
youtubeaio==1.1.5
|
youtubeaio==1.1.5
|
||||||
|
|
||||||
# homeassistant.components.media_extractor
|
# homeassistant.components.media_extractor
|
||||||
yt-dlp==2024.07.01
|
yt-dlp==2024.07.16
|
||||||
|
|
||||||
# homeassistant.components.zamg
|
# homeassistant.components.zamg
|
||||||
zamg==0.3.6
|
zamg==0.3.6
|
||||||
|
@@ -51,7 +51,7 @@ PyFlume==0.6.5
PyFronius==0.7.3

# homeassistant.components.pyload
-PyLoadAPI==1.2.0
+PyLoadAPI==1.3.2

# homeassistant.components.met_eireann
PyMetEireann==2021.8.0
@@ -78,7 +78,7 @@ PyQRCode==1.2.1
PyRMVtransport==0.3.3

# homeassistant.components.switchbot
-PySwitchbot==0.48.0
+PySwitchbot==0.48.1

# homeassistant.components.syncthru
PySyncThru==0.7.10
@@ -255,7 +255,7 @@ aiolifx-effects==0.3.2
aiolifx-themes==0.4.15

# homeassistant.components.lifx
-aiolifx==1.0.2
+aiolifx==1.0.5

# homeassistant.components.livisi
aiolivisi==0.0.19
@@ -317,7 +317,7 @@ aiopyarr==23.4.0
aioqsw==0.3.5

# homeassistant.components.rainforest_raven
-aioraven==0.6.0
+aioraven==0.7.0

# homeassistant.components.recollect_waste
aiorecollect==2023.09.0
@@ -892,7 +892,7 @@ hole==0.8.0

# homeassistant.components.holiday
# homeassistant.components.workday
-holidays==0.52
+holidays==0.53

# homeassistant.components.frontend
home-assistant-frontend==20240710.0
@@ -933,7 +933,7 @@ ical==8.1.1
icmplib==3.0

# homeassistant.components.idasen_desk
-idasen-ha==2.6.1
+idasen-ha==2.6.2

# homeassistant.components.network
ifaddr==0.2.0
@@ -981,7 +981,7 @@ justnimbus==0.7.4
kegtron-ble==0.4.0

# homeassistant.components.knocki
-knocki==0.2.0
+knocki==0.3.1

# homeassistant.components.knx
knx-frontend==2024.1.20.105944
@@ -1207,10 +1207,10 @@ openerz-api==0.3.0
openhomedevice==2.2.0

# homeassistant.components.enigma2
-openwebifpy==4.2.4
+openwebifpy==4.2.5

# homeassistant.components.opower
-opower==0.4.7
+opower==0.5.2

# homeassistant.components.oralb
oralb-ble==0.17.6
@@ -1742,7 +1742,7 @@ pyswitchbee==1.8.0
pytautulli==23.1.1

# homeassistant.components.tedee
-pytedee-async==0.2.17
+pytedee-async==0.2.20

# homeassistant.components.motionmount
python-MotionMount==2.0.0
@@ -1775,7 +1775,7 @@ python-izone==1.2.9
python-juicenet==1.1.0

# homeassistant.components.tplink
-python-kasa[speedups]==0.7.0.3
+python-kasa[speedups]==0.7.0.5

# homeassistant.components.matter
python-matter-server==6.2.2
@@ -2081,7 +2081,7 @@ stringcase==1.2.0
subarulink==0.7.11

# homeassistant.components.sunweg
-sunweg==3.0.1
+sunweg==3.0.2

# homeassistant.components.surepetcare
surepy==0.9.0
@@ -2181,7 +2181,7 @@ unifi-discovery==1.2.0
universal-silabs-flasher==0.0.20

# homeassistant.components.upb
-upb-lib==0.5.7
+upb-lib==0.5.8

# homeassistant.components.upcloud
upcloud-api==2.5.1
@@ -2307,7 +2307,7 @@ youless-api==2.1.2
youtubeaio==1.1.5

# homeassistant.components.media_extractor
-yt-dlp==2024.07.01
+yt-dlp==2024.07.16

# homeassistant.components.zamg
zamg==0.3.6
@@ -157,6 +157,9 @@ backoff>=2.0
# v2 has breaking changes (#99218).
pydantic==1.10.17

+# Required for Python 3.12.4 compatibility (#119223).
+mashumaro>=3.13.1
+
# Breaks asyncio
# https://github.com/pubnub/python/issues/130
pubnub!=6.4.0
@@ -1979,7 +1979,7 @@ async def test_cover_position(
"friendly_name": "Test cover range",
"device_class": "blind",
"supported_features": supported_features,
-"position": position,
+"current_position": position,
},
)
appliance = await discovery_test(device, hass)
@@ -2296,7 +2296,7 @@ async def test_cover_position_range(
"friendly_name": "Test cover range",
"device_class": "blind",
"supported_features": 7,
-"position": 30,
+"current_position": 30,
},
)
appliance = await discovery_test(device, hass)
@@ -4658,7 +4658,7 @@ async def test_cover_semantics_position_and_tilt(hass: HomeAssistant) -> None:
"friendly_name": "Test cover semantics",
"device_class": "blind",
"supported_features": 255,
-"position": 30,
+"current_position": 30,
"tilt_position": 30,
},
)
@@ -2,7 +2,7 @@

from unittest.mock import AsyncMock

-from knocki import KnockiConnectionError
+from knocki import KnockiConnectionError, KnockiInvalidAuthError
import pytest

from homeassistant.components.knocki.const import DOMAIN
@@ -72,7 +72,11 @@ async def test_duplcate_entry(
@pytest.mark.parametrize(("field"), ["login", "link"])
@pytest.mark.parametrize(
("exception", "error"),
-[(KnockiConnectionError, "cannot_connect"), (Exception, "unknown")],
+[
+(KnockiConnectionError, "cannot_connect"),
+(KnockiInvalidAuthError, "invalid_auth"),
+(Exception, "unknown"),
+],
)
async def test_exceptions(
hass: HomeAssistant,
@@ -157,3 +157,25 @@ async def test_deprecated_yaml(
assert issue_registry.async_get_issue(
domain=HOMEASSISTANT_DOMAIN, issue_id=f"deprecated_yaml_{DOMAIN}"
)
+
+
+async def test_pyload_pre_0_5_0(
+hass: HomeAssistant,
+config_entry: MockConfigEntry,
+mock_pyloadapi: AsyncMock,
+) -> None:
+"""Test setup of the pyload sensor platform."""
+mock_pyloadapi.get_status.return_value = {
+"pause": False,
+"active": 1,
+"queue": 6,
+"total": 37,
+"speed": 5405963.0,
+"download": True,
+"reconnect": False,
+}
+config_entry.add_to_hass(hass)
+await hass.config_entries.async_setup(config_entry.entry_id)
+await hass.async_block_till_done()
+
+assert config_entry.state is ConfigEntryState.LOADED
@@ -1707,7 +1707,9 @@ async def test_database_corruption_while_running(
hass.states.async_set("test.lost", "on", {})

sqlite3_exception = DatabaseError("statement", {}, [])
-sqlite3_exception.__cause__ = sqlite3.DatabaseError()
+sqlite3_exception.__cause__ = sqlite3.DatabaseError(
+"database disk image is malformed"
+)

await async_wait_recording_done(hass)
with patch.object(
@@ -165,7 +165,9 @@ async def test_database_migration_encounters_corruption(
assert recorder.util.async_migration_in_progress(hass) is False

sqlite3_exception = DatabaseError("statement", {}, [])
-sqlite3_exception.__cause__ = sqlite3.DatabaseError()
+sqlite3_exception.__cause__ = sqlite3.DatabaseError(
+"database disk image is malformed"
+)

with (
patch("homeassistant.components.recorder.ALLOW_IN_MEMORY_DB", True),
@@ -210,7 +210,7 @@ async def test_purge_old_states_encouters_database_corruption(
await async_wait_recording_done(hass)

sqlite3_exception = DatabaseError("statement", {}, [])
-sqlite3_exception.__cause__ = sqlite3.DatabaseError()
+sqlite3_exception.__cause__ = sqlite3.DatabaseError("not a database")

with (
patch(
@@ -173,7 +173,7 @@ async def test_purge_old_states_encouters_database_corruption(
await async_wait_recording_done(hass)

sqlite3_exception = DatabaseError("statement", {}, [])
-sqlite3_exception.__cause__ = sqlite3.DatabaseError()
+sqlite3_exception.__cause__ = sqlite3.DatabaseError("not a database")

with (
patch(
@@ -54,6 +54,7 @@ TEST_FILE_NAME = f"{TEST_YEAR}{TEST_MONTH}{TEST_DAY}{TEST_HOUR}{TEST_MINUTE}00"
TEST_FILE_NAME_MP4 = f"{TEST_YEAR}{TEST_MONTH}{TEST_DAY}{TEST_HOUR}{TEST_MINUTE}00.mp4"
TEST_STREAM = "main"
TEST_CHANNEL = "0"
+TEST_CAM_NAME = "Cam new name"

TEST_MIME_TYPE = "application/x-mpegURL"
TEST_MIME_TYPE_MP4 = "video/mp4"
@@ -130,6 +131,7 @@ async def test_browsing(
"""Test browsing the Reolink three."""
entry_id = config_entry.entry_id
reolink_connect.api_version.return_value = 1
+reolink_connect.model = "Reolink TrackMix PoE"

with patch("homeassistant.components.reolink.PLATFORMS", [Platform.CAMERA]):
assert await hass.config_entries.async_setup(entry_id) is True
@@ -137,7 +139,7 @@ async def test_browsing(

entries = dr.async_entries_for_config_entry(device_registry, entry_id)
assert len(entries) > 0
-device_registry.async_update_device(entries[0].id, name_by_user="Cam new name")
+device_registry.async_update_device(entries[0].id, name_by_user=TEST_CAM_NAME)

caplog.set_level(logging.DEBUG)

@@ -149,6 +151,7 @@ async def test_browsing(
assert browse.title == "Reolink"
assert browse.identifier is None
assert browse.children[0].identifier == browse_root_id
+assert browse.children[0].title == f"{TEST_CAM_NAME} lens 0"

# browse resolution select
browse = await async_browse_media(hass, f"{URI_SCHEME}{DOMAIN}/{browse_root_id}")
@@ -359,6 +359,7 @@ def _mock_rpc_device(version: str | None = None):
status=MOCK_STATUS_RPC,
firmware_version="some fw string",
initialized=True,
+connected=True,
)
type(device).name = PropertyMock(return_value="Test name")
return device
@@ -263,6 +263,7 @@ async def test_rpc_sleeping_binary_sensor(
) -> None:
"""Test RPC online sleeping binary sensor."""
entity_id = f"{BINARY_SENSOR_DOMAIN}.test_name_cloud"
+monkeypatch.setattr(mock_rpc_device, "connected", False)
monkeypatch.setitem(mock_rpc_device.status["sys"], "wakeup_period", 1000)
config_entry = await init_integration(hass, 2, sleep_period=1000)

@@ -1114,6 +1114,7 @@ async def test_zeroconf_sleeping_device_not_triggers_refresh(
caplog: pytest.LogCaptureFixture,
) -> None:
"""Test zeroconf discovery does not triggers refresh for sleeping device."""
+monkeypatch.setattr(mock_rpc_device, "connected", False)
monkeypatch.setitem(mock_rpc_device.status["sys"], "wakeup_period", 1000)
entry = MockConfigEntry(
domain="shelly",
@@ -545,6 +545,7 @@ async def test_rpc_update_entry_sleep_period(
monkeypatch: pytest.MonkeyPatch,
) -> None:
"""Test RPC update entry sleep period."""
+monkeypatch.setattr(mock_rpc_device, "connected", False)
monkeypatch.setitem(mock_rpc_device.status["sys"], "wakeup_period", 600)
entry = await init_integration(hass, 2, sleep_period=600)
register_entity(
@@ -578,6 +579,7 @@ async def test_rpc_sleeping_device_no_periodic_updates(
) -> None:
"""Test RPC sleeping device no periodic updates."""
entity_id = f"{SENSOR_DOMAIN}.test_name_temperature"
+monkeypatch.setattr(mock_rpc_device, "connected", False)
monkeypatch.setitem(mock_rpc_device.status["sys"], "wakeup_period", 1000)
entry = await init_integration(hass, 2, sleep_period=1000)
register_entity(
@@ -609,6 +611,7 @@ async def test_rpc_sleeping_device_firmware_unsupported(
issue_registry: ir.IssueRegistry,
) -> None:
"""Test RPC sleeping device firmware not supported."""
+monkeypatch.setattr(mock_rpc_device, "connected", False)
monkeypatch.setattr(mock_rpc_device, "firmware_supported", False)
entry = await init_integration(hass, 2, sleep_period=3600)

@@ -912,6 +915,7 @@ async def test_rpc_sleeping_device_connection_error(
hass, BINARY_SENSOR_DOMAIN, "test_name_cloud", "cloud-cloud", entry
)
mock_restore_cache(hass, [State(entity_id, STATE_ON)])
+monkeypatch.setattr(mock_rpc_device, "connected", False)
monkeypatch.setattr(mock_rpc_device, "initialized", False)
await hass.config_entries.async_setup(entry.entry_id)
await hass.async_block_till_done()
@@ -939,3 +943,19 @@ async def test_rpc_sleeping_device_connection_error(

assert "Sleeping device did not update" in caplog.text
assert get_entity_state(hass, entity_id) == STATE_UNAVAILABLE
+
+
+async def test_rpc_already_connected(
+hass: HomeAssistant,
+freezer: FrozenDateTimeFactory,
+mock_rpc_device: Mock,
+caplog: pytest.LogCaptureFixture,
+) -> None:
+"""Test RPC ignore connect event if already connected."""
+await init_integration(hass, 2)
+
+mock_rpc_device.mock_online()
+await hass.async_block_till_done(wait_background_tasks=True)
+
+assert "already connected" in caplog.text
+mock_rpc_device.initialize.assert_called_once()
@@ -279,6 +279,7 @@ async def test_sleeping_rpc_device_online(
caplog: pytest.LogCaptureFixture,
) -> None:
"""Test sleeping RPC device online."""
+monkeypatch.setattr(mock_rpc_device, "connected", False)
monkeypatch.setitem(mock_rpc_device.status["sys"], "wakeup_period", device_sleep)
entry = await init_integration(hass, 2, sleep_period=entry_sleep)
assert "will resume when device is online" in caplog.text
@@ -297,6 +298,7 @@ async def test_sleeping_rpc_device_online_new_firmware(
caplog: pytest.LogCaptureFixture,
) -> None:
"""Test sleeping device Gen2 with firmware 1.0.0 or later."""
+monkeypatch.setattr(mock_rpc_device, "connected", False)
entry = await init_integration(hass, 2, sleep_period=None)
assert "will resume when device is online" in caplog.text

@@ -449,6 +449,7 @@ async def test_rpc_sleeping_sensor(
) -> None:
"""Test RPC online sleeping sensor."""
entity_id = f"{SENSOR_DOMAIN}.test_name_temperature"
+monkeypatch.setattr(mock_rpc_device, "connected", False)
monkeypatch.setitem(mock_rpc_device.status["sys"], "wakeup_period", 1000)
entry = await init_integration(hass, 2, sleep_period=1000)

@@ -600,6 +601,7 @@ async def test_rpc_sleeping_update_entity_service(
await async_setup_component(hass, "homeassistant", {})

entity_id = f"{SENSOR_DOMAIN}.test_name_temperature"
+monkeypatch.setattr(mock_rpc_device, "connected", False)
monkeypatch.setitem(mock_rpc_device.status["sys"], "wakeup_period", 1000)
await init_integration(hass, 2, sleep_period=1000)

@@ -334,6 +334,7 @@ async def test_rpc_sleeping_update(
monkeypatch: pytest.MonkeyPatch,
) -> None:
"""Test RPC sleeping device update entity."""
+monkeypatch.setattr(mock_rpc_device, "connected", False)
monkeypatch.setitem(mock_rpc_device.status["sys"], "wakeup_period", 1000)
monkeypatch.setitem(mock_rpc_device.shelly, "ver", "1")
monkeypatch.setitem(
@@ -23,6 +23,7 @@ from homeassistant.components.shelly.utils import (
get_block_device_sleep_period,
get_block_input_triggers,
get_device_uptime,
+get_host,
get_number_of_channels,
get_release_url,
get_rpc_channel_name,
@@ -274,3 +275,19 @@ def test_get_release_url(
result = get_release_url(gen, model, beta)

assert result is expected
+
+
+@pytest.mark.parametrize(
+("host", "expected"),
+[
+("shelly_device.local", "shelly_device.local"),
+("192.168.178.12", "192.168.178.12"),
+(
+"2001:0db8:85a3:0000:0000:8a2e:0370:7334",
+"[2001:0db8:85a3:0000:0000:8a2e:0370:7334]",
+),
+],
+)
+def test_get_host(host: str, expected: str) -> None:
+"""Test get_host function."""
+assert get_host(host) == expected
@@ -57,25 +57,26 @@ CREDENTIALS_HASH_LEGACY = ""
DEVICE_CONFIG_LEGACY = DeviceConfig(IP_ADDRESS)
DEVICE_CONFIG_DICT_LEGACY = DEVICE_CONFIG_LEGACY.to_dict(exclude_credentials=True)
CREDENTIALS = Credentials("foo", "bar")
-CREDENTIALS_HASH_AUTH = "abcdefghijklmnopqrstuv=="
-DEVICE_CONFIG_AUTH = DeviceConfig(
+CREDENTIALS_HASH_AES = "AES/abcdefghijklmnopqrstuvabcdefghijklmnopqrstuv=="
+CREDENTIALS_HASH_KLAP = "KLAP/abcdefghijklmnopqrstuv=="
+DEVICE_CONFIG_KLAP = DeviceConfig(
IP_ADDRESS,
credentials=CREDENTIALS,
connection_type=DeviceConnectionParameters(
-DeviceFamily.IotSmartPlugSwitch, DeviceEncryptionType.Klap
+DeviceFamily.SmartTapoPlug, DeviceEncryptionType.Klap
),
uses_http=True,
)
-DEVICE_CONFIG_AUTH2 = DeviceConfig(
+DEVICE_CONFIG_AES = DeviceConfig(
IP_ADDRESS2,
credentials=CREDENTIALS,
connection_type=DeviceConnectionParameters(
-DeviceFamily.IotSmartPlugSwitch, DeviceEncryptionType.Klap
+DeviceFamily.SmartTapoPlug, DeviceEncryptionType.Aes
),
uses_http=True,
)
-DEVICE_CONFIG_DICT_AUTH = DEVICE_CONFIG_AUTH.to_dict(exclude_credentials=True)
-DEVICE_CONFIG_DICT_AUTH2 = DEVICE_CONFIG_AUTH2.to_dict(exclude_credentials=True)
+DEVICE_CONFIG_DICT_KLAP = DEVICE_CONFIG_KLAP.to_dict(exclude_credentials=True)
+DEVICE_CONFIG_DICT_AES = DEVICE_CONFIG_AES.to_dict(exclude_credentials=True)

CREATE_ENTRY_DATA_LEGACY = {
CONF_HOST: IP_ADDRESS,
@@ -84,24 +85,28 @@ CREATE_ENTRY_DATA_LEGACY = {
CONF_DEVICE_CONFIG: DEVICE_CONFIG_DICT_LEGACY,
}

-CREATE_ENTRY_DATA_AUTH = {
+CREATE_ENTRY_DATA_KLAP = {
CONF_HOST: IP_ADDRESS,
CONF_ALIAS: ALIAS,
CONF_MODEL: MODEL,
-CONF_CREDENTIALS_HASH: CREDENTIALS_HASH_AUTH,
-CONF_DEVICE_CONFIG: DEVICE_CONFIG_DICT_AUTH,
+CONF_CREDENTIALS_HASH: CREDENTIALS_HASH_KLAP,
+CONF_DEVICE_CONFIG: DEVICE_CONFIG_DICT_KLAP,
}
-CREATE_ENTRY_DATA_AUTH2 = {
+CREATE_ENTRY_DATA_AES = {
CONF_HOST: IP_ADDRESS2,
CONF_ALIAS: ALIAS,
CONF_MODEL: MODEL,
-CONF_CREDENTIALS_HASH: CREDENTIALS_HASH_AUTH,
-CONF_DEVICE_CONFIG: DEVICE_CONFIG_DICT_AUTH2,
+CONF_CREDENTIALS_HASH: CREDENTIALS_HASH_AES,
+CONF_DEVICE_CONFIG: DEVICE_CONFIG_DICT_AES,
}
-NEW_CONNECTION_TYPE = DeviceConnectionParameters(
-DeviceFamily.IotSmartPlugSwitch, DeviceEncryptionType.Aes
+CONNECTION_TYPE_KLAP = DeviceConnectionParameters(
+DeviceFamily.SmartTapoPlug, DeviceEncryptionType.Klap
)
-NEW_CONNECTION_TYPE_DICT = NEW_CONNECTION_TYPE.to_dict()
+CONNECTION_TYPE_KLAP_DICT = CONNECTION_TYPE_KLAP.to_dict()
+CONNECTION_TYPE_AES = DeviceConnectionParameters(
+DeviceFamily.SmartTapoPlug, DeviceEncryptionType.Aes
+)
+CONNECTION_TYPE_AES_DICT = CONNECTION_TYPE_AES.to_dict()


def _load_feature_fixtures():
@@ -187,7 +192,7 @@ def _mocked_device(
device_id=DEVICE_ID,
alias=ALIAS,
model=MODEL,
-ip_address=IP_ADDRESS,
+ip_address: str | None = None,
modules: list[str] | None = None,
children: list[Device] | None = None,
features: list[str | Feature] | None = None,
@@ -202,12 +207,17 @@ def _mocked_device(
device.mac = mac
device.alias = alias
device.model = model
-device.host = ip_address
device.device_id = device_id
device.hw_info = {"sw_ver": "1.0.0", "hw_ver": "1.0.0"}
device.modules = {}
device.features = {}

+if not ip_address:
+ip_address = IP_ADDRESS
+else:
+device_config.host = ip_address
+device.host = ip_address
+
if modules:
device.modules = {
module_name: MODULE_TO_MOCK_GEN[module_name](device)
@@ -11,8 +11,10 @@ from homeassistant.core import HomeAssistant

from . import (
CREATE_ENTRY_DATA_LEGACY,
-CREDENTIALS_HASH_AUTH,
-DEVICE_CONFIG_AUTH,
+CREDENTIALS_HASH_AES,
+CREDENTIALS_HASH_KLAP,
+DEVICE_CONFIG_AES,
+DEVICE_CONFIG_KLAP,
IP_ADDRESS,
IP_ADDRESS2,
MAC_ADDRESS,
@@ -32,14 +34,14 @@ def mock_discovery():
discover_single=DEFAULT,
) as mock_discovery:
device = _mocked_device(
-device_config=copy.deepcopy(DEVICE_CONFIG_AUTH),
-credentials_hash=CREDENTIALS_HASH_AUTH,
+device_config=copy.deepcopy(DEVICE_CONFIG_KLAP),
+credentials_hash=CREDENTIALS_HASH_KLAP,
alias=None,
)
devices = {
"127.0.0.1": _mocked_device(
-device_config=copy.deepcopy(DEVICE_CONFIG_AUTH),
-credentials_hash=CREDENTIALS_HASH_AUTH,
+device_config=copy.deepcopy(DEVICE_CONFIG_KLAP),
+credentials_hash=CREDENTIALS_HASH_KLAP,
alias=None,
)
}
@@ -55,12 +57,15 @@ def mock_connect():
with patch("homeassistant.components.tplink.Device.connect") as mock_connect:
devices = {
IP_ADDRESS: _mocked_device(
-device_config=DEVICE_CONFIG_AUTH, credentials_hash=CREDENTIALS_HASH_AUTH
+device_config=DEVICE_CONFIG_KLAP,
+credentials_hash=CREDENTIALS_HASH_KLAP,
+ip_address=IP_ADDRESS,
),
IP_ADDRESS2: _mocked_device(
-device_config=DEVICE_CONFIG_AUTH,
-credentials_hash=CREDENTIALS_HASH_AUTH,
+device_config=DEVICE_CONFIG_AES,
+credentials_hash=CREDENTIALS_HASH_AES,
mac=MAC_ADDRESS2,
+ip_address=IP_ADDRESS2,
),
}

@@ -1,5 +1,6 @@
"""Test the tplink config flow."""

+import logging
from unittest.mock import AsyncMock, patch

from kasa import TimeoutError
@@ -11,6 +12,7 @@ from homeassistant.components.tplink import (
DOMAIN,
AuthenticationError,
Credentials,
+Device,
DeviceConfig,
KasaException,
)
@@ -33,19 +35,21 @@ from homeassistant.data_entry_flow import FlowResultType

from . import (
ALIAS,
-CREATE_ENTRY_DATA_AUTH,
-CREATE_ENTRY_DATA_AUTH2,
+CONNECTION_TYPE_KLAP_DICT,
+CREATE_ENTRY_DATA_AES,
+CREATE_ENTRY_DATA_KLAP,
CREATE_ENTRY_DATA_LEGACY,
-CREDENTIALS_HASH_AUTH,
+CREDENTIALS_HASH_AES,
+CREDENTIALS_HASH_KLAP,
DEFAULT_ENTRY_TITLE,
-DEVICE_CONFIG_DICT_AUTH,
+DEVICE_CONFIG_DICT_AES,
+DEVICE_CONFIG_DICT_KLAP,
DEVICE_CONFIG_DICT_LEGACY,
DHCP_FORMATTED_MAC_ADDRESS,
IP_ADDRESS,
MAC_ADDRESS,
MAC_ADDRESS2,
MODULE,
-NEW_CONNECTION_TYPE_DICT,
_mocked_device,
_patch_connect,
_patch_discovery,
@@ -135,7 +139,7 @@ async def test_discovery_auth(
CONF_HOST: IP_ADDRESS,
CONF_MAC: MAC_ADDRESS,
CONF_ALIAS: ALIAS,
-CONF_DEVICE_CONFIG: DEVICE_CONFIG_DICT_AUTH,
+CONF_DEVICE_CONFIG: DEVICE_CONFIG_DICT_KLAP,
},
)
await hass.async_block_till_done()
@@ -154,7 +158,7 @@ async def test_discovery_auth(

assert result2["type"] is FlowResultType.CREATE_ENTRY
assert result2["title"] == DEFAULT_ENTRY_TITLE
-assert result2["data"] == CREATE_ENTRY_DATA_AUTH
+assert result2["data"] == CREATE_ENTRY_DATA_KLAP
assert result2["context"]["unique_id"] == MAC_ADDRESS


@@ -187,7 +191,7 @@ async def test_discovery_auth_errors(
CONF_HOST: IP_ADDRESS,
CONF_MAC: MAC_ADDRESS,
CONF_ALIAS: ALIAS,
-CONF_DEVICE_CONFIG: DEVICE_CONFIG_DICT_AUTH,
+CONF_DEVICE_CONFIG: DEVICE_CONFIG_DICT_KLAP,
},
)
await hass.async_block_till_done()
@@ -218,7 +222,7 @@ async def test_discovery_auth_errors(
},
)
assert result3["type"] is FlowResultType.CREATE_ENTRY
-assert result3["data"] == CREATE_ENTRY_DATA_AUTH
+assert result3["data"] == CREATE_ENTRY_DATA_KLAP
assert result3["context"]["unique_id"] == MAC_ADDRESS


@@ -238,7 +242,7 @@ async def test_discovery_new_credentials(
CONF_HOST: IP_ADDRESS,
CONF_MAC: MAC_ADDRESS,
CONF_ALIAS: ALIAS,
-CONF_DEVICE_CONFIG: DEVICE_CONFIG_DICT_AUTH,
+CONF_DEVICE_CONFIG: DEVICE_CONFIG_DICT_KLAP,
},
)
await hass.async_block_till_done()
@@ -267,7 +271,7 @@ async def test_discovery_new_credentials(
{},
)
assert result3["type"] is FlowResultType.CREATE_ENTRY
-assert result3["data"] == CREATE_ENTRY_DATA_AUTH
+assert result3["data"] == CREATE_ENTRY_DATA_KLAP
assert result3["context"]["unique_id"] == MAC_ADDRESS


@@ -290,7 +294,7 @@ async def test_discovery_new_credentials_invalid(
CONF_HOST: IP_ADDRESS,
CONF_MAC: MAC_ADDRESS,
CONF_ALIAS: ALIAS,
-CONF_DEVICE_CONFIG: DEVICE_CONFIG_DICT_AUTH,
+CONF_DEVICE_CONFIG: DEVICE_CONFIG_DICT_KLAP,
},
)
await hass.async_block_till_done()
@@ -323,7 +327,7 @@ async def test_discovery_new_credentials_invalid(
},
)
assert result3["type"] is FlowResultType.CREATE_ENTRY
-assert result3["data"] == CREATE_ENTRY_DATA_AUTH
+assert result3["data"] == CREATE_ENTRY_DATA_KLAP
assert result3["context"]["unique_id"] == MAC_ADDRESS


@@ -543,7 +547,7 @@ async def test_manual_auth(
await hass.async_block_till_done()
assert result3["type"] is FlowResultType.CREATE_ENTRY
assert result3["title"] == DEFAULT_ENTRY_TITLE
-assert result3["data"] == CREATE_ENTRY_DATA_AUTH
+assert result3["data"] == CREATE_ENTRY_DATA_KLAP
assert result3["context"]["unique_id"] == MAC_ADDRESS


@@ -607,7 +611,7 @@ async def test_manual_auth_errors(
},
)
assert result4["type"] is FlowResultType.CREATE_ENTRY
-assert result4["data"] == CREATE_ENTRY_DATA_AUTH
+assert result4["data"] == CREATE_ENTRY_DATA_KLAP
assert result4["context"]["unique_id"] == MAC_ADDRESS

await hass.async_block_till_done()
@@ -791,16 +795,16 @@ async def test_integration_discovery_with_ip_change(
CONF_HOST: "127.0.0.2",
CONF_MAC: MAC_ADDRESS,
CONF_ALIAS: ALIAS,
-CONF_DEVICE_CONFIG: DEVICE_CONFIG_DICT_AUTH,
+CONF_DEVICE_CONFIG: DEVICE_CONFIG_DICT_KLAP,
},
)
await hass.async_block_till_done()
assert discovery_result["type"] is FlowResultType.ABORT
assert discovery_result["reason"] == "already_configured"
-assert mock_config_entry.data[CONF_DEVICE_CONFIG] == DEVICE_CONFIG_DICT_AUTH
+assert mock_config_entry.data[CONF_DEVICE_CONFIG] == DEVICE_CONFIG_DICT_KLAP
assert mock_config_entry.data[CONF_HOST] == "127.0.0.2"

-config = DeviceConfig.from_dict(DEVICE_CONFIG_DICT_AUTH)
+config = DeviceConfig.from_dict(DEVICE_CONFIG_DICT_KLAP)

mock_connect["connect"].reset_mock(side_effect=True)
bulb = _mocked_device(
@@ -832,8 +836,8 @@ async def test_integration_discovery_with_connection_change(
mock_config_entry = MockConfigEntry(
title="TPLink",
domain=DOMAIN,
-data=CREATE_ENTRY_DATA_AUTH,
-unique_id=MAC_ADDRESS,
+data=CREATE_ENTRY_DATA_AES,
+unique_id=MAC_ADDRESS2,
)
mock_config_entry.add_to_hass(hass)
with patch("homeassistant.components.tplink.Discover.discover", return_value={}):
@@ -849,13 +853,15 @@ async def test_integration_discovery_with_connection_change(
)
== 0
)
-assert mock_config_entry.data[CONF_DEVICE_CONFIG] == DEVICE_CONFIG_DICT_AUTH
-assert mock_config_entry.data[CONF_DEVICE_CONFIG].get(CONF_HOST) == "127.0.0.1"
-assert mock_config_entry.data[CONF_CREDENTIALS_HASH] == CREDENTIALS_HASH_AUTH
+assert mock_config_entry.data[CONF_HOST] == "127.0.0.2"
+assert mock_config_entry.data[CONF_DEVICE_CONFIG] == DEVICE_CONFIG_DICT_AES
+assert mock_config_entry.data[CONF_DEVICE_CONFIG].get(CONF_HOST) == "127.0.0.2"
+assert mock_config_entry.data[CONF_CREDENTIALS_HASH] == CREDENTIALS_HASH_AES

NEW_DEVICE_CONFIG = {
-**DEVICE_CONFIG_DICT_AUTH,
-CONF_CONNECTION_TYPE: NEW_CONNECTION_TYPE_DICT,
+**DEVICE_CONFIG_DICT_KLAP,
+CONF_CONNECTION_TYPE: CONNECTION_TYPE_KLAP_DICT,
+CONF_HOST: "127.0.0.2",
}
config = DeviceConfig.from_dict(NEW_DEVICE_CONFIG)
# Reset the connect mock so when the config flow reloads the entry it succeeds
@@ -870,8 +876,8 @@ async def test_integration_discovery_with_connection_change(
DOMAIN,
context={"source": config_entries.SOURCE_INTEGRATION_DISCOVERY},
data={
-CONF_HOST: "127.0.0.1",
-CONF_MAC: MAC_ADDRESS,
+CONF_HOST: "127.0.0.2",
+CONF_MAC: MAC_ADDRESS2,
CONF_ALIAS: ALIAS,
CONF_DEVICE_CONFIG: NEW_DEVICE_CONFIG,
},
@@ -880,8 +886,8 @@ async def test_integration_discovery_with_connection_change(
assert discovery_result["type"] is FlowResultType.ABORT
assert discovery_result["reason"] == "already_configured"
assert mock_config_entry.data[CONF_DEVICE_CONFIG] == NEW_DEVICE_CONFIG
-assert mock_config_entry.data[CONF_HOST] == "127.0.0.1"
-assert CREDENTIALS_HASH_AUTH not in mock_config_entry.data
+assert mock_config_entry.data[CONF_HOST] == "127.0.0.2"
+assert CREDENTIALS_HASH_AES not in mock_config_entry.data

assert mock_config_entry.state is ConfigEntryState.LOADED

@@ -953,6 +959,77 @@ async def test_reauth(
await hass.async_block_till_done()


+async def test_reauth_update_with_encryption_change(
+hass: HomeAssistant,
+mock_discovery: AsyncMock,
+mock_connect: AsyncMock,
+caplog: pytest.LogCaptureFixture,
+) -> None:
+"""Test reauth flow."""
+orig_side_effect = mock_connect["connect"].side_effect
+mock_connect["connect"].side_effect = AuthenticationError()
+mock_config_entry = MockConfigEntry(
+title="TPLink",
+domain=DOMAIN,
+data={**CREATE_ENTRY_DATA_AES},
+unique_id=MAC_ADDRESS2,
+)
+mock_config_entry.add_to_hass(hass)
+assert mock_config_entry.data[CONF_DEVICE_CONFIG] == DEVICE_CONFIG_DICT_AES
+assert mock_config_entry.data[CONF_CREDENTIALS_HASH] == CREDENTIALS_HASH_AES
+
+with patch("homeassistant.components.tplink.Discover.discover", return_value={}):
+await hass.config_entries.async_setup(mock_config_entry.entry_id)
+await hass.async_block_till_done()
+assert mock_config_entry.state is ConfigEntryState.SETUP_ERROR
+
+caplog.set_level(logging.DEBUG)
+flows = hass.config_entries.flow.async_progress()
+assert len(flows) == 1
+[result] = flows
+assert result["step_id"] == "reauth_confirm"
+assert mock_config_entry.data[CONF_DEVICE_CONFIG] == DEVICE_CONFIG_DICT_AES
+assert CONF_CREDENTIALS_HASH not in mock_config_entry.data
+
+new_config = DeviceConfig(
+"127.0.0.2",
+credentials=None,
+connection_type=Device.ConnectionParameters(
+Device.Family.SmartTapoPlug, Device.EncryptionType.Klap
+),
+uses_http=True,
+)
+mock_discovery["mock_device"].host = "127.0.0.2"
+mock_discovery["mock_device"].config = new_config
+mock_discovery["mock_device"].credentials_hash = None
+mock_connect["mock_devices"]["127.0.0.2"].config = new_config
+mock_connect["mock_devices"]["127.0.0.2"].credentials_hash = CREDENTIALS_HASH_KLAP
+
+mock_connect["connect"].side_effect = orig_side_effect
+result2 = await hass.config_entries.flow.async_configure(
+result["flow_id"],
+user_input={
+CONF_USERNAME: "fake_username",
+CONF_PASSWORD: "fake_password",
+},
+)
+await hass.async_block_till_done(wait_background_tasks=True)
+assert "Connection type changed for 127.0.0.2" in caplog.text
+credentials = Credentials("fake_username", "fake_password")
+mock_discovery["discover_single"].assert_called_once_with(
+"127.0.0.2", credentials=credentials
+)
+mock_discovery["mock_device"].update.assert_called_once_with()
+assert result2["type"] is FlowResultType.ABORT
+assert result2["reason"] == "reauth_successful"
+assert mock_config_entry.state is ConfigEntryState.LOADED
+assert mock_config_entry.data[CONF_DEVICE_CONFIG] == {
+**DEVICE_CONFIG_DICT_KLAP,
+CONF_HOST: "127.0.0.2",
+}
+assert mock_config_entry.data[CONF_CREDENTIALS_HASH] == CREDENTIALS_HASH_KLAP
+
+
async def test_reauth_update_from_discovery(
hass: HomeAssistant,
mock_config_entry: MockConfigEntry,
@@ -981,13 +1058,13 @@ async def test_reauth_update_from_discovery(
CONF_HOST: IP_ADDRESS,
CONF_MAC: MAC_ADDRESS,
CONF_ALIAS: ALIAS,
-CONF_DEVICE_CONFIG: DEVICE_CONFIG_DICT_AUTH,
+CONF_DEVICE_CONFIG: DEVICE_CONFIG_DICT_KLAP,
},
)
await hass.async_block_till_done()
assert discovery_result["type"] is FlowResultType.ABORT
assert discovery_result["reason"] == "already_configured"
-assert mock_config_entry.data[CONF_DEVICE_CONFIG] == DEVICE_CONFIG_DICT_AUTH
+assert mock_config_entry.data[CONF_DEVICE_CONFIG] == DEVICE_CONFIG_DICT_KLAP


async def test_reauth_update_from_discovery_with_ip_change(
@@ -1017,13 +1094,13 @@ async def test_reauth_update_from_discovery_with_ip_change(
CONF_HOST: "127.0.0.2",
CONF_MAC: MAC_ADDRESS,
CONF_ALIAS: ALIAS,
-CONF_DEVICE_CONFIG: DEVICE_CONFIG_DICT_AUTH,
+CONF_DEVICE_CONFIG: DEVICE_CONFIG_DICT_KLAP,
},
)
await hass.async_block_till_done()
assert discovery_result["type"] is FlowResultType.ABORT
assert discovery_result["reason"] == "already_configured"
-assert mock_config_entry.data[CONF_DEVICE_CONFIG] == DEVICE_CONFIG_DICT_AUTH
+assert mock_config_entry.data[CONF_DEVICE_CONFIG] == DEVICE_CONFIG_DICT_KLAP
assert mock_config_entry.data[CONF_HOST] == "127.0.0.2"


@@ -1040,7 +1117,7 @@ async def test_reauth_no_update_if_config_and_ip_the_same(
mock_config_entry,
data={
**mock_config_entry.data,
-CONF_DEVICE_CONFIG: DEVICE_CONFIG_DICT_AUTH,
+CONF_DEVICE_CONFIG: DEVICE_CONFIG_DICT_KLAP,
},
)
await hass.config_entries.async_setup(mock_config_entry.entry_id)
@@ -1051,7 +1128,7 @@ async def test_reauth_no_update_if_config_and_ip_the_same(
assert len(flows) == 1
[result] = flows
assert result["step_id"] == "reauth_confirm"
-assert mock_config_entry.data[CONF_DEVICE_CONFIG] == DEVICE_CONFIG_DICT_AUTH
+assert mock_config_entry.data[CONF_DEVICE_CONFIG] == DEVICE_CONFIG_DICT_KLAP

discovery_result = await hass.config_entries.flow.async_init(
DOMAIN,
@@ -1060,13 +1137,13 @@ async def test_reauth_no_update_if_config_and_ip_the_same(
CONF_HOST: IP_ADDRESS,
CONF_MAC: MAC_ADDRESS,
CONF_ALIAS: ALIAS,
-CONF_DEVICE_CONFIG: DEVICE_CONFIG_DICT_AUTH,
+CONF_DEVICE_CONFIG: DEVICE_CONFIG_DICT_KLAP,
},
)
await hass.async_block_till_done()
assert discovery_result["type"] is FlowResultType.ABORT
assert discovery_result["reason"] == "already_configured"
-assert mock_config_entry.data[CONF_DEVICE_CONFIG] == DEVICE_CONFIG_DICT_AUTH
+assert mock_config_entry.data[CONF_DEVICE_CONFIG] == DEVICE_CONFIG_DICT_KLAP
assert mock_config_entry.data[CONF_HOST] == IP_ADDRESS


@@ -1214,15 +1291,20 @@ async def test_discovery_timeout_connect(

async def test_reauth_update_other_flows(
hass: HomeAssistant,
-mock_config_entry: MockConfigEntry,
mock_discovery: AsyncMock,
mock_connect: AsyncMock,
) -> None:
"""Test reauth updates other reauth flows."""
+mock_config_entry = MockConfigEntry(
+title="TPLink",
+domain=DOMAIN,
+data={**CREATE_ENTRY_DATA_KLAP},
+unique_id=MAC_ADDRESS,
+)
mock_config_entry2 = MockConfigEntry(
title="TPLink",
domain=DOMAIN,
-data={**CREATE_ENTRY_DATA_AUTH2},
+data={**CREATE_ENTRY_DATA_AES},
unique_id=MAC_ADDRESS2,
)
default_side_effect = mock_connect["connect"].side_effect
@@ -1244,7 +1326,7 @@ async def test_reauth_update_other_flows(
flows_by_entry_id = {flow["context"]["entry_id"]: flow for flow in flows}
result = flows_by_entry_id[mock_config_entry.entry_id]
assert result["step_id"] == "reauth_confirm"
-assert mock_config_entry.data[CONF_DEVICE_CONFIG] == DEVICE_CONFIG_DICT_LEGACY
+assert mock_config_entry.data[CONF_DEVICE_CONFIG] == DEVICE_CONFIG_DICT_KLAP
result2 = await hass.config_entries.flow.async_configure(
result["flow_id"],
user_input={
@ -33,9 +33,9 @@ from homeassistant.setup import async_setup_component
|
|||||||
from homeassistant.util import dt as dt_util
|
from homeassistant.util import dt as dt_util
|
||||||
|
|
||||||
from . import (
|
from . import (
|
||||||
CREATE_ENTRY_DATA_AUTH,
|
CREATE_ENTRY_DATA_KLAP,
|
||||||
CREATE_ENTRY_DATA_LEGACY,
|
CREATE_ENTRY_DATA_LEGACY,
|
||||||
DEVICE_CONFIG_AUTH,
|
DEVICE_CONFIG_KLAP,
|
||||||
DEVICE_ID,
|
DEVICE_ID,
|
||||||
DEVICE_ID_MAC,
|
DEVICE_ID_MAC,
|
||||||
IP_ADDRESS,
|
IP_ADDRESS,
|
||||||
@ -178,7 +178,7 @@ async def test_config_entry_device_config(
|
|||||||
mock_config_entry = MockConfigEntry(
|
mock_config_entry = MockConfigEntry(
|
||||||
title="TPLink",
|
title="TPLink",
|
||||||
domain=DOMAIN,
|
domain=DOMAIN,
|
||||||
data={**CREATE_ENTRY_DATA_AUTH},
|
data={**CREATE_ENTRY_DATA_KLAP},
|
||||||
unique_id=MAC_ADDRESS,
|
unique_id=MAC_ADDRESS,
|
||||||
)
|
)
|
||||||
mock_config_entry.add_to_hass(hass)
|
mock_config_entry.add_to_hass(hass)
|
||||||
@ -197,7 +197,7 @@ async def test_config_entry_with_stored_credentials(
|
|||||||
mock_config_entry = MockConfigEntry(
|
mock_config_entry = MockConfigEntry(
|
||||||
title="TPLink",
|
title="TPLink",
|
||||||
domain=DOMAIN,
|
domain=DOMAIN,
|
||||||
data={**CREATE_ENTRY_DATA_AUTH},
|
data={**CREATE_ENTRY_DATA_KLAP},
|
||||||
unique_id=MAC_ADDRESS,
|
unique_id=MAC_ADDRESS,
|
||||||
)
|
)
|
||||||
auth = {
|
auth = {
|
||||||
@ -210,7 +210,7 @@ async def test_config_entry_with_stored_credentials(
|
|||||||
await hass.config_entries.async_setup(mock_config_entry.entry_id)
|
await hass.config_entries.async_setup(mock_config_entry.entry_id)
|
||||||
await hass.async_block_till_done()
|
await hass.async_block_till_done()
|
||||||
assert mock_config_entry.state is ConfigEntryState.LOADED
|
assert mock_config_entry.state is ConfigEntryState.LOADED
|
||||||
config = DEVICE_CONFIG_AUTH
|
config = DEVICE_CONFIG_KLAP
|
||||||
assert config.credentials != stored_credentials
|
assert config.credentials != stored_credentials
|
||||||
config.credentials = stored_credentials
|
config.credentials = stored_credentials
|
||||||
mock_connect["connect"].assert_called_once_with(config=config)
|
mock_connect["connect"].assert_called_once_with(config=config)
|
||||||
@ -223,7 +223,7 @@ async def test_config_entry_device_config_invalid(
|
|||||||
caplog: pytest.LogCaptureFixture,
|
caplog: pytest.LogCaptureFixture,
|
||||||
) -> None:
|
) -> None:
|
||||||
"""Test that an invalid device config logs an error and loads the config entry."""
|
"""Test that an invalid device config logs an error and loads the config entry."""
|
||||||
entry_data = copy.deepcopy(CREATE_ENTRY_DATA_AUTH)
|
     entry_data = copy.deepcopy(CREATE_ENTRY_DATA_KLAP)
     entry_data[CONF_DEVICE_CONFIG] = {"foo": "bar"}
     mock_config_entry = MockConfigEntry(
         title="TPLink",
@@ -263,7 +263,7 @@ async def test_config_entry_errors(
     mock_config_entry = MockConfigEntry(
         title="TPLink",
         domain=DOMAIN,
-        data={**CREATE_ENTRY_DATA_AUTH},
+        data={**CREATE_ENTRY_DATA_KLAP},
         unique_id=MAC_ADDRESS,
     )
     mock_config_entry.add_to_hass(hass)
@@ -520,11 +520,11 @@ async def test_move_credentials_hash(
     from the device.
     """
     device_config = {
-        **DEVICE_CONFIG_AUTH.to_dict(
+        **DEVICE_CONFIG_KLAP.to_dict(
             exclude_credentials=True, credentials_hash="theHash"
         )
     }
-    entry_data = {**CREATE_ENTRY_DATA_AUTH, CONF_DEVICE_CONFIG: device_config}
+    entry_data = {**CREATE_ENTRY_DATA_KLAP, CONF_DEVICE_CONFIG: device_config}

     entry = MockConfigEntry(
         title="TPLink",
@@ -567,11 +567,11 @@ async def test_move_credentials_hash_auth_error(
     in async_setup_entry.
     """
     device_config = {
-        **DEVICE_CONFIG_AUTH.to_dict(
+        **DEVICE_CONFIG_KLAP.to_dict(
             exclude_credentials=True, credentials_hash="theHash"
         )
     }
-    entry_data = {**CREATE_ENTRY_DATA_AUTH, CONF_DEVICE_CONFIG: device_config}
+    entry_data = {**CREATE_ENTRY_DATA_KLAP, CONF_DEVICE_CONFIG: device_config}

     entry = MockConfigEntry(
         title="TPLink",
@@ -610,11 +610,11 @@ async def test_move_credentials_hash_other_error(
     at the end of the test.
     """
     device_config = {
-        **DEVICE_CONFIG_AUTH.to_dict(
+        **DEVICE_CONFIG_KLAP.to_dict(
             exclude_credentials=True, credentials_hash="theHash"
         )
     }
-    entry_data = {**CREATE_ENTRY_DATA_AUTH, CONF_DEVICE_CONFIG: device_config}
+    entry_data = {**CREATE_ENTRY_DATA_KLAP, CONF_DEVICE_CONFIG: device_config}

     entry = MockConfigEntry(
         title="TPLink",
@@ -647,9 +647,9 @@ async def test_credentials_hash(
     hass: HomeAssistant,
 ) -> None:
     """Test credentials_hash used to call connect."""
-    device_config = {**DEVICE_CONFIG_AUTH.to_dict(exclude_credentials=True)}
+    device_config = {**DEVICE_CONFIG_KLAP.to_dict(exclude_credentials=True)}
     entry_data = {
-        **CREATE_ENTRY_DATA_AUTH,
+        **CREATE_ENTRY_DATA_KLAP,
         CONF_DEVICE_CONFIG: device_config,
         CONF_CREDENTIALS_HASH: "theHash",
     }
@@ -684,9 +684,9 @@ async def test_credentials_hash_auth_error(
     hass: HomeAssistant,
 ) -> None:
     """Test credentials_hash is deleted after an auth failure."""
-    device_config = {**DEVICE_CONFIG_AUTH.to_dict(exclude_credentials=True)}
+    device_config = {**DEVICE_CONFIG_KLAP.to_dict(exclude_credentials=True)}
     entry_data = {
-        **CREATE_ENTRY_DATA_AUTH,
+        **CREATE_ENTRY_DATA_KLAP,
         CONF_DEVICE_CONFIG: device_config,
         CONF_CREDENTIALS_HASH: "theHash",
     }
@@ -710,7 +710,7 @@ async def test_credentials_hash_auth_error(
     await hass.async_block_till_done()

     expected_config = DeviceConfig.from_dict(
-        DEVICE_CONFIG_AUTH.to_dict(exclude_credentials=True, credentials_hash="theHash")
+        DEVICE_CONFIG_KLAP.to_dict(exclude_credentials=True, credentials_hash="theHash")
     )
     connect_mock.assert_called_with(config=expected_config)
     assert entry.state is ConfigEntryState.SETUP_ERROR
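Aside (not part of the diff above): these tests revolve around persisting the device configuration without raw credentials while keeping a reusable `credentials_hash`. A minimal, hypothetical sketch of that pattern; the helper name and dict keys below are illustrative and not the integration's actual code:

def strip_credentials(config: dict, credentials_hash: str) -> dict:
    """Hypothetical helper: drop raw credentials from a stored config, keep a reusable hash."""
    stored = {k: v for k, v in config.items() if k not in ("username", "password")}
    stored["credentials_hash"] = credentials_hash
    return stored


stored = strip_credentials(
    {"host": "127.0.0.1", "username": "user", "password": "secret"}, "theHash"
)
assert "password" not in stored
assert stored["credentials_hash"] == "theHash"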
@@ -533,16 +533,16 @@ async def test_smart_strip_effects(hass: HomeAssistant) -> None:
     assert state.attributes[ATTR_EFFECT_LIST] == ["Off", "Effect1", "Effect2"]

     # Ensure setting color temp when an effect
-    # is in progress calls set_hsv to clear the effect
+    # is in progress calls set_effect to clear the effect
     await hass.services.async_call(
         LIGHT_DOMAIN,
         "turn_on",
         {ATTR_ENTITY_ID: entity_id, ATTR_COLOR_TEMP_KELVIN: 4000},
         blocking=True,
     )
-    light.set_hsv.assert_called_once_with(0, 0, None)
+    light_effect.set_effect.assert_called_once_with(LightEffect.LIGHT_EFFECTS_OFF)
     light.set_color_temp.assert_called_once_with(4000, brightness=None, transition=None)
-    light.set_hsv.reset_mock()
+    light_effect.set_effect.reset_mock()
     light.set_color_temp.reset_mock()

     await hass.services.async_call(
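Aside (illustrative, not the integration's implementation): the updated assertions describe turn_on clearing an active light effect through the effect module before applying the color temperature, rather than resetting HSV. A minimal sketch of that ordering with AsyncMock stand-ins; the function name and the "Off" effect string are assumptions for the example only:

from unittest.mock import AsyncMock
import asyncio


async def turn_on_with_color_temp(light, light_effect, kelvin, active_effect):
    """Illustrative only: the call ordering the updated test asserts."""
    if active_effect not in (None, "Off"):
        # Clear the running effect via the effect module first
        # (the old test expected an HSV reset here instead).
        await light_effect.set_effect("Off")
    await light.set_color_temp(kelvin, brightness=None, transition=None)


async def _demo():
    light, light_effect = AsyncMock(), AsyncMock()
    await turn_on_with_color_temp(light, light_effect, 4000, active_effect="Effect1")
    light_effect.set_effect.assert_awaited_once_with("Off")
    light.set_color_temp.assert_awaited_once_with(4000, brightness=None, transition=None)


asyncio.run(_demo())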
@@ -1,9 +1,11 @@
 """UniFi Network button platform tests."""

+from copy import deepcopy
 from datetime import timedelta
 from typing import Any
 from unittest.mock import patch

+from aiounifi.models.message import MessageKey
 import pytest

 from homeassistant.components.button import DOMAIN as BUTTON_DOMAIN, ButtonDeviceClass
@@ -319,3 +321,33 @@ async def test_wlan_button_entities(
         request_data,
         call,
     )
+
+
+@pytest.mark.parametrize("device_payload", [DEVICE_POWER_CYCLE_POE])
+@pytest.mark.usefixtures("config_entry_setup")
+async def test_power_cycle_availability(
+    hass: HomeAssistant,
+    mock_websocket_message,
+    device_payload: dict[str, Any],
+) -> None:
+    """Verify that disabling PoE marks entity as unavailable."""
+    entity_id = "button.switch_port_1_power_cycle"
+
+    assert hass.states.get(entity_id).state != STATE_UNAVAILABLE
+
+    # PoE disabled
+
+    device_1 = deepcopy(device_payload[0])
+    device_1["port_table"][0]["poe_enable"] = False
+    mock_websocket_message(message=MessageKey.DEVICE, data=device_1)
+    await hass.async_block_till_done()
+
+    assert hass.states.get(entity_id).state == STATE_UNAVAILABLE
+
+    # PoE enabled
+    device_1 = deepcopy(device_payload[0])
+    device_1["port_table"][0]["poe_enable"] = True
+    mock_websocket_message(message=MessageKey.DEVICE, data=device_1)
+    await hass.async_block_till_done()
+
+    assert hass.states.get(entity_id).state != STATE_UNAVAILABLE
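Aside (hypothetical, not taken from the integration source): the availability behaviour this new test exercises can be thought of as a predicate over the device's port table, where the power-cycle button is only meaningful while PoE is enabled on that port. A minimal sketch with illustrative names:

from dataclasses import dataclass


@dataclass
class Port:
    idx: int
    poe_enable: bool


def power_cycle_available(ports: list[Port], port_idx: int) -> bool:
    """Hypothetical availability check: PoE must be enabled on the port."""
    return any(p.idx == port_idx and p.poe_enable for p in ports)


# Mirrors the test: disabling PoE on port 1 makes the button unavailable.
ports = [Port(idx=1, poe_enable=True)]
assert power_cycle_available(ports, 1)
ports[0].poe_enable = False
assert not power_cycle_available(ports, 1)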
@@ -1,5 +1,6 @@
 """Test UniFi Network config flow."""

+from collections.abc import Callable
 import socket
 from unittest.mock import PropertyMock, patch

@@ -338,6 +339,44 @@ async def test_reauth_flow_update_configuration(
     assert config_entry.data[CONF_PASSWORD] == "new_pass"


+async def test_reauth_flow_update_configuration_on_not_loaded_entry(
+    hass: HomeAssistant, config_entry_factory: Callable[[], ConfigEntry]
+) -> None:
+    """Verify reauth flow can update hub configuration on a not loaded entry."""
+    with patch("aiounifi.Controller.login", side_effect=aiounifi.errors.RequestError):
+        config_entry = await config_entry_factory()
+
+    result = await hass.config_entries.flow.async_init(
+        UNIFI_DOMAIN,
+        context={
+            "source": SOURCE_REAUTH,
+            "unique_id": config_entry.unique_id,
+            "entry_id": config_entry.entry_id,
+        },
+        data=config_entry.data,
+    )
+
+    assert result["type"] is FlowResultType.FORM
+    assert result["step_id"] == "user"
+
+    result = await hass.config_entries.flow.async_configure(
+        result["flow_id"],
+        user_input={
+            CONF_HOST: "1.2.3.4",
+            CONF_USERNAME: "new_name",
+            CONF_PASSWORD: "new_pass",
+            CONF_PORT: 1234,
+            CONF_VERIFY_SSL: True,
+        },
+    )
+
+    assert result["type"] is FlowResultType.ABORT
+    assert result["reason"] == "reauth_successful"
+    assert config_entry.data[CONF_HOST] == "1.2.3.4"
+    assert config_entry.data[CONF_USERNAME] == "new_name"
+    assert config_entry.data[CONF_PASSWORD] == "new_pass"
+
+
 @pytest.mark.parametrize("client_payload", [CLIENTS])
 @pytest.mark.parametrize("device_payload", [DEVICES])
 @pytest.mark.parametrize("wlan_payload", [WLANS])
@@ -1,7 +1,7 @@
 """Test the UPB Control config flow."""

 from asyncio import TimeoutError
-from unittest.mock import MagicMock, PropertyMock, patch
+from unittest.mock import AsyncMock, PropertyMock, patch

 from homeassistant import config_entries
 from homeassistant.components.upb.const import DOMAIN
@@ -15,11 +15,11 @@ def mocked_upb(sync_complete=True, config_ok=True):
     def _upb_lib_connect(callback):
         callback()

-    upb_mock = MagicMock()
+    upb_mock = AsyncMock()
     type(upb_mock).network_id = PropertyMock(return_value="42")
     type(upb_mock).config_ok = PropertyMock(return_value=config_ok)
     if sync_complete:
-        upb_mock.connect.side_effect = _upb_lib_connect
+        upb_mock.async_connect.side_effect = _upb_lib_connect
     return patch(
         "homeassistant.components.upb.config_flow.upb_lib.UpbPim", return_value=upb_mock
     )
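Aside (not part of the diff): the mock type switch matters because an awaited method on a plain MagicMock returns another MagicMock, which cannot be awaited, while AsyncMock call results are awaitable and still honour side_effect callables. A small self-contained sketch; the `async_connect` name here simply mirrors the mocked attribute above and is not a claim about the upb_lib API:

from unittest.mock import AsyncMock, MagicMock
import asyncio


async def _demo() -> None:
    # MagicMock: attribute calls return MagicMock objects, which raise
    # TypeError when awaited.
    sync_mock = MagicMock()
    try:
        await sync_mock.async_connect()
    except TypeError:
        print("MagicMock result is not awaitable")

    # AsyncMock: attribute calls return coroutines; awaiting them runs the
    # configured side_effect, matching the fixture's callback pattern.
    async_mock = AsyncMock()
    async_mock.async_connect.side_effect = lambda callback: callback()
    await async_mock.async_connect(lambda: print("connected callback fired"))
    async_mock.async_connect.assert_awaited_once()


asyncio.run(_demo())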