commit 919f4dd719
Merge pull request #69509 from home-assistant/rc
@@ -3,7 +3,7 @@
     "name": "Airzone",
     "config_flow": true,
     "documentation": "https://www.home-assistant.io/integrations/airzone",
-    "requirements": ["aioairzone==0.2.3"],
+    "requirements": ["aioairzone==0.3.3"],
     "codeowners": ["@Noltari"],
     "iot_class": "local_polling",
     "loggers": ["aioairzone"]
@@ -363,6 +363,9 @@ async def async_wait_for_elk_to_sync(
     # VN is the first command sent for panel, when we get
     # it back we know we are logged in either with or without a password
     elk.add_handler("VN", first_response)
+    # Some panels do not respond to the vn request so we
+    # check for lw as well
+    elk.add_handler("LW", first_response)
     elk.add_handler("sync_complete", sync_complete)
     for name, event, timeout in (
         ("login", login_event, login_timeout),
@@ -489,7 +489,7 @@ async def async_validate(hass: HomeAssistant) -> EnergyPreferencesValidation:

    # Fetch the needed statistics metadata
    statistics_metadata.update(
-        await hass.async_add_executor_job(
+        await recorder.get_instance(hass).async_add_executor_job(
            functools.partial(
                recorder.statistics.get_metadata,
                hass,
@@ -260,7 +260,7 @@ async def ws_get_fossil_energy_consumption(
        statistic_ids.append(msg["co2_statistic_id"])

    # Fetch energy + CO2 statistics
-    statistics = await hass.async_add_executor_job(
+    statistics = await recorder.get_instance(hass).async_add_executor_job(
        recorder.statistics.statistics_during_period,
        hass,
        start_time,
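Both hunks above move a blocking statistics query off the general Home Assistant executor and onto the recorder's dedicated database executor via recorder.get_instance(hass).async_add_executor_job. A minimal self-contained sketch of that pattern, assuming nothing beyond the standard library (DB_EXECUTOR and get_metadata are illustrative stand-ins, not Home Assistant APIs):

import asyncio
import functools
from concurrent.futures import ThreadPoolExecutor

# Hypothetical stand-in for the recorder's dedicated DB executor.
DB_EXECUTOR = ThreadPoolExecutor(max_workers=1, thread_name_prefix="recorder")

def get_metadata(db, statistic_ids):
    """Blocking stand-in for a recorder database query."""
    return {sid: {"unit": "kWh"} for sid in statistic_ids}

async def fetch_metadata(db, statistic_ids):
    loop = asyncio.get_running_loop()
    # functools.partial bundles the arguments, just as the diff above does.
    return await loop.run_in_executor(
        DB_EXECUTOR, functools.partial(get_metadata, db, statistic_ids)
    )

print(asyncio.run(fetch_metadata(None, ["sensor.energy"])))

Routing database work through one dedicated executor keeps it from starving (or being starved by) unrelated blocking jobs on the shared pool.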
@@ -1,6 +1,7 @@
 """Config flow for AVM FRITZ!SmartHome."""
 from __future__ import annotations

+import ipaddress
 from typing import Any
 from urllib.parse import urlparse

@@ -120,6 +121,12 @@ class FritzboxConfigFlow(ConfigFlow, domain=DOMAIN):
         assert isinstance(host, str)
         self.context[CONF_HOST] = host

+        if (
+            ipaddress.ip_address(host).version == 6
+            and ipaddress.ip_address(host).is_link_local
+        ):
+            return self.async_abort(reason="ignore_ip6_link_local")
+
         if uuid := discovery_info.upnp.get(ssdp.ATTR_UPNP_UDN):
             if uuid.startswith("uuid:"):
                 uuid = uuid[5:]
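For reference, the standard-library ipaddress checks added above behave like this (runnable illustration; the addresses are arbitrary examples). IPv6 link-local addresses (fe80::/10) are rejected because they are not routable without a zone/scope id:

import ipaddress

for host in ("10.0.0.1", "1234::1", "fe80::1"):
    addr = ipaddress.ip_address(host)
    print(host, "version:", addr.version, "link-local:", addr.is_link_local)
# 10.0.0.1 version: 4 link-local: False
# 1234::1 version: 6 link-local: False
# fe80::1 version: 6 link-local: True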
@@ -28,6 +28,7 @@
   "abort": {
     "already_in_progress": "[%key:common::config_flow::abort::already_in_progress%]",
     "already_configured": "[%key:common::config_flow::abort::already_configured_device%]",
+    "ignore_ip6_link_local": "IPv6 link local address is not supported.",
     "no_devices_found": "[%key:common::config_flow::abort::no_devices_found%]",
     "not_supported": "Connected to AVM FRITZ!Box but it's unable to control Smart Home devices.",
     "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]"
@@ -3,6 +3,7 @@
   "abort": {
     "already_configured": "Device is already configured",
     "already_in_progress": "Configuration flow is already in progress",
+    "ignore_ip6_link_local": "IPv6 link local address is not supported.",
     "no_devices_found": "No devices found on the network",
     "not_supported": "Connected to AVM FRITZ!Box but it's unable to control Smart Home devices.",
     "reauth_successful": "Re-authentication was successful"
@@ -109,6 +109,20 @@ def build_schema(
     return vol.Schema(spec)


+def build_schema_content_type(user_input: dict[str, Any] | MappingProxyType[str, Any]):
+    """Create schema for conditional 2nd page specifying stream content_type."""
+    return vol.Schema(
+        {
+            vol.Required(
+                CONF_CONTENT_TYPE,
+                description={
+                    "suggested_value": user_input.get(CONF_CONTENT_TYPE, "image/jpeg")
+                },
+            ): str,
+        }
+    )
+
+
 def get_image_type(image):
     """Get the format of downloaded bytes that could be an image."""
     fmt = None
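A runnable voluptuous sketch of the second-page schema above; the plain string key stands in for the CONF_CONTENT_TYPE constant, and the "suggested_value" description is the convention the Home Assistant frontend uses to pre-fill a form field:

import voluptuous as vol

def build_schema_content_type(user_input: dict) -> vol.Schema:
    # Required key, with a suggested default shown in the UI.
    return vol.Schema(
        {
            vol.Required(
                "content_type",
                description={
                    "suggested_value": user_input.get("content_type", "image/jpeg")
                },
            ): str,
        }
    )

schema = build_schema_content_type({})
print(schema({"content_type": "image/png"}))  # {'content_type': 'image/png'}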
@@ -129,14 +143,14 @@ async def async_test_still(hass, info) -> tuple[dict[str, str], str | None]:
     """Verify that the still image is valid before we create an entity."""
     fmt = None
     if not (url := info.get(CONF_STILL_IMAGE_URL)):
-        return {}, None
+        return {}, info.get(CONF_CONTENT_TYPE, "image/jpeg")
     if not isinstance(url, template_helper.Template) and url:
         url = cv.template(url)
         url.hass = hass
     try:
         url = url.async_render(parse_result=False)
     except TemplateError as err:
-        _LOGGER.error("Error parsing template %s: %s", url, err)
+        _LOGGER.warning("Problem rendering template %s: %s", url, err)
         return {CONF_STILL_IMAGE_URL: "template_error"}, None
     verify_ssl = info.get(CONF_VERIFY_SSL)
     auth = generate_auth(info)
@@ -228,6 +242,11 @@ class GenericIPCamConfigFlow(ConfigFlow, domain=DOMAIN):

     VERSION = 1

+    def __init__(self):
+        """Initialize Generic ConfigFlow."""
+        self.cached_user_input: dict[str, Any] = {}
+        self.cached_title = ""
+
     @staticmethod
     def async_get_options_flow(
         config_entry: ConfigEntry,
@@ -238,8 +257,8 @@ class GenericIPCamConfigFlow(ConfigFlow, domain=DOMAIN):
     def check_for_existing(self, options):
         """Check whether an existing entry is using the same URLs."""
         return any(
-            entry.options[CONF_STILL_IMAGE_URL] == options[CONF_STILL_IMAGE_URL]
-            and entry.options[CONF_STREAM_SOURCE] == options[CONF_STREAM_SOURCE]
+            entry.options.get(CONF_STILL_IMAGE_URL) == options.get(CONF_STILL_IMAGE_URL)
+            and entry.options.get(CONF_STREAM_SOURCE) == options.get(CONF_STREAM_SOURCE)
             for entry in self._async_current_entries()
         )

@@ -264,10 +283,17 @@ class GenericIPCamConfigFlow(ConfigFlow, domain=DOMAIN):
             if not errors:
                 user_input[CONF_CONTENT_TYPE] = still_format
                 user_input[CONF_LIMIT_REFETCH_TO_URL_CHANGE] = False
-                await self.async_set_unique_id(self.flow_id)
-                return self.async_create_entry(
-                    title=name, data={}, options=user_input
-                )
+                if user_input.get(CONF_STILL_IMAGE_URL):
+                    await self.async_set_unique_id(self.flow_id)
+                    return self.async_create_entry(
+                        title=name, data={}, options=user_input
+                    )
+                # If user didn't specify a still image URL,
+                # we can't (yet) autodetect it from the stream.
+                # Show a conditional 2nd page to ask them the content type.
+                self.cached_user_input = user_input
+                self.cached_title = name
+                return await self.async_step_content_type()
         else:
             user_input = DEFAULT_DATA.copy()

@@ -277,12 +303,36 @@ class GenericIPCamConfigFlow(ConfigFlow, domain=DOMAIN):
             errors=errors,
         )

+    async def async_step_content_type(
+        self, user_input: dict[str, Any] | None = None
+    ) -> FlowResult:
+        """Handle the user's choice for stream content_type."""
+        if user_input is not None:
+            user_input = self.cached_user_input | user_input
+            await self.async_set_unique_id(self.flow_id)
+            return self.async_create_entry(
+                title=self.cached_title, data={}, options=user_input
+            )
+        return self.async_show_form(
+            step_id="content_type",
+            data_schema=build_schema_content_type({}),
+            errors={},
+        )
+
     async def async_step_import(self, import_config) -> FlowResult:
         """Handle config import from yaml."""
         # abort if we've already got this one.
         if self.check_for_existing(import_config):
             return self.async_abort(reason="already_exists")
         errors, still_format = await async_test_still(self.hass, import_config)
+        if errors.get(CONF_STILL_IMAGE_URL) == "template_error":
+            _LOGGER.warning(
+                "Could not render template, but it could be that "
+                "referenced entities are still initialising. "
+                "Continuing assuming that imported YAML template is valid"
+            )
+            errors.pop(CONF_STILL_IMAGE_URL)
+            still_format = import_config.get(CONF_CONTENT_TYPE, "image/jpeg")
         errors = errors | await async_test_stream(self.hass, import_config)
         still_url = import_config.get(CONF_STILL_IMAGE_URL)
         stream_url = import_config.get(CONF_STREAM_SOURCE)
@@ -308,6 +358,8 @@ class GenericOptionsFlowHandler(OptionsFlow):
     def __init__(self, config_entry: ConfigEntry) -> None:
         """Initialize Generic IP Camera options flow."""
         self.config_entry = config_entry
+        self.cached_user_input: dict[str, Any] = {}
+        self.cached_title = ""

     async def async_step_init(
         self, user_input: dict[str, Any] | None = None
@@ -316,29 +368,52 @@ class GenericOptionsFlowHandler(OptionsFlow):
         errors: dict[str, str] = {}

         if user_input is not None:
-            errors, still_format = await async_test_still(self.hass, user_input)
+            errors, still_format = await async_test_still(
+                self.hass, self.config_entry.options | user_input
+            )
             errors = errors | await async_test_stream(self.hass, user_input)
             still_url = user_input.get(CONF_STILL_IMAGE_URL)
             stream_url = user_input.get(CONF_STREAM_SOURCE)
             if not errors:
-                return self.async_create_entry(
-                    title=slug_url(still_url) or slug_url(stream_url) or DEFAULT_NAME,
-                    data={
-                        CONF_AUTHENTICATION: user_input.get(CONF_AUTHENTICATION),
-                        CONF_STREAM_SOURCE: user_input.get(CONF_STREAM_SOURCE),
-                        CONF_PASSWORD: user_input.get(CONF_PASSWORD),
-                        CONF_STILL_IMAGE_URL: user_input.get(CONF_STILL_IMAGE_URL),
-                        CONF_CONTENT_TYPE: still_format,
-                        CONF_USERNAME: user_input.get(CONF_USERNAME),
-                        CONF_LIMIT_REFETCH_TO_URL_CHANGE: user_input[
-                            CONF_LIMIT_REFETCH_TO_URL_CHANGE
-                        ],
-                        CONF_FRAMERATE: user_input[CONF_FRAMERATE],
-                        CONF_VERIFY_SSL: user_input[CONF_VERIFY_SSL],
-                    },
-                )
+                title = slug_url(still_url) or slug_url(stream_url) or DEFAULT_NAME
+                data = {
+                    CONF_AUTHENTICATION: user_input.get(CONF_AUTHENTICATION),
+                    CONF_STREAM_SOURCE: user_input.get(CONF_STREAM_SOURCE),
+                    CONF_PASSWORD: user_input.get(CONF_PASSWORD),
+                    CONF_STILL_IMAGE_URL: user_input.get(CONF_STILL_IMAGE_URL),
+                    CONF_CONTENT_TYPE: still_format
+                    or self.config_entry.options.get(CONF_CONTENT_TYPE),
+                    CONF_USERNAME: user_input.get(CONF_USERNAME),
+                    CONF_LIMIT_REFETCH_TO_URL_CHANGE: user_input[
+                        CONF_LIMIT_REFETCH_TO_URL_CHANGE
+                    ],
+                    CONF_FRAMERATE: user_input[CONF_FRAMERATE],
+                    CONF_VERIFY_SSL: user_input[CONF_VERIFY_SSL],
+                }
+                if still_url:
+                    return self.async_create_entry(
+                        title=title,
+                        data=data,
+                    )
+                self.cached_title = title
+                self.cached_user_input = data
+                return await self.async_step_content_type()

         return self.async_show_form(
             step_id="init",
             data_schema=build_schema(user_input or self.config_entry.options, True),
             errors=errors,
         )

+    async def async_step_content_type(
+        self, user_input: dict[str, Any] | None = None
+    ) -> FlowResult:
+        """Handle the user's choice for stream content_type."""
+        if user_input is not None:
+            user_input = self.cached_user_input | user_input
+            return self.async_create_entry(title=self.cached_title, data=user_input)
+        return self.async_show_form(
+            step_id="content_type",
+            data_schema=build_schema_content_type(self.cached_user_input),
+            errors={},
+        )
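The content-type steps above merge the cached first-page input with the second-page answer using the dict union operator (Python 3.9+), where right-hand keys win. A quick runnable check:

# Minimal illustration of the cached_user_input | user_input merge.
cached = {"content_type": "image/jpeg", "framerate": 5}
answer = {"content_type": "image/png"}
print(cached | answer)  # {'content_type': 'image/png', 'framerate': 5}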
@@ -30,11 +30,16 @@
         "limit_refetch_to_url_change": "Limit refetch to url change",
         "password": "[%key:common::config_flow::data::password%]",
         "username": "[%key:common::config_flow::data::username%]",
+        "content_type": "Content Type",
         "framerate": "Frame Rate (Hz)",
         "verify_ssl": "[%key:common::config_flow::data::verify_ssl%]"
       }
     },
+    "content_type": {
+      "description": "Specify the content type for the stream.",
+      "data": {
+        "content_type": "Content Type"
+      }
+    },
     "confirm": {
       "description": "[%key:common::config_flow::description::confirm_setup%]"
     }
@@ -51,10 +56,15 @@
         "limit_refetch_to_url_change": "[%key:component::generic::config::step::user::data::limit_refetch_to_url_change%]",
         "password": "[%key:common::config_flow::data::password%]",
         "username": "[%key:common::config_flow::data::username%]",
+        "content_type": "[%key:component::generic::config::step::user::data::content_type%]",
         "framerate": "[%key:component::generic::config::step::user::data::framerate%]",
         "verify_ssl": "[%key:common::config_flow::data::verify_ssl%]"
       }
     },
+    "content_type": {
+      "description": "[%key:component::generic::config::step::content_type::description%]",
+      "data": {
+        "content_type": "[%key:component::generic::config::step::content_type::data::content_type%]"
+      }
+    }
   },
   "error": {
@@ -23,10 +23,15 @@
     "confirm": {
       "description": "Do you want to start set up?"
     },
+    "content_type": {
+      "data": {
+        "content_type": "Content Type"
+      },
+      "description": "Specify the content type for the stream."
+    },
     "user": {
       "data": {
         "authentication": "Authentication",
+        "content_type": "Content Type",
         "framerate": "Frame Rate (Hz)",
         "limit_refetch_to_url_change": "Limit refetch to url change",
         "password": "Password",
@@ -57,10 +62,15 @@
       "unknown": "Unexpected error"
     },
     "step": {
+      "content_type": {
+        "data": {
+          "content_type": "Content Type"
+        },
+        "description": "Specify the content type for the stream."
+      },
       "init": {
         "data": {
           "authentication": "Authentication",
+          "content_type": "Content Type",
           "framerate": "Frame Rate (Hz)",
           "limit_refetch_to_url_change": "Limit refetch to url change",
           "password": "Password",
@@ -5,6 +5,7 @@ from __future__ import annotations
 from collections.abc import Awaitable, Callable
 import datetime
 import logging
+import time
 from typing import Any

 from googleapiclient import discovery as google_discovery
@@ -58,7 +59,7 @@ class DeviceAuth(config_entry_oauth2_flow.LocalOAuth2Implementation):
             "refresh_token": creds.refresh_token,
             "scope": " ".join(creds.scopes),
             "token_type": "Bearer",
-            "expires_in": creds.token_expiry.timestamp(),
+            "expires_in": creds.token_expiry.timestamp() - time.time(),
         }

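The one-line fix above reflects that OAuth "expires_in" is a relative lifetime in seconds, while token_expiry is an absolute point in time, so the current time must be subtracted. A small runnable illustration:

import datetime
import time

# Token that expires one hour from now.
token_expiry = datetime.datetime.now() + datetime.timedelta(hours=1)

# Wrong: an epoch timestamp (~1.7e9), far in excess of any sane lifetime.
print(token_expiry.timestamp())

# Right: seconds remaining until expiry, roughly 3600.
print(round(token_expiry.timestamp() - time.time()))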
@@ -157,16 +158,16 @@ def _async_google_creds(hass: HomeAssistant, token: dict[str, Any]) -> Credentials:
         client_id=conf[CONF_CLIENT_ID],
         client_secret=conf[CONF_CLIENT_SECRET],
         refresh_token=token["refresh_token"],
-        token_expiry=token["expires_at"],
+        token_expiry=datetime.datetime.fromtimestamp(token["expires_at"]),
         token_uri=oauth2client.GOOGLE_TOKEN_URI,
         scopes=[conf[CONF_CALENDAR_ACCESS].scope],
         user_agent=None,
     )


-def _api_time_format(time: datetime.datetime | None) -> str | None:
+def _api_time_format(date_time: datetime.datetime | None) -> str | None:
     """Convert a datetime to the api string format."""
-    return time.isoformat("T") if time else None
+    return date_time.isoformat("T") if date_time else None


 class GoogleCalendarService:
@@ -183,7 +183,9 @@ class GoogleCalendarEventDevice(CalendarEventDevice):
         valid_items = filter(self._event_filter, items)
         self._event = copy.deepcopy(next(valid_items, None))
         if self._event:
-            (summary, offset) = extract_offset(self._event["summary"], self._offset)
+            (summary, offset) = extract_offset(
+                self._event.get("summary", ""), self._offset
+            )
             self._event["summary"] = summary
             self._offset_reached = is_offset_reached(
                 get_date(self._event["start"]), offset
@@ -824,7 +824,7 @@ class HassioDataUpdateCoordinator(DataUpdateCoordinator):
         self.data = {}
         self.entry_id = config_entry.entry_id
         self.dev_reg = dev_reg
-        self.is_hass_os = "hassos" in get_info(self.hass)
+        self.is_hass_os = (get_info(self.hass) or {}).get("hassos") is not None

     async def _async_update_data(self) -> dict[str, Any]:
         """Update data via library."""
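The is_hass_os change is subtle: the Supervisor info payload always contains a "hassos" key, but its value is None on non-OS installs, so a membership test gives the wrong answer. A quick runnable illustration (the payload mirrors the test fixtures later in this commit):

info = {"supervisor": "222", "homeassistant": "0.110.0", "hassos": None}

print("hassos" in info)                # True  -- misleading signal
print(info.get("hassos") is not None)  # False -- correct: not running HA OS

# The (get_info(...) or {}) guard additionally tolerates get_info returning None.
print((None or {}).get("hassos") is not None)  # False, no crash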
@@ -891,6 +891,12 @@ class HassioDataUpdateCoordinator(DataUpdateCoordinator):
         if stale_addons := supervisor_addon_devices - set(new_data[DATA_KEY_ADDONS]):
             async_remove_addons_from_dev_reg(self.dev_reg, stale_addons)

+        if not self.is_hass_os and (
+            dev := self.dev_reg.async_get_device({(DOMAIN, "OS")})
+        ):
+            # Remove the OS device if it exists and the installation is not hassos
+            self.dev_reg.async_remove_device(dev.id)
+
         # If there are new add-ons, we should reload the config entry so we can
         # create new devices and entities. We can return an empty dict because
         # coordinator will be recreated.
@@ -652,7 +652,7 @@ def _exclude_by_entity_registry(
         (entry := ent_reg.async_get(entity_id))
         and (
             entry.hidden_by is not None
-            or (not include_entity_category or entry.entity_category is not None)
+            or (not include_entity_category and entry.entity_category is not None)
         )
     )

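The one-word or-to-and fix above changes the truth table: with "or", every entity was excluded whenever include_entity_category was False, regardless of whether it actually had a category. A runnable check of both variants:

# old: exclude when (not include_cat) or has_cat
# new: exclude when (not include_cat) and has_cat
for include_cat in (True, False):
    for has_cat in (True, False):
        old = (not include_cat) or has_cat
        new = (not include_cat) and has_cat
        print(f"include={include_cat!s:5} has_category={has_cat!s:5} "
              f"old_excludes={old!s:5} new_excludes={new}")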
@@ -204,9 +204,10 @@ class ONVIFDevice:

         if self._dt_diff_seconds > 5:
             LOGGER.warning(
-                "The date/time on the device (UTC) is '%s', "
+                "The date/time on %s (UTC) is '%s', "
                 "which is different from the system '%s', "
                 "this could lead to authentication issues",
+                self.name,
                 cam_date_utc,
                 system_date,
             )
@@ -223,13 +223,18 @@ def call_base_info(power_wall: Powerwall, host: str) -> PowerwallBaseInfo:

 def _fetch_powerwall_data(power_wall: Powerwall) -> PowerwallData:
     """Process and update powerwall data."""
+    try:
+        backup_reserve = power_wall.get_backup_reserve_percentage()
+    except MissingAttributeError:
+        backup_reserve = None
+
     return PowerwallData(
         charge=power_wall.get_charge(),
         site_master=power_wall.get_sitemaster(),
         meters=power_wall.get_meters(),
         grid_services_active=power_wall.is_grid_services_active(),
         grid_status=power_wall.get_grid_status(),
-        backup_reserve=power_wall.get_backup_reserve_percentage(),
+        backup_reserve=backup_reserve,
     )

@@ -38,7 +38,7 @@ class PowerwallData:
     meters: MetersAggregates
     grid_services_active: bool
     grid_status: GridStatus
-    backup_reserve: float
+    backup_reserve: float | None


 class PowerwallRuntimeData(TypedDict):
@@ -117,9 +117,11 @@ async def async_setup_entry(
     data: PowerwallData = coordinator.data
     entities: list[PowerWallEntity] = [
         PowerWallChargeSensor(powerwall_data),
-        PowerWallBackupReserveSensor(powerwall_data),
     ]

+    if data.backup_reserve is not None:
+        entities.append(PowerWallBackupReserveSensor(powerwall_data))
+
     for meter in data.meters.meters:
         entities.append(PowerWallExportSensor(powerwall_data, meter))
         entities.append(PowerWallImportSensor(powerwall_data, meter))
@@ -190,8 +192,10 @@ class PowerWallBackupReserveSensor(PowerWallEntity, SensorEntity):
         return f"{self.base_unique_id}_backup_reserve"

     @property
-    def native_value(self) -> int:
+    def native_value(self) -> int | None:
         """Get the current value in percentage."""
+        if self.data.backup_reserve is None:
+            return None
         return round(self.data.backup_reserve)

@@ -2,8 +2,8 @@
   "domain": "remote_rpi_gpio",
   "name": "remote_rpi_gpio",
   "documentation": "https://www.home-assistant.io/integrations/remote_rpi_gpio",
-  "requirements": ["gpiozero==1.5.1"],
+  "requirements": ["gpiozero==1.6.2", "pigpio==1.78"],
   "codeowners": [],
   "iot_class": "local_push",
-  "loggers": ["gpiozero"]
+  "loggers": ["gpiozero", "pigpio"]
 }
@@ -130,9 +130,12 @@ class Sun(Entity):
         self._config_listener = self.hass.bus.async_listen(
             EVENT_CORE_CONFIG_UPDATE, self.update_location
         )
-        self._loaded_listener = self.hass.bus.async_listen(
-            EVENT_COMPONENT_LOADED, self.loading_complete
-        )
+        if DOMAIN in hass.config.components:
+            self.update_location()
+        else:
+            self._loaded_listener = self.hass.bus.async_listen(
+                EVENT_COMPONENT_LOADED, self.loading_complete
+            )

     @callback
     def loading_complete(self, event_: Event) -> None:
@@ -158,6 +161,7 @@ class Sun(Entity):
         """Remove the loaded listener."""
         if self._loaded_listener:
             self._loaded_listener()
+            self._loaded_listener = None

     @callback
     def remove_listeners(self):
@@ -18,7 +18,10 @@ from homeassistant.util import Throttle

 from .const import (
     CONF_FALLBACK,
+    CONST_OVERLAY_MANUAL,
+    CONST_OVERLAY_TADO_DEFAULT,
     CONST_OVERLAY_TADO_MODE,
+    CONST_OVERLAY_TADO_OPTIONS,
     DATA,
     DOMAIN,
     INSIDE_TEMPERATURE_MEASUREMENT,
@@ -51,7 +54,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:

     username = entry.data[CONF_USERNAME]
     password = entry.data[CONF_PASSWORD]
-    fallback = entry.options.get(CONF_FALLBACK, CONST_OVERLAY_TADO_MODE)
+    fallback = entry.options.get(CONF_FALLBACK, CONST_OVERLAY_TADO_DEFAULT)

     tadoconnector = TadoConnector(hass, username, password, fallback)

@@ -99,7 +102,16 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
 def _async_import_options_from_data_if_missing(hass: HomeAssistant, entry: ConfigEntry):
     options = dict(entry.options)
     if CONF_FALLBACK not in options:
-        options[CONF_FALLBACK] = entry.data.get(CONF_FALLBACK, CONST_OVERLAY_TADO_MODE)
+        options[CONF_FALLBACK] = entry.data.get(
+            CONF_FALLBACK, CONST_OVERLAY_TADO_DEFAULT
+        )
         hass.config_entries.async_update_entry(entry, options=options)

+    if options[CONF_FALLBACK] not in CONST_OVERLAY_TADO_OPTIONS:
+        if options[CONF_FALLBACK]:
+            options[CONF_FALLBACK] = CONST_OVERLAY_TADO_MODE
+        else:
+            options[CONF_FALLBACK] = CONST_OVERLAY_MANUAL
+        hass.config_entries.async_update_entry(entry, options=options)

@@ -11,7 +11,13 @@ from homeassistant.const import CONF_PASSWORD, CONF_USERNAME
 from homeassistant.core import callback
 from homeassistant.data_entry_flow import FlowResult

-from .const import CONF_FALLBACK, CONST_OVERLAY_TADO_OPTIONS, DOMAIN, UNIQUE_ID
+from .const import (
+    CONF_FALLBACK,
+    CONST_OVERLAY_TADO_DEFAULT,
+    CONST_OVERLAY_TADO_OPTIONS,
+    DOMAIN,
+    UNIQUE_ID,
+)

 _LOGGER = logging.getLogger(__name__)

@@ -126,7 +132,10 @@ class OptionsFlowHandler(config_entries.OptionsFlow):
         data_schema = vol.Schema(
             {
                 vol.Optional(
-                    CONF_FALLBACK, default=self.config_entry.options.get(CONF_FALLBACK)
+                    CONF_FALLBACK,
+                    default=self.config_entry.options.get(
+                        CONF_FALLBACK, CONST_OVERLAY_TADO_DEFAULT
+                    ),
                 ): vol.In(CONST_OVERLAY_TADO_OPTIONS),
             }
         )
@@ -1,17 +1,6 @@
 """Support for Telegram bot to send messages only."""
-import logging
-
-from . import initialize_bot
-
-_LOGGER = logging.getLogger(__name__)


-async def async_setup_platform(hass, config):
+async def async_setup_platform(hass, bot, config):
     """Set up the Telegram broadcast platform."""
-    bot = initialize_bot(config)
-
-    bot_config = await hass.async_add_executor_job(bot.getMe)
-    _LOGGER.debug(
-        "Telegram broadcast platform setup with bot %s", bot_config["username"]
-    )
     return True
@@ -103,7 +103,7 @@ RANDOM_EFFECT_DICT: Final = {
     vol.Optional("random_seed", default=100): vol.All(
         vol.Coerce(int), vol.Range(min=1, max=100)
     ),
-    vol.Required("backgrounds"): vol.All(
+    vol.Optional("backgrounds"): vol.All(
         cv.ensure_list,
         vol.Length(min=1, max=16),
         [vol.All(vol.Coerce(tuple), HSV_SEQUENCE)],
@@ -366,7 +366,7 @@ class TPLinkSmartLightStrip(TPLinkSmartBulb):
         fadeoff: int,
         init_states: tuple[int, int, int],
         random_seed: int,
-        backgrounds: Sequence[tuple[int, int, int]],
+        backgrounds: Sequence[tuple[int, int, int]] | None = None,
         hue_range: tuple[int, int] | None = None,
         saturation_range: tuple[int, int] | None = None,
         brightness_range: tuple[int, int] | None = None,
@@ -378,8 +378,9 @@ class TPLinkSmartLightStrip(TPLinkSmartBulb):
             "type": "random",
             "init_states": [init_states],
             "random_seed": random_seed,
-            "backgrounds": backgrounds,
         }
+        if backgrounds:
+            effect["backgrounds"] = backgrounds
         if fadeoff:
             effect["fadeoff"] = fadeoff
         if hue_range:
@@ -93,7 +93,7 @@ random_effect:
         - [199, 89, 50]
         - [160, 50, 50]
         - [180, 100, 50]
-    required: true
+    required: false
     selector:
       object:
 segments:
@@ -116,9 +116,6 @@ class ProtectFlowHandler(config_entries.ConfigFlow, domain=DOMAIN):
                 self.hass.config_entries.async_update_entry(
                     entry, data={**entry.data, CONF_HOST: new_host}
                 )
-                self.hass.async_create_task(
-                    self.hass.config_entries.async_reload(entry.entry_id)
-                )
                 return self.async_abort(reason="already_configured")
             if entry_host in (direct_connect_domain, source_ip) or (
                 entry_has_direct_connect
@@ -106,27 +106,24 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:

     async def async_reset_meters(service_call):
         """Reset all sensors of a meter."""
-        entity_id = service_call.data["entity_id"]
+        meters = service_call.data["entity_id"]

-        domain = split_entity_id(entity_id)[0]
-        if domain == DOMAIN:
-            for entity in hass.data[DATA_LEGACY_COMPONENT].entities:
-                if entity_id == entity.entity_id:
-                    _LOGGER.debug(
-                        "forward reset meter from %s to %s",
-                        entity_id,
-                        entity.tracked_entity_id,
-                    )
-                    entity_id = entity.tracked_entity_id
-
-        _LOGGER.debug("reset meter %s", entity_id)
-        async_dispatcher_send(hass, SIGNAL_RESET_METER, entity_id)
+        for meter in meters:
+            _LOGGER.debug("resetting meter %s", meter)
+            domain, entity = split_entity_id(meter)
+            # backward compatibility up to 2022.07:
+            if domain == DOMAIN:
+                async_dispatcher_send(
+                    hass, SIGNAL_RESET_METER, f"{SELECT_DOMAIN}.{entity}"
+                )
+            else:
+                async_dispatcher_send(hass, SIGNAL_RESET_METER, meter)

     hass.services.async_register(
         DOMAIN,
         SERVICE_RESET,
         async_reset_meters,
-        vol.Schema({ATTR_ENTITY_ID: cv.entity_id}),
+        vol.Schema({ATTR_ENTITY_ID: vol.All(cv.ensure_list, [cv.entity_id])}),
    )

    if DOMAIN not in config:
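A runnable voluptuous sketch of the widened service schema above; ensure_list and entity_id here are simplified stand-ins for the homeassistant.helpers.config_validation helpers. The effect is that the reset service now accepts either one entity id or a list of them:

import voluptuous as vol

def ensure_list(value):
    """Simplified stand-in for cv.ensure_list."""
    return value if isinstance(value, list) else [value]

def entity_id(value: str) -> str:
    """Simplified stand-in for cv.entity_id."""
    if "." not in value:
        raise vol.Invalid(f"invalid entity id {value}")
    return value.lower()

schema = vol.Schema({"entity_id": vol.All(ensure_list, [entity_id])})
print(schema({"entity_id": "select.meter_a"}))
print(schema({"entity_id": ["select.meter_a", "select.meter_b"]}))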
@@ -6,7 +6,6 @@ reset:
   target:
     entity:
-      domain: select
       integration: utility_meter

 next_tariff:
   name: Next Tariff
@@ -2,7 +2,7 @@
   "domain": "version",
   "name": "Version",
   "documentation": "https://www.home-assistant.io/integrations/version",
-  "requirements": ["pyhaversion==22.04.0"],
+  "requirements": ["pyhaversion==22.4.1"],
   "codeowners": ["@fabaff", "@ludeeus"],
   "quality_scale": "internal",
   "iot_class": "local_push",
@@ -232,7 +232,7 @@ GROUP_MEMBER_SCHEMA = vol.All(
     vol.Schema(
         {
             vol.Required(ATTR_IEEE): IEEE_SCHEMA,
-            vol.Required(ATTR_ENDPOINT_ID): int,
+            vol.Required(ATTR_ENDPOINT_ID): vol.Coerce(int),
         }
     ),
     _cv_group_member,
@@ -244,8 +244,8 @@ CLUSTER_BINDING_SCHEMA = vol.All(
         {
             vol.Required(ATTR_NAME): cv.string,
             vol.Required(ATTR_TYPE): cv.string,
-            vol.Required(ATTR_ID): int,
-            vol.Required(ATTR_ENDPOINT_ID): int,
+            vol.Required(ATTR_ID): vol.Coerce(int),
+            vol.Required(ATTR_ENDPOINT_ID): vol.Coerce(int),
         }
     ),
     _cv_cluster_binding,
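Why vol.Coerce(int) instead of a bare int in the two schemas above: incoming payloads may deliver numbers as strings, and a plain type validator rejects them rather than converting. Runnable illustration:

import voluptuous as vol

strict = vol.Schema({"endpoint_id": int})
coercing = vol.Schema({"endpoint_id": vol.Coerce(int)})

print(coercing({"endpoint_id": "1"}))  # {'endpoint_id': 1}
try:
    strict({"endpoint_id": "1"})
except vol.Invalid as err:
    print("strict schema rejects strings:", err)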
@@ -8,7 +8,12 @@ import logging
 from typing import Any

 import zigpy.exceptions
-from zigpy.zcl.foundation import ConfigureReportingResponseRecord, Status
+from zigpy.zcl.foundation import (
+    CommandSchema,
+    ConfigureReportingResponseRecord,
+    Status,
+    ZCLAttributeDef,
+)

 from homeassistant.const import ATTR_COMMAND
 from homeassistant.core import callback
@@ -20,6 +25,7 @@ from ..const import (
     ATTR_ATTRIBUTE_ID,
     ATTR_ATTRIBUTE_NAME,
     ATTR_CLUSTER_ID,
+    ATTR_PARAMS,
     ATTR_TYPE,
     ATTR_UNIQUE_ID,
     ATTR_VALUE,
@@ -111,7 +117,11 @@ class ZigbeeChannel(LogMixin):
         if not hasattr(self, "_value_attribute") and self.REPORT_CONFIG:
             attr = self.REPORT_CONFIG[0].get("attr")
             if isinstance(attr, str):
-                self.value_attribute = self.cluster.attributes_by_name.get(attr)
+                attribute: ZCLAttributeDef = self.cluster.attributes_by_name.get(attr)
+                if attribute is not None:
+                    self.value_attribute = attribute.id
+                else:
+                    self.value_attribute = None
             else:
                 self.value_attribute = attr
         self._status = ChannelStatus.CREATED
@@ -354,14 +364,27 @@ class ZigbeeChannel(LogMixin):
         """Handle ZDO commands on this cluster."""

     @callback
-    def zha_send_event(self, command: str, args: int | dict) -> None:
+    def zha_send_event(self, command: str, arg: list | dict | CommandSchema) -> None:
         """Relay events to hass."""

+        if isinstance(arg, CommandSchema):
+            args = [a for a in arg if a is not None]
+            params = arg.as_dict()
+        elif isinstance(arg, (list, dict)):
+            # Quirks can directly send lists and dicts to ZHA this way
+            args = arg
+            params = {}
+        else:
+            raise TypeError(f"Unexpected zha_send_event {command!r} argument: {arg!r}")
+
         self._ch_pool.zha_send_event(
             {
                 ATTR_UNIQUE_ID: self.unique_id,
                 ATTR_CLUSTER_ID: self.cluster.cluster_id,
                 ATTR_COMMAND: command,
+                # Maintain backwards compatibility with the old zigpy response format
                 ATTR_ARGS: args,
+                ATTR_PARAMS: params,
             }
         )
@@ -43,6 +43,7 @@ ATTR_NEIGHBORS = "neighbors"
 ATTR_NODE_DESCRIPTOR = "node_descriptor"
 ATTR_NWK = "nwk"
 ATTR_OUT_CLUSTERS = "out_clusters"
+ATTR_PARAMS = "params"
 ATTR_POWER_SOURCE = "power_source"
 ATTR_PROFILE_ID = "profile_id"
 ATTR_QUIRK_APPLIED = "quirk_applied"
@@ -661,7 +661,11 @@ class ZHADevice(LogMixin):
     async def async_add_to_group(self, group_id: int) -> None:
         """Add this device to the provided zigbee group."""
         try:
-            await self._zigpy_device.add_to_group(group_id)
+            # A group name is required. However, the spec also explicitly states that
+            # the group name can be ignored by the receiving device if a device cannot
+            # store it, so we cannot rely on it existing after being written. This is
+            # only done to make the ZCL command valid.
+            await self._zigpy_device.add_to_group(group_id, name=f"0x{group_id:04X}")
         except (zigpy.exceptions.ZigbeeException, asyncio.TimeoutError) as ex:
             self.debug(
                 "Failed to add device '%s' to group: 0x%04x ex: %s",
@@ -687,7 +691,9 @@ class ZHADevice(LogMixin):
     ) -> None:
         """Add the device endpoint to the provided zigbee group."""
         try:
-            await self._zigpy_device.endpoints[endpoint_id].add_to_group(group_id)
+            await self._zigpy_device.endpoints[endpoint_id].add_to_group(
+                group_id, name=f"0x{group_id:04X}"
+            )
         except (zigpy.exceptions.ZigbeeException, asyncio.TimeoutError) as ex:
             self.debug(
                 "Failed to add endpoint: %s for device: '%s' to group: 0x%04x ex: %s",
@@ -2,7 +2,6 @@
 from __future__ import annotations

 import asyncio
-import collections
 import logging
 from typing import TYPE_CHECKING, Any, NamedTuple

@@ -30,9 +29,12 @@ class GroupMember(NamedTuple):
     endpoint_id: int


-GroupEntityReference = collections.namedtuple(
-    "GroupEntityReference", "name original_name entity_id"
-)
+class GroupEntityReference(NamedTuple):
+    """Reference to a group entity."""
+
+    name: str
+    original_name: str
+    entity_id: int


 class ZHAGroupMember(LogMixin):
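The typed NamedTuple above is a drop-in replacement for the old collections.namedtuple, adding field annotations and a docstring. A short usage sketch (the field values are made up for illustration; note that annotations are hints only and are not enforced at runtime):

from typing import NamedTuple

class GroupEntityReference(NamedTuple):
    """Reference to a group entity."""

    name: str
    original_name: str
    entity_id: int

ref = GroupEntityReference(name="Light", original_name="Bulb 1", entity_id="light.bulb_1")
print(ref.name, ref._asdict())  # all namedtuple helpers still work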
@@ -7,7 +7,7 @@
     "bellows==0.29.0",
     "pyserial==3.5",
     "pyserial-asyncio==0.6",
-    "zha-quirks==0.0.69",
+    "zha-quirks==0.0.71",
     "zigpy-deconz==0.15.0",
     "zigpy==0.44.1",
     "zigpy-xbee==0.14.0",
@@ -496,6 +496,7 @@ async def websocket_node_metadata(
         "wakeup": node.device_config.metadata.wakeup,
         "reset": node.device_config.metadata.reset,
         "device_database_url": node.device_database_url,
+        "comments": node.device_config.metadata.comments,
     }
     connection.send_result(
         msg[ID],
@@ -7,7 +7,7 @@ from .backports.enum import StrEnum

 MAJOR_VERSION: Final = 2022
 MINOR_VERSION: Final = 4
-PATCH_VERSION: Final = "0"
+PATCH_VERSION: Final = "1"
 __short_version__: Final = f"{MAJOR_VERSION}.{MINOR_VERSION}"
 __version__: Final = f"{__short_version__}.{PATCH_VERSION}"
 REQUIRED_PYTHON_VER: Final[tuple[int, int, int]] = (3, 9, 0)
@@ -110,7 +110,7 @@ aio_geojson_nsw_rfs_incidents==0.4
 aio_georss_gdacs==0.5

 # homeassistant.components.airzone
-aioairzone==0.2.3
+aioairzone==0.3.3

 # homeassistant.components.ambient_station
 aioambient==2021.11.0
@@ -740,7 +740,7 @@ googlemaps==2.5.1
 goslide-api==0.5.1

 # homeassistant.components.remote_rpi_gpio
-gpiozero==1.5.1
+gpiozero==1.6.2

 # homeassistant.components.gpsd
 gps3==0.33.3
@@ -1198,6 +1198,9 @@ phone_modem==0.1.1
 # homeassistant.components.onewire
 pi1wire==0.1.0

+# homeassistant.components.remote_rpi_gpio
+pigpio==1.78
+
 # homeassistant.components.pilight
 pilight==0.1.1

@@ -1511,7 +1514,7 @@ pygtfs==0.1.6
 pygti==0.9.2

 # homeassistant.components.version
-pyhaversion==22.04.0
+pyhaversion==22.4.1

 # homeassistant.components.heos
 pyheos==0.7.2
@@ -2470,7 +2473,7 @@ zengge==0.2
 zeroconf==0.38.4

 # homeassistant.components.zha
-zha-quirks==0.0.69
+zha-quirks==0.0.71

 # homeassistant.components.zhong_hong
 zhong_hong_hvac==1.0.9
@@ -94,7 +94,7 @@ aio_geojson_nsw_rfs_incidents==0.4
 aio_georss_gdacs==0.5

 # homeassistant.components.airzone
-aioairzone==0.2.3
+aioairzone==0.3.3

 # homeassistant.components.ambient_station
 aioambient==2021.11.0
@@ -999,7 +999,7 @@ pygatt[GATTTOOL]==4.0.5
 pygti==0.9.2

 # homeassistant.components.version
-pyhaversion==22.04.0
+pyhaversion==22.4.1

 # homeassistant.components.heos
 pyheos==0.7.2
@@ -1601,7 +1601,7 @@ youless-api==0.16
 zeroconf==0.38.4

 # homeassistant.components.zha
-zha-quirks==0.0.69
+zha-quirks==0.0.71

 # homeassistant.components.zha
 zigpy-deconz==0.15.0
@@ -1,6 +1,6 @@
 [metadata]
 name = homeassistant
-version = 2022.4.0
+version = 2022.4.1
 author = The Home Assistant Authors
 author_email = hello@home-assistant.io
 license = Apache-2.0
@@ -2,7 +2,7 @@

 from unittest.mock import MagicMock, patch

-from aiohttp.client_exceptions import ClientConnectorError
+from aiohttp.client_exceptions import ClientConnectorError, ClientResponseError

 from homeassistant import data_entry_flow
 from homeassistant.components.airzone.const import DOMAIN
@@ -23,6 +23,12 @@ async def test_form(hass):
     ) as mock_setup_entry, patch(
         "homeassistant.components.airzone.AirzoneLocalApi.get_hvac",
         return_value=HVAC_MOCK,
+    ), patch(
+        "homeassistant.components.airzone.AirzoneLocalApi.get_hvac_systems",
+        side_effect=ClientResponseError(MagicMock(), MagicMock()),
+    ), patch(
+        "homeassistant.components.airzone.AirzoneLocalApi.get_webserver",
+        side_effect=ClientResponseError(MagicMock(), MagicMock()),
     ):
         result = await hass.config_entries.flow.async_init(
             DOMAIN, context={"source": SOURCE_USER}
@@ -6,7 +6,7 @@ MOCK_CONFIG = {
     DOMAIN: {
         CONF_DEVICES: [
             {
-                CONF_HOST: "fake_host",
+                CONF_HOST: "10.0.0.1",
                 CONF_PASSWORD: "fake_pass",
                 CONF_USERNAME: "fake_user",
             }
@@ -2,6 +2,7 @@
 import dataclasses
 from unittest import mock
 from unittest.mock import Mock, patch
+from urllib.parse import urlparse

 from pyfritzhome import LoginError
 import pytest
@@ -24,15 +25,35 @@ from .const import CONF_FAKE_NAME, MOCK_CONFIG
 from tests.common import MockConfigEntry

 MOCK_USER_DATA = MOCK_CONFIG[DOMAIN][CONF_DEVICES][0]
-MOCK_SSDP_DATA = ssdp.SsdpServiceInfo(
-    ssdp_usn="mock_usn",
-    ssdp_st="mock_st",
-    ssdp_location="https://fake_host:12345/test",
-    upnp={
-        ATTR_UPNP_FRIENDLY_NAME: CONF_FAKE_NAME,
-        ATTR_UPNP_UDN: "uuid:only-a-test",
-    },
-)
+MOCK_SSDP_DATA = {
+    "ip4_valid": ssdp.SsdpServiceInfo(
+        ssdp_usn="mock_usn",
+        ssdp_st="mock_st",
+        ssdp_location="https://10.0.0.1:12345/test",
+        upnp={
+            ATTR_UPNP_FRIENDLY_NAME: CONF_FAKE_NAME,
+            ATTR_UPNP_UDN: "uuid:only-a-test",
+        },
+    ),
+    "ip6_valid": ssdp.SsdpServiceInfo(
+        ssdp_usn="mock_usn",
+        ssdp_st="mock_st",
+        ssdp_location="https://[1234::1]:12345/test",
+        upnp={
+            ATTR_UPNP_FRIENDLY_NAME: CONF_FAKE_NAME,
+            ATTR_UPNP_UDN: "uuid:only-a-test",
+        },
+    ),
+    "ip6_invalid": ssdp.SsdpServiceInfo(
+        ssdp_usn="mock_usn",
+        ssdp_st="mock_st",
+        ssdp_location="https://[fe80::1%1]:12345/test",
+        upnp={
+            ATTR_UPNP_FRIENDLY_NAME: CONF_FAKE_NAME,
+            ATTR_UPNP_UDN: "uuid:only-a-test",
+        },
+    ),
+}


 @pytest.fixture(name="fritz")
@@ -56,8 +77,8 @@ async def test_user(hass: HomeAssistant, fritz: Mock):
         result["flow_id"], user_input=MOCK_USER_DATA
     )
     assert result["type"] == RESULT_TYPE_CREATE_ENTRY
-    assert result["title"] == "fake_host"
-    assert result["data"][CONF_HOST] == "fake_host"
+    assert result["title"] == "10.0.0.1"
+    assert result["data"][CONF_HOST] == "10.0.0.1"
     assert result["data"][CONF_PASSWORD] == "fake_pass"
     assert result["data"][CONF_USERNAME] == "fake_user"
     assert not result["result"].unique_id
@@ -183,12 +204,29 @@ async def test_reauth_not_successful(hass: HomeAssistant, fritz: Mock):
     assert result["reason"] == "no_devices_found"


-async def test_ssdp(hass: HomeAssistant, fritz: Mock):
+@pytest.mark.parametrize(
+    "test_data,expected_result",
+    [
+        (MOCK_SSDP_DATA["ip4_valid"], RESULT_TYPE_FORM),
+        (MOCK_SSDP_DATA["ip6_valid"], RESULT_TYPE_FORM),
+        (MOCK_SSDP_DATA["ip6_invalid"], RESULT_TYPE_ABORT),
+    ],
+)
+async def test_ssdp(
+    hass: HomeAssistant,
+    fritz: Mock,
+    test_data: ssdp.SsdpServiceInfo,
+    expected_result: str,
+):
     """Test starting a flow from discovery."""
     result = await hass.config_entries.flow.async_init(
-        DOMAIN, context={"source": SOURCE_SSDP}, data=MOCK_SSDP_DATA
+        DOMAIN, context={"source": SOURCE_SSDP}, data=test_data
     )
-    assert result["type"] == RESULT_TYPE_FORM
+    assert result["type"] == expected_result
+
+    if expected_result == RESULT_TYPE_ABORT:
+        return
+
     assert result["step_id"] == "confirm"

     result = await hass.config_entries.flow.async_configure(
@@ -197,7 +235,7 @@ async def test_ssdp(hass: HomeAssistant, fritz: Mock):
     )
     assert result["type"] == RESULT_TYPE_CREATE_ENTRY
     assert result["title"] == CONF_FAKE_NAME
-    assert result["data"][CONF_HOST] == "fake_host"
+    assert result["data"][CONF_HOST] == urlparse(test_data.ssdp_location).hostname
     assert result["data"][CONF_PASSWORD] == "fake_pass"
     assert result["data"][CONF_USERNAME] == "fake_user"
     assert result["result"].unique_id == "only-a-test"
@@ -205,7 +243,7 @@ async def test_ssdp(hass: HomeAssistant, fritz: Mock):

 async def test_ssdp_no_friendly_name(hass: HomeAssistant, fritz: Mock):
     """Test starting a flow from discovery without friendly name."""
-    MOCK_NO_NAME = dataclasses.replace(MOCK_SSDP_DATA)
+    MOCK_NO_NAME = dataclasses.replace(MOCK_SSDP_DATA["ip4_valid"])
     MOCK_NO_NAME.upnp = MOCK_NO_NAME.upnp.copy()
     del MOCK_NO_NAME.upnp[ATTR_UPNP_FRIENDLY_NAME]
     result = await hass.config_entries.flow.async_init(
@@ -219,8 +257,8 @@ async def test_ssdp_no_friendly_name(hass: HomeAssistant, fritz: Mock):
         user_input={CONF_PASSWORD: "fake_pass", CONF_USERNAME: "fake_user"},
     )
     assert result["type"] == RESULT_TYPE_CREATE_ENTRY
-    assert result["title"] == "fake_host"
-    assert result["data"][CONF_HOST] == "fake_host"
+    assert result["title"] == "10.0.0.1"
+    assert result["data"][CONF_HOST] == "10.0.0.1"
     assert result["data"][CONF_PASSWORD] == "fake_pass"
     assert result["data"][CONF_USERNAME] == "fake_user"
     assert result["result"].unique_id == "only-a-test"
@@ -231,7 +269,7 @@ async def test_ssdp_auth_failed(hass: HomeAssistant, fritz: Mock):
     fritz().login.side_effect = LoginError("Boom")

     result = await hass.config_entries.flow.async_init(
-        DOMAIN, context={"source": SOURCE_SSDP}, data=MOCK_SSDP_DATA
+        DOMAIN, context={"source": SOURCE_SSDP}, data=MOCK_SSDP_DATA["ip4_valid"]
     )
     assert result["type"] == RESULT_TYPE_FORM
     assert result["step_id"] == "confirm"
@@ -251,7 +289,7 @@ async def test_ssdp_not_successful(hass: HomeAssistant, fritz: Mock):
     fritz().login.side_effect = OSError("Boom")

     result = await hass.config_entries.flow.async_init(
-        DOMAIN, context={"source": SOURCE_SSDP}, data=MOCK_SSDP_DATA
+        DOMAIN, context={"source": SOURCE_SSDP}, data=MOCK_SSDP_DATA["ip4_valid"]
     )
     assert result["type"] == RESULT_TYPE_FORM
     assert result["step_id"] == "confirm"
@@ -269,7 +307,7 @@ async def test_ssdp_not_supported(hass: HomeAssistant, fritz: Mock):
     fritz().get_device_elements.side_effect = HTTPError("Boom")

     result = await hass.config_entries.flow.async_init(
-        DOMAIN, context={"source": SOURCE_SSDP}, data=MOCK_SSDP_DATA
+        DOMAIN, context={"source": SOURCE_SSDP}, data=MOCK_SSDP_DATA["ip4_valid"]
     )
     assert result["type"] == RESULT_TYPE_FORM
     assert result["step_id"] == "confirm"
@@ -285,13 +323,13 @@ async def test_ssdp_not_supported(hass: HomeAssistant, fritz: Mock):
 async def test_ssdp_already_in_progress_unique_id(hass: HomeAssistant, fritz: Mock):
     """Test starting a flow from discovery twice."""
     result = await hass.config_entries.flow.async_init(
-        DOMAIN, context={"source": SOURCE_SSDP}, data=MOCK_SSDP_DATA
+        DOMAIN, context={"source": SOURCE_SSDP}, data=MOCK_SSDP_DATA["ip4_valid"]
     )
     assert result["type"] == RESULT_TYPE_FORM
     assert result["step_id"] == "confirm"

     result = await hass.config_entries.flow.async_init(
-        DOMAIN, context={"source": SOURCE_SSDP}, data=MOCK_SSDP_DATA
+        DOMAIN, context={"source": SOURCE_SSDP}, data=MOCK_SSDP_DATA["ip4_valid"]
     )
     assert result["type"] == RESULT_TYPE_ABORT
     assert result["reason"] == "already_in_progress"
@@ -300,12 +338,12 @@ async def test_ssdp_already_in_progress_unique_id(hass: HomeAssistant, fritz: Mock):
 async def test_ssdp_already_in_progress_host(hass: HomeAssistant, fritz: Mock):
     """Test starting a flow from discovery twice."""
     result = await hass.config_entries.flow.async_init(
-        DOMAIN, context={"source": SOURCE_SSDP}, data=MOCK_SSDP_DATA
+        DOMAIN, context={"source": SOURCE_SSDP}, data=MOCK_SSDP_DATA["ip4_valid"]
     )
     assert result["type"] == RESULT_TYPE_FORM
     assert result["step_id"] == "confirm"

-    MOCK_NO_UNIQUE_ID = dataclasses.replace(MOCK_SSDP_DATA)
+    MOCK_NO_UNIQUE_ID = dataclasses.replace(MOCK_SSDP_DATA["ip4_valid"])
     MOCK_NO_UNIQUE_ID.upnp = MOCK_NO_UNIQUE_ID.upnp.copy()
     del MOCK_NO_UNIQUE_ID.upnp[ATTR_UPNP_UDN]
     result = await hass.config_entries.flow.async_init(
@@ -324,7 +362,7 @@ async def test_ssdp_already_configured(hass: HomeAssistant, fritz: Mock):
     assert not result["result"].unique_id

     result2 = await hass.config_entries.flow.async_init(
-        DOMAIN, context={"source": SOURCE_SSDP}, data=MOCK_SSDP_DATA
+        DOMAIN, context={"source": SOURCE_SSDP}, data=MOCK_SSDP_DATA["ip4_valid"]
     )
     assert result2["type"] == RESULT_TYPE_ABORT
     assert result2["reason"] == "already_configured"
@@ -35,12 +35,12 @@ async def test_setup(hass: HomeAssistant, fritz: Mock):
     entries = hass.config_entries.async_entries()
     assert entries
     assert len(entries) == 1
-    assert entries[0].data[CONF_HOST] == "fake_host"
+    assert entries[0].data[CONF_HOST] == "10.0.0.1"
     assert entries[0].data[CONF_PASSWORD] == "fake_pass"
     assert entries[0].data[CONF_USERNAME] == "fake_user"
     assert fritz.call_count == 1
     assert fritz.call_args_list == [
-        call(host="fake_host", password="fake_pass", user="fake_user")
+        call(host="10.0.0.1", password="fake_pass", user="fake_user")
     ]

@@ -10,6 +10,7 @@ import pytest
 import respx

 from homeassistant import config_entries, data_entry_flow, setup
+from homeassistant.components.camera import async_get_image
 from homeassistant.components.generic.const import (
     CONF_CONTENT_TYPE,
     CONF_FRAMERATE,
@@ -191,7 +192,7 @@ async def test_form_rtsp_mode(hass, fakeimg_png, mock_av_open, user_flow):
     assert len(mock_setup.mock_calls) == 1


-async def test_form_only_stream(hass, mock_av_open):
+async def test_form_only_stream(hass, mock_av_open, fakeimgbytes_jpg):
     """Test we complete ok if the user wants stream only."""
     await setup.async_setup_component(hass, "persistent_notification", {})
     result = await hass.config_entries.flow.async_init(
@@ -204,21 +205,34 @@ async def test_form_only_stream(hass, mock_av_open, fakeimgbytes_jpg):
         result["flow_id"],
         data,
     )
-    assert result2["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
-    assert result2["title"] == "127_0_0_1_testurl_2"
-    assert result2["options"] == {
+    assert result2["type"] == data_entry_flow.RESULT_TYPE_FORM
+    result3 = await hass.config_entries.flow.async_configure(
+        result2["flow_id"],
+        {CONF_CONTENT_TYPE: "image/jpeg"},
+    )
+
+    assert result3["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
+    assert result3["title"] == "127_0_0_1_testurl_2"
+    assert result3["options"] == {
         CONF_AUTHENTICATION: HTTP_BASIC_AUTHENTICATION,
         CONF_STREAM_SOURCE: "http://127.0.0.1/testurl/2",
         CONF_RTSP_TRANSPORT: "tcp",
         CONF_USERNAME: "fred_flintstone",
         CONF_PASSWORD: "bambam",
         CONF_LIMIT_REFETCH_TO_URL_CHANGE: False,
-        CONF_CONTENT_TYPE: None,
+        CONF_CONTENT_TYPE: "image/jpeg",
         CONF_FRAMERATE: 5,
         CONF_VERIFY_SSL: False,
     }

     await hass.async_block_till_done()
+
+    with patch(
+        "homeassistant.components.generic.camera.GenericCamera.async_camera_image",
+        return_value=fakeimgbytes_jpg,
+    ):
+        image_obj = await async_get_image(hass, "camera.127_0_0_1_testurl_2")
+        assert image_obj.content == fakeimgbytes_jpg
     assert len(mock_setup.mock_calls) == 1

@@ -478,6 +492,45 @@ async def test_options_template_error(hass, fakeimgbytes_png, mock_av_open):
     assert result4["errors"] == {"still_image_url": "template_error"}


+@respx.mock
+async def test_options_only_stream(hass, fakeimgbytes_png, mock_av_open):
+    """Test the options flow without a still_image_url."""
+    respx.get("http://127.0.0.1/testurl/2").respond(stream=fakeimgbytes_png)
+    data = TESTDATA.copy()
+    data.pop(CONF_STILL_IMAGE_URL)
+
+    mock_entry = MockConfigEntry(
+        title="Test Camera",
+        domain=DOMAIN,
+        data={},
+        options=data,
+    )
+    with mock_av_open:
+        mock_entry.add_to_hass(hass)
+        await hass.config_entries.async_setup(mock_entry.entry_id)
+        await hass.async_block_till_done()
+
+    result = await hass.config_entries.options.async_init(mock_entry.entry_id)
+    assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
+    assert result["step_id"] == "init"
+
+    # try updating the config options
+    result2 = await hass.config_entries.options.async_configure(
+        result["flow_id"],
+        user_input=data,
+    )
+    # Should be shown a 2nd form
+    assert result2["type"] == data_entry_flow.RESULT_TYPE_FORM
+    assert result2["step_id"] == "content_type"
+
+    result3 = await hass.config_entries.options.async_configure(
+        result2["flow_id"],
+        user_input={CONF_CONTENT_TYPE: "image/png"},
+    )
+    assert result3["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
+    assert result3["data"][CONF_CONTENT_TYPE] == "image/png"
+
+
 # These below can be deleted after deprecation period is finished.
 @respx.mock
 async def test_import(hass, fakeimg_png, mock_av_open):
@@ -272,6 +272,35 @@ async def test_all_day_offset_event(hass, mock_events_list_items, component_setup):
     }


+async def test_missing_summary(hass, mock_events_list_items, component_setup):
+    """Test that we can create an event trigger on device."""
+    start_event = dt_util.now() + datetime.timedelta(minutes=14)
+    end_event = start_event + datetime.timedelta(minutes=60)
+    event = {
+        **TEST_EVENT,
+        "start": {"dateTime": start_event.isoformat()},
+        "end": {"dateTime": end_event.isoformat()},
+    }
+    del event["summary"]
+    mock_events_list_items([event])
+
+    assert await component_setup()
+
+    state = hass.states.get(TEST_ENTITY)
+    assert state.name == TEST_ENTITY_NAME
+    assert state.state == STATE_OFF
+    assert dict(state.attributes) == {
+        "friendly_name": TEST_ENTITY_NAME,
+        "message": "",
+        "all_day": False,
+        "offset_reached": False,
+        "start_time": start_event.strftime(DATE_STR_FORMAT),
+        "end_time": end_event.strftime(DATE_STR_FORMAT),
+        "location": event["location"],
+        "description": event["description"],
+    }
+
+
 async def test_update_error(
     hass, calendar_resource, component_setup, test_api_calendar
 ):
@@ -97,6 +97,12 @@ async def test_full_flow(
     assert "data" in result
     data = result["data"]
     assert "token" in data
+    assert 0 < data["token"]["expires_in"] < 8 * 86400
+    assert (
+        datetime.datetime.now().timestamp()
+        <= data["token"]["expires_at"]
+        < (datetime.datetime.now() + datetime.timedelta(days=8)).timestamp()
+    )
     data["token"].pop("expires_at")
     data["token"].pop("expires_in")
     assert data == {
@@ -24,7 +24,11 @@ def mock_all(aioclient_mock, request):
         "http://127.0.0.1/info",
         json={
             "result": "ok",
-            "data": {"supervisor": "222", "homeassistant": "0.110.0", "hassos": None},
+            "data": {
+                "supervisor": "222",
+                "homeassistant": "0.110.0",
+                "hassos": "1.2.3",
+            },
         },
     )
     aioclient_mock.get(
@@ -30,7 +30,11 @@ def mock_all(aioclient_mock, request):
         "http://127.0.0.1/info",
         json={
             "result": "ok",
-            "data": {"supervisor": "222", "homeassistant": "0.110.0", "hassos": None},
+            "data": {
+                "supervisor": "222",
+                "homeassistant": "0.110.0",
+                "hassos": "1.2.3",
+            },
         },
     )
     aioclient_mock.get(
@@ -396,14 +400,14 @@ async def test_service_calls(hassio_env, hass, aioclient_mock, caplog):
     )
     await hass.async_block_till_done()

-    assert aioclient_mock.call_count == 8
+    assert aioclient_mock.call_count == 9
     assert aioclient_mock.mock_calls[-1][2] == "test"

     await hass.services.async_call("hassio", "host_shutdown", {})
     await hass.services.async_call("hassio", "host_reboot", {})
     await hass.async_block_till_done()

-    assert aioclient_mock.call_count == 10
+    assert aioclient_mock.call_count == 11

     await hass.services.async_call("hassio", "backup_full", {})
     await hass.services.async_call(
@@ -418,7 +422,7 @@ async def test_service_calls(hassio_env, hass, aioclient_mock, caplog):
     )
     await hass.async_block_till_done()

-    assert aioclient_mock.call_count == 12
+    assert aioclient_mock.call_count == 13
     assert aioclient_mock.mock_calls[-1][2] == {
         "homeassistant": True,
         "addons": ["test"],
@@ -442,7 +446,7 @@ async def test_service_calls(hassio_env, hass, aioclient_mock, caplog):
     )
     await hass.async_block_till_done()

-    assert aioclient_mock.call_count == 14
+    assert aioclient_mock.call_count == 15
     assert aioclient_mock.mock_calls[-1][2] == {
         "addons": ["test"],
         "folders": ["ssl"],
@@ -461,12 +465,12 @@ async def test_service_calls_core(hassio_env, hass, aioclient_mock):
     await hass.services.async_call("homeassistant", "stop")
     await hass.async_block_till_done()

-    assert aioclient_mock.call_count == 4
+    assert aioclient_mock.call_count == 5

     await hass.services.async_call("homeassistant", "check_config")
     await hass.async_block_till_done()

-    assert aioclient_mock.call_count == 4
+    assert aioclient_mock.call_count == 5

     with patch(
         "homeassistant.config.async_check_ha_config_file", return_value=None
@@ -475,7 +479,7 @@ async def test_service_calls_core(hassio_env, hass, aioclient_mock):
         await hass.async_block_till_done()
         assert mock_check_config.called

-    assert aioclient_mock.call_count == 5
+    assert aioclient_mock.call_count == 6


 async def test_entry_load_and_unload(hass):
@ -628,10 +632,17 @@ async def test_device_registry_calls(hass):
|
||||
), patch(
|
||||
"homeassistant.components.hassio.HassIO.get_os_info",
|
||||
return_value=os_mock_data,
|
||||
), patch(
|
||||
"homeassistant.components.hassio.HassIO.get_info",
|
||||
return_value={
|
||||
"supervisor": "222",
|
||||
"homeassistant": "0.110.0",
|
||||
"hassos": None,
|
||||
},
|
||||
):
|
||||
async_fire_time_changed(hass, dt_util.now() + timedelta(hours=3))
|
||||
await hass.async_block_till_done()
|
||||
assert len(dev_reg.devices) == 5
|
||||
assert len(dev_reg.devices) == 4
|
||||
|
||||
|
||||
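With `get_info` patched to report `"hassos": None`, the OS device is no longer created, which is why the expected registry size drops from 5 to 4. A hedged sketch of checking that (helper import assumed, not shown in the hunk):

# Sketch: counting hassio devices in the registry after the refresh.
from homeassistant.helpers import device_registry as dr

dev_reg = dr.async_get(hass)
assert len(dev_reg.devices) == 4  # no OS device when "hassos" is None
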
async def test_coordinator_updates(hass, caplog):
@ -24,7 +24,11 @@ def mock_all(aioclient_mock, request):
        "http://127.0.0.1/info",
        json={
            "result": "ok",
            "data": {"supervisor": "222", "homeassistant": "0.110.0", "hassos": None},
            "data": {
                "supervisor": "222",
                "homeassistant": "0.110.0",
                "hassos": "1.2.3",
            },
        },
    )
    aioclient_mock.get(
@ -25,7 +25,11 @@ def mock_all(aioclient_mock, request):
        "http://127.0.0.1/info",
        json={
            "result": "ok",
            "data": {"supervisor": "222", "homeassistant": "0.110.0", "hassos": None},
            "data": {
                "supervisor": "222",
                "homeassistant": "0.110.0",
                "hassos": "1.2.3",
            },
        },
    )
    aioclient_mock.get(
@ -483,3 +487,25 @@ async def test_not_release_notes(hass, aioclient_mock, hass_ws_client):
    )
    result = await client.receive_json()
    assert result["result"] is None


async def test_no_os_entity(hass):
    """Test handling where there is no os entity."""
    with patch.dict(os.environ, MOCK_ENVIRON), patch(
        "homeassistant.components.hassio.HassIO.get_info",
        return_value={
            "supervisor": "222",
            "homeassistant": "0.110.0",
            "hassos": None,
        },
    ):
        result = await async_setup_component(
            hass,
            "hassio",
            {"http": {"server_port": 9999, "server_host": "127.0.0.1"}, "hassio": {}},
        )
        assert result
        await hass.async_block_till_done()

    # Verify that the entity does not exist
    assert not hass.states.get("update.home_assistant_operating_system_update")
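`test_no_os_entity` covers the absence case; a hypothetical counterpart (assumed, not part of this commit) would flip the fixture and assert presence:

# Hypothetical sketch: with "hassos": "1.2.3" in get_info, the OS update
# entity should exist after setup.
assert hass.states.get("update.home_assistant_operating_system_update")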
@ -1347,6 +1347,16 @@ async def test_options_flow_exclude_mode_skips_category_entities(
        entity_category=EntityCategory.CONFIG,
    )
    hass.states.async_set(sonos_config_switch.entity_id, "off")

    sonos_notconfig_switch: RegistryEntry = entity_reg.async_get_or_create(
        "switch",
        "sonos",
        "notconfig",
        device_id="1234",
        entity_category=None,
    )
    hass.states.async_set(sonos_notconfig_switch.entity_id, "off")

    await hass.async_block_till_done()

    result = await hass.config_entries.options.async_init(
@ -1391,14 +1401,24 @@ async def test_options_flow_exclude_mode_skips_category_entities(

    result4 = await hass.config_entries.options.async_configure(
        result2["flow_id"],
        user_input={"entities": ["media_player.tv", "switch.other"]},
        user_input={
            "entities": [
                "media_player.tv",
                "switch.other",
                sonos_notconfig_switch.entity_id,
            ]
        },
    )
    assert result4["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
    assert config_entry.options == {
        "mode": "bridge",
        "filter": {
            "exclude_domains": [],
            "exclude_entities": ["media_player.tv", "switch.other"],
            "exclude_entities": [
                "media_player.tv",
                "switch.other",
                sonos_notconfig_switch.entity_id,
            ],
            "include_domains": ["media_player", "switch"],
            "include_entities": [],
        },
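The new `sonos_notconfig_switch` exists to prove that only entities without an `entity_category` are offered, and persisted, by the exclude-mode options flow. A short sketch of the distinction (registry fixture names as used above; illustrative only):

# Sketch: config/diagnostic entities are skipped by the exclude picker,
# ordinary entities are selectable.
from homeassistant.helpers.entity import EntityCategory

assert sonos_config_switch.entity_category is EntityCategory.CONFIG  # skipped
assert sonos_notconfig_switch.entity_category is None  # selectable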
@ -1,5 +1,7 @@
"""The sensor tests for the powerwall platform."""
from unittest.mock import patch
from unittest.mock import Mock, patch

from tesla_powerwall.error import MissingAttributeError

from homeassistant.components.powerwall.const import DOMAIN
from homeassistant.components.sensor import ATTR_STATE_CLASS
@ -112,3 +114,26 @@ async def test_sensors(hass, entity_registry_enabled_by_default):
    # HA changes the implementation and a new one appears
    for key, value in expected_attributes.items():
        assert state.attributes[key] == value


async def test_sensor_backup_reserve_unavailable(hass):
    """Confirm that backup reserve sensor is not added if data is unavailable from the device."""

    mock_powerwall = await _mock_powerwall_with_fixtures(hass)
    mock_powerwall.get_backup_reserve_percentage = Mock(
        side_effect=MissingAttributeError(Mock(), "backup_reserve_percent", "operation")
    )

    config_entry = MockConfigEntry(domain=DOMAIN, data={CONF_IP_ADDRESS: "1.2.3.4"})
    config_entry.add_to_hass(hass)
    with patch(
        "homeassistant.components.powerwall.config_flow.Powerwall",
        return_value=mock_powerwall,
    ), patch(
        "homeassistant.components.powerwall.Powerwall", return_value=mock_powerwall
    ):
        assert await hass.config_entries.async_setup(config_entry.entry_id)
        await hass.async_block_till_done()

    state = hass.states.get("sensor.powerwall_backup_reserve")
    assert state is None
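The test leans on the standard `unittest.mock` side-effect pattern: an exception assigned as `side_effect` is raised on every call, which the integration treats as "data unavailable" and skips the sensor. A self-contained sketch of the pattern (generic names, not from the commit):

from unittest.mock import Mock

# A Mock configured with an exception side_effect raises when called.
failing = Mock(side_effect=RuntimeError("attribute unavailable"))
try:
    failing()
except RuntimeError:
    pass  # the caller skips the corresponding sensor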
@ -2,7 +2,6 @@
from unittest.mock import patch

import pytest
from telegram.ext.dispatcher import Dispatcher

from homeassistant.components.telegram_bot import (
    CONF_ALLOWED_CHAT_IDS,
@ -176,12 +175,3 @@ async def polling_platform(hass, config_polling):
        config_polling,
    )
    await hass.async_block_till_done()


@pytest.fixture(autouse=True)
def clear_dispatcher():
    """Clear the singleton that telegram.ext.dispatcher.Dispatcher sets on itself."""
    yield
    Dispatcher._set_singleton(None)
    # This is how python-telegram-bot resets the dispatcher in their test suite
    Dispatcher._Dispatcher__singleton_semaphore.release()
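Removing the fixture here works because an identical autouse fixture is added to the shared test module below; autouse fixtures run around every test in their scope without being requested. A generic sketch of the mechanism (names illustrative):

import pytest

@pytest.fixture(autouse=True)
def reset_global_state():
    """Run around every test in this package without being requested."""
    yield  # test body executes here
    # teardown: undo any process-wide state the test created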
tests/components/telegram_bot/test_broadcast.py (new file)
@ -0,0 +1,20 @@
"""Test Telegram broadcast."""
from homeassistant.setup import async_setup_component


async def test_setup(hass):
    """Test setting up Telegram broadcast."""
    assert await async_setup_component(
        hass,
        "telegram_bot",
        {
            "telegram_bot": {
                "platform": "broadcast",
                "api_key": "1234567890:ABC",
                "allowed_chat_ids": [1],
            }
        },
    )
    await hass.async_block_till_done()

    assert hass.services.has_service("telegram_bot", "send_message") is True
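Once the broadcast platform is set up, `send_message` is the service the test checks for. A hedged usage sketch (chat id and message are placeholders, and the field names assume the standard telegram_bot service schema):

# Illustrative call against the registered service.
await hass.services.async_call(
    "telegram_bot",
    "send_message",
    {"message": "hello", "target": 1},
    blocking=True,
)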
@ -1,4 +1,5 @@
"""Tests for the telegram_bot component."""
import pytest
from telegram import Update
from telegram.ext.dispatcher import Dispatcher

@ -8,6 +9,15 @@ from homeassistant.components.telegram_bot.webhooks import TELEGRAM_WEBHOOK_URL
from tests.common import async_capture_events


@pytest.fixture(autouse=True)
def clear_dispatcher():
    """Clear the singleton that telegram.ext.dispatcher.Dispatcher sets on itself."""
    yield
    Dispatcher._set_singleton(None)
    # This is how python-telegram-bot resets the dispatcher in their test suite
    Dispatcher._Dispatcher__singleton_semaphore.release()


async def test_webhook_platform_init(hass, webhook_platform):
    """Test initialization of the webhooks platform."""
    assert hass.services.has_service(DOMAIN, SERVICE_SEND_MESSAGE) is True
@ -517,6 +517,33 @@ async def test_smart_strip_custom_random_effect(hass: HomeAssistant) -> None:
    )
    strip.set_custom_effect.reset_mock()

    await hass.services.async_call(
        DOMAIN,
        "random_effect",
        {
            ATTR_ENTITY_ID: entity_id,
            "init_states": [340, 20, 50],
        },
        blocking=True,
    )
    strip.set_custom_effect.assert_called_once_with(
        {
            "custom": 1,
            "id": "yMwcNpLxijmoKamskHCvvravpbnIqAIN",
            "brightness": 100,
            "name": "Custom",
            "segments": [0],
            "expansion_strategy": 1,
            "enable": 1,
            "duration": 0,
            "transition": 0,
            "type": "random",
            "init_states": [[340, 20, 50]],
            "random_seed": 100,
        }
    )
    strip.set_custom_effect.reset_mock()

    strip.effect = {
        "custom": 1,
        "id": "yMwcNpLxijmoKamskHCvvravpbnIqAIN",
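The `assert_called_once_with` above pins down how the service shapes its payload: a single `[hue, saturation, value]` triple passed as `init_states` is wrapped into a list of one init state. A minimal sketch of that shaping:

# Sketch: one HSV triple in, a list containing that triple out.
init_states = [340, 20, 50]
effect_payload = {"type": "random", "init_states": [init_states]}
assert effect_payload["init_states"] == [[340, 20, 50]]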
@ -365,10 +365,7 @@ async def test_discovered_by_unifi_discovery_direct_connect_updated(
    )
    mock_config.add_to_hass(hass)

    with _patch_discovery(), patch(
        "homeassistant.components.unifiprotect.async_setup_entry",
        return_value=True,
    ) as mock_setup_entry:
    with _patch_discovery():
        result = await hass.config_entries.flow.async_init(
            DOMAIN,
            context={"source": config_entries.SOURCE_INTEGRATION_DISCOVERY},
@ -378,7 +375,6 @@ async def test_discovered_by_unifi_discovery_direct_connect_updated(

    assert result["type"] == RESULT_TYPE_ABORT
    assert result["reason"] == "already_configured"
    assert len(mock_setup_entry.mock_calls) == 1
    assert mock_config.data[CONF_HOST] == DIRECT_CONNECT_DOMAIN


@ -401,10 +397,7 @@ async def test_discovered_by_unifi_discovery_direct_connect_updated_but_not_usin
    )
    mock_config.add_to_hass(hass)

    with _patch_discovery(), patch(
        "homeassistant.components.unifiprotect.async_setup_entry",
        return_value=True,
    ) as mock_setup_entry:
    with _patch_discovery():
        result = await hass.config_entries.flow.async_init(
            DOMAIN,
            context={"source": config_entries.SOURCE_INTEGRATION_DISCOVERY},
@ -414,7 +407,6 @@ async def test_discovered_by_unifi_discovery_direct_connect_updated_but_not_usin

    assert result["type"] == RESULT_TYPE_ABORT
    assert result["reason"] == "already_configured"
    assert len(mock_setup_entry.mock_calls) == 1
    assert mock_config.data[CONF_HOST] == "127.0.0.1"
@ -65,7 +65,16 @@ async def test_restore_state(hass):
    assert state.state == "midpeak"


async def test_services(hass):
@pytest.mark.parametrize(
    "meter",
    (
        ["select.energy_bill"],
        "select.energy_bill",
        ["utility_meter.energy_bill"],
        "utility_meter.energy_bill",
    ),
)
async def test_services(hass, meter):
    """Test energy sensor reset service."""
    config = {
        "utility_meter": {
@ -159,7 +168,7 @@ async def test_services(hass):
    assert state.state == "1"

    # Reset meters
    data = {ATTR_ENTITY_ID: "select.energy_bill"}
    data = {ATTR_ENTITY_ID: meter}
    await hass.services.async_call(DOMAIN, SERVICE_RESET, data)
    await hass.async_block_till_done()
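Parametrizing `meter` reruns the reset test once per accepted target form: the select entity id and the legacy `utility_meter` id, each as a bare string and as a one-element list. A generic, self-contained sketch of the mechanism:

import pytest

@pytest.mark.parametrize(
    "meter",
    (["select.energy_bill"], "select.energy_bill"),
)
def test_accepts_both_forms(meter):
    # Both a string and a one-element list resolve to the same entity id.
    entity_id = meter if isinstance(meter, str) else meter[0]
    assert entity_id == "select.energy_bill"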
@ -140,3 +140,13 @@ async def test_action(hass, device_ias):
    assert calls[0].domain == DOMAIN
    assert calls[0].service == "warning_device_warn"
    assert calls[0].data["ieee"] == ieee_address


async def test_invalid_zha_event_type(hass, device_ias):
    """Test that unexpected types are not passed to `zha_send_event`."""
    zigpy_device, zha_device = device_ias
    channel = zha_device.channels.pools[0].client_channels["1:0x0006"]

    # `zha_send_event` accepts only zigpy responses, lists, and dicts
    with pytest.raises(TypeError):
        channel.zha_send_event(COMMAND_SINGLE, 123)
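The new test uses the usual `pytest.raises` guard: the block passes only if the call raises the named exception. A self-contained sketch of the pattern (generic function standing in for the channel API; zigpy response objects omitted for brevity):

import pytest

def send_event(command, args):
    # Mirrors the documented contract: only list/dict payloads are accepted.
    if not isinstance(args, (list, dict)):
        raise TypeError(f"unexpected payload type: {type(args)!r}")

with pytest.raises(TypeError):
    send_event("single", 123)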
@ -68,7 +68,11 @@
    "inclusion": "To add the ZP3111 to the Z-Wave network (inclusion), place the Z-Wave primary controller into inclusion mode. Press the Program Switch of ZP3111 for sending the NIF. After sending NIF, Z-Wave will send the auto inclusion, otherwise, ZP3111 will go to sleep after 20 seconds.",
    "exclusion": "To remove the ZP3111 from the Z-Wave network (exclusion), place the Z-Wave primary controller into \u201cexclusion\u201d mode, and following its instruction to delete the ZP3111 to the controller. Press the Program Switch of ZP3111 once to be excluded.",
    "reset": "Remove cover to triggered tamper switch, LED flash once & send out Alarm Report. Press Program Switch 10 times within 10 seconds, ZP3111 will send the \u201cDevice Reset Locally Notification\u201d command and reset to the factory default. (Remark: This is to be used only in the case of primary controller being inoperable or otherwise unavailable.)",
    "manual": "https://products.z-wavealliance.org/ProductManual/File?folder=&filename=MarketCertificationFiles/2479/ZP3111-5_R2_20170316.pdf"
    "manual": "https://products.z-wavealliance.org/ProductManual/File?folder=&filename=MarketCertificationFiles/2479/ZP3111-5_R2_20170316.pdf",
    "comments": {
        "level": "info",
        "text": "test"
    }
},
"isEmbedded": true
},
@ -249,6 +249,7 @@ async def test_node_metadata(hass, wallmote_central_scene, integration, hass_ws_
        result["device_database_url"]
        == "https://devices.zwave-js.io/?jumpTo=0x0086:0x0002:0x0082:0.0"
    )
    assert result["comments"] == [{"level": "info", "text": "test"}]

    # Test getting non-existent node fails
    await ws_client.send_json(
@ -8,8 +8,12 @@ import pytest

from homeassistant import config_entries, data_entry_flow, loader
from homeassistant.components.hassio import HassioServiceInfo
from homeassistant.const import EVENT_HOMEASSISTANT_STARTED, EVENT_HOMEASSISTANT_STOP
from homeassistant.core import CoreState, callback
from homeassistant.const import (
    EVENT_COMPONENT_LOADED,
    EVENT_HOMEASSISTANT_STARTED,
    EVENT_HOMEASSISTANT_STOP,
)
from homeassistant.core import CoreState, Event, callback
from homeassistant.data_entry_flow import RESULT_TYPE_ABORT, BaseServiceInfo
from homeassistant.exceptions import (
    ConfigEntryAuthFailed,
@ -2299,6 +2303,72 @@ async def test_async_setup_init_entry(hass):
    assert entries[0].state is config_entries.ConfigEntryState.LOADED


async def test_async_setup_init_entry_completes_before_loaded_event_fires(hass):
    """Test a config entry being initialized during integration setup before the loaded event fires."""

    @callback
    def _record_load(event: Event) -> None:
        nonlocal load_events
        load_events.append(event)

    listener = hass.bus.async_listen(EVENT_COMPONENT_LOADED, _record_load)
    load_events: list[Event] = []

    async def mock_async_setup(hass, config):
        """Mock setup."""
        hass.async_create_task(
            hass.config_entries.flow.async_init(
                "comp",
                context={"source": config_entries.SOURCE_IMPORT},
                data={},
            )
        )
        return True

    async_setup_entry = AsyncMock(return_value=True)
    mock_integration(
        hass,
        MockModule(
            "comp", async_setup=mock_async_setup, async_setup_entry=async_setup_entry
        ),
    )
    mock_entity_platform(hass, "config_flow.comp", None)

    class TestFlow(config_entries.ConfigFlow):
        """Test flow."""

        VERSION = 1

        async def async_step_three(self, user_input=None):
            """Test import step creating entry."""
            return self.async_create_entry(title="title", data={})

        async def async_step_two(self, user_input=None):
            """Test import step creating entry."""
            return await self.async_step_three()

        async def async_step_one(self, user_input=None):
            """Test import step creating entry."""
            return await self.async_step_two()

        async def async_step_import(self, user_input=None):
            """Test import step creating entry."""
            return await self.async_step_one()

    # This test must not use hass.async_block_till_done()
    # as it's explicitly testing what happens without it
    with patch.dict(config_entries.HANDLERS, {"comp": TestFlow}):
        assert await async_setup_component(hass, "comp", {})
        assert len(async_setup_entry.mock_calls) == 1
        assert load_events[0].event_type == EVENT_COMPONENT_LOADED
        assert load_events[0].data == {"component": "comp"}
        entries = hass.config_entries.async_entries("comp")
        assert len(entries) == 1
        assert entries[0].state is config_entries.ConfigEntryState.LOADED

    listener()
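`hass.bus.async_listen` returns an unsubscribe callable, which is why the test ends with a bare `listener()` call to detach the recorder. A short sketch of the pattern (event name hedged as the plain string form of the constant above):

# Sketch: async_listen returns a callable that removes the listener.
unsub = hass.bus.async_listen("component_loaded", lambda event: None)
# ... assertions run while the listener is attached ...
unsub()  # detach so the listener does not leak into other tests
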
async def test_async_setup_update_entry(hass):
    """Test a config entry being updated during integration setup."""
    entry = MockConfigEntry(domain="comp", data={"value": "initial"})