Mirror of https://github.com/home-assistant/core.git (synced 2025-07-23 21:27:38 +00:00)

Commit 1f59b735c6: 2025.7.2 (#148725)
@@ -505,8 +505,13 @@ class ClimateCapabilities(AlexaEntity):
         ):
             yield AlexaThermostatController(self.hass, self.entity)
             yield AlexaTemperatureSensor(self.hass, self.entity)
-        if self.entity.domain == water_heater.DOMAIN and (
-            supported_features & water_heater.WaterHeaterEntityFeature.OPERATION_MODE
+        if (
+            self.entity.domain == water_heater.DOMAIN
+            and (
+                supported_features
+                & water_heater.WaterHeaterEntityFeature.OPERATION_MODE
+            )
+            and self.entity.attributes.get(water_heater.ATTR_OPERATION_LIST)
         ):
             yield AlexaModeController(
                 self.entity,
@@ -634,7 +639,9 @@ class FanCapabilities(AlexaEntity):
                 self.entity, instance=f"{fan.DOMAIN}.{fan.ATTR_OSCILLATING}"
             )
         force_range_controller = False
-        if supported & fan.FanEntityFeature.PRESET_MODE:
+        if supported & fan.FanEntityFeature.PRESET_MODE and self.entity.attributes.get(
+            fan.ATTR_PRESET_MODES
+        ):
             yield AlexaModeController(
                 self.entity, instance=f"{fan.DOMAIN}.{fan.ATTR_PRESET_MODE}"
             )
@@ -672,7 +679,11 @@ class RemoteCapabilities(AlexaEntity):
         yield AlexaPowerController(self.entity)
         supported = self.entity.attributes.get(ATTR_SUPPORTED_FEATURES, 0)
         activities = self.entity.attributes.get(remote.ATTR_ACTIVITY_LIST) or []
-        if activities and supported & remote.RemoteEntityFeature.ACTIVITY:
+        if (
+            activities
+            and (supported & remote.RemoteEntityFeature.ACTIVITY)
+            and self.entity.attributes.get(remote.ATTR_ACTIVITY_LIST)
+        ):
             yield AlexaModeController(
                 self.entity, instance=f"{remote.DOMAIN}.{remote.ATTR_ACTIVITY}"
             )
@@ -692,7 +703,9 @@ class HumidifierCapabilities(AlexaEntity):
         """Yield the supported interfaces."""
         yield AlexaPowerController(self.entity)
         supported = self.entity.attributes.get(ATTR_SUPPORTED_FEATURES, 0)
-        if supported & humidifier.HumidifierEntityFeature.MODES:
+        if (
+            supported & humidifier.HumidifierEntityFeature.MODES
+        ) and self.entity.attributes.get(humidifier.ATTR_AVAILABLE_MODES):
             yield AlexaModeController(
                 self.entity, instance=f"{humidifier.DOMAIN}.{humidifier.ATTR_MODE}"
             )
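The four hunks above apply one shared guard: an Alexa.ModeController is only yielded when the entity's mode list attribute is actually non-empty, because a mode capability without modes produces an invalid discovery payload. A minimal sketch of that guard, with a hypothetical FakeEntity standing in for a Home Assistant state object:

```python
from dataclasses import dataclass, field

PRESET_MODE = 8  # illustrative feature bit, not necessarily the real enum value


@dataclass
class FakeEntity:
    """Hypothetical stand-in for an entity state with attributes."""

    attributes: dict = field(default_factory=dict)


def yields_mode_controller(entity: FakeEntity, supported: int, feature: int, modes_attr: str) -> bool:
    """True only when the feature bit is set AND the mode list is non-empty."""
    return bool(supported & feature) and bool(entity.attributes.get(modes_attr))


# An empty preset list no longer results in a ModeController being advertised.
print(yields_mode_controller(FakeEntity({"preset_modes": []}), PRESET_MODE, PRESET_MODE, "preset_modes"))   # False
print(yields_mode_controller(FakeEntity({"preset_modes": ["eco"]}), PRESET_MODE, PRESET_MODE, "preset_modes"))  # True
```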
@@ -8,5 +8,5 @@
   "iot_class": "cloud_polling",
   "loggers": ["aioamazondevices"],
   "quality_scale": "bronze",
-  "requirements": ["aioamazondevices==3.2.3"]
+  "requirements": ["aioamazondevices==3.2.10"]
 }
@@ -13,6 +13,6 @@
   "integration_type": "system",
   "iot_class": "cloud_push",
   "loggers": ["acme", "hass_nabucasa", "snitun"],
-  "requirements": ["hass-nabucasa==0.105.0"],
+  "requirements": ["hass-nabucasa==0.106.0"],
   "single_config_entry": true
 }
@@ -7,7 +7,7 @@
   "iot_class": "local_polling",
   "loggers": ["pyenphase"],
   "quality_scale": "platinum",
-  "requirements": ["pyenphase==2.2.0"],
+  "requirements": ["pyenphase==2.2.1"],
   "zeroconf": [
     {
       "type": "_enphase-envoy._tcp.local."
@@ -171,14 +171,19 @@ class FritzboxDataUpdateCoordinator(DataUpdateCoordinator[FritzboxCoordinatorDat

         for device in new_data.devices.values():
             # create device registry entry for new main devices
-            if (
-                device.ain not in self.data.devices
-                and device.device_and_unit_id[1] is None
+            if device.ain not in self.data.devices and (
+                device.device_and_unit_id[1] is None
+                or (
+                    # workaround for sub units without a main device, e.g. Energy 250
+                    # https://github.com/home-assistant/core/issues/145204
+                    device.device_and_unit_id[1] == "1"
+                    and device.device_and_unit_id[0] not in new_data.devices
+                )
             ):
                 dr.async_get(self.hass).async_get_or_create(
                     config_entry_id=self.config_entry.entry_id,
                     name=device.name,
-                    identifiers={(DOMAIN, device.ain)},
+                    identifiers={(DOMAIN, device.device_and_unit_id[0])},
                     manufacturer=device.manufacturer,
                     model=device.productname,
                     sw_version=device.fw_version,
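The reworked condition still registers main devices (unit id None) but now also accepts a sub unit "1" whose main device never shows up, the FRITZ!Smart Energy 250 case tracked in issue 145204, and it keys the registry entry on the main device id. A small sketch of that predicate, under the assumption that device_and_unit_id splits the AIN into (main id, unit id):

```python
def needs_registry_entry(
    ain: str,
    device_and_unit_id: tuple[str, str | None],
    known_ains: set[str],
    reported_ains: set[str],
) -> bool:
    """Sketch of the coordinator check, not the real pyfritzhome API."""
    if ain in known_ains:
        return False  # already created earlier
    main_id, unit_id = device_and_unit_id
    # Normal main device, or the workaround: sub unit "1" whose main device is missing.
    return unit_id is None or (unit_id == "1" and main_id not in reported_ains)


# Only the "-1" sub unit is reported, so it gets the registry entry for "dev".
print(needs_registry_entry("dev-1", ("dev", "1"), set(), {"dev-1"}))  # True
```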
@@ -20,5 +20,5 @@
   "documentation": "https://www.home-assistant.io/integrations/frontend",
   "integration_type": "system",
   "quality_scale": "internal",
-  "requirements": ["home-assistant-frontend==20250702.1"]
+  "requirements": ["home-assistant-frontend==20250702.2"]
 }
@@ -7,5 +7,5 @@
   "integration_type": "service",
   "iot_class": "cloud_polling",
   "loggers": ["dacite", "gios"],
-  "requirements": ["gios==6.0.0"]
+  "requirements": ["gios==6.1.0"]
 }
@@ -127,7 +127,7 @@ class GoogleCloudSpeechToTextEntity(SpeechToTextEntity):
         try:
             responses = await self._client.streaming_recognize(
                 requests=request_generator(),
-                timeout=10,
+                timeout=30,
                 retry=AsyncRetry(initial=0.1, maximum=2.0, multiplier=2.0),
             )

@@ -218,7 +218,7 @@ class BaseGoogleCloudProvider:

         response = await self._client.synthesize_speech(
             request,
-            timeout=10,
+            timeout=30,
             retry=AsyncRetry(initial=0.1, maximum=2.0, multiplier=2.0),
         )

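Both Google Cloud calls keep the retry policy (initial 0.1 s, doubling, capped at 2.0 s) and only raise the overall timeout from 10 s to 30 s. A quick arithmetic sketch of the nominal delay sequence that policy implies, ignoring the jitter the real retry adds:

```python
def backoff_schedule(initial: float, maximum: float, multiplier: float, attempts: int) -> list[float]:
    """Nominal sleep sequence of an exponential backoff (no jitter)."""
    delays, delay = [], initial
    for _ in range(attempts):
        delays.append(round(min(delay, maximum), 3))
        delay *= multiplier
    return delays


# [0.1, 0.2, 0.4, 0.8, 1.6, 2.0, 2.0, 2.0]: a 30 s budget leaves room for far
# more attempts (plus the actual API round trips) than the old 10 s did.
print(backoff_schedule(0.1, 2.0, 2.0, 8))
```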
@@ -1,3 +1 @@
 """The hddtemp component."""
-
-DOMAIN = "hddtemp"
@@ -22,14 +22,11 @@ from homeassistant.const import (
     CONF_PORT,
     UnitOfTemperature,
 )
-from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant
+from homeassistant.core import HomeAssistant
 from homeassistant.helpers import config_validation as cv
 from homeassistant.helpers.entity_platform import AddEntitiesCallback
-from homeassistant.helpers.issue_registry import IssueSeverity, create_issue
 from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType

-from . import DOMAIN
-
 _LOGGER = logging.getLogger(__name__)

 ATTR_DEVICE = "device"
@@ -59,21 +56,6 @@ def setup_platform(
     discovery_info: DiscoveryInfoType | None = None,
 ) -> None:
     """Set up the HDDTemp sensor."""
-    create_issue(
-        hass,
-        HOMEASSISTANT_DOMAIN,
-        f"deprecated_system_packages_yaml_integration_{DOMAIN}",
-        breaks_in_ha_version="2025.12.0",
-        is_fixable=False,
-        issue_domain=DOMAIN,
-        severity=IssueSeverity.WARNING,
-        translation_key="deprecated_system_packages_yaml_integration",
-        translation_placeholders={
-            "domain": DOMAIN,
-            "integration_title": "hddtemp",
-        },
-    )
-
     name = config.get(CONF_NAME)
     host = config.get(CONF_HOST)
     port = config.get(CONF_PORT)
@@ -41,7 +41,12 @@ from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady
 from homeassistant.helpers import device_registry as dr, issue_registry as ir
 from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed

-from .const import API_DEFAULT_RETRY_AFTER, APPLIANCES_WITH_PROGRAMS, DOMAIN
+from .const import (
+    API_DEFAULT_RETRY_AFTER,
+    APPLIANCES_WITH_PROGRAMS,
+    BSH_OPERATION_STATE_PAUSE,
+    DOMAIN,
+)
 from .utils import get_dict_from_home_connect_error

 _LOGGER = logging.getLogger(__name__)
@@ -66,6 +71,7 @@ class HomeConnectApplianceData:

     def update(self, other: HomeConnectApplianceData) -> None:
         """Update data with data from other instance."""
+        self.commands.clear()
         self.commands.update(other.commands)
         self.events.update(other.events)
         self.info.connected = other.info.connected
@@ -201,6 +207,28 @@ class HomeConnectCoordinator(
                                    raw_key=status_key.value,
                                    value=event.value,
                                )
+                                if (
+                                    status_key == StatusKey.BSH_COMMON_OPERATION_STATE
+                                    and event.value == BSH_OPERATION_STATE_PAUSE
+                                    and CommandKey.BSH_COMMON_RESUME_PROGRAM
+                                    not in (
+                                        commands := self.data[
+                                            event_message_ha_id
+                                        ].commands
+                                    )
+                                ):
+                                    # All the appliances that can be paused
+                                    # should have the resume command available.
+                                    commands.add(CommandKey.BSH_COMMON_RESUME_PROGRAM)
+                                    for (
+                                        listener,
+                                        context,
+                                    ) in self._special_listeners.values():
+                                        if (
+                                            EventKey.BSH_COMMON_APPLIANCE_DEPAIRED
+                                            not in context
+                                        ):
+                                            listener()
                            self._call_event_listener(event_message)

                        case EventType.NOTIFY:
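The added branch backfills the resume command as soon as a pause state is streamed, so a paused appliance that never listed the command still gets a usable resume button. A simplified sketch of the idea, using plain strings that only approximate the Home Connect enum values:

```python
def on_operation_state(state: str, commands: set[str]) -> bool:
    """Add a resume command when a pause is observed (sketch, approximate identifiers)."""
    pause = "BSH.Common.EnumType.OperationState.Pause"  # assumed value
    resume = "BSH.Common.Command.ResumeProgram"         # assumed value
    if state == pause and resume not in commands:
        # Anything that can pause should be resumable.
        commands.add(resume)
        return True  # callers would now refresh listeners
    return False


cmds: set[str] = set()
print(on_operation_state("BSH.Common.EnumType.OperationState.Pause", cmds), cmds)
```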
@@ -627,10 +655,7 @@ class HomeConnectCoordinator(
                 "times": str(MAX_EXECUTIONS),
                 "time_window": str(MAX_EXECUTIONS_TIME_WINDOW // 60),
                 "home_connect_resource_url": "https://www.home-connect.com/global/help-support/error-codes#/Togglebox=15362315-13320636-1/",
-                "home_assistant_core_new_issue_url": (
-                    "https://github.com/home-assistant/core/issues/new?template=bug_report.yml"
-                    f"&integration_name={DOMAIN}&integration_link=https://www.home-assistant.io/integrations/{DOMAIN}/"
-                ),
+                "home_assistant_core_issue_url": "https://github.com/home-assistant/core/issues/147299",
             },
         )
         return True
@@ -130,7 +130,7 @@
       "step": {
         "confirm": {
           "title": "[%key:component::home_connect::issues::home_connect_too_many_connected_paired_events::title%]",
-          "description": "The appliance \"{appliance_name}\" has been reported as connected or paired {times} times in less than {time_window} minutes, so refreshes on connected or paired events has been disabled to avoid exceeding the API rate limit.\n\nPlease refer to the [Home Connect Wi-Fi requirements and recommendations]({home_connect_resource_url}). If everything seems right with your network configuration, restart the appliance.\n\nClick \"submit\" to re-enable the updates.\nIf the issue persists, please create an issue in the [Home Assistant core repository]({home_assistant_core_new_issue_url})."
+          "description": "The appliance \"{appliance_name}\" has been reported as connected or paired {times} times in less than {time_window} minutes, so refreshes on connected or paired events has been disabled to avoid exceeding the API rate limit.\n\nPlease refer to the [Home Connect Wi-Fi requirements and recommendations]({home_connect_resource_url}). If everything seems right with your network configuration, restart the appliance.\n\nClick \"submit\" to re-enable the updates.\nIf the issue persists, please see the following issue in the [Home Assistant core repository]({home_assistant_core_issue_url})."
         }
       }
     }
@@ -64,7 +64,7 @@ def setup_bans(hass: HomeAssistant, app: Application, login_threshold: int) -> N
         """Initialize bans when app starts up."""
         await app[KEY_BAN_MANAGER].async_load()

-    app.on_startup.append(ban_startup)  # type: ignore[arg-type]
+    app.on_startup.append(ban_startup)


 @middleware
@@ -8,5 +8,5 @@
   "iot_class": "local_polling",
   "loggers": ["aioimmich"],
   "quality_scale": "silver",
-  "requirements": ["aioimmich==0.10.1"]
+  "requirements": ["aioimmich==0.10.2"]
 }
@@ -44,7 +44,7 @@ class ImmichUpdateEntity(ImmichEntity, UpdateEntity):
         return self.coordinator.data.server_about.version

     @property
-    def latest_version(self) -> str:
+    def latest_version(self) -> str | None:
         """Available new immich server version."""
         assert self.coordinator.data.server_version_check
         return self.coordinator.data.server_version_check.release_version
@@ -23,7 +23,7 @@ from homeassistant.const import (
 from homeassistant.core import HomeAssistant
 from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady
 from homeassistant.helpers import issue_registry as ir
-from homeassistant.helpers.aiohttp_client import async_get_clientsession
+from homeassistant.helpers.aiohttp_client import async_create_clientsession

 from .const import CONF_USE_BLUETOOTH, DOMAIN
 from .coordinator import (
@@ -57,11 +57,10 @@ async def async_setup_entry(hass: HomeAssistant, entry: LaMarzoccoConfigEntry) -
     assert entry.unique_id
     serial = entry.unique_id

-    client = async_get_clientsession(hass)
     cloud_client = LaMarzoccoCloudClient(
         username=entry.data[CONF_USERNAME],
         password=entry.data[CONF_PASSWORD],
-        client=client,
+        client=async_create_clientsession(hass),
     )

     try:
@@ -66,7 +66,7 @@ ENTITIES: tuple[LaMarzoccoBinarySensorEntityDescription, ...] = (
                     WidgetType.CM_BACK_FLUSH, BackFlush(status=BackFlushStatus.OFF)
                 ),
             ).status
-            is BackFlushStatus.REQUESTED
+            in (BackFlushStatus.REQUESTED, BackFlushStatus.CLEANING)
         ),
         entity_category=EntityCategory.DIAGNOSTIC,
         supported_fn=lambda coordinator: (
@@ -33,7 +33,7 @@ from homeassistant.const import (
 )
 from homeassistant.core import callback
 from homeassistant.helpers import config_validation as cv
-from homeassistant.helpers.aiohttp_client import async_get_clientsession
+from homeassistant.helpers.aiohttp_client import async_create_clientsession
 from homeassistant.helpers.selector import (
     SelectOptionDict,
     SelectSelector,
@@ -83,7 +83,7 @@ class LmConfigFlow(ConfigFlow, domain=DOMAIN):
                 **user_input,
             }

-            self._client = async_get_clientsession(self.hass)
+            self._client = async_create_clientsession(self.hass)
             cloud_client = LaMarzoccoCloudClient(
                 username=data[CONF_USERNAME],
                 password=data[CONF_PASSWORD],
@@ -37,5 +37,5 @@
   "iot_class": "cloud_push",
   "loggers": ["pylamarzocco"],
   "quality_scale": "platinum",
-  "requirements": ["pylamarzocco==2.0.9"]
+  "requirements": ["pylamarzocco==2.0.11"]
 }
@@ -26,14 +26,6 @@ class OAuth2FlowHandler(
         """Return logger."""
         return logging.getLogger(__name__)

-    @property
-    def extra_authorize_data(self) -> dict:
-        """Extra data that needs to be appended to the authorize url."""
-        # "vg" is mandatory but the value doesn't seem to matter
-        return {
-            "vg": "sv-SE",
-        }
-
     async def async_step_reauth(
         self, entry_data: Mapping[str, Any]
     ) -> ConfigFlowResult:
@@ -21,5 +21,5 @@
   "documentation": "https://www.home-assistant.io/integrations/motion_blinds",
   "iot_class": "local_push",
   "loggers": ["motionblinds"],
-  "requirements": ["motionblinds==0.6.28"]
+  "requirements": ["motionblinds==0.6.29"]
 }
@@ -2114,6 +2114,9 @@ def data_schema_from_fields(
         if schema_section is None:
             data_schema.update(data_schema_element)
             continue
+        if not data_schema_element:
+            # Do not show empty sections
+            continue
         collapsed = (
             not any(
                 (default := data_schema_fields[str(option)].default) is vol.UNDEFINED
@@ -389,16 +389,6 @@ def async_setup_entity_entry_helper(
         _async_setup_entities()


-def init_entity_id_from_config(
-    hass: HomeAssistant, entity: Entity, config: ConfigType, entity_id_format: str
-) -> None:
-    """Set entity_id from object_id if defined in config."""
-    if CONF_OBJECT_ID in config:
-        entity.entity_id = async_generate_entity_id(
-            entity_id_format, config[CONF_OBJECT_ID], None, hass
-        )
-
-
 class MqttAttributesMixin(Entity):
     """Mixin used for platforms that support JSON attributes."""

@@ -1312,6 +1302,7 @@ class MqttEntity(
     _attr_should_poll = False
     _default_name: str | None
     _entity_id_format: str
+    _update_registry_entity_id: str | None = None

     def __init__(
         self,
@@ -1346,13 +1337,33 @@ class MqttEntity(

     def _init_entity_id(self) -> None:
         """Set entity_id from object_id if defined in config."""
-        init_entity_id_from_config(
-            self.hass, self, self._config, self._entity_id_format
+        if CONF_OBJECT_ID not in self._config:
+            return
+        self.entity_id = async_generate_entity_id(
+            self._entity_id_format, self._config[CONF_OBJECT_ID], None, self.hass
         )
+        if self.unique_id is None:
+            return
+        # Check for previous deleted entities
+        entity_registry = er.async_get(self.hass)
+        entity_platform = self._entity_id_format.split(".")[0]
+        if (
+            deleted_entry := entity_registry.deleted_entities.get(
+                (entity_platform, DOMAIN, self.unique_id)
+            )
+        ) and deleted_entry.entity_id != self.entity_id:
+            # Plan to update the entity_id basis on `object_id` if a deleted entity was found
+            self._update_registry_entity_id = self.entity_id

     @final
     async def async_added_to_hass(self) -> None:
         """Subscribe to MQTT events."""
+        if self._update_registry_entity_id is not None:
+            entity_registry = er.async_get(self.hass)
+            entity_registry.async_update_entity(
+                self.entity_id, new_entity_id=self._update_registry_entity_id
+            )
+
         await super().async_added_to_hass()
         self._subscriptions = {}
         self._prepare_subscribe_topics()
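The net effect of the MQTT change: when object_id yields an entity_id that differs from the id stored for a previously deleted registry entry, the rename back to the object_id based id is remembered at init time and applied in async_added_to_hass, once a registry entry exists to rename. A rough, MQTT-independent sketch of that two-step decision (helper names are hypothetical):

```python
def plan_entity_id_restore(generated_id: str, deleted_entry_id: str | None) -> str | None:
    """Return the entity_id to restore after the entity is added, or None."""
    if deleted_entry_id is None or deleted_entry_id == generated_id:
        return None
    # A deleted registry entry exists under a different id; schedule a rename
    # back to the object_id-derived id once registration has happened.
    return generated_id


print(plan_entity_id_restore("sensor.my_object_id", "sensor.my_object_id_2"))  # 'sensor.my_object_id'
print(plan_entity_id_restore("sensor.my_object_id", None))                     # None
```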
@@ -39,6 +39,7 @@ class BinarySensor(CoilEntity, BinarySensorEntity):
     def __init__(self, coordinator: CoilCoordinator, coil: Coil) -> None:
         """Initialize entity."""
         super().__init__(coordinator, coil, ENTITY_ID_FORMAT)
+        self._on_value = coil.get_mapping_for(1)

     def _async_read_coil(self, data: CoilData) -> None:
-        self._attr_is_on = data.value == "ON"
+        self._attr_is_on = data.value == self._on_value
@@ -41,14 +41,16 @@ class Switch(CoilEntity, SwitchEntity):
     def __init__(self, coordinator: CoilCoordinator, coil: Coil) -> None:
         """Initialize entity."""
         super().__init__(coordinator, coil, ENTITY_ID_FORMAT)
+        self._on_value = coil.get_mapping_for(1)
+        self._off_value = coil.get_mapping_for(0)

     def _async_read_coil(self, data: CoilData) -> None:
-        self._attr_is_on = data.value == "ON"
+        self._attr_is_on = data.value == self._on_value

     async def async_turn_on(self, **kwargs: Any) -> None:
         """Turn the entity on."""
-        await self._async_write_coil("ON")
+        await self._async_write_coil(self._on_value)

     async def async_turn_off(self, **kwargs: Any) -> None:
         """Turn the entity off."""
-        await self._async_write_coil("OFF")
+        await self._async_write_coil(self._off_value)
@@ -6,5 +6,5 @@
   "documentation": "https://www.home-assistant.io/integrations/nyt_games",
   "integration_type": "service",
   "iot_class": "cloud_polling",
-  "requirements": ["nyt_games==0.4.4"]
+  "requirements": ["nyt_games==0.5.0"]
 }
@@ -196,7 +196,7 @@ class RensonFan(RensonEntity, FanEntity):
             all_data = self.coordinator.data
             breeze_temp = self.api.get_field_value(all_data, BREEZE_TEMPERATURE_FIELD)
             await self.hass.async_add_executor_job(
-                self.api.set_breeze, cmd.name, breeze_temp, True
+                self.api.set_breeze, cmd, breeze_temp, True
             )
         else:
             await self.hass.async_add_executor_job(self.api.set_manual_level, cmd)
@@ -49,7 +49,7 @@ class RestData:
         # Convert auth tuple to aiohttp.BasicAuth if needed
        if isinstance(auth, tuple) and len(auth) == 2:
            self._auth: aiohttp.BasicAuth | aiohttp.DigestAuthMiddleware | None = (
-                aiohttp.BasicAuth(auth[0], auth[1])
+                aiohttp.BasicAuth(auth[0], auth[1], encoding="utf-8")
            )
        else:
            self._auth = auth
@@ -115,6 +115,16 @@ class RestData:
             for key, value in rendered_params.items():
                 if isinstance(value, bool):
                     rendered_params[key] = str(value).lower()
+                elif not isinstance(value, (str, int, float, type(None))):
+                    # For backward compatibility with httpx behavior, convert non-primitive
+                    # types to strings. This maintains compatibility after switching from
+                    # httpx to aiohttp. See https://github.com/home-assistant/core/issues/148153
+                    _LOGGER.debug(
+                        "REST query parameter '%s' has type %s, converting to string",
+                        key,
+                        type(value).__name__,
+                    )
+                    rendered_params[key] = str(value)

         _LOGGER.debug("Updating from %s", self._resource)
         # Create request kwargs
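The reason for the new elif: aiohttp is stricter than httpx about query parameter types, so anything that is not str, int, float or None is stringified up front (booleans were already lowercased). A standalone sketch of the same normalisation:

```python
from typing import Any


def normalize_query_params(params: dict[str, Any]) -> dict[str, Any]:
    """Coerce template output into values aiohttp accepts as query params (sketch)."""
    out: dict[str, Any] = {}
    for key, value in params.items():
        if isinstance(value, bool):
            out[key] = str(value).lower()  # True -> "true"
        elif isinstance(value, (str, int, float, type(None))):
            out[key] = value               # passed through unchanged
        else:
            out[key] = str(value)          # lists, dicts, datetimes, ...
    return out


print(normalize_query_params({"active": True, "ids": [1, 2], "limit": 5}))
```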
@@ -140,7 +150,14 @@ class RestData:
                 self._method, self._resource, **request_kwargs
             ) as response:
                 # Read the response
-                self.data = await response.text(encoding=self._encoding)
+                # Only use configured encoding if no charset in Content-Type header
+                # If charset is present in Content-Type, let aiohttp use it
+                if response.charset:
+                    # Let aiohttp use the charset from Content-Type header
+                    self.data = await response.text()
+                else:
+                    # Use configured encoding as fallback
+                    self.data = await response.text(encoding=self._encoding)
                 self.headers = response.headers

         except TimeoutError as ex:
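Decoding now trusts a charset declared in Content-Type and only falls back to the user-configured encoding when the server stays silent. A hedged sketch with a stubbed response object instead of a real aiohttp.ClientResponse:

```python
import asyncio


class FakeResponse:
    """Tiny stand-in for aiohttp.ClientResponse (illustrative only)."""

    def __init__(self, body: bytes, charset: str | None) -> None:
        self._body, self.charset = body, charset

    async def text(self, encoding: str | None = None) -> str:
        return self._body.decode(encoding or self.charset or "utf-8")


async def read_body(response: FakeResponse, configured_encoding: str) -> str:
    # A charset from the Content-Type header wins; the configured one is a fallback.
    if response.charset:
        return await response.text()
    return await response.text(encoding=configured_encoding)


# Header charset missing -> the configured fallback is used.
print(asyncio.run(read_body(FakeResponse("żółć".encode("iso-8859-2"), None), "iso-8859-2")))
```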
@@ -178,6 +178,11 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
             )

             if not service.return_response:
+                # always read the response to avoid closing the connection
+                # before the server has finished sending it, while avoiding excessive memory usage
+                async for _ in response.content.iter_chunked(1024):
+                    pass
+
                 return None

             _content = None
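Draining the unused body keeps the keep-alive connection reusable (an unread aiohttp response forces the connection to be closed instead of returned to the pool) while the 1024-byte chunks keep memory flat. A minimal sketch of the same pattern against a plain aiohttp session; the URL is a placeholder:

```python
import aiohttp


async def fire_and_forget(session: aiohttp.ClientSession, url: str) -> int:
    """Send a request, discard the body, but still drain it chunk by chunk."""
    async with session.get(url) as response:
        # Reading the stream to the end lets aiohttp release the connection
        # cleanly; discarding each chunk keeps memory bounded to the chunk size.
        async for _chunk in response.content.iter_chunked(1024):
            pass
        return response.status
```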
@@ -6,5 +6,5 @@
   "documentation": "https://www.home-assistant.io/integrations/sharkiq",
   "iot_class": "cloud_polling",
   "loggers": ["sharkiq"],
-  "requirements": ["sharkiq==1.1.0"]
+  "requirements": ["sharkiq==1.1.1"]
 }
@@ -9,7 +9,7 @@
   "iot_class": "local_push",
   "loggers": ["aioshelly"],
   "quality_scale": "silver",
-  "requirements": ["aioshelly==13.7.1"],
+  "requirements": ["aioshelly==13.7.2"],
   "zeroconf": [
     {
       "type": "_http._tcp.local.",
@@ -30,5 +30,5 @@
   "iot_class": "cloud_push",
   "loggers": ["pysmartthings"],
   "quality_scale": "bronze",
-  "requirements": ["pysmartthings==3.2.5"]
+  "requirements": ["pysmartthings==3.2.7"]
 }
@@ -18,6 +18,11 @@ from .entity import SmartThingsEntity

 LAMP_TO_HA = {
+    "extraHigh": "extra_high",
     "high": "high",
+    "mid": "mid",
+    "low": "low",
+    "on": "on",
+    "off": "off",
 }

 WASHER_SOIL_LEVEL_TO_HA = {
@@ -12,7 +12,7 @@
   "integration_type": "device",
   "iot_class": "local_push",
   "quality_scale": "silver",
-  "requirements": ["pysmlight==0.2.6"],
+  "requirements": ["pysmlight==0.2.7"],
   "zeroconf": [
     {
       "type": "_slzb-06._tcp.local."
@@ -38,7 +38,7 @@ BINARY_SENSOR_DESCRIPTIONS: list[SnooBinarySensorEntityDescription] = [
     SnooBinarySensorEntityDescription(
         key="right_clip",
         translation_key="right_clip",
-        value_fn=lambda data: data.left_safety_clip,
+        value_fn=lambda data: data.right_safety_clip,
         device_class=BinarySensorDeviceClass.CONNECTIVITY,
         entity_category=EntityCategory.DIAGNOSTIC,
     ),
@@ -221,12 +221,16 @@ def _get_item_thumbnail(
 ) -> str | None:
     """Construct path to thumbnail image."""
     item_thumbnail: str | None = None
-    if artwork_track_id := item.get("artwork_track_id"):
+    track_id = item.get("artwork_track_id") or (
+        item.get("id") if item_type == "track" else None
+    )
+
+    if track_id:
         if internal_request:
-            item_thumbnail = player.generate_image_url_from_track_id(artwork_track_id)
+            item_thumbnail = player.generate_image_url_from_track_id(track_id)
         elif item_type is not None:
             item_thumbnail = entity.get_browse_image_url(
-                item_type, item["id"], artwork_track_id
+                item_type, item["id"], track_id
             )

     elif search_type in ["apps", "radios"]:
@@ -311,8 +315,7 @@
             title=item["title"],
             media_content_type=item_type,
             media_class=CONTENT_TYPE_MEDIA_CLASS[item_type]["item"],
-            can_expand=CONTENT_TYPE_MEDIA_CLASS[item_type]["children"]
-            is not None,
+            can_expand=bool(CONTENT_TYPE_MEDIA_CLASS[item_type]["children"]),
             can_play=True,
         )

@@ -41,5 +41,5 @@
   "iot_class": "local_push",
   "loggers": ["switchbot"],
   "quality_scale": "gold",
-  "requirements": ["PySwitchbot==0.67.0"]
+  "requirements": ["PySwitchbot==0.68.1"]
 }
@@ -113,11 +113,11 @@ SENSOR_DESCRIPTIONS_BY_DEVICE_TYPES = {
     ),
     "Plug Mini (US)": (
         VOLTAGE_DESCRIPTION,
-        CURRENT_DESCRIPTION_IN_A,
+        CURRENT_DESCRIPTION_IN_MA,
     ),
     "Plug Mini (JP)": (
         VOLTAGE_DESCRIPTION,
-        CURRENT_DESCRIPTION_IN_A,
+        CURRENT_DESCRIPTION_IN_MA,
     ),
     "Hub 2": (
         TEMPERATURE_DESCRIPTION,
@@ -125,8 +125,8 @@ VEHICLE_DESCRIPTIONS: tuple[TeslemetryBinarySensorEntityDescription, ...] = (
         key="charge_state_conn_charge_cable",
         polling=True,
         polling_value_fn=lambda x: x != "<invalid>",
-        streaming_listener=lambda vehicle, callback: vehicle.listen_ChargingCableType(
-            lambda value: callback(value is not None and value != "Unknown")
+        streaming_listener=lambda vehicle, callback: vehicle.listen_DetailedChargeState(
+            lambda value: callback(None if value is None else value != "Disconnected")
         ),
         entity_category=EntityCategory.DIAGNOSTIC,
         device_class=BinarySensorDeviceClass.CONNECTIVITY,
@@ -40,7 +40,7 @@
   "integration_type": "hub",
   "iot_class": "local_push",
   "loggers": ["uiprotect", "unifi_discovery"],
-  "requirements": ["uiprotect==7.14.1", "unifi-discovery==1.2.0"],
+  "requirements": ["uiprotect==7.14.2", "unifi-discovery==1.2.0"],
   "ssdp": [
     {
       "manufacturer": "Ubiquiti Networks",
@@ -11,5 +11,5 @@
   "documentation": "https://www.home-assistant.io/integrations/vicare",
   "iot_class": "cloud_polling",
   "loggers": ["PyViCare"],
-  "requirements": ["PyViCare==2.44.0"]
+  "requirements": ["PyViCare==2.50.0"]
 }
@@ -98,7 +98,10 @@ class FlowHandler(ConfigFlow, domain=DOMAIN):
             data = {CONF_HOST: self._host, CONF_CLIENT_SECRET: client.client_key}

             if not self._name:
-                self._name = f"{DEFAULT_NAME} {client.tv_info.system['modelName']}"
+                if model_name := client.tv_info.system.get("modelName"):
+                    self._name = f"{DEFAULT_NAME} {model_name}"
+                else:
+                    self._name = DEFAULT_NAME
             return self.async_create_entry(title=self._name, data=data)

         return self.async_show_form(step_id="pairing", errors=errors)
@@ -6,7 +6,7 @@
   "documentation": "https://www.home-assistant.io/integrations/webostv",
   "iot_class": "local_push",
   "loggers": ["aiowebostv"],
-  "requirements": ["aiowebostv==0.7.3"],
+  "requirements": ["aiowebostv==0.7.4"],
   "ssdp": [
     {
       "st": "urn:lge-com:service:webos-second-screen:1"
@@ -25,7 +25,7 @@ if TYPE_CHECKING:
 APPLICATION_NAME: Final = "HomeAssistant"
 MAJOR_VERSION: Final = 2025
 MINOR_VERSION: Final = 7
-PATCH_VERSION: Final = "1"
+PATCH_VERSION: Final = "2"
 __short_version__: Final = f"{MAJOR_VERSION}.{MINOR_VERSION}"
 __version__: Final = f"{__short_version__}.{PATCH_VERSION}"
 REQUIRED_PYTHON_VER: Final[tuple[int, int, int]] = (3, 13, 2)
@@ -6,7 +6,7 @@ aiodns==3.5.0
 aiohasupervisor==0.3.1
 aiohttp-asyncmdnsresolver==0.1.1
 aiohttp-fast-zlib==0.3.0
-aiohttp==3.12.13
+aiohttp==3.12.14
 aiohttp_cors==0.8.1
 aiousbwatcher==1.1.1
 aiozoneinfo==0.2.3
@@ -35,10 +35,10 @@ fnv-hash-fast==1.5.0
 go2rtc-client==0.2.1
 ha-ffmpeg==3.2.2
 habluetooth==3.49.0
-hass-nabucasa==0.105.0
+hass-nabucasa==0.106.0
 hassil==2.2.3
 home-assistant-bluetooth==1.13.1
-home-assistant-frontend==20250702.1
+home-assistant-frontend==20250702.2
 home-assistant-intents==2025.6.23
 httpx==0.28.1
 ifaddr==0.2.0
@@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta"

 [project]
 name = "homeassistant"
-version = "2025.7.1"
+version = "2025.7.2"
 license = "Apache-2.0"
 license-files = ["LICENSE*", "homeassistant/backports/LICENSE*"]
 description = "Open-source home automation platform running on Python 3."
|
||||
# change behavior based on presence of supervisor. Deprecated with #127228
|
||||
# Lib can be removed with 2025.11
|
||||
"aiohasupervisor==0.3.1",
|
||||
"aiohttp==3.12.13",
|
||||
"aiohttp==3.12.14",
|
||||
"aiohttp_cors==0.8.1",
|
||||
"aiohttp-fast-zlib==0.3.0",
|
||||
"aiohttp-asyncmdnsresolver==0.1.1",
|
||||
@ -47,7 +47,7 @@ dependencies = [
|
||||
"fnv-hash-fast==1.5.0",
|
||||
# hass-nabucasa is imported by helpers which don't depend on the cloud
|
||||
# integration
|
||||
"hass-nabucasa==0.105.0",
|
||||
"hass-nabucasa==0.106.0",
|
||||
# When bumping httpx, please check the version pins of
|
||||
# httpcore, anyio, and h11 in gen_requirements_all
|
||||
"httpx==0.28.1",
|
||||
|
requirements.txt (generated): 4 changed lines
@@ -5,7 +5,7 @@
 # Home Assistant Core
 aiodns==3.5.0
 aiohasupervisor==0.3.1
-aiohttp==3.12.13
+aiohttp==3.12.14
 aiohttp_cors==0.8.1
 aiohttp-fast-zlib==0.3.0
 aiohttp-asyncmdnsresolver==0.1.1
|
||||
ciso8601==2.3.2
|
||||
cronsim==2.6
|
||||
fnv-hash-fast==1.5.0
|
||||
hass-nabucasa==0.105.0
|
||||
hass-nabucasa==0.106.0
|
||||
httpx==0.28.1
|
||||
home-assistant-bluetooth==1.13.1
|
||||
ifaddr==0.2.0
|
||||
|
requirements_all.txt (generated): 34 changed lines
@ -84,7 +84,7 @@ PyQRCode==1.2.1
|
||||
PyRMVtransport==0.3.3
|
||||
|
||||
# homeassistant.components.switchbot
|
||||
PySwitchbot==0.67.0
|
||||
PySwitchbot==0.68.1
|
||||
|
||||
# homeassistant.components.switchmate
|
||||
PySwitchmate==0.5.1
|
||||
@ -100,7 +100,7 @@ PyTransportNSW==0.1.1
|
||||
PyTurboJPEG==1.8.0
|
||||
|
||||
# homeassistant.components.vicare
|
||||
PyViCare==2.44.0
|
||||
PyViCare==2.50.0
|
||||
|
||||
# homeassistant.components.xiaomi_aqara
|
||||
PyXiaomiGateway==0.14.3
|
||||
@ -185,7 +185,7 @@ aioairzone-cloud==0.6.12
|
||||
aioairzone==1.0.0
|
||||
|
||||
# homeassistant.components.alexa_devices
|
||||
aioamazondevices==3.2.3
|
||||
aioamazondevices==3.2.10
|
||||
|
||||
# homeassistant.components.ambient_network
|
||||
# homeassistant.components.ambient_station
|
||||
@ -283,7 +283,7 @@ aiohue==4.7.4
|
||||
aioimaplib==2.0.1
|
||||
|
||||
# homeassistant.components.immich
|
||||
aioimmich==0.10.1
|
||||
aioimmich==0.10.2
|
||||
|
||||
# homeassistant.components.apache_kafka
|
||||
aiokafka==0.10.0
|
||||
@ -381,7 +381,7 @@ aioruuvigateway==0.1.0
|
||||
aiosenz==1.0.0
|
||||
|
||||
# homeassistant.components.shelly
|
||||
aioshelly==13.7.1
|
||||
aioshelly==13.7.2
|
||||
|
||||
# homeassistant.components.skybell
|
||||
aioskybell==22.7.0
|
||||
@ -435,7 +435,7 @@ aiowatttime==0.1.1
|
||||
aiowebdav2==0.4.6
|
||||
|
||||
# homeassistant.components.webostv
|
||||
aiowebostv==0.7.3
|
||||
aiowebostv==0.7.4
|
||||
|
||||
# homeassistant.components.withings
|
||||
aiowithings==3.1.6
|
||||
@ -1020,7 +1020,7 @@ georss-qld-bushfire-alert-client==0.8
|
||||
getmac==0.9.5
|
||||
|
||||
# homeassistant.components.gios
|
||||
gios==6.0.0
|
||||
gios==6.1.0
|
||||
|
||||
# homeassistant.components.gitter
|
||||
gitterpy==0.1.7
|
||||
@ -1127,7 +1127,7 @@ habiticalib==0.4.0
|
||||
habluetooth==3.49.0
|
||||
|
||||
# homeassistant.components.cloud
|
||||
hass-nabucasa==0.105.0
|
||||
hass-nabucasa==0.106.0
|
||||
|
||||
# homeassistant.components.splunk
|
||||
hass-splunk==0.1.1
|
||||
@ -1168,7 +1168,7 @@ hole==0.8.0
|
||||
holidays==0.75
|
||||
|
||||
# homeassistant.components.frontend
|
||||
home-assistant-frontend==20250702.1
|
||||
home-assistant-frontend==20250702.2
|
||||
|
||||
# homeassistant.components.conversation
|
||||
home-assistant-intents==2025.6.23
|
||||
@ -1452,7 +1452,7 @@ monzopy==1.4.2
|
||||
mopeka-iot-ble==0.8.0
|
||||
|
||||
# homeassistant.components.motion_blinds
|
||||
motionblinds==0.6.28
|
||||
motionblinds==0.6.29
|
||||
|
||||
# homeassistant.components.motionblinds_ble
|
||||
motionblindsble==0.1.3
|
||||
@ -1555,7 +1555,7 @@ numato-gpio==0.13.0
|
||||
numpy==2.3.0
|
||||
|
||||
# homeassistant.components.nyt_games
|
||||
nyt_games==0.4.4
|
||||
nyt_games==0.5.0
|
||||
|
||||
# homeassistant.components.oasa_telematics
|
||||
oasatelematics==0.3
|
||||
@ -1962,7 +1962,7 @@ pyeiscp==0.0.7
|
||||
pyemoncms==0.1.1
|
||||
|
||||
# homeassistant.components.enphase_envoy
|
||||
pyenphase==2.2.0
|
||||
pyenphase==2.2.1
|
||||
|
||||
# homeassistant.components.envisalink
|
||||
pyenvisalink==4.7
|
||||
@ -2100,7 +2100,7 @@ pykwb==0.0.8
|
||||
pylacrosse==0.4
|
||||
|
||||
# homeassistant.components.lamarzocco
|
||||
pylamarzocco==2.0.9
|
||||
pylamarzocco==2.0.11
|
||||
|
||||
# homeassistant.components.lastfm
|
||||
pylast==5.1.0
|
||||
@ -2348,7 +2348,7 @@ pysmappee==0.2.29
|
||||
pysmarlaapi==0.9.0
|
||||
|
||||
# homeassistant.components.smartthings
|
||||
pysmartthings==3.2.5
|
||||
pysmartthings==3.2.7
|
||||
|
||||
# homeassistant.components.smarty
|
||||
pysmarty2==0.10.2
|
||||
@ -2360,7 +2360,7 @@ pysmhi==1.0.2
|
||||
pysml==0.1.5
|
||||
|
||||
# homeassistant.components.smlight
|
||||
pysmlight==0.2.6
|
||||
pysmlight==0.2.7
|
||||
|
||||
# homeassistant.components.snmp
|
||||
pysnmp==6.2.6
|
||||
@ -2756,7 +2756,7 @@ sentry-sdk==1.45.1
|
||||
sfrbox-api==0.0.11
|
||||
|
||||
# homeassistant.components.sharkiq
|
||||
sharkiq==1.1.0
|
||||
sharkiq==1.1.1
|
||||
|
||||
# homeassistant.components.aquostv
|
||||
sharp_aquos_rc==0.3.2
|
||||
@ -2994,7 +2994,7 @@ typedmonarchmoney==0.4.4
|
||||
uasiren==0.0.1
|
||||
|
||||
# homeassistant.components.unifiprotect
|
||||
uiprotect==7.14.1
|
||||
uiprotect==7.14.2
|
||||
|
||||
# homeassistant.components.landisgyr_heat_meter
|
||||
ultraheat-api==0.5.7
|
||||
|
requirements_test_all.txt (generated): 34 changed lines
@ -81,7 +81,7 @@ PyQRCode==1.2.1
|
||||
PyRMVtransport==0.3.3
|
||||
|
||||
# homeassistant.components.switchbot
|
||||
PySwitchbot==0.67.0
|
||||
PySwitchbot==0.68.1
|
||||
|
||||
# homeassistant.components.syncthru
|
||||
PySyncThru==0.8.0
|
||||
@ -94,7 +94,7 @@ PyTransportNSW==0.1.1
|
||||
PyTurboJPEG==1.8.0
|
||||
|
||||
# homeassistant.components.vicare
|
||||
PyViCare==2.44.0
|
||||
PyViCare==2.50.0
|
||||
|
||||
# homeassistant.components.xiaomi_aqara
|
||||
PyXiaomiGateway==0.14.3
|
||||
@ -173,7 +173,7 @@ aioairzone-cloud==0.6.12
|
||||
aioairzone==1.0.0
|
||||
|
||||
# homeassistant.components.alexa_devices
|
||||
aioamazondevices==3.2.3
|
||||
aioamazondevices==3.2.10
|
||||
|
||||
# homeassistant.components.ambient_network
|
||||
# homeassistant.components.ambient_station
|
||||
@ -268,7 +268,7 @@ aiohue==4.7.4
|
||||
aioimaplib==2.0.1
|
||||
|
||||
# homeassistant.components.immich
|
||||
aioimmich==0.10.1
|
||||
aioimmich==0.10.2
|
||||
|
||||
# homeassistant.components.apache_kafka
|
||||
aiokafka==0.10.0
|
||||
@ -363,7 +363,7 @@ aioruuvigateway==0.1.0
|
||||
aiosenz==1.0.0
|
||||
|
||||
# homeassistant.components.shelly
|
||||
aioshelly==13.7.1
|
||||
aioshelly==13.7.2
|
||||
|
||||
# homeassistant.components.skybell
|
||||
aioskybell==22.7.0
|
||||
@ -417,7 +417,7 @@ aiowatttime==0.1.1
|
||||
aiowebdav2==0.4.6
|
||||
|
||||
# homeassistant.components.webostv
|
||||
aiowebostv==0.7.3
|
||||
aiowebostv==0.7.4
|
||||
|
||||
# homeassistant.components.withings
|
||||
aiowithings==3.1.6
|
||||
@ -890,7 +890,7 @@ georss-qld-bushfire-alert-client==0.8
|
||||
getmac==0.9.5
|
||||
|
||||
# homeassistant.components.gios
|
||||
gios==6.0.0
|
||||
gios==6.1.0
|
||||
|
||||
# homeassistant.components.glances
|
||||
glances-api==0.8.0
|
||||
@ -988,7 +988,7 @@ habiticalib==0.4.0
|
||||
habluetooth==3.49.0
|
||||
|
||||
# homeassistant.components.cloud
|
||||
hass-nabucasa==0.105.0
|
||||
hass-nabucasa==0.106.0
|
||||
|
||||
# homeassistant.components.assist_satellite
|
||||
# homeassistant.components.conversation
|
||||
@ -1017,7 +1017,7 @@ hole==0.8.0
|
||||
holidays==0.75
|
||||
|
||||
# homeassistant.components.frontend
|
||||
home-assistant-frontend==20250702.1
|
||||
home-assistant-frontend==20250702.2
|
||||
|
||||
# homeassistant.components.conversation
|
||||
home-assistant-intents==2025.6.23
|
||||
@ -1244,7 +1244,7 @@ monzopy==1.4.2
|
||||
mopeka-iot-ble==0.8.0
|
||||
|
||||
# homeassistant.components.motion_blinds
|
||||
motionblinds==0.6.28
|
||||
motionblinds==0.6.29
|
||||
|
||||
# homeassistant.components.motionblinds_ble
|
||||
motionblindsble==0.1.3
|
||||
@ -1329,7 +1329,7 @@ numato-gpio==0.13.0
|
||||
numpy==2.3.0
|
||||
|
||||
# homeassistant.components.nyt_games
|
||||
nyt_games==0.4.4
|
||||
nyt_games==0.5.0
|
||||
|
||||
# homeassistant.components.google
|
||||
oauth2client==4.1.3
|
||||
@ -1637,7 +1637,7 @@ pyeiscp==0.0.7
|
||||
pyemoncms==0.1.1
|
||||
|
||||
# homeassistant.components.enphase_envoy
|
||||
pyenphase==2.2.0
|
||||
pyenphase==2.2.1
|
||||
|
||||
# homeassistant.components.everlights
|
||||
pyeverlights==0.1.0
|
||||
@ -1745,7 +1745,7 @@ pykrakenapi==0.1.8
|
||||
pykulersky==0.5.8
|
||||
|
||||
# homeassistant.components.lamarzocco
|
||||
pylamarzocco==2.0.9
|
||||
pylamarzocco==2.0.11
|
||||
|
||||
# homeassistant.components.lastfm
|
||||
pylast==5.1.0
|
||||
@ -1951,7 +1951,7 @@ pysmappee==0.2.29
|
||||
pysmarlaapi==0.9.0
|
||||
|
||||
# homeassistant.components.smartthings
|
||||
pysmartthings==3.2.5
|
||||
pysmartthings==3.2.7
|
||||
|
||||
# homeassistant.components.smarty
|
||||
pysmarty2==0.10.2
|
||||
@ -1963,7 +1963,7 @@ pysmhi==1.0.2
|
||||
pysml==0.1.5
|
||||
|
||||
# homeassistant.components.smlight
|
||||
pysmlight==0.2.6
|
||||
pysmlight==0.2.7
|
||||
|
||||
# homeassistant.components.snmp
|
||||
pysnmp==6.2.6
|
||||
@ -2278,7 +2278,7 @@ sentry-sdk==1.45.1
|
||||
sfrbox-api==0.0.11
|
||||
|
||||
# homeassistant.components.sharkiq
|
||||
sharkiq==1.1.0
|
||||
sharkiq==1.1.1
|
||||
|
||||
# homeassistant.components.simplefin
|
||||
simplefin4py==0.0.18
|
||||
@ -2468,7 +2468,7 @@ typedmonarchmoney==0.4.4
|
||||
uasiren==0.0.1
|
||||
|
||||
# homeassistant.components.unifiprotect
|
||||
uiprotect==7.14.1
|
||||
uiprotect==7.14.2
|
||||
|
||||
# homeassistant.components.landisgyr_heat_meter
|
||||
ultraheat-api==0.5.7
|
||||
|
@ -5,6 +5,7 @@ from unittest.mock import patch
|
||||
|
||||
import pytest
|
||||
|
||||
from homeassistant.components import fan, humidifier, remote, water_heater
|
||||
from homeassistant.components.alexa import smart_home
|
||||
from homeassistant.const import EntityCategory, UnitOfTemperature, __version__
|
||||
from homeassistant.core import HomeAssistant
|
||||
@ -200,3 +201,167 @@ async def test_serialize_discovery_recovers(
|
||||
"Error serializing Alexa.PowerController discovery"
|
||||
f" for {hass.states.get('switch.bla')}"
|
||||
) in caplog.text
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
("domain", "state", "state_attributes", "mode_controller_exists"),
|
||||
[
|
||||
("switch", "on", {}, False),
|
||||
(
|
||||
"fan",
|
||||
"on",
|
||||
{
|
||||
"preset_modes": ["eco", "auto"],
|
||||
"preset_mode": "eco",
|
||||
"supported_features": fan.FanEntityFeature.PRESET_MODE.value,
|
||||
},
|
||||
True,
|
||||
),
|
||||
(
|
||||
"fan",
|
||||
"on",
|
||||
{
|
||||
"preset_modes": ["eco", "auto"],
|
||||
"preset_mode": None,
|
||||
"supported_features": fan.FanEntityFeature.PRESET_MODE.value,
|
||||
},
|
||||
True,
|
||||
),
|
||||
(
|
||||
"fan",
|
||||
"on",
|
||||
{
|
||||
"preset_modes": ["eco"],
|
||||
"preset_mode": None,
|
||||
"supported_features": fan.FanEntityFeature.PRESET_MODE.value,
|
||||
},
|
||||
True,
|
||||
),
|
||||
(
|
||||
"fan",
|
||||
"on",
|
||||
{
|
||||
"preset_modes": [],
|
||||
"preset_mode": None,
|
||||
"supported_features": fan.FanEntityFeature.PRESET_MODE.value,
|
||||
},
|
||||
False,
|
||||
),
|
||||
(
|
||||
"humidifier",
|
||||
"on",
|
||||
{
|
||||
"available_modes": ["auto", "manual"],
|
||||
"mode": "auto",
|
||||
"supported_features": humidifier.HumidifierEntityFeature.MODES.value,
|
||||
},
|
||||
True,
|
||||
),
|
||||
(
|
||||
"humidifier",
|
||||
"on",
|
||||
{
|
||||
"available_modes": ["auto"],
|
||||
"mode": None,
|
||||
"supported_features": humidifier.HumidifierEntityFeature.MODES.value,
|
||||
},
|
||||
True,
|
||||
),
|
||||
(
|
||||
"humidifier",
|
||||
"on",
|
||||
{
|
||||
"available_modes": [],
|
||||
"mode": None,
|
||||
"supported_features": humidifier.HumidifierEntityFeature.MODES.value,
|
||||
},
|
||||
False,
|
||||
),
|
||||
(
|
||||
"remote",
|
||||
"on",
|
||||
{
|
||||
"activity_list": ["tv", "dvd"],
|
||||
"current_activity": "tv",
|
||||
"supported_features": remote.RemoteEntityFeature.ACTIVITY.value,
|
||||
},
|
||||
True,
|
||||
),
|
||||
(
|
||||
"remote",
|
||||
"on",
|
||||
{
|
||||
"activity_list": ["tv"],
|
||||
"current_activity": None,
|
||||
"supported_features": remote.RemoteEntityFeature.ACTIVITY.value,
|
||||
},
|
||||
True,
|
||||
),
|
||||
(
|
||||
"remote",
|
||||
"on",
|
||||
{
|
||||
"activity_list": [],
|
||||
"current_activity": None,
|
||||
"supported_features": remote.RemoteEntityFeature.ACTIVITY.value,
|
||||
},
|
||||
False,
|
||||
),
|
||||
(
|
||||
"water_heater",
|
||||
"on",
|
||||
{
|
||||
"operation_list": ["on", "auto"],
|
||||
"operation_mode": "auto",
|
||||
"supported_features": water_heater.WaterHeaterEntityFeature.OPERATION_MODE.value,
|
||||
},
|
||||
True,
|
||||
),
|
||||
(
|
||||
"water_heater",
|
||||
"on",
|
||||
{
|
||||
"operation_list": ["on"],
|
||||
"operation_mode": None,
|
||||
"supported_features": water_heater.WaterHeaterEntityFeature.OPERATION_MODE.value,
|
||||
},
|
||||
True,
|
||||
),
|
||||
(
|
||||
"water_heater",
|
||||
"on",
|
||||
{
|
||||
"operation_list": [],
|
||||
"operation_mode": None,
|
||||
"supported_features": water_heater.WaterHeaterEntityFeature.OPERATION_MODE.value,
|
||||
},
|
||||
False,
|
||||
),
|
||||
],
|
||||
)
|
||||
async def test_mode_controller_is_omitted_if_no_modes_are_set(
|
||||
hass: HomeAssistant,
|
||||
domain: str,
|
||||
state: str,
|
||||
state_attributes: dict[str, Any],
|
||||
mode_controller_exists: bool,
|
||||
) -> None:
|
||||
"""Test we do not generate an invalid discovery with AlexaModeController during serialize discovery.
|
||||
|
||||
AlexModeControllers need at least 2 modes. If one mode is set, an extra mode will be added for compatibility.
|
||||
If no modes are offered, the mode controller should be omitted to prevent schema validations.
|
||||
"""
|
||||
request = get_new_request("Alexa.Discovery", "Discover")
|
||||
|
||||
hass.states.async_set(
|
||||
f"{domain}.bla", state, {"friendly_name": "Boop Woz"} | state_attributes
|
||||
)
|
||||
|
||||
msg = await smart_home.async_handle_message(hass, get_default_config(hass), request)
|
||||
msg = msg["event"]
|
||||
|
||||
interfaces = {
|
||||
ifc["interface"] for ifc in msg["payload"]["endpoints"][0]["capabilities"]
|
||||
}
|
||||
|
||||
assert ("Alexa.ModeController" in interfaces) is mode_controller_exists
|
||||
|
@ -50,6 +50,7 @@ def mock_amazon_devices_client() -> Generator[AsyncMock]:
|
||||
device_type="echo",
|
||||
device_owner_customer_id="amazon_ower_id",
|
||||
device_cluster_members=[TEST_SERIAL_NUMBER],
|
||||
device_locale="en-US",
|
||||
online=True,
|
||||
serial_number=TEST_SERIAL_NUMBER,
|
||||
software_version="echo_test_software_version",
|
||||
|
@ -15,7 +15,12 @@ from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers import device_registry as dr, entity_registry as er
|
||||
from homeassistant.util.dt import utcnow
|
||||
|
||||
from . import FritzDeviceCoverMock, FritzDeviceSwitchMock, FritzEntityBaseMock
|
||||
from . import (
|
||||
FritzDeviceCoverMock,
|
||||
FritzDeviceSensorMock,
|
||||
FritzDeviceSwitchMock,
|
||||
FritzEntityBaseMock,
|
||||
)
|
||||
from .const import MOCK_CONFIG
|
||||
|
||||
from tests.common import MockConfigEntry, async_fire_time_changed
|
||||
@ -140,3 +145,42 @@ async def test_coordinator_automatic_registry_cleanup(
|
||||
|
||||
assert len(er.async_entries_for_config_entry(entity_registry, entry.entry_id)) == 12
|
||||
assert len(dr.async_entries_for_config_entry(device_registry, entry.entry_id)) == 1
|
||||
|
||||
|
||||
async def test_coordinator_workaround_sub_units_without_main_device(
|
||||
hass: HomeAssistant,
|
||||
fritz: Mock,
|
||||
device_registry: dr.DeviceRegistry,
|
||||
) -> None:
|
||||
"""Test the workaround for sub units without main device."""
|
||||
fritz().get_devices.return_value = [
|
||||
FritzDeviceSensorMock(
|
||||
ain="bad_device-1",
|
||||
device_and_unit_id=("bad_device", "1"),
|
||||
name="bad_sensor_sub",
|
||||
),
|
||||
FritzDeviceSensorMock(
|
||||
ain="good_device",
|
||||
device_and_unit_id=("good_device", None),
|
||||
name="good_sensor",
|
||||
),
|
||||
FritzDeviceSensorMock(
|
||||
ain="good_device-1",
|
||||
device_and_unit_id=("good_device", "1"),
|
||||
name="good_sensor_sub",
|
||||
),
|
||||
]
|
||||
|
||||
entry = MockConfigEntry(
|
||||
domain=DOMAIN,
|
||||
data=MOCK_CONFIG[DOMAIN][CONF_DEVICES][0],
|
||||
unique_id="any",
|
||||
)
|
||||
entry.add_to_hass(hass)
|
||||
await hass.config_entries.async_setup(entry.entry_id)
|
||||
await hass.async_block_till_done(wait_background_tasks=True)
|
||||
|
||||
device_entries = dr.async_entries_for_config_entry(device_registry, entry.entry_id)
|
||||
assert len(device_entries) == 2
|
||||
assert device_entries[0].identifiers == {(DOMAIN, "good_device")}
|
||||
assert device_entries[1].identifiers == {(DOMAIN, "bad_device")}
|
||||
|
@ -1,16 +1,29 @@
|
||||
"""Tests for GIOS."""
|
||||
|
||||
import json
|
||||
from unittest.mock import patch
|
||||
|
||||
from homeassistant.components.gios.const import DOMAIN
|
||||
from homeassistant.core import HomeAssistant
|
||||
|
||||
from tests.common import MockConfigEntry, async_load_fixture
|
||||
from tests.common import (
|
||||
MockConfigEntry,
|
||||
async_load_json_array_fixture,
|
||||
async_load_json_object_fixture,
|
||||
)
|
||||
|
||||
STATIONS = [
|
||||
{"id": 123, "stationName": "Test Name 1", "gegrLat": "99.99", "gegrLon": "88.88"},
|
||||
{"id": 321, "stationName": "Test Name 2", "gegrLat": "77.77", "gegrLon": "66.66"},
|
||||
{
|
||||
"Identyfikator stacji": 123,
|
||||
"Nazwa stacji": "Test Name 1",
|
||||
"WGS84 φ N": "99.99",
|
||||
"WGS84 λ E": "88.88",
|
||||
},
|
||||
{
|
||||
"Identyfikator stacji": 321,
|
||||
"Nazwa stacji": "Test Name 2",
|
||||
"WGS84 φ N": "77.77",
|
||||
"WGS84 λ E": "66.66",
|
||||
},
|
||||
]
|
||||
|
||||
|
||||
@ -26,13 +39,13 @@ async def init_integration(
|
||||
entry_id="86129426118ae32020417a53712d6eef",
|
||||
)
|
||||
|
||||
indexes = json.loads(await async_load_fixture(hass, "indexes.json", DOMAIN))
|
||||
station = json.loads(await async_load_fixture(hass, "station.json", DOMAIN))
|
||||
sensors = json.loads(await async_load_fixture(hass, "sensors.json", DOMAIN))
|
||||
indexes = await async_load_json_object_fixture(hass, "indexes.json", DOMAIN)
|
||||
station = await async_load_json_array_fixture(hass, "station.json", DOMAIN)
|
||||
sensors = await async_load_json_object_fixture(hass, "sensors.json", DOMAIN)
|
||||
if incomplete_data:
|
||||
indexes["stIndexLevel"]["indexLevelName"] = "foo"
|
||||
sensors["pm10"]["values"][0]["value"] = None
|
||||
sensors["pm10"]["values"][1]["value"] = None
|
||||
indexes["AqIndex"] = "foo"
|
||||
sensors["pm10"]["Lista danych pomiarowych"][0]["Wartość"] = None
|
||||
sensors["pm10"]["Lista danych pomiarowych"][1]["Wartość"] = None
|
||||
if invalid_indexes:
|
||||
indexes = {}
|
||||
|
||||
|
@ -1,29 +1,38 @@
|
||||
{
|
||||
"id": 123,
|
||||
"stCalcDate": "2020-07-31 15:10:17",
|
||||
"stIndexLevel": { "id": 1, "indexLevelName": "Dobry" },
|
||||
"stSourceDataDate": "2020-07-31 14:00:00",
|
||||
"so2CalcDate": "2020-07-31 15:10:17",
|
||||
"so2IndexLevel": { "id": 0, "indexLevelName": "Bardzo dobry" },
|
||||
"so2SourceDataDate": "2020-07-31 14:00:00",
|
||||
"no2CalcDate": 1596201017000,
|
||||
"no2IndexLevel": { "id": 0, "indexLevelName": "Dobry" },
|
||||
"no2SourceDataDate": "2020-07-31 14:00:00",
|
||||
"coCalcDate": "2020-07-31 15:10:17",
|
||||
"coIndexLevel": { "id": 0, "indexLevelName": "Dobry" },
|
||||
"coSourceDataDate": "2020-07-31 14:00:00",
|
||||
"pm10CalcDate": "2020-07-31 15:10:17",
|
||||
"pm10IndexLevel": { "id": 0, "indexLevelName": "Dobry" },
|
||||
"pm10SourceDataDate": "2020-07-31 14:00:00",
|
||||
"pm25CalcDate": "2020-07-31 15:10:17",
|
||||
"pm25IndexLevel": { "id": 0, "indexLevelName": "Dobry" },
|
||||
"pm25SourceDataDate": "2020-07-31 14:00:00",
|
||||
"o3CalcDate": "2020-07-31 15:10:17",
|
||||
"o3IndexLevel": { "id": 1, "indexLevelName": "Dobry" },
|
||||
"o3SourceDataDate": "2020-07-31 14:00:00",
|
||||
"c6h6CalcDate": "2020-07-31 15:10:17",
|
||||
"c6h6IndexLevel": { "id": 0, "indexLevelName": "Bardzo dobry" },
|
||||
"c6h6SourceDataDate": "2020-07-31 14:00:00",
|
||||
"stIndexStatus": true,
|
||||
"stIndexCrParam": "OZON"
|
||||
"AqIndex": {
|
||||
"Identyfikator stacji pomiarowej": 123,
|
||||
"Data wykonania obliczeń indeksu": "2020-07-31 15:10:17",
|
||||
"Nazwa kategorii indeksu": "Dobry",
|
||||
"Data danych źródłowych, z których policzono wartość indeksu dla wskaźnika st": "2020-07-31 14:00:00",
|
||||
"Data wykonania obliczeń indeksu dla wskaźnika SO2": "2020-07-31 15:10:17",
|
||||
"Wartość indeksu dla wskaźnika SO2": 0,
|
||||
"Nazwa kategorii indeksu dla wskażnika SO2": "Bardzo dobry",
|
||||
"Data danych źródłowych, z których policzono wartość indeksu dla wskaźnika SO2": "2020-07-31 14:00:00",
|
||||
"Data wykonania obliczeń indeksu dla wskaźnika NO2": "2020-07-31 14:00:00",
|
||||
"Wartość indeksu dla wskaźnika NO2": 0,
|
||||
"Nazwa kategorii indeksu dla wskażnika NO2": "Dobry",
|
||||
"Data danych źródłowych, z których policzono wartość indeksu dla wskaźnika NO2": "2020-07-31 14:00:00",
|
||||
"Data danych źródłowych, z których policzono wartość indeksu dla wskaźnika CO": "2020-07-31 15:10:17",
|
||||
"Wartość indeksu dla wskaźnika CO": 0,
|
||||
"Nazwa kategorii indeksu dla wskażnika CO": "Dobry",
|
||||
"Data wykonania obliczeń indeksu dla wskaźnika CO": "2020-07-31 14:00:00",
|
||||
"Data danych źródłowych, z których policzono wartość indeksu dla wskaźnika PM10": "2020-07-31 15:10:17",
|
||||
"Wartość indeksu dla wskaźnika PM10": 0,
|
||||
"Nazwa kategorii indeksu dla wskażnika PM10": "Dobry",
|
||||
"Data wykonania obliczeń indeksu dla wskaźnika PM10": "2020-07-31 14:00:00",
|
||||
"Data danych źródłowych, z których policzono wartość indeksu dla wskaźnika PM2.5": "2020-07-31 15:10:17",
|
||||
"Wartość indeksu dla wskaźnika PM2.5": 0,
|
||||
"Nazwa kategorii indeksu dla wskażnika PM2.5": "Dobry",
|
||||
"Data wykonania obliczeń indeksu dla wskaźnika PM2.5": "2020-07-31 14:00:00",
|
||||
"Data danych źródłowych, z których policzono wartość indeksu dla wskaźnika O3": "2020-07-31 15:10:17",
|
||||
"Wartość indeksu dla wskaźnika O3": 1,
|
||||
"Nazwa kategorii indeksu dla wskażnika O3": "Dobry",
|
||||
"Data wykonania obliczeń indeksu dla wskaźnika O3": "2020-07-31 14:00:00",
|
||||
"Data danych źródłowych, z których policzono wartość indeksu dla wskaźnika C6H6": "2020-07-31 15:10:17",
|
||||
"Wartość indeksu dla wskaźnika C6H6": 0,
|
||||
"Nazwa kategorii indeksu dla wskażnika C6H6": "Bardzo dobry",
|
||||
"Data wykonania obliczeń indeksu dla wskaźnika C6H6": "2020-07-31 14:00:00",
|
||||
"Status indeksu ogólnego dla stacji pomiarowej": true,
|
||||
"Kod zanieczyszczenia krytycznego": "OZON"
|
||||
}
|
||||
}
|
||||
|
@ -1,51 +1,51 @@
|
||||
{
|
||||
"so2": {
|
||||
"values": [
|
||||
{ "date": "2020-07-31 15:00:00", "value": 4.35478 },
|
||||
{ "date": "2020-07-31 14:00:00", "value": 4.25478 },
|
||||
{ "date": "2020-07-31 13:00:00", "value": 4.34309 }
|
||||
"Lista danych pomiarowych": [
|
||||
{ "Data": "2020-07-31 15:00:00", "Wartość": 4.35478 },
|
||||
{ "Data": "2020-07-31 14:00:00", "Wartość": 4.25478 },
|
||||
{ "Data": "2020-07-31 13:00:00", "Wartość": 4.34309 }
|
||||
]
|
||||
},
|
||||
"c6h6": {
|
||||
"values": [
|
||||
{ "date": "2020-07-31 15:00:00", "value": 0.23789 },
|
||||
{ "date": "2020-07-31 14:00:00", "value": 0.22789 },
|
||||
{ "date": "2020-07-31 13:00:00", "value": 0.21315 }
|
||||
"Lista danych pomiarowych": [
|
||||
{ "Data": "2020-07-31 15:00:00", "Wartość": 0.23789 },
|
||||
{ "Data": "2020-07-31 14:00:00", "Wartość": 0.22789 },
|
||||
{ "Data": "2020-07-31 13:00:00", "Wartość": 0.21315 }
|
||||
]
|
||||
},
|
||||
"co": {
|
||||
"values": [
|
||||
{ "date": "2020-07-31 15:00:00", "value": 251.874 },
|
||||
{ "date": "2020-07-31 14:00:00", "value": 250.874 },
|
||||
{ "date": "2020-07-31 13:00:00", "value": 251.097 }
|
||||
"Lista danych pomiarowych": [
|
||||
{ "Data": "2020-07-31 15:00:00", "Wartość": 251.874 },
|
||||
{ "Data": "2020-07-31 14:00:00", "Wartość": 250.874 },
|
||||
{ "Data": "2020-07-31 13:00:00", "Wartość": 251.097 }
|
||||
]
|
||||
},
|
||||
"no2": {
|
||||
"values": [
|
||||
{ "date": "2020-07-31 15:00:00", "value": 7.13411 },
|
||||
{ "date": "2020-07-31 14:00:00", "value": 7.33411 },
|
||||
{ "date": "2020-07-31 13:00:00", "value": 9.32578 }
|
||||
"Lista danych pomiarowych": [
|
||||
{ "Data": "2020-07-31 15:00:00", "Wartość": 7.13411 },
|
||||
{ "Data": "2020-07-31 14:00:00", "Wartość": 7.33411 },
|
||||
{ "Data": "2020-07-31 13:00:00", "Wartość": 9.32578 }
|
||||
]
|
||||
},
|
||||
"o3": {
|
||||
"values": [
|
||||
{ "date": "2020-07-31 15:00:00", "value": 95.7768 },
|
||||
{ "date": "2020-07-31 14:00:00", "value": 93.7768 },
|
||||
{ "date": "2020-07-31 13:00:00", "value": 89.4232 }
|
||||
"Lista danych pomiarowych": [
|
||||
{ "Data": "2020-07-31 15:00:00", "Wartość": 95.7768 },
|
||||
{ "Data": "2020-07-31 14:00:00", "Wartość": 93.7768 },
|
||||
{ "Data": "2020-07-31 13:00:00", "Wartość": 89.4232 }
|
||||
]
|
||||
},
|
||||
"pm2.5": {
|
||||
"values": [
|
||||
{ "date": "2020-07-31 15:00:00", "value": 4 },
|
||||
{ "date": "2020-07-31 14:00:00", "value": 4 },
|
||||
{ "date": "2020-07-31 13:00:00", "value": 5 }
|
||||
"Lista danych pomiarowych": [
|
||||
{ "Data": "2020-07-31 15:00:00", "Wartość": 4 },
|
||||
{ "Data": "2020-07-31 14:00:00", "Wartość": 4 },
|
||||
{ "Data": "2020-07-31 13:00:00", "Wartość": 5 }
|
||||
]
|
||||
},
|
||||
"pm10": {
|
||||
"values": [
|
||||
{ "date": "2020-07-31 15:00:00", "value": 16.8344 },
|
||||
{ "date": "2020-07-31 14:00:00", "value": 17.8344 },
|
||||
{ "date": "2020-07-31 13:00:00", "value": 20.8094 }
|
||||
"Lista danych pomiarowych": [
|
||||
{ "Data": "2020-07-31 15:00:00", "Wartość": 16.8344 },
|
||||
{ "Data": "2020-07-31 14:00:00", "Wartość": 17.8344 },
|
||||
{ "Data": "2020-07-31 13:00:00", "Wartość": 20.8094 }
|
||||
]
|
||||
}
|
||||
}
|
||||
|
@ -1,72 +1,58 @@
[
{
"id": 672,
"stationId": 117,
"param": {
"paramName": "dwutlenek siarki",
"paramFormula": "SO2",
"paramCode": "SO2",
"idParam": 1
}
"Identyfikator stanowiska": 672,
"Identyfikator stacji": 117,
"Wskaźnik": "dwutlenek siarki",
"Wskaźnik - wzór": "SO2",
"Wskaźnik - kod": "SO2",
"Id wskaźnika": 1
},
{
"id": 658,
"stationId": 117,
"param": {
"paramName": "benzen",
"paramFormula": "C6H6",
"paramCode": "C6H6",
"idParam": 10
}
"Identyfikator stanowiska": 658,
"Identyfikator stacji": 117,
"Wskaźnik": "benzen",
"Wskaźnik - wzór": "C6H6",
"Wskaźnik - kod": "C6H6",
"Id wskaźnika": 10
},
{
"id": 660,
"stationId": 117,
"param": {
"paramName": "tlenek węgla",
"paramFormula": "CO",
"paramCode": "CO",
"idParam": 8
}
"Identyfikator stanowiska": 660,
"Identyfikator stacji": 117,
"Wskaźnik": "tlenek węgla",
"Wskaźnik - wzór": "CO",
"Wskaźnik - kod": "CO",
"Id wskaźnika": 8
},
{
"id": 665,
"stationId": 117,
"param": {
"paramName": "dwutlenek azotu",
"paramFormula": "NO2",
"paramCode": "NO2",
"idParam": 6
}
"Identyfikator stanowiska": 665,
"Identyfikator stacji": 117,
"Wskaźnik": "dwutlenek azotu",
"Wskaźnik - wzór": "NO2",
"Wskaźnik - kod": "NO2",
"Id wskaźnika": 6
},
{
"id": 667,
"stationId": 117,
"param": {
"paramName": "ozon",
"paramFormula": "O3",
"paramCode": "O3",
"idParam": 5
}
"Identyfikator stanowiska": 667,
"Identyfikator stacji": 117,
"Wskaźnik": "ozon",
"Wskaźnik - wzór": "O3",
"Wskaźnik - kod": "O3",
"Id wskaźnika": 5
},
{
"id": 670,
"stationId": 117,
"param": {
"paramName": "pył zawieszony PM2.5",
"paramFormula": "PM2.5",
"paramCode": "PM2.5",
"idParam": 69
}
"Identyfikator stanowiska": 670,
"Identyfikator stacji": 117,
"Wskaźnik": "pył zawieszony PM2.5",
"Wskaźnik - wzór": "PM2.5",
"Wskaźnik - kod": "PM2.5",
"Id wskaźnika": 69
},
{
"id": 14395,
"stationId": 117,
"param": {
"paramName": "pył zawieszony PM10",
"paramFormula": "PM10",
"paramCode": "PM10",
"idParam": 3
}
"Identyfikator stanowiska": 14395,
"Identyfikator stacji": 117,
"Wskaźnik": "pył zawieszony PM10",
"Wskaźnik - wzór": "PM10",
"Wskaźnik - kod": "PM10",
"Id wskaźnika": 3
}
]
@ -42,12 +42,14 @@
'name': 'carbon monoxide',
'value': 251.874,
}),
'no': None,
'no2': dict({
'id': 665,
'index': 'good',
'name': 'nitrogen dioxide',
'value': 7.13411,
}),
'nox': None,
'o3': dict({
'id': 667,
'index': 'good',
@ -1,15 +1,12 @@
"""The tests for the hddtemp platform."""

import socket
from unittest.mock import Mock, patch
from unittest.mock import patch

import pytest

from homeassistant.components.hddtemp import DOMAIN
from homeassistant.components.sensor import DOMAIN as PLATFORM_DOMAIN
from homeassistant.const import UnitOfTemperature
from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant
from homeassistant.helpers import issue_registry as ir
from homeassistant.core import HomeAssistant
from homeassistant.setup import async_setup_component

VALID_CONFIG_MINIMAL = {"sensor": {"platform": "hddtemp"}}
@ -195,17 +192,3 @@ async def test_hddtemp_host_unreachable(hass: HomeAssistant, telnetmock) -> None
assert await async_setup_component(hass, "sensor", VALID_CONFIG_HOST_UNREACHABLE)
await hass.async_block_till_done()
assert len(hass.states.async_all()) == 0


@patch.dict("sys.modules", gsp=Mock())
async def test_repair_issue_is_created(
hass: HomeAssistant,
issue_registry: ir.IssueRegistry,
) -> None:
"""Test repair issue is created."""
assert await async_setup_component(hass, PLATFORM_DOMAIN, VALID_CONFIG_MINIMAL)
await hass.async_block_till_done()
assert (
HOMEASSISTANT_DOMAIN,
f"deprecated_system_packages_yaml_integration_{DOMAIN}",
) in issue_registry.issues
@ -1,12 +1,14 @@
"""Tests for home_connect button entities."""

from collections.abc import Awaitable, Callable
from typing import Any
from typing import Any, cast
from unittest.mock import AsyncMock, MagicMock

from aiohomeconnect.model import (
ArrayOfCommands,
CommandKey,
Event,
EventKey,
EventMessage,
HomeAppliance,
)
@ -317,3 +319,62 @@ async def test_stop_program_button_exception(
{ATTR_ENTITY_ID: entity_id},
blocking=True,
)


@pytest.mark.parametrize("appliance", ["Washer"], indirect=True)
async def test_enable_resume_command_on_pause(
hass: HomeAssistant,
client: MagicMock,
config_entry: MockConfigEntry,
integration_setup: Callable[[MagicMock], Awaitable[bool]],
appliance: HomeAppliance,
) -> None:
"""Test if all commands enabled option works as expected."""
entity_id = "button.washer_resume_program"

original_get_available_commands = client.get_available_commands

async def get_available_commands_side_effect(ha_id: str) -> ArrayOfCommands:
array_of_commands = cast(
ArrayOfCommands, await original_get_available_commands(ha_id)
)
if ha_id == appliance.ha_id:
for command in array_of_commands.commands:
if command.key == CommandKey.BSH_COMMON_RESUME_PROGRAM:
# Simulate that the resume command is not available initially
array_of_commands.commands.remove(command)
break
return array_of_commands

client.get_available_commands = AsyncMock(
side_effect=get_available_commands_side_effect
)

assert await integration_setup(client)
assert config_entry.state is ConfigEntryState.LOADED

assert not hass.states.get(entity_id)

await client.add_events(
[
EventMessage(
appliance.ha_id,
EventType.STATUS,
data=ArrayOfEvents(
[
Event(
key=EventKey.BSH_COMMON_STATUS_OPERATION_STATE,
raw_key=EventKey.BSH_COMMON_STATUS_OPERATION_STATE.value,
timestamp=0,
level="",
handling="",
value="BSH.Common.EnumType.OperationState.Pause",
)
]
),
)
]
)
await hass.async_block_till_done()

assert hass.states.get(entity_id)
@ -46,7 +46,6 @@ async def test_full_flow(
f"{OAUTH2_AUTHORIZE}?response_type=code&client_id={CLIENT_ID}"
f"&redirect_uri={REDIRECT_URL}"
f"&state={state}"
"&vg=sv-SE"
)

client = await hass_client_no_auth()
@ -118,7 +117,6 @@ async def test_flow_reauth_abort(
f"{OAUTH2_AUTHORIZE}?response_type=code&client_id={CLIENT_ID}"
f"&redirect_uri={REDIRECT_URL}"
f"&state={state}"
"&vg=sv-SE"
)

client = await hass_client_no_auth()
@ -187,7 +185,6 @@ async def test_flow_reconfigure_abort(
f"{OAUTH2_AUTHORIZE}?response_type=code&client_id={CLIENT_ID}"
f"&redirect_uri={REDIRECT_URL}"
f"&state={state}"
"&vg=sv-SE"
)

client = await hass_client_no_auth()
@ -247,7 +244,6 @@ async def test_zeroconf_flow(
f"{OAUTH2_AUTHORIZE}?response_type=code&client_id={CLIENT_ID}"
f"&redirect_uri={REDIRECT_URL}"
f"&state={state}"
"&vg=sv-SE"
)

client = await hass_client_no_auth()
@ -3220,7 +3220,7 @@ async def test_subentry_configflow(
"url": learn_more_url(component["platform"]),
}

# Process entity details setep
# Process entity details step
assert result["step_id"] == "entity_platform_config"

# First test validators if set of test
@ -4212,3 +4212,52 @@ async def test_subentry_reconfigure_availablity(
"payload_available": "1",
"payload_not_available": "0",
}


async def test_subentry_configflow_section_feature(
hass: HomeAssistant,
mqtt_mock_entry: MqttMockHAClientGenerator,
) -> None:
"""Test the subentry ConfigFlow sections are hidden when they have no configurable options."""
await mqtt_mock_entry()
config_entry = hass.config_entries.async_entries(mqtt.DOMAIN)[0]

result = await hass.config_entries.subentries.async_init(
(config_entry.entry_id, "device"),
context={"source": config_entries.SOURCE_USER},
)
assert result["type"] is FlowResultType.FORM
assert result["step_id"] == "device"
result = await hass.config_entries.subentries.async_configure(
result["flow_id"],
user_input={"name": "Bla", "mqtt_settings": {"qos": 1}},
)
assert result["type"] is FlowResultType.FORM

result = await hass.config_entries.subentries.async_configure(
result["flow_id"],
user_input={"platform": "fan"},
)
assert result["type"] is FlowResultType.FORM
assert result["description_placeholders"] == {
"mqtt_device": "Bla",
"platform": "fan",
"entity": "Bla",
"url": learn_more_url("fan"),
}

# Process entity details step
assert result["step_id"] == "entity_platform_config"

result = await hass.config_entries.subentries.async_configure(
result["flow_id"],
user_input={"fan_feature_speed": True},
)
assert result["type"] is FlowResultType.FORM
assert result["errors"] == {}
assert result["step_id"] == "mqtt_platform_config"

# Check mqtt platform config flow sections from data schema
data_schema = result["data_schema"].schema
assert "fan_speed_settings" in data_schema
assert "fan_preset_mode_settings" not in data_schema
@ -1496,6 +1496,52 @@ async def test_discovery_with_object_id(
assert (domain, "object bla") in hass.data["mqtt"].discovery_already_discovered


async def test_discovery_with_object_id_for_previous_deleted_entity(
hass: HomeAssistant,
mqtt_mock_entry: MqttMockHAClientGenerator,
) -> None:
"""Test discovering an MQTT entity with object_id and unique_id."""

topic = "homeassistant/sensor/object/bla/config"
config = (
'{ "name": "Hello World 11", "unique_id": "very_unique", '
'"obj_id": "hello_id", "state_topic": "test-topic" }'
)
new_config = (
'{ "name": "Hello World 11", "unique_id": "very_unique", '
'"obj_id": "updated_hello_id", "state_topic": "test-topic" }'
)
initial_entity_id = "sensor.hello_id"
new_entity_id = "sensor.updated_hello_id"
name = "Hello World 11"
domain = "sensor"

await mqtt_mock_entry()
async_fire_mqtt_message(hass, topic, config)
await hass.async_block_till_done()

state = hass.states.get(initial_entity_id)

assert state is not None
assert state.name == name
assert (domain, "object bla") in hass.data["mqtt"].discovery_already_discovered

# Delete the entity
async_fire_mqtt_message(hass, topic, "")
await hass.async_block_till_done()
assert (domain, "object bla") not in hass.data["mqtt"].discovery_already_discovered

# Rediscover with new object_id
async_fire_mqtt_message(hass, topic, new_config)
await hass.async_block_till_done()

state = hass.states.get(new_entity_id)

assert state is not None
assert state.name == name
assert (domain, "object bla") in hass.data["mqtt"].discovery_already_discovered


async def test_discovery_incl_nodeid(
hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator
) -> None:
@ -24,6 +24,8 @@ MOCK_ENTRY_DATA = {
"connection_type": "nibegw",
}

MOCK_UNIQUE_ID = "mock_entry_unique_id"


class MockConnection(Connection):
"""A mock connection class."""
@ -59,7 +61,9 @@ class MockConnection(Connection):

async def async_add_entry(hass: HomeAssistant, data: dict[str, Any]) -> MockConfigEntry:
"""Add entry and get the coordinator."""
entry = MockConfigEntry(domain=DOMAIN, title="Dummy", data=data)
entry = MockConfigEntry(
domain=DOMAIN, title="Dummy", data=data, unique_id=MOCK_UNIQUE_ID
)

entry.add_to_hass(hass)
await hass.config_entries.async_setup(entry.entry_id)
@ -0,0 +1,97 @@
# serializer version: 1
# name: test_update[Model.F1255-49239-OFF][binary_sensor.eb101_installed_49239-entry]
EntityRegistryEntrySnapshot({
'aliases': set({
}),
'area_id': None,
'capabilities': None,
'config_entry_id': <ANY>,
'config_subentry_id': <ANY>,
'device_class': None,
'device_id': <ANY>,
'disabled_by': None,
'domain': 'binary_sensor',
'entity_category': <EntityCategory.DIAGNOSTIC: 'diagnostic'>,
'entity_id': 'binary_sensor.eb101_installed_49239',
'has_entity_name': True,
'hidden_by': None,
'icon': None,
'id': <ANY>,
'labels': set({
}),
'name': None,
'options': dict({
}),
'original_device_class': None,
'original_icon': None,
'original_name': 'EB101 Installed',
'platform': 'nibe_heatpump',
'previous_unique_id': None,
'suggested_object_id': 'eb101_installed_49239',
'supported_features': 0,
'translation_key': None,
'unique_id': 'mock_entry_unique_id-49239',
'unit_of_measurement': None,
})
# ---
# name: test_update[Model.F1255-49239-OFF][binary_sensor.eb101_installed_49239-state]
StateSnapshot({
'attributes': ReadOnlyDict({
'friendly_name': 'F1255 EB101 Installed',
}),
'context': <ANY>,
'entity_id': 'binary_sensor.eb101_installed_49239',
'last_changed': <ANY>,
'last_reported': <ANY>,
'last_updated': <ANY>,
'state': 'off',
})
# ---
# name: test_update[Model.F1255-49239-ON][binary_sensor.eb101_installed_49239-entry]
EntityRegistryEntrySnapshot({
'aliases': set({
}),
'area_id': None,
'capabilities': None,
'config_entry_id': <ANY>,
'config_subentry_id': <ANY>,
'device_class': None,
'device_id': <ANY>,
'disabled_by': None,
'domain': 'binary_sensor',
'entity_category': <EntityCategory.DIAGNOSTIC: 'diagnostic'>,
'entity_id': 'binary_sensor.eb101_installed_49239',
'has_entity_name': True,
'hidden_by': None,
'icon': None,
'id': <ANY>,
'labels': set({
}),
'name': None,
'options': dict({
}),
'original_device_class': None,
'original_icon': None,
'original_name': 'EB101 Installed',
'platform': 'nibe_heatpump',
'previous_unique_id': None,
'suggested_object_id': 'eb101_installed_49239',
'supported_features': 0,
'translation_key': None,
'unique_id': 'mock_entry_unique_id-49239',
'unit_of_measurement': None,
})
# ---
# name: test_update[Model.F1255-49239-ON][binary_sensor.eb101_installed_49239-state]
StateSnapshot({
'attributes': ReadOnlyDict({
'friendly_name': 'F1255 EB101 Installed',
}),
'context': <ANY>,
'entity_id': 'binary_sensor.eb101_installed_49239',
'last_changed': <ANY>,
'last_reported': <ANY>,
'last_updated': <ANY>,
'state': 'on',
})
# ---
193 tests/components/nibe_heatpump/snapshots/test_switch.ambr Normal file
@ -0,0 +1,193 @@
# serializer version: 1
# name: test_update[Model.F1255-48043-ACTIVE][switch.holiday_activated_48043-entry]
EntityRegistryEntrySnapshot({
'aliases': set({
}),
'area_id': None,
'capabilities': None,
'config_entry_id': <ANY>,
'config_subentry_id': <ANY>,
'device_class': None,
'device_id': <ANY>,
'disabled_by': None,
'domain': 'switch',
'entity_category': <EntityCategory.CONFIG: 'config'>,
'entity_id': 'switch.holiday_activated_48043',
'has_entity_name': True,
'hidden_by': None,
'icon': None,
'id': <ANY>,
'labels': set({
}),
'name': None,
'options': dict({
}),
'original_device_class': None,
'original_icon': None,
'original_name': 'Holiday - Activated',
'platform': 'nibe_heatpump',
'previous_unique_id': None,
'suggested_object_id': 'holiday_activated_48043',
'supported_features': 0,
'translation_key': None,
'unique_id': 'mock_entry_unique_id-48043',
'unit_of_measurement': None,
})
# ---
# name: test_update[Model.F1255-48043-ACTIVE][switch.holiday_activated_48043-state]
StateSnapshot({
'attributes': ReadOnlyDict({
'friendly_name': 'F1255 Holiday - Activated',
}),
'context': <ANY>,
'entity_id': 'switch.holiday_activated_48043',
'last_changed': <ANY>,
'last_reported': <ANY>,
'last_updated': <ANY>,
'state': 'on',
})
# ---
# name: test_update[Model.F1255-48043-INACTIVE][switch.holiday_activated_48043-entry]
EntityRegistryEntrySnapshot({
'aliases': set({
}),
'area_id': None,
'capabilities': None,
'config_entry_id': <ANY>,
'config_subentry_id': <ANY>,
'device_class': None,
'device_id': <ANY>,
'disabled_by': None,
'domain': 'switch',
'entity_category': <EntityCategory.CONFIG: 'config'>,
'entity_id': 'switch.holiday_activated_48043',
'has_entity_name': True,
'hidden_by': None,
'icon': None,
'id': <ANY>,
'labels': set({
}),
'name': None,
'options': dict({
}),
'original_device_class': None,
'original_icon': None,
'original_name': 'Holiday - Activated',
'platform': 'nibe_heatpump',
'previous_unique_id': None,
'suggested_object_id': 'holiday_activated_48043',
'supported_features': 0,
'translation_key': None,
'unique_id': 'mock_entry_unique_id-48043',
'unit_of_measurement': None,
})
# ---
# name: test_update[Model.F1255-48043-INACTIVE][switch.holiday_activated_48043-state]
StateSnapshot({
'attributes': ReadOnlyDict({
'friendly_name': 'F1255 Holiday - Activated',
}),
'context': <ANY>,
'entity_id': 'switch.holiday_activated_48043',
'last_changed': <ANY>,
'last_reported': <ANY>,
'last_updated': <ANY>,
'state': 'off',
})
# ---
# name: test_update[Model.F1255-48071-OFF][switch.flm_1_accessory_48071-entry]
EntityRegistryEntrySnapshot({
'aliases': set({
}),
'area_id': None,
'capabilities': None,
'config_entry_id': <ANY>,
'config_subentry_id': <ANY>,
'device_class': None,
'device_id': <ANY>,
'disabled_by': None,
'domain': 'switch',
'entity_category': <EntityCategory.CONFIG: 'config'>,
'entity_id': 'switch.flm_1_accessory_48071',
'has_entity_name': True,
'hidden_by': None,
'icon': None,
'id': <ANY>,
'labels': set({
}),
'name': None,
'options': dict({
}),
'original_device_class': None,
'original_icon': None,
'original_name': 'FLM 1 accessory',
'platform': 'nibe_heatpump',
'previous_unique_id': None,
'suggested_object_id': 'flm_1_accessory_48071',
'supported_features': 0,
'translation_key': None,
'unique_id': 'mock_entry_unique_id-48071',
'unit_of_measurement': None,
})
# ---
# name: test_update[Model.F1255-48071-OFF][switch.flm_1_accessory_48071-state]
StateSnapshot({
'attributes': ReadOnlyDict({
'friendly_name': 'F1255 FLM 1 accessory',
}),
'context': <ANY>,
'entity_id': 'switch.flm_1_accessory_48071',
'last_changed': <ANY>,
'last_reported': <ANY>,
'last_updated': <ANY>,
'state': 'off',
})
# ---
# name: test_update[Model.F1255-48071-ON][switch.flm_1_accessory_48071-entry]
EntityRegistryEntrySnapshot({
'aliases': set({
}),
'area_id': None,
'capabilities': None,
'config_entry_id': <ANY>,
'config_subentry_id': <ANY>,
'device_class': None,
'device_id': <ANY>,
'disabled_by': None,
'domain': 'switch',
'entity_category': <EntityCategory.CONFIG: 'config'>,
'entity_id': 'switch.flm_1_accessory_48071',
'has_entity_name': True,
'hidden_by': None,
'icon': None,
'id': <ANY>,
'labels': set({
}),
'name': None,
'options': dict({
}),
'original_device_class': None,
'original_icon': None,
'original_name': 'FLM 1 accessory',
'platform': 'nibe_heatpump',
'previous_unique_id': None,
'suggested_object_id': 'flm_1_accessory_48071',
'supported_features': 0,
'translation_key': None,
'unique_id': 'mock_entry_unique_id-48071',
'unit_of_measurement': None,
})
# ---
# name: test_update[Model.F1255-48071-ON][switch.flm_1_accessory_48071-state]
StateSnapshot({
'attributes': ReadOnlyDict({
'friendly_name': 'F1255 FLM 1 accessory',
}),
'context': <ANY>,
'entity_id': 'switch.flm_1_accessory_48071',
'last_changed': <ANY>,
'last_reported': <ANY>,
'last_updated': <ANY>,
'state': 'on',
})
# ---
49 tests/components/nibe_heatpump/test_binary_sensor.py Normal file
@ -0,0 +1,49 @@
"""Test the Nibe Heat Pump binary sensor entities."""

from typing import Any
from unittest.mock import patch

from nibe.heatpump import Model
import pytest
from syrupy.assertion import SnapshotAssertion

from homeassistant.const import Platform
from homeassistant.core import HomeAssistant
from homeassistant.helpers import entity_registry as er

from . import async_add_model

from tests.common import snapshot_platform


@pytest.fixture(autouse=True)
async def fixture_single_platform():
"""Only allow this platform to load."""
with patch(
"homeassistant.components.nibe_heatpump.PLATFORMS", [Platform.BINARY_SENSOR]
):
yield


@pytest.mark.parametrize(
("model", "address", "value"),
[
(Model.F1255, 49239, "OFF"),
(Model.F1255, 49239, "ON"),
],
)
@pytest.mark.usefixtures("entity_registry_enabled_by_default")
async def test_update(
hass: HomeAssistant,
entity_registry: er.EntityRegistry,
model: Model,
address: int,
value: Any,
coils: dict[int, Any],
snapshot: SnapshotAssertion,
) -> None:
"""Test setting of value."""
coils[address] = value

entry = await async_add_model(hass, model)
await snapshot_platform(hass, entity_registry, snapshot, entry.entry_id)
@ -1,4 +1,4 @@
"""Test the Nibe Heat Pump config flow."""
"""Test the Nibe Heat Pump buttons."""

from typing import Any
from unittest.mock import AsyncMock, patch
@ -1,4 +1,4 @@
"""Test the Nibe Heat Pump config flow."""
"""Test the Nibe Heat Pump climate entities."""

from typing import Any
from unittest.mock import call, patch
@ -1,4 +1,4 @@
"""Test the Nibe Heat Pump config flow."""
"""Test the Nibe Heat Pump number entities."""

from typing import Any
from unittest.mock import AsyncMock, patch
133 tests/components/nibe_heatpump/test_switch.py Normal file
@ -0,0 +1,133 @@
"""Test the Nibe Heat Pump switch entities."""

from typing import Any
from unittest.mock import AsyncMock, patch

from nibe.coil import CoilData
from nibe.heatpump import Model
import pytest
from syrupy.assertion import SnapshotAssertion

from homeassistant.components.switch import (
DOMAIN as SWITCH_PLATFORM,
SERVICE_TURN_OFF,
SERVICE_TURN_ON,
)
from homeassistant.const import ATTR_ENTITY_ID, Platform
from homeassistant.core import HomeAssistant
from homeassistant.helpers import entity_registry as er

from . import async_add_model

from tests.common import snapshot_platform


@pytest.fixture(autouse=True)
async def fixture_single_platform():
"""Only allow this platform to load."""
with patch("homeassistant.components.nibe_heatpump.PLATFORMS", [Platform.SWITCH]):
yield


@pytest.mark.parametrize(
("model", "address", "value"),
[
(Model.F1255, 48043, "INACTIVE"),
(Model.F1255, 48043, "ACTIVE"),
(Model.F1255, 48071, "OFF"),
(Model.F1255, 48071, "ON"),
],
)
@pytest.mark.usefixtures("entity_registry_enabled_by_default")
async def test_update(
hass: HomeAssistant,
entity_registry: er.EntityRegistry,
model: Model,
address: int,
value: Any,
coils: dict[int, Any],
snapshot: SnapshotAssertion,
) -> None:
"""Test setting of value."""
coils[address] = value

entry = await async_add_model(hass, model)
await snapshot_platform(hass, entity_registry, snapshot, entry.entry_id)


@pytest.mark.parametrize(
("model", "address", "entity_id", "state"),
[
(Model.F1255, 48043, "switch.holiday_activated_48043", "INACTIVE"),
(Model.F1255, 48071, "switch.flm_1_accessory_48071", "OFF"),
],
)
@pytest.mark.usefixtures("entity_registry_enabled_by_default")
async def test_turn_on(
hass: HomeAssistant,
mock_connection: AsyncMock,
model: Model,
entity_id: str,
address: int,
state: Any,
coils: dict[int, Any],
) -> None:
"""Test setting of value."""
coils[address] = state

await async_add_model(hass, model)

# Write value
await hass.services.async_call(
SWITCH_PLATFORM,
SERVICE_TURN_ON,
{ATTR_ENTITY_ID: entity_id},
blocking=True,
)

# Verify written
args = mock_connection.write_coil.call_args
assert args
coil = args.args[0]
assert isinstance(coil, CoilData)
assert coil.coil.address == address
assert coil.raw_value == 1


@pytest.mark.parametrize(
("model", "address", "entity_id", "state"),
[
(Model.F1255, 48043, "switch.holiday_activated_48043", "INACTIVE"),
(Model.F1255, 48071, "switch.flm_1_accessory_48071", "ON"),
],
)
@pytest.mark.usefixtures("entity_registry_enabled_by_default")
async def test_turn_off(
hass: HomeAssistant,
mock_connection: AsyncMock,
model: Model,
entity_id: str,
address: int,
state: Any,
coils: dict[int, Any],
) -> None:
"""Test setting of value."""
coils[address] = state

await async_add_model(hass, model)

# Write value
await hass.services.async_call(
SWITCH_PLATFORM,
SERVICE_TURN_OFF,
{ATTR_ENTITY_ID: entity_id},
blocking=True,
)

# Verify written
args = mock_connection.write_coil.call_args
assert args
coil = args.args[0]
assert isinstance(coil, CoilData)
assert coil.coil.address == address
assert coil.raw_value == 0
@ -25,43 +25,46 @@
},
"wordle": {
"legacyStats": {
"gamesPlayed": 70,
"gamesWon": 51,
"gamesPlayed": 1111,
"gamesWon": 1069,
"guesses": {
"1": 0,
"2": 1,
"3": 7,
"4": 11,
"5": 20,
"6": 12,
"fail": 19
"2": 8,
"3": 83,
"4": 440,
"5": 372,
"6": 166,
"fail": 42
},
"currentStreak": 1,
"maxStreak": 5,
"lastWonDayOffset": 1189,
"currentStreak": 229,
"maxStreak": 229,
"lastWonDayOffset": 1472,
"hasPlayed": true,
"autoOptInTimestamp": 1708273168957,
"hasMadeStatsChoice": false,
"timestamp": 1726831978
"autoOptInTimestamp": 1712205417018,
"hasMadeStatsChoice": true,
"timestamp": 1751255756
},
"calculatedStats": {
"gamesPlayed": 33,
"gamesWon": 26,
"currentStreak": 237,
"maxStreak": 241,
"lastWonPrintDate": "2025-07-08",
"lastCompletedPrintDate": "2025-07-08",
"hasPlayed": true
},
"totalStats": {
"gamesWon": 1077,
"gamesPlayed": 1119,
"guesses": {
"1": 0,
"2": 1,
"3": 4,
"4": 7,
"5": 10,
"6": 4,
"fail": 7
"2": 8,
"3": 83,
"4": 444,
"5": 376,
"6": 166,
"fail": 42
},
"currentStreak": 1,
"maxStreak": 5,
"lastWonPrintDate": "2024-09-20",
"lastCompletedPrintDate": "2024-09-20",
"hasPlayed": true,
"generation": 1
"hasPlayedArchive": false
}
}
}
@ -7,26 +7,6 @@
"stats": {
"wordle": {
"legacyStats": {
"gamesPlayed": 1,
"gamesWon": 1,
"guesses": {
"1": 0,
"2": 0,
"3": 0,
"4": 0,
"5": 1,
"6": 0,
"fail": 0
},
"currentStreak": 0,
"maxStreak": 1,
"lastWonDayOffset": 1118,
"hasPlayed": true,
"autoOptInTimestamp": 1727357874700,
"hasMadeStatsChoice": false,
"timestamp": 1727358123
},
"calculatedStats": {
"gamesPlayed": 0,
"gamesWon": 0,
"guesses": {
@ -38,12 +18,35 @@
"6": 0,
"fail": 0
},
"currentStreak": 0,
"maxStreak": 1,
"lastWonDayOffset": 1118,
"hasPlayed": true,
"autoOptInTimestamp": 1727357874700,
"hasMadeStatsChoice": false,
"timestamp": 1727358123
},
"calculatedStats": {
"currentStreak": 0,
"maxStreak": 1,
"lastWonPrintDate": "",
"lastCompletedPrintDate": "",
"hasPlayed": false
},
"totalStats": {
"gamesPlayed": 1,
"gamesWon": 1,
"guesses": {
"1": 0,
"2": 0,
"3": 0,
"4": 0,
"5": 1,
"6": 0,
"fail": 0
},
"hasPlayed": false,
"generation": 1
"hasPlayedArchive": false
}
}
}
@ -473,7 +473,7 @@
'last_changed': <ANY>,
'last_reported': <ANY>,
'last_updated': <ANY>,
'state': '1',
'state': '237',
})
# ---
# name: test_all_entities[sensor.wordle_highest_streak-entry]
@ -529,7 +529,7 @@
'last_changed': <ANY>,
'last_reported': <ANY>,
'last_updated': <ANY>,
'state': '5',
'state': '241',
})
# ---
# name: test_all_entities[sensor.wordle_played-entry]
@ -581,7 +581,7 @@
'last_changed': <ANY>,
'last_reported': <ANY>,
'last_updated': <ANY>,
'state': '70',
'state': '1119',
})
# ---
# name: test_all_entities[sensor.wordle_won-entry]
@ -633,6 +633,6 @@
'last_changed': <ANY>,
'last_reported': <ANY>,
'last_updated': <ANY>,
'state': '51',
'state': '1077',
})
# ---
@ -667,3 +667,36 @@ async def test_availability_blocks_value_template(
await hass.async_block_till_done()

assert error in caplog.text


async def test_setup_get_basic_auth_utf8(
hass: HomeAssistant, aioclient_mock: AiohttpClientMocker
) -> None:
"""Test setup with basic auth using UTF-8 characters including Unicode char \u2018."""
# Use a password with the Unicode character \u2018 (left single quotation mark)
aioclient_mock.get("http://localhost", status=HTTPStatus.OK, json={"key": "on"})
assert await async_setup_component(
hass,
BINARY_SENSOR_DOMAIN,
{
BINARY_SENSOR_DOMAIN: {
"platform": DOMAIN,
"resource": "http://localhost",
"method": "GET",
"value_template": "{{ value_json.key }}",
"name": "foo",
"verify_ssl": "true",
"timeout": 30,
"authentication": "basic",
"username": "test_user",
"password": "test\u2018password",  # Password with Unicode char
"headers": {"Accept": CONTENT_TYPE_JSON},
}
},
)

await hass.async_block_till_done()
assert len(hass.states.async_all(BINARY_SENSOR_DOMAIN)) == 1

state = hass.states.get("binary_sensor.foo")
assert state.state == STATE_ON
@ -1,6 +1,7 @@
"""The tests for the REST sensor platform."""

from http import HTTPStatus
import logging
import ssl
from unittest.mock import patch

@ -19,6 +20,14 @@ from homeassistant.const import (
ATTR_DEVICE_CLASS,
ATTR_ENTITY_ID,
ATTR_UNIT_OF_MEASUREMENT,
CONF_DEVICE_CLASS,
CONF_FORCE_UPDATE,
CONF_METHOD,
CONF_NAME,
CONF_PARAMS,
CONF_RESOURCE,
CONF_UNIT_OF_MEASUREMENT,
CONF_VALUE_TEMPLATE,
CONTENT_TYPE_JSON,
SERVICE_RELOAD,
STATE_UNAVAILABLE,
@ -162,6 +171,94 @@ async def test_setup_encoding(
assert hass.states.get("sensor.mysensor").state == "tack själv"


async def test_setup_auto_encoding_from_content_type(
hass: HomeAssistant, aioclient_mock: AiohttpClientMocker
) -> None:
"""Test setup with encoding auto-detected from Content-Type header."""
# Test with ISO-8859-1 charset in Content-Type header
aioclient_mock.get(
"http://localhost",
status=HTTPStatus.OK,
content="Björk Guðmundsdóttir".encode("iso-8859-1"),
headers={"Content-Type": "text/plain; charset=iso-8859-1"},
)
assert await async_setup_component(
hass,
SENSOR_DOMAIN,
{
SENSOR_DOMAIN: {
"name": "mysensor",
# encoding defaults to UTF-8, but should be ignored when charset present
"platform": DOMAIN,
"resource": "http://localhost",
"method": "GET",
}
},
)
await hass.async_block_till_done()
assert len(hass.states.async_all(SENSOR_DOMAIN)) == 1
assert hass.states.get("sensor.mysensor").state == "Björk Guðmundsdóttir"


async def test_setup_encoding_fallback_no_charset(
hass: HomeAssistant, aioclient_mock: AiohttpClientMocker
) -> None:
"""Test that configured encoding is used when no charset in Content-Type."""
# No charset in Content-Type header
aioclient_mock.get(
"http://localhost",
status=HTTPStatus.OK,
content="Björk Guðmundsdóttir".encode("iso-8859-1"),
headers={"Content-Type": "text/plain"},  # No charset!
)
assert await async_setup_component(
hass,
SENSOR_DOMAIN,
{
SENSOR_DOMAIN: {
"name": "mysensor",
"encoding": "iso-8859-1",  # This will be used as fallback
"platform": DOMAIN,
"resource": "http://localhost",
"method": "GET",
}
},
)
await hass.async_block_till_done()
assert len(hass.states.async_all(SENSOR_DOMAIN)) == 1
assert hass.states.get("sensor.mysensor").state == "Björk Guðmundsdóttir"


async def test_setup_charset_overrides_encoding_config(
hass: HomeAssistant, aioclient_mock: AiohttpClientMocker
) -> None:
"""Test that charset in Content-Type overrides configured encoding."""
# Server sends UTF-8 with correct charset header
aioclient_mock.get(
"http://localhost",
status=HTTPStatus.OK,
content="Björk Guðmundsdóttir".encode(),
headers={"Content-Type": "text/plain; charset=utf-8"},
)
assert await async_setup_component(
hass,
SENSOR_DOMAIN,
{
SENSOR_DOMAIN: {
"name": "mysensor",
"encoding": "iso-8859-1",  # Config says ISO-8859-1, but charset=utf-8 should win
"platform": DOMAIN,
"resource": "http://localhost",
"method": "GET",
}
},
)
await hass.async_block_till_done()
assert len(hass.states.async_all(SENSOR_DOMAIN)) == 1
# This should work because charset=utf-8 overrides the iso-8859-1 config
assert hass.states.get("sensor.mysensor").state == "Björk Guðmundsdóttir"


@pytest.mark.parametrize(
("ssl_cipher_list", "ssl_cipher_list_expected"),
[
@ -978,6 +1075,124 @@ async def test_update_with_failed_get(
assert "Empty reply" in caplog.text


async def test_query_param_dict_value(
hass: HomeAssistant,
caplog: pytest.LogCaptureFixture,
aioclient_mock: AiohttpClientMocker,
) -> None:
"""Test dict values in query params are handled for backward compatibility."""
# Mock response
aioclient_mock.post(
"https://www.envertecportal.com/ApiInverters/QueryTerminalReal",
status=HTTPStatus.OK,
json={"Data": {"QueryResults": [{"POWER": 1500}]}},
)

# This test checks that when template_complex processes a string that looks like
# a dict/list, it converts it to an actual dict/list, which then needs to be
# handled by our backward compatibility code
with caplog.at_level(logging.DEBUG, logger="homeassistant.components.rest.data"):
assert await async_setup_component(
hass,
DOMAIN,
{
DOMAIN: [
{
CONF_RESOURCE: (
"https://www.envertecportal.com/ApiInverters/"
"QueryTerminalReal"
),
CONF_METHOD: "POST",
CONF_PARAMS: {
"page": "1",
"perPage": "20",
"orderBy": "SN",
# When processed by template.render_complex, certain
# strings might be converted to dicts/lists if they
# look like JSON
"whereCondition": (
"{{ {'STATIONID': 'A6327A17797C1234'} }}"
),  # Template that evaluates to dict
},
"sensor": [
{
CONF_NAME: "Solar MPPT1 Power",
CONF_VALUE_TEMPLATE: (
"{{ value_json.Data.QueryResults[0].POWER }}"
),
CONF_DEVICE_CLASS: "power",
CONF_UNIT_OF_MEASUREMENT: "W",
CONF_FORCE_UPDATE: True,
"state_class": "measurement",
}
],
}
]
},
)
await hass.async_block_till_done()

# The sensor should be created successfully with backward compatibility
assert len(hass.states.async_all(SENSOR_DOMAIN)) == 1
state = hass.states.get("sensor.solar_mppt1_power")
assert state is not None
assert state.state == "1500"

# Check that a debug message was logged about the parameter conversion
assert "REST query parameter 'whereCondition' has type" in caplog.text
assert "converting to string" in caplog.text


async def test_query_param_json_string_preserved(
hass: HomeAssistant,
aioclient_mock: AiohttpClientMocker,
) -> None:
"""Test that JSON strings in query params are preserved and not converted to dicts."""
# Mock response
aioclient_mock.get(
"https://api.example.com/data",
status=HTTPStatus.OK,
json={"value": 42},
)

# Config with JSON string (quoted) - should remain a string
assert await async_setup_component(
hass,
DOMAIN,
{
DOMAIN: [
{
CONF_RESOURCE: "https://api.example.com/data",
CONF_METHOD: "GET",
CONF_PARAMS: {
"filter": '{"type": "sensor", "id": 123}',  # JSON string
"normal": "value",
},
"sensor": [
{
CONF_NAME: "Test Sensor",
CONF_VALUE_TEMPLATE: "{{ value_json.value }}",
}
],
}
]
},
)
await hass.async_block_till_done()

# Check the sensor was created
assert len(hass.states.async_all(SENSOR_DOMAIN)) == 1
state = hass.states.get("sensor.test_sensor")
assert state is not None
assert state.state == "42"

# Verify the request was made with the JSON string intact
assert len(aioclient_mock.mock_calls) == 1
method, url, data, headers = aioclient_mock.mock_calls[0]
assert url.query["filter"] == '{"type": "sensor", "id": 123}'
assert url.query["normal"] == "value"


async def test_reload(hass: HomeAssistant, aioclient_mock: AiohttpClientMocker) -> None:
"""Verify we can reload reset sensors."""
@ -326,7 +326,7 @@ async def test_rest_command_get_response_malformed_json(

aioclient_mock.get(
TEST_URL,
content='{"status": "failure", 42',
content=b'{"status": "failure", 42',
headers={"content-type": "application/json"},
)

@ -379,3 +379,27 @@ async def test_rest_command_get_response_none(
)

assert not response


async def test_rest_command_response_iter_chunked(
hass: HomeAssistant,
setup_component: ComponentSetup,
aioclient_mock: AiohttpClientMocker,
) -> None:
"""Ensure response is consumed when return_response is False."""
await setup_component()

png = base64.decodebytes(
b"iVBORw0KGgoAAAANSUhEUgAAAAIAAAABCAIAAAB7QOjdAAAAAXNSR0IArs4c6QAAAARnQU1BAACxjwv8YQ"
b"UAAAAJcEhZcwAAFiUAABYlAUlSJPAAAAAPSURBVBhXY/h/ku////8AECAE1JZPvDAAAAAASUVORK5CYII="
)
aioclient_mock.get(TEST_URL, content=png)

with patch("aiohttp.StreamReader.iter_chunked", autospec=True) as mock_iter_chunked:
response = await hass.services.async_call(DOMAIN, "get_test", {}, blocking=True)

# Ensure the response is not returned
assert response is None

# Verify iter_chunked was called with a chunk size
assert mock_iter_chunked.called
@ -55,7 +55,7 @@
'last_changed': <ANY>,
'last_reported': <ANY>,
'last_updated': <ANY>,
'state': 'unknown',
'state': 'off',
})
# ---
# name: test_all_entities[da_ks_oven_01061][select.oven_lamp-entry]
@ -112,7 +112,7 @@
'last_changed': <ANY>,
'last_reported': <ANY>,
'last_updated': <ANY>,
'state': 'unknown',
'state': 'high',
})
# ---
# name: test_all_entities[da_ks_range_0101x][select.vulcan_lamp-entry]
@ -4,7 +4,12 @@ from aiowebostv import WebOsTvPairError
import pytest

from homeassistant import config_entries
from homeassistant.components.webostv.const import CONF_SOURCES, DOMAIN, LIVE_TV_APP_ID
from homeassistant.components.webostv.const import (
CONF_SOURCES,
DEFAULT_NAME,
DOMAIN,
LIVE_TV_APP_ID,
)
from homeassistant.config_entries import SOURCE_SSDP
from homeassistant.const import CONF_CLIENT_SECRET, CONF_HOST, CONF_SOURCE
from homeassistant.core import HomeAssistant
@ -63,6 +68,29 @@ async def test_form(hass: HomeAssistant, client) -> None:
assert config_entry.unique_id == FAKE_UUID


async def test_form_no_model_name(hass: HomeAssistant, client) -> None:
"""Test successful user flow without model name."""
client.tv_info.system = {}
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={CONF_SOURCE: config_entries.SOURCE_USER},
data=MOCK_USER_CONFIG,
)
await hass.async_block_till_done()

assert result["type"] is FlowResultType.FORM
assert result["step_id"] == "pairing"

result = await hass.config_entries.flow.async_configure(
result["flow_id"], user_input={}
)

assert result["type"] is FlowResultType.CREATE_ENTRY
assert result["title"] == DEFAULT_NAME
config_entry = result["result"]
assert config_entry.unique_id == FAKE_UUID


@pytest.mark.parametrize(
("apps", "inputs"),
[
@ -156,6 +156,9 @@ class AiohttpClientMocker:

for response in self._mocks:
if response.match_request(method, url, params):
# If auth is provided, try to encode it to trigger any encoding errors
if auth is not None:
auth.encode()
self.mock_calls.append((method, url, data, headers))
if response.side_effect:
response = await response.side_effect(method, url, data)
@ -191,7 +194,6 @@ class AiohttpClientMockResponse:
if response is None:
response = b""

self.charset = "utf-8"
self.method = method
self._url = url
self.status = status
@ -261,16 +263,32 @@ class AiohttpClientMockResponse:
"""Return content."""
return mock_stream(self.response)

@property
def charset(self):
"""Return charset from Content-Type header."""
if (content_type := self._headers.get("content-type")) is None:
return None
content_type = content_type.lower()
if "charset=" in content_type:
return content_type.split("charset=")[1].split(";")[0].strip()
return None

async def read(self):
"""Return mock response."""
return self.response

async def text(self, encoding="utf-8", errors="strict"):
async def text(self, encoding=None, errors="strict") -> str:
"""Return mock response as a string."""
# Match real aiohttp behavior: encoding=None means auto-detect
if encoding is None:
encoding = self.charset or "utf-8"
return self.response.decode(encoding, errors=errors)

async def json(self, encoding="utf-8", content_type=None, loads=json_loads):
async def json(self, encoding=None, content_type=None, loads=json_loads) -> Any:
"""Return mock response as a json."""
# Match real aiohttp behavior: encoding=None means auto-detect
if encoding is None:
encoding = self.charset or "utf-8"
return loads(self.response.decode(encoding))

def release(self):