Compare commits


17 Commits

Author SHA1 Message Date
jbouwh
4f70fa30cf Implement mixin class and add feature to maintain included entities from unique IDs 2025-10-09 20:01:51 +00:00
jbouwh
ee24acf52a Add included_entities attribute to base Entity class 2025-10-08 20:32:58 +00:00
hanwg
42a9d5d4e3 Add webhook tests for Telegram bot (#153998) 2025-10-08 20:58:15 +02:00
Maciej Bieniek
93fa162913 Update IQS for IMGW-PIB integration (#153870) 2025-10-08 20:30:05 +02:00
Maciej Bieniek
c432b1c8da Add entities for Shelly cury component (#153918) 2025-10-08 20:26:29 +02:00
Artur Pragacz
00955b8e6a Fix empty llm api list in chat log (#153996) 2025-10-08 10:39:56 -05:00
Erik Montnemery
045b9d7f01 Correct homeassistant.helpers.trigger._trigger_action_wrapper (#153983) 2025-10-08 17:33:44 +02:00
Aaron Bach
438c4c7871 Limit SimpliSafe websocket connection attempts during startup (#153853)
Co-authored-by: Joost Lekkerkerker <joostlek@outlook.com>
2025-10-08 16:32:17 +02:00
Thomas D
abc360460c Add diagnostics to Volvo integration (#153997) 2025-10-08 16:25:33 +02:00
HarvsG
26437bb253 Adds ConfigFlow for London Underground (#152050)
Co-authored-by: Norbert Rittel <norbert@rittel.de>
Co-authored-by: Joost Lekkerkerker <joostlek@outlook.com>
2025-10-08 16:17:34 +02:00
epenet
56d953ac1e Use constants in climate set_temperature (#154008) 2025-10-08 16:15:01 +02:00
Joost Lekkerkerker
fe4eb8766d Don't mark ZHA coordinator as via_device with itself (#154004) 2025-10-08 16:05:54 +02:00
Mark Adkins
2d9f14c401 Add 3rd maintainer to sharkiq (#153961) 2025-10-08 15:17:52 +02:00
dependabot[bot]
7b6ccb07fd Bump github/codeql-action from 3.30.6 to 4.30.7 (#153979)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-10-08 13:42:25 +02:00
Shay Levy
2ba5728060 Enable Shelly binary input sensors by default (#154001) 2025-10-08 14:41:53 +03:00
epenet
b5f163cc85 Update Tuya fixture for product ID IAYz2WK1th0cMLmL (#154000) 2025-10-08 13:28:11 +02:00
Marc Mueller
65540a3e0b Update mypy dev to 1.19.0a4 (#153995) 2025-10-08 13:24:54 +02:00
61 changed files with 2455 additions and 232 deletions

View File

@@ -741,7 +741,7 @@ jobs:
- name: Generate partial mypy restore key
id: generate-mypy-key
run: |
mypy_version=$(cat requirements_test.txt | grep mypy | cut -d '=' -f 3)
mypy_version=$(cat requirements_test.txt | grep 'mypy.*=' | cut -d '=' -f 3)
echo "version=$mypy_version" >> $GITHUB_OUTPUT
echo "key=mypy-${{ env.MYPY_CACHE_VERSION }}-$mypy_version-${{
env.HA_SHORT_VERSION }}-$(date -u '+%Y-%m-%dT%H:%M:%s')" >> $GITHUB_OUTPUT
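The tightened grep pattern matters because requirements_test.txt now contains a comment line that mentions mypy ("# librt is an internal mypy dependency", visible in the requirements diff further down); a bare `grep mypy` would match that comment as well, and the following `cut` would inject the comment text into the cache key. A minimal Python sketch of the same extraction, as a hypothetical standalone helper rather than workflow code:

# Sketch: extract the mypy version from requirements_test.txt the way the
# workflow's grep/cut pipeline does, while skipping lines such as
# "# librt is an internal mypy dependency" that only mention mypy in a comment.
# Hypothetical standalone helper, not code from the repository.
from pathlib import Path


def mypy_version(requirements: str = "requirements_test.txt") -> str | None:
    for line in Path(requirements).read_text().splitlines():
        line = line.strip()
        if line.startswith("#"):
            continue  # comment lines broke the old `grep mypy` pipeline
        if line.startswith("mypy") and "==" in line:
            return line.split("==", 1)[1]  # e.g. "1.19.0a4" from "mypy-dev==1.19.0a4"
    return None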

View File

@@ -24,11 +24,11 @@ jobs:
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- name: Initialize CodeQL
uses: github/codeql-action/init@64d10c13136e1c5bce3e5fbde8d4906eeaafc885 # v3.30.6
uses: github/codeql-action/init@e296a935590eb16afc0c0108289f68c87e2a89a5 # v4.30.7
with:
languages: python
- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@64d10c13136e1c5bce3e5fbde8d4906eeaafc885 # v3.30.6
uses: github/codeql-action/analyze@e296a935590eb16afc0c0108289f68c87e2a89a5 # v4.30.7
with:
category: "/language:python"

CODEOWNERS (generated)
View File

@@ -1413,8 +1413,8 @@ build.json @home-assistant/supervisor
/tests/components/sfr_box/ @epenet
/homeassistant/components/sftp_storage/ @maretodoric
/tests/components/sftp_storage/ @maretodoric
/homeassistant/components/sharkiq/ @JeffResc @funkybunch
/tests/components/sharkiq/ @JeffResc @funkybunch
/homeassistant/components/sharkiq/ @JeffResc @funkybunch @TheOneOgre
/tests/components/sharkiq/ @JeffResc @funkybunch @TheOneOgre
/homeassistant/components/shell_command/ @home-assistant/core
/tests/components/shell_command/ @home-assistant/core
/homeassistant/components/shelly/ @bieniu @thecode @chemelli74 @bdraco

View File

@@ -7,6 +7,8 @@ from typing import Any
from pyaprilaire.const import Attribute
from homeassistant.components.climate import (
ATTR_TARGET_TEMP_HIGH,
ATTR_TARGET_TEMP_LOW,
FAN_AUTO,
FAN_ON,
PRESET_AWAY,
@@ -16,7 +18,12 @@ from homeassistant.components.climate import (
HVACAction,
HVACMode,
)
from homeassistant.const import PRECISION_HALVES, PRECISION_WHOLE, UnitOfTemperature
from homeassistant.const import (
ATTR_TEMPERATURE,
PRECISION_HALVES,
PRECISION_WHOLE,
UnitOfTemperature,
)
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
@@ -232,15 +239,15 @@ class AprilaireClimate(BaseAprilaireEntity, ClimateEntity):
cool_setpoint = 0
heat_setpoint = 0
if temperature := kwargs.get("temperature"):
if temperature := kwargs.get(ATTR_TEMPERATURE):
if self.coordinator.data.get(Attribute.MODE) == 3:
cool_setpoint = temperature
else:
heat_setpoint = temperature
else:
if target_temp_low := kwargs.get("target_temp_low"):
if target_temp_low := kwargs.get(ATTR_TARGET_TEMP_LOW):
heat_setpoint = target_temp_low
if target_temp_high := kwargs.get("target_temp_high"):
if target_temp_high := kwargs.get(ATTR_TARGET_TEMP_HIGH):
cool_setpoint = target_temp_high
if cool_setpoint == 0 and heat_setpoint == 0:
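Replacing the literal kwargs keys with the shared constants keeps the lookups aligned with what the climate service layer actually passes in. A small hedged illustration of the mapping; the temperatures below are made-up values, not from the PR:

# Sketch: kwargs handed to async_set_temperature are keyed by the shared constants,
# so looking them up via ATTR_* matches the service data exactly.
from homeassistant.components.climate import ATTR_TARGET_TEMP_HIGH, ATTR_TARGET_TEMP_LOW
from homeassistant.const import ATTR_TEMPERATURE

# Hypothetical service data for a heat/cool range request:
kwargs = {ATTR_TARGET_TEMP_LOW: 19.0, ATTR_TARGET_TEMP_HIGH: 24.0}

heat_setpoint = kwargs.get(ATTR_TARGET_TEMP_LOW, 0)   # 19.0
cool_setpoint = kwargs.get(ATTR_TARGET_TEMP_HIGH, 0)  # 24.0
single_target = kwargs.get(ATTR_TEMPERATURE)          # None for a range request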

View File

@@ -7,12 +7,14 @@ from typing import Any
from evolutionhttp import BryantEvolutionLocalClient
from homeassistant.components.climate import (
ATTR_TARGET_TEMP_HIGH,
ATTR_TARGET_TEMP_LOW,
ClimateEntity,
ClimateEntityFeature,
HVACAction,
HVACMode,
)
from homeassistant.const import UnitOfTemperature
from homeassistant.const import ATTR_TEMPERATURE, UnitOfTemperature
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers.device_registry import DeviceInfo
@@ -208,24 +210,24 @@ class BryantEvolutionClimate(ClimateEntity):
async def async_set_temperature(self, **kwargs: Any) -> None:
"""Set new target temperature."""
if kwargs.get("target_temp_high"):
temp = int(kwargs["target_temp_high"])
if value := kwargs.get(ATTR_TARGET_TEMP_HIGH):
temp = int(value)
if not await self._client.set_cooling_setpoint(temp):
raise HomeAssistantError(
translation_domain=DOMAIN, translation_key="failed_to_set_clsp"
)
self._attr_target_temperature_high = temp
if kwargs.get("target_temp_low"):
temp = int(kwargs["target_temp_low"])
if value := kwargs.get(ATTR_TARGET_TEMP_LOW):
temp = int(value)
if not await self._client.set_heating_setpoint(temp):
raise HomeAssistantError(
translation_domain=DOMAIN, translation_key="failed_to_set_htsp"
)
self._attr_target_temperature_low = temp
if kwargs.get("temperature"):
temp = int(kwargs["temperature"])
if value := kwargs.get(ATTR_TEMPERATURE):
temp = int(value)
fn = (
self._client.set_heating_setpoint
if self.hvac_mode == HVACMode.HEAT

View File

@@ -169,7 +169,7 @@ class CalendarEventListener:
def __init__(
self,
hass: HomeAssistant,
job: HassJob[..., Coroutine[Any, Any, None]],
job: HassJob[..., Coroutine[Any, Any, None] | Any],
trigger_data: dict[str, Any],
fetcher: QueuedEventFetcher,
) -> None:

View File

@@ -514,7 +514,7 @@ class ChatLog:
"""Set the LLM system prompt."""
llm_api: llm.APIInstance | None = None
if user_llm_hass_api is None:
if not user_llm_hass_api:
pass
elif isinstance(user_llm_hass_api, llm.API):
llm_api = await user_llm_hass_api.async_get_api_instance(llm_context)
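The switch from `is None` to a falsy check means an empty list of selected LLM APIs is treated the same as no API at all, instead of falling through to the later branches with an empty selection. A tiny standalone illustration of the guard (plain Python, no Home Assistant types):

# Sketch: why the falsy check matters for user_llm_hass_api.
for user_llm_hass_api in (None, [], ["assist"]):
    if not user_llm_hass_api:
        # None and [] both land here after the fix; previously [] fell through.
        print(user_llm_hass_api, "-> no LLM API attached")
    else:
        print(user_llm_hass_api, "-> resolve the selected API(s)")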

View File

@@ -29,7 +29,12 @@ from homeassistant.components.climate import (
ClimateEntityFeature,
HVACMode,
)
from homeassistant.const import ATTR_MODE, PRECISION_TENTHS, UnitOfTemperature
from homeassistant.const import (
ATTR_MODE,
ATTR_TEMPERATURE,
PRECISION_TENTHS,
UnitOfTemperature,
)
from homeassistant.core import HomeAssistant, callback
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers.entity_platform import AddEntitiesCallback
@@ -243,7 +248,7 @@ class EvoZone(EvoChild, EvoClimateEntity):
async def async_set_temperature(self, **kwargs: Any) -> None:
"""Set a new target temperature."""
temperature = kwargs["temperature"]
temperature = kwargs[ATTR_TEMPERATURE]
if (until := kwargs.get("until")) is None:
if self._evo_device.mode == EvoZoneMode.TEMPORARY_OVERRIDE:

View File

@@ -456,7 +456,7 @@ class HomeAccessory(Accessory): # type: ignore[misc]
return self._available
@ha_callback
@pyhap_callback # type: ignore[misc]
@pyhap_callback # type: ignore[untyped-decorator]
def run(self) -> None:
"""Handle accessory driver started event."""
if state := self.hass.states.get(self.entity_id):
@@ -725,7 +725,7 @@ class HomeDriver(AccessoryDriver): # type: ignore[misc]
self._entry_title = entry_title
self.iid_storage = iid_storage
@pyhap_callback # type: ignore[misc]
@pyhap_callback # type: ignore[untyped-decorator]
def pair(
self, client_username_bytes: bytes, client_public: str, client_permissions: int
) -> bool:
@@ -735,7 +735,7 @@ class HomeDriver(AccessoryDriver): # type: ignore[misc]
async_dismiss_setup_message(self.hass, self.entry_id)
return cast(bool, success)
@pyhap_callback # type: ignore[misc]
@pyhap_callback # type: ignore[untyped-decorator]
def unpair(self, client_uuid: UUID) -> None:
"""Override super function to show setup message if unpaired."""
super().unpair(client_uuid)

View File

@@ -71,7 +71,7 @@ class HomeDoorbellAccessory(HomeAccessory):
self.async_update_doorbell_state(None, state)
@ha_callback
@pyhap_callback # type: ignore[misc]
@pyhap_callback # type: ignore[untyped-decorator]
def run(self) -> None:
"""Handle doorbell event."""
if self._char_doorbell_detected:

View File

@@ -219,7 +219,7 @@ class AirPurifier(Fan):
return preset_mode.lower() != "auto"
@callback
@pyhap_callback # type: ignore[misc]
@pyhap_callback # type: ignore[untyped-decorator]
def run(self) -> None:
"""Handle accessory driver started event.

View File

@@ -229,7 +229,7 @@ class Camera(HomeDoorbellAccessory, PyhapCamera): # type: ignore[misc]
)
self._async_update_motion_state(None, state)
@pyhap_callback # type: ignore[misc]
@pyhap_callback # type: ignore[untyped-decorator]
@callback
def run(self) -> None:
"""Handle accessory driver started event.

View File

@@ -127,7 +127,7 @@ class GarageDoorOpener(HomeAccessory):
self.async_update_state(state)
@callback
@pyhap_callback # type: ignore[misc]
@pyhap_callback # type: ignore[untyped-decorator]
def run(self) -> None:
"""Handle accessory driver started event.

View File

@@ -178,7 +178,7 @@ class HumidifierDehumidifier(HomeAccessory):
self._async_update_current_humidity(humidity_state)
@callback
@pyhap_callback # type: ignore[misc]
@pyhap_callback # type: ignore[untyped-decorator]
def run(self) -> None:
"""Handle accessory driver started event.

View File

@@ -108,7 +108,7 @@ class DeviceTriggerAccessory(HomeAccessory):
_LOGGER.log,
)
@pyhap_callback # type: ignore[misc]
@pyhap_callback # type: ignore[untyped-decorator]
@callback
def run(self) -> None:
"""Run the accessory."""

View File

@@ -5,6 +5,6 @@
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/imgw_pib",
"iot_class": "cloud_polling",
"quality_scale": "silver",
"quality_scale": "platinum",
"requirements": ["imgw_pib==1.5.6"]
}

View File

@@ -50,17 +50,17 @@ rules:
discovery:
status: exempt
comment: The integration is a cloud service and thus does not support discovery.
docs-data-update: todo
docs-examples: todo
docs-known-limitations: todo
docs-data-update: done
docs-examples: done
docs-known-limitations: done
docs-supported-devices:
status: exempt
comment: This is a service, which doesn't integrate with any devices.
docs-supported-functions: todo
docs-supported-functions: done
docs-troubleshooting:
status: exempt
comment: No known issues that could be resolved by the user.
docs-use-cases: todo
docs-use-cases: done
dynamic-devices:
status: exempt
comment: This integration has a fixed single service.

View File

@@ -1 +1,36 @@
"""The london_underground component."""
from __future__ import annotations
from homeassistant.const import Platform
from homeassistant.core import HomeAssistant
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from .const import DOMAIN as DOMAIN
from .coordinator import LondonTubeCoordinator, LondonUndergroundConfigEntry, TubeData
PLATFORMS: list[Platform] = [Platform.SENSOR]
async def async_setup_entry(
hass: HomeAssistant, entry: LondonUndergroundConfigEntry
) -> bool:
"""Set up London Underground from a config entry."""
session = async_get_clientsession(hass)
data = TubeData(session)
coordinator = LondonTubeCoordinator(hass, data, config_entry=entry)
await coordinator.async_config_entry_first_refresh()
entry.runtime_data = coordinator
# Forward the setup to the sensor platform
await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
return True
async def async_unload_entry(
hass: HomeAssistant, entry: LondonUndergroundConfigEntry
) -> bool:
"""Unload a config entry."""
return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)

View File

@@ -0,0 +1,152 @@
"""Config flow for London Underground integration."""
from __future__ import annotations
import asyncio
import logging
from typing import Any
from london_tube_status import TubeData
import voluptuous as vol
from homeassistant.config_entries import (
ConfigEntry,
ConfigFlow,
ConfigFlowResult,
OptionsFlowWithReload,
)
from homeassistant.core import callback
from homeassistant.helpers import selector
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.typing import ConfigType
from .const import CONF_LINE, DEFAULT_LINES, DOMAIN, TUBE_LINES
_LOGGER = logging.getLogger(__name__)
class LondonUndergroundConfigFlow(ConfigFlow, domain=DOMAIN):
"""Handle a config flow for London Underground."""
VERSION = 1
MINOR_VERSION = 1
@staticmethod
@callback
def async_get_options_flow(
_: ConfigEntry,
) -> LondonUndergroundOptionsFlow:
"""Get the options flow for this handler."""
return LondonUndergroundOptionsFlow()
async def async_step_user(
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
"""Handle the initial step."""
errors: dict[str, str] = {}
if user_input is not None:
session = async_get_clientsession(self.hass)
data = TubeData(session)
try:
async with asyncio.timeout(10):
await data.update()
except TimeoutError:
errors["base"] = "timeout_connect"
except Exception:
_LOGGER.exception("Unexpected error")
errors["base"] = "cannot_connect"
else:
return self.async_create_entry(
title="London Underground",
data={},
options={CONF_LINE: user_input.get(CONF_LINE, DEFAULT_LINES)},
)
return self.async_show_form(
step_id="user",
data_schema=vol.Schema(
{
vol.Optional(
CONF_LINE,
default=DEFAULT_LINES,
): selector.SelectSelector(
selector.SelectSelectorConfig(
options=TUBE_LINES,
multiple=True,
mode=selector.SelectSelectorMode.DROPDOWN,
)
),
}
),
errors=errors,
)
async def async_step_import(self, import_data: ConfigType) -> ConfigFlowResult:
"""Handle import from configuration.yaml."""
session = async_get_clientsession(self.hass)
data = TubeData(session)
try:
async with asyncio.timeout(10):
await data.update()
except Exception:
_LOGGER.exception(
"Unexpected error trying to connect before importing config, aborting import "
)
return self.async_abort(reason="cannot_connect")
_LOGGER.warning(
"Importing London Underground config from configuration.yaml: %s",
import_data,
)
# Extract lines from the sensor platform config
lines = import_data.get(CONF_LINE, DEFAULT_LINES)
if "London Overground" in lines:
_LOGGER.warning(
"London Overground was removed from the configuration as the line has been divided and renamed"
)
lines.remove("London Overground")
return self.async_create_entry(
title="London Underground",
data={},
options={CONF_LINE: import_data.get(CONF_LINE, DEFAULT_LINES)},
)
class LondonUndergroundOptionsFlow(OptionsFlowWithReload):
"""Handle options."""
async def async_step_init(
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
"""Manage the options."""
if user_input is not None:
_LOGGER.debug(
"Updating london underground with options flow user_input: %s",
user_input,
)
return self.async_create_entry(
title="",
data={CONF_LINE: user_input[CONF_LINE]},
)
return self.async_show_form(
step_id="init",
data_schema=vol.Schema(
{
vol.Optional(
CONF_LINE,
default=self.config_entry.options.get(
CONF_LINE,
self.config_entry.data.get(CONF_LINE, DEFAULT_LINES),
),
): selector.SelectSelector(
selector.SelectSelectorConfig(
options=TUBE_LINES,
multiple=True,
mode=selector.SelectSelectorMode.DROPDOWN,
)
),
}
),
)

View File

@@ -6,7 +6,6 @@ DOMAIN = "london_underground"
CONF_LINE = "line"
SCAN_INTERVAL = timedelta(seconds=30)
TUBE_LINES = [
@@ -18,7 +17,7 @@ TUBE_LINES = [
"Elizabeth line",
"Hammersmith & City",
"Jubilee",
"London Overground",
"London Overground", # no longer supported
"Metropolitan",
"Northern",
"Piccadilly",
@@ -31,3 +30,20 @@ TUBE_LINES = [
"Weaver",
"Windrush",
]
# Default lines to monitor if none selected
DEFAULT_LINES = [
"Bakerloo",
"Central",
"Circle",
"District",
"DLR",
"Elizabeth line",
"Hammersmith & City",
"Jubilee",
"Metropolitan",
"Northern",
"Piccadilly",
"Victoria",
"Waterloo & City",
]

View File

@@ -8,6 +8,7 @@ from typing import cast
from london_tube_status import TubeData
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator
@@ -15,16 +16,23 @@ from .const import DOMAIN, SCAN_INTERVAL
_LOGGER = logging.getLogger(__name__)
type LondonUndergroundConfigEntry = ConfigEntry[LondonTubeCoordinator]
class LondonTubeCoordinator(DataUpdateCoordinator[dict[str, dict[str, str]]]):
"""London Underground sensor coordinator."""
def __init__(self, hass: HomeAssistant, data: TubeData) -> None:
def __init__(
self,
hass: HomeAssistant,
data: TubeData,
config_entry: LondonUndergroundConfigEntry,
) -> None:
"""Initialize coordinator."""
super().__init__(
hass,
_LOGGER,
config_entry=None,
config_entry=config_entry,
name=DOMAIN,
update_interval=SCAN_INTERVAL,
)

View File

@@ -2,9 +2,12 @@
"domain": "london_underground",
"name": "London Underground",
"codeowners": ["@jpbede"],
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/london_underground",
"integration_type": "service",
"iot_class": "cloud_polling",
"loggers": ["london_tube_status"],
"quality_scale": "legacy",
"requirements": ["london-tube-status==0.5"]
"requirements": ["london-tube-status==0.5"],
"single_config_entry": true
}

View File

@@ -5,23 +5,26 @@ from __future__ import annotations
import logging
from typing import Any
from london_tube_status import TubeData
import voluptuous as vol
from homeassistant.components.sensor import (
PLATFORM_SCHEMA as SENSOR_PLATFORM_SCHEMA,
SensorEntity,
)
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import PlatformNotReady
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from homeassistant.config_entries import SOURCE_IMPORT
from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant
from homeassistant.data_entry_flow import FlowResultType
from homeassistant.helpers import config_validation as cv, issue_registry as ir
from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo
from homeassistant.helpers.entity_platform import (
AddConfigEntryEntitiesCallback,
AddEntitiesCallback,
)
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
from homeassistant.helpers.update_coordinator import CoordinatorEntity
from .const import CONF_LINE, TUBE_LINES
from .coordinator import LondonTubeCoordinator
from .const import CONF_LINE, DOMAIN, TUBE_LINES
from .coordinator import LondonTubeCoordinator, LondonUndergroundConfigEntry
_LOGGER = logging.getLogger(__name__)
@@ -38,18 +41,54 @@ async def async_setup_platform(
) -> None:
"""Set up the Tube sensor."""
session = async_get_clientsession(hass)
# If configuration.yaml config exists, trigger the import flow.
# If the config entry already exists, this will not be triggered as only one config is allowed.
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": SOURCE_IMPORT}, data=config
)
if (
result.get("type") is FlowResultType.ABORT
and result.get("reason") != "already_configured"
):
ir.async_create_issue(
hass,
DOMAIN,
f"deprecated_yaml_import_issue_{result.get('reason')}",
is_fixable=False,
issue_domain=DOMAIN,
severity=ir.IssueSeverity.WARNING,
translation_key="deprecated_yaml_import_issue",
translation_placeholders={
"domain": DOMAIN,
"integration_title": "London Underground",
},
)
return
data = TubeData(session)
coordinator = LondonTubeCoordinator(hass, data)
ir.async_create_issue(
hass,
HOMEASSISTANT_DOMAIN,
"deprecated_yaml",
is_fixable=False,
issue_domain=DOMAIN,
severity=ir.IssueSeverity.WARNING,
translation_key="deprecated_yaml",
translation_placeholders={
"domain": DOMAIN,
"integration_title": "London Underground",
},
)
await coordinator.async_refresh()
if not coordinator.last_update_success:
raise PlatformNotReady
async def async_setup_entry(
hass: HomeAssistant,
entry: LondonUndergroundConfigEntry,
async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
"""Set up the London Underground sensor from config entry."""
async_add_entities(
LondonTubeSensor(coordinator, line) for line in config[CONF_LINE]
LondonTubeSensor(entry.runtime_data, line) for line in entry.options[CONF_LINE]
)
@@ -58,11 +97,21 @@ class LondonTubeSensor(CoordinatorEntity[LondonTubeCoordinator], SensorEntity):
_attr_attribution = "Powered by TfL Open Data"
_attr_icon = "mdi:subway"
_attr_has_entity_name = True # Use modern entity naming
def __init__(self, coordinator: LondonTubeCoordinator, name: str) -> None:
"""Initialize the London Underground sensor."""
super().__init__(coordinator)
self._name = name
# Add unique_id for proper entity registry
self._attr_unique_id = f"tube_{name.lower().replace(' ', '_')}"
self._attr_device_info = DeviceInfo(
identifiers={(DOMAIN, DOMAIN)},
name="London Underground",
manufacturer="Transport for London",
model="Tube Status",
entry_type=DeviceEntryType.SERVICE,
)
@property
def name(self) -> str:

View File

@@ -0,0 +1,38 @@
{
"config": {
"step": {
"user": {
"title": "Set up London Underground",
"description": "Select which tube lines you want to monitor",
"data": {
"line": "Tube lines"
}
}
},
"error": {
"cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
"timeout_connect": "[%key:common::config_flow::error::timeout_connect%]",
"unknown": "[%key:common::config_flow::error::unknown%]"
},
"abort": {
"single_instance_allowed": "[%key:common::config_flow::abort::single_instance_allowed%]"
}
},
"options": {
"step": {
"init": {
"title": "Configure London Underground",
"description": "[%key:component::london_underground::config::step::user::description%]",
"data": {
"line": "[%key:component::london_underground::config::step::user::data::line%]"
}
}
}
},
"issues": {
"deprecated_yaml_import_issue": {
"title": "London Underground YAML configuration deprecated",
"description": "Configuring London Underground using YAML sensor platform is deprecated.\n\nWhile importing your configuration, an error occurred when trying to connect to the Transport for London API. Please restart Home Assistant to try again, or remove the existing YAML configuration and set the integration up via the UI."
}
}
}

View File

@@ -59,7 +59,7 @@ async def create_server(
# Backwards compatibility with old MCP Server config
return await llm.async_get_api(hass, llm_api_id, llm_context)
@server.list_prompts() # type: ignore[no-untyped-call, misc]
@server.list_prompts() # type: ignore[no-untyped-call,untyped-decorator]
async def handle_list_prompts() -> list[types.Prompt]:
llm_api = await get_api_instance()
return [
@@ -69,7 +69,7 @@ async def create_server(
)
]
@server.get_prompt() # type: ignore[no-untyped-call, misc]
@server.get_prompt() # type: ignore[no-untyped-call,untyped-decorator]
async def handle_get_prompt(
name: str, arguments: dict[str, str] | None
) -> types.GetPromptResult:
@@ -90,13 +90,13 @@ async def create_server(
],
)
@server.list_tools() # type: ignore[no-untyped-call, misc]
@server.list_tools() # type: ignore[no-untyped-call,untyped-decorator]
async def list_tools() -> list[types.Tool]:
"""List available time tools."""
llm_api = await get_api_instance()
return [_format_tool(tool, llm_api.custom_serializer) for tool in llm_api.tools]
@server.call_tool() # type: ignore[misc]
@server.call_tool() # type: ignore[untyped-decorator]
async def call_tool(name: str, arguments: dict) -> Sequence[types.TextContent]:
"""Handle calling tools."""
llm_api = await get_api_instance()

View File

@@ -408,5 +408,5 @@ class AtwDeviceZoneClimate(MelCloudClimate):
async def async_set_temperature(self, **kwargs: Any) -> None:
"""Set new target temperature."""
await self._zone.set_target_temperature(
kwargs.get("temperature", self.target_temperature)
kwargs.get(ATTR_TEMPERATURE, self.target_temperature)
)

View File

@@ -1,7 +1,7 @@
{
"domain": "sharkiq",
"name": "Shark IQ",
"codeowners": ["@JeffResc", "@funkybunch"],
"codeowners": ["@JeffResc", "@funkybunch", "@TheOneOgre"],
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/sharkiq",
"iot_class": "cloud_polling",

View File

@@ -157,21 +157,18 @@ SENSORS: dict[tuple[str, str], BlockBinarySensorDescription] = {
key="input|input",
name="Input",
device_class=BinarySensorDeviceClass.POWER,
entity_registry_enabled_default=False,
removal_condition=is_block_momentary_input,
),
("relay", "input"): BlockBinarySensorDescription(
key="relay|input",
name="Input",
device_class=BinarySensorDeviceClass.POWER,
entity_registry_enabled_default=False,
removal_condition=is_block_momentary_input,
),
("device", "input"): BlockBinarySensorDescription(
key="device|input",
name="Input",
device_class=BinarySensorDeviceClass.POWER,
entity_registry_enabled_default=False,
removal_condition=is_block_momentary_input,
),
("sensor", "extInput"): BlockBinarySensorDescription(
@@ -201,7 +198,6 @@ RPC_SENSORS: Final = {
key="input",
sub_key="state",
device_class=BinarySensorDeviceClass.POWER,
entity_registry_enabled_default=False,
removal_condition=is_rpc_momentary_input,
),
"cloud": RpcBinarySensorDescription(

View File

@@ -50,8 +50,14 @@
"valve_status": {
"default": "mdi:valve"
},
"vial_name": {
"default": "mdi:scent"
},
"illuminance_level": {
"default": "mdi:brightness-5"
},
"vial_level": {
"default": "mdi:bottle-tonic-outline"
}
},
"switch": {
@@ -61,6 +67,13 @@
"off": "mdi:valve-closed",
"on": "mdi:valve-open"
}
},
"cury_slot": {
"default": "mdi:scent",
"state": {
"off": "mdi:scent-off",
"on": "mdi:scent"
}
}
}
}

View File

@@ -72,6 +72,7 @@ class RpcNumberDescription(RpcEntityDescription, NumberEntityDescription):
min_fn: Callable[[dict], float] | None = None
step_fn: Callable[[dict], float] | None = None
mode_fn: Callable[[dict], NumberMode] | None = None
slot: str | None = None
method: str
@@ -121,6 +122,22 @@ class RpcNumber(ShellyRpcAttributeEntity, NumberEntity):
await method(self._id, value)
class RpcCuryIntensityNumber(RpcNumber):
"""Represent a RPC Cury Intensity entity."""
@rpc_call
async def async_set_native_value(self, value: float) -> None:
"""Change the value."""
method = getattr(self.coordinator.device, self.entity_description.method)
if TYPE_CHECKING:
assert method is not None
await method(
self._id, slot=self.entity_description.slot, intensity=round(value)
)
class RpcBluTrvNumber(RpcNumber):
"""Represent a RPC BluTrv number."""
@@ -274,6 +291,38 @@ RPC_NUMBERS: Final = {
is True,
entity_class=RpcBluTrvNumber,
),
"left_slot_intensity": RpcNumberDescription(
key="cury",
sub_key="slots",
name="Left slot intensity",
value=lambda status, _: status["left"]["intensity"],
native_min_value=0,
native_max_value=100,
native_step=1,
mode=NumberMode.SLIDER,
native_unit_of_measurement=PERCENTAGE,
method="cury_set",
slot="left",
available=lambda status: (left := status["left"]) is not None
and left.get("vial", {}).get("level", -1) != -1,
entity_class=RpcCuryIntensityNumber,
),
"right_slot_intensity": RpcNumberDescription(
key="cury",
sub_key="slots",
name="Right slot intensity",
value=lambda status, _: status["right"]["intensity"],
native_min_value=0,
native_max_value=100,
native_step=1,
mode=NumberMode.SLIDER,
native_unit_of_measurement=PERCENTAGE,
method="cury_set",
slot="right",
available=lambda status: (right := status["right"]) is not None
and right.get("vial", {}).get("level", -1) != -1,
entity_class=RpcCuryIntensityNumber,
),
}
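RpcCuryIntensityNumber forwards the slider value to the device's `cury_set` helper together with the slot configured on the description, as the override above shows. A hedged sketch of the resulting call for the left slot; `device` stands in for `coordinator.device` and the values are illustrative:

# Sketch: what async_set_native_value effectively calls for "left_slot_intensity";
# `device` stands in for coordinator.device, values are illustrative.
async def set_left_intensity(device, component_id: int, value: float) -> None:
    # method name comes from RpcNumberDescription.method ("cury_set"),
    # slot from RpcNumberDescription.slot ("left")
    await device.cury_set(component_id, slot="left", intensity=round(value))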

View File

@@ -1658,6 +1658,50 @@ RPC_SENSORS: Final = {
state_class=SensorStateClass.MEASUREMENT,
role="phase_info",
),
"cury_left_level": RpcSensorDescription(
key="cury",
sub_key="slots",
name="Left slot level",
translation_key="vial_level",
value=lambda status, _: status["left"]["vial"]["level"],
state_class=SensorStateClass.MEASUREMENT,
native_unit_of_measurement=PERCENTAGE,
entity_category=EntityCategory.DIAGNOSTIC,
available=lambda status: (left := status["left"]) is not None
and left.get("vial", {}).get("level", -1) != -1,
),
"cury_left_vial": RpcSensorDescription(
key="cury",
sub_key="slots",
name="Left slot vial",
translation_key="vial_name",
value=lambda status, _: status["left"]["vial"]["name"],
entity_category=EntityCategory.DIAGNOSTIC,
available=lambda status: (left := status["left"]) is not None
and left.get("vial", {}).get("level", -1) != -1,
),
"cury_right_level": RpcSensorDescription(
key="cury",
sub_key="slots",
name="Right slot level",
translation_key="vial_level",
value=lambda status, _: status["right"]["vial"]["level"],
state_class=SensorStateClass.MEASUREMENT,
native_unit_of_measurement=PERCENTAGE,
entity_category=EntityCategory.DIAGNOSTIC,
available=lambda status: (right := status["right"]) is not None
and right.get("vial", {}).get("level", -1) != -1,
),
"cury_right_vial": RpcSensorDescription(
key="cury",
sub_key="slots",
name="Right slot vial",
translation_key="vial_name",
value=lambda status, _: status["right"]["vial"]["name"],
entity_category=EntityCategory.DIAGNOSTIC,
available=lambda status: (right := status["right"]) is not None
and right.get("vial", {}).get("level", -1) != -1,
),
}

View File

@@ -230,6 +230,32 @@ RPC_SWITCHES = {
entity_registry_enabled_default=False,
entity_category=EntityCategory.CONFIG,
),
"cury_left": RpcSwitchDescription(
key="cury",
sub_key="slots",
name="Left slot",
translation_key="cury_slot",
is_on=lambda status: bool(status["slots"]["left"]["on"]),
method_on="cury_set",
method_off="cury_set",
method_params_fn=lambda id, value: (id, "left", value),
entity_registry_enabled_default=True,
available=lambda status: (left := status["left"]) is not None
and left.get("vial", {}).get("level", -1) != -1,
),
"cury_right": RpcSwitchDescription(
key="cury",
sub_key="slots",
name="Right slot",
translation_key="cury_slot",
is_on=lambda status: bool(status["slots"]["right"]["on"]),
method_on="cury_set",
method_off="cury_set",
method_params_fn=lambda id, value: (id, "right", value),
entity_registry_enabled_default=True,
available=lambda status: (right := status["right"]) is not None
and right.get("vial", {}).get("level", -1) != -1,
),
}

View File

@@ -100,8 +100,9 @@ ATTR_PIN_VALUE = "pin"
ATTR_TIMESTAMP = "timestamp"
DEFAULT_SCAN_INTERVAL = timedelta(seconds=30)
DEFAULT_SOCKET_MIN_RETRY = 15
WEBSOCKET_RECONNECT_RETRIES = 3
WEBSOCKET_RETRY_DELAY = 2
EVENT_SIMPLISAFE_EVENT = "SIMPLISAFE_EVENT"
EVENT_SIMPLISAFE_NOTIFICATION = "SIMPLISAFE_NOTIFICATION"
@@ -419,6 +420,7 @@ class SimpliSafe:
self._api = api
self._hass = hass
self._system_notifications: dict[int, set[SystemNotification]] = {}
self._websocket_reconnect_retries: int = 0
self._websocket_reconnect_task: asyncio.Task | None = None
self.entry = entry
self.initial_event_to_use: dict[int, dict[str, Any]] = {}
@@ -469,6 +471,8 @@ class SimpliSafe:
"""Start a websocket reconnection loop."""
assert self._api.websocket
self._websocket_reconnect_retries += 1
try:
await self._api.websocket.async_connect()
await self._api.websocket.async_listen()
@@ -479,9 +483,21 @@ class SimpliSafe:
LOGGER.error("Failed to connect to websocket: %s", err)
except Exception as err: # noqa: BLE001
LOGGER.error("Unknown exception while connecting to websocket: %s", err)
else:
self._websocket_reconnect_retries = 0
LOGGER.debug("Reconnecting to websocket")
await self._async_cancel_websocket_loop()
if self._websocket_reconnect_retries >= WEBSOCKET_RECONNECT_RETRIES:
LOGGER.error("Max websocket connection retries exceeded")
return
delay = WEBSOCKET_RETRY_DELAY * (2 ** (self._websocket_reconnect_retries - 1))
LOGGER.info(
"Retrying websocket connection in %s seconds (attempt %s/%s)",
delay,
self._websocket_reconnect_retries,
WEBSOCKET_RECONNECT_RETRIES,
)
await asyncio.sleep(delay)
self._websocket_reconnect_task = self._hass.async_create_task(
self._async_start_websocket_loop()
)
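With WEBSOCKET_RETRY_DELAY = 2 and WEBSOCKET_RECONNECT_RETRIES = 3, the loop now waits 2 s after the first failed attempt and 4 s after the second, then gives up once the retry counter reaches the limit. A short worked example of the delay formula:

# Sketch: delays produced by WEBSOCKET_RETRY_DELAY * (2 ** (attempt - 1)).
WEBSOCKET_RECONNECT_RETRIES = 3
WEBSOCKET_RETRY_DELAY = 2

for attempt in range(1, WEBSOCKET_RECONNECT_RETRIES + 1):
    if attempt >= WEBSOCKET_RECONNECT_RETRIES:
        print(f"attempt {attempt}: max retries exceeded, stop reconnecting")
        break
    delay = WEBSOCKET_RETRY_DELAY * (2 ** (attempt - 1))
    print(f"attempt {attempt}: retrying in {delay} s")  # 2 s, then 4 s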

View File

@@ -18,7 +18,7 @@ from homeassistant.components.climate import (
ClimateEntityFeature,
HVACMode,
)
from homeassistant.const import UnitOfTemperature
from homeassistant.const import ATTR_TEMPERATURE, UnitOfTemperature
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
@@ -352,7 +352,7 @@ class TuyaClimateEntity(TuyaEntity, ClimateEntity):
{
"code": self._set_temperature.dpcode,
"value": round(
self._set_temperature.scale_value_back(kwargs["temperature"])
self._set_temperature.scale_value_back(kwargs[ATTR_TEMPERATURE])
),
}
]

View File

@@ -0,0 +1,45 @@
"""Volvo diagnostics."""
from dataclasses import asdict
from typing import Any
from homeassistant.const import CONF_ACCESS_TOKEN, CONF_API_KEY
from homeassistant.core import HomeAssistant
from homeassistant.helpers.redact import async_redact_data
from .const import CONF_VIN
from .coordinator import VolvoConfigEntry
_TO_REDACT_ENTRY = [
CONF_ACCESS_TOKEN,
CONF_API_KEY,
CONF_VIN,
"id_token",
"refresh_token",
]
_TO_REDACT_DATA = [
"coordinates",
"heading",
"vin",
]
async def async_get_config_entry_diagnostics(
hass: HomeAssistant, entry: VolvoConfigEntry
) -> dict[str, Any]:
"""Return diagnostics for a config entry."""
context = entry.runtime_data.interval_coordinators[0].context
data: dict[str, dict] = {}
for coordinator in entry.runtime_data.interval_coordinators:
data[coordinator.name] = {
key: async_redact_data(asdict(value), _TO_REDACT_DATA) if value else None
for key, value in coordinator.data.items()
}
return {
"entry_data": async_redact_data(entry.data, _TO_REDACT_ENTRY),
"vehicle": async_redact_data(asdict(context.vehicle), _TO_REDACT_DATA),
**data,
}
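The redaction helper masks the listed keys wherever they appear in the data. A hedged mini-example of the behaviour with the _TO_REDACT_DATA keys; the input dictionary below is made up:

# Sketch: async_redact_data replaces the listed keys with "**REDACTED**";
# the sample payload is illustrative, not real vehicle data.
from homeassistant.helpers.redact import async_redact_data

sample = {"vin": "YV1ABCDEFG1234567", "coordinates": [57.7, 11.9], "odometer": 12345}
print(async_redact_data(sample, ["coordinates", "heading", "vin"]))
# -> {'vin': '**REDACTED**', 'coordinates': '**REDACTED**', 'odometer': 12345}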

View File

@@ -11,7 +11,13 @@ from typing import Any
from propcache.api import cached_property
from zha.mixins import LogMixin
from homeassistant.const import ATTR_MANUFACTURER, ATTR_MODEL, ATTR_NAME, EntityCategory
from homeassistant.const import (
ATTR_MANUFACTURER,
ATTR_MODEL,
ATTR_NAME,
ATTR_VIA_DEVICE,
EntityCategory,
)
from homeassistant.core import State, callback
from homeassistant.helpers.device_registry import CONNECTION_ZIGBEE, DeviceInfo
from homeassistant.helpers.dispatcher import async_dispatcher_connect
@@ -85,14 +91,19 @@ class ZHAEntity(LogMixin, RestoreEntity, Entity):
ieee = zha_device_info["ieee"]
zha_gateway = self.entity_data.device_proxy.gateway_proxy.gateway
return DeviceInfo(
device_info = DeviceInfo(
connections={(CONNECTION_ZIGBEE, ieee)},
identifiers={(DOMAIN, ieee)},
manufacturer=zha_device_info[ATTR_MANUFACTURER],
model=zha_device_info[ATTR_MODEL],
name=zha_device_info[ATTR_NAME],
via_device=(DOMAIN, str(zha_gateway.state.node_info.ieee)),
)
if ieee != str(zha_gateway.state.node_info.ieee):
device_info[ATTR_VIA_DEVICE] = (
DOMAIN,
str(zha_gateway.state.node_info.ieee),
)
return device_info
@callback
def _handle_entity_events(self, event: Any) -> None:
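The guard keeps the ZHA coordinator from listing itself as its own via_device; only devices whose IEEE differs from the gateway's node IEEE get the ATTR_VIA_DEVICE link. A minimal sketch of the decision with made-up IEEE addresses:

# Sketch: only non-coordinator devices link back to the gateway via via_device.
# IEEE addresses are illustrative.
gateway_ieee = "00:11:22:33:44:55:66:77"

for device_ieee in (gateway_ieee, "aa:bb:cc:dd:ee:ff:00:11"):
    device_info = {"identifiers": {("zha", device_ieee)}}
    if device_ieee != gateway_ieee:
        device_info["via_device"] = ("zha", gateway_ieee)
    print(device_ieee, "->", device_info.get("via_device"))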

View File

@@ -341,6 +341,9 @@ ATTR_NAME: Final = "name"
# Contains one string or a list of strings, each being an entity id
ATTR_ENTITY_ID: Final = "entity_id"
# Contains one string or a list of strings, each being an entity id
ATTR_INCLUDED_ENTITIES: Final = "included_entities"
# Contains one string, the config entry ID
ATTR_CONFIG_ENTRY_ID: Final = "config_entry_id"

View File

@@ -367,6 +367,7 @@ FLOWS = {
"local_ip",
"local_todo",
"locative",
"london_underground",
"lookin",
"loqed",
"luftdaten",

View File

@@ -3688,9 +3688,10 @@
},
"london_underground": {
"name": "London Underground",
"integration_type": "hub",
"config_flow": false,
"iot_class": "cloud_polling"
"integration_type": "service",
"config_flow": true,
"iot_class": "cloud_polling",
"single_config_entry": true
},
"lookin": {
"name": "LOOKin",

View File

@@ -1699,3 +1699,79 @@ class ToggleEntity(
await self.async_turn_off(**kwargs)
else:
await self.async_turn_on(**kwargs)
class IncludedEntitiesMixin(Entity):
"""Mixin class to include entities that are contained.
Integrations can include the this Mixin class for platforms that have
included the `entity_id` capability attribute.
Domain base entity platforms can include the `entity_id` capability attribute
to expose to allow exposure of the included entities.
"""
_attr_included_entities: list[str]
_included_unique_ids: list[str]
_initialized: bool = False
_platform_domain: str
@callback
def async_set_included_entities(
self, platform_domain: str, unique_ids: list[str]
) -> None:
"""Set the list of included entities identified by their unique IDs.
The entity_id of included entities will be looked up and they will be
tracked for changes.
Unique IDs that do not resolve to existing entities will be ignored.
"""
self._included_unique_ids = unique_ids
self._platform_domain = platform_domain
self._monitor_member_updates()
@property
def included_entities(self) -> list[str] | None:
"""Return a list of entity IDs if the entity represents a group.
Included entities will be shown as members in the UI.
"""
if hasattr(self, "_attr_included_entities"):
return self._attr_included_entities
return None
@callback
def _monitor_member_updates(self) -> None:
"""Update the group members if the entity registry is updated."""
entity_registry = er.async_get(self.hass)
def _update_group_entity_ids() -> None:
self._attr_included_entities = []
for included_id in self._included_unique_ids:
if entity_id := entity_registry.async_get_entity_id(
self.entity_id.split(".")[0], self._platform_domain, included_id
):
self._attr_included_entities.append(entity_id)
async def _handle_entity_registry_updated(event: Event[Any]) -> None:
"""Handle registry create or update event."""
if (
event.data["action"] in {"create", "update"}
and (entry := entity_registry.async_get(event.data["entity_id"]))
and entry.unique_id in self._included_unique_ids
) or (
event.data["action"] == "remove"
and self.included_entities is not None
and event.data["entity_id"] in self.included_entities
):
_update_group_entity_ids()
self.async_write_ha_state()
if not self._initialized:
self.async_on_remove(
self.hass.bus.async_listen(
er.EVENT_ENTITY_REGISTRY_UPDATED,
_handle_entity_registry_updated,
)
)
self._initialized = True
_update_group_entity_ids()
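A hedged usage sketch of the new mixin: a group-style entity mixes in IncludedEntitiesMixin and, once it has been added to Home Assistant, hands over the integration domain and the unique IDs of its members so their entity_ids are resolved and kept up to date. The class, integration domain, and unique IDs below are illustrative, not taken from this branch.

# Sketch (illustrative, not part of this branch's code): a group-style light
# entity exposing its members through the new mixin.
from homeassistant.components.light import LightEntity
from homeassistant.helpers.entity import IncludedEntitiesMixin  # added on this branch


class MyLightGroup(IncludedEntitiesMixin, LightEntity):
    """Group entity whose members are identified by their unique IDs."""

    async def async_added_to_hass(self) -> None:
        """Resolve and track the member entities once the entity has hass."""
        await super().async_added_to_hass()
        # "my_integration" is the platform domain that registered the members;
        # both it and the unique IDs are hypothetical.
        self.async_set_included_entities(
            "my_integration", ["member-1-unique-id", "member-2-unique-id"]
        )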

View File

@@ -294,7 +294,7 @@ class PluggableActionsEntry:
actions: dict[
object,
tuple[
HassJob[[dict[str, Any], Context | None], Coroutine[Any, Any, None]],
HassJob[[dict[str, Any], Context | None], Coroutine[Any, Any, None] | Any],
dict[str, Any],
],
] = field(default_factory=dict)
@@ -444,8 +444,8 @@ async def async_validate_trigger_config(
def _trigger_action_wrapper(
hass: HomeAssistant, action: TriggerActionType, conf: ConfigType
) -> TriggerActionType:
hass: HomeAssistant, action: Callable, conf: ConfigType
) -> Callable:
"""Wrap trigger action with extra vars if configured.
If action is a coroutine function, a coroutine function will be returned.

View File

@@ -11,9 +11,11 @@ astroid==3.3.11
coverage==7.10.6
freezegun==1.5.2
go2rtc-client==0.2.1
# librt is an internal mypy dependency
librt==0.2.1
license-expression==30.4.3
mock-open==1.4.0
mypy-dev==1.19.0a2
mypy-dev==1.19.0a4
pre-commit==4.2.0
pydantic==2.12.0
pylint==3.3.8

View File

@@ -0,0 +1,65 @@
"""Fixtures for the london_underground tests."""
from collections.abc import AsyncGenerator
import json
from unittest.mock import AsyncMock, patch
from london_tube_status import parse_api_response
import pytest
from homeassistant.components.london_underground.const import CONF_LINE, DOMAIN
from homeassistant.core import HomeAssistant
from tests.common import MockConfigEntry, async_load_fixture
from tests.conftest import AiohttpClientMocker
@pytest.fixture
def mock_setup_entry():
"""Prevent setup of integration during tests."""
with patch(
"homeassistant.components.london_underground.async_setup_entry",
return_value=True,
) as mock_setup:
yield mock_setup
@pytest.fixture
async def mock_config_entry(hass: HomeAssistant) -> MockConfigEntry:
"""Mock the config entry."""
entry = MockConfigEntry(
domain=DOMAIN,
data={},
options={CONF_LINE: ["Metropolitan"]},
title="London Underground",
)
# Add and set up the entry
entry.add_to_hass(hass)
await hass.config_entries.async_setup(entry.entry_id)
return entry
@pytest.fixture
async def mock_london_underground_client(
hass: HomeAssistant,
aioclient_mock: AiohttpClientMocker,
) -> AsyncGenerator[AsyncMock]:
"""Mock a London Underground client."""
with (
patch(
"homeassistant.components.london_underground.TubeData",
autospec=True,
) as mock_client,
patch(
"homeassistant.components.london_underground.config_flow.TubeData",
new=mock_client,
),
):
client = mock_client.return_value
# Load the fixture text
fixture_text = await async_load_fixture(hass, "line_status.json", DOMAIN)
fixture_data = parse_api_response(json.loads(fixture_text))
client.data = fixture_data
yield client

View File

@@ -0,0 +1,186 @@
"""Test the London Underground config flow."""
import asyncio
import pytest
from homeassistant.components.london_underground.const import (
CONF_LINE,
DEFAULT_LINES,
DOMAIN,
)
from homeassistant.config_entries import SOURCE_IMPORT, SOURCE_USER
from homeassistant.core import HomeAssistant
from homeassistant.data_entry_flow import FlowResultType
from homeassistant.helpers import issue_registry as ir
async def test_validate_input_success(
hass: HomeAssistant, mock_setup_entry, mock_london_underground_client
) -> None:
"""Test successful validation of TfL API."""
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": SOURCE_USER}
)
assert result["type"] is FlowResultType.FORM
assert result["errors"] == {}
result = await hass.config_entries.flow.async_configure(
result["flow_id"],
{CONF_LINE: ["Bakerloo", "Central"]},
)
assert result["type"] is FlowResultType.CREATE_ENTRY
assert result["title"] == "London Underground"
assert result["data"] == {}
assert result["options"] == {CONF_LINE: ["Bakerloo", "Central"]}
async def test_options(
hass: HomeAssistant, mock_setup_entry, mock_config_entry
) -> None:
"""Test updating options."""
result = await hass.config_entries.options.async_init(mock_config_entry.entry_id)
assert result["type"] == FlowResultType.FORM
assert result["step_id"] == "init"
result = await hass.config_entries.options.async_configure(
result["flow_id"],
user_input={
CONF_LINE: ["Bakerloo", "Central"],
},
)
assert result["type"] == FlowResultType.CREATE_ENTRY
assert result["data"] == {
CONF_LINE: ["Bakerloo", "Central"],
}
@pytest.mark.parametrize(
("side_effect", "expected_error"),
[
(Exception, "cannot_connect"),
(asyncio.TimeoutError, "timeout_connect"),
],
)
async def test_validate_input_exceptions(
hass: HomeAssistant,
mock_setup_entry,
mock_london_underground_client,
side_effect,
expected_error,
) -> None:
"""Test validation with connection and timeout errors."""
mock_london_underground_client.update.side_effect = side_effect
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": SOURCE_USER}
)
result = await hass.config_entries.flow.async_configure(
result["flow_id"],
{CONF_LINE: ["Bakerloo", "Central"]},
)
assert result["type"] is FlowResultType.FORM
assert result["errors"]["base"] == expected_error
# confirm recovery after error
mock_london_underground_client.update.side_effect = None
result = await hass.config_entries.flow.async_configure(
result["flow_id"],
{},
)
assert result["type"] is FlowResultType.CREATE_ENTRY
assert result["title"] == "London Underground"
assert result["data"] == {}
assert result["options"] == {CONF_LINE: DEFAULT_LINES}
async def test_already_configured(
hass: HomeAssistant,
mock_london_underground_client,
mock_setup_entry,
mock_config_entry,
) -> None:
"""Try (and fail) setting up a config entry when one already exists."""
# Try to start the flow
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": SOURCE_USER}
)
assert result["type"] is FlowResultType.ABORT
assert result["reason"] == "single_instance_allowed"
async def test_yaml_import(
hass: HomeAssistant,
issue_registry: ir.IssueRegistry,
mock_london_underground_client,
caplog: pytest.LogCaptureFixture,
) -> None:
"""Test a YAML sensor is imported and becomes an operational config entry."""
# Set up via YAML which will trigger import and set up the config entry
IMPORT_DATA = {
"platform": "london_underground",
"line": ["Central", "Piccadilly", "Victoria", "Bakerloo", "Northern"],
}
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": SOURCE_IMPORT}, data=IMPORT_DATA
)
assert result["type"] is FlowResultType.CREATE_ENTRY
assert result["title"] == "London Underground"
assert result["data"] == {}
assert result["options"] == {
CONF_LINE: ["Central", "Piccadilly", "Victoria", "Bakerloo", "Northern"]
}
async def test_failed_yaml_import_connection(
hass: HomeAssistant,
issue_registry: ir.IssueRegistry,
mock_london_underground_client,
caplog: pytest.LogCaptureFixture,
) -> None:
"""Test a YAML sensor is imported and becomes an operational config entry."""
# Set up via YAML which will trigger import and set up the config entry
mock_london_underground_client.update.side_effect = asyncio.TimeoutError
IMPORT_DATA = {
"platform": "london_underground",
"line": ["Central", "Piccadilly", "Victoria", "Bakerloo", "Northern"],
}
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": SOURCE_IMPORT}, data=IMPORT_DATA
)
assert result["type"] is FlowResultType.ABORT
assert result["reason"] == "cannot_connect"
async def test_failed_yaml_import_already_configured(
hass: HomeAssistant,
issue_registry: ir.IssueRegistry,
mock_london_underground_client,
caplog: pytest.LogCaptureFixture,
mock_config_entry,
) -> None:
"""Test a YAML sensor is imported and becomes an operational config entry."""
# Set up via YAML which will trigger import and set up the config entry
IMPORT_DATA = {
"platform": "london_underground",
"line": ["Central", "Piccadilly", "Victoria", "Bakerloo", "Northern"],
}
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": SOURCE_IMPORT}, data=IMPORT_DATA
)
assert result["type"] is FlowResultType.ABORT
assert result["reason"] == "single_instance_allowed"

View File

@@ -0,0 +1,20 @@
"""Test the London Underground init."""
from homeassistant.core import HomeAssistant
async def test_reload_entry(
hass: HomeAssistant, mock_london_underground_client, mock_config_entry
) -> None:
"""Test reloading the config entry."""
# Test reloading with updated options
hass.config_entries.async_update_entry(
mock_config_entry,
data={},
options={"line": ["Bakerloo", "Central"]},
)
await hass.async_block_till_done()
# Verify that setup was called for each reload
assert len(mock_london_underground_client.mock_calls) > 0

View File

@@ -1,37 +1,130 @@
"""The tests for the london_underground platform."""
from london_tube_status import API_URL
import asyncio
import pytest
from homeassistant.components.london_underground.const import CONF_LINE, DOMAIN
from homeassistant.core import HomeAssistant
from homeassistant.config_entries import ConfigEntryState
from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant
from homeassistant.helpers import issue_registry as ir
from homeassistant.setup import async_setup_component
from tests.common import async_load_fixture
from tests.test_util.aiohttp import AiohttpClientMocker
VALID_CONFIG = {
"sensor": {"platform": "london_underground", CONF_LINE: ["Metropolitan"]}
}
async def test_valid_state(
hass: HomeAssistant, aioclient_mock: AiohttpClientMocker
hass: HomeAssistant,
issue_registry: ir.IssueRegistry,
mock_london_underground_client,
mock_config_entry,
) -> None:
"""Test for operational london_underground sensor with proper attributes."""
aioclient_mock.get(
API_URL,
text=await async_load_fixture(hass, "line_status.json", DOMAIN),
)
"""Test operational London Underground sensor using a mock config entry."""
# Ensure the entry is fully loaded
assert mock_config_entry.state is ConfigEntryState.LOADED
# Confirm that the expected entity exists and is correct
state = hass.states.get("sensor.london_underground_metropolitan")
assert state is not None
assert state.state == "Good Service"
assert state.attributes == {
"Description": "Nothing to report",
"attribution": "Powered by TfL Open Data",
"friendly_name": "London Underground Metropolitan",
"icon": "mdi:subway",
}
# No YAML warning should be issued, since setup was not via YAML
assert not issue_registry.async_get_issue(DOMAIN, "yaml_deprecated")
async def test_yaml_import(
hass: HomeAssistant,
issue_registry: ir.IssueRegistry,
mock_london_underground_client,
caplog: pytest.LogCaptureFixture,
) -> None:
"""Test a YAML sensor is imported and becomes an operational config entry."""
# Set up via YAML which will trigger import and set up the config entry
VALID_CONFIG = {
"sensor": {
"platform": "london_underground",
CONF_LINE: ["Metropolitan", "London Overground"],
}
}
assert await async_setup_component(hass, "sensor", VALID_CONFIG)
await hass.async_block_till_done()
state = hass.states.get("sensor.metropolitan")
# Verify the config entry was created
entries = hass.config_entries.async_entries(DOMAIN)
assert len(entries) == 1
# Verify a warning was issued about YAML deprecation
assert issue_registry.async_get_issue(HOMEASSISTANT_DOMAIN, "deprecated_yaml")
# Check the state after setup completes
state = hass.states.get("sensor.london_underground_metropolitan")
assert state
assert state.state == "Good Service"
assert state.attributes == {
"Description": "Nothing to report",
"attribution": "Powered by TfL Open Data",
"friendly_name": "Metropolitan",
"friendly_name": "London Underground Metropolitan",
"icon": "mdi:subway",
}
# Since being renamed, London Overground is no longer returned by the API,
# so check that we do not import it and that we warn the user
state = hass.states.get("sensor.london_underground_london_overground")
assert not state
assert any(
"London Overground was removed from the configuration as the line has been divided and renamed"
in record.message
for record in caplog.records
)
async def test_failed_yaml_import(
hass: HomeAssistant,
issue_registry: ir.IssueRegistry,
mock_london_underground_client,
caplog: pytest.LogCaptureFixture,
) -> None:
"""Test a YAML sensor is imported and becomes an operational config entry."""
# Set up via YAML which will trigger import and set up the config entry
mock_london_underground_client.update.side_effect = asyncio.TimeoutError
VALID_CONFIG = {
"sensor": {"platform": "london_underground", CONF_LINE: ["Metropolitan"]}
}
assert await async_setup_component(hass, "sensor", VALID_CONFIG)
await hass.async_block_till_done()
# Verify the config entry was not created
entries = hass.config_entries.async_entries(DOMAIN)
assert len(entries) == 0
# verify no flows still in progress
flows = hass.config_entries.flow.async_progress()
assert len(flows) == 0
assert any(
"Unexpected error trying to connect before importing config" in record.message
for record in caplog.records
)
# Confirm that the import did not happen
assert not any(
"Importing London Underground config from configuration.yaml" in record.message
for record in caplog.records
)
assert not any(
"migrated to a config entry and can be safely removed" in record.message
for record in caplog.records
)
# Verify a warning was issued about YAML not being imported
assert issue_registry.async_get_issue(
DOMAIN, "deprecated_yaml_import_issue_cannot_connect"
)

View File

@@ -115,3 +115,119 @@
'state': '0',
})
# ---
# name: test_cury_number_entity[number.test_name_left_slot_intensity-entry]
EntityRegistryEntrySnapshot({
'aliases': set({
}),
'area_id': None,
'capabilities': dict({
'max': 100,
'min': 0,
'mode': <NumberMode.SLIDER: 'slider'>,
'step': 1,
}),
'config_entry_id': <ANY>,
'config_subentry_id': <ANY>,
'device_class': None,
'device_id': <ANY>,
'disabled_by': None,
'domain': 'number',
'entity_category': None,
'entity_id': 'number.test_name_left_slot_intensity',
'has_entity_name': True,
'hidden_by': None,
'icon': None,
'id': <ANY>,
'labels': set({
}),
'name': None,
'options': dict({
}),
'original_device_class': None,
'original_icon': None,
'original_name': 'Left slot intensity',
'platform': 'shelly',
'previous_unique_id': None,
'suggested_object_id': None,
'supported_features': 0,
'translation_key': None,
'unique_id': '123456789ABC-cury:0-left_slot_intensity',
'unit_of_measurement': '%',
})
# ---
# name: test_cury_number_entity[number.test_name_left_slot_intensity-state]
StateSnapshot({
'attributes': ReadOnlyDict({
'friendly_name': 'Test name Left slot intensity',
'max': 100,
'min': 0,
'mode': <NumberMode.SLIDER: 'slider'>,
'step': 1,
'unit_of_measurement': '%',
}),
'context': <ANY>,
'entity_id': 'number.test_name_left_slot_intensity',
'last_changed': <ANY>,
'last_reported': <ANY>,
'last_updated': <ANY>,
'state': '70',
})
# ---
# name: test_cury_number_entity[number.test_name_right_slot_intensity-entry]
EntityRegistryEntrySnapshot({
'aliases': set({
}),
'area_id': None,
'capabilities': dict({
'max': 100,
'min': 0,
'mode': <NumberMode.SLIDER: 'slider'>,
'step': 1,
}),
'config_entry_id': <ANY>,
'config_subentry_id': <ANY>,
'device_class': None,
'device_id': <ANY>,
'disabled_by': None,
'domain': 'number',
'entity_category': None,
'entity_id': 'number.test_name_right_slot_intensity',
'has_entity_name': True,
'hidden_by': None,
'icon': None,
'id': <ANY>,
'labels': set({
}),
'name': None,
'options': dict({
}),
'original_device_class': None,
'original_icon': None,
'original_name': 'Right slot intensity',
'platform': 'shelly',
'previous_unique_id': None,
'suggested_object_id': None,
'supported_features': 0,
'translation_key': None,
'unique_id': '123456789ABC-cury:0-right_slot_intensity',
'unit_of_measurement': '%',
})
# ---
# name: test_cury_number_entity[number.test_name_right_slot_intensity-state]
StateSnapshot({
'attributes': ReadOnlyDict({
'friendly_name': 'Test name Right slot intensity',
'max': 100,
'min': 0,
'mode': <NumberMode.SLIDER: 'slider'>,
'step': 1,
'unit_of_measurement': '%',
}),
'context': <ANY>,
'entity_id': 'number.test_name_right_slot_intensity',
'last_changed': <ANY>,
'last_reported': <ANY>,
'last_updated': <ANY>,
'state': '70',
})
# ---

View File

@@ -157,6 +157,206 @@
'state': '0',
})
# ---
# name: test_cury_sensor_entity[sensor.test_name_left_slot_level-entry]
EntityRegistryEntrySnapshot({
'aliases': set({
}),
'area_id': None,
'capabilities': dict({
'state_class': <SensorStateClass.MEASUREMENT: 'measurement'>,
}),
'config_entry_id': <ANY>,
'config_subentry_id': <ANY>,
'device_class': None,
'device_id': <ANY>,
'disabled_by': None,
'domain': 'sensor',
'entity_category': <EntityCategory.DIAGNOSTIC: 'diagnostic'>,
'entity_id': 'sensor.test_name_left_slot_level',
'has_entity_name': True,
'hidden_by': None,
'icon': None,
'id': <ANY>,
'labels': set({
}),
'name': None,
'options': dict({
}),
'original_device_class': None,
'original_icon': None,
'original_name': 'Left slot level',
'platform': 'shelly',
'previous_unique_id': None,
'suggested_object_id': None,
'supported_features': 0,
'translation_key': 'vial_level',
'unique_id': '123456789ABC-cury:0-cury_left_level',
'unit_of_measurement': '%',
})
# ---
# name: test_cury_sensor_entity[sensor.test_name_left_slot_level-state]
StateSnapshot({
'attributes': ReadOnlyDict({
'friendly_name': 'Test name Left slot level',
'state_class': <SensorStateClass.MEASUREMENT: 'measurement'>,
'unit_of_measurement': '%',
}),
'context': <ANY>,
'entity_id': 'sensor.test_name_left_slot_level',
'last_changed': <ANY>,
'last_reported': <ANY>,
'last_updated': <ANY>,
'state': '27',
})
# ---
# name: test_cury_sensor_entity[sensor.test_name_left_slot_vial-entry]
EntityRegistryEntrySnapshot({
'aliases': set({
}),
'area_id': None,
'capabilities': None,
'config_entry_id': <ANY>,
'config_subentry_id': <ANY>,
'device_class': None,
'device_id': <ANY>,
'disabled_by': None,
'domain': 'sensor',
'entity_category': <EntityCategory.DIAGNOSTIC: 'diagnostic'>,
'entity_id': 'sensor.test_name_left_slot_vial',
'has_entity_name': True,
'hidden_by': None,
'icon': None,
'id': <ANY>,
'labels': set({
}),
'name': None,
'options': dict({
}),
'original_device_class': None,
'original_icon': None,
'original_name': 'Left slot vial',
'platform': 'shelly',
'previous_unique_id': None,
'suggested_object_id': None,
'supported_features': 0,
'translation_key': 'vial_name',
'unique_id': '123456789ABC-cury:0-cury_left_vial',
'unit_of_measurement': None,
})
# ---
# name: test_cury_sensor_entity[sensor.test_name_left_slot_vial-state]
StateSnapshot({
'attributes': ReadOnlyDict({
'friendly_name': 'Test name Left slot vial',
}),
'context': <ANY>,
'entity_id': 'sensor.test_name_left_slot_vial',
'last_changed': <ANY>,
'last_reported': <ANY>,
'last_updated': <ANY>,
'state': 'Forest Dream',
})
# ---
# name: test_cury_sensor_entity[sensor.test_name_right_slot_level-entry]
EntityRegistryEntrySnapshot({
'aliases': set({
}),
'area_id': None,
'capabilities': dict({
'state_class': <SensorStateClass.MEASUREMENT: 'measurement'>,
}),
'config_entry_id': <ANY>,
'config_subentry_id': <ANY>,
'device_class': None,
'device_id': <ANY>,
'disabled_by': None,
'domain': 'sensor',
'entity_category': <EntityCategory.DIAGNOSTIC: 'diagnostic'>,
'entity_id': 'sensor.test_name_right_slot_level',
'has_entity_name': True,
'hidden_by': None,
'icon': None,
'id': <ANY>,
'labels': set({
}),
'name': None,
'options': dict({
}),
'original_device_class': None,
'original_icon': None,
'original_name': 'Right slot level',
'platform': 'shelly',
'previous_unique_id': None,
'suggested_object_id': None,
'supported_features': 0,
'translation_key': 'vial_level',
'unique_id': '123456789ABC-cury:0-cury_right_level',
'unit_of_measurement': '%',
})
# ---
# name: test_cury_sensor_entity[sensor.test_name_right_slot_level-state]
StateSnapshot({
'attributes': ReadOnlyDict({
'friendly_name': 'Test name Right slot level',
'state_class': <SensorStateClass.MEASUREMENT: 'measurement'>,
'unit_of_measurement': '%',
}),
'context': <ANY>,
'entity_id': 'sensor.test_name_right_slot_level',
'last_changed': <ANY>,
'last_reported': <ANY>,
'last_updated': <ANY>,
'state': '84',
})
# ---
# name: test_cury_sensor_entity[sensor.test_name_right_slot_vial-entry]
EntityRegistryEntrySnapshot({
'aliases': set({
}),
'area_id': None,
'capabilities': None,
'config_entry_id': <ANY>,
'config_subentry_id': <ANY>,
'device_class': None,
'device_id': <ANY>,
'disabled_by': None,
'domain': 'sensor',
'entity_category': <EntityCategory.DIAGNOSTIC: 'diagnostic'>,
'entity_id': 'sensor.test_name_right_slot_vial',
'has_entity_name': True,
'hidden_by': None,
'icon': None,
'id': <ANY>,
'labels': set({
}),
'name': None,
'options': dict({
}),
'original_device_class': None,
'original_icon': None,
'original_name': 'Right slot vial',
'platform': 'shelly',
'previous_unique_id': None,
'suggested_object_id': None,
'supported_features': 0,
'translation_key': 'vial_name',
'unique_id': '123456789ABC-cury:0-cury_right_vial',
'unit_of_measurement': None,
})
# ---
# name: test_cury_sensor_entity[sensor.test_name_right_slot_vial-state]
StateSnapshot({
'attributes': ReadOnlyDict({
'friendly_name': 'Test name Right slot vial',
}),
'context': <ANY>,
'entity_id': 'sensor.test_name_right_slot_vial',
'last_changed': <ANY>,
'last_reported': <ANY>,
'last_updated': <ANY>,
'state': 'Velvet Rose',
})
# ---
# name: test_rpc_shelly_ev_sensors[sensor.test_name_charger_state-entry]
EntityRegistryEntrySnapshot({
'aliases': set({

View File

@@ -0,0 +1,97 @@
# serializer version: 1
# name: test_cury_switch_entity[switch.test_name_left_slot-entry]
EntityRegistryEntrySnapshot({
'aliases': set({
}),
'area_id': None,
'capabilities': None,
'config_entry_id': <ANY>,
'config_subentry_id': <ANY>,
'device_class': None,
'device_id': <ANY>,
'disabled_by': None,
'domain': 'switch',
'entity_category': None,
'entity_id': 'switch.test_name_left_slot',
'has_entity_name': True,
'hidden_by': None,
'icon': None,
'id': <ANY>,
'labels': set({
}),
'name': None,
'options': dict({
}),
'original_device_class': None,
'original_icon': None,
'original_name': 'Left slot',
'platform': 'shelly',
'previous_unique_id': None,
'suggested_object_id': None,
'supported_features': 0,
'translation_key': 'cury_slot',
'unique_id': '123456789ABC-cury:0-cury_left',
'unit_of_measurement': None,
})
# ---
# name: test_cury_switch_entity[switch.test_name_left_slot-state]
StateSnapshot({
'attributes': ReadOnlyDict({
'friendly_name': 'Test name Left slot',
}),
'context': <ANY>,
'entity_id': 'switch.test_name_left_slot',
'last_changed': <ANY>,
'last_reported': <ANY>,
'last_updated': <ANY>,
'state': 'on',
})
# ---
# name: test_cury_switch_entity[switch.test_name_right_slot-entry]
EntityRegistryEntrySnapshot({
'aliases': set({
}),
'area_id': None,
'capabilities': None,
'config_entry_id': <ANY>,
'config_subentry_id': <ANY>,
'device_class': None,
'device_id': <ANY>,
'disabled_by': None,
'domain': 'switch',
'entity_category': None,
'entity_id': 'switch.test_name_right_slot',
'has_entity_name': True,
'hidden_by': None,
'icon': None,
'id': <ANY>,
'labels': set({
}),
'name': None,
'options': dict({
}),
'original_device_class': None,
'original_icon': None,
'original_name': 'Right slot',
'platform': 'shelly',
'previous_unique_id': None,
'suggested_object_id': None,
'supported_features': 0,
'translation_key': 'cury_slot',
'unique_id': '123456789ABC-cury:0-cury_right',
'unit_of_measurement': None,
})
# ---
# name: test_cury_switch_entity[switch.test_name_right_slot-state]
StateSnapshot({
'attributes': ReadOnlyDict({
'friendly_name': 'Test name Right slot',
}),
'context': <ANY>,
'entity_id': 'switch.test_name_right_slot',
'last_changed': <ANY>,
'last_reported': <ANY>,
'last_updated': <ANY>,
'state': 'off',
})
# ---

View File

@@ -568,3 +568,50 @@ async def test_blu_trv_number_reauth_error(
assert "context" in flow
assert flow["context"].get("source") == SOURCE_REAUTH
assert flow["context"].get("entry_id") == entry.entry_id


async def test_cury_number_entity(
hass: HomeAssistant,
mock_rpc_device: Mock,
entity_registry: EntityRegistry,
snapshot: SnapshotAssertion,
monkeypatch: pytest.MonkeyPatch,
) -> None:
"""Test number entities for cury component."""
status = {
"cury:0": {
"id": 0,
"slots": {
"left": {
"intensity": 70,
"on": True,
"vial": {"level": 27, "name": "Forest Dream"},
},
"right": {
"intensity": 70,
"on": False,
"vial": {"level": 84, "name": "Velvet Rose"},
},
},
}
}
monkeypatch.setattr(mock_rpc_device, "status", status)
await init_integration(hass, 3)
for entity in ("left_slot_intensity", "right_slot_intensity"):
entity_id = f"{NUMBER_DOMAIN}.test_name_{entity}"
state = hass.states.get(entity_id)
assert state == snapshot(name=f"{entity_id}-state")
entry = entity_registry.async_get(entity_id)
assert entry == snapshot(name=f"{entity_id}-entry")
await hass.services.async_call(
NUMBER_DOMAIN,
SERVICE_SET_VALUE,
{ATTR_ENTITY_ID: "number.test_name_left_slot_intensity", ATTR_VALUE: 80.0},
blocking=True,
)
mock_rpc_device.mock_update()
mock_rpc_device.cury_set.assert_called_once_with(0, slot="left", intensity=80)
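
The assertion above fixes the RPC call shape for the intensity slider. A minimal sketch of the entity method such a test exercises, assuming a hypothetical ShellyCuryNumber class and a _device attribute; the real integration's class and attribute names may differ:

from homeassistant.components.number import NumberEntity


class ShellyCuryNumber(NumberEntity):
    """Sketch: intensity slider for one cury slot (hypothetical class name)."""

    def __init__(self, device, slot: str) -> None:
        self._device = device  # RPC device wrapper exposing cury_set (assumed)
        self._slot = slot  # "left" or "right"

    async def async_set_native_value(self, value: float) -> None:
        # Same call shape as asserted above: cury_set(id, slot=..., intensity=...)
        await self._device.cury_set(0, slot=self._slot, intensity=int(value))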

View File

@@ -1949,3 +1949,46 @@ async def test_rpc_pm1_energy_consumed_sensor_non_float_value(
assert (state := hass.states.get(entity_id))
assert state.state == STATE_UNKNOWN


async def test_cury_sensor_entity(
hass: HomeAssistant,
mock_rpc_device: Mock,
entity_registry: EntityRegistry,
snapshot: SnapshotAssertion,
monkeypatch: pytest.MonkeyPatch,
) -> None:
"""Test sensor entities for cury component."""
status = {
"cury:0": {
"id": 0,
"slots": {
"left": {
"intensity": 70,
"on": True,
"vial": {"level": 27, "name": "Forest Dream"},
},
"right": {
"intensity": 70,
"on": False,
"vial": {"level": 84, "name": "Velvet Rose"},
},
},
}
}
monkeypatch.setattr(mock_rpc_device, "status", status)
await init_integration(hass, 3)
for entity in (
"left_slot_level",
"right_slot_level",
"left_slot_vial",
"right_slot_vial",
):
entity_id = f"{SENSOR_DOMAIN}.test_name_{entity}"
state = hass.states.get(entity_id)
assert state == snapshot(name=f"{entity_id}-state")
entry = entity_registry.async_get(entity_id)
assert entry == snapshot(name=f"{entity_id}-entry")

View File

@@ -8,6 +8,7 @@ from aioshelly.const import MODEL_1PM, MODEL_GAS, MODEL_MOTION
from aioshelly.exceptions import DeviceConnectionError, InvalidAuthError, RpcCallError
from freezegun.api import FrozenDateTimeFactory
import pytest
from syrupy.assertion import SnapshotAssertion
from homeassistant.components.climate import DOMAIN as CLIMATE_DOMAIN
from homeassistant.components.shelly.const import (
@@ -24,6 +25,7 @@ from homeassistant.const import (
SERVICE_TURN_ON,
STATE_OFF,
STATE_ON,
STATE_UNAVAILABLE,
STATE_UNKNOWN,
Platform,
)
@@ -35,6 +37,7 @@ from homeassistant.helpers.entity_registry import EntityRegistry
from . import (
init_integration,
inject_rpc_device_event,
mutate_rpc_device_status,
patch_platforms,
register_device,
register_entity,
@@ -829,3 +832,119 @@ async def test_rpc_device_script_switch(
assert (state := hass.states.get(entity_id))
assert state.state == STATE_ON
mock_rpc_device.script_start.assert_called_once_with(1)


async def test_cury_switch_entity(
hass: HomeAssistant,
mock_rpc_device: Mock,
entity_registry: EntityRegistry,
snapshot: SnapshotAssertion,
monkeypatch: pytest.MonkeyPatch,
) -> None:
"""Test switch entities for cury component."""
status = {
"cury:0": {
"id": 0,
"slots": {
"left": {
"intensity": 70,
"on": True,
"vial": {"level": 27, "name": "Forest Dream"},
},
"right": {
"intensity": 70,
"on": False,
"vial": {"level": 84, "name": "Velvet Rose"},
},
},
}
}
monkeypatch.setattr(mock_rpc_device, "status", status)
await init_integration(hass, 3)
for entity in ("left_slot", "right_slot"):
entity_id = f"{SWITCH_DOMAIN}.test_name_{entity}"
state = hass.states.get(entity_id)
assert state == snapshot(name=f"{entity_id}-state")
entry = entity_registry.async_get(entity_id)
assert entry == snapshot(name=f"{entity_id}-entry")
await hass.services.async_call(
SWITCH_DOMAIN,
SERVICE_TURN_OFF,
{ATTR_ENTITY_ID: "switch.test_name_left_slot"},
blocking=True,
)
mock_rpc_device.mock_update()
mock_rpc_device.cury_set.assert_called_once_with(0, "left", False)
await hass.services.async_call(
SWITCH_DOMAIN,
SERVICE_TURN_ON,
{ATTR_ENTITY_ID: "switch.test_name_right_slot"},
blocking=True,
)
mock_rpc_device.mock_update()
mock_rpc_device.cury_set.assert_called_with(0, "right", True)


async def test_cury_switch_availability(
hass: HomeAssistant,
mock_rpc_device: Mock,
monkeypatch: pytest.MonkeyPatch,
) -> None:
"""Test availability of switch entities for cury component."""
slots = {
"left": {
"intensity": 70,
"on": True,
"vial": {"level": 27, "name": "Forest Dream"},
},
"right": {
"intensity": 70,
"on": False,
"vial": {"level": 84, "name": "Velvet Rose"},
},
}
status = {"cury:0": {"id": 0, "slots": slots}}
monkeypatch.setattr(mock_rpc_device, "status", status)
await init_integration(hass, 3)
entity_id = f"{SWITCH_DOMAIN}.test_name_left_slot"
assert (state := hass.states.get(entity_id))
assert state.state == STATE_ON
slots["left"]["vial"]["level"] = -1
mutate_rpc_device_status(monkeypatch, mock_rpc_device, "cury:0", "slots", slots)
mock_rpc_device.mock_update()
assert (state := hass.states.get(entity_id))
assert state.state == STATE_UNAVAILABLE
slots["left"].pop("vial")
mutate_rpc_device_status(monkeypatch, mock_rpc_device, "cury:0", "slots", slots)
mock_rpc_device.mock_update()
assert (state := hass.states.get(entity_id))
assert state.state == STATE_UNAVAILABLE
slots["left"] = None
mutate_rpc_device_status(monkeypatch, mock_rpc_device, "cury:0", "slots", slots)
mock_rpc_device.mock_update()
assert (state := hass.states.get(entity_id))
assert state.state == STATE_UNAVAILABLE
slots["left"] = {
"intensity": 70,
"on": True,
"vial": {"level": 27, "name": "Forest Dream"},
}
mutate_rpc_device_status(monkeypatch, mock_rpc_device, "cury:0", "slots", slots)
mock_rpc_device.mock_update()
assert (state := hass.states.get(entity_id))
assert state.state == STATE_ON
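
Taken together, the mutations above imply an availability rule: a slot switch goes unavailable when its slot is missing, has no vial, or reports a vial level of -1. A minimal sketch of such a property, assuming the entity keeps a _slot attribute and reads the device status through its coordinator (both assumptions):

from typing import Any

from homeassistant.components.switch import SwitchEntity


class ShellyCurySlotSwitch(SwitchEntity):
    """Sketch: switch for one cury fragrance slot (hypothetical class name)."""

    coordinator: Any  # provided by the integration's coordinator base (assumed)
    _slot: str  # "left" or "right"

    @property
    def available(self) -> bool:
        # Unavailable when the slot is None/missing, lacks a vial, or the
        # vial level is reported as -1, matching the scenarios tested above.
        slots = self.coordinator.device.status["cury:0"].get("slots") or {}
        slot = slots.get(self._slot)
        if not slot or "vial" not in slot:
            return False
        return slot["vial"].get("level", -1) >= 0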

View File

@@ -6,7 +6,7 @@ from typing import Any
from unittest.mock import AsyncMock, patch
import pytest
from telegram import Bot, Chat, ChatFullInfo, Message, User
from telegram import Bot, Chat, ChatFullInfo, Message, User, WebhookInfo
from telegram.constants import AccentColor, ChatType
from homeassistant.components.telegram_bot import (
@@ -74,11 +74,22 @@ def mock_register_webhook() -> Generator[None]:
"""Mock calls made by telegram_bot when (de)registering webhook."""
with (
patch(
"homeassistant.components.telegram_bot.webhooks.PushBot.register_webhook",
return_value=True,
"homeassistant.components.telegram_bot.webhooks.Bot.delete_webhook",
AsyncMock(),
),
patch(
"homeassistant.components.telegram_bot.webhooks.PushBot.deregister_webhook",
"homeassistant.components.telegram_bot.webhooks.Bot.get_webhook_info",
AsyncMock(
return_value=WebhookInfo(
url="mock url",
last_error_date=datetime.now(),
has_custom_certificate=False,
pending_update_count=0,
)
),
),
patch(
"homeassistant.components.telegram_bot.webhooks.Bot.set_webhook",
return_value=True,
),
):
@@ -113,9 +124,6 @@ def mock_external_calls() -> Generator[None]:
super().__init__(*args, **kwargs)
self._bot_user = test_user
async def delete_webhook(self) -> bool:
return True
with (
patch("homeassistant.components.telegram_bot.bot.Bot", BotMock),
patch.object(BotMock, "get_chat", return_value=test_chat),

View File

@@ -1,12 +1,11 @@
"""Tests for webhooks."""
from datetime import datetime
from ipaddress import IPv4Network
from unittest.mock import AsyncMock, patch
from unittest.mock import patch
from telegram import WebhookInfo
from telegram.error import TimedOut
from homeassistant.components.telegram_bot.const import DOMAIN
from homeassistant.components.telegram_bot.webhooks import TELEGRAM_WEBHOOK_URL
from homeassistant.config_entries import ConfigEntryState
from homeassistant.core import HomeAssistant
@@ -19,91 +18,61 @@ async def test_set_webhooks_failed(
hass: HomeAssistant,
mock_webhooks_config_entry: MockConfigEntry,
mock_external_calls: None,
mock_generate_secret_token,
mock_register_webhook: None,
) -> None:
"""Test set webhooks failed."""
mock_webhooks_config_entry.add_to_hass(hass)
with (
patch(
"homeassistant.components.telegram_bot.webhooks.Bot.get_webhook_info",
AsyncMock(
return_value=WebhookInfo(
url="mock url",
last_error_date=datetime.now(),
has_custom_certificate=False,
pending_update_count=0,
)
),
) as mock_webhook_info,
"homeassistant.components.telegram_bot.webhooks.secrets.choice",
return_value="DEADBEEF12345678DEADBEEF87654321",
),
patch(
"homeassistant.components.telegram_bot.webhooks.Bot.set_webhook",
) as mock_set_webhook,
patch(
"homeassistant.components.telegram_bot.webhooks.ApplicationBuilder"
) as application_builder_class,
):
mock_set_webhook.side_effect = [TimedOut("mock timeout"), False]
application = application_builder_class.return_value.bot.return_value.updater.return_value.build.return_value
application.initialize = AsyncMock()
application.start = AsyncMock()
await hass.config_entries.async_setup(mock_webhooks_config_entry.entry_id)
await hass.async_block_till_done()
await hass.async_stop()
mock_webhook_info.assert_called_once()
application.initialize.assert_called_once()
application.start.assert_called_once()
assert mock_set_webhook.call_count > 0
        # first attempt fails with an exception, the second returns False
assert mock_set_webhook.call_count == 2
        # SETUP_ERROR is the result of ConfigEntryNotReady("Failed to register webhook with Telegram") in webhooks.py
assert mock_webhooks_config_entry.state == ConfigEntryState.SETUP_ERROR
# test fail after retries
mock_set_webhook.reset_mock()
mock_set_webhook.side_effect = TimedOut("mock timeout")
await hass.config_entries.async_reload(mock_webhooks_config_entry.entry_id)
await hass.async_block_till_done()
# 3 retries
assert mock_set_webhook.call_count == 3
assert mock_webhooks_config_entry.state == ConfigEntryState.SETUP_ERROR
await hass.async_block_till_done()
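
The two scenarios above pin down the retry behaviour: a TimedOut from set_webhook is retried up to three attempts, while a False return aborts immediately, and either outcome ends in a setup error. A minimal sketch of a helper with that behaviour; the function name and parameters are assumptions, not the integration's actual code:

from telegram import Bot
from telegram.error import TimedOut


async def try_register_webhook(bot: Bot, url: str, secret_token: str) -> bool:
    """Sketch: retry set_webhook on TimedOut, stop on a falsy result."""
    for _ in range(3):  # three attempts, matching the retry count asserted above
        try:
            # A False return means Telegram rejected the webhook; no retry helps.
            return bool(await bot.set_webhook(url, secret_token=secret_token))
        except TimedOut:
            continue  # transient timeout: try again
    return False

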
async def test_set_webhooks(
hass: HomeAssistant,
mock_webhooks_config_entry: MockConfigEntry,
mock_external_calls: None,
mock_register_webhook: None,
mock_generate_secret_token,
) -> None:
"""Test set webhooks success."""
mock_webhooks_config_entry.add_to_hass(hass)
await hass.config_entries.async_setup(mock_webhooks_config_entry.entry_id)
with (
patch(
"homeassistant.components.telegram_bot.webhooks.Bot.get_webhook_info",
AsyncMock(
return_value=WebhookInfo(
url="mock url",
last_error_date=datetime.now(),
has_custom_certificate=False,
pending_update_count=0,
)
),
) as mock_webhook_info,
patch(
"homeassistant.components.telegram_bot.webhooks.Bot.set_webhook",
AsyncMock(return_value=True),
) as mock_set_webhook,
patch(
"homeassistant.components.telegram_bot.webhooks.ApplicationBuilder"
) as application_builder_class,
):
application = application_builder_class.return_value.bot.return_value.updater.return_value.build.return_value
application.initialize = AsyncMock()
application.start = AsyncMock()
await hass.async_block_till_done()
await hass.config_entries.async_setup(mock_webhooks_config_entry.entry_id)
await hass.async_block_till_done()
await hass.async_stop()
mock_webhook_info.assert_called_once()
application.initialize.assert_called_once()
application.start.assert_called_once()
mock_set_webhook.assert_called_once()
assert mock_webhooks_config_entry.state == ConfigEntryState.LOADED
assert mock_webhooks_config_entry.state == ConfigEntryState.LOADED
async def test_webhooks_update_invalid_json(
@@ -148,3 +117,24 @@ async def test_webhooks_unauthorized_network(
await hass.async_block_till_done()
mock_remote.assert_called_once()


async def test_webhooks_deregister_failed(
hass: HomeAssistant,
webhook_platform,
mock_external_calls: None,
mock_generate_secret_token,
) -> None:
"""Test deregister webhooks."""
config_entry = hass.config_entries.async_entries(DOMAIN)[0]
assert config_entry.state == ConfigEntryState.LOADED
with patch(
"homeassistant.components.telegram_bot.webhooks.Bot.delete_webhook",
) as mock_delete_webhook:
mock_delete_webhook.side_effect = TimedOut("mock timeout")
await hass.config_entries.async_unload(config_entry.entry_id)
mock_delete_webhook.assert_called_once()
assert config_entry.state == ConfigEntryState.NOT_LOADED
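
The deregister test above only requires that a TimedOut from delete_webhook does not prevent the entry from unloading. A minimal sketch of that best-effort shape; the helper name and log message are assumptions:

import logging

from telegram import Bot
from telegram.error import TimedOut

_LOGGER = logging.getLogger(__name__)


async def try_deregister_webhook(bot: Bot) -> None:
    """Sketch: webhook removal must not block config entry unload."""
    try:
        await bot.delete_webhook()
    except TimedOut:
        # Swallow the timeout so async_unload_entry can still return True.
        _LOGGER.warning("Timed out removing Telegram webhook")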

View File

@@ -230,7 +230,7 @@ DEVICE_MOCKS = [
"wg2_tmwhss6ntjfc7prs", # https://github.com/home-assistant/core/issues/150662
"wg2_v7owd9tzcaninc36", # https://github.com/orgs/home-assistant/discussions/539
"wk_6kijc7nd", # https://github.com/home-assistant/core/issues/136513
"wk_IAYz2WK1th0cMLmL", # https://github.com/orgs/home-assistant/discussions/842
"wk_IAYz2WK1th0cMLmL", # https://github.com/home-assistant/core/issues/150077
"wk_aqoouq7x", # https://github.com/home-assistant/core/issues/146263
"wk_ccpwojhalfxryigz", # https://github.com/home-assistant/core/issues/145551
"wk_cpmgn2cf", # https://github.com/orgs/home-assistant/discussions/684

View File

@@ -10,9 +10,9 @@
"online": true,
"sub": false,
"time_zone": "+01:00",
"active_time": "2018-12-04T17:50:07+00:00",
"create_time": "2018-12-04T17:50:07+00:00",
"update_time": "2025-09-03T07:44:16+00:00",
"active_time": "2022-11-15T08:35:43+00:00",
"create_time": "2022-11-15T08:35:43+00:00",
"update_time": "2022-11-15T08:35:43+00:00",
"function": {
"switch": {
"type": "Boolean",
@@ -22,6 +22,16 @@
"type": "Boolean",
"value": {}
},
"temp_set": {
"type": "Integer",
"value": {
"unit": "\u2103",
"min": 10,
"max": 70,
"scale": 1,
"step": 5
}
},
"eco": {
"type": "Boolean",
"value": {}
@@ -35,26 +45,14 @@
"scale": 0,
"step": 5
}
}
},
"status_range": {
"eco": {
"type": "Boolean",
"value": {}
},
"Mode": {
"type": "Enum",
"value": {
"range": ["0", "1"]
}
},
"program": {
"type": "Raw",
"value": {
"maxlen": 128
}
},
"tempSwitch": {
"type": "Enum",
"value": {
"range": ["0", "1"]
}
},
"TempSet": {
"temp_set": {
"type": "Integer",
"value": {
"unit": "\u2103",
@@ -63,12 +61,6 @@
"scale": 1,
"step": 5
}
}
},
"status_range": {
"eco": {
"type": "Boolean",
"value": {}
},
"switch": {
"type": "Boolean",
@@ -87,43 +79,14 @@
"scale": 0,
"step": 5
}
},
"floorTemp": {
"type": "Integer",
"value": {
"max": 198,
"min": 0,
"scale": 0,
"step": 5,
"unit": "\u2103"
}
},
"floortempFunction": {
"type": "Boolean",
"value": {}
},
"TempSet": {
"type": "Integer",
"value": {
"unit": "\u2103",
"min": 10,
"max": 70,
"scale": 1,
"step": 5
}
}
},
"status": {
"switch": false,
"upper_temp": 55,
"eco": true,
"child_lock": false,
"Mode": 1,
"program": "DwYoDwceHhQoORceOhceOxceAAkoAAoeHhQoORceOhceOxceAAkoAAoeHhQoORceOhceOxce",
"floorTemp": 0,
"tempSwitch": 0,
"floortempFunction": true,
"TempSet": 41
"switch": true,
"temp_set": 46,
"upper_temp": 45,
"eco": false,
"child_lock": true
},
"set_up": true,
"support_local": true

View File

@@ -383,9 +383,9 @@
<HVACMode.OFF: 'off'>,
<HVACMode.HEAT_COOL: 'heat_cool'>,
]),
'max_temp': 35,
'min_temp': 7,
'target_temp_step': 1.0,
'max_temp': 7.0,
'min_temp': 1.0,
'target_temp_step': 0.5,
}),
'config_entry_id': <ANY>,
'config_subentry_id': <ANY>,
@@ -410,7 +410,7 @@
'platform': 'tuya',
'previous_unique_id': None,
'suggested_object_id': None,
'supported_features': <ClimateEntityFeature: 384>,
'supported_features': <ClimateEntityFeature: 385>,
'translation_key': None,
'unique_id': 'tuya.LmLMc0ht1KW2zYAIkw',
'unit_of_measurement': None,
@@ -419,23 +419,24 @@
# name: test_platform_setup_and_discovery[climate.el_termostato_de_la_cocina-state]
StateSnapshot({
'attributes': ReadOnlyDict({
'current_temperature': 5.5,
'current_temperature': 4.5,
'friendly_name': 'El termostato de la cocina',
'hvac_modes': list([
<HVACMode.OFF: 'off'>,
<HVACMode.HEAT_COOL: 'heat_cool'>,
]),
'max_temp': 35,
'min_temp': 7,
'supported_features': <ClimateEntityFeature: 384>,
'target_temp_step': 1.0,
'max_temp': 7.0,
'min_temp': 1.0,
'supported_features': <ClimateEntityFeature: 385>,
'target_temp_step': 0.5,
'temperature': 4.6,
}),
'context': <ANY>,
'entity_id': 'climate.el_termostato_de_la_cocina',
'last_changed': <ANY>,
'last_reported': <ANY>,
'last_updated': <ANY>,
'state': 'off',
'state': 'heat_cool',
})
# ---
# name: test_platform_setup_and_discovery[climate.empore-entry]

View File

@@ -3238,7 +3238,7 @@
'last_changed': <ANY>,
'last_reported': <ANY>,
'last_updated': <ANY>,
'state': 'off',
'state': 'on',
})
# ---
# name: test_platform_setup_and_discovery[switch.elivco_kitchen_socket_child_lock-entry]

View File

@@ -0,0 +1,535 @@
# serializer version: 1
# name: test_entry_diagnostics[xc40_electric_2024]
dict({
'Volvo fast interval coordinator': dict({
'centralLock': dict({
'extra_data': dict({
}),
'timestamp': '2024-12-30T14:20:20.570000+00:00',
'unit': None,
'value': 'LOCKED',
}),
'frontLeftDoor': dict({
'extra_data': dict({
}),
'timestamp': '2024-12-30T14:20:20.570000+00:00',
'unit': None,
'value': 'CLOSED',
}),
'frontLeftWindow': dict({
'extra_data': dict({
}),
'timestamp': '2024-12-30T14:28:12.202000+00:00',
'unit': None,
'value': 'CLOSED',
}),
'frontRightDoor': dict({
'extra_data': dict({
}),
'timestamp': '2024-12-30T14:20:20.570000+00:00',
'unit': None,
'value': 'CLOSED',
}),
'frontRightWindow': dict({
'extra_data': dict({
}),
'timestamp': '2024-12-30T14:28:12.202000+00:00',
'unit': None,
'value': 'CLOSED',
}),
'hood': dict({
'extra_data': dict({
}),
'timestamp': '2024-12-30T14:20:20.570000+00:00',
'unit': None,
'value': 'CLOSED',
}),
'rearLeftDoor': dict({
'extra_data': dict({
}),
'timestamp': '2024-12-30T14:20:20.570000+00:00',
'unit': None,
'value': 'CLOSED',
}),
'rearLeftWindow': dict({
'extra_data': dict({
}),
'timestamp': '2024-12-30T14:28:12.202000+00:00',
'unit': None,
'value': 'CLOSED',
}),
'rearRightDoor': dict({
'extra_data': dict({
}),
'timestamp': '2024-12-30T14:20:20.570000+00:00',
'unit': None,
'value': 'CLOSED',
}),
'rearRightWindow': dict({
'extra_data': dict({
}),
'timestamp': '2024-12-30T14:28:12.202000+00:00',
'unit': None,
'value': 'CLOSED',
}),
'sunroof': dict({
'extra_data': dict({
}),
'timestamp': '2024-12-30T14:28:12.202000+00:00',
'unit': None,
'value': 'UNSPECIFIED',
}),
'tailgate': dict({
'extra_data': dict({
}),
'timestamp': '2024-12-30T14:20:20.570000+00:00',
'unit': None,
'value': 'CLOSED',
}),
'tankLid': dict({
'extra_data': dict({
}),
'timestamp': '2024-12-30T14:20:20.570000+00:00',
'unit': None,
'value': 'CLOSED',
}),
}),
'Volvo medium interval coordinator': dict({
'batteryChargeLevel': dict({
'extra_data': dict({
'updated_at': '2025-07-02T08:51:23Z',
}),
'status': 'OK',
'timestamp': None,
'unit': 'percentage',
'value': 53,
}),
'chargerConnectionStatus': dict({
'extra_data': dict({
'updated_at': '2025-07-02T08:51:23Z',
}),
'status': 'OK',
'timestamp': None,
'unit': None,
'value': 'CONNECTED',
}),
'chargerPowerStatus': dict({
'extra_data': dict({
'updated_at': '2025-07-02T08:51:23Z',
}),
'status': 'OK',
'timestamp': None,
'unit': None,
'value': 'PROVIDING_POWER',
}),
'chargingCurrentLimit': dict({
'extra_data': dict({
'updated_at': '2024-03-05T08:38:44Z',
}),
'status': 'OK',
'timestamp': None,
'unit': 'ampere',
'value': 32,
}),
'chargingPower': dict({
'extra_data': dict({
'updated_at': '2025-07-02T08:51:23Z',
}),
'status': 'OK',
'timestamp': None,
'unit': 'watts',
'value': 1386,
}),
'chargingStatus': dict({
'extra_data': dict({
'updated_at': '2025-07-02T08:51:23Z',
}),
'status': 'OK',
'timestamp': None,
'unit': None,
'value': 'CHARGING',
}),
'chargingType': dict({
'extra_data': dict({
'updated_at': '2025-07-02T08:51:23Z',
}),
'status': 'OK',
'timestamp': None,
'unit': None,
'value': 'AC',
}),
'electricRange': dict({
'extra_data': dict({
'updated_at': '2025-07-02T08:51:23Z',
}),
'status': 'OK',
'timestamp': None,
'unit': 'mi',
'value': 150,
}),
'estimatedChargingTimeToTargetBatteryChargeLevel': dict({
'extra_data': dict({
'updated_at': '2025-07-02T08:51:23Z',
}),
'status': 'OK',
'timestamp': None,
'unit': 'minutes',
'value': 1440,
}),
'targetBatteryChargeLevel': dict({
'extra_data': dict({
'updated_at': '2024-09-22T09:40:12Z',
}),
'status': 'OK',
'timestamp': None,
'unit': 'percentage',
'value': 90,
}),
}),
'Volvo slow interval coordinator': dict({
'availabilityStatus': dict({
'extra_data': dict({
}),
'timestamp': '2024-12-30T14:32:26.169000+00:00',
'unit': None,
'value': 'AVAILABLE',
}),
}),
'Volvo very slow interval coordinator': dict({
'averageEnergyConsumption': dict({
'extra_data': dict({
}),
'timestamp': '2024-12-30T14:53:44.785000+00:00',
'unit': 'kWh/100km',
'value': 22.6,
}),
'averageSpeed': dict({
'extra_data': dict({
}),
'timestamp': '2024-12-30T14:18:56.849000+00:00',
'unit': 'km/h',
'value': 53,
}),
'averageSpeedAutomatic': dict({
'extra_data': dict({
}),
'timestamp': '2024-12-30T14:18:56.849000+00:00',
'unit': 'km/h',
'value': 26,
}),
'battery_capacity_kwh': dict({
'extra_data': dict({
}),
'value': 81.608,
}),
'brakeFluidLevelWarning': dict({
'extra_data': dict({
}),
'timestamp': '2024-12-30T14:18:56.849000+00:00',
'unit': None,
'value': 'NO_WARNING',
}),
'brakeLightCenterWarning': dict({
'extra_data': dict({
}),
'timestamp': '2024-12-30T14:18:56.849000+00:00',
'unit': None,
'value': 'NO_WARNING',
}),
'brakeLightLeftWarning': dict({
'extra_data': dict({
}),
'timestamp': '2024-12-30T14:18:56.849000+00:00',
'unit': None,
'value': 'NO_WARNING',
}),
'brakeLightRightWarning': dict({
'extra_data': dict({
}),
'timestamp': '2024-12-30T14:18:56.849000+00:00',
'unit': None,
'value': 'NO_WARNING',
}),
'daytimeRunningLightLeftWarning': dict({
'extra_data': dict({
}),
'timestamp': '2024-12-30T14:18:56.849000+00:00',
'unit': None,
'value': 'NO_WARNING',
}),
'daytimeRunningLightRightWarning': dict({
'extra_data': dict({
}),
'timestamp': '2024-12-30T14:18:56.849000+00:00',
'unit': None,
'value': 'NO_WARNING',
}),
'distanceToEmptyBattery': dict({
'extra_data': dict({
}),
'timestamp': '2024-12-30T14:30:08.338000+00:00',
'unit': 'km',
'value': 250,
}),
'distanceToService': dict({
'extra_data': dict({
}),
'timestamp': '2024-12-30T14:18:56.849000+00:00',
'unit': 'km',
'value': 29000,
}),
'engineCoolantLevelWarning': dict({
'extra_data': dict({
}),
'timestamp': '2024-12-30T14:18:56.849000+00:00',
'unit': None,
'value': 'NO_WARNING',
}),
'engineHoursToService': dict({
'extra_data': dict({
}),
'timestamp': '2024-12-30T14:18:56.849000+00:00',
'unit': 'h',
'value': 1266,
}),
'fogLightFrontWarning': dict({
'extra_data': dict({
}),
'timestamp': '2024-12-30T14:18:56.849000+00:00',
'unit': None,
'value': 'NO_WARNING',
}),
'fogLightRearWarning': dict({
'extra_data': dict({
}),
'timestamp': '2024-12-30T14:18:56.849000+00:00',
'unit': None,
'value': 'NO_WARNING',
}),
'frontLeft': dict({
'extra_data': dict({
}),
'timestamp': '2024-12-30T14:18:56.849000+00:00',
'unit': None,
'value': 'UNSPECIFIED',
}),
'frontRight': dict({
'extra_data': dict({
}),
'timestamp': '2024-12-30T14:18:56.849000+00:00',
'unit': None,
'value': 'UNSPECIFIED',
}),
'hazardLightsWarning': dict({
'extra_data': dict({
}),
'timestamp': '2024-12-30T14:18:56.849000+00:00',
'unit': None,
'value': 'UNSPECIFIED',
}),
'highBeamLeftWarning': dict({
'extra_data': dict({
}),
'timestamp': '2024-12-30T14:18:56.849000+00:00',
'unit': None,
'value': 'NO_WARNING',
}),
'highBeamRightWarning': dict({
'extra_data': dict({
}),
'timestamp': '2024-12-30T14:18:56.849000+00:00',
'unit': None,
'value': 'NO_WARNING',
}),
'lowBeamLeftWarning': dict({
'extra_data': dict({
}),
'timestamp': '2024-12-30T14:18:56.849000+00:00',
'unit': None,
'value': 'NO_WARNING',
}),
'lowBeamRightWarning': dict({
'extra_data': dict({
}),
'timestamp': '2024-12-30T14:18:56.849000+00:00',
'unit': None,
'value': 'NO_WARNING',
}),
'odometer': dict({
'extra_data': dict({
}),
'timestamp': '2024-12-30T14:18:56.849000+00:00',
'unit': 'km',
'value': 30000,
}),
'oilLevelWarning': dict({
'extra_data': dict({
}),
'timestamp': '2024-12-30T14:18:56.849000+00:00',
'unit': None,
'value': 'NO_WARNING',
}),
'positionLightFrontLeftWarning': dict({
'extra_data': dict({
}),
'timestamp': '2024-12-30T14:18:56.849000+00:00',
'unit': None,
'value': 'NO_WARNING',
}),
'positionLightFrontRightWarning': dict({
'extra_data': dict({
}),
'timestamp': '2024-12-30T14:18:56.849000+00:00',
'unit': None,
'value': 'NO_WARNING',
}),
'positionLightRearLeftWarning': dict({
'extra_data': dict({
}),
'timestamp': '2024-12-30T14:18:56.849000+00:00',
'unit': None,
'value': 'NO_WARNING',
}),
'positionLightRearRightWarning': dict({
'extra_data': dict({
}),
'timestamp': '2024-12-30T14:18:56.849000+00:00',
'unit': None,
'value': 'NO_WARNING',
}),
'rearLeft': dict({
'extra_data': dict({
}),
'timestamp': '2024-12-30T14:18:56.849000+00:00',
'unit': None,
'value': 'UNSPECIFIED',
}),
'rearRight': dict({
'extra_data': dict({
}),
'timestamp': '2024-12-30T14:18:56.849000+00:00',
'unit': None,
'value': 'UNSPECIFIED',
}),
'registrationPlateLightWarning': dict({
'extra_data': dict({
}),
'timestamp': '2024-12-30T14:18:56.849000+00:00',
'unit': None,
'value': 'NO_WARNING',
}),
'reverseLightsWarning': dict({
'extra_data': dict({
}),
'timestamp': '2024-12-30T14:18:56.849000+00:00',
'unit': None,
'value': 'UNSPECIFIED',
}),
'serviceWarning': dict({
'extra_data': dict({
}),
'timestamp': '2024-12-30T14:18:56.849000+00:00',
'unit': None,
'value': 'NO_WARNING',
}),
'sideMarkLightsWarning': dict({
'extra_data': dict({
}),
'timestamp': '2024-12-30T14:18:56.849000+00:00',
'unit': None,
'value': 'NO_WARNING',
}),
'timeToService': dict({
'extra_data': dict({
}),
'timestamp': '2024-12-30T14:18:56.849000+00:00',
'unit': 'months',
'value': 23,
}),
'tripMeterAutomatic': dict({
'extra_data': dict({
}),
'timestamp': '2024-12-30T14:18:56.849000+00:00',
'unit': 'km',
'value': 18.2,
}),
'tripMeterManual': dict({
'extra_data': dict({
}),
'timestamp': '2024-12-30T14:18:56.849000+00:00',
'unit': 'km',
'value': 3822.9,
}),
'turnIndicationFrontLeftWarning': dict({
'extra_data': dict({
}),
'timestamp': '2024-12-30T14:18:56.849000+00:00',
'unit': None,
'value': 'NO_WARNING',
}),
'turnIndicationFrontRightWarning': dict({
'extra_data': dict({
}),
'timestamp': '2024-12-30T14:18:56.849000+00:00',
'unit': None,
'value': 'NO_WARNING',
}),
'turnIndicationRearLeftWarning': dict({
'extra_data': dict({
}),
'timestamp': '2024-12-30T14:18:56.849000+00:00',
'unit': None,
'value': 'NO_WARNING',
}),
'turnIndicationRearRightWarning': dict({
'extra_data': dict({
}),
'timestamp': '2024-12-30T14:18:56.849000+00:00',
'unit': None,
'value': 'NO_WARNING',
}),
'washerFluidLevelWarning': dict({
'extra_data': dict({
}),
'timestamp': '2024-12-30T14:18:56.849000+00:00',
'unit': None,
'value': 'NO_WARNING',
}),
}),
'entry_data': dict({
'api_key': '**REDACTED**',
'auth_implementation': 'volvo',
'token': dict({
'access_token': '**REDACTED**',
'expires_at': 1759919745.7328658,
'expires_in': 60,
'refresh_token': '**REDACTED**',
'token_type': 'Bearer',
}),
'vin': '**REDACTED**',
}),
'vehicle': dict({
'battery_capacity_kwh': 81.608,
'description': dict({
'extra_data': dict({
}),
'model': 'XC40',
'steering': 'LEFT',
'upholstery': 'null',
}),
'external_colour': 'Silver Dawn',
'extra_data': dict({
}),
'fuel_type': 'ELECTRIC',
'gearbox': 'AUTOMATIC',
'images': dict({
'exterior_image_url': 'https://cas.volvocars.com/image/dynamic/MY24_0000/123/exterior-v4/_/default.png?market=se&client=public-api-engineering&angle=1&bg=00000000&w=1920',
'extra_data': dict({
}),
'internal_image_url': 'https://cas.volvocars.com/image/dynamic/MY24_0000/123/interior-v4/_/default.jpg?market=se&client=public-api-engineering&angle=0&w=1920',
}),
'model_year': 2024,
'vin': '**REDACTED**',
}),
})
# ---

View File

@@ -0,0 +1,35 @@
"""Test Volvo diagnostics."""

from collections.abc import Awaitable, Callable

import pytest
from syrupy.assertion import SnapshotAssertion

from homeassistant.const import CONF_TOKEN
from homeassistant.core import HomeAssistant

from tests.common import MockConfigEntry
from tests.components.diagnostics import get_diagnostics_for_config_entry
from tests.typing import ClientSessionGenerator


@pytest.mark.usefixtures("mock_api")
async def test_entry_diagnostics(
hass: HomeAssistant,
setup_integration: Callable[[], Awaitable[bool]],
hass_client: ClientSessionGenerator,
snapshot: SnapshotAssertion,
mock_config_entry: MockConfigEntry,
) -> None:
"""Test config entry diagnostics."""
assert await setup_integration()
await hass.async_block_till_done()
# Give it a fixed timestamp so it won't change with every test run
mock_config_entry.data[CONF_TOKEN]["expires_at"] = 1759919745.7328658
assert (
await get_diagnostics_for_config_entry(hass, hass_client, mock_config_entry)
== snapshot
)
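
The **REDACTED** values in the accompanying snapshot indicate that the diagnostics handler masks credentials before returning the config entry data. A minimal sketch of that pattern using Home Assistant's redaction helper; the exact key set and return structure are assumptions:

from typing import Any

from homeassistant.components.diagnostics import async_redact_data
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant

TO_REDACT = {"api_key", "access_token", "refresh_token", "vin"}


async def async_get_config_entry_diagnostics(
    hass: HomeAssistant, entry: ConfigEntry
) -> dict[str, Any]:
    """Sketch: return entry data with credentials masked."""
    return {
        "entry_data": async_redact_data(dict(entry.data), TO_REDACT),
        # The real handler also dumps the coordinator data shown in the snapshot.
    }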

View File

@@ -6,7 +6,7 @@ import dataclasses
from datetime import timedelta
import logging
import threading
from typing import Any
from typing import Any, override
from unittest.mock import MagicMock, PropertyMock, patch
from freezegun.api import FrozenDateTimeFactory
@@ -20,7 +20,9 @@ from homeassistant.config_entries import ConfigEntry
from homeassistant.const import (
ATTR_ATTRIBUTION,
ATTR_DEVICE_CLASS,
ATTR_ENTITY_ID,
ATTR_FRIENDLY_NAME,
ATTR_INCLUDED_ENTITIES,
STATE_UNAVAILABLE,
STATE_UNKNOWN,
EntityCategory,
@@ -2896,3 +2898,108 @@ async def test_platform_state_write_from_init_unique_id(
# The early attempt to write is interpreted as a unique ID collision
assert "Platform test_platform does not generate unique IDs." in caplog.text
assert "Entity id already exists - ignoring: test.test" not in caplog.text


async def test_included_entities_mixin(
hass: HomeAssistant,
entity_registry: er.EntityRegistry,
) -> None:
"""Test included entities attribute."""
entity_registry.async_get_or_create(
domain="hello",
platform="hello",
unique_id="very_unique_oceans",
suggested_object_id="oceans",
)
entity_registry.async_get_or_create(
domain="hello",
platform="hello",
unique_id="very_unique_continents",
suggested_object_id="continents",
)
entity_registry.async_get_or_create(
domain="hello",
platform="hello",
unique_id="very_unique_moon",
suggested_object_id="moon",
)
class MockHelloBaseClass(entity.Entity):
"""Domain base entity platform domain Hello."""
@property
@override
def capability_attributes(self) -> dict[str, Any] | None:
"""Return the capability attributes."""
if included_entities := getattr(self, ATTR_INCLUDED_ENTITIES):
return {ATTR_ENTITY_ID: included_entities}
return None
class MockHelloIncludedEntitiesClass(
MockHelloBaseClass, entity.IncludedEntitiesMixin
):
""".Mock hello grouped entity class for a test integration."""
platform = MockEntityPlatform(hass, domain="hello")
mock_entity = MockHelloIncludedEntitiesClass()
mock_entity.hass = hass
mock_entity.entity_id = "hello.universe"
mock_entity.unique_id = "very_unique_universe"
await platform.async_add_entities([mock_entity])
    # Initialize the mock grouped entity for the hello domain
mock_entity.async_set_included_entities(
"hello", ["very_unique_continents", "very_unique_oceans"]
)
mock_entity.async_schedule_update_ha_state(True)
await hass.async_block_till_done()
state = hass.states.get(mock_entity.entity_id)
assert state.attributes.get(ATTR_ENTITY_ID) == ["hello.continents", "hello.oceans"]
# Add an entity to the group of included entities
mock_entity.async_set_included_entities(
"hello", ["very_unique_continents", "very_unique_moon", "very_unique_oceans"]
)
mock_entity.async_schedule_update_ha_state(True)
await hass.async_block_till_done()
state = hass.states.get(mock_entity.entity_id)
assert state.attributes.get(ATTR_ENTITY_ID) == [
"hello.continents",
"hello.moon",
"hello.oceans",
]
# Remove an entity from the group of included entities
mock_entity.async_set_included_entities(
"hello", ["very_unique_moon", "very_unique_oceans"]
)
mock_entity.async_schedule_update_ha_state(True)
await hass.async_block_till_done()
state = hass.states.get(mock_entity.entity_id)
assert state.attributes.get(ATTR_ENTITY_ID) == ["hello.moon", "hello.oceans"]
    # Rename an included entity via the entity registry
entity_registry.async_update_entity(
entity_id="hello.moon", new_entity_id="hello.moon_light"
)
await hass.async_block_till_done()
state = hass.states.get(mock_entity.entity_id)
assert state.attributes.get(ATTR_ENTITY_ID) == ["hello.moon_light", "hello.oceans"]
    # Remove an included entity from the entity registry
entity_registry.async_remove(entity_id="hello.oceans")
await hass.async_block_till_done()
state = hass.states.get(mock_entity.entity_id)
assert state.attributes.get(ATTR_ENTITY_ID) == ["hello.moon_light"]