Compare commits

..

3 Commits

Author  SHA1  Message  Date
Mike Degatano  40cb7e8fdf  Add jobs info mock to switch tests  2025-10-07 15:54:32 -04:00
Mike Degatano  d8051c5caa  Add tests  2025-10-07 15:54:32 -04:00
Mike Degatano  94b4a7978c  Add progress reporting for addon/core updates  2025-10-07 15:54:32 -04:00
108 changed files with 1073 additions and 3107 deletions

View File

@@ -741,7 +741,7 @@ jobs:
- name: Generate partial mypy restore key
id: generate-mypy-key
run: |
mypy_version=$(cat requirements_test.txt | grep 'mypy.*=' | cut -d '=' -f 3)
mypy_version=$(cat requirements_test.txt | grep mypy | cut -d '=' -f 3)
echo "version=$mypy_version" >> $GITHUB_OUTPUT
echo "key=mypy-${{ env.MYPY_CACHE_VERSION }}-$mypy_version-${{
env.HA_SHORT_VERSION }}-$(date -u '+%Y-%m-%dT%H:%M:%s')" >> $GITHUB_OUTPUT

View File

@@ -24,11 +24,11 @@ jobs:
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- name: Initialize CodeQL
uses: github/codeql-action/init@e296a935590eb16afc0c0108289f68c87e2a89a5 # v4.30.7
uses: github/codeql-action/init@64d10c13136e1c5bce3e5fbde8d4906eeaafc885 # v3.30.6
with:
languages: python
- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@e296a935590eb16afc0c0108289f68c87e2a89a5 # v4.30.7
uses: github/codeql-action/analyze@64d10c13136e1c5bce3e5fbde8d4906eeaafc885 # v3.30.6
with:
category: "/language:python"

CODEOWNERS (generated), 4 lines changed
View File

@@ -1413,8 +1413,8 @@ build.json @home-assistant/supervisor
/tests/components/sfr_box/ @epenet
/homeassistant/components/sftp_storage/ @maretodoric
/tests/components/sftp_storage/ @maretodoric
/homeassistant/components/sharkiq/ @JeffResc @funkybunch @TheOneOgre
/tests/components/sharkiq/ @JeffResc @funkybunch @TheOneOgre
/homeassistant/components/sharkiq/ @JeffResc @funkybunch
/tests/components/sharkiq/ @JeffResc @funkybunch
/homeassistant/components/shell_command/ @home-assistant/core
/tests/components/shell_command/ @home-assistant/core
/homeassistant/components/shelly/ @bieniu @thecode @chemelli74 @bdraco

View File

@@ -71,4 +71,4 @@ POLLEN_CATEGORY_MAP = {
}
UPDATE_INTERVAL_OBSERVATION = timedelta(minutes=10)
UPDATE_INTERVAL_DAILY_FORECAST = timedelta(hours=6)
UPDATE_INTERVAL_HOURLY_FORECAST = timedelta(minutes=30)
UPDATE_INTERVAL_HOURLY_FORECAST = timedelta(hours=30)

View File

@@ -7,8 +7,6 @@ from typing import Any
from pyaprilaire.const import Attribute
from homeassistant.components.climate import (
ATTR_TARGET_TEMP_HIGH,
ATTR_TARGET_TEMP_LOW,
FAN_AUTO,
FAN_ON,
PRESET_AWAY,
@@ -18,12 +16,7 @@ from homeassistant.components.climate import (
HVACAction,
HVACMode,
)
from homeassistant.const import (
ATTR_TEMPERATURE,
PRECISION_HALVES,
PRECISION_WHOLE,
UnitOfTemperature,
)
from homeassistant.const import PRECISION_HALVES, PRECISION_WHOLE, UnitOfTemperature
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
@@ -239,15 +232,15 @@ class AprilaireClimate(BaseAprilaireEntity, ClimateEntity):
cool_setpoint = 0
heat_setpoint = 0
if temperature := kwargs.get(ATTR_TEMPERATURE):
if temperature := kwargs.get("temperature"):
if self.coordinator.data.get(Attribute.MODE) == 3:
cool_setpoint = temperature
else:
heat_setpoint = temperature
else:
if target_temp_low := kwargs.get(ATTR_TARGET_TEMP_LOW):
if target_temp_low := kwargs.get("target_temp_low"):
heat_setpoint = target_temp_low
if target_temp_high := kwargs.get(ATTR_TARGET_TEMP_HIGH):
if target_temp_high := kwargs.get("target_temp_high"):
cool_setpoint = target_temp_high
if cool_setpoint == 0 and heat_setpoint == 0:

View File

@@ -7,14 +7,12 @@ from typing import Any
from evolutionhttp import BryantEvolutionLocalClient
from homeassistant.components.climate import (
ATTR_TARGET_TEMP_HIGH,
ATTR_TARGET_TEMP_LOW,
ClimateEntity,
ClimateEntityFeature,
HVACAction,
HVACMode,
)
from homeassistant.const import ATTR_TEMPERATURE, UnitOfTemperature
from homeassistant.const import UnitOfTemperature
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers.device_registry import DeviceInfo
@@ -210,24 +208,24 @@ class BryantEvolutionClimate(ClimateEntity):
async def async_set_temperature(self, **kwargs: Any) -> None:
"""Set new target temperature."""
if value := kwargs.get(ATTR_TARGET_TEMP_HIGH):
temp = int(value)
if kwargs.get("target_temp_high"):
temp = int(kwargs["target_temp_high"])
if not await self._client.set_cooling_setpoint(temp):
raise HomeAssistantError(
translation_domain=DOMAIN, translation_key="failed_to_set_clsp"
)
self._attr_target_temperature_high = temp
if value := kwargs.get(ATTR_TARGET_TEMP_LOW):
temp = int(value)
if kwargs.get("target_temp_low"):
temp = int(kwargs["target_temp_low"])
if not await self._client.set_heating_setpoint(temp):
raise HomeAssistantError(
translation_domain=DOMAIN, translation_key="failed_to_set_htsp"
)
self._attr_target_temperature_low = temp
if value := kwargs.get(ATTR_TEMPERATURE):
temp = int(value)
if kwargs.get("temperature"):
temp = int(kwargs["temperature"])
fn = (
self._client.set_heating_setpoint
if self.hvac_mode == HVACMode.HEAT

View File

@@ -169,7 +169,7 @@ class CalendarEventListener:
def __init__(
self,
hass: HomeAssistant,
job: HassJob[..., Coroutine[Any, Any, None] | Any],
job: HassJob[..., Coroutine[Any, Any, None]],
trigger_data: dict[str, Any],
fetcher: QueuedEventFetcher,
) -> None:

View File

@@ -514,7 +514,7 @@ class ChatLog:
"""Set the LLM system prompt."""
llm_api: llm.APIInstance | None = None
if not user_llm_hass_api:
if user_llm_hass_api is None:
pass
elif isinstance(user_llm_hass_api, llm.API):
llm_api = await user_llm_hass_api.async_get_api_instance(llm_context)

View File

@@ -6,5 +6,5 @@
"documentation": "https://www.home-assistant.io/integrations/environment_canada",
"iot_class": "cloud_polling",
"loggers": ["env_canada"],
"requirements": ["env-canada==0.11.3"]
"requirements": ["env-canada==0.11.2"]
}

View File

@@ -17,7 +17,7 @@
"mqtt": ["esphome/discover/#"],
"quality_scale": "platinum",
"requirements": [
"aioesphomeapi==41.13.0",
"aioesphomeapi==41.12.0",
"esphome-dashboard-api==1.3.0",
"bleak-esphome==3.4.0"
],

View File

@@ -29,12 +29,7 @@ from homeassistant.components.climate import (
ClimateEntityFeature,
HVACMode,
)
from homeassistant.const import (
ATTR_MODE,
ATTR_TEMPERATURE,
PRECISION_TENTHS,
UnitOfTemperature,
)
from homeassistant.const import ATTR_MODE, PRECISION_TENTHS, UnitOfTemperature
from homeassistant.core import HomeAssistant, callback
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers.entity_platform import AddEntitiesCallback
@@ -248,7 +243,7 @@ class EvoZone(EvoChild, EvoClimateEntity):
async def async_set_temperature(self, **kwargs: Any) -> None:
"""Set a new target temperature."""
temperature = kwargs[ATTR_TEMPERATURE]
temperature = kwargs["temperature"]
if (until := kwargs.get("until")) is None:
if self._evo_device.mode == EvoZoneMode.TEMPORARY_OVERRIDE:

View File

@@ -68,6 +68,7 @@ EVENT_HEALTH_CHANGED = "health_changed"
EVENT_SUPPORTED_CHANGED = "supported_changed"
EVENT_ISSUE_CHANGED = "issue_changed"
EVENT_ISSUE_REMOVED = "issue_removed"
EVENT_JOB = "job"
UPDATE_KEY_SUPERVISOR = "supervisor"

View File

@@ -56,6 +56,7 @@ from .const import (
SupervisorEntityModel,
)
from .handler import HassioAPIError, get_supervisor_client
from .jobs import SupervisorJobs
if TYPE_CHECKING:
from .issues import SupervisorIssues
@@ -311,6 +312,7 @@ class HassioDataUpdateCoordinator(DataUpdateCoordinator):
lambda: defaultdict(set)
)
self.supervisor_client = get_supervisor_client(hass)
self.jobs = SupervisorJobs(hass)
async def _async_update_data(self) -> dict[str, Any]:
"""Update data via library."""
@@ -485,6 +487,9 @@ class HassioDataUpdateCoordinator(DataUpdateCoordinator):
)
)
# Refresh jobs data
await self.jobs.refresh_data(first_update)
async def _update_addon_stats(self, slug: str) -> tuple[str, dict[str, Any] | None]:
"""Update single addon stats."""
try:

View File

@@ -0,0 +1,157 @@
"""Track Supervisor job data and allow subscription to updates."""
from collections.abc import Callable
from dataclasses import dataclass, replace
from functools import partial
from typing import Any
from uuid import UUID
from aiohasupervisor.models import Job
from homeassistant.core import (
CALLBACK_TYPE,
HomeAssistant,
callback,
is_callback_check_partial,
)
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from .const import (
ATTR_DATA,
ATTR_UPDATE_KEY,
ATTR_WS_EVENT,
EVENT_JOB,
EVENT_SUPERVISOR_EVENT,
EVENT_SUPERVISOR_UPDATE,
UPDATE_KEY_SUPERVISOR,
)
from .handler import get_supervisor_client
@dataclass(slots=True, frozen=True)
class JobSubscription:
"""Subscribe for updates on jobs which match filters.
UUID is the preferred match but is only available for background APIs that
return the UUID before taking the action. The other fields are used to match
jobs only when the UUID is omitted. Either name or UUID is required for a match.
event_callback must be annotated as a homeassistant.core.callback
and be safe to call in the event loop.
"""
event_callback: Callable[[Job], Any]
uuid: str | None = None
name: str | None = None
reference: str | None | type[Any] = Any
def __post_init__(self) -> None:
"""Validate at least one filter option is present."""
if not self.name and not self.uuid:
raise ValueError("Either name or uuid must be provided!")
if not is_callback_check_partial(self.event_callback):
raise ValueError("event_callback must be a homeassistant.core.callback!")
def matches(self, job: Job) -> bool:
"""Return true if job matches subscription filters."""
if self.uuid:
return job.uuid == self.uuid
return job.name == self.name and self.reference in (Any, job.reference)
class SupervisorJobs:
"""Manage access to Supervisor jobs."""
def __init__(self, hass: HomeAssistant) -> None:
"""Initialize object."""
self._hass = hass
self._supervisor_client = get_supervisor_client(hass)
self._jobs: dict[UUID, Job] = {}
self._subscriptions: set[JobSubscription] = set()
@property
def current_jobs(self) -> list[Job]:
"""Return current jobs."""
return list(self._jobs.values())
def subscribe(self, subscription: JobSubscription) -> CALLBACK_TYPE:
"""Subscribe to updates for job. Return callback is used to unsubscribe.
If any jobs match the subscription at the time this is called, creates
tasks to run their callback on it.
"""
self._subscriptions.add(subscription)
# As these are callbacks they are safe to run in the event loop
# We wrap these in an asyncio task so subscribing does not wait on the logic
if matches := [job for job in self._jobs.values() if subscription.matches(job)]:
async def event_callback_async(job: Job) -> Any:
return subscription.event_callback(job)
for match in matches:
self._hass.async_create_task(event_callback_async(match))
return partial(self._subscriptions.discard, subscription)
async def refresh_data(self, first_update: bool = False) -> None:
"""Refresh job data."""
job_data = await self._supervisor_client.jobs.info()
job_queue: list[Job] = job_data.jobs.copy()
new_jobs: dict[UUID, Job] = {}
changed_jobs: list[Job] = []
# Rebuild our job cache from new info and compare to find changes
while job_queue:
job = job_queue.pop(0)
job_queue.extend(job.child_jobs)
job = replace(job, child_jobs=[])
if job.uuid not in self._jobs or job != self._jobs[job.uuid]:
changed_jobs.append(job)
new_jobs[job.uuid] = replace(job, child_jobs=[])
# For any jobs that disappeared while not yet done, tell subscribers they
# changed to done. We don't know what else happened to them, so leave the
# rest of their state as is rather than guessing
changed_jobs.extend(
[
replace(job, done=True)
for uuid, job in self._jobs.items()
if uuid not in new_jobs and job.done is False
]
)
# Replace our cache and inform subscribers of all changes
self._jobs = new_jobs
for job in changed_jobs:
self._process_job_change(job)
# If this is the first update register to receive Supervisor events
if first_update:
async_dispatcher_connect(
self._hass, EVENT_SUPERVISOR_EVENT, self._supervisor_events_to_jobs
)
@callback
def _supervisor_events_to_jobs(self, event: dict[str, Any]) -> None:
"""Update job data cache from supervisor events."""
if ATTR_WS_EVENT not in event:
return
if (
event[ATTR_WS_EVENT] == EVENT_SUPERVISOR_UPDATE
and event.get(ATTR_UPDATE_KEY) == UPDATE_KEY_SUPERVISOR
):
self._hass.async_create_task(self.refresh_data())
elif event[ATTR_WS_EVENT] == EVENT_JOB:
job = Job.from_dict(event[ATTR_DATA] | {"child_jobs": []})
self._jobs[job.uuid] = job
self._process_job_change(job)
def _process_job_change(self, job: Job) -> None:
"""Process a job change by triggering callbacks on subscribers."""
for sub in self._subscriptions:
if sub.matches(job):
sub.event_callback(job)
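
For quick reference, the matching rules implemented by JobSubscription.matches above can be exercised with the minimal standalone sketch below. It is not part of the change: it uses a stub dataclass in place of aiohasupervisor.models.Job, and the "core_ssh" reference is an illustrative addon slug.

from dataclasses import dataclass
from typing import Any


@dataclass
class StubJob:
    """Stand-in for aiohasupervisor.models.Job (illustrative only)."""

    uuid: str
    name: str
    reference: str | None


def matches(uuid: str | None, name: str | None, reference: object, job: StubJob) -> bool:
    # Mirrors JobSubscription.matches: a UUID filter wins outright; otherwise the
    # name must match and the reference must be the Any wildcard or equal.
    if uuid:
        return job.uuid == uuid
    return job.name == name and reference in (Any, job.reference)


job = StubJob(uuid="abc123", name="addon_manager_update", reference="core_ssh")
assert matches("abc123", None, Any, job)                        # match by UUID
assert matches(None, "addon_manager_update", Any, job)          # name, any reference
assert matches(None, "addon_manager_update", "core_ssh", job)   # name and exact reference
assert not matches(None, "addon_manager_update", "other", job)  # reference mismatch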

View File

@@ -6,6 +6,7 @@ import re
from typing import Any
from aiohasupervisor import SupervisorError
from aiohasupervisor.models import Job
from awesomeversion import AwesomeVersion, AwesomeVersionStrategy
from homeassistant.components.update import (
@@ -15,7 +16,7 @@ from homeassistant.components.update import (
)
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import ATTR_ICON, ATTR_NAME
from homeassistant.core import HomeAssistant
from homeassistant.core import HomeAssistant, callback
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
@@ -35,6 +36,7 @@ from .entity import (
HassioOSEntity,
HassioSupervisorEntity,
)
from .jobs import JobSubscription
from .update_helper import update_addon, update_core, update_os
ENTITY_DESCRIPTION = UpdateEntityDescription(
@@ -89,6 +91,7 @@ class SupervisorAddonUpdateEntity(HassioAddonEntity, UpdateEntity):
UpdateEntityFeature.INSTALL
| UpdateEntityFeature.BACKUP
| UpdateEntityFeature.RELEASE_NOTES
| UpdateEntityFeature.PROGRESS
)
@property
@@ -154,6 +157,30 @@ class SupervisorAddonUpdateEntity(HassioAddonEntity, UpdateEntity):
)
await self.coordinator.async_refresh()
@callback
def _update_job_changed(self, job: Job) -> None:
"""Process update for this entity's update job."""
if job.done is False:
self._attr_in_progress = True
self._attr_update_percentage = job.progress
else:
self._attr_in_progress = False
self._attr_update_percentage = None
self.async_write_ha_state()
async def async_added_to_hass(self) -> None:
"""Subscribe to progress updates."""
await super().async_added_to_hass()
self.async_on_remove(
self.coordinator.jobs.subscribe(
JobSubscription(
self._update_job_changed,
name="addon_manager_update",
reference=self._addon_slug,
)
)
)
class SupervisorOSUpdateEntity(HassioOSEntity, UpdateEntity):
"""Update entity to handle updates for the Home Assistant Operating System."""
@@ -250,6 +277,7 @@ class SupervisorCoreUpdateEntity(HassioCoreEntity, UpdateEntity):
UpdateEntityFeature.INSTALL
| UpdateEntityFeature.SPECIFIC_VERSION
| UpdateEntityFeature.BACKUP
| UpdateEntityFeature.PROGRESS
)
_attr_title = "Home Assistant Core"
@@ -281,3 +309,25 @@ class SupervisorCoreUpdateEntity(HassioCoreEntity, UpdateEntity):
) -> None:
"""Install an update."""
await update_core(self.hass, version, backup)
@callback
def _update_job_changed(self, job: Job) -> None:
"""Process update for this entity's update job."""
if job.done is False:
self._attr_in_progress = True
self._attr_update_percentage = job.progress
else:
self._attr_in_progress = False
self._attr_update_percentage = None
self.async_write_ha_state()
async def async_added_to_hass(self) -> None:
"""Subscribe to progress updates."""
await super().async_added_to_hass()
self.async_on_remove(
self.coordinator.jobs.subscribe(
JobSubscription(
self._update_job_changed, name="home_assistant_core_update"
)
)
)
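
For clarity, the progress handling that both update entities above share boils down to a tiny pure function; this is only an illustrative sketch, while the real callbacks set _attr_in_progress and _attr_update_percentage and then write the entity state.

def job_to_update_state(done: bool, progress: float | None) -> tuple[bool, float | None]:
    # Mirrors _update_job_changed: while the Supervisor job is running the entity
    # reports in-progress with the job's percentage; once done, both are cleared.
    if not done:
        return True, progress
    return False, None


assert job_to_update_state(False, 42.0) == (True, 42.0)
assert job_to_update_state(True, 100.0) == (False, None)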

View File

@@ -5,5 +5,5 @@
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/holiday",
"iot_class": "local_polling",
"requirements": ["holidays==0.82", "babel==2.15.0"]
"requirements": ["holidays==0.81", "babel==2.15.0"]
}

View File

@@ -456,7 +456,7 @@ class HomeAccessory(Accessory): # type: ignore[misc]
return self._available
@ha_callback
@pyhap_callback # type: ignore[untyped-decorator]
@pyhap_callback # type: ignore[misc]
def run(self) -> None:
"""Handle accessory driver started event."""
if state := self.hass.states.get(self.entity_id):
@@ -725,7 +725,7 @@ class HomeDriver(AccessoryDriver): # type: ignore[misc]
self._entry_title = entry_title
self.iid_storage = iid_storage
@pyhap_callback # type: ignore[untyped-decorator]
@pyhap_callback # type: ignore[misc]
def pair(
self, client_username_bytes: bytes, client_public: str, client_permissions: int
) -> bool:
@@ -735,7 +735,7 @@ class HomeDriver(AccessoryDriver): # type: ignore[misc]
async_dismiss_setup_message(self.hass, self.entry_id)
return cast(bool, success)
@pyhap_callback # type: ignore[untyped-decorator]
@pyhap_callback # type: ignore[misc]
def unpair(self, client_uuid: UUID) -> None:
"""Override super function to show setup message if unpaired."""
super().unpair(client_uuid)

View File

@@ -71,7 +71,7 @@ class HomeDoorbellAccessory(HomeAccessory):
self.async_update_doorbell_state(None, state)
@ha_callback
@pyhap_callback # type: ignore[untyped-decorator]
@pyhap_callback # type: ignore[misc]
def run(self) -> None:
"""Handle doorbell event."""
if self._char_doorbell_detected:

View File

@@ -219,7 +219,7 @@ class AirPurifier(Fan):
return preset_mode.lower() != "auto"
@callback
@pyhap_callback # type: ignore[untyped-decorator]
@pyhap_callback # type: ignore[misc]
def run(self) -> None:
"""Handle accessory driver started event.

View File

@@ -229,7 +229,7 @@ class Camera(HomeDoorbellAccessory, PyhapCamera): # type: ignore[misc]
)
self._async_update_motion_state(None, state)
@pyhap_callback # type: ignore[untyped-decorator]
@pyhap_callback # type: ignore[misc]
@callback
def run(self) -> None:
"""Handle accessory driver started event.

View File

@@ -127,7 +127,7 @@ class GarageDoorOpener(HomeAccessory):
self.async_update_state(state)
@callback
@pyhap_callback # type: ignore[untyped-decorator]
@pyhap_callback # type: ignore[misc]
def run(self) -> None:
"""Handle accessory driver started event.

View File

@@ -178,7 +178,7 @@ class HumidifierDehumidifier(HomeAccessory):
self._async_update_current_humidity(humidity_state)
@callback
@pyhap_callback # type: ignore[untyped-decorator]
@pyhap_callback # type: ignore[misc]
def run(self) -> None:
"""Handle accessory driver started event.

View File

@@ -108,7 +108,7 @@ class DeviceTriggerAccessory(HomeAccessory):
_LOGGER.log,
)
@pyhap_callback # type: ignore[untyped-decorator]
@pyhap_callback # type: ignore[misc]
@callback
def run(self) -> None:
"""Run the accessory."""

View File

@@ -5,6 +5,6 @@
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/imgw_pib",
"iot_class": "cloud_polling",
"quality_scale": "platinum",
"quality_scale": "silver",
"requirements": ["imgw_pib==1.5.6"]
}

View File

@@ -50,17 +50,17 @@ rules:
discovery:
status: exempt
comment: The integration is a cloud service and thus does not support discovery.
docs-data-update: done
docs-examples: done
docs-known-limitations: done
docs-data-update: todo
docs-examples: todo
docs-known-limitations: todo
docs-supported-devices:
status: exempt
comment: This is a service, which doesn't integrate with any devices.
docs-supported-functions: done
docs-supported-functions: todo
docs-troubleshooting:
status: exempt
comment: No known issues that could be resolved by the user.
docs-use-cases: done
docs-use-cases: todo
dynamic-devices:
status: exempt
comment: This integration has a fixed single service.

View File

@@ -1,36 +1 @@
"""The london_underground component."""
from __future__ import annotations
from homeassistant.const import Platform
from homeassistant.core import HomeAssistant
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from .const import DOMAIN as DOMAIN
from .coordinator import LondonTubeCoordinator, LondonUndergroundConfigEntry, TubeData
PLATFORMS: list[Platform] = [Platform.SENSOR]
async def async_setup_entry(
hass: HomeAssistant, entry: LondonUndergroundConfigEntry
) -> bool:
"""Set up London Underground from a config entry."""
session = async_get_clientsession(hass)
data = TubeData(session)
coordinator = LondonTubeCoordinator(hass, data, config_entry=entry)
await coordinator.async_config_entry_first_refresh()
entry.runtime_data = coordinator
# Forward the setup to the sensor platform
await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
return True
async def async_unload_entry(
hass: HomeAssistant, entry: LondonUndergroundConfigEntry
) -> bool:
"""Unload a config entry."""
return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)

View File

@@ -1,152 +0,0 @@
"""Config flow for London Underground integration."""
from __future__ import annotations
import asyncio
import logging
from typing import Any
from london_tube_status import TubeData
import voluptuous as vol
from homeassistant.config_entries import (
ConfigEntry,
ConfigFlow,
ConfigFlowResult,
OptionsFlowWithReload,
)
from homeassistant.core import callback
from homeassistant.helpers import selector
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.typing import ConfigType
from .const import CONF_LINE, DEFAULT_LINES, DOMAIN, TUBE_LINES
_LOGGER = logging.getLogger(__name__)
class LondonUndergroundConfigFlow(ConfigFlow, domain=DOMAIN):
"""Handle a config flow for London Underground."""
VERSION = 1
MINOR_VERSION = 1
@staticmethod
@callback
def async_get_options_flow(
_: ConfigEntry,
) -> LondonUndergroundOptionsFlow:
"""Get the options flow for this handler."""
return LondonUndergroundOptionsFlow()
async def async_step_user(
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
"""Handle the initial step."""
errors: dict[str, str] = {}
if user_input is not None:
session = async_get_clientsession(self.hass)
data = TubeData(session)
try:
async with asyncio.timeout(10):
await data.update()
except TimeoutError:
errors["base"] = "timeout_connect"
except Exception:
_LOGGER.exception("Unexpected error")
errors["base"] = "cannot_connect"
else:
return self.async_create_entry(
title="London Underground",
data={},
options={CONF_LINE: user_input.get(CONF_LINE, DEFAULT_LINES)},
)
return self.async_show_form(
step_id="user",
data_schema=vol.Schema(
{
vol.Optional(
CONF_LINE,
default=DEFAULT_LINES,
): selector.SelectSelector(
selector.SelectSelectorConfig(
options=TUBE_LINES,
multiple=True,
mode=selector.SelectSelectorMode.DROPDOWN,
)
),
}
),
errors=errors,
)
async def async_step_import(self, import_data: ConfigType) -> ConfigFlowResult:
"""Handle import from configuration.yaml."""
session = async_get_clientsession(self.hass)
data = TubeData(session)
try:
async with asyncio.timeout(10):
await data.update()
except Exception:
_LOGGER.exception(
"Unexpected error trying to connect before importing config, aborting import "
)
return self.async_abort(reason="cannot_connect")
_LOGGER.warning(
"Importing London Underground config from configuration.yaml: %s",
import_data,
)
# Extract lines from the sensor platform config
lines = import_data.get(CONF_LINE, DEFAULT_LINES)
if "London Overground" in lines:
_LOGGER.warning(
"London Overground was removed from the configuration as the line has been divided and renamed"
)
lines.remove("London Overground")
return self.async_create_entry(
title="London Underground",
data={},
options={CONF_LINE: import_data.get(CONF_LINE, DEFAULT_LINES)},
)
class LondonUndergroundOptionsFlow(OptionsFlowWithReload):
"""Handle options."""
async def async_step_init(
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
"""Manage the options."""
if user_input is not None:
_LOGGER.debug(
"Updating london underground with options flow user_input: %s",
user_input,
)
return self.async_create_entry(
title="",
data={CONF_LINE: user_input[CONF_LINE]},
)
return self.async_show_form(
step_id="init",
data_schema=vol.Schema(
{
vol.Optional(
CONF_LINE,
default=self.config_entry.options.get(
CONF_LINE,
self.config_entry.data.get(CONF_LINE, DEFAULT_LINES),
),
): selector.SelectSelector(
selector.SelectSelectorConfig(
options=TUBE_LINES,
multiple=True,
mode=selector.SelectSelectorMode.DROPDOWN,
)
),
}
),
)

View File

@@ -6,6 +6,7 @@ DOMAIN = "london_underground"
CONF_LINE = "line"
SCAN_INTERVAL = timedelta(seconds=30)
TUBE_LINES = [
@@ -17,7 +18,7 @@ TUBE_LINES = [
"Elizabeth line",
"Hammersmith & City",
"Jubilee",
"London Overground", # no longer supported
"London Overground",
"Metropolitan",
"Northern",
"Piccadilly",
@@ -30,20 +31,3 @@ TUBE_LINES = [
"Weaver",
"Windrush",
]
# Default lines to monitor if none selected
DEFAULT_LINES = [
"Bakerloo",
"Central",
"Circle",
"District",
"DLR",
"Elizabeth line",
"Hammersmith & City",
"Jubilee",
"Metropolitan",
"Northern",
"Piccadilly",
"Victoria",
"Waterloo & City",
]

View File

@@ -8,7 +8,6 @@ from typing import cast
from london_tube_status import TubeData
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator
@@ -16,23 +15,16 @@ from .const import DOMAIN, SCAN_INTERVAL
_LOGGER = logging.getLogger(__name__)
type LondonUndergroundConfigEntry = ConfigEntry[LondonTubeCoordinator]
class LondonTubeCoordinator(DataUpdateCoordinator[dict[str, dict[str, str]]]):
"""London Underground sensor coordinator."""
def __init__(
self,
hass: HomeAssistant,
data: TubeData,
config_entry: LondonUndergroundConfigEntry,
) -> None:
def __init__(self, hass: HomeAssistant, data: TubeData) -> None:
"""Initialize coordinator."""
super().__init__(
hass,
_LOGGER,
config_entry=config_entry,
config_entry=None,
name=DOMAIN,
update_interval=SCAN_INTERVAL,
)

View File

@@ -2,12 +2,9 @@
"domain": "london_underground",
"name": "London Underground",
"codeowners": ["@jpbede"],
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/london_underground",
"integration_type": "service",
"iot_class": "cloud_polling",
"loggers": ["london_tube_status"],
"quality_scale": "legacy",
"requirements": ["london-tube-status==0.5"],
"single_config_entry": true
"requirements": ["london-tube-status==0.5"]
}

View File

@@ -5,26 +5,23 @@ from __future__ import annotations
import logging
from typing import Any
from london_tube_status import TubeData
import voluptuous as vol
from homeassistant.components.sensor import (
PLATFORM_SCHEMA as SENSOR_PLATFORM_SCHEMA,
SensorEntity,
)
from homeassistant.config_entries import SOURCE_IMPORT
from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant
from homeassistant.data_entry_flow import FlowResultType
from homeassistant.helpers import config_validation as cv, issue_registry as ir
from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo
from homeassistant.helpers.entity_platform import (
AddConfigEntryEntitiesCallback,
AddEntitiesCallback,
)
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import PlatformNotReady
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
from homeassistant.helpers.update_coordinator import CoordinatorEntity
from .const import CONF_LINE, DOMAIN, TUBE_LINES
from .coordinator import LondonTubeCoordinator, LondonUndergroundConfigEntry
from .const import CONF_LINE, TUBE_LINES
from .coordinator import LondonTubeCoordinator
_LOGGER = logging.getLogger(__name__)
@@ -41,54 +38,18 @@ async def async_setup_platform(
) -> None:
"""Set up the Tube sensor."""
# If configuration.yaml config exists, trigger the import flow.
# If the config entry already exists, this will not be triggered as only one config is allowed.
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": SOURCE_IMPORT}, data=config
)
if (
result.get("type") is FlowResultType.ABORT
and result.get("reason") != "already_configured"
):
ir.async_create_issue(
hass,
DOMAIN,
f"deprecated_yaml_import_issue_{result.get('reason')}",
is_fixable=False,
issue_domain=DOMAIN,
severity=ir.IssueSeverity.WARNING,
translation_key="deprecated_yaml_import_issue",
translation_placeholders={
"domain": DOMAIN,
"integration_title": "London Underground",
},
)
return
session = async_get_clientsession(hass)
ir.async_create_issue(
hass,
HOMEASSISTANT_DOMAIN,
"deprecated_yaml",
is_fixable=False,
issue_domain=DOMAIN,
severity=ir.IssueSeverity.WARNING,
translation_key="deprecated_yaml",
translation_placeholders={
"domain": DOMAIN,
"integration_title": "London Underground",
},
)
data = TubeData(session)
coordinator = LondonTubeCoordinator(hass, data)
await coordinator.async_refresh()
async def async_setup_entry(
hass: HomeAssistant,
entry: LondonUndergroundConfigEntry,
async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
"""Set up the London Underground sensor from config entry."""
if not coordinator.last_update_success:
raise PlatformNotReady
async_add_entities(
LondonTubeSensor(entry.runtime_data, line) for line in entry.options[CONF_LINE]
LondonTubeSensor(coordinator, line) for line in config[CONF_LINE]
)
@@ -97,21 +58,11 @@ class LondonTubeSensor(CoordinatorEntity[LondonTubeCoordinator], SensorEntity):
_attr_attribution = "Powered by TfL Open Data"
_attr_icon = "mdi:subway"
_attr_has_entity_name = True # Use modern entity naming
def __init__(self, coordinator: LondonTubeCoordinator, name: str) -> None:
"""Initialize the London Underground sensor."""
super().__init__(coordinator)
self._name = name
# Add unique_id for proper entity registry
self._attr_unique_id = f"tube_{name.lower().replace(' ', '_')}"
self._attr_device_info = DeviceInfo(
identifiers={(DOMAIN, DOMAIN)},
name="London Underground",
manufacturer="Transport for London",
model="Tube Status",
entry_type=DeviceEntryType.SERVICE,
)
@property
def name(self) -> str:

View File

@@ -1,38 +0,0 @@
{
"config": {
"step": {
"user": {
"title": "Set up London Underground",
"description": "Select which tube lines you want to monitor",
"data": {
"line": "Tube lines"
}
}
},
"error": {
"cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
"timeout_connect": "[%key:common::config_flow::error::timeout_connect%]",
"unknown": "[%key:common::config_flow::error::unknown%]"
},
"abort": {
"single_instance_allowed": "[%key:common::config_flow::abort::single_instance_allowed%]"
}
},
"options": {
"step": {
"init": {
"title": "Configure London Underground",
"description": "[%key:component::london_underground::config::step::user::description%]",
"data": {
"line": "[%key:component::london_underground::config::step::user::data::line%]"
}
}
}
},
"issues": {
"deprecated_yaml_import_issue": {
"title": "London Underground YAML configuration deprecated",
"description": "Configuring London Underground using YAML sensor platform is deprecated.\n\nWhile importing your configuration, an error occurred when trying to connect to the Transport for London API. Please restart Home Assistant to try again, or remove the existing YAML configuration and set the integration up via the UI."
}
}
}

View File

@@ -59,7 +59,7 @@ async def create_server(
# Backwards compatibility with old MCP Server config
return await llm.async_get_api(hass, llm_api_id, llm_context)
@server.list_prompts() # type: ignore[no-untyped-call,untyped-decorator]
@server.list_prompts() # type: ignore[no-untyped-call, misc]
async def handle_list_prompts() -> list[types.Prompt]:
llm_api = await get_api_instance()
return [
@@ -69,7 +69,7 @@ async def create_server(
)
]
@server.get_prompt() # type: ignore[no-untyped-call,untyped-decorator]
@server.get_prompt() # type: ignore[no-untyped-call, misc]
async def handle_get_prompt(
name: str, arguments: dict[str, str] | None
) -> types.GetPromptResult:
@@ -90,13 +90,13 @@ async def create_server(
],
)
@server.list_tools() # type: ignore[no-untyped-call,untyped-decorator]
@server.list_tools() # type: ignore[no-untyped-call, misc]
async def list_tools() -> list[types.Tool]:
"""List available time tools."""
llm_api = await get_api_instance()
return [_format_tool(tool, llm_api.custom_serializer) for tool in llm_api.tools]
@server.call_tool() # type: ignore[untyped-decorator]
@server.call_tool() # type: ignore[misc]
async def call_tool(name: str, arguments: dict) -> Sequence[types.TextContent]:
"""Handle calling tools."""
llm_api = await get_api_instance()

View File

@@ -408,5 +408,5 @@ class AtwDeviceZoneClimate(MelCloudClimate):
async def async_set_temperature(self, **kwargs: Any) -> None:
"""Set new target temperature."""
await self._zone.set_target_temperature(
kwargs.get(ATTR_TEMPERATURE, self.target_temperature)
kwargs.get("temperature", self.target_temperature)
)

View File

@@ -10,11 +10,7 @@ from mill import Heater, Mill
from mill_local import Mill as MillLocal
from homeassistant.components.recorder import get_instance
from homeassistant.components.recorder.models import (
StatisticData,
StatisticMeanType,
StatisticMetaData,
)
from homeassistant.components.recorder.models import StatisticData, StatisticMetaData
from homeassistant.components.recorder.statistics import (
async_add_external_statistics,
get_last_statistics,
@@ -151,7 +147,7 @@ class MillHistoricDataUpdateCoordinator(DataUpdateCoordinator):
)
)
metadata = StatisticMetaData(
mean_type=StatisticMeanType.NONE,
has_mean=False,
has_sum=True,
name=f"{heater.name}",
source=DOMAIN,

View File

@@ -253,7 +253,6 @@ class ModbusHub:
self._client: (
AsyncModbusSerialClient | AsyncModbusTcpClient | AsyncModbusUdpClient | None
) = None
self._lock = asyncio.Lock()
self.event_connected = asyncio.Event()
self.hass = hass
self.name = client_config[CONF_NAME]
@@ -416,9 +415,7 @@ class ModbusHub:
"""Convert async to sync pymodbus call."""
if not self._client:
return None
async with self._lock:
result = await self.low_level_pb_call(unit, address, value, use_call)
if self._msg_wait:
# small delay until next request/response
await asyncio.sleep(self._msg_wait)
return result
result = await self.low_level_pb_call(unit, address, value, use_call)
if self._msg_wait:
await asyncio.sleep(self._msg_wait)
return result

View File

@@ -53,7 +53,7 @@ class NikoHomeControlLight(NikoHomeControlEntity, LightEntity):
async def async_turn_on(self, **kwargs: Any) -> None:
"""Instruct the light to turn on."""
await self._action.turn_on(kwargs.get(ATTR_BRIGHTNESS))
await self._action.turn_on(kwargs.get(ATTR_BRIGHTNESS, 255))
async def async_turn_off(self, **kwargs: Any) -> None:
"""Instruct the light to turn off."""

View File

@@ -6,5 +6,5 @@
"documentation": "https://www.home-assistant.io/integrations/niko_home_control",
"iot_class": "local_push",
"loggers": ["nikohomecontrol"],
"requirements": ["nhc==0.6.1"]
"requirements": ["nhc==0.4.12"]
}

View File

@@ -316,23 +316,16 @@ class OpenAISubentryFlowHandler(ConfigSubentryFlow):
options = self.options
errors: dict[str, str] = {}
step_schema: VolDictType = {}
step_schema: VolDictType = {
vol.Optional(
CONF_CODE_INTERPRETER,
default=RECOMMENDED_CODE_INTERPRETER,
): bool,
}
model = options[CONF_CHAT_MODEL]
if not model.startswith(("gpt-5-pro", "gpt-5-codex")):
step_schema.update(
{
vol.Optional(
CONF_CODE_INTERPRETER,
default=RECOMMENDED_CODE_INTERPRETER,
): bool,
}
)
elif CONF_CODE_INTERPRETER in options:
options.pop(CONF_CODE_INTERPRETER)
if model.startswith(("o", "gpt-5")) and not model.startswith("gpt-5-pro"):
if model.startswith(("o", "gpt-5")):
step_schema.update(
{
vol.Optional(

View File

@@ -468,9 +468,7 @@ class OpenAIBaseLLMEntity(Entity):
model_args["reasoning"] = {
"effort": options.get(
CONF_REASONING_EFFORT, RECOMMENDED_REASONING_EFFORT
)
if not model_args["model"].startswith("gpt-5-pro")
else "high", # GPT-5 pro only supports reasoning.effort: high
),
"summary": "auto",
}
model_args["include"] = ["reasoning.encrypted_content"]

View File

@@ -18,8 +18,7 @@ from homeassistant.helpers.aiohttp_client import async_create_clientsession
from .coordinator import PortainerCoordinator
_PLATFORMS: list[Platform] = [Platform.BINARY_SENSOR, Platform.SENSOR, Platform.SWITCH]
_PLATFORMS: list[Platform] = [Platform.BINARY_SENSOR, Platform.BUTTON, Platform.SWITCH]
type PortainerConfigEntry = ConfigEntry[PortainerCoordinator]

View File

@@ -1,10 +1,5 @@
{
"entity": {
"sensor": {
"image": {
"default": "mdi:docker"
}
},
"switch": {
"container": {
"default": "mdi:arrow-down-box",

View File

@@ -1,83 +0,0 @@
"""Sensor platform for Portainer integration."""
from __future__ import annotations
from collections.abc import Callable
from dataclasses import dataclass
from pyportainer.models.docker import DockerContainer
from homeassistant.components.sensor import SensorEntity, SensorEntityDescription
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from .coordinator import PortainerConfigEntry, PortainerCoordinator
from .entity import PortainerContainerEntity, PortainerCoordinatorData
@dataclass(frozen=True, kw_only=True)
class PortainerSensorEntityDescription(SensorEntityDescription):
"""Class to hold Portainer sensor description."""
value_fn: Callable[[DockerContainer], str | None]
CONTAINER_SENSORS: tuple[PortainerSensorEntityDescription, ...] = (
PortainerSensorEntityDescription(
key="image",
translation_key="image",
value_fn=lambda data: data.image,
),
)
async def async_setup_entry(
hass: HomeAssistant,
entry: PortainerConfigEntry,
async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
"""Set up Portainer sensors based on a config entry."""
coordinator = entry.runtime_data
async_add_entities(
PortainerContainerSensor(
coordinator,
entity_description,
container,
endpoint,
)
for endpoint in coordinator.data.values()
for container in endpoint.containers.values()
for entity_description in CONTAINER_SENSORS
)
class PortainerContainerSensor(PortainerContainerEntity, SensorEntity):
"""Representation of a Portainer container sensor."""
entity_description: PortainerSensorEntityDescription
def __init__(
self,
coordinator: PortainerCoordinator,
entity_description: PortainerSensorEntityDescription,
device_info: DockerContainer,
via_device: PortainerCoordinatorData,
) -> None:
"""Initialize the Portainer container sensor."""
self.entity_description = entity_description
super().__init__(device_info, coordinator, via_device)
self._attr_unique_id = f"{coordinator.config_entry.entry_id}_{self.device_name}_{entity_description.key}"
@property
def available(self) -> bool:
"""Return if the device is available."""
return super().available and self.endpoint_id in self.coordinator.data
@property
def native_value(self) -> str | None:
"""Return the state of the sensor."""
return self.entity_description.value_fn(
self.coordinator.data[self.endpoint_id].containers[self.device_id]
)

View File

@@ -46,11 +46,6 @@
"name": "Status"
}
},
"sensor": {
"image": {
"name": "Image"
}
},
"switch": {
"container": {
"name": "Container"

View File

@@ -1,7 +1,7 @@
{
"domain": "sharkiq",
"name": "Shark IQ",
"codeowners": ["@JeffResc", "@funkybunch", "@TheOneOgre"],
"codeowners": ["@JeffResc", "@funkybunch"],
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/sharkiq",
"iot_class": "cloud_polling",

View File

@@ -157,18 +157,21 @@ SENSORS: dict[tuple[str, str], BlockBinarySensorDescription] = {
key="input|input",
name="Input",
device_class=BinarySensorDeviceClass.POWER,
entity_registry_enabled_default=False,
removal_condition=is_block_momentary_input,
),
("relay", "input"): BlockBinarySensorDescription(
key="relay|input",
name="Input",
device_class=BinarySensorDeviceClass.POWER,
entity_registry_enabled_default=False,
removal_condition=is_block_momentary_input,
),
("device", "input"): BlockBinarySensorDescription(
key="device|input",
name="Input",
device_class=BinarySensorDeviceClass.POWER,
entity_registry_enabled_default=False,
removal_condition=is_block_momentary_input,
),
("sensor", "extInput"): BlockBinarySensorDescription(
@@ -198,6 +201,7 @@ RPC_SENSORS: Final = {
key="input",
sub_key="state",
device_class=BinarySensorDeviceClass.POWER,
entity_registry_enabled_default=False,
removal_condition=is_rpc_momentary_input,
),
"cloud": RpcBinarySensorDescription(

View File

@@ -9,6 +9,7 @@ from typing import TYPE_CHECKING, Any, Final
from aioshelly.const import BLU_TRV_IDENTIFIER, MODEL_BLU_GATEWAY_G3, RPC_GENERATIONS
from aioshelly.exceptions import DeviceConnectionError, InvalidAuthError, RpcCallError
from aioshelly.rpc_device import RpcDevice
from homeassistant.components.button import (
DOMAIN as BUTTON_PLATFORM,
@@ -23,24 +24,16 @@ from homeassistant.helpers import entity_registry as er
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from homeassistant.helpers.update_coordinator import CoordinatorEntity
from .const import DOMAIN, LOGGER, MODEL_FRANKEVER_WATER_VALVE, SHELLY_GAS_MODELS
from .const import DOMAIN, LOGGER, SHELLY_GAS_MODELS
from .coordinator import ShellyBlockCoordinator, ShellyConfigEntry, ShellyRpcCoordinator
from .entity import (
RpcEntityDescription,
ShellyRpcAttributeEntity,
async_setup_entry_rpc,
get_entity_block_device_info,
get_entity_rpc_device_info,
rpc_call,
)
from .entity import get_entity_block_device_info, get_entity_rpc_device_info
from .utils import (
async_remove_orphaned_entities,
format_ble_addr,
get_blu_trv_device_info,
get_device_entry_gen,
get_rpc_entity_name,
get_rpc_key_ids,
get_rpc_key_instances,
get_rpc_role_by_key,
get_virtual_component_ids,
)
@@ -58,11 +51,6 @@ class ShellyButtonDescription[
supported: Callable[[_ShellyCoordinatorT], bool] = lambda _: True
@dataclass(frozen=True, kw_only=True)
class RpcButtonDescription(RpcEntityDescription, ButtonEntityDescription):
"""Class to describe a RPC button."""
BUTTONS: Final[list[ShellyButtonDescription[Any]]] = [
ShellyButtonDescription[ShellyBlockCoordinator | ShellyRpcCoordinator](
key="reboot",
@@ -108,24 +96,12 @@ BLU_TRV_BUTTONS: Final[list[ShellyButtonDescription]] = [
),
]
RPC_VIRTUAL_BUTTONS = {
"button_generic": RpcButtonDescription(
VIRTUAL_BUTTONS: Final[list[ShellyButtonDescription]] = [
ShellyButtonDescription[ShellyRpcCoordinator](
key="button",
role="generic",
),
"button_open": RpcButtonDescription(
key="button",
entity_registry_enabled_default=False,
role="open",
models={MODEL_FRANKEVER_WATER_VALVE},
),
"button_close": RpcButtonDescription(
key="button",
entity_registry_enabled_default=False,
role="close",
models={MODEL_FRANKEVER_WATER_VALVE},
),
}
press_action="single_push",
)
]
@callback
@@ -153,10 +129,8 @@ def async_migrate_unique_ids(
)
}
if not isinstance(coordinator, ShellyRpcCoordinator):
return None
if blutrv_key_ids := get_rpc_key_ids(coordinator.device.status, BLU_TRV_IDENTIFIER):
assert isinstance(coordinator.device, RpcDevice)
for _id in blutrv_key_ids:
key = f"{BLU_TRV_IDENTIFIER}:{_id}"
ble_addr: str = coordinator.device.config[key]["addr"]
@@ -175,26 +149,6 @@ def async_migrate_unique_ids(
)
}
if virtual_button_keys := get_rpc_key_instances(
coordinator.device.config, "button"
):
for key in virtual_button_keys:
old_unique_id = f"{coordinator.mac}-{key}"
if entity_entry.unique_id == old_unique_id:
role = get_rpc_role_by_key(coordinator.device.config, key)
new_unique_id = f"{coordinator.mac}-{key}-button_{role}"
LOGGER.debug(
"Migrating unique_id for %s entity from [%s] to [%s]",
entity_entry.entity_id,
old_unique_id,
new_unique_id,
)
return {
"new_unique_id": entity_entry.unique_id.replace(
old_unique_id, new_unique_id
)
}
return None
@@ -218,7 +172,7 @@ async def async_setup_entry(
hass, config_entry.entry_id, partial(async_migrate_unique_ids, coordinator)
)
entities: list[ShellyButton | ShellyBluTrvButton] = []
entities: list[ShellyButton | ShellyBluTrvButton | ShellyVirtualButton] = []
entities.extend(
ShellyButton(coordinator, button)
@@ -231,9 +185,12 @@ async def async_setup_entry(
return
# add virtual buttons
async_setup_entry_rpc(
hass, config_entry, async_add_entities, RPC_VIRTUAL_BUTTONS, RpcVirtualButton
)
if virtual_button_ids := get_rpc_key_ids(coordinator.device.status, "button"):
entities.extend(
ShellyVirtualButton(coordinator, button, id_)
for id_ in virtual_button_ids
for button in VIRTUAL_BUTTONS
)
# add BLU TRV buttons
if blutrv_key_ids := get_rpc_key_ids(coordinator.device.status, BLU_TRV_IDENTIFIER):
@@ -375,16 +332,30 @@ class ShellyBluTrvButton(ShellyBaseButton):
await method(self._id)
class RpcVirtualButton(ShellyRpcAttributeEntity, ButtonEntity):
"""Defines a Shelly RPC virtual component button."""
class ShellyVirtualButton(ShellyBaseButton):
"""Defines a Shelly virtual component button."""
entity_description: RpcButtonDescription
_id: int
def __init__(
self,
coordinator: ShellyRpcCoordinator,
description: ShellyButtonDescription,
_id: int,
) -> None:
"""Initialize Shelly virtual component button."""
super().__init__(coordinator, description)
@rpc_call
async def async_press(self) -> None:
"""Triggers the Shelly button press service."""
self._attr_unique_id = f"{coordinator.mac}-{description.key}:{_id}"
self._attr_device_info = get_entity_rpc_device_info(coordinator)
self._attr_name = get_rpc_entity_name(
coordinator.device, f"{description.key}:{_id}"
)
self._id = _id
async def _press_method(self) -> None:
"""Press method."""
if TYPE_CHECKING:
assert isinstance(self.coordinator, ShellyRpcCoordinator)
await self.coordinator.device.button_trigger(self._id, "single_push")
await self.coordinator.device.button_trigger(
self._id, self.entity_description.press_action
)

View File

@@ -195,11 +195,9 @@ def async_setup_rpc_attribute_entities(
):
continue
if (
description.sub_key
and description.sub_key not in coordinator.device.status[key]
and not description.supported(coordinator.device.status[key])
):
if description.sub_key not in coordinator.device.status[
key
] and not description.supported(coordinator.device.status[key]):
continue
# Filter and remove entities that according to settings/status
@@ -311,7 +309,7 @@ class RpcEntityDescription(EntityDescription):
# restrict the type to str.
name: str = ""
sub_key: str | None = None
sub_key: str
value: Callable[[Any, Any], Any] | None = None
available: Callable[[dict], bool] | None = None

View File

@@ -50,14 +50,8 @@
"valve_status": {
"default": "mdi:valve"
},
"vial_name": {
"default": "mdi:scent"
},
"illuminance_level": {
"default": "mdi:brightness-5"
},
"vial_level": {
"default": "mdi:bottle-tonic-outline"
}
},
"switch": {
@@ -67,13 +61,6 @@
"off": "mdi:valve-closed",
"on": "mdi:valve-open"
}
},
"cury_slot": {
"default": "mdi:scent",
"state": {
"off": "mdi:scent-off",
"on": "mdi:scent"
}
}
}
}

View File

@@ -72,7 +72,6 @@ class RpcNumberDescription(RpcEntityDescription, NumberEntityDescription):
min_fn: Callable[[dict], float] | None = None
step_fn: Callable[[dict], float] | None = None
mode_fn: Callable[[dict], NumberMode] | None = None
slot: str | None = None
method: str
@@ -122,22 +121,6 @@ class RpcNumber(ShellyRpcAttributeEntity, NumberEntity):
await method(self._id, value)
class RpcCuryIntensityNumber(RpcNumber):
"""Represent a RPC Cury Intensity entity."""
@rpc_call
async def async_set_native_value(self, value: float) -> None:
"""Change the value."""
method = getattr(self.coordinator.device, self.entity_description.method)
if TYPE_CHECKING:
assert method is not None
await method(
self._id, slot=self.entity_description.slot, intensity=round(value)
)
class RpcBluTrvNumber(RpcNumber):
"""Represent a RPC BluTrv number."""
@@ -291,38 +274,6 @@ RPC_NUMBERS: Final = {
is True,
entity_class=RpcBluTrvNumber,
),
"left_slot_intensity": RpcNumberDescription(
key="cury",
sub_key="slots",
name="Left slot intensity",
value=lambda status, _: status["left"]["intensity"],
native_min_value=0,
native_max_value=100,
native_step=1,
mode=NumberMode.SLIDER,
native_unit_of_measurement=PERCENTAGE,
method="cury_set",
slot="left",
available=lambda status: (left := status["left"]) is not None
and left.get("vial", {}).get("level", -1) != -1,
entity_class=RpcCuryIntensityNumber,
),
"right_slot_intensity": RpcNumberDescription(
key="cury",
sub_key="slots",
name="Right slot intensity",
value=lambda status, _: status["right"]["intensity"],
native_min_value=0,
native_max_value=100,
native_step=1,
mode=NumberMode.SLIDER,
native_unit_of_measurement=PERCENTAGE,
method="cury_set",
slot="right",
available=lambda status: (right := status["right"]) is not None
and right.get("vial", {}).get("level", -1) != -1,
entity_class=RpcCuryIntensityNumber,
),
}

View File

@@ -1658,50 +1658,6 @@ RPC_SENSORS: Final = {
state_class=SensorStateClass.MEASUREMENT,
role="phase_info",
),
"cury_left_level": RpcSensorDescription(
key="cury",
sub_key="slots",
name="Left slot level",
translation_key="vial_level",
value=lambda status, _: status["left"]["vial"]["level"],
state_class=SensorStateClass.MEASUREMENT,
native_unit_of_measurement=PERCENTAGE,
entity_category=EntityCategory.DIAGNOSTIC,
available=lambda status: (left := status["left"]) is not None
and left.get("vial", {}).get("level", -1) != -1,
),
"cury_left_vial": RpcSensorDescription(
key="cury",
sub_key="slots",
name="Left slot vial",
translation_key="vial_name",
value=lambda status, _: status["left"]["vial"]["name"],
entity_category=EntityCategory.DIAGNOSTIC,
available=lambda status: (left := status["left"]) is not None
and left.get("vial", {}).get("level", -1) != -1,
),
"cury_right_level": RpcSensorDescription(
key="cury",
sub_key="slots",
name="Right slot level",
translation_key="vial_level",
value=lambda status, _: status["right"]["vial"]["level"],
state_class=SensorStateClass.MEASUREMENT,
native_unit_of_measurement=PERCENTAGE,
entity_category=EntityCategory.DIAGNOSTIC,
available=lambda status: (right := status["right"]) is not None
and right.get("vial", {}).get("level", -1) != -1,
),
"cury_right_vial": RpcSensorDescription(
key="cury",
sub_key="slots",
name="Right slot vial",
translation_key="vial_name",
value=lambda status, _: status["right"]["vial"]["name"],
entity_category=EntityCategory.DIAGNOSTIC,
available=lambda status: (right := status["right"]) is not None
and right.get("vial", {}).get("level", -1) != -1,
),
}

View File

@@ -230,32 +230,6 @@ RPC_SWITCHES = {
entity_registry_enabled_default=False,
entity_category=EntityCategory.CONFIG,
),
"cury_left": RpcSwitchDescription(
key="cury",
sub_key="slots",
name="Left slot",
translation_key="cury_slot",
is_on=lambda status: bool(status["slots"]["left"]["on"]),
method_on="cury_set",
method_off="cury_set",
method_params_fn=lambda id, value: (id, "left", value),
entity_registry_enabled_default=True,
available=lambda status: (left := status["left"]) is not None
and left.get("vial", {}).get("level", -1) != -1,
),
"cury_right": RpcSwitchDescription(
key="cury",
sub_key="slots",
name="Right slot",
translation_key="cury_slot",
is_on=lambda status: bool(status["slots"]["right"]["on"]),
method_on="cury_set",
method_off="cury_set",
method_params_fn=lambda id, value: (id, "right", value),
entity_registry_enabled_default=True,
available=lambda status: (right := status["right"]) is not None
and right.get("vial", {}).get("level", -1) != -1,
),
}

View File

@@ -100,9 +100,8 @@ ATTR_PIN_VALUE = "pin"
ATTR_TIMESTAMP = "timestamp"
DEFAULT_SCAN_INTERVAL = timedelta(seconds=30)
DEFAULT_SOCKET_MIN_RETRY = 15
WEBSOCKET_RECONNECT_RETRIES = 3
WEBSOCKET_RETRY_DELAY = 2
EVENT_SIMPLISAFE_EVENT = "SIMPLISAFE_EVENT"
EVENT_SIMPLISAFE_NOTIFICATION = "SIMPLISAFE_NOTIFICATION"
@@ -420,7 +419,6 @@ class SimpliSafe:
self._api = api
self._hass = hass
self._system_notifications: dict[int, set[SystemNotification]] = {}
self._websocket_reconnect_retries: int = 0
self._websocket_reconnect_task: asyncio.Task | None = None
self.entry = entry
self.initial_event_to_use: dict[int, dict[str, Any]] = {}
@@ -471,8 +469,6 @@ class SimpliSafe:
"""Start a websocket reconnection loop."""
assert self._api.websocket
self._websocket_reconnect_retries += 1
try:
await self._api.websocket.async_connect()
await self._api.websocket.async_listen()
@@ -483,21 +479,9 @@ class SimpliSafe:
LOGGER.error("Failed to connect to websocket: %s", err)
except Exception as err: # noqa: BLE001
LOGGER.error("Unknown exception while connecting to websocket: %s", err)
else:
self._websocket_reconnect_retries = 0
if self._websocket_reconnect_retries >= WEBSOCKET_RECONNECT_RETRIES:
LOGGER.error("Max websocket connection retries exceeded")
return
delay = WEBSOCKET_RETRY_DELAY * (2 ** (self._websocket_reconnect_retries - 1))
LOGGER.info(
"Retrying websocket connection in %s seconds (attempt %s/%s)",
delay,
self._websocket_reconnect_retries,
WEBSOCKET_RECONNECT_RETRIES,
)
await asyncio.sleep(delay)
LOGGER.debug("Reconnecting to websocket")
await self._async_cancel_websocket_loop()
self._websocket_reconnect_task = self._hass.async_create_task(
self._async_start_websocket_loop()
)

View File

@@ -241,6 +241,7 @@ class SuezWaterCoordinator(DataUpdateCoordinator[SuezWaterData]):
) -> StatisticMetaData:
"""Build statistics metadata for requested configuration."""
return StatisticMetaData(
has_mean=False,
mean_type=StatisticMeanType.NONE,
has_sum=True,
name=f"Suez water {name} {self._counter_id}",

View File

@@ -6,6 +6,6 @@
"documentation": "https://www.home-assistant.io/integrations/systemmonitor",
"iot_class": "local_push",
"loggers": ["psutil"],
"requirements": ["psutil-home-assistant==0.0.1", "psutil==7.1.0"],
"requirements": ["psutil-home-assistant==0.0.1", "psutil==7.0.0"],
"single_config_entry": true
}

View File

@@ -18,7 +18,7 @@ from homeassistant.components.climate import (
ClimateEntityFeature,
HVACMode,
)
from homeassistant.const import ATTR_TEMPERATURE, UnitOfTemperature
from homeassistant.const import UnitOfTemperature
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
@@ -352,7 +352,7 @@ class TuyaClimateEntity(TuyaEntity, ClimateEntity):
{
"code": self._set_temperature.dpcode,
"value": round(
self._set_temperature.scale_value_back(kwargs[ATTR_TEMPERATURE])
self._set_temperature.scale_value_back(kwargs["temperature"])
),
}
]

View File

@@ -1,45 +0,0 @@
"""Volvo diagnostics."""
from dataclasses import asdict
from typing import Any
from homeassistant.const import CONF_ACCESS_TOKEN, CONF_API_KEY
from homeassistant.core import HomeAssistant
from homeassistant.helpers.redact import async_redact_data
from .const import CONF_VIN
from .coordinator import VolvoConfigEntry
_TO_REDACT_ENTRY = [
CONF_ACCESS_TOKEN,
CONF_API_KEY,
CONF_VIN,
"id_token",
"refresh_token",
]
_TO_REDACT_DATA = [
"coordinates",
"heading",
"vin",
]
async def async_get_config_entry_diagnostics(
hass: HomeAssistant, entry: VolvoConfigEntry
) -> dict[str, Any]:
"""Return diagnostics for a config entry."""
context = entry.runtime_data.interval_coordinators[0].context
data: dict[str, dict] = {}
for coordinator in entry.runtime_data.interval_coordinators:
data[coordinator.name] = {
key: async_redact_data(asdict(value), _TO_REDACT_DATA) if value else None
for key, value in coordinator.data.items()
}
return {
"entry_data": async_redact_data(entry.data, _TO_REDACT_ENTRY),
"vehicle": async_redact_data(asdict(context.vehicle), _TO_REDACT_DATA),
**data,
}

View File

@@ -7,5 +7,5 @@
"iot_class": "local_polling",
"loggers": ["holidays"],
"quality_scale": "internal",
"requirements": ["holidays==0.82"]
"requirements": ["holidays==0.81"]
}

View File

@@ -11,13 +11,7 @@ from typing import Any
from propcache.api import cached_property
from zha.mixins import LogMixin
from homeassistant.const import (
ATTR_MANUFACTURER,
ATTR_MODEL,
ATTR_NAME,
ATTR_VIA_DEVICE,
EntityCategory,
)
from homeassistant.const import ATTR_MANUFACTURER, ATTR_MODEL, ATTR_NAME, EntityCategory
from homeassistant.core import State, callback
from homeassistant.helpers.device_registry import CONNECTION_ZIGBEE, DeviceInfo
from homeassistant.helpers.dispatcher import async_dispatcher_connect
@@ -91,19 +85,14 @@ class ZHAEntity(LogMixin, RestoreEntity, Entity):
ieee = zha_device_info["ieee"]
zha_gateway = self.entity_data.device_proxy.gateway_proxy.gateway
device_info = DeviceInfo(
return DeviceInfo(
connections={(CONNECTION_ZIGBEE, ieee)},
identifiers={(DOMAIN, ieee)},
manufacturer=zha_device_info[ATTR_MANUFACTURER],
model=zha_device_info[ATTR_MODEL],
name=zha_device_info[ATTR_NAME],
via_device=(DOMAIN, str(zha_gateway.state.node_info.ieee)),
)
if ieee != str(zha_gateway.state.node_info.ieee):
device_info[ATTR_VIA_DEVICE] = (
DOMAIN,
str(zha_gateway.state.node_info.ieee),
)
return device_info
@callback
def _handle_entity_events(self, event: Any) -> None:

View File

@@ -9,6 +9,7 @@ from typing import TYPE_CHECKING, Final
from .generated.entity_platforms import EntityPlatforms
from .helpers.deprecation import (
DeprecatedConstantEnum,
EnumWithDeprecatedMembers,
all_with_deprecated_constants,
check_if_deprecated_constant,
dir_with_deprecated_constants,
@@ -341,9 +342,6 @@ ATTR_NAME: Final = "name"
# Contains one string or a list of strings, each being an entity id
ATTR_ENTITY_ID: Final = "entity_id"
# Contains one string or a list of strings, each being an entity id
ATTR_INCLUDED_ENTITIES: Final = "included_entities"
# Contains one string, the config entry ID
ATTR_CONFIG_ENTRY_ID: Final = "config_entry_id"
@@ -706,13 +704,35 @@ class UnitOfMass(StrEnum):
STONES = "st"
class UnitOfConductivity(StrEnum):
class UnitOfConductivity(
StrEnum,
metaclass=EnumWithDeprecatedMembers,
deprecated={
"SIEMENS": ("UnitOfConductivity.SIEMENS_PER_CM", "2025.11.0"),
"MICROSIEMENS": ("UnitOfConductivity.MICROSIEMENS_PER_CM", "2025.11.0"),
"MILLISIEMENS": ("UnitOfConductivity.MILLISIEMENS_PER_CM", "2025.11.0"),
},
):
"""Conductivity units."""
SIEMENS_PER_CM = "S/cm"
MICROSIEMENS_PER_CM = "μS/cm"
MILLISIEMENS_PER_CM = "mS/cm"
# Deprecated aliases
SIEMENS = "S/cm"
"""Deprecated: Please use UnitOfConductivity.SIEMENS_PER_CM"""
MICROSIEMENS = "μS/cm"
"""Deprecated: Please use UnitOfConductivity.MICROSIEMENS_PER_CM"""
MILLISIEMENS = "mS/cm"
"""Deprecated: Please use UnitOfConductivity.MILLISIEMENS_PER_CM"""
_DEPRECATED_CONDUCTIVITY: Final = DeprecatedConstantEnum(
UnitOfConductivity.MICROSIEMENS_PER_CM,
"2025.11",
)
"""Deprecated: please use UnitOfConductivity.MICROSIEMENS_PER_CM"""
# Light units
LIGHT_LUX: Final = "lx"
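Since the deprecated aliases above share the new members' string values, downstream code only needs a symbol rename; a hedged sketch of the migration for an illustrative sensor description:

from homeassistant.components.sensor import SensorDeviceClass, SensorEntityDescription
from homeassistant.const import UnitOfConductivity

# Before: native_unit_of_measurement=UnitOfConductivity.MICROSIEMENS (deprecated alias)
# After: the explicit per-centimetre member; the underlying value "μS/cm" is identical.
SOIL_CONDUCTIVITY = SensorEntityDescription(
    key="soil_conductivity",  # hypothetical key, for illustration only
    device_class=SensorDeviceClass.CONDUCTIVITY,
    native_unit_of_measurement=UnitOfConductivity.MICROSIEMENS_PER_CM,
)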

View File

@@ -367,7 +367,6 @@ FLOWS = {
"local_ip",
"local_todo",
"locative",
"london_underground",
"lookin",
"loqed",
"luftdaten",

View File

@@ -3688,10 +3688,9 @@
},
"london_underground": {
"name": "London Underground",
"integration_type": "service",
"config_flow": true,
"iot_class": "cloud_polling",
"single_config_entry": true
"integration_type": "hub",
"config_flow": false,
"iot_class": "cloud_polling"
},
"lookin": {
"name": "LOOKin",

View File

@@ -1699,79 +1699,3 @@ class ToggleEntity(
await self.async_turn_off(**kwargs)
else:
await self.async_turn_on(**kwargs)
class IncludedEntitiesMixin(Entity):
"""Mixin class to include entities that are contained.
Integrations can include this Mixin class for platforms that have
included the `entity_id` capability attribute.
Domain base entity platforms can include the `entity_id` capability attribute
to allow exposure of the included entities.
"""
_attr_included_entities: list[str]
_included_unique_ids: list[str]
_initialized: bool = False
_platform_domain: str
@callback
def async_set_included_entities(
self, platform_domain: str, unique_ids: list[str]
) -> None:
"""Set the list of included entities identified by their unique IDs.
The entity_id of included entities will be looked up and they will be
tracked for changes.
Non-existing entities for the supplied unique IDs will be ignored.
"""
self._included_unique_ids = unique_ids
self._platform_domain = platform_domain
self._monitor_member_updates()
@property
def included_entities(self) -> list[str] | None:
"""Return a list of entity IDs if the entity represents a group.
Included entities will be shown as members in the UI.
"""
if hasattr(self, "_attr_included_entities"):
return self._attr_included_entities
return None
@callback
def _monitor_member_updates(self) -> None:
"""Update the group members if the entity registry is updated."""
entity_registry = er.async_get(self.hass)
def _update_group_entity_ids() -> None:
self._attr_included_entities = []
for included_id in self._included_unique_ids:
if entity_id := entity_registry.async_get_entity_id(
self.entity_id.split(".")[0], self._platform_domain, included_id
):
self._attr_included_entities.append(entity_id)
async def _handle_entity_registry_updated(event: Event[Any]) -> None:
"""Handle registry create or update event."""
if (
event.data["action"] in {"create", "update"}
and (entry := entity_registry.async_get(event.data["entity_id"]))
and entry.unique_id in self._included_unique_ids
) or (
event.data["action"] == "remove"
and self.included_entities is not None
and event.data["entity_id"] in self.included_entities
):
_update_group_entity_ids()
self.async_write_ha_state()
if not self._initialized:
self.async_on_remove(
self.hass.bus.async_listen(
er.EVENT_ENTITY_REGISTRY_UPDATED,
_handle_entity_registry_updated,
)
)
self._initialized = True
_update_group_entity_ids()

View File

@@ -36,7 +36,7 @@ from homeassistant.core import (
callback,
split_entity_id,
)
from homeassistant.exceptions import HomeAssistantError, TemplateError
from homeassistant.exceptions import TemplateError
from homeassistant.loader import bind_hass
from homeassistant.util import dt as dt_util
from homeassistant.util.async_ import run_callback_threadsafe
@@ -1004,9 +1004,12 @@ class TrackTemplateResultInfo:
if track_template_.template.hass:
continue
raise HomeAssistantError(
"Calls async_track_template_result with template without hass"
frame.report_usage(
"calls async_track_template_result with template without hass",
core_behavior=frame.ReportBehavior.LOG,
breaks_in_ha_version="2025.10",
)
track_template_.template.hass = hass
self._rate_limit = KeyedRateLimit(hass)
self._info: dict[Template, RenderInfo] = {}

View File

@@ -260,11 +260,11 @@ class TriggerConfig:
class TriggerActionType(Protocol):
"""Protocol type for trigger action callback."""
def __call__(
async def __call__(
self,
run_variables: dict[str, Any],
context: Context | None = None,
) -> Coroutine[Any, Any, Any] | Any:
) -> Any:
"""Define action callback type."""
@@ -294,7 +294,7 @@ class PluggableActionsEntry:
actions: dict[
object,
tuple[
HassJob[[dict[str, Any], Context | None], Coroutine[Any, Any, None] | Any],
HassJob[[dict[str, Any], Context | None], Coroutine[Any, Any, None]],
dict[str, Any],
],
] = field(default_factory=dict)
@@ -477,7 +477,7 @@ def _trigger_action_wrapper(
else:
@functools.wraps(action)
def with_vars(
async def with_vars(
run_variables: dict[str, Any], context: Context | None = None
) -> Any:
"""Wrap action with extra vars."""

10
requirements_all.txt generated
View File

@@ -247,7 +247,7 @@ aioelectricitymaps==1.1.1
aioemonitor==1.0.5
# homeassistant.components.esphome
aioesphomeapi==41.13.0
aioesphomeapi==41.12.0
# homeassistant.components.flo
aioflo==2021.11.0
@@ -895,7 +895,7 @@ enocean==0.50
enturclient==0.2.4
# homeassistant.components.environment_canada
env-canada==0.11.3
env-canada==0.11.2
# homeassistant.components.season
ephem==4.1.6
@@ -1183,7 +1183,7 @@ hole==0.9.0
# homeassistant.components.holiday
# homeassistant.components.workday
holidays==0.82
holidays==0.81
# homeassistant.components.frontend
home-assistant-frontend==20251001.0
@@ -1545,7 +1545,7 @@ nextcord==3.1.0
nextdns==4.1.0
# homeassistant.components.niko_home_control
nhc==0.6.1
nhc==0.4.12
# homeassistant.components.nibe_heatpump
nibe==2.19.0
@@ -1755,7 +1755,7 @@ proxmoxer==2.0.1
psutil-home-assistant==0.0.1
# homeassistant.components.systemmonitor
psutil==7.1.0
psutil==7.0.0
# homeassistant.components.pulseaudio_loopback
pulsectl==23.5.2

View File

@@ -11,11 +11,9 @@ astroid==3.3.11
coverage==7.10.6
freezegun==1.5.2
go2rtc-client==0.2.1
# librt is an internal mypy dependency
librt==0.2.1
license-expression==30.4.3
mock-open==1.4.0
mypy-dev==1.19.0a4
mypy-dev==1.19.0a2
pre-commit==4.2.0
pydantic==2.12.0
pylint==3.3.8

View File

@@ -235,7 +235,7 @@ aioelectricitymaps==1.1.1
aioemonitor==1.0.5
# homeassistant.components.esphome
aioesphomeapi==41.13.0
aioesphomeapi==41.12.0
# homeassistant.components.flo
aioflo==2021.11.0
@@ -777,7 +777,7 @@ energyzero==2.1.1
enocean==0.50
# homeassistant.components.environment_canada
env-canada==0.11.3
env-canada==0.11.2
# homeassistant.components.season
ephem==4.1.6
@@ -1032,7 +1032,7 @@ hole==0.9.0
# homeassistant.components.holiday
# homeassistant.components.workday
holidays==0.82
holidays==0.81
# homeassistant.components.frontend
home-assistant-frontend==20251001.0
@@ -1328,7 +1328,7 @@ nextcord==3.1.0
nextdns==4.1.0
# homeassistant.components.niko_home_control
nhc==0.6.1
nhc==0.4.12
# homeassistant.components.nibe_heatpump
nibe==2.19.0
@@ -1487,7 +1487,7 @@ prowlpy==1.0.2
psutil-home-assistant==0.0.1
# homeassistant.components.systemmonitor
psutil==7.1.0
psutil==7.0.0
# homeassistant.components.pushbullet
pushbullet.py==0.11.0

View File

@@ -14,6 +14,7 @@ from unittest.mock import AsyncMock, MagicMock, patch
from aiohasupervisor.models import (
Discovery,
JobsInfo,
Repository,
ResolutionInfo,
StoreAddon,
@@ -509,6 +510,13 @@ def resolution_suggestions_for_issue_fixture(supervisor_client: AsyncMock) -> As
return supervisor_client.resolution.suggestions_for_issue
@pytest.fixture(name="jobs_info")
def jobs_info_fixture(supervisor_client: AsyncMock) -> AsyncMock:
"""Mock jobs info from supervisor."""
supervisor_client.jobs.info.return_value = JobsInfo(ignore_conditions=[], jobs=[])
return supervisor_client.jobs.info
@pytest.fixture(name="supervisor_client")
def supervisor_client() -> Generator[AsyncMock]:
"""Mock the supervisor client."""
@@ -554,6 +562,10 @@ def supervisor_client() -> Generator[AsyncMock]:
"homeassistant.components.hassio.issues.get_supervisor_client",
return_value=supervisor_client,
),
patch(
"homeassistant.components.hassio.jobs.get_supervisor_client",
return_value=supervisor_client,
),
patch(
"homeassistant.components.hassio.repairs.get_supervisor_client",
return_value=supervisor_client,

View File

@@ -79,6 +79,7 @@ def all_setup_requests(
store_info: AsyncMock,
addon_changelog: AsyncMock,
addon_stats: AsyncMock,
jobs_info: AsyncMock,
) -> None:
"""Mock all setup requests."""
include_addons = hasattr(request, "param") and request.param.get(
@@ -261,3 +262,8 @@ def all_setup_requests(
},
},
)
aioclient_mock.get(
"http://127.0.0.1/jobs/info",
json={"result": "ok", "data": {"ignore_conditions": [], "jobs": []}},
)

View File

@@ -26,6 +26,7 @@ def mock_all(
addon_changelog: AsyncMock,
addon_stats: AsyncMock,
resolution_info: AsyncMock,
jobs_info: AsyncMock,
) -> None:
"""Mock all setup requests."""
aioclient_mock.post("http://127.0.0.1/homeassistant/options", json={"result": "ok"})

View File

@@ -25,6 +25,7 @@ def mock_all(
addon_stats: AsyncMock,
addon_changelog: AsyncMock,
resolution_info: AsyncMock,
jobs_info: AsyncMock,
) -> None:
"""Mock all setup requests."""
aioclient_mock.post("http://127.0.0.1/homeassistant/options", json={"result": "ok"})

View File

@@ -72,6 +72,7 @@ def mock_all(
addon_stats: AsyncMock,
addon_changelog: AsyncMock,
resolution_info: AsyncMock,
jobs_info: AsyncMock,
) -> None:
"""Mock all setup requests."""
aioclient_mock.post("http://127.0.0.1/homeassistant/options", json={"result": "ok"})
@@ -232,7 +233,7 @@ async def test_setup_api_ping(
await hass.async_block_till_done()
assert result
assert aioclient_mock.call_count + len(supervisor_client.mock_calls) == 18
assert aioclient_mock.call_count + len(supervisor_client.mock_calls) == 19
assert get_core_info(hass)["version_latest"] == "1.0.0"
assert is_hassio(hass)
@@ -279,7 +280,7 @@ async def test_setup_api_push_api_data(
await hass.async_block_till_done()
assert result
assert aioclient_mock.call_count + len(supervisor_client.mock_calls) == 18
assert aioclient_mock.call_count + len(supervisor_client.mock_calls) == 19
assert not aioclient_mock.mock_calls[0][2]["ssl"]
assert aioclient_mock.mock_calls[0][2]["port"] == 9999
assert "watchdog" not in aioclient_mock.mock_calls[0][2]
@@ -300,7 +301,7 @@ async def test_setup_api_push_api_data_server_host(
await hass.async_block_till_done()
assert result
assert aioclient_mock.call_count + len(supervisor_client.mock_calls) == 18
assert aioclient_mock.call_count + len(supervisor_client.mock_calls) == 19
assert not aioclient_mock.mock_calls[0][2]["ssl"]
assert aioclient_mock.mock_calls[0][2]["port"] == 9999
assert not aioclient_mock.mock_calls[0][2]["watchdog"]
@@ -321,7 +322,7 @@ async def test_setup_api_push_api_data_default(
await hass.async_block_till_done()
assert result
assert aioclient_mock.call_count + len(supervisor_client.mock_calls) == 18
assert aioclient_mock.call_count + len(supervisor_client.mock_calls) == 19
assert not aioclient_mock.mock_calls[0][2]["ssl"]
assert aioclient_mock.mock_calls[0][2]["port"] == 8123
refresh_token = aioclient_mock.mock_calls[0][2]["refresh_token"]
@@ -402,7 +403,7 @@ async def test_setup_api_existing_hassio_user(
await hass.async_block_till_done()
assert result
assert aioclient_mock.call_count + len(supervisor_client.mock_calls) == 18
assert aioclient_mock.call_count + len(supervisor_client.mock_calls) == 19
assert not aioclient_mock.mock_calls[0][2]["ssl"]
assert aioclient_mock.mock_calls[0][2]["port"] == 8123
assert aioclient_mock.mock_calls[0][2]["refresh_token"] == token.token
@@ -421,7 +422,7 @@ async def test_setup_core_push_config(
await hass.async_block_till_done()
assert result
assert aioclient_mock.call_count + len(supervisor_client.mock_calls) == 18
assert aioclient_mock.call_count + len(supervisor_client.mock_calls) == 19
assert aioclient_mock.mock_calls[1][2]["timezone"] == "testzone"
with patch("homeassistant.util.dt.set_default_time_zone"):
@@ -445,7 +446,7 @@ async def test_setup_hassio_no_additional_data(
await hass.async_block_till_done()
assert result
assert aioclient_mock.call_count + len(supervisor_client.mock_calls) == 18
assert aioclient_mock.call_count + len(supervisor_client.mock_calls) == 19
assert aioclient_mock.mock_calls[-1][3]["Authorization"] == "Bearer 123456"
@@ -527,14 +528,14 @@ async def test_service_calls(
)
await hass.async_block_till_done()
assert aioclient_mock.call_count + len(supervisor_client.mock_calls) == 22
assert aioclient_mock.call_count + len(supervisor_client.mock_calls) == 23
assert aioclient_mock.mock_calls[-1][2] == "test"
await hass.services.async_call("hassio", "host_shutdown", {})
await hass.services.async_call("hassio", "host_reboot", {})
await hass.async_block_till_done()
assert aioclient_mock.call_count + len(supervisor_client.mock_calls) == 24
assert aioclient_mock.call_count + len(supervisor_client.mock_calls) == 25
await hass.services.async_call("hassio", "backup_full", {})
await hass.services.async_call(
@@ -549,7 +550,7 @@ async def test_service_calls(
)
await hass.async_block_till_done()
assert aioclient_mock.call_count + len(supervisor_client.mock_calls) == 26
assert aioclient_mock.call_count + len(supervisor_client.mock_calls) == 27
assert aioclient_mock.mock_calls[-1][2] == {
"name": "2021-11-13 03:48:00",
"homeassistant": True,
@@ -574,7 +575,7 @@ async def test_service_calls(
)
await hass.async_block_till_done()
assert aioclient_mock.call_count + len(supervisor_client.mock_calls) == 28
assert aioclient_mock.call_count + len(supervisor_client.mock_calls) == 29
assert aioclient_mock.mock_calls[-1][2] == {
"addons": ["test"],
"folders": ["ssl"],
@@ -593,7 +594,7 @@ async def test_service_calls(
)
await hass.async_block_till_done()
assert aioclient_mock.call_count + len(supervisor_client.mock_calls) == 29
assert aioclient_mock.call_count + len(supervisor_client.mock_calls) == 30
assert aioclient_mock.mock_calls[-1][2] == {
"name": "backup_name",
"location": "backup_share",
@@ -609,7 +610,7 @@ async def test_service_calls(
)
await hass.async_block_till_done()
assert aioclient_mock.call_count + len(supervisor_client.mock_calls) == 30
assert aioclient_mock.call_count + len(supervisor_client.mock_calls) == 31
assert aioclient_mock.mock_calls[-1][2] == {
"name": "2021-11-13 03:48:00",
"location": None,
@@ -628,7 +629,7 @@ async def test_service_calls(
)
await hass.async_block_till_done()
assert aioclient_mock.call_count + len(supervisor_client.mock_calls) == 32
assert aioclient_mock.call_count + len(supervisor_client.mock_calls) == 33
assert aioclient_mock.mock_calls[-1][2] == {
"name": "2021-11-13 11:48:00",
"location": None,
@@ -1074,7 +1075,7 @@ async def test_setup_hardware_integration(
await hass.async_block_till_done(wait_background_tasks=True)
assert result
assert aioclient_mock.call_count + len(supervisor_client.mock_calls) == 18
assert aioclient_mock.call_count + len(supervisor_client.mock_calls) == 19
assert len(mock_setup_entry.mock_calls) == 1

View File

@@ -34,6 +34,7 @@ def mock_all(
addon_stats: AsyncMock,
addon_changelog: AsyncMock,
resolution_info: AsyncMock,
jobs_info: AsyncMock,
) -> None:
"""Mock all setup requests."""
_install_default_mocks(aioclient_mock)

View File

@@ -60,6 +60,7 @@ def mock_all(
addon_changelog: AsyncMock,
addon_stats: AsyncMock,
resolution_info: AsyncMock,
jobs_info: AsyncMock,
) -> None:
"""Mock all setup requests."""
aioclient_mock.post("http://127.0.0.1/homeassistant/options", json={"result": "ok"})

View File

@@ -1,9 +1,10 @@
"""The tests for the hassio update entities."""
from datetime import timedelta
from datetime import datetime, timedelta
import os
from typing import Any
from unittest.mock import AsyncMock, MagicMock, patch
from uuid import uuid4
from aiohasupervisor import (
SupervisorBadRequestError,
@@ -12,6 +13,8 @@ from aiohasupervisor import (
)
from aiohasupervisor.models import (
HomeAssistantUpdateOptions,
Job,
JobsInfo,
OSUpdate,
StoreAddonUpdate,
)
@@ -44,6 +47,7 @@ def mock_all(
addon_stats: AsyncMock,
addon_changelog: AsyncMock,
resolution_info: AsyncMock,
jobs_info: AsyncMock,
) -> None:
"""Mock all setup requests."""
aioclient_mock.post("http://127.0.0.1/homeassistant/options", json={"result": "ok"})
@@ -243,6 +247,131 @@ async def test_update_addon(hass: HomeAssistant, update_addon: AsyncMock) -> Non
update_addon.assert_called_once_with("test", StoreAddonUpdate(backup=False))
async def test_update_addon_progress(
hass: HomeAssistant, hass_ws_client: WebSocketGenerator
) -> None:
"""Test progress reporting for addon update."""
config_entry = MockConfigEntry(domain=DOMAIN, data={}, unique_id=DOMAIN)
config_entry.add_to_hass(hass)
with patch.dict(os.environ, MOCK_ENVIRON):
result = await async_setup_component(
hass,
"hassio",
{"http": {"server_port": 9999, "server_host": "127.0.0.1"}, "hassio": {}},
)
assert result
await hass.async_block_till_done()
client = await hass_ws_client(hass)
message_id = 0
job_uuid = uuid4().hex
def make_job_message(progress: float, done: bool | None):
nonlocal message_id
message_id += 1
return {
"id": message_id,
"type": "supervisor/event",
"data": {
"event": "job",
"data": {
"uuid": job_uuid,
"created": "2025-09-29T00:00:00.000000+00:00",
"name": "addon_manager_update",
"reference": "test",
"progress": progress,
"done": done,
"stage": None,
"extra": {"total": 1234567890} if progress > 0 else None,
"errors": [],
},
},
}
await client.send_json(make_job_message(progress=0, done=None))
msg = await client.receive_json()
assert msg["success"]
await hass.async_block_till_done()
assert hass.states.get("update.test_update").attributes.get("in_progress") is False
assert (
hass.states.get("update.test_update").attributes.get("update_percentage")
is None
)
await client.send_json(make_job_message(progress=5, done=False))
msg = await client.receive_json()
assert msg["success"]
await hass.async_block_till_done()
assert hass.states.get("update.test_update").attributes.get("in_progress") is True
assert (
hass.states.get("update.test_update").attributes.get("update_percentage") == 5
)
await client.send_json(make_job_message(progress=50, done=False))
msg = await client.receive_json()
assert msg["success"]
await hass.async_block_till_done()
assert hass.states.get("update.test_update").attributes.get("in_progress") is True
assert (
hass.states.get("update.test_update").attributes.get("update_percentage") == 50
)
await client.send_json(make_job_message(progress=100, done=True))
msg = await client.receive_json()
assert msg["success"]
await hass.async_block_till_done()
assert hass.states.get("update.test_update").attributes.get("in_progress") is False
assert (
hass.states.get("update.test_update").attributes.get("update_percentage")
is None
)
async def test_addon_update_progress_startup(
hass: HomeAssistant, jobs_info: AsyncMock
) -> None:
"""Test addon update in progress during home assistant startup."""
jobs_info.return_value = JobsInfo(
ignore_conditions=[],
jobs=[
Job(
name="addon_manager_update",
reference="test",
uuid=uuid4().hex,
progress=50,
stage=None,
done=False,
errors=[],
created=datetime.now(),
child_jobs=[],
extra={"total": 1234567890},
)
],
)
config_entry = MockConfigEntry(domain=DOMAIN, data={}, unique_id=DOMAIN)
config_entry.add_to_hass(hass)
with patch.dict(os.environ, MOCK_ENVIRON):
result = await async_setup_component(
hass,
"hassio",
{"http": {"server_port": 9999, "server_host": "127.0.0.1"}, "hassio": {}},
)
assert result
await hass.async_block_till_done()
assert hass.states.get("update.test_update").attributes.get("in_progress") is True
assert (
hass.states.get("update.test_update").attributes.get("update_percentage") == 50
)
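A hedged sketch of the mapping these assertions rely on (simplified; the real logic lives in the hassio update entities):

def job_to_update_attrs(progress: float, done: bool | None) -> dict[str, object]:
    """Translate a supervisor update job's state into update-entity attributes (illustrative only)."""
    # A job that has not started (done is None) or has finished (done is True) is not "in progress".
    in_progress = done is False
    return {
        "in_progress": in_progress,
        "update_percentage": progress if in_progress else None,
    }

# Mirrors the three states exercised above.
assert job_to_update_attrs(0, None) == {"in_progress": False, "update_percentage": None}
assert job_to_update_attrs(50, False) == {"in_progress": True, "update_percentage": 50}
assert job_to_update_attrs(100, True) == {"in_progress": False, "update_percentage": None}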
async def setup_backup_integration(hass: HomeAssistant) -> None:
"""Set up the backup integration."""
assert await async_setup_component(hass, "backup", {})
@@ -630,6 +759,186 @@ async def test_update_core(hass: HomeAssistant, supervisor_client: AsyncMock) ->
)
async def test_update_core_progress(
hass: HomeAssistant, hass_ws_client: WebSocketGenerator
) -> None:
"""Test progress reporting for core update."""
config_entry = MockConfigEntry(domain=DOMAIN, data={}, unique_id=DOMAIN)
config_entry.add_to_hass(hass)
with patch.dict(os.environ, MOCK_ENVIRON):
result = await async_setup_component(
hass,
"hassio",
{"http": {"server_port": 9999, "server_host": "127.0.0.1"}, "hassio": {}},
)
assert result
await hass.async_block_till_done()
client = await hass_ws_client(hass)
message_id = 0
job_uuid = uuid4().hex
def make_job_message(
progress: float, done: bool | None, errors: list[dict[str, str]] | None = None
):
nonlocal message_id
message_id += 1
return {
"id": message_id,
"type": "supervisor/event",
"data": {
"event": "job",
"data": {
"uuid": job_uuid,
"created": "2025-09-29T00:00:00.000000+00:00",
"name": "home_assistant_core_update",
"reference": None,
"progress": progress,
"done": done,
"stage": None,
"extra": {"total": 1234567890} if progress > 0 else None,
"errors": errors if errors else [],
},
},
}
await client.send_json(make_job_message(progress=0, done=None))
msg = await client.receive_json()
assert msg["success"]
await hass.async_block_till_done()
assert (
hass.states.get("update.home_assistant_core_update").attributes.get(
"in_progress"
)
is False
)
assert (
hass.states.get("update.home_assistant_core_update").attributes.get(
"update_percentage"
)
is None
)
await client.send_json(make_job_message(progress=5, done=False))
msg = await client.receive_json()
assert msg["success"]
await hass.async_block_till_done()
assert (
hass.states.get("update.home_assistant_core_update").attributes.get(
"in_progress"
)
is True
)
assert (
hass.states.get("update.home_assistant_core_update").attributes.get(
"update_percentage"
)
== 5
)
await client.send_json(make_job_message(progress=50, done=False))
msg = await client.receive_json()
assert msg["success"]
await hass.async_block_till_done()
assert (
hass.states.get("update.home_assistant_core_update").attributes.get(
"in_progress"
)
is True
)
assert (
hass.states.get("update.home_assistant_core_update").attributes.get(
"update_percentage"
)
== 50
)
# During a successful update Home Assistant is stopped before the update job
# reaches the end. An error ends it early, so we use that for the test
await client.send_json(
make_job_message(
progress=70,
done=True,
errors=[
{"type": "HomeAssistantUpdateError", "message": "bad", "stage": None}
],
)
)
msg = await client.receive_json()
assert msg["success"]
await hass.async_block_till_done()
assert (
hass.states.get("update.home_assistant_core_update").attributes.get(
"in_progress"
)
is False
)
assert (
hass.states.get("update.home_assistant_core_update").attributes.get(
"update_percentage"
)
is None
)
async def test_core_update_progress_startup(
hass: HomeAssistant, jobs_info: AsyncMock
) -> None:
"""Test core update in progress during home assistant startup.
This is an odd test; it is very unlikely core will be starting during an update.
It is technically possible though, as core isn't stopped until the Docker portion
is complete and updates can be started from the CLI.
"""
jobs_info.return_value = JobsInfo(
ignore_conditions=[],
jobs=[
Job(
name="home_assistant_core_update",
reference=None,
uuid=uuid4().hex,
progress=50,
stage=None,
done=False,
errors=[],
created=datetime.now(),
child_jobs=[],
extra={"total": 1234567890},
)
],
)
config_entry = MockConfigEntry(domain=DOMAIN, data={}, unique_id=DOMAIN)
config_entry.add_to_hass(hass)
with patch.dict(os.environ, MOCK_ENVIRON):
result = await async_setup_component(
hass,
"hassio",
{"http": {"server_port": 9999, "server_host": "127.0.0.1"}, "hassio": {}},
)
assert result
await hass.async_block_till_done()
assert (
hass.states.get("update.home_assistant_core_update").attributes.get(
"in_progress"
)
is True
)
assert (
hass.states.get("update.home_assistant_core_update").attributes.get(
"update_percentage"
)
== 50
)
@pytest.mark.parametrize(
("commands", "default_mount", "expected_kwargs"),
[

View File

@@ -1,65 +0,0 @@
"""Fixtures for the london_underground tests."""
from collections.abc import AsyncGenerator
import json
from unittest.mock import AsyncMock, patch
from london_tube_status import parse_api_response
import pytest
from homeassistant.components.london_underground.const import CONF_LINE, DOMAIN
from homeassistant.core import HomeAssistant
from tests.common import MockConfigEntry, async_load_fixture
from tests.conftest import AiohttpClientMocker
@pytest.fixture
def mock_setup_entry():
"""Prevent setup of integration during tests."""
with patch(
"homeassistant.components.london_underground.async_setup_entry",
return_value=True,
) as mock_setup:
yield mock_setup
@pytest.fixture
async def mock_config_entry(hass: HomeAssistant) -> MockConfigEntry:
"""Mock the config entry."""
entry = MockConfigEntry(
domain=DOMAIN,
data={},
options={CONF_LINE: ["Metropolitan"]},
title="London Underground",
)
# Add and set up the entry
entry.add_to_hass(hass)
await hass.config_entries.async_setup(entry.entry_id)
return entry
@pytest.fixture
async def mock_london_underground_client(
hass: HomeAssistant,
aioclient_mock: AiohttpClientMocker,
) -> AsyncGenerator[AsyncMock]:
"""Mock a London Underground client."""
with (
patch(
"homeassistant.components.london_underground.TubeData",
autospec=True,
) as mock_client,
patch(
"homeassistant.components.london_underground.config_flow.TubeData",
new=mock_client,
),
):
client = mock_client.return_value
# Load the fixture text
fixture_text = await async_load_fixture(hass, "line_status.json", DOMAIN)
fixture_data = parse_api_response(json.loads(fixture_text))
client.data = fixture_data
yield client

View File

@@ -1,186 +0,0 @@
"""Test the London Underground config flow."""
import asyncio
import pytest
from homeassistant.components.london_underground.const import (
CONF_LINE,
DEFAULT_LINES,
DOMAIN,
)
from homeassistant.config_entries import SOURCE_IMPORT, SOURCE_USER
from homeassistant.core import HomeAssistant
from homeassistant.data_entry_flow import FlowResultType
from homeassistant.helpers import issue_registry as ir
async def test_validate_input_success(
hass: HomeAssistant, mock_setup_entry, mock_london_underground_client
) -> None:
"""Test successful validation of TfL API."""
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": SOURCE_USER}
)
assert result["type"] is FlowResultType.FORM
assert result["errors"] == {}
result = await hass.config_entries.flow.async_configure(
result["flow_id"],
{CONF_LINE: ["Bakerloo", "Central"]},
)
assert result["type"] is FlowResultType.CREATE_ENTRY
assert result["title"] == "London Underground"
assert result["data"] == {}
assert result["options"] == {CONF_LINE: ["Bakerloo", "Central"]}
async def test_options(
hass: HomeAssistant, mock_setup_entry, mock_config_entry
) -> None:
"""Test updating options."""
result = await hass.config_entries.options.async_init(mock_config_entry.entry_id)
assert result["type"] == FlowResultType.FORM
assert result["step_id"] == "init"
result = await hass.config_entries.options.async_configure(
result["flow_id"],
user_input={
CONF_LINE: ["Bakerloo", "Central"],
},
)
assert result["type"] == FlowResultType.CREATE_ENTRY
assert result["data"] == {
CONF_LINE: ["Bakerloo", "Central"],
}
@pytest.mark.parametrize(
("side_effect", "expected_error"),
[
(Exception, "cannot_connect"),
(asyncio.TimeoutError, "timeout_connect"),
],
)
async def test_validate_input_exceptions(
hass: HomeAssistant,
mock_setup_entry,
mock_london_underground_client,
side_effect,
expected_error,
) -> None:
"""Test validation with connection and timeout errors."""
mock_london_underground_client.update.side_effect = side_effect
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": SOURCE_USER}
)
result = await hass.config_entries.flow.async_configure(
result["flow_id"],
{CONF_LINE: ["Bakerloo", "Central"]},
)
assert result["type"] is FlowResultType.FORM
assert result["errors"]["base"] == expected_error
# confirm recovery after error
mock_london_underground_client.update.side_effect = None
result = await hass.config_entries.flow.async_configure(
result["flow_id"],
{},
)
assert result["type"] is FlowResultType.CREATE_ENTRY
assert result["title"] == "London Underground"
assert result["data"] == {}
assert result["options"] == {CONF_LINE: DEFAULT_LINES}
async def test_already_configured(
hass: HomeAssistant,
mock_london_underground_client,
mock_setup_entry,
mock_config_entry,
) -> None:
"""Try (and fail) setting up a config entry when one already exists."""
# Try to start the flow
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": SOURCE_USER}
)
assert result["type"] is FlowResultType.ABORT
assert result["reason"] == "single_instance_allowed"
async def test_yaml_import(
hass: HomeAssistant,
issue_registry: ir.IssueRegistry,
mock_london_underground_client,
caplog: pytest.LogCaptureFixture,
) -> None:
"""Test a YAML sensor is imported and becomes an operational config entry."""
# Set up via YAML which will trigger import and set up the config entry
IMPORT_DATA = {
"platform": "london_underground",
"line": ["Central", "Piccadilly", "Victoria", "Bakerloo", "Northern"],
}
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": SOURCE_IMPORT}, data=IMPORT_DATA
)
assert result["type"] is FlowResultType.CREATE_ENTRY
assert result["title"] == "London Underground"
assert result["data"] == {}
assert result["options"] == {
CONF_LINE: ["Central", "Piccadilly", "Victoria", "Bakerloo", "Northern"]
}
async def test_failed_yaml_import_connection(
hass: HomeAssistant,
issue_registry: ir.IssueRegistry,
mock_london_underground_client,
caplog: pytest.LogCaptureFixture,
) -> None:
"""Test a YAML sensor is imported and becomes an operational config entry."""
# Set up via YAML which will trigger import and set up the config entry
mock_london_underground_client.update.side_effect = asyncio.TimeoutError
IMPORT_DATA = {
"platform": "london_underground",
"line": ["Central", "Piccadilly", "Victoria", "Bakerloo", "Northern"],
}
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": SOURCE_IMPORT}, data=IMPORT_DATA
)
assert result["type"] is FlowResultType.ABORT
assert result["reason"] == "cannot_connect"
async def test_failed_yaml_import_already_configured(
hass: HomeAssistant,
issue_registry: ir.IssueRegistry,
mock_london_underground_client,
caplog: pytest.LogCaptureFixture,
mock_config_entry,
) -> None:
"""Test a YAML sensor is imported and becomes an operational config entry."""
# Set up via YAML which will trigger import and set up the config entry
IMPORT_DATA = {
"platform": "london_underground",
"line": ["Central", "Piccadilly", "Victoria", "Bakerloo", "Northern"],
}
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": SOURCE_IMPORT}, data=IMPORT_DATA
)
assert result["type"] is FlowResultType.ABORT
assert result["reason"] == "single_instance_allowed"

View File

@@ -1,20 +0,0 @@
"""Test the London Underground init."""
from homeassistant.core import HomeAssistant
async def test_reload_entry(
hass: HomeAssistant, mock_london_underground_client, mock_config_entry
) -> None:
"""Test reloading the config entry."""
# Test reloading with updated options
hass.config_entries.async_update_entry(
mock_config_entry,
data={},
options={"line": ["Bakerloo", "Central"]},
)
await hass.async_block_till_done()
# Verify that setup was called for each reload
assert len(mock_london_underground_client.mock_calls) > 0

View File

@@ -1,130 +1,37 @@
"""The tests for the london_underground platform."""
import asyncio
import pytest
from london_tube_status import API_URL
from homeassistant.components.london_underground.const import CONF_LINE, DOMAIN
from homeassistant.config_entries import ConfigEntryState
from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant
from homeassistant.helpers import issue_registry as ir
from homeassistant.core import HomeAssistant
from homeassistant.setup import async_setup_component
from tests.common import async_load_fixture
from tests.test_util.aiohttp import AiohttpClientMocker
VALID_CONFIG = {
"sensor": {"platform": "london_underground", CONF_LINE: ["Metropolitan"]}
}
async def test_valid_state(
hass: HomeAssistant,
issue_registry: ir.IssueRegistry,
mock_london_underground_client,
mock_config_entry,
hass: HomeAssistant, aioclient_mock: AiohttpClientMocker
) -> None:
"""Test operational London Underground sensor using a mock config entry."""
# Ensure the entry is fully loaded
assert mock_config_entry.state is ConfigEntryState.LOADED
"""Test for operational london_underground sensor with proper attributes."""
aioclient_mock.get(
API_URL,
text=await async_load_fixture(hass, "line_status.json", DOMAIN),
)
# Confirm that the expected entity exists and is correct
state = hass.states.get("sensor.london_underground_metropolitan")
assert state is not None
assert state.state == "Good Service"
assert state.attributes == {
"Description": "Nothing to report",
"attribution": "Powered by TfL Open Data",
"friendly_name": "London Underground Metropolitan",
"icon": "mdi:subway",
}
# No YAML warning should be issued, since setup was not via YAML
assert not issue_registry.async_get_issue(DOMAIN, "yaml_deprecated")
async def test_yaml_import(
hass: HomeAssistant,
issue_registry: ir.IssueRegistry,
mock_london_underground_client,
caplog: pytest.LogCaptureFixture,
) -> None:
"""Test a YAML sensor is imported and becomes an operational config entry."""
# Set up via YAML which will trigger import and set up the config entry
VALID_CONFIG = {
"sensor": {
"platform": "london_underground",
CONF_LINE: ["Metropolitan", "London Overground"],
}
}
assert await async_setup_component(hass, "sensor", VALID_CONFIG)
await hass.async_block_till_done()
# Verify the config entry was created
entries = hass.config_entries.async_entries(DOMAIN)
assert len(entries) == 1
# Verify a warning was issued about YAML deprecation
assert issue_registry.async_get_issue(HOMEASSISTANT_DOMAIN, "deprecated_yaml")
# Check the state after setup completes
state = hass.states.get("sensor.london_underground_metropolitan")
state = hass.states.get("sensor.metropolitan")
assert state
assert state.state == "Good Service"
assert state.attributes == {
"Description": "Nothing to report",
"attribution": "Powered by TfL Open Data",
"friendly_name": "London Underground Metropolitan",
"friendly_name": "Metropolitan",
"icon": "mdi:subway",
}
# Since being renamed, London Overground is no longer returned by the API,
# so check that we do not import it and that we warn the user
state = hass.states.get("sensor.london_underground_london_overground")
assert not state
assert any(
"London Overground was removed from the configuration as the line has been divided and renamed"
in record.message
for record in caplog.records
)
async def test_failed_yaml_import(
hass: HomeAssistant,
issue_registry: ir.IssueRegistry,
mock_london_underground_client,
caplog: pytest.LogCaptureFixture,
) -> None:
"""Test a YAML sensor is imported and becomes an operational config entry."""
# Set up via YAML which will trigger import and set up the config entry
mock_london_underground_client.update.side_effect = asyncio.TimeoutError
VALID_CONFIG = {
"sensor": {"platform": "london_underground", CONF_LINE: ["Metropolitan"]}
}
assert await async_setup_component(hass, "sensor", VALID_CONFIG)
await hass.async_block_till_done()
# Verify the config entry was not created
entries = hass.config_entries.async_entries(DOMAIN)
assert len(entries) == 0
# verify no flows still in progress
flows = hass.config_entries.flow.async_progress()
assert len(flows) == 0
assert any(
"Unexpected error trying to connect before importing config" in record.message
for record in caplog.records
)
# Confirm that the import did not happen
assert not any(
"Importing London Underground config from configuration.yaml" in record.message
for record in caplog.records
)
assert not any(
"migrated to a config entry and can be safely removed" in record.message
for record in caplog.records
)
# Verify a warning was issued about YAML not being imported
assert issue_registry.async_get_issue(
DOMAIN, "deprecated_yaml_import_issue_cannot_connect"
)

View File

@@ -45,7 +45,7 @@ def dimmable_light() -> NHCLight:
mock.is_dimmable = True
mock.name = "dimmable light"
mock.suggested_area = "room"
mock.state = 100
mock.state = 255
return mock

View File

@@ -41,7 +41,7 @@
# name: test_entities[light.dimmable_light-state]
StateSnapshot({
'attributes': ReadOnlyDict({
'brightness': 100,
'brightness': 255,
'color_mode': <ColorMode.BRIGHTNESS: 'brightness'>,
'friendly_name': 'dimmable light',
'supported_color_modes': list([

View File

@@ -42,7 +42,7 @@ async def test_entities(
@pytest.mark.parametrize(
("light_id", "data", "set_brightness"),
[
(0, {ATTR_ENTITY_ID: "light.light"}, None),
(0, {ATTR_ENTITY_ID: "light.light"}, 255),
(
1,
{ATTR_ENTITY_ID: "light.dimmable_light", ATTR_BRIGHTNESS: 50},
@@ -119,7 +119,7 @@ async def test_updating(
assert hass.states.get("light.light").state == STATE_OFF
assert hass.states.get("light.dimmable_light").state == STATE_ON
assert hass.states.get("light.dimmable_light").attributes[ATTR_BRIGHTNESS] == 100
assert hass.states.get("light.dimmable_light").attributes[ATTR_BRIGHTNESS] == 255
dimmable_light.state = 204
await find_update_callback(mock_niko_home_control_connection, 2)(204)

View File

@@ -569,7 +569,7 @@ async def test_form_invalid_auth(hass: HomeAssistant, side_effect, error) -> Non
CONF_PROMPT: "Speak like a pirate",
CONF_LLM_HASS_API: ["assist"],
CONF_TEMPERATURE: 0.8,
CONF_CHAT_MODEL: "gpt-5",
CONF_CHAT_MODEL: "o5",
CONF_TOP_P: 0.9,
CONF_MAX_TOKENS: 1000,
CONF_REASONING_EFFORT: "low",
@@ -607,52 +607,6 @@ async def test_form_invalid_auth(hass: HomeAssistant, side_effect, error) -> Non
CONF_CODE_INTERPRETER: False,
},
),
( # Case 5: code interpreter supported to not supported model
{
CONF_RECOMMENDED: False,
CONF_PROMPT: "Speak like a pirate",
CONF_LLM_HASS_API: ["assist"],
CONF_TEMPERATURE: 0.8,
CONF_CHAT_MODEL: "gpt-5",
CONF_TOP_P: 0.9,
CONF_MAX_TOKENS: 1000,
CONF_REASONING_EFFORT: "low",
CONF_CODE_INTERPRETER: True,
CONF_VERBOSITY: "medium",
CONF_WEB_SEARCH: True,
CONF_WEB_SEARCH_CONTEXT_SIZE: "high",
CONF_WEB_SEARCH_USER_LOCATION: False,
},
(
{
CONF_RECOMMENDED: False,
CONF_PROMPT: "Speak like a pirate",
},
{
CONF_TEMPERATURE: 0.8,
CONF_CHAT_MODEL: "gpt-5-pro",
CONF_TOP_P: 0.9,
CONF_MAX_TOKENS: 1000,
},
{
CONF_WEB_SEARCH: True,
CONF_WEB_SEARCH_CONTEXT_SIZE: "high",
CONF_WEB_SEARCH_USER_LOCATION: False,
},
),
{
CONF_RECOMMENDED: False,
CONF_PROMPT: "Speak like a pirate",
CONF_TEMPERATURE: 0.8,
CONF_CHAT_MODEL: "gpt-5-pro",
CONF_TOP_P: 0.9,
CONF_MAX_TOKENS: 1000,
CONF_VERBOSITY: "medium",
CONF_WEB_SEARCH: True,
CONF_WEB_SEARCH_CONTEXT_SIZE: "high",
CONF_WEB_SEARCH_USER_LOCATION: False,
},
),
],
)
async def test_subentry_switching(

View File

@@ -80,9 +80,7 @@ async def test_low_battery(hass: HomeAssistant) -> None:
async def test_initial_states(hass: HomeAssistant) -> None:
"""Test plant initialises attributes if sensor already exists."""
hass.states.async_set(
MOISTURE_ENTITY,
5,
{ATTR_UNIT_OF_MEASUREMENT: UnitOfConductivity.MICROSIEMENS_PER_CM},
MOISTURE_ENTITY, 5, {ATTR_UNIT_OF_MEASUREMENT: UnitOfConductivity.MICROSIEMENS}
)
plant_name = "some_plant"
assert await async_setup_component(
@@ -103,9 +101,7 @@ async def test_update_states(hass: HomeAssistant) -> None:
hass, plant.DOMAIN, {plant.DOMAIN: {plant_name: GOOD_CONFIG}}
)
hass.states.async_set(
MOISTURE_ENTITY,
5,
{ATTR_UNIT_OF_MEASUREMENT: UnitOfConductivity.MICROSIEMENS_PER_CM},
MOISTURE_ENTITY, 5, {ATTR_UNIT_OF_MEASUREMENT: UnitOfConductivity.MICROSIEMENS}
)
await hass.async_block_till_done()
state = hass.states.get(f"plant.{plant_name}")
@@ -125,7 +121,7 @@ async def test_unavailable_state(hass: HomeAssistant) -> None:
hass.states.async_set(
MOISTURE_ENTITY,
STATE_UNAVAILABLE,
{ATTR_UNIT_OF_MEASUREMENT: UnitOfConductivity.MICROSIEMENS_PER_CM},
{ATTR_UNIT_OF_MEASUREMENT: UnitOfConductivity.MICROSIEMENS},
)
await hass.async_block_till_done()
state = hass.states.get(f"plant.{plant_name}")
@@ -143,9 +139,7 @@ async def test_state_problem_if_unavailable(hass: HomeAssistant) -> None:
hass, plant.DOMAIN, {plant.DOMAIN: {plant_name: GOOD_CONFIG}}
)
hass.states.async_set(
MOISTURE_ENTITY,
42,
{ATTR_UNIT_OF_MEASUREMENT: UnitOfConductivity.MICROSIEMENS_PER_CM},
MOISTURE_ENTITY, 42, {ATTR_UNIT_OF_MEASUREMENT: UnitOfConductivity.MICROSIEMENS}
)
await hass.async_block_till_done()
state = hass.states.get(f"plant.{plant_name}")
@@ -154,7 +148,7 @@ async def test_state_problem_if_unavailable(hass: HomeAssistant) -> None:
hass.states.async_set(
MOISTURE_ENTITY,
STATE_UNAVAILABLE,
{ATTR_UNIT_OF_MEASUREMENT: UnitOfConductivity.MICROSIEMENS_PER_CM},
{ATTR_UNIT_OF_MEASUREMENT: UnitOfConductivity.MICROSIEMENS},
)
await hass.async_block_till_done()
state = hass.states.get(f"plant.{plant_name}")

View File

@@ -1,241 +0,0 @@
# serializer version: 1
# name: test_all_entities[sensor.focused_einstein_image-entry]
EntityRegistryEntrySnapshot({
'aliases': set({
}),
'area_id': None,
'capabilities': None,
'config_entry_id': <ANY>,
'config_subentry_id': <ANY>,
'device_class': None,
'device_id': <ANY>,
'disabled_by': None,
'domain': 'sensor',
'entity_category': None,
'entity_id': 'sensor.focused_einstein_image',
'has_entity_name': True,
'hidden_by': None,
'icon': None,
'id': <ANY>,
'labels': set({
}),
'name': None,
'options': dict({
}),
'original_device_class': None,
'original_icon': None,
'original_name': 'Image',
'platform': 'portainer',
'previous_unique_id': None,
'suggested_object_id': None,
'supported_features': 0,
'translation_key': 'image',
'unique_id': 'portainer_test_entry_123_focused_einstein_image',
'unit_of_measurement': None,
})
# ---
# name: test_all_entities[sensor.focused_einstein_image-state]
StateSnapshot({
'attributes': ReadOnlyDict({
'friendly_name': 'focused_einstein Image',
}),
'context': <ANY>,
'entity_id': 'sensor.focused_einstein_image',
'last_changed': <ANY>,
'last_reported': <ANY>,
'last_updated': <ANY>,
'state': 'docker.io/library/redis:7',
})
# ---
# name: test_all_entities[sensor.funny_chatelet_image-entry]
EntityRegistryEntrySnapshot({
'aliases': set({
}),
'area_id': None,
'capabilities': None,
'config_entry_id': <ANY>,
'config_subentry_id': <ANY>,
'device_class': None,
'device_id': <ANY>,
'disabled_by': None,
'domain': 'sensor',
'entity_category': None,
'entity_id': 'sensor.funny_chatelet_image',
'has_entity_name': True,
'hidden_by': None,
'icon': None,
'id': <ANY>,
'labels': set({
}),
'name': None,
'options': dict({
}),
'original_device_class': None,
'original_icon': None,
'original_name': 'Image',
'platform': 'portainer',
'previous_unique_id': None,
'suggested_object_id': None,
'supported_features': 0,
'translation_key': 'image',
'unique_id': 'portainer_test_entry_123_funny_chatelet_image',
'unit_of_measurement': None,
})
# ---
# name: test_all_entities[sensor.funny_chatelet_image-state]
StateSnapshot({
'attributes': ReadOnlyDict({
'friendly_name': 'funny_chatelet Image',
}),
'context': <ANY>,
'entity_id': 'sensor.funny_chatelet_image',
'last_changed': <ANY>,
'last_reported': <ANY>,
'last_updated': <ANY>,
'state': 'docker.io/library/ubuntu:latest',
})
# ---
# name: test_all_entities[sensor.practical_morse_image-entry]
EntityRegistryEntrySnapshot({
'aliases': set({
}),
'area_id': None,
'capabilities': None,
'config_entry_id': <ANY>,
'config_subentry_id': <ANY>,
'device_class': None,
'device_id': <ANY>,
'disabled_by': None,
'domain': 'sensor',
'entity_category': None,
'entity_id': 'sensor.practical_morse_image',
'has_entity_name': True,
'hidden_by': None,
'icon': None,
'id': <ANY>,
'labels': set({
}),
'name': None,
'options': dict({
}),
'original_device_class': None,
'original_icon': None,
'original_name': 'Image',
'platform': 'portainer',
'previous_unique_id': None,
'suggested_object_id': None,
'supported_features': 0,
'translation_key': 'image',
'unique_id': 'portainer_test_entry_123_practical_morse_image',
'unit_of_measurement': None,
})
# ---
# name: test_all_entities[sensor.practical_morse_image-state]
StateSnapshot({
'attributes': ReadOnlyDict({
'friendly_name': 'practical_morse Image',
}),
'context': <ANY>,
'entity_id': 'sensor.practical_morse_image',
'last_changed': <ANY>,
'last_reported': <ANY>,
'last_updated': <ANY>,
'state': 'docker.io/library/python:3.13-slim',
})
# ---
# name: test_all_entities[sensor.serene_banach_image-entry]
EntityRegistryEntrySnapshot({
'aliases': set({
}),
'area_id': None,
'capabilities': None,
'config_entry_id': <ANY>,
'config_subentry_id': <ANY>,
'device_class': None,
'device_id': <ANY>,
'disabled_by': None,
'domain': 'sensor',
'entity_category': None,
'entity_id': 'sensor.serene_banach_image',
'has_entity_name': True,
'hidden_by': None,
'icon': None,
'id': <ANY>,
'labels': set({
}),
'name': None,
'options': dict({
}),
'original_device_class': None,
'original_icon': None,
'original_name': 'Image',
'platform': 'portainer',
'previous_unique_id': None,
'suggested_object_id': None,
'supported_features': 0,
'translation_key': 'image',
'unique_id': 'portainer_test_entry_123_serene_banach_image',
'unit_of_measurement': None,
})
# ---
# name: test_all_entities[sensor.serene_banach_image-state]
StateSnapshot({
'attributes': ReadOnlyDict({
'friendly_name': 'serene_banach Image',
}),
'context': <ANY>,
'entity_id': 'sensor.serene_banach_image',
'last_changed': <ANY>,
'last_reported': <ANY>,
'last_updated': <ANY>,
'state': 'docker.io/library/nginx:latest',
})
# ---
# name: test_all_entities[sensor.stoic_turing_image-entry]
EntityRegistryEntrySnapshot({
'aliases': set({
}),
'area_id': None,
'capabilities': None,
'config_entry_id': <ANY>,
'config_subentry_id': <ANY>,
'device_class': None,
'device_id': <ANY>,
'disabled_by': None,
'domain': 'sensor',
'entity_category': None,
'entity_id': 'sensor.stoic_turing_image',
'has_entity_name': True,
'hidden_by': None,
'icon': None,
'id': <ANY>,
'labels': set({
}),
'name': None,
'options': dict({
}),
'original_device_class': None,
'original_icon': None,
'original_name': 'Image',
'platform': 'portainer',
'previous_unique_id': None,
'suggested_object_id': None,
'supported_features': 0,
'translation_key': 'image',
'unique_id': 'portainer_test_entry_123_stoic_turing_image',
'unit_of_measurement': None,
})
# ---
# name: test_all_entities[sensor.stoic_turing_image-state]
StateSnapshot({
'attributes': ReadOnlyDict({
'friendly_name': 'stoic_turing Image',
}),
'context': <ANY>,
'entity_id': 'sensor.stoic_turing_image',
'last_changed': <ANY>,
'last_reported': <ANY>,
'last_updated': <ANY>,
'state': 'docker.io/library/postgres:15',
})
# ---

View File

@@ -1,32 +0,0 @@
"""Tests for the Portainer sensor platform."""
from unittest.mock import patch
import pytest
from syrupy.assertion import SnapshotAssertion
from homeassistant.const import Platform
from homeassistant.core import HomeAssistant
from homeassistant.helpers import entity_registry as er
from . import setup_integration
from tests.common import MockConfigEntry, snapshot_platform
@pytest.mark.usefixtures("mock_portainer_client")
async def test_all_entities(
hass: HomeAssistant,
snapshot: SnapshotAssertion,
mock_config_entry: MockConfigEntry,
entity_registry: er.EntityRegistry,
) -> None:
"""Test all entities."""
with patch(
"homeassistant.components.portainer._PLATFORMS",
[Platform.SENSOR],
):
await setup_integration(hass, mock_config_entry)
await snapshot_platform(
hass, entity_registry, snapshot, mock_config_entry.entry_id
)

View File

@@ -1785,7 +1785,7 @@ async def test_unit_conversion_priority_suggested_unit_change_2(
UnitOfBloodGlucoseConcentration.MILLIGRAMS_PER_DECILITER,
0,
),
(SensorDeviceClass.CONDUCTIVITY, UnitOfConductivity.MICROSIEMENS_PER_CM, 1),
(SensorDeviceClass.CONDUCTIVITY, UnitOfConductivity.MICROSIEMENS, 1),
(SensorDeviceClass.CURRENT, UnitOfElectricCurrent.MILLIAMPERE, 0),
(SensorDeviceClass.DATA_RATE, UnitOfDataRate.KILOBITS_PER_SECOND, 0),
(SensorDeviceClass.DATA_SIZE, UnitOfInformation.KILOBITS, 0),

View File

@@ -127,7 +127,7 @@
'suggested_object_id': None,
'supported_features': 0,
'translation_key': None,
'unique_id': '123456789ABC-button:200-button_generic',
'unique_id': '123456789ABC-button:200',
'unit_of_measurement': None,
})
# ---
@@ -175,7 +175,7 @@
'suggested_object_id': None,
'supported_features': 0,
'translation_key': None,
'unique_id': '123456789ABC-button:200-button_generic',
'unique_id': '123456789ABC-button:200',
'unit_of_measurement': None,
})
# ---

View File

@@ -115,119 +115,3 @@
'state': '0',
})
# ---
# name: test_cury_number_entity[number.test_name_left_slot_intensity-entry]
EntityRegistryEntrySnapshot({
'aliases': set({
}),
'area_id': None,
'capabilities': dict({
'max': 100,
'min': 0,
'mode': <NumberMode.SLIDER: 'slider'>,
'step': 1,
}),
'config_entry_id': <ANY>,
'config_subentry_id': <ANY>,
'device_class': None,
'device_id': <ANY>,
'disabled_by': None,
'domain': 'number',
'entity_category': None,
'entity_id': 'number.test_name_left_slot_intensity',
'has_entity_name': True,
'hidden_by': None,
'icon': None,
'id': <ANY>,
'labels': set({
}),
'name': None,
'options': dict({
}),
'original_device_class': None,
'original_icon': None,
'original_name': 'Left slot intensity',
'platform': 'shelly',
'previous_unique_id': None,
'suggested_object_id': None,
'supported_features': 0,
'translation_key': None,
'unique_id': '123456789ABC-cury:0-left_slot_intensity',
'unit_of_measurement': '%',
})
# ---
# name: test_cury_number_entity[number.test_name_left_slot_intensity-state]
StateSnapshot({
'attributes': ReadOnlyDict({
'friendly_name': 'Test name Left slot intensity',
'max': 100,
'min': 0,
'mode': <NumberMode.SLIDER: 'slider'>,
'step': 1,
'unit_of_measurement': '%',
}),
'context': <ANY>,
'entity_id': 'number.test_name_left_slot_intensity',
'last_changed': <ANY>,
'last_reported': <ANY>,
'last_updated': <ANY>,
'state': '70',
})
# ---
# name: test_cury_number_entity[number.test_name_right_slot_intensity-entry]
EntityRegistryEntrySnapshot({
'aliases': set({
}),
'area_id': None,
'capabilities': dict({
'max': 100,
'min': 0,
'mode': <NumberMode.SLIDER: 'slider'>,
'step': 1,
}),
'config_entry_id': <ANY>,
'config_subentry_id': <ANY>,
'device_class': None,
'device_id': <ANY>,
'disabled_by': None,
'domain': 'number',
'entity_category': None,
'entity_id': 'number.test_name_right_slot_intensity',
'has_entity_name': True,
'hidden_by': None,
'icon': None,
'id': <ANY>,
'labels': set({
}),
'name': None,
'options': dict({
}),
'original_device_class': None,
'original_icon': None,
'original_name': 'Right slot intensity',
'platform': 'shelly',
'previous_unique_id': None,
'suggested_object_id': None,
'supported_features': 0,
'translation_key': None,
'unique_id': '123456789ABC-cury:0-right_slot_intensity',
'unit_of_measurement': '%',
})
# ---
# name: test_cury_number_entity[number.test_name_right_slot_intensity-state]
StateSnapshot({
'attributes': ReadOnlyDict({
'friendly_name': 'Test name Right slot intensity',
'max': 100,
'min': 0,
'mode': <NumberMode.SLIDER: 'slider'>,
'step': 1,
'unit_of_measurement': '%',
}),
'context': <ANY>,
'entity_id': 'number.test_name_right_slot_intensity',
'last_changed': <ANY>,
'last_reported': <ANY>,
'last_updated': <ANY>,
'state': '70',
})
# ---

View File

@@ -157,206 +157,6 @@
'state': '0',
})
# ---
# name: test_cury_sensor_entity[sensor.test_name_left_slot_level-entry]
EntityRegistryEntrySnapshot({
'aliases': set({
}),
'area_id': None,
'capabilities': dict({
'state_class': <SensorStateClass.MEASUREMENT: 'measurement'>,
}),
'config_entry_id': <ANY>,
'config_subentry_id': <ANY>,
'device_class': None,
'device_id': <ANY>,
'disabled_by': None,
'domain': 'sensor',
'entity_category': <EntityCategory.DIAGNOSTIC: 'diagnostic'>,
'entity_id': 'sensor.test_name_left_slot_level',
'has_entity_name': True,
'hidden_by': None,
'icon': None,
'id': <ANY>,
'labels': set({
}),
'name': None,
'options': dict({
}),
'original_device_class': None,
'original_icon': None,
'original_name': 'Left slot level',
'platform': 'shelly',
'previous_unique_id': None,
'suggested_object_id': None,
'supported_features': 0,
'translation_key': 'vial_level',
'unique_id': '123456789ABC-cury:0-cury_left_level',
'unit_of_measurement': '%',
})
# ---
# name: test_cury_sensor_entity[sensor.test_name_left_slot_level-state]
StateSnapshot({
'attributes': ReadOnlyDict({
'friendly_name': 'Test name Left slot level',
'state_class': <SensorStateClass.MEASUREMENT: 'measurement'>,
'unit_of_measurement': '%',
}),
'context': <ANY>,
'entity_id': 'sensor.test_name_left_slot_level',
'last_changed': <ANY>,
'last_reported': <ANY>,
'last_updated': <ANY>,
'state': '27',
})
# ---
# name: test_cury_sensor_entity[sensor.test_name_left_slot_vial-entry]
EntityRegistryEntrySnapshot({
'aliases': set({
}),
'area_id': None,
'capabilities': None,
'config_entry_id': <ANY>,
'config_subentry_id': <ANY>,
'device_class': None,
'device_id': <ANY>,
'disabled_by': None,
'domain': 'sensor',
'entity_category': <EntityCategory.DIAGNOSTIC: 'diagnostic'>,
'entity_id': 'sensor.test_name_left_slot_vial',
'has_entity_name': True,
'hidden_by': None,
'icon': None,
'id': <ANY>,
'labels': set({
}),
'name': None,
'options': dict({
}),
'original_device_class': None,
'original_icon': None,
'original_name': 'Left slot vial',
'platform': 'shelly',
'previous_unique_id': None,
'suggested_object_id': None,
'supported_features': 0,
'translation_key': 'vial_name',
'unique_id': '123456789ABC-cury:0-cury_left_vial',
'unit_of_measurement': None,
})
# ---
# name: test_cury_sensor_entity[sensor.test_name_left_slot_vial-state]
StateSnapshot({
'attributes': ReadOnlyDict({
'friendly_name': 'Test name Left slot vial',
}),
'context': <ANY>,
'entity_id': 'sensor.test_name_left_slot_vial',
'last_changed': <ANY>,
'last_reported': <ANY>,
'last_updated': <ANY>,
'state': 'Forest Dream',
})
# ---
# name: test_cury_sensor_entity[sensor.test_name_right_slot_level-entry]
EntityRegistryEntrySnapshot({
'aliases': set({
}),
'area_id': None,
'capabilities': dict({
'state_class': <SensorStateClass.MEASUREMENT: 'measurement'>,
}),
'config_entry_id': <ANY>,
'config_subentry_id': <ANY>,
'device_class': None,
'device_id': <ANY>,
'disabled_by': None,
'domain': 'sensor',
'entity_category': <EntityCategory.DIAGNOSTIC: 'diagnostic'>,
'entity_id': 'sensor.test_name_right_slot_level',
'has_entity_name': True,
'hidden_by': None,
'icon': None,
'id': <ANY>,
'labels': set({
}),
'name': None,
'options': dict({
}),
'original_device_class': None,
'original_icon': None,
'original_name': 'Right slot level',
'platform': 'shelly',
'previous_unique_id': None,
'suggested_object_id': None,
'supported_features': 0,
'translation_key': 'vial_level',
'unique_id': '123456789ABC-cury:0-cury_right_level',
'unit_of_measurement': '%',
})
# ---
# name: test_cury_sensor_entity[sensor.test_name_right_slot_level-state]
StateSnapshot({
'attributes': ReadOnlyDict({
'friendly_name': 'Test name Right slot level',
'state_class': <SensorStateClass.MEASUREMENT: 'measurement'>,
'unit_of_measurement': '%',
}),
'context': <ANY>,
'entity_id': 'sensor.test_name_right_slot_level',
'last_changed': <ANY>,
'last_reported': <ANY>,
'last_updated': <ANY>,
'state': '84',
})
# ---
# name: test_cury_sensor_entity[sensor.test_name_right_slot_vial-entry]
EntityRegistryEntrySnapshot({
'aliases': set({
}),
'area_id': None,
'capabilities': None,
'config_entry_id': <ANY>,
'config_subentry_id': <ANY>,
'device_class': None,
'device_id': <ANY>,
'disabled_by': None,
'domain': 'sensor',
'entity_category': <EntityCategory.DIAGNOSTIC: 'diagnostic'>,
'entity_id': 'sensor.test_name_right_slot_vial',
'has_entity_name': True,
'hidden_by': None,
'icon': None,
'id': <ANY>,
'labels': set({
}),
'name': None,
'options': dict({
}),
'original_device_class': None,
'original_icon': None,
'original_name': 'Right slot vial',
'platform': 'shelly',
'previous_unique_id': None,
'suggested_object_id': None,
'supported_features': 0,
'translation_key': 'vial_name',
'unique_id': '123456789ABC-cury:0-cury_right_vial',
'unit_of_measurement': None,
})
# ---
# name: test_cury_sensor_entity[sensor.test_name_right_slot_vial-state]
StateSnapshot({
'attributes': ReadOnlyDict({
'friendly_name': 'Test name Right slot vial',
}),
'context': <ANY>,
'entity_id': 'sensor.test_name_right_slot_vial',
'last_changed': <ANY>,
'last_reported': <ANY>,
'last_updated': <ANY>,
'state': 'Velvet Rose',
})
# ---
# name: test_rpc_shelly_ev_sensors[sensor.test_name_charger_state-entry]
EntityRegistryEntrySnapshot({
'aliases': set({

View File

@@ -1,97 +0,0 @@
# serializer version: 1
# name: test_cury_switch_entity[switch.test_name_left_slot-entry]
EntityRegistryEntrySnapshot({
'aliases': set({
}),
'area_id': None,
'capabilities': None,
'config_entry_id': <ANY>,
'config_subentry_id': <ANY>,
'device_class': None,
'device_id': <ANY>,
'disabled_by': None,
'domain': 'switch',
'entity_category': None,
'entity_id': 'switch.test_name_left_slot',
'has_entity_name': True,
'hidden_by': None,
'icon': None,
'id': <ANY>,
'labels': set({
}),
'name': None,
'options': dict({
}),
'original_device_class': None,
'original_icon': None,
'original_name': 'Left slot',
'platform': 'shelly',
'previous_unique_id': None,
'suggested_object_id': None,
'supported_features': 0,
'translation_key': 'cury_slot',
'unique_id': '123456789ABC-cury:0-cury_left',
'unit_of_measurement': None,
})
# ---
# name: test_cury_switch_entity[switch.test_name_left_slot-state]
StateSnapshot({
'attributes': ReadOnlyDict({
'friendly_name': 'Test name Left slot',
}),
'context': <ANY>,
'entity_id': 'switch.test_name_left_slot',
'last_changed': <ANY>,
'last_reported': <ANY>,
'last_updated': <ANY>,
'state': 'on',
})
# ---
# name: test_cury_switch_entity[switch.test_name_right_slot-entry]
EntityRegistryEntrySnapshot({
'aliases': set({
}),
'area_id': None,
'capabilities': None,
'config_entry_id': <ANY>,
'config_subentry_id': <ANY>,
'device_class': None,
'device_id': <ANY>,
'disabled_by': None,
'domain': 'switch',
'entity_category': None,
'entity_id': 'switch.test_name_right_slot',
'has_entity_name': True,
'hidden_by': None,
'icon': None,
'id': <ANY>,
'labels': set({
}),
'name': None,
'options': dict({
}),
'original_device_class': None,
'original_icon': None,
'original_name': 'Right slot',
'platform': 'shelly',
'previous_unique_id': None,
'suggested_object_id': None,
'supported_features': 0,
'translation_key': 'cury_slot',
'unique_id': '123456789ABC-cury:0-cury_right',
'unit_of_measurement': None,
})
# ---
# name: test_cury_switch_entity[switch.test_name_right_slot-state]
StateSnapshot({
'attributes': ReadOnlyDict({
'friendly_name': 'Test name Right slot',
}),
'context': <ANY>,
'entity_id': 'switch.test_name_right_slot',
'last_changed': <ANY>,
'last_reported': <ANY>,
'last_updated': <ANY>,
'state': 'off',
})
# ---

View File

@@ -9,7 +9,7 @@ import pytest
from syrupy.assertion import SnapshotAssertion
from homeassistant.components.button import DOMAIN as BUTTON_DOMAIN, SERVICE_PRESS
from homeassistant.components.shelly.const import DOMAIN, MODEL_FRANKEVER_WATER_VALVE
from homeassistant.components.shelly.const import DOMAIN
from homeassistant.config_entries import SOURCE_REAUTH, ConfigEntryState
from homeassistant.const import ATTR_ENTITY_ID, STATE_UNKNOWN, Platform
from homeassistant.core import HomeAssistant
@@ -17,13 +17,7 @@ from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers.device_registry import DeviceRegistry
from homeassistant.helpers.entity_registry import EntityRegistry
from . import (
MOCK_MAC,
init_integration,
patch_platforms,
register_device,
register_entity,
)
from . import init_integration, patch_platforms, register_device, register_entity
@pytest.fixture(autouse=True)
@@ -423,56 +417,3 @@ async def test_migrate_unique_id_blu_trv(
assert entity_entry.unique_id == "F8447725F0DD-blutrv:200-calibrate"
assert "Migrating unique_id for button.trv_name_calibrate" in caplog.text
@pytest.mark.parametrize(
("old_id", "new_id", "role"),
[
("button", "button_generic", None),
("button", "button_open", "open"),
("button", "button_close", "close"),
],
)
async def test_migrate_unique_id_virtual_components_roles(
hass: HomeAssistant,
mock_rpc_device: Mock,
entity_registry: EntityRegistry,
caplog: pytest.LogCaptureFixture,
monkeypatch: pytest.MonkeyPatch,
old_id: str,
new_id: str,
role: str | None,
) -> None:
"""Test migration of unique_id for virtual components to include role."""
entry = await init_integration(
hass, 3, model=MODEL_FRANKEVER_WATER_VALVE, skip_setup=True
)
old_unique_id = f"{MOCK_MAC}-{old_id}:200"
new_unique_id = f"{old_unique_id}-{new_id}"
config = deepcopy(mock_rpc_device.config)
if role:
config[f"{old_id}:200"] = {
"role": role,
}
else:
config[f"{old_id}:200"] = {}
monkeypatch.setattr(mock_rpc_device, "config", config)
entity = entity_registry.async_get_or_create(
suggested_object_id="test_name_test_button",
disabled_by=None,
domain=BUTTON_DOMAIN,
platform=DOMAIN,
unique_id=old_unique_id,
config_entry=entry,
)
assert entity.unique_id == old_unique_id
await hass.config_entries.async_setup(entry.entry_id)
await hass.async_block_till_done()
entity_entry = entity_registry.async_get("button.test_name_test_button")
assert entity_entry
assert entity_entry.unique_id == new_unique_id
assert "Migrating unique_id for button.test_name_test_button" in caplog.text

View File

@@ -568,50 +568,3 @@ async def test_blu_trv_number_reauth_error(
assert "context" in flow
assert flow["context"].get("source") == SOURCE_REAUTH
assert flow["context"].get("entry_id") == entry.entry_id
async def test_cury_number_entity(
hass: HomeAssistant,
mock_rpc_device: Mock,
entity_registry: EntityRegistry,
snapshot: SnapshotAssertion,
monkeypatch: pytest.MonkeyPatch,
) -> None:
"""Test number entities for cury component."""
status = {
"cury:0": {
"id": 0,
"slots": {
"left": {
"intensity": 70,
"on": True,
"vial": {"level": 27, "name": "Forest Dream"},
},
"right": {
"intensity": 70,
"on": False,
"vial": {"level": 84, "name": "Velvet Rose"},
},
},
}
}
monkeypatch.setattr(mock_rpc_device, "status", status)
await init_integration(hass, 3)
for entity in ("left_slot_intensity", "right_slot_intensity"):
entity_id = f"{NUMBER_DOMAIN}.test_name_{entity}"
state = hass.states.get(entity_id)
assert state == snapshot(name=f"{entity_id}-state")
entry = entity_registry.async_get(entity_id)
assert entry == snapshot(name=f"{entity_id}-entry")
await hass.services.async_call(
NUMBER_DOMAIN,
SERVICE_SET_VALUE,
{ATTR_ENTITY_ID: "number.test_name_left_slot_intensity", ATTR_VALUE: 80.0},
blocking=True,
)
mock_rpc_device.mock_update()
mock_rpc_device.cury_set.assert_called_once_with(0, slot="left", intensity=80)

View File

@@ -1949,46 +1949,3 @@ async def test_rpc_pm1_energy_consumed_sensor_non_float_value(
assert (state := hass.states.get(entity_id))
assert state.state == STATE_UNKNOWN
async def test_cury_sensor_entity(
hass: HomeAssistant,
mock_rpc_device: Mock,
entity_registry: EntityRegistry,
snapshot: SnapshotAssertion,
monkeypatch: pytest.MonkeyPatch,
) -> None:
"""Test sensor entities for cury component."""
status = {
"cury:0": {
"id": 0,
"slots": {
"left": {
"intensity": 70,
"on": True,
"vial": {"level": 27, "name": "Forest Dream"},
},
"right": {
"intensity": 70,
"on": False,
"vial": {"level": 84, "name": "Velvet Rose"},
},
},
}
}
monkeypatch.setattr(mock_rpc_device, "status", status)
await init_integration(hass, 3)
for entity in (
"left_slot_level",
"right_slot_level",
"left_slot_vial",
"right_slot_vial",
):
entity_id = f"{SENSOR_DOMAIN}.test_name_{entity}"
state = hass.states.get(entity_id)
assert state == snapshot(name=f"{entity_id}-state")
entry = entity_registry.async_get(entity_id)
assert entry == snapshot(name=f"{entity_id}-entry")

View File

@@ -8,7 +8,6 @@ from aioshelly.const import MODEL_1PM, MODEL_GAS, MODEL_MOTION
from aioshelly.exceptions import DeviceConnectionError, InvalidAuthError, RpcCallError
from freezegun.api import FrozenDateTimeFactory
import pytest
from syrupy.assertion import SnapshotAssertion
from homeassistant.components.climate import DOMAIN as CLIMATE_DOMAIN
from homeassistant.components.shelly.const import (
@@ -25,7 +24,6 @@ from homeassistant.const import (
SERVICE_TURN_ON,
STATE_OFF,
STATE_ON,
STATE_UNAVAILABLE,
STATE_UNKNOWN,
Platform,
)
@@ -37,7 +35,6 @@ from homeassistant.helpers.entity_registry import EntityRegistry
from . import (
init_integration,
inject_rpc_device_event,
mutate_rpc_device_status,
patch_platforms,
register_device,
register_entity,
@@ -832,119 +829,3 @@ async def test_rpc_device_script_switch(
assert (state := hass.states.get(entity_id))
assert state.state == STATE_ON
mock_rpc_device.script_start.assert_called_once_with(1)
async def test_cury_switch_entity(
hass: HomeAssistant,
mock_rpc_device: Mock,
entity_registry: EntityRegistry,
snapshot: SnapshotAssertion,
monkeypatch: pytest.MonkeyPatch,
) -> None:
"""Test switch entities for cury component."""
status = {
"cury:0": {
"id": 0,
"slots": {
"left": {
"intensity": 70,
"on": True,
"vial": {"level": 27, "name": "Forest Dream"},
},
"right": {
"intensity": 70,
"on": False,
"vial": {"level": 84, "name": "Velvet Rose"},
},
},
}
}
monkeypatch.setattr(mock_rpc_device, "status", status)
await init_integration(hass, 3)
for entity in ("left_slot", "right_slot"):
entity_id = f"{SWITCH_DOMAIN}.test_name_{entity}"
state = hass.states.get(entity_id)
assert state == snapshot(name=f"{entity_id}-state")
entry = entity_registry.async_get(entity_id)
assert entry == snapshot(name=f"{entity_id}-entry")
await hass.services.async_call(
SWITCH_DOMAIN,
SERVICE_TURN_OFF,
{ATTR_ENTITY_ID: "switch.test_name_left_slot"},
blocking=True,
)
mock_rpc_device.mock_update()
mock_rpc_device.cury_set.assert_called_once_with(0, "left", False)
await hass.services.async_call(
SWITCH_DOMAIN,
SERVICE_TURN_ON,
{ATTR_ENTITY_ID: "switch.test_name_right_slot"},
blocking=True,
)
mock_rpc_device.mock_update()
mock_rpc_device.cury_set.assert_called_with(0, "right", True)
async def test_cury_switch_availability(
hass: HomeAssistant,
mock_rpc_device: Mock,
monkeypatch: pytest.MonkeyPatch,
) -> None:
"""Test availability of switch entities for cury component."""
slots = {
"left": {
"intensity": 70,
"on": True,
"vial": {"level": 27, "name": "Forest Dream"},
},
"right": {
"intensity": 70,
"on": False,
"vial": {"level": 84, "name": "Velvet Rose"},
},
}
status = {"cury:0": {"id": 0, "slots": slots}}
monkeypatch.setattr(mock_rpc_device, "status", status)
await init_integration(hass, 3)
entity_id = f"{SWITCH_DOMAIN}.test_name_left_slot"
assert (state := hass.states.get(entity_id))
assert state.state == STATE_ON
slots["left"]["vial"]["level"] = -1
mutate_rpc_device_status(monkeypatch, mock_rpc_device, "cury:0", "slots", slots)
mock_rpc_device.mock_update()
assert (state := hass.states.get(entity_id))
assert state.state == STATE_UNAVAILABLE
slots["left"].pop("vial")
mutate_rpc_device_status(monkeypatch, mock_rpc_device, "cury:0", "slots", slots)
mock_rpc_device.mock_update()
assert (state := hass.states.get(entity_id))
assert state.state == STATE_UNAVAILABLE
slots["left"] = None
mutate_rpc_device_status(monkeypatch, mock_rpc_device, "cury:0", "slots", slots)
mock_rpc_device.mock_update()
assert (state := hass.states.get(entity_id))
assert state.state == STATE_UNAVAILABLE
slots["left"] = {
"intensity": 70,
"on": True,
"vial": {"level": 27, "name": "Forest Dream"},
}
mutate_rpc_device_status(monkeypatch, mock_rpc_device, "cury:0", "slots", slots)
mock_rpc_device.mock_update()
assert (state := hass.states.get(entity_id))
assert state.state == STATE_ON

View File

@@ -6,7 +6,7 @@ from typing import Any
from unittest.mock import AsyncMock, patch
import pytest
from telegram import Bot, Chat, ChatFullInfo, Message, User, WebhookInfo
from telegram import Bot, Chat, ChatFullInfo, Message, User
from telegram.constants import AccentColor, ChatType
from homeassistant.components.telegram_bot import (
@@ -74,22 +74,11 @@ def mock_register_webhook() -> Generator[None]:
"""Mock calls made by telegram_bot when (de)registering webhook."""
with (
patch(
"homeassistant.components.telegram_bot.webhooks.Bot.delete_webhook",
AsyncMock(),
"homeassistant.components.telegram_bot.webhooks.PushBot.register_webhook",
return_value=True,
),
patch(
"homeassistant.components.telegram_bot.webhooks.Bot.get_webhook_info",
AsyncMock(
return_value=WebhookInfo(
url="mock url",
last_error_date=datetime.now(),
has_custom_certificate=False,
pending_update_count=0,
)
),
),
patch(
"homeassistant.components.telegram_bot.webhooks.Bot.set_webhook",
"homeassistant.components.telegram_bot.webhooks.PushBot.deregister_webhook",
return_value=True,
),
):
@@ -124,6 +113,9 @@ def mock_external_calls() -> Generator[None]:
super().__init__(*args, **kwargs)
self._bot_user = test_user
async def delete_webhook(self) -> bool:
return True
with (
patch("homeassistant.components.telegram_bot.bot.Bot", BotMock),
patch.object(BotMock, "get_chat", return_value=test_chat),

View File

@@ -1,11 +1,12 @@
"""Tests for webhooks."""
from datetime import datetime
from ipaddress import IPv4Network
from unittest.mock import patch
from unittest.mock import AsyncMock, patch
from telegram import WebhookInfo
from telegram.error import TimedOut
from homeassistant.components.telegram_bot.const import DOMAIN
from homeassistant.components.telegram_bot.webhooks import TELEGRAM_WEBHOOK_URL
from homeassistant.config_entries import ConfigEntryState
from homeassistant.core import HomeAssistant
@@ -18,61 +19,91 @@ async def test_set_webhooks_failed(
hass: HomeAssistant,
mock_webhooks_config_entry: MockConfigEntry,
mock_external_calls: None,
mock_register_webhook: None,
mock_generate_secret_token,
) -> None:
"""Test set webhooks failed."""
mock_webhooks_config_entry.add_to_hass(hass)
with (
patch(
"homeassistant.components.telegram_bot.webhooks.secrets.choice",
return_value="DEADBEEF12345678DEADBEEF87654321",
),
"homeassistant.components.telegram_bot.webhooks.Bot.get_webhook_info",
AsyncMock(
return_value=WebhookInfo(
url="mock url",
last_error_date=datetime.now(),
has_custom_certificate=False,
pending_update_count=0,
)
),
) as mock_webhook_info,
patch(
"homeassistant.components.telegram_bot.webhooks.Bot.set_webhook",
) as mock_set_webhook,
patch(
"homeassistant.components.telegram_bot.webhooks.ApplicationBuilder"
) as application_builder_class,
):
mock_set_webhook.side_effect = [TimedOut("mock timeout"), False]
application = application_builder_class.return_value.bot.return_value.updater.return_value.build.return_value
application.initialize = AsyncMock()
application.start = AsyncMock()
await hass.config_entries.async_setup(mock_webhooks_config_entry.entry_id)
await hass.async_block_till_done()
await hass.async_stop()
# first fail with exception, second fail with False
assert mock_set_webhook.call_count == 2
mock_webhook_info.assert_called_once()
application.initialize.assert_called_once()
application.start.assert_called_once()
assert mock_set_webhook.call_count > 0
# SETUP_ERROR is result of ConfigEntryNotReady("Failed to register webhook with Telegram") in webhooks.py
assert mock_webhooks_config_entry.state == ConfigEntryState.SETUP_ERROR
# test fail after retries
mock_set_webhook.reset_mock()
mock_set_webhook.side_effect = TimedOut("mock timeout")
await hass.config_entries.async_reload(mock_webhooks_config_entry.entry_id)
await hass.async_block_till_done()
# 3 retries
assert mock_set_webhook.call_count == 3
assert mock_webhooks_config_entry.state == ConfigEntryState.SETUP_ERROR
await hass.async_block_till_done()
async def test_set_webhooks(
hass: HomeAssistant,
mock_webhooks_config_entry: MockConfigEntry,
mock_external_calls: None,
mock_register_webhook: None,
mock_generate_secret_token,
) -> None:
"""Test set webhooks success."""
mock_webhooks_config_entry.add_to_hass(hass)
await hass.config_entries.async_setup(mock_webhooks_config_entry.entry_id)
await hass.async_block_till_done()
with (
patch(
"homeassistant.components.telegram_bot.webhooks.Bot.get_webhook_info",
AsyncMock(
return_value=WebhookInfo(
url="mock url",
last_error_date=datetime.now(),
has_custom_certificate=False,
pending_update_count=0,
)
),
) as mock_webhook_info,
patch(
"homeassistant.components.telegram_bot.webhooks.Bot.set_webhook",
AsyncMock(return_value=True),
) as mock_set_webhook,
patch(
"homeassistant.components.telegram_bot.webhooks.ApplicationBuilder"
) as application_builder_class,
):
application = application_builder_class.return_value.bot.return_value.updater.return_value.build.return_value
application.initialize = AsyncMock()
application.start = AsyncMock()
assert mock_webhooks_config_entry.state == ConfigEntryState.LOADED
await hass.config_entries.async_setup(mock_webhooks_config_entry.entry_id)
await hass.async_block_till_done()
await hass.async_stop()
mock_webhook_info.assert_called_once()
application.initialize.assert_called_once()
application.start.assert_called_once()
mock_set_webhook.assert_called_once()
assert mock_webhooks_config_entry.state == ConfigEntryState.LOADED
async def test_webhooks_update_invalid_json(
@@ -117,24 +148,3 @@ async def test_webhooks_unauthorized_network(
await hass.async_block_till_done()
mock_remote.assert_called_once()
async def test_webhooks_deregister_failed(
hass: HomeAssistant,
webhook_platform,
mock_external_calls: None,
mock_generate_secret_token,
) -> None:
"""Test deregister webhooks."""
config_entry = hass.config_entries.async_entries(DOMAIN)[0]
assert config_entry.state == ConfigEntryState.LOADED
with patch(
"homeassistant.components.telegram_bot.webhooks.Bot.delete_webhook",
) as mock_delete_webhook:
mock_delete_webhook.side_effect = TimedOut("mock timeout")
await hass.config_entries.async_unload(config_entry.entry_id)
mock_delete_webhook.assert_called_once()
assert config_entry.state == ConfigEntryState.NOT_LOADED

View File

@@ -230,7 +230,7 @@ DEVICE_MOCKS = [
"wg2_tmwhss6ntjfc7prs", # https://github.com/home-assistant/core/issues/150662
"wg2_v7owd9tzcaninc36", # https://github.com/orgs/home-assistant/discussions/539
"wk_6kijc7nd", # https://github.com/home-assistant/core/issues/136513
"wk_IAYz2WK1th0cMLmL", # https://github.com/home-assistant/core/issues/150077
"wk_IAYz2WK1th0cMLmL", # https://github.com/orgs/home-assistant/discussions/842
"wk_aqoouq7x", # https://github.com/home-assistant/core/issues/146263
"wk_ccpwojhalfxryigz", # https://github.com/home-assistant/core/issues/145551
"wk_cpmgn2cf", # https://github.com/orgs/home-assistant/discussions/684

View File

@@ -10,9 +10,9 @@
"online": true,
"sub": false,
"time_zone": "+01:00",
"active_time": "2022-11-15T08:35:43+00:00",
"create_time": "2022-11-15T08:35:43+00:00",
"update_time": "2022-11-15T08:35:43+00:00",
"active_time": "2018-12-04T17:50:07+00:00",
"create_time": "2018-12-04T17:50:07+00:00",
"update_time": "2025-09-03T07:44:16+00:00",
"function": {
"switch": {
"type": "Boolean",
@@ -22,16 +22,6 @@
"type": "Boolean",
"value": {}
},
"temp_set": {
"type": "Integer",
"value": {
"unit": "\u2103",
"min": 10,
"max": 70,
"scale": 1,
"step": 5
}
},
"eco": {
"type": "Boolean",
"value": {}
@@ -45,14 +35,26 @@
"scale": 0,
"step": 5
}
}
},
"status_range": {
"eco": {
"type": "Boolean",
"value": {}
},
"temp_set": {
"Mode": {
"type": "Enum",
"value": {
"range": ["0", "1"]
}
},
"program": {
"type": "Raw",
"value": {
"maxlen": 128
}
},
"tempSwitch": {
"type": "Enum",
"value": {
"range": ["0", "1"]
}
},
"TempSet": {
"type": "Integer",
"value": {
"unit": "\u2103",
@@ -61,6 +63,12 @@
"scale": 1,
"step": 5
}
}
},
"status_range": {
"eco": {
"type": "Boolean",
"value": {}
},
"switch": {
"type": "Boolean",
@@ -79,14 +87,43 @@
"scale": 0,
"step": 5
}
},
"floorTemp": {
"type": "Integer",
"value": {
"max": 198,
"min": 0,
"scale": 0,
"step": 5,
"unit": "\u2103"
}
},
"floortempFunction": {
"type": "Boolean",
"value": {}
},
"TempSet": {
"type": "Integer",
"value": {
"unit": "\u2103",
"min": 10,
"max": 70,
"scale": 1,
"step": 5
}
}
},
"status": {
"switch": true,
"temp_set": 46,
"upper_temp": 45,
"eco": false,
"child_lock": true
"switch": false,
"upper_temp": 55,
"eco": true,
"child_lock": false,
"Mode": 1,
"program": "DwYoDwceHhQoORceOhceOxceAAkoAAoeHhQoORceOhceOxceAAkoAAoeHhQoORceOhceOxce",
"floorTemp": 0,
"tempSwitch": 0,
"floortempFunction": true,
"TempSet": 41
},
"set_up": true,
"support_local": true

Some files were not shown because too many files have changed in this diff.