Mirror of https://github.com/home-assistant/core.git (synced 2025-10-13 21:59:35 +00:00)

Compare commits: progress-a ... trigger_ac
16 commits
Commits (SHA1):
90a30be906
cbf1b39edb
142daf5e49
8bd0ff7cca
ac676e12f6
c0ac3292cd
80fd07c128
3701d8859a
6dd26bae88
1a0abe296c
de6c61a4ab
33c677596e
e9b4b8e99b
0525c04c42
d57b502551
9fb708baf4
@@ -71,4 +71,4 @@ POLLEN_CATEGORY_MAP = {
}
UPDATE_INTERVAL_OBSERVATION = timedelta(minutes=10)
UPDATE_INTERVAL_DAILY_FORECAST = timedelta(hours=6)
-UPDATE_INTERVAL_HOURLY_FORECAST = timedelta(hours=30)
+UPDATE_INTERVAL_HOURLY_FORECAST = timedelta(minutes=30)
@@ -6,5 +6,5 @@
  "documentation": "https://www.home-assistant.io/integrations/environment_canada",
  "iot_class": "cloud_polling",
  "loggers": ["env_canada"],
-  "requirements": ["env-canada==0.11.2"]
+  "requirements": ["env-canada==0.11.3"]
}
@@ -17,7 +17,7 @@
  "mqtt": ["esphome/discover/#"],
  "quality_scale": "platinum",
  "requirements": [
-    "aioesphomeapi==41.12.0",
+    "aioesphomeapi==41.13.0",
    "esphome-dashboard-api==1.3.0",
    "bleak-esphome==3.4.0"
  ],
@@ -68,7 +68,6 @@ EVENT_HEALTH_CHANGED = "health_changed"
EVENT_SUPPORTED_CHANGED = "supported_changed"
EVENT_ISSUE_CHANGED = "issue_changed"
EVENT_ISSUE_REMOVED = "issue_removed"
-EVENT_JOB = "job"

UPDATE_KEY_SUPERVISOR = "supervisor"
@@ -56,7 +56,6 @@ from .const import (
    SupervisorEntityModel,
)
from .handler import HassioAPIError, get_supervisor_client
-from .jobs import SupervisorJobs

if TYPE_CHECKING:
    from .issues import SupervisorIssues
@@ -312,7 +311,6 @@ class HassioDataUpdateCoordinator(DataUpdateCoordinator):
            lambda: defaultdict(set)
        )
        self.supervisor_client = get_supervisor_client(hass)
-        self.jobs = SupervisorJobs(hass)

    async def _async_update_data(self) -> dict[str, Any]:
        """Update data via library."""
@@ -487,9 +485,6 @@ class HassioDataUpdateCoordinator(DataUpdateCoordinator):
            )
        )

-        # Refresh jobs data
-        await self.jobs.refresh_data(first_update)
-
    async def _update_addon_stats(self, slug: str) -> tuple[str, dict[str, Any] | None]:
        """Update single addon stats."""
        try:
@@ -1,157 +0,0 @@
"""Track Supervisor job data and allow subscription to updates."""

from collections.abc import Callable
from dataclasses import dataclass, replace
from functools import partial
from typing import Any
from uuid import UUID

from aiohasupervisor.models import Job

from homeassistant.core import (
    CALLBACK_TYPE,
    HomeAssistant,
    callback,
    is_callback_check_partial,
)
from homeassistant.helpers.dispatcher import async_dispatcher_connect

from .const import (
    ATTR_DATA,
    ATTR_UPDATE_KEY,
    ATTR_WS_EVENT,
    EVENT_JOB,
    EVENT_SUPERVISOR_EVENT,
    EVENT_SUPERVISOR_UPDATE,
    UPDATE_KEY_SUPERVISOR,
)
from .handler import get_supervisor_client


@dataclass(slots=True, frozen=True)
class JobSubscription:
    """Subscribe for updates on jobs which match filters.

    UUID is preferred match but only available in cases of a background API that
    returns the UUID before taking the action. Others are used to match jobs only
    if UUID is omitted. Either name or UUID is required to be able to match.

    event_callback must be safe annotated as a homeassistant.core.callback
    and safe to call in the event loop.
    """

    event_callback: Callable[[Job], Any]
    uuid: str | None = None
    name: str | None = None
    reference: str | None | type[Any] = Any

    def __post_init__(self) -> None:
        """Validate at least one filter option is present."""
        if not self.name and not self.uuid:
            raise ValueError("Either name or uuid must be provided!")
        if not is_callback_check_partial(self.event_callback):
            raise ValueError("event_callback must be a homeassistant.core.callback!")

    def matches(self, job: Job) -> bool:
        """Return true if job matches subscription filters."""
        if self.uuid:
            return job.uuid == self.uuid
        return job.name == self.name and self.reference in (Any, job.reference)


class SupervisorJobs:
    """Manage access to Supervisor jobs."""

    def __init__(self, hass: HomeAssistant) -> None:
        """Initialize object."""
        self._hass = hass
        self._supervisor_client = get_supervisor_client(hass)
        self._jobs: dict[UUID, Job] = {}
        self._subscriptions: set[JobSubscription] = set()

    @property
    def current_jobs(self) -> list[Job]:
        """Return current jobs."""
        return list(self._jobs.values())

    def subscribe(self, subscription: JobSubscription) -> CALLBACK_TYPE:
        """Subscribe to updates for job. Return callback is used to unsubscribe.

        If any jobs match the subscription at the time this is called, creates
        tasks to run their callback on it.
        """
        self._subscriptions.add(subscription)

        # As these are callbacks they are safe to run in the event loop
        # We wrap these in an asyncio task so subscribing does not wait on the logic
        if matches := [job for job in self._jobs.values() if subscription.matches(job)]:

            async def event_callback_async(job: Job) -> Any:
                return subscription.event_callback(job)

            for match in matches:
                self._hass.async_create_task(event_callback_async(match))

        return partial(self._subscriptions.discard, subscription)

    async def refresh_data(self, first_update: bool = False) -> None:
        """Refresh job data."""
        job_data = await self._supervisor_client.jobs.info()
        job_queue: list[Job] = job_data.jobs.copy()
        new_jobs: dict[UUID, Job] = {}
        changed_jobs: list[Job] = []

        # Rebuild our job cache from new info and compare to find changes
        while job_queue:
            job = job_queue.pop(0)
            job_queue.extend(job.child_jobs)
            job = replace(job, child_jobs=[])

            if job.uuid not in self._jobs or job != self._jobs[job.uuid]:
                changed_jobs.append(job)
            new_jobs[job.uuid] = replace(job, child_jobs=[])

        # For any jobs that disappeared which weren't done, tell subscribers they
        # changed to done. We don't know what else happened to them so leave the
        # rest of their state as is rather then guessing
        changed_jobs.extend(
            [
                replace(job, done=True)
                for uuid, job in self._jobs.items()
                if uuid not in new_jobs and job.done is False
            ]
        )

        # Replace our cache and inform subscribers of all changes
        self._jobs = new_jobs
        for job in changed_jobs:
            self._process_job_change(job)

        # If this is the first update register to receive Supervisor events
        if first_update:
            async_dispatcher_connect(
                self._hass, EVENT_SUPERVISOR_EVENT, self._supervisor_events_to_jobs
            )

    @callback
    def _supervisor_events_to_jobs(self, event: dict[str, Any]) -> None:
        """Update job data cache from supervisor events."""
        if ATTR_WS_EVENT not in event:
            return

        if (
            event[ATTR_WS_EVENT] == EVENT_SUPERVISOR_UPDATE
            and event.get(ATTR_UPDATE_KEY) == UPDATE_KEY_SUPERVISOR
        ):
            self._hass.async_create_task(self.refresh_data())

        elif event[ATTR_WS_EVENT] == EVENT_JOB:
            job = Job.from_dict(event[ATTR_DATA] | {"child_jobs": []})
            self._jobs[job.uuid] = job
            self._process_job_change(job)

    def _process_job_change(self, job: Job) -> None:
        """Process a job change by triggering callbacks on subscribers."""
        for sub in self._subscriptions:
            if sub.matches(job):
                sub.event_callback(job)
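A minimal usage sketch (not part of this diff) of the subscription API defined in the removed module above: a Home Assistant callback is registered for one named Supervisor job and the returned handle unsubscribes it. The coordinator argument is assumed to expose the SupervisorJobs instance wired up in the coordinator changes earlier; the other names come from the module itself.

from aiohasupervisor.models import Job

from homeassistant.core import CALLBACK_TYPE, callback
from homeassistant.components.hassio.jobs import JobSubscription  # module shown above


def track_addon_update(coordinator, addon_slug: str) -> CALLBACK_TYPE:
    """Subscribe to progress updates for a single add-on update job."""

    @callback
    def _on_job_changed(job: Job) -> None:
        # Runs in the event loop for every update of a matching job.
        print(job.progress, job.done)

    return coordinator.jobs.subscribe(
        JobSubscription(
            _on_job_changed,
            name="addon_manager_update",  # job name reported by the Supervisor
            reference=addon_slug,  # add-on slug used as the job reference
        )
    )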
@@ -6,7 +6,6 @@ import re
from typing import Any

from aiohasupervisor import SupervisorError
-from aiohasupervisor.models import Job
from awesomeversion import AwesomeVersion, AwesomeVersionStrategy

from homeassistant.components.update import (
@@ -16,7 +15,7 @@ from homeassistant.components.update import (
)
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import ATTR_ICON, ATTR_NAME
-from homeassistant.core import HomeAssistant, callback
+from homeassistant.core import HomeAssistant
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback

@@ -36,7 +35,6 @@ from .entity import (
    HassioOSEntity,
    HassioSupervisorEntity,
)
-from .jobs import JobSubscription
from .update_helper import update_addon, update_core, update_os

ENTITY_DESCRIPTION = UpdateEntityDescription(
@@ -91,7 +89,6 @@ class SupervisorAddonUpdateEntity(HassioAddonEntity, UpdateEntity):
        UpdateEntityFeature.INSTALL
        | UpdateEntityFeature.BACKUP
        | UpdateEntityFeature.RELEASE_NOTES
-        | UpdateEntityFeature.PROGRESS
    )

    @property
@@ -157,30 +154,6 @@ class SupervisorAddonUpdateEntity(HassioAddonEntity, UpdateEntity):
        )
        await self.coordinator.async_refresh()

-    @callback
-    def _update_job_changed(self, job: Job) -> None:
-        """Process update for this entity's update job."""
-        if job.done is False:
-            self._attr_in_progress = True
-            self._attr_update_percentage = job.progress
-        else:
-            self._attr_in_progress = False
-            self._attr_update_percentage = None
-        self.async_write_ha_state()
-
-    async def async_added_to_hass(self) -> None:
-        """Subscribe to progress updates."""
-        await super().async_added_to_hass()
-        self.async_on_remove(
-            self.coordinator.jobs.subscribe(
-                JobSubscription(
-                    self._update_job_changed,
-                    name="addon_manager_update",
-                    reference=self._addon_slug,
-                )
-            )
-        )
-

class SupervisorOSUpdateEntity(HassioOSEntity, UpdateEntity):
    """Update entity to handle updates for the Home Assistant Operating System."""
@@ -277,7 +250,6 @@ class SupervisorCoreUpdateEntity(HassioCoreEntity, UpdateEntity):
        UpdateEntityFeature.INSTALL
        | UpdateEntityFeature.SPECIFIC_VERSION
        | UpdateEntityFeature.BACKUP
-        | UpdateEntityFeature.PROGRESS
    )
    _attr_title = "Home Assistant Core"

@@ -309,25 +281,3 @@ class SupervisorCoreUpdateEntity(HassioCoreEntity, UpdateEntity):
    ) -> None:
        """Install an update."""
        await update_core(self.hass, version, backup)
-
-    @callback
-    def _update_job_changed(self, job: Job) -> None:
-        """Process update for this entity's update job."""
-        if job.done is False:
-            self._attr_in_progress = True
-            self._attr_update_percentage = job.progress
-        else:
-            self._attr_in_progress = False
-            self._attr_update_percentage = None
-        self.async_write_ha_state()
-
-    async def async_added_to_hass(self) -> None:
-        """Subscribe to progress updates."""
-        await super().async_added_to_hass()
-        self.async_on_remove(
-            self.coordinator.jobs.subscribe(
-                JobSubscription(
-                    self._update_job_changed, name="home_assistant_core_update"
-                )
-            )
-        )
@@ -5,5 +5,5 @@
  "config_flow": true,
  "documentation": "https://www.home-assistant.io/integrations/holiday",
  "iot_class": "local_polling",
-  "requirements": ["holidays==0.81", "babel==2.15.0"]
+  "requirements": ["holidays==0.82", "babel==2.15.0"]
}
@@ -10,7 +10,11 @@ from mill import Heater, Mill
from mill_local import Mill as MillLocal

from homeassistant.components.recorder import get_instance
-from homeassistant.components.recorder.models import StatisticData, StatisticMetaData
+from homeassistant.components.recorder.models import (
+    StatisticData,
+    StatisticMeanType,
+    StatisticMetaData,
+)
from homeassistant.components.recorder.statistics import (
    async_add_external_statistics,
    get_last_statistics,
@@ -147,7 +151,7 @@ class MillHistoricDataUpdateCoordinator(DataUpdateCoordinator):
                )
            )
            metadata = StatisticMetaData(
-                has_mean=False,
+                mean_type=StatisticMeanType.NONE,
                has_sum=True,
                name=f"{heater.name}",
                source=DOMAIN,
@@ -253,6 +253,7 @@ class ModbusHub:
        self._client: (
            AsyncModbusSerialClient | AsyncModbusTcpClient | AsyncModbusUdpClient | None
        ) = None
+        self._lock = asyncio.Lock()
        self.event_connected = asyncio.Event()
        self.hass = hass
        self.name = client_config[CONF_NAME]
@@ -415,7 +416,9 @@ class ModbusHub:
        """Convert async to sync pymodbus call."""
        if not self._client:
            return None
-        result = await self.low_level_pb_call(unit, address, value, use_call)
-        if self._msg_wait:
-            await asyncio.sleep(self._msg_wait)
-        return result
+        async with self._lock:
+            result = await self.low_level_pb_call(unit, address, value, use_call)
+            if self._msg_wait:
+                # small delay until next request/response
+                await asyncio.sleep(self._msg_wait)
+            return result
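The Modbus hunk above funnels every pymodbus call through one asyncio.Lock so requests cannot interleave; a small, self-contained sketch of the same serialization pattern in plain asyncio (names here are illustrative, not from the integration):

import asyncio


class SerializedClient:
    """Allow only one in-flight request at a time, with an optional settle delay."""

    def __init__(self, msg_wait: float = 0.0) -> None:
        self._lock = asyncio.Lock()
        self._msg_wait = msg_wait

    async def call(self, request):
        async with self._lock:
            result = await request()
            if self._msg_wait:
                # small delay until next request/response
                await asyncio.sleep(self._msg_wait)
            return result


async def main() -> None:
    client = SerializedClient(msg_wait=0.05)

    async def fake_request() -> str:
        await asyncio.sleep(0.01)
        return "ok"

    # Fired concurrently, but executed strictly one after another.
    print(await asyncio.gather(*(client.call(fake_request) for _ in range(3))))


asyncio.run(main())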
@@ -53,7 +53,7 @@ class NikoHomeControlLight(NikoHomeControlEntity, LightEntity):

    async def async_turn_on(self, **kwargs: Any) -> None:
        """Instruct the light to turn on."""
-        await self._action.turn_on(kwargs.get(ATTR_BRIGHTNESS, 255))
+        await self._action.turn_on(kwargs.get(ATTR_BRIGHTNESS))

    async def async_turn_off(self, **kwargs: Any) -> None:
        """Instruct the light to turn off."""
@@ -6,5 +6,5 @@
  "documentation": "https://www.home-assistant.io/integrations/niko_home_control",
  "iot_class": "local_push",
  "loggers": ["nikohomecontrol"],
-  "requirements": ["nhc==0.4.12"]
+  "requirements": ["nhc==0.6.1"]
}
@@ -316,16 +316,23 @@ class OpenAISubentryFlowHandler(ConfigSubentryFlow):
        options = self.options
        errors: dict[str, str] = {}

-        step_schema: VolDictType = {
-            vol.Optional(
-                CONF_CODE_INTERPRETER,
-                default=RECOMMENDED_CODE_INTERPRETER,
-            ): bool,
-        }
+        step_schema: VolDictType = {}

        model = options[CONF_CHAT_MODEL]

+        if model.startswith(("o", "gpt-5")):
+            if not model.startswith(("gpt-5-pro", "gpt-5-codex")):
+                step_schema.update(
+                    {
+                        vol.Optional(
+                            CONF_CODE_INTERPRETER,
+                            default=RECOMMENDED_CODE_INTERPRETER,
+                        ): bool,
+                    }
+                )
+            elif CONF_CODE_INTERPRETER in options:
+                options.pop(CONF_CODE_INTERPRETER)
+
        if model.startswith(("o", "gpt-5")) and not model.startswith("gpt-5-pro"):
            step_schema.update(
                {
                    vol.Optional(
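The reworked flow above only offers the code-interpreter toggle when the selected model supports it, and drops a stale stored option otherwise. A stripped-down, hypothetical sketch of that pattern with voluptuous (the constant and default here are placeholders, not the integration's real keys):

import voluptuous as vol

CONF_CODE_INTERPRETER = "code_interpreter"  # placeholder key, for illustration only


def build_step_schema(model: str, options: dict) -> vol.Schema:
    """Offer the code-interpreter toggle only for models that can use it."""
    step_schema: dict = {}

    if model.startswith(("o", "gpt-5")):
        if not model.startswith(("gpt-5-pro", "gpt-5-codex")):
            step_schema[vol.Optional(CONF_CODE_INTERPRETER, default=False)] = bool
        elif CONF_CODE_INTERPRETER in options:
            # The newly selected model cannot use a previously stored option.
            options.pop(CONF_CODE_INTERPRETER)

    return vol.Schema(step_schema)


print(build_step_schema("gpt-5", {}).schema)      # contains the toggle
print(build_step_schema("gpt-5-pro", {}).schema)  # empty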
@@ -468,7 +468,9 @@ class OpenAIBaseLLMEntity(Entity):
            model_args["reasoning"] = {
                "effort": options.get(
                    CONF_REASONING_EFFORT, RECOMMENDED_REASONING_EFFORT
-                ),
+                )
+                if not model_args["model"].startswith("gpt-5-pro")
+                else "high",  # GPT-5 pro only supports reasoning.effort: high
                "summary": "auto",
            }
            model_args["include"] = ["reasoning.encrypted_content"]
@@ -18,7 +18,8 @@ from homeassistant.helpers.aiohttp_client import async_create_clientsession

from .coordinator import PortainerCoordinator

-_PLATFORMS: list[Platform] = [Platform.BINARY_SENSOR, Platform.BUTTON, Platform.SWITCH]
+_PLATFORMS: list[Platform] = [Platform.BINARY_SENSOR, Platform.SENSOR, Platform.SWITCH]


type PortainerConfigEntry = ConfigEntry[PortainerCoordinator]
@@ -1,5 +1,10 @@
{
  "entity": {
+    "sensor": {
+      "image": {
+        "default": "mdi:docker"
+      }
+    },
    "switch": {
      "container": {
        "default": "mdi:arrow-down-box",
homeassistant/components/portainer/sensor.py (new file, 83 lines)
@@ -0,0 +1,83 @@
"""Sensor platform for Portainer integration."""

from __future__ import annotations

from collections.abc import Callable
from dataclasses import dataclass

from pyportainer.models.docker import DockerContainer

from homeassistant.components.sensor import SensorEntity, SensorEntityDescription
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback

from .coordinator import PortainerConfigEntry, PortainerCoordinator
from .entity import PortainerContainerEntity, PortainerCoordinatorData


@dataclass(frozen=True, kw_only=True)
class PortainerSensorEntityDescription(SensorEntityDescription):
    """Class to hold Portainer sensor description."""

    value_fn: Callable[[DockerContainer], str | None]


CONTAINER_SENSORS: tuple[PortainerSensorEntityDescription, ...] = (
    PortainerSensorEntityDescription(
        key="image",
        translation_key="image",
        value_fn=lambda data: data.image,
    ),
)


async def async_setup_entry(
    hass: HomeAssistant,
    entry: PortainerConfigEntry,
    async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
    """Set up Portainer sensors based on a config entry."""
    coordinator = entry.runtime_data

    async_add_entities(
        PortainerContainerSensor(
            coordinator,
            entity_description,
            container,
            endpoint,
        )
        for endpoint in coordinator.data.values()
        for container in endpoint.containers.values()
        for entity_description in CONTAINER_SENSORS
    )


class PortainerContainerSensor(PortainerContainerEntity, SensorEntity):
    """Representation of a Portainer container sensor."""

    entity_description: PortainerSensorEntityDescription

    def __init__(
        self,
        coordinator: PortainerCoordinator,
        entity_description: PortainerSensorEntityDescription,
        device_info: DockerContainer,
        via_device: PortainerCoordinatorData,
    ) -> None:
        """Initialize the Portainer container sensor."""
        self.entity_description = entity_description
        super().__init__(device_info, coordinator, via_device)

        self._attr_unique_id = f"{coordinator.config_entry.entry_id}_{self.device_name}_{entity_description.key}"

    @property
    def available(self) -> bool:
        """Return if the device is available."""
        return super().available and self.endpoint_id in self.coordinator.data

    @property
    def native_value(self) -> str | None:
        """Return the state of the sensor."""
        return self.entity_description.value_fn(
            self.coordinator.data[self.endpoint_id].containers[self.device_id]
        )
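The new platform above drives every sensor through a value_fn on a frozen entity description, so the entity class stays generic. A tiny stand-alone sketch of that pattern (plain dicts instead of the pyportainer models):

from collections.abc import Callable
from dataclasses import dataclass


@dataclass(frozen=True, kw_only=True)
class SensorDescription:
    key: str
    value_fn: Callable[[dict], str | None]


# One description per exposed attribute; adding a sensor means adding a description.
DESCRIPTIONS = (
    SensorDescription(key="image", value_fn=lambda container: container.get("image")),
)

container = {"image": "ghcr.io/home-assistant/home-assistant:stable"}
for description in DESCRIPTIONS:
    print(description.key, "=", description.value_fn(container))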
@@ -46,6 +46,11 @@
        "name": "Status"
      }
    },
+    "sensor": {
+      "image": {
+        "name": "Image"
+      }
+    },
    "switch": {
      "container": {
        "name": "Container"
@@ -9,7 +9,6 @@ from typing import TYPE_CHECKING, Any, Final

from aioshelly.const import BLU_TRV_IDENTIFIER, MODEL_BLU_GATEWAY_G3, RPC_GENERATIONS
-from aioshelly.exceptions import DeviceConnectionError, InvalidAuthError, RpcCallError
from aioshelly.rpc_device import RpcDevice

from homeassistant.components.button import (
    DOMAIN as BUTTON_PLATFORM,
@@ -24,16 +23,24 @@ from homeassistant.helpers import entity_registry as er
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from homeassistant.helpers.update_coordinator import CoordinatorEntity

-from .const import DOMAIN, LOGGER, SHELLY_GAS_MODELS
+from .const import DOMAIN, LOGGER, MODEL_FRANKEVER_WATER_VALVE, SHELLY_GAS_MODELS
from .coordinator import ShellyBlockCoordinator, ShellyConfigEntry, ShellyRpcCoordinator
-from .entity import get_entity_block_device_info, get_entity_rpc_device_info
+from .entity import (
+    RpcEntityDescription,
+    ShellyRpcAttributeEntity,
+    async_setup_entry_rpc,
+    get_entity_block_device_info,
+    get_entity_rpc_device_info,
+    rpc_call,
+)
from .utils import (
    async_remove_orphaned_entities,
    format_ble_addr,
    get_blu_trv_device_info,
    get_device_entry_gen,
-    get_rpc_entity_name,
    get_rpc_key_ids,
+    get_rpc_key_instances,
+    get_rpc_role_by_key,
    get_virtual_component_ids,
)

@@ -51,6 +58,11 @@ class ShellyButtonDescription[
    supported: Callable[[_ShellyCoordinatorT], bool] = lambda _: True


+@dataclass(frozen=True, kw_only=True)
+class RpcButtonDescription(RpcEntityDescription, ButtonEntityDescription):
+    """Class to describe a RPC button."""
+
+
BUTTONS: Final[list[ShellyButtonDescription[Any]]] = [
    ShellyButtonDescription[ShellyBlockCoordinator | ShellyRpcCoordinator](
        key="reboot",
@@ -96,12 +108,24 @@ BLU_TRV_BUTTONS: Final[list[ShellyButtonDescription]] = [
    ),
]

-VIRTUAL_BUTTONS: Final[list[ShellyButtonDescription]] = [
-    ShellyButtonDescription[ShellyRpcCoordinator](
+RPC_VIRTUAL_BUTTONS = {
+    "button_generic": RpcButtonDescription(
        key="button",
-        press_action="single_push",
-    )
-]
+        role="generic",
+    ),
+    "button_open": RpcButtonDescription(
+        key="button",
+        entity_registry_enabled_default=False,
+        role="open",
+        models={MODEL_FRANKEVER_WATER_VALVE},
+    ),
+    "button_close": RpcButtonDescription(
+        key="button",
+        entity_registry_enabled_default=False,
+        role="close",
+        models={MODEL_FRANKEVER_WATER_VALVE},
+    ),
+}


@callback
@@ -129,8 +153,10 @@ def async_migrate_unique_ids(
        )
    }

+    if not isinstance(coordinator, ShellyRpcCoordinator):
+        return None
+
    if blutrv_key_ids := get_rpc_key_ids(coordinator.device.status, BLU_TRV_IDENTIFIER):
-        assert isinstance(coordinator.device, RpcDevice)
        for _id in blutrv_key_ids:
            key = f"{BLU_TRV_IDENTIFIER}:{_id}"
            ble_addr: str = coordinator.device.config[key]["addr"]
@@ -149,6 +175,26 @@ def async_migrate_unique_ids(
            )
        }

+    if virtual_button_keys := get_rpc_key_instances(
+        coordinator.device.config, "button"
+    ):
+        for key in virtual_button_keys:
+            old_unique_id = f"{coordinator.mac}-{key}"
+            if entity_entry.unique_id == old_unique_id:
+                role = get_rpc_role_by_key(coordinator.device.config, key)
+                new_unique_id = f"{coordinator.mac}-{key}-button_{role}"
+                LOGGER.debug(
+                    "Migrating unique_id for %s entity from [%s] to [%s]",
+                    entity_entry.entity_id,
+                    old_unique_id,
+                    new_unique_id,
+                )
+                return {
+                    "new_unique_id": entity_entry.unique_id.replace(
+                        old_unique_id, new_unique_id
+                    )
+                }
+
    return None


@@ -172,7 +218,7 @@ async def async_setup_entry(
        hass, config_entry.entry_id, partial(async_migrate_unique_ids, coordinator)
    )

-    entities: list[ShellyButton | ShellyBluTrvButton | ShellyVirtualButton] = []
+    entities: list[ShellyButton | ShellyBluTrvButton] = []

    entities.extend(
        ShellyButton(coordinator, button)
@@ -185,12 +231,9 @@ async def async_setup_entry(
        return

    # add virtual buttons
-    if virtual_button_ids := get_rpc_key_ids(coordinator.device.status, "button"):
-        entities.extend(
-            ShellyVirtualButton(coordinator, button, id_)
-            for id_ in virtual_button_ids
-            for button in VIRTUAL_BUTTONS
-        )
+    async_setup_entry_rpc(
+        hass, config_entry, async_add_entities, RPC_VIRTUAL_BUTTONS, RpcVirtualButton
+    )

    # add BLU TRV buttons
    if blutrv_key_ids := get_rpc_key_ids(coordinator.device.status, BLU_TRV_IDENTIFIER):
@@ -332,30 +375,16 @@ class ShellyBluTrvButton(ShellyBaseButton):
        await method(self._id)


-class ShellyVirtualButton(ShellyBaseButton):
-    """Defines a Shelly virtual component button."""
+class RpcVirtualButton(ShellyRpcAttributeEntity, ButtonEntity):
+    """Defines a Shelly RPC virtual component button."""

-    def __init__(
-        self,
-        coordinator: ShellyRpcCoordinator,
-        description: ShellyButtonDescription,
-        _id: int,
-    ) -> None:
-        """Initialize Shelly virtual component button."""
-        super().__init__(coordinator, description)
+    entity_description: RpcButtonDescription
+    _id: int

-        self._attr_unique_id = f"{coordinator.mac}-{description.key}:{_id}"
-        self._attr_device_info = get_entity_rpc_device_info(coordinator)
-        self._attr_name = get_rpc_entity_name(
-            coordinator.device, f"{description.key}:{_id}"
-        )
-        self._id = _id
-
-    async def _press_method(self) -> None:
-        """Press method."""
+    @rpc_call
+    async def async_press(self) -> None:
+        """Triggers the Shelly button press service."""
        if TYPE_CHECKING:
            assert isinstance(self.coordinator, ShellyRpcCoordinator)

-        await self.coordinator.device.button_trigger(
-            self._id, self.entity_description.press_action
-        )
+        await self.coordinator.device.button_trigger(self._id, "single_push")
@@ -195,9 +195,11 @@ def async_setup_rpc_attribute_entities(
        ):
            continue

-        if description.sub_key not in coordinator.device.status[
-            key
-        ] and not description.supported(coordinator.device.status[key]):
+        if (
+            description.sub_key
+            and description.sub_key not in coordinator.device.status[key]
+            and not description.supported(coordinator.device.status[key])
+        ):
            continue

        # Filter and remove entities that according to settings/status
@@ -309,7 +311,7 @@ class RpcEntityDescription(EntityDescription):
    # restrict the type to str.
    name: str = ""

-    sub_key: str
+    sub_key: str | None = None

    value: Callable[[Any, Any], Any] | None = None
    available: Callable[[dict], bool] | None = None
@@ -241,7 +241,6 @@ class SuezWaterCoordinator(DataUpdateCoordinator[SuezWaterData]):
    ) -> StatisticMetaData:
        """Build statistics metadata for requested configuration."""
        return StatisticMetaData(
-            has_mean=False,
            mean_type=StatisticMeanType.NONE,
            has_sum=True,
            name=f"Suez water {name} {self._counter_id}",
@@ -6,6 +6,6 @@
  "documentation": "https://www.home-assistant.io/integrations/systemmonitor",
  "iot_class": "local_push",
  "loggers": ["psutil"],
-  "requirements": ["psutil-home-assistant==0.0.1", "psutil==7.0.0"],
+  "requirements": ["psutil-home-assistant==0.0.1", "psutil==7.1.0"],
  "single_config_entry": true
}
@@ -7,5 +7,5 @@
  "iot_class": "local_polling",
  "loggers": ["holidays"],
  "quality_scale": "internal",
-  "requirements": ["holidays==0.81"]
+  "requirements": ["holidays==0.82"]
}
@@ -9,7 +9,6 @@ from typing import TYPE_CHECKING, Final
from .generated.entity_platforms import EntityPlatforms
from .helpers.deprecation import (
    DeprecatedConstantEnum,
-    EnumWithDeprecatedMembers,
    all_with_deprecated_constants,
    check_if_deprecated_constant,
    dir_with_deprecated_constants,
@@ -704,35 +703,13 @@ class UnitOfMass(StrEnum):
    STONES = "st"


-class UnitOfConductivity(
-    StrEnum,
-    metaclass=EnumWithDeprecatedMembers,
-    deprecated={
-        "SIEMENS": ("UnitOfConductivity.SIEMENS_PER_CM", "2025.11.0"),
-        "MICROSIEMENS": ("UnitOfConductivity.MICROSIEMENS_PER_CM", "2025.11.0"),
-        "MILLISIEMENS": ("UnitOfConductivity.MILLISIEMENS_PER_CM", "2025.11.0"),
-    },
-):
+class UnitOfConductivity(StrEnum):
    """Conductivity units."""

    SIEMENS_PER_CM = "S/cm"
    MICROSIEMENS_PER_CM = "μS/cm"
    MILLISIEMENS_PER_CM = "mS/cm"

-    # Deprecated aliases
-    SIEMENS = "S/cm"
-    """Deprecated: Please use UnitOfConductivity.SIEMENS_PER_CM"""
-    MICROSIEMENS = "μS/cm"
-    """Deprecated: Please use UnitOfConductivity.MICROSIEMENS_PER_CM"""
-    MILLISIEMENS = "mS/cm"
-    """Deprecated: Please use UnitOfConductivity.MILLISIEMENS_PER_CM"""


-_DEPRECATED_CONDUCTIVITY: Final = DeprecatedConstantEnum(
-    UnitOfConductivity.MICROSIEMENS_PER_CM,
-    "2025.11",
-)
-"""Deprecated: please use UnitOfConductivity.MICROSIEMENS_PER_CM"""

# Light units
LIGHT_LUX: Final = "lx"
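In the older variant shown above, the deprecated member names share values with the *_PER_CM members; with a StrEnum, a duplicate value makes the later name a plain alias of the first member, so value lookups keep resolving to the canonical name. A small stand-alone illustration (not the Home Assistant enum itself):

from enum import StrEnum


class Conductivity(StrEnum):
    SIEMENS_PER_CM = "S/cm"
    MICROSIEMENS_PER_CM = "μS/cm"
    MILLISIEMENS_PER_CM = "mS/cm"

    # Duplicate values: these become aliases of the members above.
    SIEMENS = "S/cm"
    MICROSIEMENS = "μS/cm"
    MILLISIEMENS = "mS/cm"


print(Conductivity.MICROSIEMENS is Conductivity.MICROSIEMENS_PER_CM)  # True
print(Conductivity("μS/cm").name)  # MICROSIEMENS_PER_CM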
@@ -36,7 +36,7 @@ from homeassistant.core import (
    callback,
    split_entity_id,
)
-from homeassistant.exceptions import TemplateError
+from homeassistant.exceptions import HomeAssistantError, TemplateError
from homeassistant.loader import bind_hass
from homeassistant.util import dt as dt_util
from homeassistant.util.async_ import run_callback_threadsafe
@@ -1004,12 +1004,9 @@ class TrackTemplateResultInfo:
            if track_template_.template.hass:
                continue

-            frame.report_usage(
-                "calls async_track_template_result with template without hass",
-                core_behavior=frame.ReportBehavior.LOG,
-                breaks_in_ha_version="2025.10",
+            raise HomeAssistantError(
+                "Calls async_track_template_result with template without hass"
            )
-            track_template_.template.hass = hass

        self._rate_limit = KeyedRateLimit(hass)
        self._info: dict[Template, RenderInfo] = {}
@@ -260,11 +260,11 @@ class TriggerConfig:
class TriggerActionType(Protocol):
    """Protocol type for trigger action callback."""

-    async def __call__(
+    def __call__(
        self,
        run_variables: dict[str, Any],
        context: Context | None = None,
-    ) -> Any:
+    ) -> Coroutine[Any, Any, Any] | Any:
        """Define action callback type."""


@@ -444,8 +444,8 @@ async def async_validate_trigger_config(


def _trigger_action_wrapper(
-    hass: HomeAssistant, action: Callable, conf: ConfigType
-) -> Callable:
+    hass: HomeAssistant, action: TriggerActionType, conf: ConfigType
+) -> TriggerActionType:
    """Wrap trigger action with extra vars if configured.

    If action is a coroutine function, a coroutine function will be returned.
@@ -477,7 +477,7 @@ def _trigger_action_wrapper(
    else:

        @functools.wraps(action)
-        async def with_vars(
+        def with_vars(
            run_variables: dict[str, Any], context: Context | None = None
        ) -> Any:
            """Wrap action with extra vars."""
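After the protocol change above, a trigger action may hand back either a plain value or a coroutine. A minimal sketch (generic asyncio, not the helper's actual dispatch code) of a caller that accepts both shapes:

import asyncio
from collections.abc import Coroutine
from typing import Any


async def run_action(action, run_variables: dict[str, Any], context=None) -> Any:
    """Call a trigger-style action that may be sync or async."""
    result = action(run_variables, context)
    if isinstance(result, Coroutine):
        # Async actions return a coroutine that still has to be awaited.
        result = await result
    return result


def sync_action(run_variables, context=None):
    return f"sync:{run_variables['x']}"


async def async_action(run_variables, context=None):
    await asyncio.sleep(0)
    return f"async:{run_variables['x']}"


async def main() -> None:
    print(await run_action(sync_action, {"x": 1}))
    print(await run_action(async_action, {"x": 2}))


asyncio.run(main())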
requirements_all.txt (generated, 10 lines changed)
@@ -247,7 +247,7 @@ aioelectricitymaps==1.1.1
aioemonitor==1.0.5

# homeassistant.components.esphome
-aioesphomeapi==41.12.0
+aioesphomeapi==41.13.0

# homeassistant.components.flo
aioflo==2021.11.0
@@ -895,7 +895,7 @@ enocean==0.50
enturclient==0.2.4

# homeassistant.components.environment_canada
-env-canada==0.11.2
+env-canada==0.11.3

# homeassistant.components.season
ephem==4.1.6
@@ -1183,7 +1183,7 @@ hole==0.9.0

# homeassistant.components.holiday
# homeassistant.components.workday
-holidays==0.81
+holidays==0.82

# homeassistant.components.frontend
home-assistant-frontend==20251001.0
@@ -1545,7 +1545,7 @@ nextcord==3.1.0
nextdns==4.1.0

# homeassistant.components.niko_home_control
-nhc==0.4.12
+nhc==0.6.1

# homeassistant.components.nibe_heatpump
nibe==2.19.0
@@ -1755,7 +1755,7 @@ proxmoxer==2.0.1
psutil-home-assistant==0.0.1

# homeassistant.components.systemmonitor
-psutil==7.0.0
+psutil==7.1.0

# homeassistant.components.pulseaudio_loopback
pulsectl==23.5.2
requirements_test_all.txt (generated, 10 lines changed)
@@ -235,7 +235,7 @@ aioelectricitymaps==1.1.1
aioemonitor==1.0.5

# homeassistant.components.esphome
-aioesphomeapi==41.12.0
+aioesphomeapi==41.13.0

# homeassistant.components.flo
aioflo==2021.11.0
@@ -777,7 +777,7 @@ energyzero==2.1.1
enocean==0.50

# homeassistant.components.environment_canada
-env-canada==0.11.2
+env-canada==0.11.3

# homeassistant.components.season
ephem==4.1.6
@@ -1032,7 +1032,7 @@ hole==0.9.0

# homeassistant.components.holiday
# homeassistant.components.workday
-holidays==0.81
+holidays==0.82

# homeassistant.components.frontend
home-assistant-frontend==20251001.0
@@ -1328,7 +1328,7 @@ nextcord==3.1.0
nextdns==4.1.0

# homeassistant.components.niko_home_control
-nhc==0.4.12
+nhc==0.6.1

# homeassistant.components.nibe_heatpump
nibe==2.19.0
@@ -1487,7 +1487,7 @@ prowlpy==1.0.2
psutil-home-assistant==0.0.1

# homeassistant.components.systemmonitor
-psutil==7.0.0
+psutil==7.1.0

# homeassistant.components.pushbullet
pushbullet.py==0.11.0
@@ -14,7 +14,6 @@ from unittest.mock import AsyncMock, MagicMock, patch

from aiohasupervisor.models import (
    Discovery,
-    JobsInfo,
    Repository,
    ResolutionInfo,
    StoreAddon,
@@ -510,13 +509,6 @@ def resolution_suggestions_for_issue_fixture(supervisor_client: AsyncMock) -> As
    return supervisor_client.resolution.suggestions_for_issue


-@pytest.fixture(name="jobs_info")
-def jobs_info_fixture(supervisor_client: AsyncMock) -> AsyncMock:
-    """Mock jobs info from supervisor."""
-    supervisor_client.jobs.info.return_value = JobsInfo(ignore_conditions=[], jobs=[])
-    return supervisor_client.jobs.info
-
-
@pytest.fixture(name="supervisor_client")
def supervisor_client() -> Generator[AsyncMock]:
    """Mock the supervisor client."""
@@ -562,10 +554,6 @@ def supervisor_client() -> Generator[AsyncMock]:
            "homeassistant.components.hassio.issues.get_supervisor_client",
            return_value=supervisor_client,
        ),
-        patch(
-            "homeassistant.components.hassio.jobs.get_supervisor_client",
-            return_value=supervisor_client,
-        ),
        patch(
            "homeassistant.components.hassio.repairs.get_supervisor_client",
            return_value=supervisor_client,
@@ -79,7 +79,6 @@ def all_setup_requests(
    store_info: AsyncMock,
    addon_changelog: AsyncMock,
    addon_stats: AsyncMock,
-    jobs_info: AsyncMock,
) -> None:
    """Mock all setup requests."""
    include_addons = hasattr(request, "param") and request.param.get(
@@ -262,8 +261,3 @@ def all_setup_requests(
            },
        },
    )
-
-    aioclient_mock.get(
-        "http://127.0.0.1/jobs/info",
-        json={"result": "ok", "data": {"ignore_conditions": [], "jobs": []}},
-    )
@@ -26,7 +26,6 @@ def mock_all(
    addon_changelog: AsyncMock,
    addon_stats: AsyncMock,
    resolution_info: AsyncMock,
-    jobs_info: AsyncMock,
) -> None:
    """Mock all setup requests."""
    aioclient_mock.post("http://127.0.0.1/homeassistant/options", json={"result": "ok"})
@@ -25,7 +25,6 @@ def mock_all(
    addon_stats: AsyncMock,
    addon_changelog: AsyncMock,
    resolution_info: AsyncMock,
-    jobs_info: AsyncMock,
) -> None:
    """Mock all setup requests."""
    aioclient_mock.post("http://127.0.0.1/homeassistant/options", json={"result": "ok"})
@@ -72,7 +72,6 @@ def mock_all(
    addon_stats: AsyncMock,
    addon_changelog: AsyncMock,
    resolution_info: AsyncMock,
-    jobs_info: AsyncMock,
) -> None:
    """Mock all setup requests."""
    aioclient_mock.post("http://127.0.0.1/homeassistant/options", json={"result": "ok"})
@@ -233,7 +232,7 @@ async def test_setup_api_ping(
    await hass.async_block_till_done()

    assert result
-    assert aioclient_mock.call_count + len(supervisor_client.mock_calls) == 19
+    assert aioclient_mock.call_count + len(supervisor_client.mock_calls) == 18
    assert get_core_info(hass)["version_latest"] == "1.0.0"
    assert is_hassio(hass)

@@ -280,7 +279,7 @@ async def test_setup_api_push_api_data(
    await hass.async_block_till_done()

    assert result
-    assert aioclient_mock.call_count + len(supervisor_client.mock_calls) == 19
+    assert aioclient_mock.call_count + len(supervisor_client.mock_calls) == 18
    assert not aioclient_mock.mock_calls[0][2]["ssl"]
    assert aioclient_mock.mock_calls[0][2]["port"] == 9999
    assert "watchdog" not in aioclient_mock.mock_calls[0][2]
@@ -301,7 +300,7 @@ async def test_setup_api_push_api_data_server_host(
    await hass.async_block_till_done()

    assert result
-    assert aioclient_mock.call_count + len(supervisor_client.mock_calls) == 19
+    assert aioclient_mock.call_count + len(supervisor_client.mock_calls) == 18
    assert not aioclient_mock.mock_calls[0][2]["ssl"]
    assert aioclient_mock.mock_calls[0][2]["port"] == 9999
    assert not aioclient_mock.mock_calls[0][2]["watchdog"]
@@ -322,7 +321,7 @@ async def test_setup_api_push_api_data_default(
    await hass.async_block_till_done()

    assert result
-    assert aioclient_mock.call_count + len(supervisor_client.mock_calls) == 19
+    assert aioclient_mock.call_count + len(supervisor_client.mock_calls) == 18
    assert not aioclient_mock.mock_calls[0][2]["ssl"]
    assert aioclient_mock.mock_calls[0][2]["port"] == 8123
    refresh_token = aioclient_mock.mock_calls[0][2]["refresh_token"]
@@ -403,7 +402,7 @@ async def test_setup_api_existing_hassio_user(
    await hass.async_block_till_done()

    assert result
-    assert aioclient_mock.call_count + len(supervisor_client.mock_calls) == 19
+    assert aioclient_mock.call_count + len(supervisor_client.mock_calls) == 18
    assert not aioclient_mock.mock_calls[0][2]["ssl"]
    assert aioclient_mock.mock_calls[0][2]["port"] == 8123
    assert aioclient_mock.mock_calls[0][2]["refresh_token"] == token.token
@@ -422,7 +421,7 @@ async def test_setup_core_push_config(
    await hass.async_block_till_done()

    assert result
-    assert aioclient_mock.call_count + len(supervisor_client.mock_calls) == 19
+    assert aioclient_mock.call_count + len(supervisor_client.mock_calls) == 18
    assert aioclient_mock.mock_calls[1][2]["timezone"] == "testzone"

    with patch("homeassistant.util.dt.set_default_time_zone"):
@@ -446,7 +445,7 @@ async def test_setup_hassio_no_additional_data(
    await hass.async_block_till_done()

    assert result
-    assert aioclient_mock.call_count + len(supervisor_client.mock_calls) == 19
+    assert aioclient_mock.call_count + len(supervisor_client.mock_calls) == 18
    assert aioclient_mock.mock_calls[-1][3]["Authorization"] == "Bearer 123456"


@@ -528,14 +527,14 @@ async def test_service_calls(
    )
    await hass.async_block_till_done()

-    assert aioclient_mock.call_count + len(supervisor_client.mock_calls) == 23
+    assert aioclient_mock.call_count + len(supervisor_client.mock_calls) == 22
    assert aioclient_mock.mock_calls[-1][2] == "test"

    await hass.services.async_call("hassio", "host_shutdown", {})
    await hass.services.async_call("hassio", "host_reboot", {})
    await hass.async_block_till_done()

-    assert aioclient_mock.call_count + len(supervisor_client.mock_calls) == 25
+    assert aioclient_mock.call_count + len(supervisor_client.mock_calls) == 24

    await hass.services.async_call("hassio", "backup_full", {})
    await hass.services.async_call(
@@ -550,7 +549,7 @@
    )
    await hass.async_block_till_done()

-    assert aioclient_mock.call_count + len(supervisor_client.mock_calls) == 27
+    assert aioclient_mock.call_count + len(supervisor_client.mock_calls) == 26
    assert aioclient_mock.mock_calls[-1][2] == {
        "name": "2021-11-13 03:48:00",
        "homeassistant": True,
@@ -575,7 +574,7 @@
    )
    await hass.async_block_till_done()

-    assert aioclient_mock.call_count + len(supervisor_client.mock_calls) == 29
+    assert aioclient_mock.call_count + len(supervisor_client.mock_calls) == 28
    assert aioclient_mock.mock_calls[-1][2] == {
        "addons": ["test"],
        "folders": ["ssl"],
@@ -594,7 +593,7 @@
    )
    await hass.async_block_till_done()

-    assert aioclient_mock.call_count + len(supervisor_client.mock_calls) == 30
+    assert aioclient_mock.call_count + len(supervisor_client.mock_calls) == 29
    assert aioclient_mock.mock_calls[-1][2] == {
        "name": "backup_name",
        "location": "backup_share",
@@ -610,7 +609,7 @@
    )
    await hass.async_block_till_done()

-    assert aioclient_mock.call_count + len(supervisor_client.mock_calls) == 31
+    assert aioclient_mock.call_count + len(supervisor_client.mock_calls) == 30
    assert aioclient_mock.mock_calls[-1][2] == {
        "name": "2021-11-13 03:48:00",
        "location": None,
@@ -629,7 +628,7 @@
    )
    await hass.async_block_till_done()

-    assert aioclient_mock.call_count + len(supervisor_client.mock_calls) == 33
+    assert aioclient_mock.call_count + len(supervisor_client.mock_calls) == 32
    assert aioclient_mock.mock_calls[-1][2] == {
        "name": "2021-11-13 11:48:00",
        "location": None,
@@ -1075,7 +1074,7 @@ async def test_setup_hardware_integration(
    await hass.async_block_till_done(wait_background_tasks=True)

    assert result
-    assert aioclient_mock.call_count + len(supervisor_client.mock_calls) == 19
+    assert aioclient_mock.call_count + len(supervisor_client.mock_calls) == 18
    assert len(mock_setup_entry.mock_calls) == 1

@@ -34,7 +34,6 @@ def mock_all(
    addon_stats: AsyncMock,
    addon_changelog: AsyncMock,
    resolution_info: AsyncMock,
-    jobs_info: AsyncMock,
) -> None:
    """Mock all setup requests."""
    _install_default_mocks(aioclient_mock)
@@ -60,7 +60,6 @@ def mock_all(
    addon_changelog: AsyncMock,
    addon_stats: AsyncMock,
    resolution_info: AsyncMock,
-    jobs_info: AsyncMock,
) -> None:
    """Mock all setup requests."""
    aioclient_mock.post("http://127.0.0.1/homeassistant/options", json={"result": "ok"})
@@ -1,10 +1,9 @@
"""The tests for the hassio update entities."""

-from datetime import datetime, timedelta
+from datetime import timedelta
import os
from typing import Any
from unittest.mock import AsyncMock, MagicMock, patch
-from uuid import uuid4

from aiohasupervisor import (
    SupervisorBadRequestError,
@@ -13,8 +12,6 @@ from aiohasupervisor import (
)
from aiohasupervisor.models import (
    HomeAssistantUpdateOptions,
-    Job,
-    JobsInfo,
    OSUpdate,
    StoreAddonUpdate,
)
@@ -47,7 +44,6 @@ def mock_all(
    addon_stats: AsyncMock,
    addon_changelog: AsyncMock,
    resolution_info: AsyncMock,
-    jobs_info: AsyncMock,
) -> None:
    """Mock all setup requests."""
    aioclient_mock.post("http://127.0.0.1/homeassistant/options", json={"result": "ok"})
@@ -247,131 +243,6 @@ async def test_update_addon(hass: HomeAssistant, update_addon: AsyncMock) -> Non
    update_addon.assert_called_once_with("test", StoreAddonUpdate(backup=False))


-async def test_update_addon_progress(
-    hass: HomeAssistant, hass_ws_client: WebSocketGenerator
-) -> None:
-    """Test progress reporting for addon update."""
-    config_entry = MockConfigEntry(domain=DOMAIN, data={}, unique_id=DOMAIN)
-    config_entry.add_to_hass(hass)
-
-    with patch.dict(os.environ, MOCK_ENVIRON):
-        result = await async_setup_component(
-            hass,
-            "hassio",
-            {"http": {"server_port": 9999, "server_host": "127.0.0.1"}, "hassio": {}},
-        )
-        assert result
-    await hass.async_block_till_done()
-
-    client = await hass_ws_client(hass)
-    message_id = 0
-    job_uuid = uuid4().hex
-
-    def make_job_message(progress: float, done: bool | None):
-        nonlocal message_id
-        message_id += 1
-        return {
-            "id": message_id,
-            "type": "supervisor/event",
-            "data": {
-                "event": "job",
-                "data": {
-                    "uuid": job_uuid,
-                    "created": "2025-09-29T00:00:00.000000+00:00",
-                    "name": "addon_manager_update",
-                    "reference": "test",
-                    "progress": progress,
-                    "done": done,
-                    "stage": None,
-                    "extra": {"total": 1234567890} if progress > 0 else None,
-                    "errors": [],
-                },
-            },
-        }
-
-    await client.send_json(make_job_message(progress=0, done=None))
-    msg = await client.receive_json()
-    assert msg["success"]
-    await hass.async_block_till_done()
-
-    assert hass.states.get("update.test_update").attributes.get("in_progress") is False
-    assert (
-        hass.states.get("update.test_update").attributes.get("update_percentage")
-        is None
-    )
-
-    await client.send_json(make_job_message(progress=5, done=False))
-    msg = await client.receive_json()
-    assert msg["success"]
-    await hass.async_block_till_done()
-
-    assert hass.states.get("update.test_update").attributes.get("in_progress") is True
-    assert (
-        hass.states.get("update.test_update").attributes.get("update_percentage") == 5
-    )
-
-    await client.send_json(make_job_message(progress=50, done=False))
-    msg = await client.receive_json()
-    assert msg["success"]
-    await hass.async_block_till_done()
-
-    assert hass.states.get("update.test_update").attributes.get("in_progress") is True
-    assert (
-        hass.states.get("update.test_update").attributes.get("update_percentage") == 50
-    )
-
-    await client.send_json(make_job_message(progress=100, done=True))
-    msg = await client.receive_json()
-    assert msg["success"]
-    await hass.async_block_till_done()
-
-    assert hass.states.get("update.test_update").attributes.get("in_progress") is False
-    assert (
-        hass.states.get("update.test_update").attributes.get("update_percentage")
-        is None
-    )
-
-
-async def test_addon_update_progress_startup(
-    hass: HomeAssistant, jobs_info: AsyncMock
-) -> None:
-    """Test addon update in progress during home assistant startup."""
-    jobs_info.return_value = JobsInfo(
-        ignore_conditions=[],
-        jobs=[
-            Job(
-                name="addon_manager_update",
-                reference="test",
-                uuid=uuid4().hex,
-                progress=50,
-                stage=None,
-                done=False,
-                errors=[],
-                created=datetime.now(),
-                child_jobs=[],
-                extra={"total": 1234567890},
-            )
-        ],
-    )
-
-    config_entry = MockConfigEntry(domain=DOMAIN, data={}, unique_id=DOMAIN)
-    config_entry.add_to_hass(hass)
-
-    with patch.dict(os.environ, MOCK_ENVIRON):
-        result = await async_setup_component(
-            hass,
-            "hassio",
-            {"http": {"server_port": 9999, "server_host": "127.0.0.1"}, "hassio": {}},
-        )
-        assert result
-    await hass.async_block_till_done()
-
-    assert hass.states.get("update.test_update").attributes.get("in_progress") is True
-    assert (
-        hass.states.get("update.test_update").attributes.get("update_percentage") == 50
-    )
-
-
async def setup_backup_integration(hass: HomeAssistant) -> None:
    """Set up the backup integration."""
    assert await async_setup_component(hass, "backup", {})
@@ -759,186 +630,6 @@ async def test_update_core(hass: HomeAssistant, supervisor_client: AsyncMock) ->
    )


-async def test_update_core_progress(
-    hass: HomeAssistant, hass_ws_client: WebSocketGenerator
-) -> None:
-    """Test progress reporting for core update."""
-    config_entry = MockConfigEntry(domain=DOMAIN, data={}, unique_id=DOMAIN)
-    config_entry.add_to_hass(hass)
-
-    with patch.dict(os.environ, MOCK_ENVIRON):
-        result = await async_setup_component(
-            hass,
-            "hassio",
-            {"http": {"server_port": 9999, "server_host": "127.0.0.1"}, "hassio": {}},
-        )
-        assert result
-    await hass.async_block_till_done()
-
-    client = await hass_ws_client(hass)
-    message_id = 0
-    job_uuid = uuid4().hex
-
-    def make_job_message(
-        progress: float, done: bool | None, errors: list[dict[str, str]] | None = None
-    ):
-        nonlocal message_id
-        message_id += 1
-        return {
-            "id": message_id,
-            "type": "supervisor/event",
-            "data": {
-                "event": "job",
-                "data": {
-                    "uuid": job_uuid,
-                    "created": "2025-09-29T00:00:00.000000+00:00",
-                    "name": "home_assistant_core_update",
-                    "reference": None,
-                    "progress": progress,
-                    "done": done,
-                    "stage": None,
-                    "extra": {"total": 1234567890} if progress > 0 else None,
-                    "errors": errors if errors else [],
-                },
-            },
-        }
-
-    await client.send_json(make_job_message(progress=0, done=None))
-    msg = await client.receive_json()
-    assert msg["success"]
-    await hass.async_block_till_done()
-
-    assert (
-        hass.states.get("update.home_assistant_core_update").attributes.get(
-            "in_progress"
-        )
-        is False
-    )
-    assert (
-        hass.states.get("update.home_assistant_core_update").attributes.get(
-            "update_percentage"
-        )
-        is None
-    )
-
-    await client.send_json(make_job_message(progress=5, done=False))
-    msg = await client.receive_json()
-    assert msg["success"]
-    await hass.async_block_till_done()
-
-    assert (
-        hass.states.get("update.home_assistant_core_update").attributes.get(
-            "in_progress"
-        )
-        is True
-    )
-    assert (
-        hass.states.get("update.home_assistant_core_update").attributes.get(
-            "update_percentage"
-        )
-        == 5
-    )
-
-    await client.send_json(make_job_message(progress=50, done=False))
-    msg = await client.receive_json()
-    assert msg["success"]
-    await hass.async_block_till_done()
-
-    assert (
-        hass.states.get("update.home_assistant_core_update").attributes.get(
-            "in_progress"
-        )
-        is True
-    )
-    assert (
-        hass.states.get("update.home_assistant_core_update").attributes.get(
-            "update_percentage"
-        )
-        == 50
-    )
-
-    # During a successful update Home Assistant is stopped before the update job
-    # reaches the end. An error ends it early so we use that for test
-    await client.send_json(
-        make_job_message(
-            progress=70,
-            done=True,
-            errors=[
-                {"type": "HomeAssistantUpdateError", "message": "bad", "stage": None}
-            ],
-        )
-    )
-    msg = await client.receive_json()
-    assert msg["success"]
-    await hass.async_block_till_done()
-
-    assert (
-        hass.states.get("update.home_assistant_core_update").attributes.get(
-            "in_progress"
-        )
-        is False
-    )
-    assert (
-        hass.states.get("update.home_assistant_core_update").attributes.get(
-            "update_percentage"
-        )
-        is None
-    )
-
-
-async def test_core_update_progress_startup(
-    hass: HomeAssistant, jobs_info: AsyncMock
-) -> None:
-    """Test core update in progress during home assistant startup.
-
-    This is an odd test, it's very unlikely core will be starting during an update.
-    It is technically possible though as core isn't stopped until the docker portion
-    is complete and updates can be started from CLI.
-    """
-    jobs_info.return_value = JobsInfo(
-        ignore_conditions=[],
-        jobs=[
-            Job(
-                name="home_assistant_core_update",
-                reference=None,
-                uuid=uuid4().hex,
-                progress=50,
-                stage=None,
-                done=False,
-                errors=[],
-                created=datetime.now(),
-                child_jobs=[],
-                extra={"total": 1234567890},
-            )
-        ],
-    )
-
-    config_entry = MockConfigEntry(domain=DOMAIN, data={}, unique_id=DOMAIN)
-    config_entry.add_to_hass(hass)
-
-    with patch.dict(os.environ, MOCK_ENVIRON):
-        result = await async_setup_component(
-            hass,
-            "hassio",
-            {"http": {"server_port": 9999, "server_host": "127.0.0.1"}, "hassio": {}},
-        )
-        assert result
-    await hass.async_block_till_done()
-
-    assert (
-        hass.states.get("update.home_assistant_core_update").attributes.get(
-            "in_progress"
-        )
-        is True
-    )
-    assert (
-        hass.states.get("update.home_assistant_core_update").attributes.get(
-            "update_percentage"
-        )
-        == 50
-    )
-
-
@pytest.mark.parametrize(
    ("commands", "default_mount", "expected_kwargs"),
    [
@@ -45,7 +45,7 @@ def dimmable_light() -> NHCLight:
    mock.is_dimmable = True
    mock.name = "dimmable light"
    mock.suggested_area = "room"
-    mock.state = 255
+    mock.state = 100
    return mock

@@ -41,7 +41,7 @@
  # name: test_entities[light.dimmable_light-state]
  StateSnapshot({
    'attributes': ReadOnlyDict({
-      'brightness': 255,
+      'brightness': 100,
      'color_mode': <ColorMode.BRIGHTNESS: 'brightness'>,
      'friendly_name': 'dimmable light',
      'supported_color_modes': list([
@@ -42,7 +42,7 @@ async def test_entities(
@pytest.mark.parametrize(
    ("light_id", "data", "set_brightness"),
    [
-        (0, {ATTR_ENTITY_ID: "light.light"}, 255),
+        (0, {ATTR_ENTITY_ID: "light.light"}, None),
        (
            1,
            {ATTR_ENTITY_ID: "light.dimmable_light", ATTR_BRIGHTNESS: 50},
@@ -119,7 +119,7 @@ async def test_updating(
    assert hass.states.get("light.light").state == STATE_OFF

    assert hass.states.get("light.dimmable_light").state == STATE_ON
-    assert hass.states.get("light.dimmable_light").attributes[ATTR_BRIGHTNESS] == 255
+    assert hass.states.get("light.dimmable_light").attributes[ATTR_BRIGHTNESS] == 100

    dimmable_light.state = 204
    await find_update_callback(mock_niko_home_control_connection, 2)(204)
@@ -569,7 +569,7 @@ async def test_form_invalid_auth(hass: HomeAssistant, side_effect, error) -> Non
CONF_PROMPT: "Speak like a pirate",
CONF_LLM_HASS_API: ["assist"],
CONF_TEMPERATURE: 0.8,
CONF_CHAT_MODEL: "o5",
CONF_CHAT_MODEL: "gpt-5",
CONF_TOP_P: 0.9,
CONF_MAX_TOKENS: 1000,
CONF_REASONING_EFFORT: "low",

@@ -607,6 +607,52 @@ async def test_form_invalid_auth(hass: HomeAssistant, side_effect, error) -> Non
CONF_CODE_INTERPRETER: False,
},
),
( # Case 5: code interpreter supported to not supported model
{
CONF_RECOMMENDED: False,
CONF_PROMPT: "Speak like a pirate",
CONF_LLM_HASS_API: ["assist"],
CONF_TEMPERATURE: 0.8,
CONF_CHAT_MODEL: "gpt-5",
CONF_TOP_P: 0.9,
CONF_MAX_TOKENS: 1000,
CONF_REASONING_EFFORT: "low",
CONF_CODE_INTERPRETER: True,
CONF_VERBOSITY: "medium",
CONF_WEB_SEARCH: True,
CONF_WEB_SEARCH_CONTEXT_SIZE: "high",
CONF_WEB_SEARCH_USER_LOCATION: False,
},
(
{
CONF_RECOMMENDED: False,
CONF_PROMPT: "Speak like a pirate",
},
{
CONF_TEMPERATURE: 0.8,
CONF_CHAT_MODEL: "gpt-5-pro",
CONF_TOP_P: 0.9,
CONF_MAX_TOKENS: 1000,
},
{
CONF_WEB_SEARCH: True,
CONF_WEB_SEARCH_CONTEXT_SIZE: "high",
CONF_WEB_SEARCH_USER_LOCATION: False,
},
),
{
CONF_RECOMMENDED: False,
CONF_PROMPT: "Speak like a pirate",
CONF_TEMPERATURE: 0.8,
CONF_CHAT_MODEL: "gpt-5-pro",
CONF_TOP_P: 0.9,
CONF_MAX_TOKENS: 1000,
CONF_VERBOSITY: "medium",
CONF_WEB_SEARCH: True,
CONF_WEB_SEARCH_CONTEXT_SIZE: "high",
CONF_WEB_SEARCH_USER_LOCATION: False,
},
),
],
)
async def test_subentry_switching(

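As a reading aid for Case 5 above, a minimal sketch of the behaviour that parametrization exercises: when the selected chat model changes to one that, per the test data, does not support the code interpreter, that option is dropped while still-supported options such as web search are kept. The capability table and function below are illustrative assumptions, not the component's actual implementation.

# Capability table implied by the test data above; illustrative only.
MODEL_SUPPORTS_CODE_INTERPRETER = {"gpt-5": True, "gpt-5-pro": False}


def drop_unsupported_options(options: dict[str, object], model: str) -> dict[str, object]:
    """Remove options the newly selected model cannot use (sketch)."""
    cleaned = dict(options)
    if not MODEL_SUPPORTS_CODE_INTERPRETER.get(model, False):
        cleaned.pop("code_interpreter", None)
    return cleaned


# drop_unsupported_options({"code_interpreter": True, "web_search": True}, "gpt-5-pro")
# -> {"web_search": True}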
@@ -80,7 +80,9 @@ async def test_low_battery(hass: HomeAssistant) -> None:
async def test_initial_states(hass: HomeAssistant) -> None:
"""Test plant initialises attributes if sensor already exists."""
hass.states.async_set(
MOISTURE_ENTITY, 5, {ATTR_UNIT_OF_MEASUREMENT: UnitOfConductivity.MICROSIEMENS}
MOISTURE_ENTITY,
5,
{ATTR_UNIT_OF_MEASUREMENT: UnitOfConductivity.MICROSIEMENS_PER_CM},
)
plant_name = "some_plant"
assert await async_setup_component(

@@ -101,7 +103,9 @@ async def test_update_states(hass: HomeAssistant) -> None:
hass, plant.DOMAIN, {plant.DOMAIN: {plant_name: GOOD_CONFIG}}
)
hass.states.async_set(
MOISTURE_ENTITY, 5, {ATTR_UNIT_OF_MEASUREMENT: UnitOfConductivity.MICROSIEMENS}
MOISTURE_ENTITY,
5,
{ATTR_UNIT_OF_MEASUREMENT: UnitOfConductivity.MICROSIEMENS_PER_CM},
)
await hass.async_block_till_done()
state = hass.states.get(f"plant.{plant_name}")

@@ -121,7 +125,7 @@ async def test_unavailable_state(hass: HomeAssistant) -> None:
hass.states.async_set(
MOISTURE_ENTITY,
STATE_UNAVAILABLE,
{ATTR_UNIT_OF_MEASUREMENT: UnitOfConductivity.MICROSIEMENS},
{ATTR_UNIT_OF_MEASUREMENT: UnitOfConductivity.MICROSIEMENS_PER_CM},
)
await hass.async_block_till_done()
state = hass.states.get(f"plant.{plant_name}")

@@ -139,7 +143,9 @@ async def test_state_problem_if_unavailable(hass: HomeAssistant) -> None:
hass, plant.DOMAIN, {plant.DOMAIN: {plant_name: GOOD_CONFIG}}
)
hass.states.async_set(
MOISTURE_ENTITY, 42, {ATTR_UNIT_OF_MEASUREMENT: UnitOfConductivity.MICROSIEMENS}
MOISTURE_ENTITY,
42,
{ATTR_UNIT_OF_MEASUREMENT: UnitOfConductivity.MICROSIEMENS_PER_CM},
)
await hass.async_block_till_done()
state = hass.states.get(f"plant.{plant_name}")

@@ -148,7 +154,7 @@ async def test_state_problem_if_unavailable(hass: HomeAssistant) -> None:
hass.states.async_set(
MOISTURE_ENTITY,
STATE_UNAVAILABLE,
{ATTR_UNIT_OF_MEASUREMENT: UnitOfConductivity.MICROSIEMENS},
{ATTR_UNIT_OF_MEASUREMENT: UnitOfConductivity.MICROSIEMENS_PER_CM},
)
await hass.async_block_till_done()
state = hass.states.get(f"plant.{plant_name}")

241
tests/components/portainer/snapshots/test_sensor.ambr
Normal file
@@ -0,0 +1,241 @@
# serializer version: 1
# name: test_all_entities[sensor.focused_einstein_image-entry]
EntityRegistryEntrySnapshot({
'aliases': set({
}),
'area_id': None,
'capabilities': None,
'config_entry_id': <ANY>,
'config_subentry_id': <ANY>,
'device_class': None,
'device_id': <ANY>,
'disabled_by': None,
'domain': 'sensor',
'entity_category': None,
'entity_id': 'sensor.focused_einstein_image',
'has_entity_name': True,
'hidden_by': None,
'icon': None,
'id': <ANY>,
'labels': set({
}),
'name': None,
'options': dict({
}),
'original_device_class': None,
'original_icon': None,
'original_name': 'Image',
'platform': 'portainer',
'previous_unique_id': None,
'suggested_object_id': None,
'supported_features': 0,
'translation_key': 'image',
'unique_id': 'portainer_test_entry_123_focused_einstein_image',
'unit_of_measurement': None,
})
# ---
# name: test_all_entities[sensor.focused_einstein_image-state]
StateSnapshot({
'attributes': ReadOnlyDict({
'friendly_name': 'focused_einstein Image',
}),
'context': <ANY>,
'entity_id': 'sensor.focused_einstein_image',
'last_changed': <ANY>,
'last_reported': <ANY>,
'last_updated': <ANY>,
'state': 'docker.io/library/redis:7',
})
# ---
# name: test_all_entities[sensor.funny_chatelet_image-entry]
EntityRegistryEntrySnapshot({
'aliases': set({
}),
'area_id': None,
'capabilities': None,
'config_entry_id': <ANY>,
'config_subentry_id': <ANY>,
'device_class': None,
'device_id': <ANY>,
'disabled_by': None,
'domain': 'sensor',
'entity_category': None,
'entity_id': 'sensor.funny_chatelet_image',
'has_entity_name': True,
'hidden_by': None,
'icon': None,
'id': <ANY>,
'labels': set({
}),
'name': None,
'options': dict({
}),
'original_device_class': None,
'original_icon': None,
'original_name': 'Image',
'platform': 'portainer',
'previous_unique_id': None,
'suggested_object_id': None,
'supported_features': 0,
'translation_key': 'image',
'unique_id': 'portainer_test_entry_123_funny_chatelet_image',
'unit_of_measurement': None,
})
# ---
# name: test_all_entities[sensor.funny_chatelet_image-state]
StateSnapshot({
'attributes': ReadOnlyDict({
'friendly_name': 'funny_chatelet Image',
}),
'context': <ANY>,
'entity_id': 'sensor.funny_chatelet_image',
'last_changed': <ANY>,
'last_reported': <ANY>,
'last_updated': <ANY>,
'state': 'docker.io/library/ubuntu:latest',
})
# ---
# name: test_all_entities[sensor.practical_morse_image-entry]
EntityRegistryEntrySnapshot({
'aliases': set({
}),
'area_id': None,
'capabilities': None,
'config_entry_id': <ANY>,
'config_subentry_id': <ANY>,
'device_class': None,
'device_id': <ANY>,
'disabled_by': None,
'domain': 'sensor',
'entity_category': None,
'entity_id': 'sensor.practical_morse_image',
'has_entity_name': True,
'hidden_by': None,
'icon': None,
'id': <ANY>,
'labels': set({
}),
'name': None,
'options': dict({
}),
'original_device_class': None,
'original_icon': None,
'original_name': 'Image',
'platform': 'portainer',
'previous_unique_id': None,
'suggested_object_id': None,
'supported_features': 0,
'translation_key': 'image',
'unique_id': 'portainer_test_entry_123_practical_morse_image',
'unit_of_measurement': None,
})
# ---
# name: test_all_entities[sensor.practical_morse_image-state]
StateSnapshot({
'attributes': ReadOnlyDict({
'friendly_name': 'practical_morse Image',
}),
'context': <ANY>,
'entity_id': 'sensor.practical_morse_image',
'last_changed': <ANY>,
'last_reported': <ANY>,
'last_updated': <ANY>,
'state': 'docker.io/library/python:3.13-slim',
})
# ---
# name: test_all_entities[sensor.serene_banach_image-entry]
EntityRegistryEntrySnapshot({
'aliases': set({
}),
'area_id': None,
'capabilities': None,
'config_entry_id': <ANY>,
'config_subentry_id': <ANY>,
'device_class': None,
'device_id': <ANY>,
'disabled_by': None,
'domain': 'sensor',
'entity_category': None,
'entity_id': 'sensor.serene_banach_image',
'has_entity_name': True,
'hidden_by': None,
'icon': None,
'id': <ANY>,
'labels': set({
}),
'name': None,
'options': dict({
}),
'original_device_class': None,
'original_icon': None,
'original_name': 'Image',
'platform': 'portainer',
'previous_unique_id': None,
'suggested_object_id': None,
'supported_features': 0,
'translation_key': 'image',
'unique_id': 'portainer_test_entry_123_serene_banach_image',
'unit_of_measurement': None,
})
# ---
# name: test_all_entities[sensor.serene_banach_image-state]
StateSnapshot({
'attributes': ReadOnlyDict({
'friendly_name': 'serene_banach Image',
}),
'context': <ANY>,
'entity_id': 'sensor.serene_banach_image',
'last_changed': <ANY>,
'last_reported': <ANY>,
'last_updated': <ANY>,
'state': 'docker.io/library/nginx:latest',
})
# ---
# name: test_all_entities[sensor.stoic_turing_image-entry]
EntityRegistryEntrySnapshot({
'aliases': set({
}),
'area_id': None,
'capabilities': None,
'config_entry_id': <ANY>,
'config_subentry_id': <ANY>,
'device_class': None,
'device_id': <ANY>,
'disabled_by': None,
'domain': 'sensor',
'entity_category': None,
'entity_id': 'sensor.stoic_turing_image',
'has_entity_name': True,
'hidden_by': None,
'icon': None,
'id': <ANY>,
'labels': set({
}),
'name': None,
'options': dict({
}),
'original_device_class': None,
'original_icon': None,
'original_name': 'Image',
'platform': 'portainer',
'previous_unique_id': None,
'suggested_object_id': None,
'supported_features': 0,
'translation_key': 'image',
'unique_id': 'portainer_test_entry_123_stoic_turing_image',
'unit_of_measurement': None,
})
# ---
# name: test_all_entities[sensor.stoic_turing_image-state]
StateSnapshot({
'attributes': ReadOnlyDict({
'friendly_name': 'stoic_turing Image',
}),
'context': <ANY>,
'entity_id': 'sensor.stoic_turing_image',
'last_changed': <ANY>,
'last_reported': <ANY>,
'last_updated': <ANY>,
'state': 'docker.io/library/postgres:15',
})
# ---

32
tests/components/portainer/test_sensor.py
Normal file
@@ -0,0 +1,32 @@
"""Tests for the Portainer sensor platform."""

from unittest.mock import patch

import pytest
from syrupy.assertion import SnapshotAssertion

from homeassistant.const import Platform
from homeassistant.core import HomeAssistant
from homeassistant.helpers import entity_registry as er

from . import setup_integration

from tests.common import MockConfigEntry, snapshot_platform


@pytest.mark.usefixtures("mock_portainer_client")
async def test_all_entities(
hass: HomeAssistant,
snapshot: SnapshotAssertion,
mock_config_entry: MockConfigEntry,
entity_registry: er.EntityRegistry,
) -> None:
"""Test all entities."""
with patch(
"homeassistant.components.portainer._PLATFORMS",
[Platform.SENSOR],
):
await setup_integration(hass, mock_config_entry)
await snapshot_platform(
hass, entity_registry, snapshot, mock_config_entry.entry_id
)

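The test above compares every registered Portainer sensor against the snapshot file shown earlier; setup_integration is imported from the Portainer test package. As an orientation aid, a plausible shape for such a helper is sketched below; it is an assumption following the common Home Assistant test pattern, not code copied from the repository.

from homeassistant.core import HomeAssistant

from tests.common import MockConfigEntry


async def setup_integration(hass: HomeAssistant, config_entry: MockConfigEntry) -> None:
    """Add the config entry to hass and finish setting it up (sketch)."""
    config_entry.add_to_hass(hass)
    await hass.config_entries.async_setup(config_entry.entry_id)
    await hass.async_block_till_done()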
@@ -1785,7 +1785,7 @@ async def test_unit_conversion_priority_suggested_unit_change_2(
UnitOfBloodGlucoseConcentration.MILLIGRAMS_PER_DECILITER,
0,
),
(SensorDeviceClass.CONDUCTIVITY, UnitOfConductivity.MICROSIEMENS, 1),
(SensorDeviceClass.CONDUCTIVITY, UnitOfConductivity.MICROSIEMENS_PER_CM, 1),
(SensorDeviceClass.CURRENT, UnitOfElectricCurrent.MILLIAMPERE, 0),
(SensorDeviceClass.DATA_RATE, UnitOfDataRate.KILOBITS_PER_SECOND, 0),
(SensorDeviceClass.DATA_SIZE, UnitOfInformation.KILOBITS, 0),

@@ -127,7 +127,7 @@
'suggested_object_id': None,
'supported_features': 0,
'translation_key': None,
'unique_id': '123456789ABC-button:200',
'unique_id': '123456789ABC-button:200-button_generic',
'unit_of_measurement': None,
})
# ---

@@ -175,7 +175,7 @@
'suggested_object_id': None,
'supported_features': 0,
'translation_key': None,
'unique_id': '123456789ABC-button:200',
'unique_id': '123456789ABC-button:200-button_generic',
'unit_of_measurement': None,
})
# ---

@@ -9,7 +9,7 @@ import pytest
from syrupy.assertion import SnapshotAssertion

from homeassistant.components.button import DOMAIN as BUTTON_DOMAIN, SERVICE_PRESS
from homeassistant.components.shelly.const import DOMAIN
from homeassistant.components.shelly.const import DOMAIN, MODEL_FRANKEVER_WATER_VALVE
from homeassistant.config_entries import SOURCE_REAUTH, ConfigEntryState
from homeassistant.const import ATTR_ENTITY_ID, STATE_UNKNOWN, Platform
from homeassistant.core import HomeAssistant

@@ -17,7 +17,13 @@ from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers.device_registry import DeviceRegistry
from homeassistant.helpers.entity_registry import EntityRegistry

from . import init_integration, patch_platforms, register_device, register_entity
from . import (
MOCK_MAC,
init_integration,
patch_platforms,
register_device,
register_entity,
)


@pytest.fixture(autouse=True)

@@ -417,3 +423,56 @@ async def test_migrate_unique_id_blu_trv(
assert entity_entry.unique_id == "F8447725F0DD-blutrv:200-calibrate"

assert "Migrating unique_id for button.trv_name_calibrate" in caplog.text


@pytest.mark.parametrize(
("old_id", "new_id", "role"),
[
("button", "button_generic", None),
("button", "button_open", "open"),
("button", "button_close", "close"),
],
)
async def test_migrate_unique_id_virtual_components_roles(
hass: HomeAssistant,
mock_rpc_device: Mock,
entity_registry: EntityRegistry,
caplog: pytest.LogCaptureFixture,
monkeypatch: pytest.MonkeyPatch,
old_id: str,
new_id: str,
role: str | None,
) -> None:
"""Test migration of unique_id for virtual components to include role."""
entry = await init_integration(
hass, 3, model=MODEL_FRANKEVER_WATER_VALVE, skip_setup=True
)
old_unique_id = f"{MOCK_MAC}-{old_id}:200"
new_unique_id = f"{old_unique_id}-{new_id}"
config = deepcopy(mock_rpc_device.config)
if role:
config[f"{old_id}:200"] = {
"role": role,
}
else:
config[f"{old_id}:200"] = {}
monkeypatch.setattr(mock_rpc_device, "config", config)

entity = entity_registry.async_get_or_create(
suggested_object_id="test_name_test_button",
disabled_by=None,
domain=BUTTON_DOMAIN,
platform=DOMAIN,
unique_id=old_unique_id,
config_entry=entry,
)
assert entity.unique_id == old_unique_id

await hass.config_entries.async_setup(entry.entry_id)
await hass.async_block_till_done()

entity_entry = entity_registry.async_get("button.test_name_test_button")
assert entity_entry
assert entity_entry.unique_id == new_unique_id

assert "Migrating unique_id for button.test_name_test_button" in caplog.text

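To summarise the mapping the parametrized test above exercises, here is a small hypothetical helper that builds the post-migration unique_id from the device MAC, the component key, and an optional role. The suffixes come straight from the test parameters; the function itself is an illustration, not the Shelly integration's migration code.

def migrated_virtual_button_unique_id(mac: str, key: str, role: str | None) -> str:
    """Build the post-migration unique_id for a virtual button (sketch)."""
    suffix = f"button_{role}" if role else "button_generic"
    return f"{mac}-{key}-{suffix}"


# migrated_virtual_button_unique_id("123456789ABC", "button:200", "open")
# -> "123456789ABC-button:200-button_open"
# migrated_virtual_button_unique_id("123456789ABC", "button:200", None)
# -> "123456789ABC-button:200-button_generic"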
@@ -22,7 +22,7 @@ from homeassistant.core import (
HomeAssistant,
callback,
)
from homeassistant.exceptions import TemplateError
from homeassistant.exceptions import HomeAssistantError, TemplateError
from homeassistant.helpers.device_registry import EVENT_DEVICE_REGISTRY_UPDATED
from homeassistant.helpers.entity_registry import EVENT_ENTITY_REGISTRY_UPDATED
from homeassistant.helpers.event import (

@@ -4975,43 +4975,25 @@ async def test_async_track_state_report_change_event(hass: HomeAssistant) -> Non
}


async def test_async_track_template_no_hass_deprecated(
hass: HomeAssistant, caplog: pytest.LogCaptureFixture
) -> None:
"""Test async_track_template with a template without hass is deprecated."""
message = (
"Detected code that calls async_track_template_result with template without "
"hass. This will stop working in Home Assistant 2025.10, please "
"report this issue"
)
async def test_async_track_template_no_hass_fails(hass: HomeAssistant) -> None:
"""Test async_track_template with a template without hass now fails."""
message = "Calls async_track_template_result with template without hass"

async_track_template(hass, Template("blah"), lambda x, y, z: None)
assert message in caplog.text
caplog.clear()
with pytest.raises(HomeAssistantError, match=message):
async_track_template(hass, Template("blah"), lambda x, y, z: None)

async_track_template(hass, Template("blah", hass), lambda x, y, z: None)
assert message not in caplog.text
caplog.clear()


async def test_async_track_template_result_no_hass_deprecated(
hass: HomeAssistant, caplog: pytest.LogCaptureFixture
) -> None:
"""Test async_track_template_result with a template without hass is deprecated."""
message = (
"Detected code that calls async_track_template_result with template without "
"hass. This will stop working in Home Assistant 2025.10, please "
"report this issue"
)
async def test_async_track_template_result_no_hass_fails(hass: HomeAssistant) -> None:
"""Test async_track_template_result with a template without hass now fails."""
message = "Calls async_track_template_result with template without hass"

async_track_template_result(
hass, [TrackTemplate(Template("blah"), None)], lambda x, y, z: None
)
assert message in caplog.text
caplog.clear()
with pytest.raises(HomeAssistantError, match=message):
async_track_template_result(
hass, [TrackTemplate(Template("blah"), None)], lambda x, y, z: None
)

async_track_template_result(
hass, [TrackTemplate(Template("blah", hass), None)], lambda x, y, z: None
)
assert message not in caplog.text
caplog.clear()

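The change above turns the old deprecation warning into a hard HomeAssistantError, so templates handed to the tracking helpers must be bound to hass when they are created. A minimal sketch of the now-required calling pattern, using an illustrative sensor entity id and a no-op listener:

from homeassistant.core import HomeAssistant
from homeassistant.helpers.event import TrackTemplate, async_track_template_result
from homeassistant.helpers.template import Template


def start_tracking(hass: HomeAssistant) -> None:
    """Track a template result the supported way: pass hass to Template()."""
    template = Template("{{ states('sensor.example') }}", hass)  # hass is required here
    async_track_template_result(
        hass, [TrackTemplate(template, None)], lambda *args: None
    )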
@@ -1,19 +1,12 @@
"""Test const module."""

from enum import Enum
import logging
import sys
from unittest.mock import Mock, patch

import pytest

from homeassistant import const

from .common import (
extract_stack_to_frame,
help_test_all,
import_and_test_deprecated_constant,
)
from .common import help_test_all, import_and_test_deprecated_constant


def _create_tuples(

@@ -48,78 +41,3 @@ def test_deprecated_constant_name_changes(
replacement,
breaks_in_version,
)


def test_deprecated_unit_of_conductivity_alias() -> None:
"""Test UnitOfConductivity deprecation."""

# Test the deprecated members are aliases
assert set(const.UnitOfConductivity) == {"S/cm", "μS/cm", "mS/cm"}


def test_deprecated_unit_of_conductivity_members(
caplog: pytest.LogCaptureFixture,
) -> None:
"""Test UnitOfConductivity deprecation."""

module_name = "config.custom_components.hue.light"
filename = f"/home/paulus/{module_name.replace('.', '/')}.py"

with (
patch.dict(sys.modules, {module_name: Mock(__file__=filename)}),
patch(
"homeassistant.helpers.frame.linecache.getline",
return_value="await session.close()",
),
patch(
"homeassistant.helpers.frame.get_current_frame",
return_value=extract_stack_to_frame(
[
Mock(
filename="/home/paulus/homeassistant/core.py",
lineno="23",
line="do_something()",
),
Mock(
filename=filename,
lineno="23",
line="await session.close()",
),
Mock(
filename="/home/paulus/aiohue/lights.py",
lineno="2",
line="something()",
),
]
),
),
):
const.UnitOfConductivity.SIEMENS # noqa: B018
const.UnitOfConductivity.MICROSIEMENS # noqa: B018
const.UnitOfConductivity.MILLISIEMENS # noqa: B018

assert len(caplog.record_tuples) == 3

def deprecation_message(member: str, replacement: str) -> str:
return (
f"The deprecated enum member UnitOfConductivity.{member} was used from hue. "
"It will be removed in HA Core 2025.11.0. Use UnitOfConductivity."
f"{replacement} instead, please report it to the author of the 'hue' custom"
" integration"
)

assert (
const.__name__,
logging.WARNING,
deprecation_message("SIEMENS", "SIEMENS_PER_CM"),
) in caplog.record_tuples
assert (
const.__name__,
logging.WARNING,
deprecation_message("MICROSIEMENS", "MICROSIEMENS_PER_CM"),
) in caplog.record_tuples
assert (
const.__name__,
logging.WARNING,
deprecation_message("MILLISIEMENS", "MILLISIEMENS_PER_CM"),
) in caplog.record_tuples

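The removed tests above document the replacement mapping for the deprecated conductivity members. As a quick reference, the mapping and the scale relationships it implies (the same relationships exercised by the ConductivityConverter cases removed in the next hunk) are sketched below.

# Replacement mapping taken from the deprecation messages above.
DEPRECATED_TO_REPLACEMENT = {
    "SIEMENS": "SIEMENS_PER_CM",
    "MILLISIEMENS": "MILLISIEMENS_PER_CM",
    "MICROSIEMENS": "MICROSIEMENS_PER_CM",
}

# Only the names changed, not the magnitudes:
# 1 S/cm == 1_000 mS/cm == 1_000_000 µS/cm, so 5 S/cm -> 5e3 mS/cm -> 5e6 µS/cm,
# matching the converter cases in the following hunk.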
@@ -281,48 +281,6 @@ _CONVERTED_VALUE: dict[
),
],
ConductivityConverter: [
# Deprecated to deprecated
(5, UnitOfConductivity.SIEMENS, 5e3, UnitOfConductivity.MILLISIEMENS),
(5, UnitOfConductivity.SIEMENS, 5e6, UnitOfConductivity.MICROSIEMENS),
(5, UnitOfConductivity.MILLISIEMENS, 5e3, UnitOfConductivity.MICROSIEMENS),
(5, UnitOfConductivity.MILLISIEMENS, 5e-3, UnitOfConductivity.SIEMENS),
(5e6, UnitOfConductivity.MICROSIEMENS, 5e3, UnitOfConductivity.MILLISIEMENS),
(5e6, UnitOfConductivity.MICROSIEMENS, 5, UnitOfConductivity.SIEMENS),
# Deprecated to new
(5, UnitOfConductivity.SIEMENS, 5e3, UnitOfConductivity.MILLISIEMENS_PER_CM),
(5, UnitOfConductivity.SIEMENS, 5e6, UnitOfConductivity.MICROSIEMENS_PER_CM),
(
5,
UnitOfConductivity.MILLISIEMENS,
5e3,
UnitOfConductivity.MICROSIEMENS_PER_CM,
),
(5, UnitOfConductivity.MILLISIEMENS, 5e-3, UnitOfConductivity.SIEMENS_PER_CM),
(
5e6,
UnitOfConductivity.MICROSIEMENS,
5e3,
UnitOfConductivity.MILLISIEMENS_PER_CM,
),
(5e6, UnitOfConductivity.MICROSIEMENS, 5, UnitOfConductivity.SIEMENS_PER_CM),
# New to deprecated
(5, UnitOfConductivity.SIEMENS_PER_CM, 5e3, UnitOfConductivity.MILLISIEMENS),
(5, UnitOfConductivity.SIEMENS_PER_CM, 5e6, UnitOfConductivity.MICROSIEMENS),
(
5,
UnitOfConductivity.MILLISIEMENS_PER_CM,
5e3,
UnitOfConductivity.MICROSIEMENS,
),
(5, UnitOfConductivity.MILLISIEMENS_PER_CM, 5e-3, UnitOfConductivity.SIEMENS),
(
5e6,
UnitOfConductivity.MICROSIEMENS_PER_CM,
5e3,
UnitOfConductivity.MILLISIEMENS,
),
(5e6, UnitOfConductivity.MICROSIEMENS_PER_CM, 5, UnitOfConductivity.SIEMENS),
# New to new
(
5,
UnitOfConductivity.SIEMENS_PER_CM,