Mirror of https://github.com/home-assistant/core.git (synced 2025-12-02 14:08:12 +00:00)

Compare commits: 42 commits, negative_r ... dev
Commits in this comparison (SHA1):
e2acf30637, 29631a2c5a, 1d31e6d0ea, 8109d9a39c, e1abd451b8, 2c72cd94f2, 3bccb4b89c,
6d4fb30630, c04411f1bc, 753ea023de, 1ca1cf59eb, 5b01bb1a29, 15c89d24eb, b26b2347e6,
7d54103c09, c705a1dc4b, 998bd23446, 3a1a58d6ad, f9219dd841, 402ed7e0f3, 7a1a5df89e,
df558fc1e7, ec66407ef1, 6b99234a43, 393be71009, 12bc1687ec, c59b322c0a, e00266463d,
cbc8a33553, 28582f75d4, 39cccd212d, 329ea33337, 521733c420, 33e9f9a0ff, 5fda2bccbe,
ae75332656, b171785f96, ff3d6783c6, b1e579bea0, 87241ea051, a871ec0bdf, b8829b645a
.github/workflows/codeql.yml (vendored, 4 changed lines)

@@ -24,11 +24,11 @@ jobs:
        uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0

      - name: Initialize CodeQL
-       uses: github/codeql-action/init@fdbfb4d2750291e159f0156def62b853c2798ca2 # v4.31.5
+       uses: github/codeql-action/init@fe4161a26a8629af62121b670040955b330f9af2 # v4.31.6
        with:
          languages: python

      - name: Perform CodeQL Analysis
-       uses: github/codeql-action/analyze@fdbfb4d2750291e159f0156def62b853c2798ca2 # v4.31.5
+       uses: github/codeql-action/analyze@fe4161a26a8629af62121b670040955b330f9af2 # v4.31.6
        with:
          category: "/language:python"
.github/workflows/wheels.yml (vendored, 2 changed lines)

@@ -136,7 +136,7 @@ jobs:
          sed -i "/uv/d" requirements_diff.txt

      - name: Build wheels
-       uses: &home-assistant-wheels home-assistant/wheels@6066c17a2a4aafcf7bdfeae01717f63adfcdba98 # 2025.11.0
+       uses: &home-assistant-wheels home-assistant/wheels@e5742a69d69f0e274e2689c998900c7d19652c21 # 2025.12.0
        with:
          abi: ${{ matrix.abi }}
          tag: musllinux_1_2
@@ -7,6 +7,7 @@ from typing import Any, Final
from homeassistant.const import (
    EVENT_COMPONENT_LOADED,
    EVENT_CORE_CONFIG_UPDATE,
    EVENT_LABS_UPDATED,
    EVENT_LOVELACE_UPDATED,
    EVENT_PANELS_UPDATED,
    EVENT_RECORDER_5MIN_STATISTICS_GENERATED,
@@ -45,6 +46,7 @@ SUBSCRIBE_ALLOWLIST: Final[set[EventType[Any] | str]] = {
    EVENT_STATE_CHANGED,
    EVENT_THEMES_UPDATED,
    EVENT_LABEL_REGISTRY_UPDATED,
    EVENT_LABS_UPDATED,
    EVENT_CATEGORY_REGISTRY_UPDATED,
    EVENT_FLOOR_REGISTRY_UPDATED,
}
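As a minimal illustration of what the allowlist above controls, the sketch below shows a plain membership check of the kind a websocket subscription handler can perform. The `check_subscription` helper and the literal event strings are illustrative assumptions, not Home Assistant API; only `labs_updated` is taken from this compare.

```python
# Illustrative sketch only: gate subscriptions on an allowlist of event types.
from typing import Final

SUBSCRIBE_ALLOWLIST: Final[set[str]] = {
    "component_loaded",
    "state_changed",
    "themes_updated",
    "labs_updated",  # newly allowed by the change above
}


def check_subscription(event_type: str) -> None:
    """Reject subscriptions to event types outside the allowlist."""
    if event_type not in SUBSCRIBE_ALLOWLIST:
        raise ValueError(f"Cannot subscribe to {event_type}")


check_subscription("labs_updated")  # passes after this change
```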
@@ -159,74 +159,74 @@
  "title": "Alarm control panel",
  "triggers": {
    "armed": {
-     "description": "Triggers when an alarm is armed.",
+     "description": "Triggers after one or more alarms become armed, regardless of the mode.",
      "fields": {
        "behavior": {
          "description": "[%key:component::alarm_control_panel::common::trigger_behavior_description%]",
          "name": "[%key:component::alarm_control_panel::common::trigger_behavior_name%]"
        }
      },
-     "name": "When an alarm is armed"
+     "name": "Alarm armed"
    },
    "armed_away": {
-     "description": "Triggers when an alarm is armed away.",
+     "description": "Triggers after one or more alarms become armed in away mode.",
      "fields": {
        "behavior": {
          "description": "[%key:component::alarm_control_panel::common::trigger_behavior_description%]",
          "name": "[%key:component::alarm_control_panel::common::trigger_behavior_name%]"
        }
      },
-     "name": "When an alarm is armed away"
+     "name": "Alarm armed away"
    },
    "armed_home": {
-     "description": "Triggers when an alarm is armed home.",
+     "description": "Triggers after one or more alarms become armed in home mode.",
      "fields": {
        "behavior": {
          "description": "[%key:component::alarm_control_panel::common::trigger_behavior_description%]",
          "name": "[%key:component::alarm_control_panel::common::trigger_behavior_name%]"
        }
      },
-     "name": "When an alarm is armed home"
+     "name": "Alarm armed home"
    },
    "armed_night": {
-     "description": "Triggers when an alarm is armed night.",
+     "description": "Triggers after one or more alarms become armed in night mode.",
      "fields": {
        "behavior": {
          "description": "[%key:component::alarm_control_panel::common::trigger_behavior_description%]",
          "name": "[%key:component::alarm_control_panel::common::trigger_behavior_name%]"
        }
      },
-     "name": "When an alarm is armed night"
+     "name": "Alarm armed night"
    },
    "armed_vacation": {
-     "description": "Triggers when an alarm is armed vacation.",
+     "description": "Triggers after one or more alarms become armed in vacation mode.",
      "fields": {
        "behavior": {
          "description": "[%key:component::alarm_control_panel::common::trigger_behavior_description%]",
          "name": "[%key:component::alarm_control_panel::common::trigger_behavior_name%]"
        }
      },
-     "name": "When an alarm is armed vacation"
+     "name": "Alarm armed vacation"
    },
    "disarmed": {
-     "description": "Triggers when an alarm is disarmed.",
+     "description": "Triggers after one or more alarms become disarmed.",
      "fields": {
        "behavior": {
          "description": "[%key:component::alarm_control_panel::common::trigger_behavior_description%]",
          "name": "[%key:component::alarm_control_panel::common::trigger_behavior_name%]"
        }
      },
-     "name": "When an alarm is disarmed"
+     "name": "Alarm disarmed"
    },
    "triggered": {
-     "description": "Triggers when an alarm is triggered.",
+     "description": "Triggers after one or more alarms become triggered.",
      "fields": {
        "behavior": {
          "description": "[%key:component::alarm_control_panel::common::trigger_behavior_description%]",
          "name": "[%key:component::alarm_control_panel::common::trigger_behavior_name%]"
        }
      },
-     "name": "When an alarm is triggered"
+     "name": "Alarm triggered"
    }
  }
}
@@ -112,44 +112,44 @@
  "title": "Assist satellite",
  "triggers": {
    "idle": {
-     "description": "Triggers when an Assist satellite becomes idle.",
+     "description": "Triggers after one or more voice assistant satellites become idle after having processed a command.",
      "fields": {
        "behavior": {
          "description": "[%key:component::assist_satellite::common::trigger_behavior_description%]",
          "name": "[%key:component::assist_satellite::common::trigger_behavior_name%]"
        }
      },
-     "name": "When an Assist satellite becomes idle"
+     "name": "Satellite became idle"
    },
    "listening": {
-     "description": "Triggers when an Assist satellite starts listening.",
+     "description": "Triggers after one or more voice assistant satellites start listening for a command from someone.",
      "fields": {
        "behavior": {
          "description": "[%key:component::assist_satellite::common::trigger_behavior_description%]",
          "name": "[%key:component::assist_satellite::common::trigger_behavior_name%]"
        }
      },
-     "name": "When an Assist satellite starts listening"
+     "name": "Satellite started listening"
    },
    "processing": {
-     "description": "Triggers when an Assist satellite is processing.",
+     "description": "Triggers after one or more voice assistant satellites start processing a command after having heard it.",
      "fields": {
        "behavior": {
          "description": "[%key:component::assist_satellite::common::trigger_behavior_description%]",
          "name": "[%key:component::assist_satellite::common::trigger_behavior_name%]"
        }
      },
-     "name": "When an Assist satellite is processing"
+     "name": "Satellite started processing"
    },
    "responding": {
-     "description": "Triggers when an Assist satellite is responding.",
+     "description": "Triggers after one or more voice assistant satellites start responding to a command after having processed it, or start announcing something.",
      "fields": {
        "behavior": {
          "description": "[%key:component::assist_satellite::common::trigger_behavior_description%]",
          "name": "[%key:component::assist_satellite::common::trigger_behavior_name%]"
        }
      },
-     "name": "When an Assist satellite is responding"
+     "name": "Satellite started responding"
    }
  }
}
@@ -69,10 +69,10 @@
  },
  "preview_features": {
    "new_triggers_conditions": {
-     "description": "Enables new intuitive triggers and conditions that are more user-friendly than technical state-based options.\n\nThese new automation features support targets across your entire home, letting you trigger automations for any entity, device, area, floor, or label (for example, when any light in your living room turned on). Integrations can now also provide their own intuitive triggers and conditions, just like actions.\n\nThis preview also includes a new tree view to help you navigate your home when adding triggers, conditions, and actions.",
-     "disable_confirmation": "Disabling this preview will cause automations and scripts that use the new intuitive triggers and conditions to fail.\n\nBefore disabling, ensure that your automations or scripts do not rely on this feature.",
-     "enable_confirmation": "This feature is still in development and may change. These new intuitive triggers and conditions are being refined based on user feedback and are not yet complete.\n\nBy enabling this preview, you'll have early access to these new capabilities, but be aware that they may be modified or updated in future releases.",
-     "name": "Intuitive triggers and conditions"
+     "description": "Enables new purpose-specific triggers and conditions that are more user-friendly than technical state-based options.\n\nThese new automation features support targets across your entire home, letting you trigger automations for any entity, device, area, floor, or label (for example, when any light in your living room turned on). Integrations can now also provide their own purpose-specific triggers and conditions, just like actions.\n\nThis preview also includes a new tree view to help you navigate your home when adding triggers, conditions, and actions.",
+     "disable_confirmation": "Disabling this preview will cause automations and scripts that use the new purpose-specific triggers and conditions to fail.\n\nBefore disabling, ensure that your automations or scripts do not rely on this feature.",
+     "enable_confirmation": "This feature is still in development and may change. These new purpose-specific triggers and conditions are being refined based on user feedback and are not yet complete.\n\nBy enabling this preview, you'll have early access to these new capabilities, but be aware that they may be modified or updated in future releases.",
+     "name": "Purpose-specific triggers and conditions"
    }
  },
  "services": {
@@ -36,6 +36,7 @@
    }
  ],
  "documentation": "https://www.home-assistant.io/integrations/broadlink",
  "integration_type": "device",
  "iot_class": "local_polling",
  "loggers": ["broadlink"],
  "requirements": ["broadlink==0.19.0"]
@@ -299,54 +299,54 @@
  "title": "Climate",
  "triggers": {
    "started_cooling": {
-     "description": "Triggers when a climate started cooling.",
+     "description": "Triggers after one or more climate-control devices start cooling.",
      "fields": {
        "behavior": {
          "description": "[%key:component::climate::common::trigger_behavior_description%]",
          "name": "[%key:component::climate::common::trigger_behavior_name%]"
        }
      },
-     "name": "When a climate started cooling"
+     "name": "Climate-control device started cooling"
    },
    "started_drying": {
-     "description": "Triggers when a climate started drying.",
+     "description": "Triggers after one or more climate-control devices start drying.",
      "fields": {
        "behavior": {
          "description": "[%key:component::climate::common::trigger_behavior_description%]",
          "name": "[%key:component::climate::common::trigger_behavior_name%]"
        }
      },
-     "name": "When a climate started drying"
+     "name": "Climate-control device started drying"
    },
    "started_heating": {
-     "description": "Triggers when a climate starts to heat.",
+     "description": "Triggers after one or more climate-control devices start heating.",
      "fields": {
        "behavior": {
          "description": "[%key:component::climate::common::trigger_behavior_description%]",
          "name": "[%key:component::climate::common::trigger_behavior_name%]"
        }
      },
-     "name": "When a climate starts to heat"
+     "name": "Climate-control device started heating"
    },
    "turned_off": {
-     "description": "Triggers when a climate is turned off.",
+     "description": "Triggers after one or more climate-control devices turn off.",
      "fields": {
        "behavior": {
          "description": "[%key:component::climate::common::trigger_behavior_description%]",
          "name": "[%key:component::climate::common::trigger_behavior_name%]"
        }
      },
-     "name": "When a climate is turned off"
+     "name": "Climate-control device turned off"
    },
    "turned_on": {
-     "description": "Triggers when a climate is turned on.",
+     "description": "Triggers after one or more climate-control devices turn on, regardless of the mode.",
      "fields": {
        "behavior": {
          "description": "[%key:component::climate::common::trigger_behavior_description%]",
          "name": "[%key:component::climate::common::trigger_behavior_name%]"
        }
      },
-     "name": "When a climate is turned on"
+     "name": "Climate-control device turned on"
    }
  }
}
@@ -6,6 +6,7 @@
  "config_flow": true,
  "dependencies": ["ssdp"],
  "documentation": "https://www.home-assistant.io/integrations/dlna_dmr",
  "integration_type": "device",
  "iot_class": "local_push",
  "loggers": ["async_upnp_client"],
  "requirements": ["async-upnp-client==0.46.0", "getmac==0.9.5"],
@@ -6,6 +6,7 @@
  "config_flow": true,
  "dependencies": ["ssdp"],
  "documentation": "https://www.home-assistant.io/integrations/dlna_dms",
  "integration_type": "service",
  "iot_class": "local_polling",
  "requirements": ["async-upnp-client==0.46.0"],
  "ssdp": [
@@ -5,6 +5,7 @@
  "config_flow": true,
  "dependencies": ["webhook"],
  "documentation": "https://www.home-assistant.io/integrations/ecowitt",
  "integration_type": "device",
  "iot_class": "local_push",
  "requirements": ["aioecowitt==2025.9.2"]
}
@@ -166,24 +166,24 @@
  "title": "Fan",
  "triggers": {
    "turned_off": {
-     "description": "Triggers when a fan is turned off.",
+     "description": "Triggers after one or more fans turn off.",
      "fields": {
        "behavior": {
          "description": "[%key:component::fan::common::trigger_behavior_description%]",
          "name": "[%key:component::fan::common::trigger_behavior_name%]"
        }
      },
-     "name": "When a fan is turned off"
+     "name": "Fan turned off"
    },
    "turned_on": {
-     "description": "Triggers when a fan is turned on.",
+     "description": "Triggers after one or more fans turn on.",
      "fields": {
        "behavior": {
          "description": "[%key:component::fan::common::trigger_behavior_description%]",
          "name": "[%key:component::fan::common::trigger_behavior_name%]"
        }
      },
-     "name": "When a fan is turned on"
+     "name": "Fan turned on"
    }
  }
}
@@ -6,6 +6,7 @@ from dataclasses import dataclass
import logging
from secrets import token_hex
import shutil
from tempfile import mkdtemp

from aiohttp import BasicAuth, ClientSession, UnixConnector
from aiohttp.client_exceptions import ClientConnectionError, ServerConnectionError
@@ -62,11 +63,11 @@ from .const import (
    CONF_DEBUG_UI,
    DEBUG_UI_URL_MESSAGE,
    DOMAIN,
    HA_MANAGED_UNIX_SOCKET,
    HA_MANAGED_URL,
    RECOMMENDED_VERSION,
)
from .server import Server
from .util import get_go2rtc_unix_socket_path

_LOGGER = logging.getLogger(__name__)

@@ -154,10 +155,12 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:

    auth = BasicAuth(username, password)
    # HA will manage the binary
    temp_dir = mkdtemp(prefix="go2rtc-")
    # Manually created session (not using the helper) needs to be closed manually
    # See on_stop listener below
    session = ClientSession(
-       connector=UnixConnector(path=HA_MANAGED_UNIX_SOCKET), auth=auth
+       connector=UnixConnector(path=get_go2rtc_unix_socket_path(temp_dir)),
+       auth=auth,
    )
    server = Server(
        hass,
@@ -166,6 +169,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
        enable_ui=domain_config.get(CONF_DEBUG_UI, False),
        username=username,
        password=password,
        working_dir=temp_dir,
    )
    try:
        await server.start()
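For readers unfamiliar with the connection style used above, the sketch below shows aiohttp talking to a local service over a Unix domain socket. It is a minimal, hedged illustration; the socket path, credentials, and URL are placeholders, not values from this change.

```python
# Sketch: HTTP over a Unix domain socket with aiohttp (illustrative values).
import asyncio

from aiohttp import BasicAuth, ClientSession, UnixConnector


async def fetch_status(socket_path: str) -> str:
    auth = BasicAuth("user", "secret")
    # The UnixConnector routes every request through the socket file; the host
    # part of the URL is only used for the Host header.
    session = ClientSession(connector=UnixConnector(path=socket_path), auth=auth)
    try:
        async with session.get("http://localhost/api") as resp:
            return await resp.text()
    finally:
        # A manually created session must also be closed manually.
        await session.close()


# asyncio.run(fetch_status("/tmp/go2rtc-example/go2rtc.sock"))
```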
@@ -6,7 +6,6 @@ CONF_DEBUG_UI = "debug_ui"
DEBUG_UI_URL_MESSAGE = "Url and debug_ui cannot be set at the same time."
HA_MANAGED_API_PORT = 11984
HA_MANAGED_URL = f"http://localhost:{HA_MANAGED_API_PORT}/"
HA_MANAGED_UNIX_SOCKET = "/run/go2rtc.sock"
# When changing this version, also update the corresponding SHA hash (_GO2RTC_SHA)
# in script/hassfest/docker.py.
RECOMMENDED_VERSION = "1.9.12"
@@ -12,13 +12,13 @@ from go2rtc_client import Go2RtcRestClient
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import HomeAssistantError

-from .const import HA_MANAGED_API_PORT, HA_MANAGED_UNIX_SOCKET, HA_MANAGED_URL
+from .const import HA_MANAGED_API_PORT, HA_MANAGED_URL
+from .util import get_go2rtc_unix_socket_path

_LOGGER = logging.getLogger(__name__)

_TERMINATE_TIMEOUT = 5
_SETUP_TIMEOUT = 30
_SUCCESSFUL_BOOT_MESSAGE = "INF [api] listen addr="
_LOCALHOST_IP = "127.0.0.1"
_LOG_BUFFER_SIZE = 512
_RESPAWN_COOLDOWN = 1

@@ -122,7 +122,9 @@ def _format_list_for_yaml(items: tuple[str, ...]) -> str:
    return f"[{formatted_items}]"


-def _create_temp_file(enable_ui: bool, username: str, password: str) -> str:
+def _create_temp_file(
+    enable_ui: bool, username: str, password: str, working_dir: str
+) -> str:
    """Create temporary config file."""
    app_modules: tuple[str, ...] = _APP_MODULES
    api_paths: tuple[str, ...] = _API_ALLOW_PATHS
@@ -139,11 +141,13 @@ def _create_temp_file(enable_ui: bool, username: str, password: str) -> str:

    # Set delete=False to prevent the file from being deleted when the file is closed
    # Linux is clearing tmp folder on reboot, so no need to delete it manually
-   with NamedTemporaryFile(prefix="go2rtc_", suffix=".yaml", delete=False) as file:
+   with NamedTemporaryFile(
+       prefix="go2rtc_", suffix=".yaml", dir=working_dir, delete=False
+   ) as file:
        file.write(
            _GO2RTC_CONFIG_FORMAT.format(
                listen_config=listen_config,
-               unix_socket=HA_MANAGED_UNIX_SOCKET,
+               unix_socket=get_go2rtc_unix_socket_path(working_dir),
                app_modules=_format_list_for_yaml(app_modules),
                api_allow_paths=_format_list_for_yaml(api_paths),
                username=username,
@@ -165,6 +169,7 @@ class Server:
        enable_ui: bool = False,
        username: str,
        password: str,
        working_dir: str,
    ) -> None:
        """Initialize the server."""
        self._hass = hass
@@ -173,6 +178,7 @@ class Server:
        self._enable_ui = enable_ui
        self._username = username
        self._password = password
        self._working_dir = working_dir
        self._log_buffer: deque[str] = deque(maxlen=_LOG_BUFFER_SIZE)
        self._process: asyncio.subprocess.Process | None = None
        self._startup_complete = asyncio.Event()
@@ -190,7 +196,11 @@ class Server:
        """Start the server."""
        _LOGGER.debug("Starting go2rtc server")
        config_file = await self._hass.async_add_executor_job(
-           _create_temp_file, self._enable_ui, self._username, self._password
+           _create_temp_file,
+           self._enable_ui,
+           self._username,
+           self._password,
+           self._working_dir,
        )

        self._startup_complete.clear()
homeassistant/components/go2rtc/util.py (new file, 12 lines)

@@ -0,0 +1,12 @@
+"""Go2rtc utility functions."""
+
+from pathlib import Path
+
+_HA_MANAGED_UNIX_SOCKET_FILE = "go2rtc.sock"
+
+
+def get_go2rtc_unix_socket_path(path: str | Path) -> str:
+    """Get the Go2rtc unix socket path."""
+    if not isinstance(path, Path):
+        path = Path(path)
+    return str(path / _HA_MANAGED_UNIX_SOCKET_FILE)
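Taken together with the `mkdtemp` call in the setup code above, the new helper replaces the fixed `/run/go2rtc.sock` path with a socket inside a per-instance working directory. A minimal sketch of that layout, with illustrative directory names:

```python
# Sketch of the per-instance socket layout introduced above.
from pathlib import Path
from tempfile import mkdtemp

_SOCKET_FILE = "go2rtc.sock"


def socket_path_for(working_dir: str | Path) -> str:
    """Return the socket path inside the given working directory."""
    return str(Path(working_dir) / _SOCKET_FILE)


working_dir = mkdtemp(prefix="go2rtc-")  # e.g. /tmp/go2rtc-abc123
print(socket_path_for(working_dir))      # e.g. /tmp/go2rtc-abc123/go2rtc.sock
```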
@@ -5,6 +5,7 @@
  "config_flow": true,
  "dependencies": ["application_credentials"],
  "documentation": "https://www.home-assistant.io/integrations/google",
  "integration_type": "service",
  "iot_class": "cloud_polling",
  "loggers": ["googleapiclient"],
  "requirements": ["gcal-sync==8.0.0", "oauth2client==4.1.3", "ical==11.1.0"]
@@ -4,6 +4,7 @@
  "codeowners": [],
  "config_flow": true,
  "documentation": "https://www.home-assistant.io/integrations/google_translate",
  "integration_type": "service",
  "iot_class": "cloud_push",
  "loggers": ["gtts"],
  "requirements": ["gTTS==2.5.3"]
@@ -23,6 +23,6 @@
  "iot_class": "cloud_push",
  "loggers": ["aiohomeconnect"],
  "quality_scale": "platinum",
- "requirements": ["aiohomeconnect==0.23.1"],
+ "requirements": ["aiohomeconnect==0.24.0"],
  "zeroconf": ["_homeconnect._tcp.local."]
}
@@ -11,6 +11,7 @@
  "config_flow": true,
  "dependencies": ["bluetooth_adapters"],
  "documentation": "https://www.home-assistant.io/integrations/ibeacon",
  "integration_type": "hub",
  "iot_class": "local_push",
  "loggers": ["bleak"],
  "requirements": ["ibeacon-ble==1.2.0"],
@@ -5,6 +5,7 @@
  "config_flow": true,
  "dependencies": ["http"],
  "documentation": "https://www.home-assistant.io/integrations/konnected",
  "integration_type": "hub",
  "iot_class": "local_push",
  "loggers": ["konnected"],
  "requirements": ["konnected==1.2.0"],
@@ -10,6 +10,7 @@ from __future__ import annotations
from collections.abc import Callable
import logging

from homeassistant.const import EVENT_LABS_UPDATED
from homeassistant.core import Event, HomeAssistant, callback
from homeassistant.generated.labs import LABS_PREVIEW_FEATURES
from homeassistant.helpers import config_validation as cv
@@ -17,7 +18,7 @@ from homeassistant.helpers.storage import Store
from homeassistant.helpers.typing import ConfigType
from homeassistant.loader import async_get_custom_components

-from .const import DOMAIN, EVENT_LABS_UPDATED, LABS_DATA, STORAGE_KEY, STORAGE_VERSION
+from .const import DOMAIN, LABS_DATA, STORAGE_KEY, STORAGE_VERSION
from .models import (
    EventLabsUpdatedData,
    LabPreviewFeature,
@@ -11,6 +11,4 @@ DOMAIN = "labs"
STORAGE_KEY = "core.labs"
STORAGE_VERSION = 1

EVENT_LABS_UPDATED = "labs_updated"

LABS_DATA: HassKey[LabsData] = HassKey(DOMAIN)
@@ -8,9 +8,10 @@ import voluptuous as vol

from homeassistant.components import websocket_api
from homeassistant.components.backup import async_get_manager
from homeassistant.const import EVENT_LABS_UPDATED
from homeassistant.core import HomeAssistant, callback

-from .const import EVENT_LABS_UPDATED, LABS_DATA
+from .const import LABS_DATA
from .models import EventLabsUpdatedData
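The labs changes above move `EVENT_LABS_UPDATED` from the component's own const module into `homeassistant.const`. A hedged sketch of how other code can now react to that event through the normal event bus; it assumes a running `hass` instance, and the handler body is illustrative.

```python
# Sketch: listening for the relocated labs event (assumes a hass instance).
from homeassistant.const import EVENT_LABS_UPDATED
from homeassistant.core import Event, HomeAssistant, callback


@callback
def _labs_updated(event: Event) -> None:
    """React to a preview feature being toggled."""
    print("Labs preview feature changed:", event.data)


def register(hass: HomeAssistant) -> None:
    hass.bus.async_listen(EVENT_LABS_UPDATED, _labs_updated)
```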
@@ -41,44 +41,44 @@
  "title": "Lawn mower",
  "triggers": {
    "docked": {
-     "description": "Triggers when a lawn mower has docked.",
+     "description": "Triggers after one or more lawn mowers return to dock.",
      "fields": {
        "behavior": {
          "description": "[%key:component::lawn_mower::common::trigger_behavior_description%]",
          "name": "[%key:component::lawn_mower::common::trigger_behavior_name%]"
        }
      },
-     "name": "When a lawn mower has docked"
+     "name": "Lawn mower returned to dock"
    },
    "errored": {
-     "description": "Triggers when a lawn mower has errored.",
+     "description": "Triggers after one or more lawn mowers encounter an error.",
      "fields": {
        "behavior": {
          "description": "[%key:component::lawn_mower::common::trigger_behavior_description%]",
          "name": "[%key:component::lawn_mower::common::trigger_behavior_name%]"
        }
      },
-     "name": "When a lawn mower has errored"
+     "name": "Lawn mower encountered an error"
    },
    "paused_mowing": {
-     "description": "Triggers when a lawn mower has paused mowing.",
+     "description": "Triggers after one or more lawn mowers pause mowing.",
      "fields": {
        "behavior": {
          "description": "[%key:component::lawn_mower::common::trigger_behavior_description%]",
          "name": "[%key:component::lawn_mower::common::trigger_behavior_name%]"
        }
      },
-     "name": "When a lawn mower has paused mowing"
+     "name": "Lawn mower paused mowing"
    },
    "started_mowing": {
-     "description": "Triggers when a lawn mower has started mowing.",
+     "description": "Triggers after one or more lawn mowers start mowing.",
      "fields": {
        "behavior": {
          "description": "[%key:component::lawn_mower::common::trigger_behavior_description%]",
          "name": "[%key:component::lawn_mower::common::trigger_behavior_name%]"
        }
      },
-     "name": "When a lawn mower has started mowing"
+     "name": "Lawn mower started mowing"
    }
  }
}
@@ -510,24 +510,24 @@
  "title": "Light",
  "triggers": {
    "turned_off": {
-     "description": "Triggers when a light is turned off.",
+     "description": "Triggers after one or more lights turn off.",
      "fields": {
        "behavior": {
          "description": "[%key:component::light::common::trigger_behavior_description%]",
          "name": "[%key:component::light::common::trigger_behavior_name%]"
        }
      },
-     "name": "When a light is turned off"
+     "name": "Light turned off"
    },
    "turned_on": {
-     "description": "Triggers when a light is turned on.",
+     "description": "Triggers after one or more lights turn on.",
      "fields": {
        "behavior": {
          "description": "[%key:component::light::common::trigger_behavior_description%]",
          "name": "[%key:component::light::common::trigger_behavior_name%]"
        }
      },
-     "name": "When a light is turned on"
+     "name": "Light turned on"
    }
  }
}
@@ -381,14 +381,14 @@
  "title": "Media player",
  "triggers": {
    "stopped_playing": {
-     "description": "Triggers when a media player stops playing.",
+     "description": "Triggers after one or more media players stop playing media.",
      "fields": {
        "behavior": {
          "description": "[%key:component::media_player::common::trigger_behavior_description%]",
          "name": "[%key:component::media_player::common::trigger_behavior_name%]"
        }
      },
-     "name": "When a media player stops playing"
+     "name": "Media player stopped playing"
    }
  }
}
@@ -4,6 +4,7 @@
  "codeowners": ["@danielhiversen"],
  "config_flow": true,
  "documentation": "https://www.home-assistant.io/integrations/met",
  "integration_type": "service",
  "iot_class": "cloud_polling",
  "loggers": ["metno"],
  "requirements": ["PyMetno==0.13.0"]
@@ -48,6 +48,7 @@ CONDITION_CLASSES: dict[str, list[str]] = {
        "Brouillard givrant",
        "Bancs de Brouillard",
        "Brouillard dense",
        "Brouillard dense givrant",
    ],
    ATTR_CONDITION_HAIL: ["Risque de grêle", "Averses de grêle"],
    ATTR_CONDITION_LIGHTNING: ["Risque d'orages", "Orages", "Orage avec grêle"],
@@ -182,7 +182,7 @@ class ProgramPhaseWashingMachine(MieleEnum, missing_to_none=True):
    drain = 265
    spin = 266, 11010
    anti_crease = 267, 11029
-   finished = 268
+   finished = 268, 11012
    venting = 269
    starch_stop = 270
    freshen_up_and_moisten = 271
@@ -190,6 +190,7 @@ class ProgramPhaseWashingMachine(MieleEnum, missing_to_none=True):
    hygiene = 279
    drying = 280
    disinfecting = 285
    flex_load_active = 11047


class ProgramPhaseTumbleDryer(MieleEnum, missing_to_none=True):
@@ -481,8 +482,8 @@ class WashingMachineProgramId(MieleEnum, missing_to_none=True):
    express_20 = 122
    down_filled_items = 129
    cottons_eco = 133
-   quick_power_wash = 146
-   eco_40_60 = 190
+   quick_power_wash = 146, 10031
+   eco_40_60 = 190, 10007
    normal = 10001
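The Miele enums above map several equivalent raw API codes onto a single member (for example `finished = 268, 11012`). The sketch below shows one generic way to build an enum like that in plain Python; it is not the actual `MieleEnum` implementation, just an illustration of the multi-code idea.

```python
# Generic sketch: an Enum whose members carry several equivalent raw codes.
from enum import Enum


class MultiCodeEnum(Enum):
    """Enum whose members may be defined with several raw codes."""

    def __new__(cls, *codes: int):
        obj = object.__new__(cls)
        obj._value_ = codes[0]            # first code is the canonical value
        obj.all_codes = frozenset(codes)  # every code that maps to this member
        return obj

    @classmethod
    def from_code(cls, code: int) -> "MultiCodeEnum | None":
        return next((m for m in cls if code in m.all_codes), None)


class WashPhase(MultiCodeEnum):
    spin = 266, 11010
    finished = 268, 11012


assert WashPhase.from_code(11012) is WashPhase.finished
```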
@@ -954,6 +954,7 @@
  "extra_dry": "Extra dry",
  "final_rinse": "Final rinse",
  "finished": "Finished",
  "flex_load_active": "FlexLoad active",
  "freshen_up_and_moisten": "Freshen up & moisten",
  "going_to_target_area": "Going to target area",
  "grinding": "Grinding",
@@ -378,31 +378,33 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:

async def async_migrate_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
    """Migrate the options from config entry data."""
-   _LOGGER.debug("Migrating from version %s:%s", entry.version, entry.minor_version)
+   _LOGGER.debug("Migrating from version %s.%s", entry.version, entry.minor_version)
    data: dict[str, Any] = dict(entry.data)
    options: dict[str, Any] = dict(entry.options)
-   if entry.version > 1:
+   if entry.version > 2 or (entry.version == 2 and entry.minor_version > 1):
        # This means the user has downgraded from a future version
        # We allow read support for version 2.1
        return False

    if entry.version == 1 and entry.minor_version < 2:
-       # Can be removed when config entry is bumped to version 2.1
-       # with HA Core 2026.1.0. Read support for version 2.1 is expected before 2026.1
-       # From 2026.1 we will write version 2.1
+       # Can be removed when the config entry is bumped to version 2.1
+       # with HA Core 2026.7.0. Read support for version 2.1 is expected with 2026.1
+       # From 2026.7 we will write version 2.1
        for key in ENTRY_OPTION_FIELDS:
            if key not in data:
                continue
            options[key] = data.pop(key)
        # Write version 1.2 for backwards compatibility
        hass.config_entries.async_update_entry(
            entry,
            data=data,
            options=options,
-           version=CONFIG_ENTRY_VERSION,
-           minor_version=CONFIG_ENTRY_MINOR_VERSION,
+           version=1,
+           minor_version=2,
        )

    _LOGGER.debug(
-       "Migration to version %s:%s successful", entry.version, entry.minor_version
+       "Migration to version %s.%s successful", entry.version, entry.minor_version
    )
    return True
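The MQTT migration above follows a common config-entry pattern: refuse entries written by a newer version, and move legacy data keys into options while still writing an older minor version so downgraded cores can read the entry. The sketch below condenses that pattern; `OPTION_KEYS` and the literal version numbers are illustrative, not the MQTT integration's constants.

```python
# Hedged sketch of a config entry migration that splits data into options.
from typing import Any

from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant

OPTION_KEYS = ("discovery", "discovery_prefix")  # illustrative keys


async def async_migrate_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
    data: dict[str, Any] = dict(entry.data)
    options: dict[str, Any] = dict(entry.options)

    # Downgrade protection: refuse anything newer than what we can read.
    if entry.version > 2 or (entry.version == 2 and entry.minor_version > 1):
        return False

    if entry.version == 1 and entry.minor_version < 2:
        # Move option-like keys out of data.
        for key in OPTION_KEYS:
            if key in data:
                options[key] = data.pop(key)
        # Keep writing 1.2 so older cores can still load the entry.
        hass.config_entries.async_update_entry(
            entry, data=data, options=options, version=1, minor_version=2
        )
    return True
```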
@@ -3952,9 +3952,8 @@ REAUTH_SCHEMA = vol.Schema(
class FlowHandler(ConfigFlow, domain=DOMAIN):
    """Handle a config flow."""

-   # Can be bumped to version 2.1 with HA Core 2026.1.0
-   VERSION = CONFIG_ENTRY_VERSION  # 1
-   MINOR_VERSION = CONFIG_ENTRY_MINOR_VERSION  # 2
+   VERSION = CONFIG_ENTRY_VERSION  # 2
+   MINOR_VERSION = CONFIG_ENTRY_MINOR_VERSION  # 1

    _hassio_discovery: dict[str, Any] | None = None
    _addon_manager: AddonManager
@@ -381,13 +381,13 @@ MQTT_PROCESSED_SUBSCRIPTIONS = "mqtt_processed_subscriptions"
PAYLOAD_EMPTY_JSON = "{}"
PAYLOAD_NONE = "None"

-CONFIG_ENTRY_VERSION = 1
-CONFIG_ENTRY_MINOR_VERSION = 2
+CONFIG_ENTRY_VERSION = 2
+CONFIG_ENTRY_MINOR_VERSION = 1

# Split mqtt entry data and options
# Can be removed when config entry is bumped to version 2.1
-# with HA Core 2026.1.0. Read support for version 2.1 is expected before 2026.1
-# From 2026.1 we will write version 2.1
+# with HA Core 2026.7.0. Read support for version 2.1 is expected from 2026.1
+# From 2026.7 we will write version 2.1
ENTRY_OPTION_FIELDS = (
    CONF_DISCOVERY,
    CONF_DISCOVERY_PREFIX,
@@ -1562,7 +1562,7 @@
  },
  "triggers": {
    "_": {
-     "description": "When a specific message is received on a given MQTT topic.",
+     "description": "Triggers after a specific message is received on a given MQTT topic.",
      "fields": {
        "payload": {
          "description": "The payload to trigger on.",
@@ -1573,7 +1573,7 @@
          "name": "Topic"
        }
      },
-     "name": "MQTT"
+     "name": "MQTT message received"
    }
  }
}
@@ -6,6 +6,7 @@
  "config_flow": true,
  "dependencies": ["auth"],
  "documentation": "https://www.home-assistant.io/integrations/music_assistant",
  "integration_type": "service",
  "iot_class": "local_push",
  "loggers": ["music_assistant"],
  "quality_scale": "bronze",
@@ -10,6 +10,47 @@
  "is_going": {
    "default": "mdi:bell-cancel-outline"
  }
},
"sensor": {
  "arrival_platform_actual": {
    "default": "mdi:logout"
  },
  "arrival_platform_planned": {
    "default": "mdi:logout"
  },
  "arrival_time_actual": {
    "default": "mdi:clock"
  },
  "arrival_time_planned": {
    "default": "mdi:calendar-clock"
  },
  "departure": {
    "default": "mdi:train"
  },
  "departure_platform_actual": {
    "default": "mdi:login"
  },
  "departure_platform_planned": {
    "default": "mdi:login"
  },
  "departure_time_actual": {
    "default": "mdi:clock"
  },
  "departure_time_planned": {
    "default": "mdi:calendar-clock"
  },
  "next_departure_time": {
    "default": "mdi:train"
  },
  "route": {
    "default": "mdi:transit-connection-variant"
  },
  "status": {
    "default": "mdi:information"
  },
  "transfers": {
    "default": "mdi:swap-horizontal"
  }
}
}
}
@@ -2,6 +2,8 @@

from __future__ import annotations

from collections.abc import Callable
from dataclasses import dataclass
from datetime import datetime
import logging
from typing import Any
@@ -13,9 +15,10 @@ from homeassistant.components.sensor import (
    PLATFORM_SCHEMA as SENSOR_PLATFORM_SCHEMA,
    SensorDeviceClass,
    SensorEntity,
    SensorEntityDescription,
)
from homeassistant.config_entries import SOURCE_IMPORT
-from homeassistant.const import CONF_API_KEY, CONF_NAME
+from homeassistant.const import CONF_API_KEY, CONF_NAME, EntityCategory
from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant
from homeassistant.data_entry_flow import FlowResultType
from homeassistant.helpers import config_validation as cv, issue_registry as ir
@@ -24,9 +27,10 @@ from homeassistant.helpers.entity_platform import (
    AddConfigEntryEntitiesCallback,
    AddEntitiesCallback,
)
-from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
+from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType, StateType
from homeassistant.helpers.update_coordinator import CoordinatorEntity

from .binary_sensor import get_delay
from .const import (
    CONF_FROM,
    CONF_ROUTES,
@@ -40,7 +44,7 @@ from .const import (
from .coordinator import NSConfigEntry, NSDataUpdateCoordinator


-def _get_departure_time(trip: Trip | None) -> datetime | None:
+def get_departure_time(trip: Trip | None) -> datetime | None:
    """Get next departure time from trip data."""
    return trip.departure_time_actual or trip.departure_time_planned if trip else None

@@ -61,13 +65,15 @@ def _get_route(trip: Trip | None) -> list[str]:
    return route


def _get_delay(planned: datetime | None, actual: datetime | None) -> bool:
    """Return True if delay is present, False otherwise."""
    return bool(planned and actual and planned != actual)


TRIP_STATUS = {
    "NORMAL": "normal",
    "CANCELLED": "cancelled",
}

_LOGGER = logging.getLogger(__name__)

PARALLEL_UPDATES = 0  # since we use coordinator pattern

ROUTE_SCHEMA = vol.Schema(
    {
        vol.Required(CONF_NAME): cv.string,
@@ -85,6 +91,110 @@ PLATFORM_SCHEMA = SENSOR_PLATFORM_SCHEMA.extend(
)


@dataclass(frozen=True, kw_only=True)
class NSSensorEntityDescription(SensorEntityDescription):
    """Describes Nederlandse Spoorwegen sensor entity."""

    is_next: bool = False
    value_fn: Callable[[Trip], datetime | str | int | None]
    entity_category: EntityCategory | None = EntityCategory.DIAGNOSTIC


# Entity descriptions for all the different sensors we create per route
SENSOR_DESCRIPTIONS: tuple[NSSensorEntityDescription, ...] = (
    NSSensorEntityDescription(
        key="actual_departure",
        translation_key="departure",
        device_class=SensorDeviceClass.TIMESTAMP,
        entity_category=None,
        value_fn=get_departure_time,
    ),
    NSSensorEntityDescription(
        key="next_departure",
        translation_key="next_departure_time",
        device_class=SensorDeviceClass.TIMESTAMP,
        is_next=True,
        value_fn=get_departure_time,
        entity_registry_enabled_default=False,
    ),
    # Platform information
    NSSensorEntityDescription(
        key="departure_platform_planned",
        translation_key="departure_platform_planned",
        value_fn=lambda trip: getattr(trip, "departure_platform_planned", None),
        entity_registry_enabled_default=False,
    ),
    NSSensorEntityDescription(
        key="departure_platform_actual",
        translation_key="departure_platform_actual",
        value_fn=lambda trip: trip.departure_platform_actual,
        entity_registry_enabled_default=False,
    ),
    NSSensorEntityDescription(
        key="arrival_platform_planned",
        translation_key="arrival_platform_planned",
        value_fn=lambda trip: trip.arrival_platform_planned,
        entity_registry_enabled_default=False,
    ),
    NSSensorEntityDescription(
        key="arrival_platform_actual",
        translation_key="arrival_platform_actual",
        value_fn=lambda trip: trip.arrival_platform_actual,
        entity_registry_enabled_default=False,
    ),
    NSSensorEntityDescription(
        key="departure_time_planned",
        translation_key="departure_time_planned",
        device_class=SensorDeviceClass.TIMESTAMP,
        value_fn=lambda trip: trip.departure_time_planned,
        entity_registry_enabled_default=False,
    ),
    NSSensorEntityDescription(
        key="departure_time_actual",
        translation_key="departure_time_actual",
        device_class=SensorDeviceClass.TIMESTAMP,
        value_fn=lambda trip: trip.departure_time_actual,
        entity_registry_enabled_default=False,
    ),
    NSSensorEntityDescription(
        key="arrival_time_planned",
        translation_key="arrival_time_planned",
        device_class=SensorDeviceClass.TIMESTAMP,
        value_fn=lambda trip: trip.arrival_time_planned,
        entity_registry_enabled_default=False,
    ),
    NSSensorEntityDescription(
        key="arrival_time_actual",
        translation_key="arrival_time_actual",
        device_class=SensorDeviceClass.TIMESTAMP,
        value_fn=lambda trip: trip.arrival_time_actual,
        entity_registry_enabled_default=False,
    ),
    # Trip information
    NSSensorEntityDescription(
        key="status",
        translation_key="status",
        device_class=SensorDeviceClass.ENUM,
        options=list(TRIP_STATUS.values()),
        value_fn=lambda trip: TRIP_STATUS.get(trip.status),
        entity_registry_enabled_default=False,
    ),
    NSSensorEntityDescription(
        key="transfers",
        translation_key="transfers",
        value_fn=lambda trip: trip.nr_transfers if trip else 0,
        entity_registry_enabled_default=False,
    ),
    # Route info sensors
    NSSensorEntityDescription(
        key="route",
        translation_key="route",
        value_fn=lambda trip: ", ".join(_get_route(trip)),
        entity_registry_enabled_default=False,
    ),
)


async def async_setup_platform(
    hass: HomeAssistant,
    config: ConfigType,
@@ -144,58 +254,61 @@ async def async_setup_entry(
    coordinators = config_entry.runtime_data

    for subentry_id, coordinator in coordinators.items():
        # Build entity from coordinator fields directly
        entity = NSDepartureSensor(
            subentry_id,
            coordinator,
        async_add_entities(
            [
                NSSensor(coordinator, subentry_id, description)
                for description in SENSOR_DESCRIPTIONS
            ],
            config_subentry_id=subentry_id,
        )

        # Add entity with proper subentry association
        async_add_entities([entity], config_subentry_id=subentry_id)


class NSSensor(CoordinatorEntity[NSDataUpdateCoordinator], SensorEntity):
    """Generic NS sensor based on entity description."""


class NSDepartureSensor(CoordinatorEntity[NSDataUpdateCoordinator], SensorEntity):
    """Implementation of a NS Departure Sensor (legacy)."""

    _attr_device_class = SensorDeviceClass.TIMESTAMP
    _attr_has_entity_name = True
    _attr_attribution = "Data provided by NS"
    _attr_icon = "mdi:train"
    entity_description: NSSensorEntityDescription

    def __init__(
        self,
-       subentry_id: str,
        coordinator: NSDataUpdateCoordinator,
+       subentry_id: str,
+       description: NSSensorEntityDescription,
    ) -> None:
        """Initialize the sensor."""
        super().__init__(coordinator)
-       self._name = coordinator.name
+       self.entity_description = description
+       self._attr_entity_category = description.entity_category
        self._subentry_id = subentry_id
-       self._attr_unique_id = f"{subentry_id}-actual_departure"
+       self._attr_unique_id = f"{subentry_id}-{description.key}"
        self._attr_device_info = DeviceInfo(
-           identifiers={(DOMAIN, self._subentry_id)},
-           name=self._name,
+           identifiers={(DOMAIN, subentry_id)},
+           name=coordinator.name,
            manufacturer=INTEGRATION_TITLE,
            model=ROUTE_MODEL,
        )

    @property
    def name(self) -> str:
        """Return the name of the sensor."""
        return self._name

    @property
-   def native_value(self) -> datetime | None:
+   def native_value(self) -> StateType | datetime:
        """Return the native value of the sensor."""
        route_data = self.coordinator.data
        if not route_data.first_trip:
        data = (
            self.coordinator.data.first_trip
            if not self.entity_description.is_next
            else self.coordinator.data.next_trip
        )
        if data is None:
            return None

        first_trip = route_data.first_trip
        return _get_departure_time(first_trip)
        return self.entity_description.value_fn(data)

    @property
    def extra_state_attributes(self) -> dict[str, Any] | None:
        """Return the state attributes."""
        if self.entity_description.key != "actual_departure":
            return None

        first_trip = self.coordinator.data.first_trip
        next_trip = self.coordinator.data.next_trip

@@ -204,11 +317,12 @@ class NSDepartureSensor(CoordinatorEntity[NSDataUpdateCoordinator], SensorEntity

        status = first_trip.status

        # Static attributes
        return {
            "going": first_trip.going,
            "departure_time_planned": _get_time_str(first_trip.departure_time_planned),
            "departure_time_actual": _get_time_str(first_trip.departure_time_actual),
-           "departure_delay": _get_delay(
+           "departure_delay": get_delay(
                first_trip.departure_time_planned,
                first_trip.departure_time_actual,
            ),
@@ -216,13 +330,13 @@ class NSDepartureSensor(CoordinatorEntity[NSDataUpdateCoordinator], SensorEntity
            "departure_platform_actual": first_trip.departure_platform_actual,
            "arrival_time_planned": _get_time_str(first_trip.arrival_time_planned),
            "arrival_time_actual": _get_time_str(first_trip.arrival_time_actual),
-           "arrival_delay": _get_delay(
+           "arrival_delay": get_delay(
                first_trip.arrival_time_planned,
                first_trip.arrival_time_actual,
            ),
            "arrival_platform_planned": first_trip.arrival_platform_planned,
            "arrival_platform_actual": first_trip.arrival_platform_actual,
-           "next": _get_time_str(_get_departure_time(next_trip)),
+           "next": _get_time_str(get_departure_time(next_trip)),
            "status": status.lower() if status else None,
            "transfers": first_trip.nr_transfers,
            "route": _get_route(first_trip),
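The Nederlandse Spoorwegen rewrite above replaces one hand-written sensor class with a set of entity descriptions, each carrying a `value_fn`. The standalone sketch below shows the core of that pattern in isolation; the `Trip` dataclass and the description names are illustrative stand-ins, not the NS API client model.

```python
# Stripped-down sketch of the description-driven sensor pattern.
from collections.abc import Callable
from dataclasses import dataclass
from datetime import datetime


@dataclass
class Trip:
    departure_time_actual: datetime | None
    departure_time_planned: datetime | None
    nr_transfers: int


@dataclass(frozen=True, kw_only=True)
class SensorDescription:
    key: str
    value_fn: Callable[[Trip], datetime | int | None]


DESCRIPTIONS = (
    SensorDescription(
        key="departure",
        value_fn=lambda t: t.departure_time_actual or t.departure_time_planned,
    ),
    SensorDescription(key="transfers", value_fn=lambda t: t.nr_transfers),
)

trip = Trip(None, datetime(2025, 12, 2, 14, 8), 1)
for description in DESCRIPTIONS:
    # One generic entity class can serve every description by calling value_fn.
    print(description.key, description.value_fn(trip))
```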
@@ -75,6 +75,57 @@
  "is_going": {
    "name": "Going"
  }
},
"sensor": {
  "arrival_platform_actual": {
    "name": "Actual arrival platform"
  },
  "arrival_platform_planned": {
    "name": "Planned arrival platform"
  },
  "arrival_time_actual": {
    "name": "Actual arrival time"
  },
  "arrival_time_planned": {
    "name": "Planned arrival time"
  },
  "departure": {
    "name": "Departure"
  },
  "departure_platform_actual": {
    "name": "Actual departure platform"
  },
  "departure_platform_planned": {
    "name": "Planned departure platform"
  },
  "departure_time_actual": {
    "name": "Actual departure time"
  },
  "departure_time_planned": {
    "name": "Planned departure time"
  },
  "next_departure_time": {
    "name": "Next departure"
  },
  "route": {
    "name": "Route"
  },
  "route_from": {
    "name": "Route from"
  },
  "route_to": {
    "name": "Route to"
  },
  "status": {
    "name": "Status",
    "state": {
      "cancelled": "Cancelled",
      "normal": "Normal"
    }
  },
  "transfers": {
    "name": "Transfers"
  }
}
},
"issues": {
@@ -4,6 +4,7 @@
  "codeowners": ["@jpbede"],
  "config_flow": true,
  "documentation": "https://www.home-assistant.io/integrations/ping",
  "integration_type": "service",
  "iot_class": "local_polling",
  "loggers": ["icmplib"],
  "quality_scale": "internal",
@@ -17,6 +17,7 @@ from homeassistant.const import (
    Platform,
)
from homeassistant.core import callback
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.schema_config_entry_flow import (
    SchemaCommonFlowHandler,
    SchemaConfigFlowHandler,
@@ -59,8 +60,8 @@ def _generate_schema(domain: str, flow_type: _FlowType) -> vol.Schema:
    if domain == Platform.SENSOR:
        schema.update(
            {
-               vol.Optional(CONF_MINIMUM, default=DEFAULT_MIN): vol.Coerce(int),
-               vol.Optional(CONF_MAXIMUM, default=DEFAULT_MAX): vol.Coerce(int),
+               vol.Optional(CONF_MINIMUM, default=DEFAULT_MIN): cv.positive_int,
+               vol.Optional(CONF_MAXIMUM, default=DEFAULT_MAX): cv.positive_int,
                vol.Optional(CONF_DEVICE_CLASS): SelectSelector(
                    SelectSelectorConfig(
                        options=[
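The schema change above matters because `vol.Coerce(int)` only converts the value, so a negative minimum or maximum would still be accepted, while `cv.positive_int` also rejects values below zero. A standalone voluptuous sketch of the difference; Home Assistant's `cv.positive_int` behaves like the composed validator shown here.

```python
# Sketch: Coerce(int) versus a positive-int validator in voluptuous.
import voluptuous as vol

positive_int = vol.All(vol.Coerce(int), vol.Range(min=0))

loose = vol.Schema({"minimum": vol.Coerce(int)})
strict = vol.Schema({"minimum": positive_int})

print(loose({"minimum": "-5"}))  # {'minimum': -5} -- accepted
try:
    strict({"minimum": "-5"})
except vol.Invalid as err:
    print("rejected:", err)
```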
@@ -3,6 +3,7 @@
  "name": "RESTful",
  "codeowners": [],
  "documentation": "https://www.home-assistant.io/integrations/rest",
  "integration_type": "service",
  "iot_class": "local_polling",
  "requirements": ["jsonpath==0.82.2", "xmltodict==1.0.2"]
}
@@ -27,6 +27,7 @@
    }
  ],
  "documentation": "https://www.home-assistant.io/integrations/ring",
  "integration_type": "hub",
  "iot_class": "cloud_polling",
  "loggers": ["ring_doorbell"],
  "quality_scale": "bronze",
@@ -15,6 +15,7 @@
    }
  ],
  "documentation": "https://www.home-assistant.io/integrations/roborock",
  "integration_type": "hub",
  "iot_class": "local_polling",
  "loggers": ["roborock"],
  "quality_scale": "silver",
@@ -43,12 +43,7 @@ rules:

  ## Gold
  entity-translations: done
- entity-device-class:
-   status: todo
-   comment: |
-     What does DSL counter count?
-     What is the state of CRC?
-     line_status and training and net_infra and mode -> unknown shouldn't be an option and the entity should return None instead
+ entity-device-class: done
  devices:
    status: todo
    comment: MAC address can be set to the connections
@@ -49,14 +49,14 @@ DSL_SENSOR_TYPES: tuple[SFRBoxSensorEntityDescription[DslInfo], ...] = (
        key="counter",
        entity_category=EntityCategory.DIAGNOSTIC,
        entity_registry_enabled_default=False,
-       translation_key="dsl_counter",
+       translation_key="dsl_connect_count",
        value_fn=lambda x: x.counter,
    ),
    SFRBoxSensorEntityDescription[DslInfo](
        key="crc",
        entity_category=EntityCategory.DIAGNOSTIC,
        entity_registry_enabled_default=False,
-       translation_key="dsl_crc",
+       translation_key="dsl_crc_error_count",
        value_fn=lambda x: x.crc,
    ),
    SFRBoxSensorEntityDescription[DslInfo](
@@ -126,7 +126,6 @@ DSL_SENSOR_TYPES: tuple[SFRBoxSensorEntityDescription[DslInfo], ...] = (
            "loss_of_signal",
            "loss_of_power",
            "loss_of_signal_quality",
            "unknown",
        ],
        translation_key="dsl_line_status",
        value_fn=lambda x: _value_to_option(x.line_status),
@@ -146,7 +145,6 @@ DSL_SENSOR_TYPES: tuple[SFRBoxSensorEntityDescription[DslInfo], ...] = (
            "g_993_channel_analysis",
            "g_993_message_exchange",
            "showtime",
            "unknown",
        ],
        translation_key="dsl_training",
        value_fn=lambda x: _value_to_option(x.training),
@@ -162,10 +160,9 @@ SYSTEM_SENSOR_TYPES: tuple[SFRBoxSensorEntityDescription[SystemInfo], ...] = (
            "adsl",
            "ftth",
            "gprs",
            "unknown",
        ],
        translation_key="net_infra",
-       value_fn=lambda x: x.net_infra,
+       value_fn=lambda x: _value_to_option(x.net_infra),
    ),
    SFRBoxSensorEntityDescription[SystemInfo](
        key="alimvoltage",
@@ -197,18 +194,17 @@ WAN_SENSOR_TYPES: tuple[SFRBoxSensorEntityDescription[WanInfo], ...] = (
            "adsl_routed",
            "ftth_routed",
            "grps_ppp",
            "unknown",
        ],
        translation_key="wan_mode",
-       value_fn=lambda x: x.mode.replace("/", "_"),
+       value_fn=lambda x: _value_to_option(x.mode),
    ),
)


def _value_to_option(value: str | None) -> str | None:
-   if value is None:
-       return value
-   return value.lower().replace(" ", "_").replace(".", "_")
+   if value is None or value == "Unknown":
+       return None
+   return value.lower().replace(" ", "_").replace(".", "_").replace("/", "_")


def _get_temperature(value: float | None) -> float | None:
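The normalization helper above lowers raw firmware strings and maps punctuation to underscores so they match translation keys, and the literal "Unknown" now becomes `None` so the entity reports no state instead of an "unknown" option. A standalone sketch showing what that transformation produces:

```python
# Sketch of the string-to-option normalization used above.
def value_to_option(value: str | None) -> str | None:
    if value is None or value == "Unknown":
        return None
    return value.lower().replace(" ", "_").replace(".", "_").replace("/", "_")


print(value_to_option("ADSL/Routed"))     # adsl_routed
print(value_to_option("G.994 Training"))  # g_994_training
print(value_to_option("Unknown"))         # None
```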
@@ -56,11 +56,13 @@
  "dsl_attenuation_up": {
    "name": "DSL attenuation up"
  },
- "dsl_counter": {
-   "name": "DSL counter"
+ "dsl_connect_count": {
+   "name": "DSL connect count",
+   "unit_of_measurement": "connects"
  },
- "dsl_crc": {
-   "name": "DSL CRC"
+ "dsl_crc_error_count": {
+   "name": "DSL CRC error count",
+   "unit_of_measurement": "errors"
  },
  "dsl_line_status": {
    "name": "DSL line status",
@@ -69,8 +71,7 @@
      "loss_of_power": "Loss of power",
      "loss_of_signal": "Loss of signal",
      "loss_of_signal_quality": "Loss of signal quality",
-     "no_defect": "No defect",
-     "unknown": "Unknown"
+     "no_defect": "No defect"
    }
  },
  "dsl_linemode": {
@@ -99,8 +100,7 @@
      "g_993_started": "G.993 Started",
      "g_994_training": "G.994 Training",
      "idle": "[%key:common::state::idle%]",
-     "showtime": "Showtime",
-     "unknown": "Unknown"
+     "showtime": "Showtime"
    }
  },
  "net_infra": {
@@ -108,8 +108,7 @@
    "state": {
      "adsl": "ADSL",
      "ftth": "FTTH",
-     "gprs": "GPRS",
-     "unknown": "Unknown"
+     "gprs": "GPRS"
    }
  },
  "wan_mode": {
@@ -118,8 +117,7 @@
      "adsl_ppp": "ADSL (PPP)",
      "adsl_routed": "ADSL (Routed)",
      "ftth_routed": "FTTH (Routed)",
-     "grps_ppp": "GPRS (PPP)",
-     "unknown": "Unknown"
+     "grps_ppp": "GPRS (PPP)"
    }
  }
}
@@ -27,6 +27,7 @@
    }
  ],
  "documentation": "https://www.home-assistant.io/integrations/smartthings",
  "integration_type": "hub",
  "iot_class": "cloud_push",
  "loggers": ["pysmartthings"],
  "quality_scale": "bronze",
@@ -4,6 +4,7 @@
  "codeowners": ["@rohankapoorcom", "@engrbm87"],
  "config_flow": true,
  "documentation": "https://www.home-assistant.io/integrations/speedtestdotnet",
  "integration_type": "service",
  "iot_class": "cloud_polling",
  "requirements": ["speedtest-cli==2.1.3"]
}
@@ -38,6 +38,7 @@
  "config_flow": true,
  "dependencies": ["bluetooth_adapters"],
  "documentation": "https://www.home-assistant.io/integrations/switchbot",
  "integration_type": "device",
  "iot_class": "local_push",
  "loggers": ["switchbot"],
  "quality_scale": "gold",
@@ -5,6 +5,7 @@
  "config_flow": true,
  "dependencies": ["application_credentials", "http"],
  "documentation": "https://www.home-assistant.io/integrations/tesla_fleet",
  "integration_type": "hub",
  "iot_class": "cloud_polling",
  "loggers": ["tesla-fleet-api"],
  "requirements": ["tesla-fleet-api==1.2.5"]
@@ -4,6 +4,7 @@
  "codeowners": ["@Bre77"],
  "config_flow": true,
  "documentation": "https://www.home-assistant.io/integrations/teslemetry",
  "integration_type": "hub",
  "iot_class": "cloud_polling",
  "loggers": ["tesla-fleet-api"],
  "requirements": ["tesla-fleet-api==1.2.5", "teslemetry-stream==0.7.10"]
@@ -4,7 +4,7 @@
  "codeowners": ["@Bre77"],
  "config_flow": true,
  "documentation": "https://www.home-assistant.io/integrations/tessie",
- "integration_type": "device",
+ "integration_type": "hub",
  "iot_class": "cloud_polling",
  "loggers": ["tessie", "tesla-fleet-api"],
  "requirements": ["tessie-api==0.1.1", "tesla-fleet-api==1.2.5"]
@@ -45,8 +45,8 @@
"title": "Text",
"triggers": {
"changed": {
"description": "Triggers when the text changes.",
"name": "When the text changes"
"description": "Triggers after one or more texts change.",
"name": "Text changed"
}
}
}
@@ -298,6 +298,7 @@
}
],
"documentation": "https://www.home-assistant.io/integrations/tplink",
"integration_type": "device",
"iot_class": "local_polling",
"loggers": ["kasa"],
"quality_scale": "platinum",
@@ -61,28 +61,25 @@ class TuyaEntity(Entity):
) -> None:
self.async_write_ha_state()

def _send_command(self, commands: list[dict[str, Any]]) -> None:
"""Send command to the device."""
LOGGER.debug("Sending commands for device %s: %s", self.device.id, commands)
self.device_manager.send_commands(self.device.id, commands)

async def _async_send_commands(self, commands: list[dict[str, Any]]) -> None:
"""Send a list of commands to the device."""
await self.hass.async_add_executor_job(self._send_command, commands)
LOGGER.debug("Sending commands for device %s: %s", self.device.id, commands)
await self.hass.async_add_executor_job(
self.device_manager.send_commands, self.device.id, commands
)

def _read_wrapper(self, dpcode_wrapper: DPCodeWrapper | None) -> Any | None:
def _read_wrapper(self, wrapper: DPCodeWrapper | None) -> Any | None:
"""Read the wrapper device status."""
if dpcode_wrapper is None:
if wrapper is None:
return None
return dpcode_wrapper.read_device_status(self.device)
return wrapper.read_device_status(self.device)

async def _async_send_wrapper_updates(
self, dpcode_wrapper: DPCodeWrapper | None, value: Any
self, wrapper: DPCodeWrapper | None, value: Any
) -> None:
"""Send command to the device."""
if dpcode_wrapper is None:
if wrapper is None:
return
await self.hass.async_add_executor_job(
self._send_command,
dpcode_wrapper.get_update_commands(self.device, value),
await self._async_send_commands(
wrapper.get_update_commands(self.device, value),
)
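As context for the Tuya change above: entity code must not block the event loop, so the synchronous library call is pushed onto the executor instead of being called directly. A minimal sketch of that pattern, with an illustrative BlockingClient standing in for the Tuya device manager (names here are not part of this diff):

from typing import Any

from homeassistant.core import HomeAssistant


class BlockingClient:
    """Stand-in for a library client that performs synchronous network I/O."""

    def send_commands(self, device_id: str, commands: list[dict[str, Any]]) -> None:
        ...


async def async_send(
    hass: HomeAssistant,
    client: BlockingClient,
    device_id: str,
    commands: list[dict[str, Any]],
) -> None:
    """Run the blocking call in the executor so the event loop stays responsive."""
    await hass.async_add_executor_job(client.send_commands, device_id, commands)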
@@ -718,7 +718,7 @@ class TuyaLightEntity(TuyaEntity, LightEntity):
"""Return true if light is on."""
return self._read_wrapper(self._switch_wrapper)

def turn_on(self, **kwargs: Any) -> None:
async def async_turn_on(self, **kwargs: Any) -> None:
"""Turn on or control the light."""
commands = self._switch_wrapper.get_update_commands(self.device, True)

@@ -778,7 +778,7 @@ class TuyaLightEntity(TuyaEntity, LightEntity):
self._brightness_wrapper.get_update_commands(self.device, brightness),
)

self._send_command(commands)
await self._async_send_commands(commands)

async def async_turn_off(self, **kwargs: Any) -> None:
"""Instruct the light to turn off."""
@@ -196,7 +196,7 @@ class TuyaVacuumEntity(TuyaEntity, StateVacuumEntity):
"""Set fan speed."""
await self._async_send_wrapper_updates(self._fan_speed_wrapper, fan_speed)

def send_command(
async def async_send_command(
self,
command: str,
params: dict[str, Any] | list[Any] | None = None,
@@ -207,4 +207,4 @@ class TuyaVacuumEntity(TuyaEntity, StateVacuumEntity):
raise ValueError("Params cannot be omitted for Tuya vacuum commands")
if not isinstance(params, list):
raise TypeError("Params must be a list for Tuya vacuum commands")
self._send_command([{"code": command, "value": params[0]}])
await self._async_send_commands([{"code": command, "value": params[0]}])
@@ -113,44 +113,44 @@
|
||||
"title": "Vacuum",
|
||||
"triggers": {
|
||||
"docked": {
|
||||
"description": "Triggers when a vacuum cleaner has docked.",
|
||||
"description": "Triggers after one or more vacuums return to dock.",
|
||||
"fields": {
|
||||
"behavior": {
|
||||
"description": "[%key:component::vacuum::common::trigger_behavior_description%]",
|
||||
"name": "[%key:component::vacuum::common::trigger_behavior_name%]"
|
||||
}
|
||||
},
|
||||
"name": "When a vacuum cleaner has docked"
|
||||
"name": "Vacuum returned to dock"
|
||||
},
|
||||
"errored": {
|
||||
"description": "Triggers when a vacuum cleaner has errored.",
|
||||
"description": "Triggers after one or more vacuums encounter an error.",
|
||||
"fields": {
|
||||
"behavior": {
|
||||
"description": "[%key:component::vacuum::common::trigger_behavior_description%]",
|
||||
"name": "[%key:component::vacuum::common::trigger_behavior_name%]"
|
||||
}
|
||||
},
|
||||
"name": "When a vacuum cleaner has errored"
|
||||
"name": "Vacuum encountered an error"
|
||||
},
|
||||
"paused_cleaning": {
|
||||
"description": "Triggers when a vacuum cleaner has paused cleaning.",
|
||||
"description": "Triggers after one or more vacuums pause cleaning.",
|
||||
"fields": {
|
||||
"behavior": {
|
||||
"description": "[%key:component::vacuum::common::trigger_behavior_description%]",
|
||||
"name": "[%key:component::vacuum::common::trigger_behavior_name%]"
|
||||
}
|
||||
},
|
||||
"name": "When a vacuum cleaner has paused cleaning"
|
||||
"name": "Vacuum cleaner paused cleaning"
|
||||
},
|
||||
"started_cleaning": {
|
||||
"description": "Triggers when a vacuum cleaner has started cleaning.",
|
||||
"description": "Triggers after one or more vacuums start cleaning.",
|
||||
"fields": {
|
||||
"behavior": {
|
||||
"description": "[%key:component::vacuum::common::trigger_behavior_description%]",
|
||||
"name": "[%key:component::vacuum::common::trigger_behavior_name%]"
|
||||
}
|
||||
},
|
||||
"name": "When a vacuum cleaner has started cleaning"
|
||||
"name": "Vacuum cleaner started cleaning"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -13,5 +13,5 @@
"documentation": "https://www.home-assistant.io/integrations/vesync",
"iot_class": "cloud_polling",
"loggers": ["pyvesync"],
"requirements": ["pyvesync==3.3.2"]
"requirements": ["pyvesync==3.3.3"]
}
@@ -4,6 +4,7 @@
"codeowners": ["@thecode"],
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/webostv",
"integration_type": "device",
"iot_class": "local_push",
"loggers": ["aiowebostv"],
"quality_scale": "platinum",
@@ -23,6 +23,7 @@
"config_flow": true,
"dependencies": ["bluetooth_adapters"],
"documentation": "https://www.home-assistant.io/integrations/xiaomi_ble",
"integration_type": "device",
"iot_class": "local_push",
"requirements": ["xiaomi-ble==1.2.0"]
}
@@ -271,6 +271,7 @@ EVENT_HOMEASSISTANT_STOP: EventType[NoEventData] = EventType("homeassistant_stop
|
||||
EVENT_HOMEASSISTANT_FINAL_WRITE: EventType[NoEventData] = EventType(
|
||||
"homeassistant_final_write"
|
||||
)
|
||||
EVENT_LABS_UPDATED: Final = "labs_updated"
|
||||
EVENT_LOGBOOK_ENTRY: Final = "logbook_entry"
|
||||
EVENT_LOGGING_CHANGED: Final = "logging_changed"
|
||||
EVENT_SERVICE_REGISTERED: Final = "service_registered"
|
||||
|
||||
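The new EVENT_LABS_UPDATED constant names the bus event fired when a labs preview feature is toggled. A hedged sketch of listening for it on the event bus (the event data shape is not shown in this diff, so the callback only logs whatever it receives):

import logging

from homeassistant.const import EVENT_LABS_UPDATED
from homeassistant.core import Event, HomeAssistant, callback

_LOGGER = logging.getLogger(__name__)


@callback
def async_track_labs_updates(hass: HomeAssistant) -> None:
    """Log every labs_updated event seen on the bus."""

    @callback
    def _handle(event: Event) -> None:
        _LOGGER.debug("labs_updated fired: %s", event.data)

    hass.bus.async_listen(EVENT_LABS_UPDATED, _handle)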
@@ -847,7 +847,7 @@
|
||||
},
|
||||
"broadlink": {
|
||||
"name": "Broadlink",
|
||||
"integration_type": "hub",
|
||||
"integration_type": "device",
|
||||
"config_flow": true,
|
||||
"iot_class": "local_polling"
|
||||
},
|
||||
@@ -1374,13 +1374,13 @@
|
||||
"name": "DLNA",
|
||||
"integrations": {
|
||||
"dlna_dmr": {
|
||||
"integration_type": "hub",
|
||||
"integration_type": "device",
|
||||
"config_flow": true,
|
||||
"iot_class": "local_push",
|
||||
"name": "DLNA Digital Media Renderer"
|
||||
},
|
||||
"dlna_dms": {
|
||||
"integration_type": "hub",
|
||||
"integration_type": "service",
|
||||
"config_flow": true,
|
||||
"iot_class": "local_polling",
|
||||
"name": "DLNA Digital Media Server"
|
||||
@@ -1563,7 +1563,7 @@
|
||||
},
|
||||
"ecowitt": {
|
||||
"name": "Ecowitt",
|
||||
"integration_type": "hub",
|
||||
"integration_type": "device",
|
||||
"config_flow": true,
|
||||
"iot_class": "local_push"
|
||||
},
|
||||
@@ -2471,7 +2471,7 @@
|
||||
"name": "Google Tasks"
|
||||
},
|
||||
"google_translate": {
|
||||
"integration_type": "hub",
|
||||
"integration_type": "service",
|
||||
"config_flow": true,
|
||||
"iot_class": "cloud_push",
|
||||
"name": "Google Translate text-to-speech"
|
||||
@@ -2494,7 +2494,7 @@
|
||||
"name": "Google Wifi"
|
||||
},
|
||||
"google": {
|
||||
"integration_type": "hub",
|
||||
"integration_type": "service",
|
||||
"config_flow": true,
|
||||
"iot_class": "cloud_polling",
|
||||
"name": "Google Calendar"
|
||||
@@ -3525,7 +3525,7 @@
|
||||
"name": "LG ThinQ"
|
||||
},
|
||||
"webostv": {
|
||||
"integration_type": "hub",
|
||||
"integration_type": "device",
|
||||
"config_flow": true,
|
||||
"iot_class": "local_push",
|
||||
"name": "LG webOS TV"
|
||||
@@ -3903,7 +3903,7 @@
|
||||
},
|
||||
"met": {
|
||||
"name": "Meteorologisk institutt (Met.no)",
|
||||
"integration_type": "hub",
|
||||
"integration_type": "service",
|
||||
"config_flow": true,
|
||||
"iot_class": "cloud_polling"
|
||||
},
|
||||
@@ -4242,7 +4242,7 @@
|
||||
},
|
||||
"music_assistant": {
|
||||
"name": "Music Assistant",
|
||||
"integration_type": "hub",
|
||||
"integration_type": "service",
|
||||
"config_flow": true,
|
||||
"iot_class": "local_push"
|
||||
},
|
||||
@@ -5055,7 +5055,7 @@
|
||||
},
|
||||
"ping": {
|
||||
"name": "Ping (ICMP)",
|
||||
"integration_type": "hub",
|
||||
"integration_type": "service",
|
||||
"config_flow": true,
|
||||
"iot_class": "local_polling"
|
||||
},
|
||||
@@ -5548,7 +5548,7 @@
|
||||
},
|
||||
"rest": {
|
||||
"name": "RESTful",
|
||||
"integration_type": "hub",
|
||||
"integration_type": "service",
|
||||
"config_flow": false,
|
||||
"iot_class": "local_polling"
|
||||
},
|
||||
@@ -6354,7 +6354,7 @@
|
||||
},
|
||||
"speedtestdotnet": {
|
||||
"name": "Speedtest.net",
|
||||
"integration_type": "hub",
|
||||
"integration_type": "service",
|
||||
"config_flow": true,
|
||||
"iot_class": "cloud_polling"
|
||||
},
|
||||
@@ -6523,7 +6523,7 @@
|
||||
"name": "SwitchBot",
|
||||
"integrations": {
|
||||
"switchbot": {
|
||||
"integration_type": "hub",
|
||||
"integration_type": "device",
|
||||
"config_flow": true,
|
||||
"iot_class": "local_push",
|
||||
"name": "SwitchBot Bluetooth"
|
||||
@@ -6755,7 +6755,7 @@
|
||||
},
|
||||
"tessie": {
|
||||
"name": "Tessie",
|
||||
"integration_type": "device",
|
||||
"integration_type": "hub",
|
||||
"config_flow": true,
|
||||
"iot_class": "cloud_polling"
|
||||
},
|
||||
@@ -6929,7 +6929,7 @@
|
||||
"name": "TP-Link",
|
||||
"integrations": {
|
||||
"tplink": {
|
||||
"integration_type": "hub",
|
||||
"integration_type": "device",
|
||||
"config_flow": true,
|
||||
"iot_class": "local_polling",
|
||||
"name": "TP-Link Smart Home"
|
||||
@@ -7629,7 +7629,7 @@
|
||||
"name": "Xiaomi Gateway (Aqara)"
|
||||
},
|
||||
"xiaomi_ble": {
|
||||
"integration_type": "hub",
|
||||
"integration_type": "device",
|
||||
"config_flow": true,
|
||||
"iot_class": "local_push",
|
||||
"name": "Xiaomi BLE"
|
||||
|
||||
@@ -198,7 +198,12 @@ def async_subscribe_platform_events(
async def _register_condition_platform(
hass: HomeAssistant, integration_domain: str, platform: ConditionProtocol
) -> None:
"""Register a condition platform."""
"""Register a condition platform and notify listeners.

If the condition platform does not provide any conditions, or it is disabled,
listeners will not be notified.
"""
from homeassistant.components import automation # noqa: PLC0415

new_conditions: set[str] = set()

@@ -209,6 +214,12 @@ async def _register_condition_platform(
)
hass.data[CONDITIONS][condition_key] = integration_domain
new_conditions.add(condition_key)
if not new_conditions:
_LOGGER.debug(
"Integration %s returned no conditions in async_get_conditions",
integration_domain,
)
return
else:
_LOGGER.debug(
"Integration %s does not provide condition support, skipping",
@@ -216,6 +227,10 @@ async def _register_condition_platform(
)
return

if automation.is_disabled_experimental_condition(hass, integration_domain):
_LOGGER.debug("Conditions for integration %s are disabled", integration_domain)
return

# We don't use gather here because gather adds additional overhead
# when wrapping each coroutine in a task, and we expect our listeners
# to call condition.async_get_all_descriptions which will only yield
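For context, the guard clauses added above mean that listeners registered through condition.async_subscribe_platform_events are only notified when a platform actually contributes conditions and its experimental conditions are enabled. A minimal, illustrative subscriber (the subscription call matches the usage visible in the test diffs later in this page; everything else is a sketch):

import logging

from homeassistant.core import HomeAssistant
from homeassistant.helpers import condition

_LOGGER = logging.getLogger(__name__)


def track_new_conditions(hass: HomeAssistant) -> None:
    """Log each batch of newly registered condition keys."""

    async def _on_new_conditions(new_conditions: set[str]) -> None:
        _LOGGER.debug("New conditions registered: %s", sorted(new_conditions))

    condition.async_subscribe_platform_events(hass, _on_new_conditions)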
@@ -169,7 +169,12 @@ def async_subscribe_platform_events(
async def _register_trigger_platform(
hass: HomeAssistant, integration_domain: str, platform: TriggerProtocol
) -> None:
"""Register a trigger platform."""
"""Register a trigger platform and notify listeners.

If the trigger platform does not provide any triggers, or it is disabled,
listeners will not be notified.
"""
from homeassistant.components import automation # noqa: PLC0415

new_triggers: set[str] = set()

@@ -178,6 +183,12 @@ async def _register_trigger_platform(
trigger_key = get_absolute_description_key(integration_domain, trigger_key)
hass.data[TRIGGERS][trigger_key] = integration_domain
new_triggers.add(trigger_key)
if not new_triggers:
_LOGGER.debug(
"Integration %s returned no triggers in async_get_triggers",
integration_domain,
)
return
elif hasattr(platform, "async_validate_trigger_config") or hasattr(
platform, "TRIGGER_SCHEMA"
):
@@ -190,6 +201,10 @@ async def _register_trigger_platform(
)
return

if automation.is_disabled_experimental_trigger(hass, integration_domain):
_LOGGER.debug("Triggers for integration %s are disabled", integration_domain)
return

# We don't use gather here because gather adds additional overhead
# when wrapping each coroutine in a task, and we expect our listeners
# to call trigger.async_get_all_descriptions which will only yield
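The mirror-image change for triggers also shows the two ways a platform can participate: a modern platform exposes async_get_triggers (the tests later on this page mock it to return a dict), while a legacy one is recognised via async_validate_trigger_config or TRIGGER_SCHEMA. A rough, illustrative sketch of the modern shape only; the exact value type returned for each key is not shown in this diff:

from homeassistant.core import HomeAssistant


async def async_get_triggers(hass: HomeAssistant) -> dict[str, type]:
    """Return the triggers provided by this platform, keyed by trigger name."""

    class ExampleTrigger:
        """Illustrative placeholder; a real platform returns its Trigger classes."""

    return {"turned_on": ExampleTrigger, "turned_off": ExampleTrigger}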
requirements_all.txt (generated)
@@ -277,7 +277,7 @@ aioharmony==0.5.3
aiohasupervisor==0.3.3

# homeassistant.components.home_connect
aiohomeconnect==0.23.1
aiohomeconnect==0.24.0

# homeassistant.components.homekit_controller
aiohomekit==3.2.20
@@ -2633,7 +2633,7 @@ pyvera==0.3.16
pyversasense==0.0.6

# homeassistant.components.vesync
pyvesync==3.3.2
pyvesync==3.3.3

# homeassistant.components.vizio
pyvizio==0.1.61
requirements_test_all.txt (generated)
@@ -265,7 +265,7 @@ aioharmony==0.5.3
aiohasupervisor==0.3.3

# homeassistant.components.home_connect
aiohomeconnect==0.23.1
aiohomeconnect==0.24.0

# homeassistant.components.homekit_controller
aiohomekit==3.2.20
@@ -2199,7 +2199,7 @@ pyuptimerobot==22.2.0
pyvera==0.3.16

# homeassistant.components.vesync
pyvesync==3.3.2
pyvesync==3.3.3

# homeassistant.components.vizio
pyvizio==0.1.61
@@ -1,6 +1,7 @@
"""Go2rtc test configuration."""

from collections.abc import Generator
from pathlib import Path
from unittest.mock import AsyncMock, Mock, patch

from awesomeversion import AwesomeVersion
@@ -228,3 +229,15 @@ async def init_test_integration(
await hass.async_block_till_done()

return test_camera


@pytest.fixture
def server_dir(tmp_path: Path) -> Generator[Path]:
"""Fixture to provide a temporary directory for the server."""
server_dir = tmp_path / "go2rtc"
server_dir.mkdir()
with patch(
"homeassistant.components.go2rtc.mkdtemp",
return_value=str(server_dir),
):
yield server_dir
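A short usage note for the fixture above: a test that requests server_dir gets a temporary go2rtc working directory, and because mkdtemp is patched, the integration writes its runtime files under that path. Illustrative test body only, assuming the fixture and the standard hass test fixture are available:

from pathlib import Path

from homeassistant.core import HomeAssistant


async def test_uses_temporary_server_dir(hass: HomeAssistant, server_dir: Path) -> None:
    """The patched go2rtc directory exists and starts out empty."""
    assert server_dir.is_dir()
    assert not list(server_dir.iterdir())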
@@ -3,7 +3,7 @@
|
||||
_CallList([
|
||||
_Call(
|
||||
tuple(
|
||||
b'# This file is managed by Home Assistant\n# Do not edit it manually\n\napp:\n modules: ["api","exec","ffmpeg","http","mjpeg","onvif","rtmp","rtsp","srtp","webrtc","ws"]\n\napi:\n listen: ""\n unix_listen: "/run/go2rtc.sock"\n allow_paths: ["/","/api","/api/frame.jpeg","/api/schemes","/api/streams","/api/webrtc","/api/ws"]\n local_auth: true\n username: d2a0b844f4cdbe773702176c47c9a675eb0c56a0779b8f880cdb3b492ed3b1c1\n password: bc495d266a32e66ba69b9c72546e00101e04fb573f1bd08863fe4ad1aac02949\n\n# ffmpeg needs the exec module\n# Restrict execution to only ffmpeg binary\nexec:\n allow_paths:\n - ffmpeg\n\nrtsp:\n listen: "127.0.0.1:18554"\n\nwebrtc:\n listen: ":18555/tcp"\n ice_servers: []\n',
|
||||
b'# This file is managed by Home Assistant\n# Do not edit it manually\n\napp:\n modules: ["api","exec","ffmpeg","http","mjpeg","onvif","rtmp","rtsp","srtp","webrtc","ws"]\n\napi:\n listen: ""\n unix_listen: "/test/path/go2rtc.sock"\n allow_paths: ["/","/api","/api/frame.jpeg","/api/schemes","/api/streams","/api/webrtc","/api/ws"]\n local_auth: true\n username: d2a0b844f4cdbe773702176c47c9a675eb0c56a0779b8f880cdb3b492ed3b1c1\n password: bc495d266a32e66ba69b9c72546e00101e04fb573f1bd08863fe4ad1aac02949\n\n# ffmpeg needs the exec module\n# Restrict execution to only ffmpeg binary\nexec:\n allow_paths:\n - ffmpeg\n\nrtsp:\n listen: "127.0.0.1:18554"\n\nwebrtc:\n listen: ":18555/tcp"\n ice_servers: []\n',
|
||||
),
|
||||
dict({
|
||||
}),
|
||||
@@ -14,7 +14,7 @@
|
||||
_CallList([
|
||||
_Call(
|
||||
tuple(
|
||||
b'# This file is managed by Home Assistant\n# Do not edit it manually\n\napp:\n modules: ["api","exec","ffmpeg","http","mjpeg","onvif","rtmp","rtsp","srtp","webrtc","ws","debug"]\n\napi:\n listen: ":11984"\n unix_listen: "/run/go2rtc.sock"\n allow_paths: ["/","/api","/api/frame.jpeg","/api/schemes","/api/streams","/api/webrtc","/api/ws","/api/config","/api/log","/api/streams.dot"]\n local_auth: true\n username: user\n password: pass\n\n# ffmpeg needs the exec module\n# Restrict execution to only ffmpeg binary\nexec:\n allow_paths:\n - ffmpeg\n\nrtsp:\n listen: "127.0.0.1:18554"\n\nwebrtc:\n listen: ":18555/tcp"\n ice_servers: []\n',
|
||||
b'# This file is managed by Home Assistant\n# Do not edit it manually\n\napp:\n modules: ["api","exec","ffmpeg","http","mjpeg","onvif","rtmp","rtsp","srtp","webrtc","ws","debug"]\n\napi:\n listen: ":11984"\n unix_listen: "/test/path/go2rtc.sock"\n allow_paths: ["/","/api","/api/frame.jpeg","/api/schemes","/api/streams","/api/webrtc","/api/ws","/api/config","/api/log","/api/streams.dot"]\n local_auth: true\n username: user\n password: pass\n\n# ffmpeg needs the exec module\n# Restrict execution to only ffmpeg binary\nexec:\n allow_paths:\n - ffmpeg\n\nrtsp:\n listen: "127.0.0.1:18554"\n\nwebrtc:\n listen: ":18555/tcp"\n ice_servers: []\n',
|
||||
),
|
||||
dict({
|
||||
}),
|
||||
|
||||
@@ -2,6 +2,7 @@
|
||||
|
||||
from collections.abc import Awaitable, Callable
|
||||
import logging
|
||||
from pathlib import Path
|
||||
from typing import NamedTuple
|
||||
from unittest.mock import ANY, AsyncMock, Mock, patch
|
||||
|
||||
@@ -39,9 +40,9 @@ from homeassistant.components.go2rtc.const import (
|
||||
CONF_DEBUG_UI,
|
||||
DEBUG_UI_URL_MESSAGE,
|
||||
DOMAIN,
|
||||
HA_MANAGED_UNIX_SOCKET,
|
||||
RECOMMENDED_VERSION,
|
||||
)
|
||||
from homeassistant.components.go2rtc.util import get_go2rtc_unix_socket_path
|
||||
from homeassistant.components.stream import Orientation
|
||||
from homeassistant.config_entries import ConfigEntryState
|
||||
from homeassistant.const import CONF_PASSWORD, CONF_URL, CONF_USERNAME
|
||||
@@ -239,6 +240,7 @@ async def test_setup_go_binary(
|
||||
server_stop: Mock,
|
||||
init_test_integration: MockCamera,
|
||||
has_go2rtc_entry: bool,
|
||||
server_dir: Path,
|
||||
config: ConfigType,
|
||||
ui_enabled: bool,
|
||||
expected_username: str,
|
||||
@@ -255,6 +257,7 @@ async def test_setup_go_binary(
|
||||
enable_ui=ui_enabled,
|
||||
username=expected_username,
|
||||
password=expected_password,
|
||||
working_dir=str(server_dir),
|
||||
)
|
||||
call_kwargs = server.call_args[1]
|
||||
assert call_kwargs["username"] == expected_username
|
||||
@@ -1034,7 +1037,7 @@ async def test_stream_orientation_with_generic_camera(
|
||||
"rest_client",
|
||||
"server",
|
||||
)
|
||||
async def test_unix_socket_connection(hass: HomeAssistant) -> None:
|
||||
async def test_unix_socket_connection(hass: HomeAssistant, server_dir: Path) -> None:
|
||||
"""Test Unix socket is used for HA-managed go2rtc instances."""
|
||||
config = {DOMAIN: {}}
|
||||
|
||||
@@ -1056,7 +1059,7 @@ async def test_unix_socket_connection(hass: HomeAssistant) -> None:
|
||||
assert "connector" in call_kwargs
|
||||
connector = call_kwargs["connector"]
|
||||
assert isinstance(connector, UnixConnector)
|
||||
assert connector.path == HA_MANAGED_UNIX_SOCKET
|
||||
assert connector.path == get_go2rtc_unix_socket_path(server_dir)
|
||||
# Auth should be auto-generated when credentials are not explicitly configured
|
||||
assert "auth" in call_kwargs
|
||||
auth = call_kwargs["auth"]
|
||||
@@ -1120,7 +1123,7 @@ async def test_basic_auth_with_custom_url(hass: HomeAssistant) -> None:
|
||||
|
||||
|
||||
@pytest.mark.usefixtures("rest_client")
|
||||
async def test_basic_auth_with_debug_ui(hass: HomeAssistant) -> None:
|
||||
async def test_basic_auth_with_debug_ui(hass: HomeAssistant, server_dir: Path) -> None:
|
||||
"""Test BasicAuth session is created when username and password are provided with debug_ui."""
|
||||
config = {
|
||||
DOMAIN: {
|
||||
@@ -1158,7 +1161,7 @@ async def test_basic_auth_with_debug_ui(hass: HomeAssistant) -> None:
|
||||
assert "connector" in call_kwargs
|
||||
connector = call_kwargs["connector"]
|
||||
assert isinstance(connector, UnixConnector)
|
||||
assert connector.path == HA_MANAGED_UNIX_SOCKET
|
||||
assert connector.path == get_go2rtc_unix_socket_path(server_dir)
|
||||
assert "auth" in call_kwargs
|
||||
auth = call_kwargs["auth"]
|
||||
assert isinstance(auth, BasicAuth)
|
||||
|
||||
@@ -3,6 +3,7 @@
|
||||
import asyncio
|
||||
from collections.abc import Generator
|
||||
import logging
|
||||
from pathlib import Path
|
||||
import subprocess
|
||||
from unittest.mock import AsyncMock, MagicMock, Mock, patch
|
||||
|
||||
@@ -47,16 +48,22 @@ def server(
|
||||
enable_ui: bool,
|
||||
username: str,
|
||||
password: str,
|
||||
) -> Server:
|
||||
server_dir: Path,
|
||||
) -> Generator[Server]:
|
||||
"""Fixture to initialize the Server."""
|
||||
return Server(
|
||||
hass,
|
||||
binary=TEST_BINARY,
|
||||
session=mock_session,
|
||||
enable_ui=enable_ui,
|
||||
username=username,
|
||||
password=password,
|
||||
)
|
||||
with patch(
|
||||
"homeassistant.components.go2rtc.server.get_go2rtc_unix_socket_path",
|
||||
return_value="/test/path/go2rtc.sock",
|
||||
):
|
||||
yield Server(
|
||||
hass,
|
||||
binary=TEST_BINARY,
|
||||
session=mock_session,
|
||||
enable_ui=enable_ui,
|
||||
username=username,
|
||||
password=password,
|
||||
working_dir=str(server_dir),
|
||||
)
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
|
||||
@@ -4271,6 +4271,7 @@
|
||||
'drain',
|
||||
'drying',
|
||||
'finished',
|
||||
'flex_load_active',
|
||||
'freshen_up_and_moisten',
|
||||
'hygiene',
|
||||
'main_wash',
|
||||
@@ -4327,6 +4328,7 @@
|
||||
'drain',
|
||||
'drying',
|
||||
'finished',
|
||||
'flex_load_active',
|
||||
'freshen_up_and_moisten',
|
||||
'hygiene',
|
||||
'main_wash',
|
||||
@@ -6617,6 +6619,7 @@
|
||||
'drain',
|
||||
'drying',
|
||||
'finished',
|
||||
'flex_load_active',
|
||||
'freshen_up_and_moisten',
|
||||
'hygiene',
|
||||
'main_wash',
|
||||
@@ -6673,6 +6676,7 @@
|
||||
'drain',
|
||||
'drying',
|
||||
'finished',
|
||||
'flex_load_active',
|
||||
'freshen_up_and_moisten',
|
||||
'hygiene',
|
||||
'main_wash',
|
||||
|
||||
@@ -386,8 +386,8 @@ async def test_user_connection_works(
"port": 1883,
}
# Check we have the latest Config Entry version
assert result["result"].version == 1
assert result["result"].minor_version == 2
assert result["result"].version == 2
assert result["result"].minor_version == 1
# Check we tried the connection
assert len(mock_try_connection.mock_calls) == 1
# Check config entry got setup
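The assertions above reflect the MQTT config entry moving from version 1.2 to 2.1; major-version bumps like this are normally handled by the integration's async_migrate_entry hook. A generic, illustrative sketch of such a hook (not the actual MQTT implementation; the data transformation is left as a pass-through):

from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant


async def async_migrate_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
    """Migrate an old config entry up to version 2.1."""
    if entry.version > 2:
        # Entry was created by a newer Home Assistant; refuse to downgrade.
        return False
    if entry.version == 1:
        # Carry the existing data over unchanged and bump the version.
        hass.config_entries.async_update_entry(
            entry, data={**entry.data}, version=2, minor_version=1
        )
    return True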
@@ -2590,7 +2590,7 @@ async def test_reconfigure_no_changed_password(
|
||||
[
|
||||
(1, 1, MOCK_ENTRY_DATA | MOCK_ENTRY_OPTIONS, {}, 1, 2),
|
||||
(1, 2, MOCK_ENTRY_DATA, MOCK_ENTRY_OPTIONS, 1, 2),
|
||||
(1, 3, MOCK_ENTRY_DATA, MOCK_ENTRY_OPTIONS, 1, 3),
|
||||
(2, 1, MOCK_ENTRY_DATA, MOCK_ENTRY_OPTIONS, 2, 1),
|
||||
],
|
||||
)
|
||||
@pytest.mark.usefixtures("mock_reload_after_entry_update")
|
||||
@@ -2631,11 +2631,10 @@ async def test_migrate_config_entry(
|
||||
"minor_version",
|
||||
"data",
|
||||
"options",
|
||||
"expected_version",
|
||||
"expected_minor_version",
|
||||
),
|
||||
[
|
||||
(2, 1, MOCK_ENTRY_DATA, MOCK_ENTRY_OPTIONS, 2, 1),
|
||||
(2, 2, MOCK_ENTRY_DATA, MOCK_ENTRY_OPTIONS),
|
||||
(3, 1, MOCK_ENTRY_DATA, MOCK_ENTRY_OPTIONS),
|
||||
],
|
||||
)
|
||||
@pytest.mark.usefixtures("mock_reload_after_entry_update")
|
||||
@@ -2646,8 +2645,6 @@ async def test_migrate_of_incompatible_config_entry(
|
||||
minor_version: int,
|
||||
data: dict[str, Any],
|
||||
options: dict[str, Any],
|
||||
expected_version: int,
|
||||
expected_minor_version: int,
|
||||
) -> None:
|
||||
"""Test migrating a config entry."""
|
||||
config_entry = hass.config_entries.async_entries(mqtt.DOMAIN)[0]
|
||||
@@ -2660,8 +2657,6 @@ async def test_migrate_of_incompatible_config_entry(
|
||||
minor_version=minor_version,
|
||||
)
|
||||
await hass.async_block_till_done()
|
||||
assert config_entry.version == expected_version
|
||||
assert config_entry.minor_version == expected_minor_version
|
||||
|
||||
# Try to start MQTT with incompatible config entry
|
||||
with pytest.raises(AssertionError):
|
||||
|
||||
File diff suppressed because it is too large
@@ -19,7 +19,13 @@ from homeassistant.components.nederlandse_spoorwegen.const import (
|
||||
)
|
||||
from homeassistant.components.sensor import DOMAIN as SENSOR_DOMAIN
|
||||
from homeassistant.config_entries import ConfigSubentryDataWithId
|
||||
from homeassistant.const import CONF_API_KEY, CONF_NAME, CONF_PLATFORM, Platform
|
||||
from homeassistant.const import (
|
||||
CONF_API_KEY,
|
||||
CONF_NAME,
|
||||
CONF_PLATFORM,
|
||||
STATE_UNKNOWN,
|
||||
Platform,
|
||||
)
|
||||
from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant
|
||||
import homeassistant.helpers.entity_registry as er
|
||||
import homeassistant.helpers.issue_registry as ir
|
||||
@@ -77,6 +83,7 @@ async def test_config_import(
|
||||
|
||||
|
||||
@pytest.mark.freeze_time("2025-09-15 14:30:00+00:00")
|
||||
@pytest.mark.usefixtures("entity_registry_enabled_by_default")
|
||||
async def test_sensor(
|
||||
hass: HomeAssistant,
|
||||
mock_nsapi: AsyncMock,
|
||||
@@ -91,6 +98,7 @@ async def test_sensor(
|
||||
|
||||
|
||||
@pytest.mark.freeze_time("2025-09-15 14:30:00+00:00")
|
||||
@pytest.mark.usefixtures("entity_registry_enabled_by_default")
|
||||
async def test_single_trip_sensor(
|
||||
hass: HomeAssistant,
|
||||
mock_single_trip_nsapi: AsyncMock,
|
||||
@@ -105,17 +113,22 @@ async def test_single_trip_sensor(
|
||||
|
||||
|
||||
@pytest.mark.freeze_time("2025-09-15 14:30:00+00:00")
|
||||
@pytest.mark.usefixtures("entity_registry_enabled_by_default")
|
||||
async def test_no_trips_sensor(
|
||||
hass: HomeAssistant,
|
||||
mock_no_trips_nsapi: AsyncMock,
|
||||
mock_config_entry: MockConfigEntry,
|
||||
snapshot: SnapshotAssertion,
|
||||
entity_registry: er.EntityRegistry,
|
||||
) -> None:
|
||||
"""Test sensor initialization."""
|
||||
await setup_integration(hass, mock_config_entry)
|
||||
|
||||
await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id)
|
||||
for entity_entry in er.async_entries_for_config_entry(
|
||||
entity_registry, mock_config_entry.entry_id
|
||||
):
|
||||
state = hass.states.get(entity_entry.entity_id)
|
||||
assert state is not None
|
||||
assert state.state == STATE_UNKNOWN
|
||||
|
||||
|
||||
async def test_sensor_with_api_connection_error(
|
||||
@@ -143,6 +156,7 @@ async def test_sensor_with_api_connection_error(
|
||||
("08:30:45", "Early commute", "Time with seconds - should truncate seconds"),
|
||||
],
|
||||
)
|
||||
@pytest.mark.usefixtures("entity_registry_enabled_by_default")
|
||||
async def test_sensor_with_custom_time_parsing(
|
||||
hass: HomeAssistant,
|
||||
mock_nsapi: AsyncMock,
|
||||
@@ -178,17 +192,15 @@ async def test_sensor_with_custom_time_parsing(
|
||||
await setup_integration(hass, config_entry)
|
||||
await hass.async_block_till_done()
|
||||
|
||||
# Should create one sensor for the route with time parsing
|
||||
# Should create 13 sensors for the route with time parsing
|
||||
sensor_states = hass.states.async_all("sensor")
|
||||
assert len(sensor_states) == 1
|
||||
assert len(sensor_states) == 13
|
||||
|
||||
# Verify sensor was created successfully with time parsing
|
||||
state = sensor_states[0]
|
||||
assert state is not None
|
||||
assert state.state != "unavailable"
|
||||
assert state.attributes.get("attribution") == "Data provided by NS"
|
||||
assert state.attributes.get("device_class") == "timestamp"
|
||||
assert state.attributes.get("icon") == "mdi:train"
|
||||
|
||||
# The sensor should have a friendly name based on the route name
|
||||
friendly_name = state.attributes.get("friendly_name", "").lower()
|
||||
|
||||
@@ -64,6 +64,8 @@ async def test_setup_component_with_webhook(
|
||||
|
||||
camera_entity_indoor = "camera.hall"
|
||||
camera_entity_outdoor = "camera.front"
|
||||
|
||||
# Test indoor camera events
|
||||
assert hass.states.get(camera_entity_indoor).state == "streaming"
|
||||
response = {
|
||||
"event_type": "off",
|
||||
@@ -87,6 +89,31 @@ async def test_setup_component_with_webhook(
|
||||
|
||||
assert hass.states.get(camera_entity_indoor).state == "streaming"
|
||||
|
||||
# Test outdoor camera events - not yet supported
|
||||
assert hass.states.get(camera_entity_outdoor).state == "streaming"
|
||||
response = {
|
||||
"event_type": "off",
|
||||
"device_id": "12:34:56:10:b9:0e",
|
||||
"camera_id": "12:34:56:10:b9:0e",
|
||||
"event_id": "601dce1560abca1ebad9b723",
|
||||
"push_type": "NOCamera-off",
|
||||
}
|
||||
await simulate_webhook(hass, webhook_id, response)
|
||||
|
||||
# The NOCamera-off push_type is not yet supported (assert should be "idle" when supported)
|
||||
assert hass.states.get(camera_entity_outdoor).state == "streaming"
|
||||
|
||||
response = {
|
||||
"event_type": "on",
|
||||
"device_id": "12:34:56:10:b9:0e",
|
||||
"camera_id": "12:34:56:10:b9:0e",
|
||||
"event_id": "646227f1dc0dfa000ec5f350",
|
||||
"push_type": "NOCamera-on",
|
||||
}
|
||||
await simulate_webhook(hass, webhook_id, response)
|
||||
|
||||
assert hass.states.get(camera_entity_outdoor).state == "streaming"
|
||||
|
||||
response = {
|
||||
"event_type": "light_mode",
|
||||
"device_id": "12:34:56:10:b9:0e",
|
||||
|
||||
@@ -105,7 +105,7 @@
|
||||
'state': '20.8',
|
||||
})
|
||||
# ---
|
||||
# name: test_sensors[sensor.sfr_box_dsl_counter-entry]
|
||||
# name: test_sensors[sensor.sfr_box_dsl_connect_count-entry]
|
||||
EntityRegistryEntrySnapshot({
|
||||
'aliases': set({
|
||||
}),
|
||||
@@ -118,7 +118,7 @@
|
||||
'disabled_by': None,
|
||||
'domain': 'sensor',
|
||||
'entity_category': <EntityCategory.DIAGNOSTIC: 'diagnostic'>,
|
||||
'entity_id': 'sensor.sfr_box_dsl_counter',
|
||||
'entity_id': 'sensor.sfr_box_dsl_connect_count',
|
||||
'has_entity_name': True,
|
||||
'hidden_by': None,
|
||||
'icon': None,
|
||||
@@ -130,30 +130,31 @@
|
||||
}),
|
||||
'original_device_class': None,
|
||||
'original_icon': None,
|
||||
'original_name': 'DSL counter',
|
||||
'original_name': 'DSL connect count',
|
||||
'platform': 'sfr_box',
|
||||
'previous_unique_id': None,
|
||||
'suggested_object_id': None,
|
||||
'supported_features': 0,
|
||||
'translation_key': 'dsl_counter',
|
||||
'translation_key': 'dsl_connect_count',
|
||||
'unique_id': 'e4:5d:51:00:11:22_dsl_counter',
|
||||
'unit_of_measurement': None,
|
||||
'unit_of_measurement': 'connects',
|
||||
})
|
||||
# ---
|
||||
# name: test_sensors[sensor.sfr_box_dsl_counter-state]
|
||||
# name: test_sensors[sensor.sfr_box_dsl_connect_count-state]
|
||||
StateSnapshot({
|
||||
'attributes': ReadOnlyDict({
|
||||
'friendly_name': 'SFR Box DSL counter',
|
||||
'friendly_name': 'SFR Box DSL connect count',
|
||||
'unit_of_measurement': 'connects',
|
||||
}),
|
||||
'context': <ANY>,
|
||||
'entity_id': 'sensor.sfr_box_dsl_counter',
|
||||
'entity_id': 'sensor.sfr_box_dsl_connect_count',
|
||||
'last_changed': <ANY>,
|
||||
'last_reported': <ANY>,
|
||||
'last_updated': <ANY>,
|
||||
'state': '16',
|
||||
})
|
||||
# ---
|
||||
# name: test_sensors[sensor.sfr_box_dsl_crc-entry]
|
||||
# name: test_sensors[sensor.sfr_box_dsl_crc_error_count-entry]
|
||||
EntityRegistryEntrySnapshot({
|
||||
'aliases': set({
|
||||
}),
|
||||
@@ -166,7 +167,7 @@
|
||||
'disabled_by': None,
|
||||
'domain': 'sensor',
|
||||
'entity_category': <EntityCategory.DIAGNOSTIC: 'diagnostic'>,
|
||||
'entity_id': 'sensor.sfr_box_dsl_crc',
|
||||
'entity_id': 'sensor.sfr_box_dsl_crc_error_count',
|
||||
'has_entity_name': True,
|
||||
'hidden_by': None,
|
||||
'icon': None,
|
||||
@@ -178,23 +179,24 @@
|
||||
}),
|
||||
'original_device_class': None,
|
||||
'original_icon': None,
|
||||
'original_name': 'DSL CRC',
|
||||
'original_name': 'DSL CRC error count',
|
||||
'platform': 'sfr_box',
|
||||
'previous_unique_id': None,
|
||||
'suggested_object_id': None,
|
||||
'supported_features': 0,
|
||||
'translation_key': 'dsl_crc',
|
||||
'translation_key': 'dsl_crc_error_count',
|
||||
'unique_id': 'e4:5d:51:00:11:22_dsl_crc',
|
||||
'unit_of_measurement': None,
|
||||
'unit_of_measurement': 'errors',
|
||||
})
|
||||
# ---
|
||||
# name: test_sensors[sensor.sfr_box_dsl_crc-state]
|
||||
# name: test_sensors[sensor.sfr_box_dsl_crc_error_count-state]
|
||||
StateSnapshot({
|
||||
'attributes': ReadOnlyDict({
|
||||
'friendly_name': 'SFR Box DSL CRC',
|
||||
'friendly_name': 'SFR Box DSL CRC error count',
|
||||
'unit_of_measurement': 'errors',
|
||||
}),
|
||||
'context': <ANY>,
|
||||
'entity_id': 'sensor.sfr_box_dsl_crc',
|
||||
'entity_id': 'sensor.sfr_box_dsl_crc_error_count',
|
||||
'last_changed': <ANY>,
|
||||
'last_reported': <ANY>,
|
||||
'last_updated': <ANY>,
|
||||
@@ -261,7 +263,6 @@
|
||||
'loss_of_signal',
|
||||
'loss_of_power',
|
||||
'loss_of_signal_quality',
|
||||
'unknown',
|
||||
]),
|
||||
}),
|
||||
'config_entry_id': <ANY>,
|
||||
@@ -304,7 +305,6 @@
|
||||
'loss_of_signal',
|
||||
'loss_of_power',
|
||||
'loss_of_signal_quality',
|
||||
'unknown',
|
||||
]),
|
||||
}),
|
||||
'context': <ANY>,
|
||||
@@ -549,7 +549,6 @@
|
||||
'g_993_channel_analysis',
|
||||
'g_993_message_exchange',
|
||||
'showtime',
|
||||
'unknown',
|
||||
]),
|
||||
}),
|
||||
'config_entry_id': <ANY>,
|
||||
@@ -596,7 +595,6 @@
|
||||
'g_993_channel_analysis',
|
||||
'g_993_message_exchange',
|
||||
'showtime',
|
||||
'unknown',
|
||||
]),
|
||||
}),
|
||||
'context': <ANY>,
|
||||
@@ -617,7 +615,6 @@
|
||||
'adsl',
|
||||
'ftth',
|
||||
'gprs',
|
||||
'unknown',
|
||||
]),
|
||||
}),
|
||||
'config_entry_id': <ANY>,
|
||||
@@ -658,7 +655,6 @@
|
||||
'adsl',
|
||||
'ftth',
|
||||
'gprs',
|
||||
'unknown',
|
||||
]),
|
||||
}),
|
||||
'context': <ANY>,
|
||||
@@ -792,7 +788,6 @@
|
||||
'adsl_routed',
|
||||
'ftth_routed',
|
||||
'grps_ppp',
|
||||
'unknown',
|
||||
]),
|
||||
}),
|
||||
'config_entry_id': <ANY>,
|
||||
@@ -834,7 +829,6 @@
|
||||
'adsl_routed',
|
||||
'ftth_routed',
|
||||
'grps_ppp',
|
||||
'unknown',
|
||||
]),
|
||||
}),
|
||||
'context': <ANY>,
|
||||
|
||||
@@ -2878,3 +2878,122 @@ async def test_subscribe_conditions(
|
||||
|
||||
assert condition_events == [{"sun"}]
|
||||
assert "Error while notifying condition platform listener" in caplog.text
|
||||
|
||||
|
||||
@patch("annotatedyaml.loader.load_yaml")
|
||||
@patch.object(Integration, "has_conditions", return_value=True)
|
||||
@pytest.mark.parametrize(
|
||||
("new_triggers_conditions_enabled", "expected_events"),
|
||||
[
|
||||
(True, [{"light.is_off", "light.is_on"}]),
|
||||
(False, []),
|
||||
],
|
||||
)
|
||||
async def test_subscribe_conditions_experimental_conditions(
|
||||
mock_has_conditions: Mock,
|
||||
mock_load_yaml: Mock,
|
||||
hass: HomeAssistant,
|
||||
hass_ws_client: WebSocketGenerator,
|
||||
caplog: pytest.LogCaptureFixture,
|
||||
new_triggers_conditions_enabled: bool,
|
||||
expected_events: list[set[str]],
|
||||
) -> None:
|
||||
"""Test condition.async_subscribe_platform_events doesn't send events for disabled conditions."""
|
||||
# Return empty conditions.yaml for light integration, the actual condition
|
||||
# descriptions are irrelevant for this test
|
||||
light_condition_descriptions = ""
|
||||
|
||||
def _load_yaml(fname, secrets=None):
|
||||
if fname.endswith("light/conditions.yaml"):
|
||||
condition_descriptions = light_condition_descriptions
|
||||
else:
|
||||
raise FileNotFoundError
|
||||
with io.StringIO(condition_descriptions) as file:
|
||||
return parse_yaml(file)
|
||||
|
||||
mock_load_yaml.side_effect = _load_yaml
|
||||
|
||||
condition_events = []
|
||||
|
||||
async def good_subscriber(new_conditions: set[str]):
|
||||
"""Simulate a working subscriber."""
|
||||
condition_events.append(new_conditions)
|
||||
|
||||
ws_client = await hass_ws_client(hass)
|
||||
|
||||
assert await async_setup_component(hass, "labs", {})
|
||||
await ws_client.send_json_auto_id(
|
||||
{
|
||||
"type": "labs/update",
|
||||
"domain": "automation",
|
||||
"preview_feature": "new_triggers_conditions",
|
||||
"enabled": new_triggers_conditions_enabled,
|
||||
}
|
||||
)
|
||||
|
||||
msg = await ws_client.receive_json()
|
||||
assert msg["success"]
|
||||
await hass.async_block_till_done()
|
||||
|
||||
condition.async_subscribe_platform_events(hass, good_subscriber)
|
||||
|
||||
assert await async_setup_component(hass, "light", {})
|
||||
await hass.async_block_till_done()
|
||||
assert condition_events == expected_events
|
||||
|
||||
|
||||
@patch("annotatedyaml.loader.load_yaml")
|
||||
@patch.object(Integration, "has_conditions", return_value=True)
|
||||
@patch(
|
||||
"homeassistant.components.light.condition.async_get_conditions",
|
||||
new=AsyncMock(return_value={}),
|
||||
)
|
||||
async def test_subscribe_conditions_no_conditions(
|
||||
mock_has_conditions: Mock,
|
||||
mock_load_yaml: Mock,
|
||||
hass: HomeAssistant,
|
||||
hass_ws_client: WebSocketGenerator,
|
||||
caplog: pytest.LogCaptureFixture,
|
||||
) -> None:
|
||||
"""Test condition.async_subscribe_platform_events doesn't send events for platforms without conditions."""
|
||||
# Return empty conditions.yaml for light integration, the actual condition
|
||||
# descriptions are irrelevant for this test
|
||||
light_condition_descriptions = ""
|
||||
|
||||
def _load_yaml(fname, secrets=None):
|
||||
if fname.endswith("light/conditions.yaml"):
|
||||
condition_descriptions = light_condition_descriptions
|
||||
else:
|
||||
raise FileNotFoundError
|
||||
with io.StringIO(condition_descriptions) as file:
|
||||
return parse_yaml(file)
|
||||
|
||||
mock_load_yaml.side_effect = _load_yaml
|
||||
|
||||
condition_events = []
|
||||
|
||||
async def good_subscriber(new_conditions: set[str]):
|
||||
"""Simulate a working subscriber."""
|
||||
condition_events.append(new_conditions)
|
||||
|
||||
ws_client = await hass_ws_client(hass)
|
||||
|
||||
assert await async_setup_component(hass, "labs", {})
|
||||
await ws_client.send_json_auto_id(
|
||||
{
|
||||
"type": "labs/update",
|
||||
"domain": "automation",
|
||||
"preview_feature": "new_triggers_conditions",
|
||||
"enabled": True,
|
||||
}
|
||||
)
|
||||
|
||||
msg = await ws_client.receive_json()
|
||||
assert msg["success"]
|
||||
await hass.async_block_till_done()
|
||||
|
||||
condition.async_subscribe_platform_events(hass, good_subscriber)
|
||||
|
||||
assert await async_setup_component(hass, "light", {})
|
||||
await hass.async_block_till_done()
|
||||
assert condition_events == []
|
||||
|
||||
@@ -1006,3 +1006,122 @@ async def test_subscribe_triggers(
|
||||
assert await async_setup_component(hass, "sun", {})
|
||||
assert trigger_events == [{"sun"}]
|
||||
assert "Error while notifying trigger platform listener" in caplog.text
|
||||
|
||||
|
||||
@patch("annotatedyaml.loader.load_yaml")
|
||||
@patch.object(Integration, "has_triggers", return_value=True)
|
||||
@pytest.mark.parametrize(
|
||||
("new_triggers_conditions_enabled", "expected_events"),
|
||||
[
|
||||
(True, [{"light.turned_off", "light.turned_on"}]),
|
||||
(False, []),
|
||||
],
|
||||
)
|
||||
async def test_subscribe_triggers_experimental_triggers(
|
||||
mock_has_triggers: Mock,
|
||||
mock_load_yaml: Mock,
|
||||
hass: HomeAssistant,
|
||||
hass_ws_client: WebSocketGenerator,
|
||||
caplog: pytest.LogCaptureFixture,
|
||||
new_triggers_conditions_enabled: bool,
|
||||
expected_events: list[set[str]],
|
||||
) -> None:
|
||||
"""Test trigger.async_subscribe_platform_events doesn't send events for disabled triggers."""
|
||||
# Return empty triggers.yaml for light integration, the actual trigger descriptions
|
||||
# are irrelevant for this test
|
||||
light_trigger_descriptions = ""
|
||||
|
||||
def _load_yaml(fname, secrets=None):
|
||||
if fname.endswith("light/triggers.yaml"):
|
||||
trigger_descriptions = light_trigger_descriptions
|
||||
else:
|
||||
raise FileNotFoundError
|
||||
with io.StringIO(trigger_descriptions) as file:
|
||||
return parse_yaml(file)
|
||||
|
||||
mock_load_yaml.side_effect = _load_yaml
|
||||
|
||||
trigger_events = []
|
||||
|
||||
async def good_subscriber(new_triggers: set[str]):
|
||||
"""Simulate a working subscriber."""
|
||||
trigger_events.append(new_triggers)
|
||||
|
||||
ws_client = await hass_ws_client(hass)
|
||||
|
||||
assert await async_setup_component(hass, "labs", {})
|
||||
await ws_client.send_json_auto_id(
|
||||
{
|
||||
"type": "labs/update",
|
||||
"domain": "automation",
|
||||
"preview_feature": "new_triggers_conditions",
|
||||
"enabled": new_triggers_conditions_enabled,
|
||||
}
|
||||
)
|
||||
|
||||
msg = await ws_client.receive_json()
|
||||
assert msg["success"]
|
||||
await hass.async_block_till_done()
|
||||
|
||||
trigger.async_subscribe_platform_events(hass, good_subscriber)
|
||||
|
||||
assert await async_setup_component(hass, "light", {})
|
||||
await hass.async_block_till_done()
|
||||
assert trigger_events == expected_events
|
||||
|
||||
|
||||
@patch("annotatedyaml.loader.load_yaml")
|
||||
@patch.object(Integration, "has_triggers", return_value=True)
|
||||
@patch(
|
||||
"homeassistant.components.light.trigger.async_get_triggers",
|
||||
new=AsyncMock(return_value={}),
|
||||
)
|
||||
async def test_subscribe_triggers_no_triggers(
|
||||
mock_has_triggers: Mock,
|
||||
mock_load_yaml: Mock,
|
||||
hass: HomeAssistant,
|
||||
hass_ws_client: WebSocketGenerator,
|
||||
caplog: pytest.LogCaptureFixture,
|
||||
) -> None:
|
||||
"""Test trigger.async_subscribe_platform_events doesn't send events for platforms without triggers."""
|
||||
# Return empty triggers.yaml for light integration, the actual trigger descriptions
|
||||
# are irrelevant for this test
|
||||
light_trigger_descriptions = ""
|
||||
|
||||
def _load_yaml(fname, secrets=None):
|
||||
if fname.endswith("light/triggers.yaml"):
|
||||
trigger_descriptions = light_trigger_descriptions
|
||||
else:
|
||||
raise FileNotFoundError
|
||||
with io.StringIO(trigger_descriptions) as file:
|
||||
return parse_yaml(file)
|
||||
|
||||
mock_load_yaml.side_effect = _load_yaml
|
||||
|
||||
trigger_events = []
|
||||
|
||||
async def good_subscriber(new_triggers: set[str]):
|
||||
"""Simulate a working subscriber."""
|
||||
trigger_events.append(new_triggers)
|
||||
|
||||
ws_client = await hass_ws_client(hass)
|
||||
|
||||
assert await async_setup_component(hass, "labs", {})
|
||||
await ws_client.send_json_auto_id(
|
||||
{
|
||||
"type": "labs/update",
|
||||
"domain": "automation",
|
||||
"preview_feature": "new_triggers_conditions",
|
||||
"enabled": True,
|
||||
}
|
||||
)
|
||||
|
||||
msg = await ws_client.receive_json()
|
||||
assert msg["success"]
|
||||
await hass.async_block_till_done()
|
||||
|
||||
trigger.async_subscribe_platform_events(hass, good_subscriber)
|
||||
|
||||
assert await async_setup_component(hass, "light", {})
|
||||
await hass.async_block_till_done()
|
||||
assert trigger_events == []
|
||||
|
||||