Mirror of https://github.com/home-assistant/core.git (synced 2025-12-04 06:58:33 +00:00)

Compare commits: epenet-202...knx-data-s
22 Commits
| SHA1 |
|---|
| ea3c9e2520 |
| c0863ca585 |
| 9d53d37cbf |
| 823f320425 |
| b5a8516bd6 |
| f05cb6b2c7 |
| 1a60c46d67 |
| 62fba5ca20 |
| b54cde795c |
| 0f456373bf |
| a5042027b8 |
| b15b5ba95c |
| cd6e72798e |
| 739157e59f |
| 267aa1af42 |
| 7328b61a69 |
| 203f2fb364 |
| b956c17ce4 |
| 5163dc0567 |
| 31a0478717 |
| 24da3f0db8 |
| 786922fc5d |
.github/workflows/builder.yml (vendored, 12 changes)

@@ -30,7 +30,7 @@ jobs:
       architectures: ${{ env.ARCHITECTURES }}
     steps:
       - name: Checkout the repository
-        uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
+        uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1

       - name: Set up Python ${{ env.DEFAULT_PYTHON }}
         uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
@@ -96,7 +96,7 @@ jobs:
       os: ubuntu-24.04-arm
     steps:
       - name: Checkout the repository
-        uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
+        uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1

       - name: Download nightly wheels of frontend
         if: needs.init.outputs.channel == 'dev'
@@ -273,7 +273,7 @@ jobs:
       - green
     steps:
       - name: Checkout the repository
-        uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
+        uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1

       - name: Set build additional args
         run: |
@@ -311,7 +311,7 @@ jobs:
     runs-on: ubuntu-latest
     steps:
       - name: Checkout the repository
-        uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
+        uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1

       - name: Initialize git
         uses: home-assistant/actions/helpers/git-init@master
@@ -464,7 +464,7 @@ jobs:
     if: github.repository_owner == 'home-assistant' && needs.init.outputs.publish == 'true'
     steps:
       - name: Checkout the repository
-        uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
+        uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1

       - name: Set up Python ${{ env.DEFAULT_PYTHON }}
         uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
@@ -509,7 +509,7 @@ jobs:
       HASSFEST_IMAGE_TAG: ghcr.io/home-assistant/hassfest:${{ needs.init.outputs.version }}
     steps:
       - name: Checkout repository
-        uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
+        uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1

       - name: Login to GitHub Container Registry
         uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
.github/workflows/ci.yaml (vendored, 2 changes)

@@ -99,7 +99,7 @@ jobs:
     steps:
       - &checkout
         name: Check out code from GitHub
-        uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
+        uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
       - name: Generate partial Python venv restore key
         id: generate_python_cache_key
         run: |
.github/workflows/codeql.yml (vendored, 2 changes)

@@ -21,7 +21,7 @@ jobs:

     steps:
       - name: Check out code from GitHub
-        uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
+        uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1

       - name: Initialize CodeQL
         uses: github/codeql-action/init@fe4161a26a8629af62121b670040955b330f9af2 # v4.31.6
.github/workflows/stale.yml (vendored, 6 changes)

@@ -17,7 +17,7 @@ jobs:
       # - No PRs marked as no-stale
       # - No issues (-1)
       - name: 60 days stale PRs policy
-        uses: actions/stale@5f858e3efba33a5ca4407a664cc011ad407f2008 # v10.1.0
+        uses: actions/stale@997185467fa4f803885201cee163a9f38240193d # v10.1.1
         with:
           repo-token: ${{ secrets.GITHUB_TOKEN }}
           days-before-stale: 60
@@ -57,7 +57,7 @@ jobs:
       # - No issues marked as no-stale or help-wanted
       # - No PRs (-1)
       - name: 90 days stale issues
-        uses: actions/stale@5f858e3efba33a5ca4407a664cc011ad407f2008 # v10.1.0
+        uses: actions/stale@997185467fa4f803885201cee163a9f38240193d # v10.1.1
         with:
           repo-token: ${{ steps.token.outputs.token }}
           days-before-stale: 90
@@ -87,7 +87,7 @@ jobs:
       # - No Issues marked as no-stale or help-wanted
       # - No PRs (-1)
       - name: Needs more information stale issues policy
-        uses: actions/stale@5f858e3efba33a5ca4407a664cc011ad407f2008 # v10.1.0
+        uses: actions/stale@997185467fa4f803885201cee163a9f38240193d # v10.1.1
         with:
           repo-token: ${{ steps.token.outputs.token }}
           only-labels: "needs-more-information"
.github/workflows/translations.yml (vendored, 2 changes)

@@ -19,7 +19,7 @@ jobs:
     runs-on: ubuntu-latest
     steps:
       - name: Checkout the repository
-        uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
+        uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1

       - name: Set up Python ${{ env.DEFAULT_PYTHON }}
         uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
.github/workflows/wheels.yml (vendored, 2 changes)

@@ -31,7 +31,7 @@ jobs:
     steps:
       - &checkout
         name: Checkout the repository
-        uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
+        uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1

       - name: Set up Python ${{ env.DEFAULT_PYTHON }}
         id: python
@@ -8,6 +8,8 @@
   "integration_type": "system",
   "preview_features": {
     "new_triggers_conditions": {
+      "feedback_url": "https://forms.gle/fWFZqf5MzuwWTsCH8",
+      "learn_more_url": "https://www.home-assistant.io/blog/2025/12/03/release-202512/#purpose-specific-triggers-and-conditions",
       "report_issue_url": "https://github.com/home-assistant/core/issues/new?template=bug_report.yml&integration_link=https://www.home-assistant.io/integrations/automation&integration_name=Automation"
     }
   },
@@ -407,8 +407,8 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
     return [
         RTCIceServer(
            urls=[
-                "stun:stun.home-assistant.io:80",
                 "stun:stun.home-assistant.io:3478",
+                "stun:stun.home-assistant.io:80",
             ]
         ),
     ]
@@ -561,7 +561,7 @@ class BaseCloudLLMEntity(Entity):
                 "schema": _format_structured_output(
                     structure, chat_log.llm_api
                 ),
-                "strict": True,
+                "strict": False,
             },
         }
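The hunk above flips `strict` from `True` to `False` in the structured-output request the cloud LLM entity builds. A minimal sketch of the resulting payload shape, assuming the common json_schema response-format envelope; only the `schema` and `strict` keys are visible in the diff, everything else here is illustrative:

```python
# Illustrative payload shape after this change. In the real code the inner
# "schema" dict is produced by _format_structured_output.
response_format = {
    "type": "json_schema",  # assumed envelope key
    "json_schema": {
        "schema": {
            "type": "object",
            "properties": {"value": {"type": "string"}},
            "required": ["value"],
        },
        "strict": False,  # strict schema validation is now disabled
    },
}
```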
|
||||
@@ -13,6 +13,6 @@
|
||||
"integration_type": "system",
|
||||
"iot_class": "cloud_push",
|
||||
"loggers": ["acme", "hass_nabucasa", "snitun"],
|
||||
"requirements": ["hass-nabucasa==1.6.2"],
|
||||
"requirements": ["hass-nabucasa==1.7.0"],
|
||||
"single_config_entry": true
|
||||
}
|
||||
|
||||
@@ -17,7 +17,7 @@ DEFAULT_TTS_MODEL = "eleven_multilingual_v2"
 DEFAULT_STABILITY = 0.5
 DEFAULT_SIMILARITY = 0.75
 DEFAULT_STT_AUTO_LANGUAGE = False
-DEFAULT_STT_MODEL = "scribe_v1"
+DEFAULT_STT_MODEL = "scribe_v2"
 DEFAULT_STYLE = 0
 DEFAULT_USE_SPEAKER_BOOST = True
@@ -129,4 +129,5 @@ STT_LANGUAGES = [
 STT_MODELS = {
     "scribe_v1": "Scribe v1",
     "scribe_v1_experimental": "Scribe v1 Experimental",
+    "scribe_v2": "Scribe v2 Realtime",
 }
@@ -7,6 +7,6 @@
   "integration_type": "device",
   "iot_class": "local_polling",
   "quality_scale": "bronze",
-  "requirements": ["iometer==0.2.0"],
+  "requirements": ["iometer==0.3.0"],
   "zeroconf": ["_iometer._tcp.local."]
 }
@@ -94,6 +94,8 @@ SERVICE_KNX_EVENT_REGISTER: Final = "event_register"
 SERVICE_KNX_EXPOSURE_REGISTER: Final = "exposure_register"
 SERVICE_KNX_READ: Final = "read"

+REPAIR_ISSUE_DATA_SECURE_GROUP_KEY: Final = "data_secure_group_key_issue"
+

 class KNXConfigEntryData(TypedDict, total=False):
     """Config entry for the KNX integration."""
@@ -77,6 +77,11 @@ class _KnxEntityBase(Entity):
         """Store register state change callback and start device object."""
         self._device.register_device_updated_cb(self.after_update_callback)
         self._device.xknx.devices.async_add(self._device)
+        if uid := self.unique_id:
+            self._knx_module.add_to_group_address_entities(
+                group_addresses=self._device.group_addresses(),
+                identifier=(self.platform_data.domain, uid),
+            )
         # super call needed to have methods of multi-inherited classes called
         # eg. for restoring state (like _KNXSwitch)
         await super().async_added_to_hass()
@@ -85,6 +90,11 @@ class _KnxEntityBase(Entity):
         """Disconnect device object when removed."""
         self._device.unregister_device_updated_cb(self.after_update_callback)
         self._device.xknx.devices.async_remove(self._device)
+        if uid := self.unique_id:
+            self._knx_module.remove_from_group_address_entities(
+                group_addresses=self._device.group_addresses(),
+                identifier=(self.platform_data.domain, uid),
+            )


 class KnxYamlEntity(_KnxEntityBase):
@@ -56,6 +56,7 @@ from .const import (
 from .device import KNXInterfaceDevice
 from .expose import KNXExposeSensor, KNXExposeTime
 from .project import KNXProject
+from .repairs import data_secure_group_key_issue_dispatcher
 from .storage.config_store import KNXConfigStore
 from .telegrams import Telegrams

@@ -107,8 +108,12 @@ class KNXModule:

         self._address_filter_transcoder: dict[AddressFilter, type[DPTBase]] = {}
         self.group_address_transcoder: dict[DeviceGroupAddress, type[DPTBase]] = {}
+        self.group_address_entities: dict[
+            DeviceGroupAddress, set[tuple[str, str]]  # {(platform, unique_id),}
+        ] = {}
         self.knx_event_callback: TelegramQueue.Callback = self.register_event_callback()

+        self.entry.async_on_unload(data_secure_group_key_issue_dispatcher(self))
         self.entry.async_on_unload(
             self.hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, self.stop)
         )
@@ -225,6 +230,29 @@ class KNXModule:
             threaded=True,
         )

+    def add_to_group_address_entities(
+        self,
+        group_addresses: set[DeviceGroupAddress],
+        identifier: tuple[str, str],  # (platform, unique_id)
+    ) -> None:
+        """Register entity in group_address_entities map."""
+        for ga in group_addresses:
+            if ga not in self.group_address_entities:
+                self.group_address_entities[ga] = set()
+            self.group_address_entities[ga].add(identifier)
+
+    def remove_from_group_address_entities(
+        self,
+        group_addresses: set[DeviceGroupAddress],
+        identifier: tuple[str, str],
+    ) -> None:
+        """Unregister entity from group_address_entities map."""
+        for ga in group_addresses:
+            if ga in self.group_address_entities:
+                self.group_address_entities[ga].discard(identifier)
+                if not self.group_address_entities[ga]:
+                    del self.group_address_entities[ga]
+
     def connection_state_changed_cb(self, state: XknxConnectionState) -> None:
         """Call invoked after a KNX connection state change was received."""
         self.connected = state == XknxConnectionState.CONNECTED
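Together with the entity hooks in the previous hunk, these methods maintain a reverse index from each group address to the entities using it, so the repair handler can check whether an address is actually configured. A standalone sketch of that bookkeeping, with plain strings standing in for the xknx address types:

```python
# Standalone sketch of the reverse index maintained above:
# group address -> {(platform, unique_id)}. Plain strings stand in
# for xknx's DeviceGroupAddress.
GroupAddress = str
Identifier = tuple[str, str]  # (platform, unique_id)

class Registry:
    def __init__(self) -> None:
        self.group_address_entities: dict[GroupAddress, set[Identifier]] = {}

    def add(self, group_addresses: set[GroupAddress], identifier: Identifier) -> None:
        for ga in group_addresses:
            self.group_address_entities.setdefault(ga, set()).add(identifier)

    def remove(self, group_addresses: set[GroupAddress], identifier: Identifier) -> None:
        for ga in group_addresses:
            if ga in self.group_address_entities:
                self.group_address_entities[ga].discard(identifier)
                if not self.group_address_entities[ga]:
                    del self.group_address_entities[ga]  # drop empty buckets

reg = Registry()
reg.add({"1/2/5"}, ("switch", "uid-1"))
assert ("switch", "uid-1") in reg.group_address_entities["1/2/5"]
reg.remove({"1/2/5"}, ("switch", "uid-1"))
assert "1/2/5" not in reg.group_address_entities
```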
@@ -11,7 +11,7 @@
   "loggers": ["xknx", "xknxproject"],
   "quality_scale": "silver",
   "requirements": [
-    "xknx==3.11.0",
+    "xknx==3.12.0",
     "xknxproject==3.8.2",
     "knx-frontend==2025.10.31.195356"
   ],
homeassistant/components/knx/repairs.py (new file, 175 lines)

@@ -0,0 +1,175 @@
"""Repairs for KNX integration."""

from __future__ import annotations

from collections.abc import Callable
from functools import partial
from typing import TYPE_CHECKING, Any, Final

import voluptuous as vol
from xknx.exceptions.exception import InvalidSecureConfiguration
from xknx.telegram import GroupAddress, IndividualAddress, Telegram

from homeassistant import data_entry_flow
from homeassistant.components.repairs import RepairsFlow
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers import issue_registry as ir, selector
from homeassistant.helpers.dispatcher import async_dispatcher_connect

if TYPE_CHECKING:
    from .knx_module import KNXModule

from .const import (
    CONF_KNX_KNXKEY_PASSWORD,
    DOMAIN,
    REPAIR_ISSUE_DATA_SECURE_GROUP_KEY,
    KNXConfigEntryData,
)
from .storage.keyring import DEFAULT_KNX_KEYRING_FILENAME, save_uploaded_knxkeys_file
from .telegrams import SIGNAL_KNX_DATA_SECURE_ISSUE_TELEGRAM, TelegramDict

CONF_KEYRING_FILE: Final = "knxkeys_file"


async def async_create_fix_flow(
    hass: HomeAssistant,
    issue_id: str,
    data: dict[str, str | int | float | None] | None,
) -> RepairsFlow:
    """Create flow."""
    if issue_id == REPAIR_ISSUE_DATA_SECURE_GROUP_KEY:
        return DataSecureGroupIssueRepairFlow()
    # If KNX adds confirm-only repairs in the future, this should be changed
    # to return a ConfirmRepairFlow instead of raising a ValueError
    raise ValueError(f"unknown repair {issue_id}")


######################
# DataSecure key issue
######################


@callback
def data_secure_group_key_issue_dispatcher(knx_module: KNXModule) -> Callable[[], None]:
    """Watcher for DataSecure group key issues."""
    return async_dispatcher_connect(
        knx_module.hass,
        signal=SIGNAL_KNX_DATA_SECURE_ISSUE_TELEGRAM,
        target=partial(_data_secure_group_key_issue_handler, knx_module),
    )


@callback
def _data_secure_group_key_issue_handler(
    knx_module: KNXModule, telegram: Telegram, telegram_dict: TelegramDict
) -> None:
    """Handle DataSecure group key issue telegrams."""
    if telegram.destination_address not in knx_module.group_address_entities:
        # Only report issues for configured group addresses
        return

    issue_registry = ir.async_get(knx_module.hass)
    new_ga = str(telegram.destination_address)
    new_ia = str(telegram.source_address)
    new_data = {new_ga: new_ia}

    if existing_issue := issue_registry.async_get_issue(
        DOMAIN, REPAIR_ISSUE_DATA_SECURE_GROUP_KEY
    ):
        assert isinstance(existing_issue.data, dict)
        existing_data: dict[str, str] = existing_issue.data  # type: ignore[assignment]
        if new_ga in existing_data:
            current_ias = existing_data[new_ga].split(", ")
            if new_ia in current_ias:
                return
            current_ias = sorted([*current_ias, new_ia], key=IndividualAddress)
            new_data[new_ga] = ", ".join(current_ias)
        new_data_unsorted = existing_data | new_data
        new_data = {
            key: new_data_unsorted[key]
            for key in sorted(new_data_unsorted, key=GroupAddress)
        }

    issue_registry.async_get_or_create(
        DOMAIN,
        REPAIR_ISSUE_DATA_SECURE_GROUP_KEY,
        data=new_data,  # type: ignore[arg-type]
        is_fixable=True,
        is_persistent=True,
        severity=ir.IssueSeverity.ERROR,
        translation_key=REPAIR_ISSUE_DATA_SECURE_GROUP_KEY,
        translation_placeholders={
            "addresses": "\n".join(
                f"`{ga}` from {ias}" for ga, ias in new_data.items()
            ),
            "interface": str(knx_module.xknx.current_address),
        },
    )


class DataSecureGroupIssueRepairFlow(RepairsFlow):
    """Handler for an issue fixing flow for outdated DataSecure keys."""

    @callback
    def _async_get_placeholders(self) -> dict[str, str]:
        issue_registry = ir.async_get(self.hass)
        issue = issue_registry.async_get_issue(self.handler, self.issue_id)
        assert issue is not None
        return issue.translation_placeholders or {}

    async def async_step_init(
        self, user_input: dict[str, str] | None = None
    ) -> data_entry_flow.FlowResult:
        """Handle the first step of a fix flow."""
        return await self.async_step_secure_knxkeys()

    async def async_step_secure_knxkeys(
        self, user_input: dict[str, Any] | None = None
    ) -> data_entry_flow.FlowResult:
        """Manage upload of new KNX Keyring file."""
        errors: dict[str, str] = {}

        if user_input is not None:
            password = user_input[CONF_KNX_KNXKEY_PASSWORD]
            keyring = None
            try:
                keyring = await save_uploaded_knxkeys_file(
                    self.hass,
                    uploaded_file_id=user_input[CONF_KEYRING_FILE],
                    password=password,
                )
            except InvalidSecureConfiguration:
                errors[CONF_KNX_KNXKEY_PASSWORD] = "keyfile_invalid_signature"

            if not errors and keyring:
                new_entry_data = KNXConfigEntryData(
                    knxkeys_filename=f"{DOMAIN}/{DEFAULT_KNX_KEYRING_FILENAME}",
                    knxkeys_password=password,
                )
                return self.finish_flow(new_entry_data)

        fields = {
            vol.Required(CONF_KEYRING_FILE): selector.FileSelector(
                config=selector.FileSelectorConfig(accept=".knxkeys")
            ),
            vol.Required(CONF_KNX_KNXKEY_PASSWORD): selector.TextSelector(),
        }
        return self.async_show_form(
            step_id="secure_knxkeys",
            data_schema=vol.Schema(fields),
            description_placeholders=self._async_get_placeholders(),
            errors=errors,
        )

    @callback
    def finish_flow(
        self, new_entry_data: KNXConfigEntryData
    ) -> data_entry_flow.FlowResult:
        """Finish the repair flow. Reload the config entry."""
        knx_config_entries = self.hass.config_entries.async_entries(DOMAIN)
        if knx_config_entries:
            config_entry = knx_config_entries[0]  # single_config_entry
            new_data = {**config_entry.data, **new_entry_data}
            self.hass.config_entries.async_update_entry(config_entry, data=new_data)
            self.hass.config_entries.async_schedule_reload(config_entry.entry_id)
        return self.async_create_entry(data={})
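The handler above merges each new (group address, sender) pair into the existing issue data, keeping both the group-address keys and the sender lists sorted. A worked example of that merge, assuming plain strings in place of the xknx `GroupAddress`/`IndividualAddress` sort keys, so sorting here is merely lexicographic:

```python
# Worked example of the issue-data merge performed by the handler.
existing_data = {"1/2/5": "1.0.10"}
new_ga, new_ia = "1/2/5", "1.0.1"

new_data = {new_ga: new_ia}
current_ias = existing_data[new_ga].split(", ")
if new_ia not in current_ias:
    # append the new sender and keep the comma-separated list sorted
    new_data[new_ga] = ", ".join(sorted([*current_ias, new_ia]))
merged = existing_data | new_data
print(merged)  # {'1/2/5': '1.0.1, 1.0.10'}
```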
@@ -10,9 +10,10 @@ from xknx.secure.keyring import Keyring, sync_load_keyring

 from homeassistant.components.file_upload import process_uploaded_file
 from homeassistant.core import HomeAssistant
+from homeassistant.helpers import issue_registry as ir
 from homeassistant.helpers.storage import STORAGE_DIR

-from ..const import DOMAIN
+from ..const import DOMAIN, REPAIR_ISSUE_DATA_SECURE_GROUP_KEY

 _LOGGER = logging.getLogger(__name__)

@@ -45,4 +46,11 @@ async def save_uploaded_knxkeys_file(
         shutil.move(file_path, dest_file)
         return keyring

-    return await hass.async_add_executor_job(_process_upload)
+    keyring = await hass.async_add_executor_job(_process_upload)
+
+    # If there is an existing DataSecure group key issue, remove it.
+    # GAs might not be DataSecure anymore after uploading a valid keyring,
+    # if they are, we raise the issue again when receiving a telegram.
+    ir.async_delete_issue(hass, DOMAIN, REPAIR_ISSUE_DATA_SECURE_GROUP_KEY)
+
+    return keyring
@@ -671,6 +671,30 @@
         "message": "Invalid type for `knx.send` service: {type}"
       }
     },
+    "issues": {
+      "data_secure_group_key_issue": {
+        "fix_flow": {
+          "error": {
+            "keyfile_invalid_signature": "[%key:component::knx::config::error::keyfile_invalid_signature%]"
+          },
+          "step": {
+            "secure_knxkeys": {
+              "data": {
+                "knxkeys_file": "[%key:component::knx::config::step::secure_knxkeys::data::knxkeys_file%]",
+                "knxkeys_password": "[%key:component::knx::config::step::secure_knxkeys::data::knxkeys_password%]"
+              },
+              "data_description": {
+                "knxkeys_file": "[%key:component::knx::config::step::secure_knxkeys::data_description::knxkeys_file%]",
+                "knxkeys_password": "[%key:component::knx::config::step::secure_knxkeys::data_description::knxkeys_password%]"
+              },
+              "description": "Telegrams for group addresses used in Home Assistant could not be decrypted because Data Secure keys are missing or invalid:\n\n{addresses}\n\nTo fix this, update the sending devices configurations via ETS and provide an updated KNX Keyring file. Make sure that the group addresses used in Home Assistant are associated with the interface used by Home Assistant (`{interface}` when the issue last occurred).",
+              "title": "Update KNX Keyring"
+            }
+          }
+        },
+        "title": "KNX Data Secure telegrams can't be decrypted"
+      }
+    },
     "options": {
       "step": {
         "communication_settings": {
@@ -26,6 +26,9 @@ STORAGE_KEY: Final = f"{DOMAIN}/telegrams_history.json"

 # dispatcher signal for KNX interface device triggers
 SIGNAL_KNX_TELEGRAM: SignalType[Telegram, TelegramDict] = SignalType("knx_telegram")
+SIGNAL_KNX_DATA_SECURE_ISSUE_TELEGRAM: SignalType[Telegram, TelegramDict] = SignalType(
+    "knx_data_secure_issue_telegram"
+)


 class DecodedTelegramPayload(TypedDict):
@@ -74,6 +77,11 @@ class Telegrams:
                 match_for_outgoing=True,
             )
         )
+        self._xknx_data_secure_group_key_issue_cb_handle = (
+            xknx.telegram_queue.register_data_secure_group_key_issue_cb(
+                self._xknx_data_secure_group_key_issue_cb,
+            )
+        )
         self.recent_telegrams: deque[TelegramDict] = deque(maxlen=log_size)
         self.last_ga_telegrams: dict[str, TelegramDict] = {}

@@ -107,6 +115,14 @@ class Telegrams:
         self.last_ga_telegrams[telegram_dict["destination"]] = telegram_dict
         async_dispatcher_send(self.hass, SIGNAL_KNX_TELEGRAM, telegram, telegram_dict)

+    def _xknx_data_secure_group_key_issue_cb(self, telegram: Telegram) -> None:
+        """Handle telegrams with undecodable data secure payload from xknx."""
+        telegram_dict = self.telegram_to_dict(telegram)
+        self.recent_telegrams.append(telegram_dict)
+        async_dispatcher_send(
+            self.hass, SIGNAL_KNX_DATA_SECURE_ISSUE_TELEGRAM, telegram, telegram_dict
+        )
+
     def telegram_to_dict(self, telegram: Telegram) -> TelegramDict:
         """Convert a Telegram to a dict."""
         dst_name = ""
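The new signal fans a single callback out to every connected listener (the repair handler, websocket subscribers). A minimal sketch of that dispatcher pattern, using a hypothetical stand-in rather than Home Assistant's `helpers.dispatcher`:

```python
# Hypothetical minimal dispatcher to illustrate the fan-out: one send(),
# every connected target runs; connect() returns an unsubscribe callable.
from typing import Any, Callable

_listeners: dict[str, list[Callable[..., Any]]] = {}

def connect(signal: str, target: Callable[..., Any]) -> Callable[[], None]:
    _listeners.setdefault(signal, []).append(target)
    return lambda: _listeners[signal].remove(target)

def send(signal: str, *args: Any) -> None:
    for target in _listeners.get(signal, []):
        target(*args)

unsub = connect("knx_data_secure_issue_telegram", lambda t, d: print("issue:", d))
send("knx_data_secure_issue_telegram", "telegram", {"destination": "1/2/5"})
unsub()  # no listeners remain after unsubscribing
```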
@@ -3,6 +3,7 @@
 from __future__ import annotations

 from collections.abc import Awaitable, Callable
+from contextlib import ExitStack
 from functools import wraps
 import inspect
 from typing import TYPE_CHECKING, Any, Final, overload
@@ -34,7 +35,11 @@ from .storage.entity_store_validation import (
     validate_entity_data,
 )
 from .storage.serialize import get_serialized_schema
-from .telegrams import SIGNAL_KNX_TELEGRAM, TelegramDict
+from .telegrams import (
+    SIGNAL_KNX_DATA_SECURE_ISSUE_TELEGRAM,
+    SIGNAL_KNX_TELEGRAM,
+    TelegramDict,
+)

 if TYPE_CHECKING:
     from .knx_module import KNXModule
@@ -334,11 +339,23 @@ def ws_subscribe_telegram(
             telegram_dict,
         )

-    connection.subscriptions[msg["id"]] = async_dispatcher_connect(
-        hass,
-        signal=SIGNAL_KNX_TELEGRAM,
-        target=forward_telegram,
-    )
+    stack = ExitStack()
+    stack.callback(
+        async_dispatcher_connect(
+            hass,
+            signal=SIGNAL_KNX_TELEGRAM,
+            target=forward_telegram,
+        )
+    )
+    stack.callback(
+        async_dispatcher_connect(
+            hass,
+            signal=SIGNAL_KNX_DATA_SECURE_ISSUE_TELEGRAM,
+            target=forward_telegram,
+        )
+    )
+
+    connection.subscriptions[msg["id"]] = stack.close
     connection.send_result(msg["id"])
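The subscription change relies on `ExitStack.callback`, which collects several zero-argument cleanup callables and runs them LIFO on `close()`, giving the websocket a single unsubscribe handle for both signals. A self-contained sketch of the same pattern (the `subscribe` helper is illustrative, not Home Assistant API):

```python
from contextlib import ExitStack
from typing import Callable

def subscribe(signal: str) -> Callable[[], None]:
    """Illustrative stand-in for async_dispatcher_connect: returns an unsubscribe."""
    print(f"subscribed to {signal}")
    return lambda: print(f"unsubscribed from {signal}")

stack = ExitStack()
stack.callback(subscribe("knx_telegram"))
stack.callback(subscribe("knx_data_secure_issue_telegram"))

unsubscribe_all = stack.close  # one callable tears down both subscriptions
unsubscribe_all()              # runs the callbacks in LIFO order
```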
@@ -19,5 +19,5 @@
   "documentation": "https://www.home-assistant.io/integrations/nest",
   "iot_class": "cloud_push",
   "loggers": ["google_nest_sdm"],
-  "requirements": ["google-nest-sdm==9.1.1"]
+  "requirements": ["google-nest-sdm==9.1.2"]
 }
@@ -4,7 +4,7 @@
   "codeowners": ["@gjohansson-ST"],
   "config_flow": true,
   "documentation": "https://www.home-assistant.io/integrations/nordpool",
-  "integration_type": "hub",
+  "integration_type": "service",
   "iot_class": "cloud_polling",
   "loggers": ["pynordpool"],
   "quality_scale": "platinum",
@@ -9,7 +9,7 @@
   "iot_class": "local_push",
   "loggers": ["aioonkyo"],
   "quality_scale": "bronze",
-  "requirements": ["aioonkyo==0.3.0"],
+  "requirements": ["aioonkyo==0.4.0"],
   "ssdp": [
     {
       "deviceType": "urn:schemas-upnp-org:device:MediaRenderer:1",
@@ -10,6 +10,7 @@
   "config_flow": true,
   "dependencies": ["bluetooth_adapters"],
   "documentation": "https://www.home-assistant.io/integrations/oralb",
+  "integration_type": "device",
   "iot_class": "local_push",
   "loggers": ["oralb_ble"],
   "requirements": ["oralb-ble==0.17.6"]
@@ -162,7 +162,7 @@ class PingDataSubProcess(PingData):

         if pinger:
             with suppress(TypeError, ProcessLookupError):
-                await pinger.kill()  # type: ignore[func-returns-value]
+                pinger.kill()
             del pinger

         return None
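The `await` was dropped because `asyncio.subprocess.Process.kill()` is synchronous and returns `None` (hence the old `func-returns-value` ignore). A minimal sketch of the correct pattern:

```python
import asyncio

async def main() -> None:
    proc = await asyncio.create_subprocess_exec("sleep", "30")
    proc.kill()        # synchronous: sends the kill signal and returns None
    await proc.wait()  # the exit status is what actually gets awaited
    print(proc.returncode)

asyncio.run(main())
```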
@@ -20,7 +20,7 @@
   "loggers": ["roborock"],
   "quality_scale": "silver",
   "requirements": [
-    "python-roborock==3.8.4",
+    "python-roborock==3.9.2",
     "vacuum-map-parser-roborock==0.1.4"
   ]
 }
@@ -1,6 +1,5 @@
 """Roborock storage."""

-import dataclasses
 import logging
 from pathlib import Path
 import shutil
@@ -17,7 +16,7 @@ _LOGGER = logging.getLogger(__name__)

 STORAGE_PATH = f".storage/{DOMAIN}"
 MAPS_PATH = "maps"
-CACHE_VERSION = 1
+CACHE_VERSION = 2


 def _storage_path_prefix(hass: HomeAssistant, entry_id: str) -> Path:
@@ -44,6 +43,31 @@ async def async_cleanup_map_storage(hass: HomeAssistant, entry_id: str) -> None:
     await hass.async_add_executor_job(remove, path_prefix)


+class StoreImpl(Store[dict[str, Any]]):
+    """Store implementation for Roborock cache."""
+
+    def __init__(self, hass: HomeAssistant, entry_id: str) -> None:
+        """Initialize StoreImpl."""
+        super().__init__(
+            hass,
+            version=CACHE_VERSION,
+            key=f"{DOMAIN}/{entry_id}",
+            private=True,
+        )
+
+    async def _async_migrate_func(
+        self,
+        old_major_version: int,
+        old_minor_version: int,
+        old_data: dict[str, Any],
+    ) -> dict[str, Any]:
+        """Wipe out old caches with the old format."""
+        if old_major_version == 1:
+            # No need for migration as version 1 was never in any stable releases
+            return {}
+        return old_data
+
+
 class CacheStore(Cache):
     """Store and retrieve cache for a Roborock device.

@@ -55,19 +79,14 @@ class CacheStore(Cache):

     def __init__(self, hass: HomeAssistant, entry_id: str) -> None:
         """Initialize CacheStore."""
-        self._cache_store = Store[dict[str, Any]](
-            hass,
-            version=CACHE_VERSION,
-            key=f"{DOMAIN}/{entry_id}",
-            private=True,
-        )
+        self._cache_store = StoreImpl(hass, entry_id)
         self._cache_data: CacheData | None = None

     async def get(self) -> CacheData:
         """Retrieve cached metadata."""
         if self._cache_data is None:
             if data := await self._cache_store.async_load():
-                self._cache_data = CacheData(**data)
+                self._cache_data = CacheData.from_dict(data)
             else:
                 self._cache_data = CacheData()

@@ -80,7 +99,7 @@ class CacheStore(Cache):
     async def flush(self) -> None:
         """Flush cached metadata to disk."""
         if self._cache_data is not None:
-            await self._cache_store.async_save(dataclasses.asdict(self._cache_data))
+            await self._cache_store.async_save(self._cache_data.as_dict())

     async def async_remove(self) -> None:
         """Remove cached metadata from disk."""
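Bumping `CACHE_VERSION` to 2 is what makes the new `_async_migrate_func` run: Home Assistant's `Store` calls the hook when the version persisted on disk is lower than the store's current version. A minimal sketch of that mechanism, assuming a hypothetical stand-in rather than the real `Store` class:

```python
import asyncio
from typing import Any

CACHE_VERSION = 2

class FakeStore:
    """Hypothetical stand-in for HA's Store to show when migration runs."""

    version = CACHE_VERSION

    async def _async_migrate_func(
        self, old_major_version: int, old_minor_version: int, old_data: dict[str, Any]
    ) -> dict[str, Any]:
        if old_major_version == 1:
            return {}  # v1 never shipped in a stable release; wipe it
        return old_data

    async def async_load(self, stored: dict[str, Any]) -> dict[str, Any]:
        # The real Store reads the version from disk and invokes the hook.
        if stored["version"] < self.version:
            return await self._async_migrate_func(stored["version"], 0, stored["data"])
        return stored["data"]

print(asyncio.run(FakeStore().async_load({"version": 1, "data": {"maps": ["old"]}})))
# -> {} (the v1 payload is discarded)
```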
@@ -150,12 +150,9 @@ class WLEDSegmentLight(WLEDEntity, LightEntity):
     @property
     def available(self) -> bool:
         """Return True if entity is available."""
-        try:
-            self.coordinator.data.state.segments[self._segment]
-        except KeyError:
-            return False
-
-        return super().available
+        return (
+            super().available and self._segment in self.coordinator.data.state.segments
+        )

     @property
     def rgb_color(self) -> tuple[int, int, int] | None:
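The same simplification repeats for the WLED number, select, and switch entities below: a membership test replaces the try/except KeyError probe. A standalone sketch of the pattern, assuming `super().available` reduces to the coordinator's `last_update_success` as in Home Assistant's CoordinatorEntity; the coordinator here is a hypothetical minimal stand-in:

```python
from types import SimpleNamespace

# Hypothetical coordinator exposing the attributes the property reads.
coordinator = SimpleNamespace(
    last_update_success=True,
    data=SimpleNamespace(state=SimpleNamespace(segments={0: "segment-0"})),
)

class SegmentEntity:
    def __init__(self, coordinator, segment: int) -> None:
        self.coordinator = coordinator
        self._segment = segment

    @property
    def available(self) -> bool:
        """Available only while the segment still exists on the device."""
        return (
            self.coordinator.last_update_success
            and self._segment in self.coordinator.data.state.segments
        )

assert SegmentEntity(coordinator, 0).available
assert not SegmentEntity(coordinator, 1).available  # segment was removed
```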
@@ -97,12 +97,9 @@ class WLEDNumber(WLEDEntity, NumberEntity):
     @property
     def available(self) -> bool:
         """Return True if entity is available."""
-        try:
-            self.coordinator.data.state.segments[self._segment]
-        except KeyError:
-            return False
-
-        return super().available
+        return (
+            super().available and self._segment in self.coordinator.data.state.segments
+        )

     @property
     def native_value(self) -> float | None:
@@ -31,10 +31,6 @@ rules:
   config-entry-unloading: done
   docs-configuration-parameters: done
   docs-installation-parameters: todo
-  entity-unavailable:
-    status: todo
-    comment: |
-      The WLEDSegmentLight.available property can just be an if .. in .. check
   integration-owner: done
   log-when-unavailable: done
   parallel-updates: done
@@ -173,12 +173,9 @@ class WLEDPaletteSelect(WLEDEntity, SelectEntity):
     @property
     def available(self) -> bool:
         """Return True if entity is available."""
-        try:
-            self.coordinator.data.state.segments[self._segment]
-        except KeyError:
-            return False
-
-        return super().available
+        return (
+            super().available and self._segment in self.coordinator.data.state.segments
+        )

     @property
     def current_option(self) -> str | None:
@@ -167,12 +167,9 @@ class WLEDReverseSwitch(WLEDEntity, SwitchEntity):
     @property
     def available(self) -> bool:
         """Return True if entity is available."""
-        try:
-            self.coordinator.data.state.segments[self._segment]
-        except KeyError:
-            return False
-
-        return super().available
+        return (
+            super().available and self._segment in self.coordinator.data.state.segments
+        )

     @property
     def is_on(self) -> bool:
@@ -4511,7 +4511,7 @@
     },
     "nordpool": {
       "name": "Nord Pool",
-      "integration_type": "hub",
+      "integration_type": "service",
       "config_flow": true,
       "iot_class": "cloud_polling",
       "single_config_entry": true
@@ -4832,7 +4832,7 @@
     },
     "oralb": {
       "name": "Oral-B",
-      "integration_type": "hub",
+      "integration_type": "device",
       "config_flow": true,
       "iot_class": "local_push"
     },
homeassistant/generated/labs.py (generated, 4 changes)

@@ -6,8 +6,8 @@ To update, run python3 -m script.hassfest
 LABS_PREVIEW_FEATURES = {
     "automation": {
         "new_triggers_conditions": {
-            "feedback_url": "",
-            "learn_more_url": "",
+            "feedback_url": "https://forms.gle/fWFZqf5MzuwWTsCH8",
+            "learn_more_url": "https://www.home-assistant.io/blog/2025/12/03/release-202512/#purpose-specific-triggers-and-conditions",
             "report_issue_url": "https://github.com/home-assistant/core/issues/new?template=bug_report.yml&integration_link=https://www.home-assistant.io/integrations/automation&integration_name=Automation",
         },
     },
@@ -36,7 +36,7 @@ fnv-hash-fast==1.6.0
 go2rtc-client==0.3.0
 ha-ffmpeg==3.2.2
 habluetooth==5.8.0
-hass-nabucasa==1.6.2
+hass-nabucasa==1.7.0
 hassil==3.5.0
 home-assistant-bluetooth==1.13.1
 home-assistant-frontend==20251202.0
@@ -48,7 +48,7 @@ dependencies = [
     "fnv-hash-fast==1.6.0",
     # hass-nabucasa is imported by helpers which don't depend on the cloud
     # integration
-    "hass-nabucasa==1.6.2",
+    "hass-nabucasa==1.7.0",
     # When bumping httpx, please check the version pins of
     # httpcore, anyio, and h11 in gen_requirements_all
     "httpx==0.28.1",
requirements.txt (generated, 2 changes)

@@ -22,7 +22,7 @@ certifi>=2021.5.30
 ciso8601==2.3.3
 cronsim==2.7
 fnv-hash-fast==1.6.0
-hass-nabucasa==1.6.2
+hass-nabucasa==1.7.0
 httpx==0.28.1
 home-assistant-bluetooth==1.13.1
 ifaddr==0.2.0
requirements_all.txt (generated, 12 changes)

@@ -340,7 +340,7 @@ aiontfy==0.6.1
 aionut==4.3.4

 # homeassistant.components.onkyo
-aioonkyo==0.3.0
+aioonkyo==0.4.0

 # homeassistant.components.openexchangerates
 aioopenexchangerates==0.6.8
@@ -1090,7 +1090,7 @@ google-genai==1.38.0
 google-maps-routing==0.6.15

 # homeassistant.components.nest
-google-nest-sdm==9.1.1
+google-nest-sdm==9.1.2

 # homeassistant.components.google_photos
 google-photos-library-api==0.12.1
@@ -1163,7 +1163,7 @@ habluetooth==5.8.0
 hanna-cloud==0.0.6

 # homeassistant.components.cloud
-hass-nabucasa==1.6.2
+hass-nabucasa==1.7.0

 # homeassistant.components.splunk
 hass-splunk==0.1.1
@@ -1288,7 +1288,7 @@ insteon-frontend-home-assistant==0.5.0
 intellifire4py==4.2.1

 # homeassistant.components.iometer
-iometer==0.2.0
+iometer==0.3.0

 # homeassistant.components.iotty
 iottycloud==0.3.0
@@ -2563,7 +2563,7 @@ python-rabbitair==0.0.8
 python-ripple-api==0.0.3

 # homeassistant.components.roborock
-python-roborock==3.8.4
+python-roborock==3.9.2

 # homeassistant.components.smarttub
 python-smarttub==0.0.45
@@ -3191,7 +3191,7 @@ wyoming==1.7.2
 xiaomi-ble==1.2.0

 # homeassistant.components.knx
-xknx==3.11.0
+xknx==3.12.0

 # homeassistant.components.knx
 xknxproject==3.8.2
requirements_test_all.txt (generated, 12 changes)

@@ -325,7 +325,7 @@ aiontfy==0.6.1
 aionut==4.3.4

 # homeassistant.components.onkyo
-aioonkyo==0.3.0
+aioonkyo==0.4.0

 # homeassistant.components.openexchangerates
 aioopenexchangerates==0.6.8
@@ -966,7 +966,7 @@ google-genai==1.38.0
 google-maps-routing==0.6.15

 # homeassistant.components.nest
-google-nest-sdm==9.1.1
+google-nest-sdm==9.1.2

 # homeassistant.components.google_photos
 google-photos-library-api==0.12.1
@@ -1033,7 +1033,7 @@ habluetooth==5.8.0
 hanna-cloud==0.0.6

 # homeassistant.components.cloud
-hass-nabucasa==1.6.2
+hass-nabucasa==1.7.0

 # homeassistant.components.assist_satellite
 # homeassistant.components.conversation
@@ -1134,7 +1134,7 @@ insteon-frontend-home-assistant==0.5.0
 intellifire4py==4.2.1

 # homeassistant.components.iometer
-iometer==0.2.0
+iometer==0.3.0

 # homeassistant.components.iotty
 iottycloud==0.3.0
@@ -2144,7 +2144,7 @@ python-pooldose==0.8.1
 python-rabbitair==0.0.8

 # homeassistant.components.roborock
-python-roborock==3.8.4
+python-roborock==3.9.2

 # homeassistant.components.smarttub
 python-smarttub==0.0.45
@@ -2658,7 +2658,7 @@ wyoming==1.7.2
 xiaomi-ble==1.2.0

 # homeassistant.components.knx
-xknx==3.11.0
+xknx==3.12.0

 # homeassistant.components.knx
 xknxproject==3.8.2
@@ -205,8 +205,8 @@ async def test_ws_get_client_config(
         "iceServers": [
             {
                 "urls": [
-                    "stun:stun.home-assistant.io:80",
                     "stun:stun.home-assistant.io:3478",
+                    "stun:stun.home-assistant.io:80",
                 ]
             },
         ],
@@ -238,8 +238,8 @@ async def test_ws_get_client_config(
         "iceServers": [
             {
                 "urls": [
-                    "stun:stun.home-assistant.io:80",
                     "stun:stun.home-assistant.io:3478",
+                    "stun:stun.home-assistant.io:80",
                 ]
             },
             {
@@ -11,6 +11,7 @@ import pytest
 import voluptuous as vol

 from homeassistant.components import conversation
+from homeassistant.components.cloud.const import AI_TASK_ENTITY_UNIQUE_ID, DOMAIN
 from homeassistant.components.cloud.entity import (
     BaseCloudLLMEntity,
     _convert_content_to_param,
@@ -18,7 +19,8 @@ from homeassistant.components.cloud.entity import (
 )
 from homeassistant.core import HomeAssistant
 from homeassistant.exceptions import HomeAssistantError
-from homeassistant.helpers import llm, selector
+from homeassistant.helpers import entity_registry as er, llm, selector
 from homeassistant.setup import async_setup_component

 from tests.common import MockConfigEntry

@@ -219,3 +221,66 @@ async def test_prepare_chat_for_generation_passes_messages_through(

     assert response["messages"] == messages
     assert response["conversation_id"] == "conversation-id"
+
+
+async def test_async_handle_chat_log_service_sets_structured_output_non_strict(
+    hass: HomeAssistant,
+    cloud: MagicMock,
+    entity_registry: er.EntityRegistry,
+    mock_cloud_login: None,
+) -> None:
+    """Ensure structured output requests always disable strict validation via service."""
+    assert await async_setup_component(hass, DOMAIN, {})
+    await hass.async_block_till_done()
+
+    on_start_callback = cloud.register_on_start.call_args[0][0]
+    await on_start_callback()
+    await hass.async_block_till_done()
+
+    entity_id = entity_registry.async_get_entity_id(
+        "ai_task", DOMAIN, AI_TASK_ENTITY_UNIQUE_ID
+    )
+    assert entity_id is not None
+
+    async def _empty_stream():
+        return
+
+    async def _fake_delta_stream(
+        self: conversation.ChatLog,
+        agent_id: str,
+        stream,
+    ):
+        content = conversation.AssistantContent(
+            agent_id=agent_id, content='{"value": "ok"}'
+        )
+        self.async_add_assistant_content_without_tools(content)
+        yield content
+
+    cloud.llm.async_generate_data = AsyncMock(return_value=_empty_stream())
+
+    with patch(
+        "homeassistant.components.conversation.chat_log.ChatLog.async_add_delta_content_stream",
+        _fake_delta_stream,
+    ):
+        await hass.services.async_call(
+            "ai_task",
+            "generate_data",
+            {
+                "entity_id": entity_id,
+                "task_name": "Device Report",
+                "instructions": "Provide value.",
+                "structure": {
+                    "value": {
+                        "selector": {"text": None},
+                        "required": True,
+                    }
+                },
+            },
+            blocking=True,
+            return_response=True,
+        )
+
+    cloud.llm.async_generate_data.assert_awaited_once()
+    _, kwargs = cloud.llm.async_generate_data.call_args
+
+    assert kwargs["response_format"]["json_schema"]["strict"] is False
@@ -11,9 +11,15 @@ from xknx import XKNX
 from xknx.core import XknxConnectionState, XknxConnectionType
 from xknx.dpt import DPTArray, DPTBinary
 from xknx.io import DEFAULT_MCAST_GRP, DEFAULT_MCAST_PORT
-from xknx.telegram import Telegram, TelegramDirection
+from xknx.telegram import Telegram, TelegramDirection, tpci
 from xknx.telegram.address import GroupAddress, IndividualAddress
-from xknx.telegram.apci import APCI, GroupValueRead, GroupValueResponse, GroupValueWrite
+from xknx.telegram.apci import (
+    APCI,
+    GroupValueRead,
+    GroupValueResponse,
+    GroupValueWrite,
+    SecureAPDU,
+)

 from homeassistant.components.knx.const import (
     CONF_KNX_AUTOMATIC,
@@ -312,6 +318,23 @@ class KNXTestKit:
             source=source,
         )

+    def receive_data_secure_issue(
+        self,
+        group_address: str,
+        source: str | None = None,
+    ) -> None:
+        """Inject incoming telegram with undecodable data secure payload."""
+        telegram = Telegram(
+            destination_address=GroupAddress(group_address),
+            direction=TelegramDirection.INCOMING,
+            source_address=IndividualAddress(source or self.INDIVIDUAL_ADDRESS),
+            tpci=tpci.TDataGroup(),
+            payload=SecureAPDU.from_knx(
+                bytes.fromhex("03f110002446cfef4ac085e7092ab062b44d")
+            ),
+        )
+        self.xknx.telegram_queue.received_data_secure_group_key_issue(telegram)
+

 @pytest.fixture
 def mock_config_entry() -> MockConfigEntry:
tests/components/knx/test_repairs.py (new file, 133 lines)

@@ -0,0 +1,133 @@
"""Test repair flows for KNX integration."""

import pytest
from xknx.exceptions.exception import InvalidSecureConfiguration

from homeassistant.components.knx import repairs
from homeassistant.components.knx.const import (
    CONF_KNX_KNXKEY_PASSWORD,
    DOMAIN,
    REPAIR_ISSUE_DATA_SECURE_GROUP_KEY,
)
from homeassistant.core import HomeAssistant
from homeassistant.data_entry_flow import FlowResultType
from homeassistant.helpers import issue_registry as ir

from .conftest import KNXTestKit
from .test_config_flow import FIXTURE_UPLOAD_UUID, patch_file_upload

from tests.components.repairs import (
    async_process_repairs_platforms,
    get_repairs,
    process_repair_fix_flow,
    start_repair_fix_flow,
)
from tests.typing import ClientSessionGenerator, WebSocketGenerator


async def test_create_fix_flow_raises_on_unknown_issue_id(hass: HomeAssistant) -> None:
    """Test create_fix_flow raises on unknown issue_id."""

    with pytest.raises(ValueError):
        await repairs.async_create_fix_flow(hass, "no_such_issue", None)


@pytest.mark.parametrize(
    "configured_group_address",
    ["1/2/5", "3/4/6"],
)
async def test_data_secure_group_key_issue_only_for_configured_group_address(
    hass: HomeAssistant,
    knx: KNXTestKit,
    configured_group_address: str,
) -> None:
    """Test that repair issue is only created for configured group addresses."""
    await knx.setup_integration(
        {
            "switch": {
                "name": "Test Switch",
                "address": configured_group_address,
            }
        }
    )

    issue_registry = ir.async_get(hass)
    assert bool(issue_registry.issues) is False
    # An issue should only be created if this address is configured.
    knx.receive_data_secure_issue("1/2/5")
    assert bool(issue_registry.issues) is (configured_group_address == "1/2/5")


async def test_data_secure_group_key_issue_repair_flow(
    hass: HomeAssistant,
    hass_client: ClientSessionGenerator,
    hass_ws_client: WebSocketGenerator,
    knx: KNXTestKit,
) -> None:
    """Test repair flow for DataSecure group key issue."""
    await knx.setup_integration(
        {
            "switch": [
                {"name": "Test 1", "address": "1/2/5"},
                {"name": "Test 2", "address": "11/0/0"},
            ]
        }
    )

    knx.receive_data_secure_issue("11/0/0", source="1.0.1")
    knx.receive_data_secure_issue("1/2/5", source="1.0.10")
    knx.receive_data_secure_issue("1/2/5", source="1.0.1")
    _placeholders = {
        "addresses": "`1/2/5` from 1.0.1, 1.0.10\n`11/0/0` from 1.0.1",  # check sorting
        "interface": "0.0.0",
    }
    issue_registry = ir.async_get(hass)
    issue = issue_registry.async_get_issue(DOMAIN, REPAIR_ISSUE_DATA_SECURE_GROUP_KEY)
    assert issue is not None
    assert issue.translation_placeholders == _placeholders

    issues = await get_repairs(hass, hass_ws_client)
    assert issues

    await async_process_repairs_platforms(hass)
    client = await hass_client()
    flow = await start_repair_fix_flow(
        client, DOMAIN, REPAIR_ISSUE_DATA_SECURE_GROUP_KEY
    )

    flow_id = flow["flow_id"]
    assert flow["type"] == FlowResultType.FORM
    assert flow["step_id"] == "secure_knxkeys"
    assert flow["description_placeholders"] == _placeholders

    # test error handling
    with patch_file_upload(
        side_effect=InvalidSecureConfiguration(),
    ):
        flow = await process_repair_fix_flow(
            client,
            flow_id,
            {
                repairs.CONF_KEYRING_FILE: FIXTURE_UPLOAD_UUID,
                CONF_KNX_KNXKEY_PASSWORD: "invalid_password_mocked",
            },
        )
    assert flow["type"] == FlowResultType.FORM
    assert flow["step_id"] == "secure_knxkeys"
    assert flow["errors"] == {CONF_KNX_KNXKEY_PASSWORD: "keyfile_invalid_signature"}

    # test successful file upload
    with patch_file_upload():
        flow = await process_repair_fix_flow(
            client,
            flow_id,
            {
                repairs.CONF_KEYRING_FILE: FIXTURE_UPLOAD_UUID,
                CONF_KNX_KNXKEY_PASSWORD: "password",
            },
        )
    assert flow["type"] == FlowResultType.CREATE_ENTRY
    assert (
        issue_registry.async_get_issue(DOMAIN, REPAIR_ISSUE_DATA_SECURE_GROUP_KEY)
        is None
    )
@@ -311,6 +311,8 @@ async def test_knx_subscribe_telegrams_command_no_project(
         "switch", "turn_on", {"entity_id": "switch.test"}, blocking=True
     )
     await knx.assert_write("1/2/4", 1)
+    # receive undecodable data secure telegram
+    knx.receive_data_secure_issue("1/2/5")

     # receive events
     res = await client.receive_json()
@@ -355,6 +357,14 @@ async def test_knx_subscribe_telegrams_command_no_project(
     assert res["event"]["direction"] == "Outgoing"
     assert res["event"]["timestamp"] is not None

+    res = await client.receive_json()
+    assert res["event"]["destination"] == "1/2/5"
+    assert res["event"]["payload"] is None
+    assert res["event"]["telegramtype"] == "SecureAPDU"
+    assert res["event"]["source"] == "1.2.3"
+    assert res["event"]["direction"] == "Incoming"
+    assert res["event"]["timestamp"] is not None
+

 async def test_knx_subscribe_telegrams_command_project(
     hass: HomeAssistant,
@@ -22,7 +22,7 @@ class MockAsyncSubprocess:
         """Fails immediately with a timeout."""
         raise TimeoutError

-    async def kill(self) -> None:
+    def kill(self) -> None:
         """Raise preset exception when called."""
         raise self.killsig