Add thread dataset store (#87187)

* Add thread dataset store

* Address review comments

* Bump python-otbr-api to 1.0.3

* Remove stuff which we don't need yet
This commit is contained in:
Erik Montnemery 2023-02-03 15:47:41 +01:00 committed by GitHub
parent 1409b89af3
commit 527de22adf
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
11 changed files with 359 additions and 13 deletions

View File

@ -8,9 +8,10 @@ from typing import Any, Concatenate, ParamSpec, TypeVar
import python_otbr_api

from homeassistant.components.thread import async_add_dataset
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryNotReady, HomeAssistantError
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.typing import ConfigType
@ -58,7 +59,17 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
    """Set up an Open Thread Border Router config entry.

    Fetches the active dataset from the border router and registers it with
    the thread integration, then publishes the API wrapper in hass.data.

    Raises ConfigEntryNotReady (so Home Assistant retries setup later) if the
    border router cannot be reached.
    """
    api = python_otbr_api.OTBR(entry.data["url"], async_get_clientsession(hass), 10)
    otbrdata = OTBRData(entry.data["url"], api)
    try:
        # Probe the router up front; any API error defers setup via retry.
        dataset = await otbrdata.get_active_dataset_tlvs()
    except HomeAssistantError as err:
        raise ConfigEntryNotReady from err
    if dataset:
        # Share the active dataset with the thread integration's store.
        await async_add_dataset(hass, entry.title, dataset.hex())
    # Publish the wrapper only after the router has been reached successfully,
    # so a failed setup never leaves a half-initialized object in hass.data.
    hass.data[DOMAIN] = otbrdata

    return True

View File

@ -4,7 +4,7 @@
"config_flow": true,
"dependencies": ["thread"],
"documentation": "https://www.home-assistant.io/integrations/otbr",
"requirements": ["python-otbr-api==1.0.2"],
"requirements": ["python-otbr-api==1.0.3"],
"after_dependencies": ["hassio"],
"codeowners": ["@home-assistant/core"],
"iot_class": "local_polling",

View File

@ -6,6 +6,13 @@ from homeassistant.core import HomeAssistant
from homeassistant.helpers.typing import ConfigType
from .const import DOMAIN
from .dataset_store import DatasetEntry, async_add_dataset
__all__ = [
"DOMAIN",
"DatasetEntry",
"async_add_dataset",
]
async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
@ -16,6 +23,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
DOMAIN, context={"source": SOURCE_IMPORT}
)
)
hass.data[DOMAIN] = {}
return True

View File

@ -0,0 +1,123 @@
"""Persistently store thread datasets."""
from __future__ import annotations
import dataclasses
from datetime import datetime
from functools import cached_property
from typing import Any, cast
from python_otbr_api import tlv_parser
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.singleton import singleton
from homeassistant.helpers.storage import Store
from homeassistant.util import dt as dt_util, ulid as ulid_util
# Key used with helpers.singleton to cache the loaded store per hass instance.
DATA_STORE = "thread.datasets"
# Storage file key and schema version for helpers.storage.Store.
STORAGE_KEY = "thread.datasets"
STORAGE_VERSION_MAJOR = 1
STORAGE_VERSION_MINOR = 1
# Debounce delay (seconds) for persisting the store after a change.
SAVE_DELAY = 10
@dataclasses.dataclass(frozen=True)
class DatasetEntry:
    """Dataset store entry."""

    # True for the preferred dataset; the first dataset added becomes preferred.
    preferred: bool
    # Free-form description of where the dataset came from (e.g. entry title).
    source: str
    # Thread operational dataset as a hex encoded TLV string.
    tlv: str

    # Creation timestamp; defaults to now (UTC) when a new entry is added.
    created: datetime = dataclasses.field(default_factory=dt_util.utcnow)
    # Unique id for this entry, a freshly generated ULID by default.
    id: str = dataclasses.field(default_factory=ulid_util.ulid)

    @cached_property
    def dataset(self) -> dict[tlv_parser.MeshcopTLVType, str]:
        """Return the dataset in dict format."""
        # NOTE: cached_property writes to the instance __dict__ directly, so
        # it works even though the dataclass is frozen.
        return tlv_parser.parse_tlv(self.tlv)

    def to_json(self) -> dict[str, Any]:
        """Return a JSON serializable representation for storage."""
        return {
            "created": self.created.isoformat(),
            "id": self.id,
            "preferred": self.preferred,
            "source": self.source,
            "tlv": self.tlv,
        }
class DatasetStore:
    """Class to hold a collection of thread datasets."""

    def __init__(self, hass: HomeAssistant) -> None:
        """Initialize the dataset store."""
        self.hass = hass
        # Entries keyed by DatasetEntry.id (a ULID).
        self.datasets: dict[str, DatasetEntry] = {}
        self._store: Store[dict[str, list[dict[str, Any]]]] = Store(
            hass,
            STORAGE_VERSION_MAJOR,
            STORAGE_KEY,
            atomic_writes=True,
            minor_version=STORAGE_VERSION_MINOR,
        )

    @callback
    def async_add(self, source: str, tlv: str) -> None:
        """Add dataset, does nothing if it already exists.

        Raises (from parse_tlv) if the TLV string is not valid.
        """
        # Make sure the tlv is valid
        dataset = tlv_parser.parse_tlv(tlv)
        # Bail out if the dataset already exists; comparison is on the parsed
        # dict, so a TLV string with reordered keys counts as a duplicate.
        if any(entry for entry in self.datasets.values() if entry.dataset == dataset):
            return

        # Set to preferred if this is the first dataset
        preferred = not bool(self.datasets)
        entry = DatasetEntry(preferred=preferred, source=source, tlv=tlv)
        self.datasets[entry.id] = entry
        self.async_schedule_save()

    async def async_load(self) -> None:
        """Load the datasets."""
        data = await self._store.async_load()

        datasets: dict[str, DatasetEntry] = {}

        if data is not None:
            for dataset in data["datasets"]:
                # Stored timestamps are ISO formatted; cast because
                # parse_datetime is typed as returning datetime | None.
                created = cast(datetime, dt_util.parse_datetime(dataset["created"]))
                datasets[dataset["id"]] = DatasetEntry(
                    created=created,
                    id=dataset["id"],
                    preferred=dataset["preferred"],
                    source=dataset["source"],
                    tlv=dataset["tlv"],
                )

        self.datasets = datasets

    @callback
    def async_schedule_save(self) -> None:
        """Schedule saving the dataset store."""
        # Debounced: multiple adds within SAVE_DELAY coalesce into one write.
        self._store.async_delay_save(self._data_to_save, SAVE_DELAY)

    @callback
    def _data_to_save(self) -> dict[str, list[dict[str, str | None]]]:
        """Return data of datasets to store in a file."""
        data = {}
        data["datasets"] = [dataset.to_json() for dataset in self.datasets.values()]
        return data
@singleton(DATA_STORE)
async def _async_get_store(hass: HomeAssistant) -> DatasetStore:
    """Return the singleton dataset store, loading it on first access."""
    dataset_store = DatasetStore(hass)
    await dataset_store.async_load()
    return dataset_store
async def async_add_dataset(hass: HomeAssistant, source: str, tlv: str) -> None:
    """Add a dataset to the store (no-op if it is already known)."""
    dataset_store = await _async_get_store(hass)
    dataset_store.async_add(source, tlv)

View File

@ -5,5 +5,6 @@
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/thread",
"integration_type": "service",
"iot_class": "local_polling"
"iot_class": "local_polling",
"requirements": ["python-otbr-api==1.0.3"]
}

View File

@ -2087,7 +2087,8 @@ python-mystrom==1.1.2
python-nest==4.2.0
# homeassistant.components.otbr
python-otbr-api==1.0.2
# homeassistant.components.thread
python-otbr-api==1.0.3
# homeassistant.components.picnic
python-picnic-api==1.1.0

View File

@ -1477,7 +1477,8 @@ python-miio==0.5.12
python-nest==4.2.0
# homeassistant.components.otbr
python-otbr-api==1.0.2
# homeassistant.components.thread
python-otbr-api==1.0.3
# homeassistant.components.picnic
python-picnic-api==1.1.0

View File

@ -1,2 +1,8 @@
"""Tests for the Open Thread Border Router integration."""
BASE_URL = "http://core-silabs-multiprotocol:8081"
CONFIG_ENTRY_DATA = {"url": "http://core-silabs-multiprotocol:8081"}
DATASET = bytes.fromhex(
"0E080000000000010000000300001035060004001FFFE00208F642646DA209B1C00708FDF57B5A"
"0FE2AAF60510DE98B5BA1A528FEE049D4B4B01835375030D4F70656E5468726561642048410102"
"25A40410F5DD18371BFD29E1A601EF6FFAD94C030C0402A0F7F8"
)

View File

@ -5,9 +5,9 @@ import pytest
from homeassistant.components import otbr
from tests.common import MockConfigEntry
from . import CONFIG_ENTRY_DATA, DATASET
CONFIG_ENTRY_DATA = {"url": "http://core-silabs-multiprotocol:8081"}
from tests.common import MockConfigEntry
@pytest.fixture(name="otbr_config_entry")
@ -20,5 +20,5 @@ async def otbr_config_entry_fixture(hass):
title="Open Thread Border Router",
)
config_entry.add_to_hass(hass)
with patch("python_otbr_api.OTBR.get_active_dataset_tlvs"):
with patch("python_otbr_api.OTBR.get_active_dataset_tlvs", return_value=DATASET):
assert await hass.config_entries.async_setup(config_entry.entry_id)

View File

@ -1,6 +1,7 @@
"""Test the Open Thread Border Router integration."""
from http import HTTPStatus
from unittest.mock import patch
import pytest
@ -8,15 +9,52 @@ from homeassistant.components import otbr
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import HomeAssistantError
from . import BASE_URL
from . import BASE_URL, CONFIG_ENTRY_DATA, DATASET
from tests.common import MockConfigEntry
from tests.test_util.aiohttp import AiohttpClientMocker
async def test_import_dataset(hass: HomeAssistant):
    """Test the active dataset is imported at setup."""
    config_entry = MockConfigEntry(
        data=CONFIG_ENTRY_DATA,
        domain=otbr.DOMAIN,
        options={},
        title="My OTBR",
    )
    config_entry.add_to_hass(hass)

    dataset_patch = patch(
        "python_otbr_api.OTBR.get_active_dataset_tlvs", return_value=DATASET
    )
    add_patch = patch(
        "homeassistant.components.thread.dataset_store.DatasetStore.async_add"
    )
    with dataset_patch, add_patch as mock_add:
        assert await hass.config_entries.async_setup(config_entry.entry_id)
    # Setup must forward the router's dataset to the thread dataset store.
    mock_add.assert_called_once_with(config_entry.title, DATASET.hex())
async def test_config_entry_not_ready(
    hass: HomeAssistant, aioclient_mock: AiohttpClientMocker
):
    """Test setup fails with ConfigEntryNotReady on an unexpected response."""
    config_entry = MockConfigEntry(
        data=CONFIG_ENTRY_DATA,
        domain=otbr.DOMAIN,
        options={},
        title="My OTBR",
    )
    config_entry.add_to_hass(hass)
    # HTTP 201 from the active-dataset endpoint is treated as an error.
    aioclient_mock.get(f"{BASE_URL}/node/dataset/active", status=HTTPStatus.CREATED)
    assert not await hass.config_entries.async_setup(config_entry.entry_id)
async def test_remove_entry(
hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, otbr_config_entry
):
"""Test async_get_thread_state."""
"""Test async_get_active_dataset_tlvs after removing the config entry."""
aioclient_mock.get(f"{BASE_URL}/node/dataset/active", text="0E")
@ -80,7 +118,7 @@ async def test_get_active_dataset_tlvs_201(
aioclient_mock.get(f"{BASE_URL}/node/dataset/active", status=HTTPStatus.CREATED)
with pytest.raises(HomeAssistantError):
assert await otbr.async_get_active_dataset_tlvs(hass) is None
assert await otbr.async_get_active_dataset_tlvs(hass)
async def test_get_active_dataset_tlvs_invalid(
@ -90,4 +128,4 @@ async def test_get_active_dataset_tlvs_invalid(
aioclient_mock.get(f"{BASE_URL}/node/dataset/active", text="unexpected")
with pytest.raises(HomeAssistantError):
assert await otbr.async_get_active_dataset_tlvs(hass) is None
assert await otbr.async_get_active_dataset_tlvs(hass)

View File

@ -0,0 +1,157 @@
"""Test the thread dataset store."""
import pytest
from python_otbr_api.tlv_parser import TLVError
from homeassistant.components.thread import dataset_store
from homeassistant.core import HomeAssistant
from tests.common import flush_store
# Hex encoded TLV datasets used as test fixtures.
DATASET_1 = (
    "0E080000000000010000000300000F35060004001FFFE0020811111111222222220708FDAD70BF"
    "E5AA15DD051000112233445566778899AABBCCDDEEFF030E4F70656E54687265616444656D6F01"
    "0212340410445F2B5CA6F2A93A55CE570A70EFEECB0C0402A0F7F8"
)

# Same as DATASET_1, but PAN ID moved to the end
DATASET_1_REORDERED = (
    "0E080000000000010000000300000F35060004001FFFE0020811111111222222220708FDAD70BF"
    "E5AA15DD051000112233445566778899AABBCCDDEEFF030E4F70656E54687265616444656D6F04"
    "10445F2B5CA6F2A93A55CE570A70EFEECB0C0402A0F7F801021234"
)

# Differs from DATASET_1 in the network name TLV.
DATASET_2 = (
    "0E080000000000010000000300000F35060004001FFFE0020811111111222222220708FDAD70BF"
    "E5AA15DD051000112233445566778899AABBCCDDEEFF030E486f6d65417373697374616e742101"
    "0212340410445F2B5CA6F2A93A55CE570A70EFEECB0C0402A0F7F8"
)

# Differs from DATASET_1 in the network name TLV (non-ASCII name).
DATASET_3 = (
    "0E080000000000010000000300000F35060004001FFFE0020811111111222222220708FDAD70BF"
    "E5AA15DD051000112233445566778899AABBCCDDEEFF030E7ef09f90a3f09f90a5f09f90a47e01"
    "0212340410445F2B5CA6F2A93A55CE570A70EFEECB0C0402A0F7F8"
)
async def test_add_invalid_dataset(hass: HomeAssistant) -> None:
    """Test adding an invalid dataset."""
    # A malformed TLV string must raise and leave the store untouched.
    with pytest.raises(TLVError, match="unknown type 222"):
        await dataset_store.async_add_dataset(hass, "source", "DEADBEEF")

    store = await dataset_store._async_get_store(hass)
    assert not store.datasets
async def test_add_dataset_twice(hass: HomeAssistant) -> None:
    """Test adding dataset twice does nothing."""
    await dataset_store.async_add_dataset(hass, "source", DATASET_1)

    store = await dataset_store._async_get_store(hass)
    assert len(store.datasets) == 1
    (first_entry,) = store.datasets.values()

    # Adding the identical TLV again must not create or replace an entry.
    await dataset_store.async_add_dataset(hass, "new_source", DATASET_1)
    assert len(store.datasets) == 1
    (entry_after,) = store.datasets.values()
    assert entry_after.created == first_entry.created
async def test_add_dataset_reordered(hass: HomeAssistant) -> None:
    """Test adding dataset with keys in a different order does nothing."""
    await dataset_store.async_add_dataset(hass, "source", DATASET_1)

    store = await dataset_store._async_get_store(hass)
    assert len(store.datasets) == 1
    (first_entry,) = store.datasets.values()

    # Duplicates are detected on the parsed dataset, so reordered TLVs
    # are recognized as the same dataset.
    await dataset_store.async_add_dataset(hass, "new_source", DATASET_1_REORDERED)
    assert len(store.datasets) == 1
    (entry_after,) = store.datasets.values()
    assert entry_after.created == first_entry.created
async def test_load_datasets(hass: HomeAssistant) -> None:
    """Make sure that we can load/save data correctly."""
    datasets = [
        {"source": "Google", "tlv": DATASET_1},
        {"source": "Multipan", "tlv": DATASET_2},
        {"source": "🎅", "tlv": DATASET_3},
    ]

    store1 = await dataset_store._async_get_store(hass)
    for dataset in datasets:
        store1.async_add(dataset["source"], dataset["tlv"])
    assert len(store1.datasets) == 3

    entries_by_source_1 = {
        entry.source: entry for entry in store1.datasets.values()
    }
    dataset_1_store_1 = entries_by_source_1["Google"]
    dataset_2_store_1 = entries_by_source_1["Multipan"]
    dataset_3_store_1 = entries_by_source_1["🎅"]
    # Only the first dataset added is marked preferred.
    assert dataset_1_store_1.preferred
    assert not dataset_2_store_1.preferred
    assert not dataset_3_store_1.preferred

    # Force a write, then load into a fresh store instance.
    store2 = dataset_store.DatasetStore(hass)
    await flush_store(store1._store)
    await store2.async_load()

    assert len(store2.datasets) == 3
    entries_by_source_2 = {
        entry.source: entry for entry in store2.datasets.values()
    }
    dataset_1_store_2 = entries_by_source_2["Google"]
    dataset_3_store_2 = entries_by_source_2["🎅"]

    # Entry order, ids and contents must round-trip through storage.
    assert list(store1.datasets) == list(store2.datasets)
    assert dataset_1_store_1 == dataset_1_store_2
    assert dataset_3_store_1 == dataset_3_store_2
async def test_loading_datasets_from_storage(hass: HomeAssistant, hass_storage) -> None:
    """Test loading stored datasets on start."""
    # Pre-seed the storage file with three entries in the v1.1 schema.
    stored_datasets = [
        {
            "created": "2023-02-02T09:41:13.746514+00:00",
            "id": f"id{index}",
            "preferred": True,
            "source": f"source_{index}",
            "tlv": f"DATASET_{index}",
        }
        for index in (1, 2, 3)
    ]
    hass_storage[dataset_store.STORAGE_KEY] = {
        "version": dataset_store.STORAGE_VERSION_MAJOR,
        "minor_version": dataset_store.STORAGE_VERSION_MINOR,
        "data": {"datasets": stored_datasets},
    }

    store = await dataset_store._async_get_store(hass)
    assert len(store.datasets) == 3