Store preferred dataset separately in thread dataset store (#87378)

Erik Montnemery 2023-02-04 12:33:17 +01:00 committed by GitHub
parent ff7c455478
commit 0d713809e7
3 changed files with 20 additions and 16 deletions
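
In short, the per-entry "preferred" flag on DatasetEntry is replaced by a single store-level preferred_dataset id. A rough sketch of the stored "data" payload before and after this commit (values are placeholders taken from the test fixture below):

# Before: every dataset entry carried its own "preferred" bool.
old_data = {
    "datasets": [
        {"created": "...", "id": "id1", "preferred": True, "source": "source_1", "tlv": "DATASET_1"},
    ],
}

# After: entries drop the flag; the store keeps one preferred dataset id.
new_data = {
    "datasets": [
        {"created": "...", "id": "id1", "source": "source_1", "tlv": "DATASET_1"},
    ],
    "preferred_dataset": "id1",
}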

View File

@@ -24,7 +24,6 @@ SAVE_DELAY = 10
 class DatasetEntry:
     """Dataset store entry."""

-    preferred: bool
     source: str
     tlv: str
@@ -56,7 +55,6 @@ class DatasetEntry:
         return {
             "created": self.created.isoformat(),
             "id": self.id,
-            "preferred": self.preferred,
             "source": self.source,
             "tlv": self.tlv,
         }
@@ -69,7 +67,8 @@ class DatasetStore:
         """Initialize the dataset store."""
         self.hass = hass
         self.datasets: dict[str, DatasetEntry] = {}
-        self._store: Store[dict[str, list[dict[str, Any]]]] = Store(
+        self.preferred_dataset: str | None = None
+        self._store: Store[dict[str, Any]] = Store(
             hass,
             STORAGE_VERSION_MAJOR,
             STORAGE_KEY,
@@ -86,10 +85,11 @@ class DatasetStore:
         if any(entry for entry in self.datasets.values() if entry.dataset == dataset):
             return
-        # Set to preferred if this is the first dataset
-        preferred = not bool(self.datasets)
-        entry = DatasetEntry(preferred=preferred, source=source, tlv=tlv)
+        entry = DatasetEntry(source=source, tlv=tlv)
         self.datasets[entry.id] = entry
+        # Set to preferred if there is no preferred dataset
+        if self.preferred_dataset is None:
+            self.preferred_dataset = entry.id
         self.async_schedule_save()

     @callback
@@ -102,6 +102,7 @@ class DatasetStore:
         data = await self._store.async_load()

         datasets: dict[str, DatasetEntry] = {}
+        preferred_dataset: str | None = None

         if data is not None:
             for dataset in data["datasets"]:
@@ -109,12 +110,13 @@
                 datasets[dataset["id"]] = DatasetEntry(
                     created=created,
                     id=dataset["id"],
-                    preferred=dataset["preferred"],
                     source=dataset["source"],
                     tlv=dataset["tlv"],
                 )
+            preferred_dataset = data["preferred_dataset"]

         self.datasets = datasets
+        self.preferred_dataset = preferred_dataset

     @callback
     def async_schedule_save(self) -> None:
@@ -124,8 +126,9 @@
     @callback
     def _data_to_save(self) -> dict[str, list[dict[str, str | None]]]:
         """Return data of datasets to store in a file."""
-        data = {}
+        data: dict[str, Any] = {}
         data["datasets"] = [dataset.to_json() for dataset in self.datasets.values()]
+        data["preferred_dataset"] = self.preferred_dataset
         return data
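
For illustration, a minimal dependency-free sketch of the behaviour the hunks above implement (the Entry and PreferredDatasetSketch names are invented for this sketch; the real DatasetStore also skips duplicate datasets, persists through homeassistant.helpers.storage.Store, and saves with a delay):

from dataclasses import dataclass, field
from uuid import uuid4


@dataclass
class Entry:
    source: str
    tlv: str
    id: str = field(default_factory=lambda: uuid4().hex)


class PreferredDatasetSketch:
    """Stripped-down stand-in for DatasetStore's preferred-dataset handling."""

    def __init__(self) -> None:
        self.datasets: dict[str, Entry] = {}
        self.preferred_dataset: str | None = None

    def add(self, source: str, tlv: str) -> None:
        entry = Entry(source=source, tlv=tlv)
        self.datasets[entry.id] = entry
        # Set to preferred if there is no preferred dataset yet; later
        # additions never take over the preferred slot.
        if self.preferred_dataset is None:
            self.preferred_dataset = entry.id

    def to_save(self) -> dict:
        # Mirrors _data_to_save: the dataset entries plus the preferred id.
        return {
            "datasets": [vars(entry) for entry in self.datasets.values()],
            "preferred_dataset": self.preferred_dataset,
        }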

View File

@@ -85,6 +85,7 @@ async def ws_list_datasets(
     store = await dataset_store.async_get_store(hass)

     result = []
+    preferred_dataset = store.preferred_dataset
     for dataset in store.datasets.values():
         result.append(
             {
@@ -93,7 +94,7 @@
                 "extended_pan_id": dataset.extended_pan_id,
                 "network_name": dataset.network_name,
                 "pan_id": dataset.pan_id,
-                "preferred": dataset.preferred,
+                "preferred": dataset.id == preferred_dataset,
                 "source": dataset.source,
             }
         )
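
A rough sketch of the same idea on top of the PreferredDatasetSketch above: each entry's "preferred" boolean is now computed by comparing its id against the store-level value (key names are abbreviated for the sketch; the real handler also returns the network/PAN fields shown in the hunk above):

def list_datasets(store: "PreferredDatasetSketch") -> list[dict]:
    """Mirror ws_list_datasets: 'preferred' is computed, not stored."""
    preferred_dataset = store.preferred_dataset
    return [
        {
            "dataset_id": dataset.id,
            "preferred": dataset.id == preferred_dataset,
            "source": dataset.source,
        }
        for dataset in store.datasets.values()
    ]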

View File

@@ -122,9 +122,7 @@ async def test_load_datasets(hass: HomeAssistant) -> None:
         if dataset.source == "🎅":
             dataset_3_store_1 = dataset

-    assert dataset_1_store_1.preferred
-    assert not dataset_2_store_1.preferred
-    assert not dataset_3_store_1.preferred
+    assert store1.preferred_dataset == dataset_1_store_1.id

     store2 = dataset_store.DatasetStore(hass)
     await flush_store(store1._store)
@@ -135,12 +133,15 @@ async def test_load_datasets(hass: HomeAssistant) -> None:
     for dataset in store2.datasets.values():
         if dataset.source == "Google":
             dataset_1_store_2 = dataset
+        if dataset.source == "Multipan":
+            dataset_2_store_2 = dataset
         if dataset.source == "🎅":
             dataset_3_store_2 = dataset

     assert list(store1.datasets) == list(store2.datasets)
     assert dataset_1_store_1 == dataset_1_store_2
+    assert dataset_2_store_1 == dataset_2_store_2
     assert dataset_3_store_1 == dataset_3_store_2
@@ -154,27 +155,26 @@ async def test_loading_datasets_from_storage(hass: HomeAssistant, hass_storage)
                 {
                     "created": "2023-02-02T09:41:13.746514+00:00",
                     "id": "id1",
-                    "preferred": True,
                     "source": "source_1",
                     "tlv": "DATASET_1",
                 },
                 {
                     "created": "2023-02-02T09:41:13.746514+00:00",
                     "id": "id2",
-                    "preferred": True,
                     "source": "source_2",
                     "tlv": "DATASET_2",
                 },
                 {
                     "created": "2023-02-02T09:41:13.746514+00:00",
                     "id": "id3",
-                    "preferred": True,
                     "source": "source_3",
                     "tlv": "DATASET_3",
                 },
-            ]
+            ],
+            "preferred_dataset": "id1",
         },
     }

     store = await dataset_store.async_get_store(hass)
     assert len(store.datasets) == 3
+    assert store.preferred_dataset == "id1"
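
Finally, a short usage example of the sketch classes from the earlier sections, mirroring what the updated tests assert: the first dataset added becomes preferred, and the preferred id round-trips through the saved data.

store = PreferredDatasetSketch()
store.add(source="Google", tlv="DATASET_1")
store.add(source="Multipan", tlv="DATASET_2")

first_id = next(iter(store.datasets))
assert store.preferred_dataset == first_id          # only the first add sets it
assert store.to_save()["preferred_dataset"] == first_id
assert list_datasets(store)[0]["preferred"] is True
assert list_datasets(store)[1]["preferred"] is False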