Mirror of https://github.com/home-assistant/core.git
Synced 2026-03-24 13:08:42 +00:00

Compare commits: remove-int...add_condit
1 commit
| Author | SHA1 | Date |
|---|---|---|
| | 007d3640b3 | |
.github/workflows/builder.yml (vendored, 5 changes)
@@ -224,6 +224,7 @@ jobs:
      matrix:
        machine:
          - generic-x86-64
          - intel-nuc
          - khadas-vim3
          - odroid-c2
          - odroid-c4
@@ -247,6 +248,10 @@ jobs:
          - machine: qemux86-64
            arch: amd64
            runs-on: ubuntu-24.04
          # TODO: remove, intel-nuc is a legacy name for x86-64, renamed in 2021
          - machine: intel-nuc
            arch: amd64
            runs-on: ubuntu-24.04
    steps:
      - name: Checkout the repository
        uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2

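As a rough illustration (not part of the change itself), the two hunks above boil down to a machine-to-runner mapping: intel-nuc is kept as a legacy alias that builds the same amd64 image on the same runner as the current x86-64 machines. The dictionary below is a hedged Python sketch of that resolution; machines without an explicit include entry fall back to the workflow defaults, which are not shown in this hunk.

# Illustrative only: the machine -> (arch, runner) resolution implied by the
# matrix include entries above.
MACHINE_OVERRIDES: dict[str, dict[str, str]] = {
    "qemux86-64": {"arch": "amd64", "runs-on": "ubuntu-24.04"},
    # Legacy alias: intel-nuc was renamed to generic-x86-64 in 2021 but is
    # still built so existing installations keep receiving images.
    "intel-nuc": {"arch": "amd64", "runs-on": "ubuntu-24.04"},
}
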
@@ -138,7 +138,6 @@ class CloudBackupAgent(BackupAgent):
                    base64md5hash=base64md5hash,
                    metadata=metadata,
                    size=size,
                    on_progress=on_progress,
                )
                break
            except CloudApiNonRetryableError as err:

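The on_progress keyword passed to the upload call above is a plain callback. The sketch below shows a minimal shape such a callback can take; the keyword-only bytes_uploaded argument and its cumulative meaning are inferred from the test further down (it reports 2, then 4, for a 4-byte backup), while the logging wrapper itself is purely illustrative and not part of the change.

from collections.abc import Callable


def make_progress_logger(total_bytes: int) -> Callable[..., None]:
    """Return an on_progress callback that logs cumulative upload progress.

    Hypothetical helper for illustration; the real agent forwards progress to
    the backup manager instead of printing it.
    """

    def on_progress(*, bytes_uploaded: int) -> None:
        percent = 100 * bytes_uploaded / total_bytes if total_bytes else 100.0
        print(f"uploaded {bytes_uploaded}/{total_bytes} bytes ({percent:.0f}%)")

    return on_progress


# e.g. files.upload(..., size=size, on_progress=make_progress_logger(size))
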
machine/intel-nuc (generated, new file, 10 changes)
@@ -0,0 +1,10 @@
# Automatically generated by hassfest.
#
# To update, run python3 -m script.hassfest -p docker
ARG BUILD_FROM=ghcr.io/home-assistant/amd64-homeassistant:latest
FROM ${BUILD_FROM}

RUN apk --no-cache add \
        libva-intel-driver

LABEL io.hass.machine="intel-nuc"

@@ -26,21 +26,95 @@ def exists(value: Any) -> Any:
    return value


def validate_field_schema(condition_schema: dict[str, Any]) -> dict[str, Any]:
    """Validate a field schema including context references."""

    for field_name, field_schema in condition_schema.get("fields", {}).items():
        # Validate context if present
        if "context" in field_schema:
            if CONF_SELECTOR not in field_schema:
                raise vol.Invalid(
                    f"Context defined without a selector in '{field_name}'"
                )

            context = field_schema["context"]
            if not isinstance(context, dict):
                raise vol.Invalid(f"Context must be a dictionary in '{field_name}'")

            # Determine which selector type is being used
            selector_config = field_schema[CONF_SELECTOR]
            selector_class = selector.selector(selector_config)

            for context_key, field_ref in context.items():
                # Check if context key is allowed for this selector type
                allowed_keys = selector_class.allowed_context_keys
                if context_key not in allowed_keys:
                    raise vol.Invalid(
                        f"Invalid context key '{context_key}' for selector type '{selector_class.selector_type}'. "
                        f"Allowed keys: {', '.join(sorted(allowed_keys)) if allowed_keys else 'none'}"
                    )

                # Check if the referenced field exists in condition schema or target
                if not isinstance(field_ref, str):
                    raise vol.Invalid(
                        f"Context value for '{context_key}' must be a string field reference"
                    )

                # Check if field exists in condition schema fields or target
                condition_fields = condition_schema["fields"]
                field_exists = field_ref in condition_fields
                if field_exists and "selector" in condition_fields[field_ref]:
                    # Check if the selector type is allowed for this context key
                    field_selector_config = condition_fields[field_ref][CONF_SELECTOR]
                    field_selector_class = selector.selector(field_selector_config)
                    if field_selector_class.selector_type not in allowed_keys.get(
                        context_key, set()
                    ):
                        raise vol.Invalid(
                            f"The context '{context_key}' for '{field_name}' references '{field_ref}', but '{context_key}' "
                            f"does not allow selectors of type '{field_selector_class.selector_type}'. Allowed selector types: {', '.join(allowed_keys.get(context_key, set()))}"
                        )
                if not field_exists and "target" in condition_schema:
                    # Target is a special field that always exists when defined
                    field_exists = field_ref == "target"
                    if field_exists and "target" not in allowed_keys.get(
                        context_key, set()
                    ):
                        raise vol.Invalid(
                            f"The context '{context_key}' for '{field_name}' references 'target', but '{context_key}' "
                            f"does not allow 'target'. Allowed selector types: {', '.join(allowed_keys.get(context_key, set()))}"
                        )

                if not field_exists:
                    raise vol.Invalid(
                        f"Context reference '{field_ref}' for key '{context_key}' does not exist "
                        f"in condition schema fields or target"
                    )

    return condition_schema

FIELD_SCHEMA = vol.Schema(
    {
        vol.Optional("example"): exists,
        vol.Optional("default"): exists,
        vol.Optional("required"): bool,
        vol.Optional(CONF_SELECTOR): selector.validate_selector,
        vol.Optional("context"): {
            # Key is the context key; value is the name of the field in this
            # schema whose value should be used.
            str: str
        },  # Validated further in validate_field_schema
    }
)


CONDITION_SCHEMA = vol.Any(
    vol.All(
        vol.Schema(
            {
                vol.Optional("target"): selector.TargetSelector.CONFIG_SCHEMA,
                vol.Optional("fields"): vol.Schema({str: FIELD_SCHEMA}),
            }
        ),
        validate_field_schema,
    ),
    None,
)

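As a rough illustration of the shape these schemas validate, here is a hand-written condition description; every concrete value (field names, selector configs, the climate domain) is invented for this sketch and does not come from the change. validate_field_schema would reject it if, for example, "context" appeared without a "selector", if the referenced field did not exist in "fields" or "target", or if that field's selector type were not allowed for the given context key.

# Hypothetical condition schema of the form CONDITION_SCHEMA accepts; all
# concrete values below are made up for illustration.
example_condition = {
    "target": {"entity": {"domain": "climate"}},
    "fields": {
        "entity": {
            "required": True,
            "selector": {"entity": {"domain": "climate"}},
        },
        "hvac_mode": {
            "required": True,
            "selector": {"state": {}},
            # Context key -> name of the field (or "target") whose value the
            # selector should receive.
            "context": {"entity": "entity"},
        },
    },
}

# CONDITION_SCHEMA(example_condition) runs the structural validation and then
# validate_field_schema, which checks that "entity" is an allowed context key
# for the state selector and that the referenced "entity" field exists.
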
@@ -88,6 +88,7 @@ class _MachineConfig:
_MACHINES = {
    "generic-x86-64": _MachineConfig(arch="amd64", packages=("libva-intel-driver",)),
    "green": _MachineConfig(arch="aarch64"),
    "intel-nuc": _MachineConfig(arch="amd64", packages=("libva-intel-driver",)),
    "khadas-vim3": _MachineConfig(arch="aarch64"),
    "odroid-c2": _MachineConfig(arch="aarch64"),
    "odroid-c4": _MachineConfig(arch="aarch64"),

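The generated machine/intel-nuc file shown earlier follows directly from this mapping. Below is a minimal sketch of a renderer that would produce it from a _MachineConfig entry; the function name and templating are assumptions made for illustration, since the actual generator is the hassfest docker plugin invoked with python3 -m script.hassfest -p docker.

from dataclasses import dataclass


@dataclass
class _MachineConfig:
    """Illustrative re-declaration of the config shown above."""

    arch: str
    packages: tuple[str, ...] = ()


def render_machine_dockerfile(machine: str, config: _MachineConfig) -> str:
    """Render a machine Dockerfile like the generated machine/intel-nuc file.

    Hypothetical helper; hassfest's real implementation may differ.
    """
    lines = [
        "# Automatically generated by hassfest.",
        "#",
        "# To update, run python3 -m script.hassfest -p docker",
        f"ARG BUILD_FROM=ghcr.io/home-assistant/{config.arch}-homeassistant:latest",
        "FROM ${BUILD_FROM}",
    ]
    if config.packages:
        continuation = " \\\n        ".join(config.packages)
        lines += ["", "RUN apk --no-cache add \\", f"        {continuation}"]
    lines += ["", f'LABEL io.hass.machine="{machine}"']
    return "\n".join(lines) + "\n"


intel_nuc = _MachineConfig(arch="amd64", packages=("libva-intel-driver",))
print(render_machine_dockerfile("intel-nuc", intel_nuc))
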
@@ -12,12 +12,10 @@ from hass_nabucasa.files import FilesError, StorageType
import pytest

from homeassistant.components.backup import (
    DATA_MANAGER,
    DOMAIN as BACKUP_DOMAIN,
    AddonInfo,
    AgentBackup,
    Folder,
    UploadBackupEvent,
)
from homeassistant.components.cloud import DOMAIN
from homeassistant.components.cloud.backup import async_register_backup_agents_listener
@@ -355,7 +353,6 @@ async def test_agents_upload(
        base64md5hash=ANY,
        metadata=ANY,
        size=ANY,
        on_progress=ANY,
    )
    metadata = cloud.files.upload.mock_calls[-1].kwargs["metadata"]
    assert metadata["backup_id"] == backup_id
@@ -364,75 +361,6 @@ async def test_agents_upload(
    assert f"Uploading backup {backup_id}" in caplog.text


@pytest.mark.usefixtures("cloud_logged_in", "mock_list_files")
async def test_agents_upload_on_progress(
    hass: HomeAssistant,
    hass_client: ClientSessionGenerator,
    cloud: Mock,
) -> None:
    """Test agent upload backup emits UploadBackupEvent via on_progress."""
    client = await hass_client()
    backup_data = "test"
    backup_id = "test-backup"
    test_backup = AgentBackup(
        addons=[AddonInfo(name="Test", slug="test", version="1.0.0")],
        backup_id=backup_id,
        database_included=True,
        date="1970-01-01T00:00:00.000Z",
        extra_metadata={},
        folders=[Folder.MEDIA, Folder.SHARE],
        homeassistant_included=True,
        homeassistant_version="2024.12.0",
        name="Test",
        protected=True,
        size=len(backup_data),
    )

    async def mock_upload(**kwargs: Any) -> None:
        """Mock upload that calls on_progress."""
        on_progress = kwargs["on_progress"]
        on_progress(bytes_uploaded=2)
        await hass.async_block_till_done()
        on_progress(bytes_uploaded=4)
        await hass.async_block_till_done()

    cloud.files.upload.side_effect = mock_upload

    manager = hass.data[DATA_MANAGER]
    events: list[UploadBackupEvent] = []

    def _collect(event: Any) -> None:
        if isinstance(event, UploadBackupEvent):
            events.append(event)

    unsub = manager.async_subscribe_events(_collect)

    with (
        patch(
            "homeassistant.components.backup.manager.BackupManager.async_get_backup",
        ) as fetch_backup,
        patch(
            "homeassistant.components.backup.manager.read_backup",
            return_value=test_backup,
        ),
        patch("pathlib.Path.open") as mocked_open,
    ):
        mocked_open.return_value.read = Mock(side_effect=[backup_data.encode(), b""])
        fetch_backup.return_value = test_backup
        resp = await client.post(
            "/api/backup/upload?agent_id=cloud.cloud",
            data={"file": StringIO(backup_data)},
        )

    unsub()

    assert resp.status == 201
    cloud_events = [e for e in events if e.agent_id == "cloud.cloud"]
    assert len(cloud_events) >= 1
    assert all(e.total_bytes == len(backup_data) for e in cloud_events)
    assert cloud_events[-1].uploaded_bytes == len(backup_data)


@pytest.mark.parametrize("side_effect", [FilesError("Boom!"), CloudError("Boom!")])
@pytest.mark.usefixtures("cloud_logged_in", "mock_list_files")
async def test_agents_upload_fail(