Merge branch 'homee-switch' of https://github.com/Taraman17/ha-core into homee-switch

This commit is contained in:
Taraman17 2025-02-18 15:17:53 +00:00
commit 5ae663b1a9
15 changed files with 384 additions and 38 deletions

View File

@ -16,7 +16,7 @@ if TYPE_CHECKING:
STORE_DELAY_SAVE = 30
STORAGE_KEY = DOMAIN
STORAGE_VERSION = 1
STORAGE_VERSION_MINOR = 3
STORAGE_VERSION_MINOR = 4
class StoredBackupData(TypedDict):
@ -60,6 +60,13 @@ class _BackupStore(Store[StoredBackupData]):
else:
data["config"]["schedule"]["days"] = [state]
data["config"]["schedule"]["recurrence"] = "custom_days"
if old_minor_version < 4:
# Workaround for a bug in the frontend which incorrectly set the retention
# copies and days to 0 instead of to None for unlimited retention.
if data["config"]["retention"]["copies"] == 0:
data["config"]["retention"]["copies"] = None
if data["config"]["retention"]["days"] == 0:
data["config"]["retention"]["days"] = None
# Note: We allow reading data with major version 2.
# Reject if major version is higher than 2.

View File

@ -368,8 +368,10 @@ async def handle_config_info(
),
vol.Optional("retention"): vol.Schema(
{
vol.Optional("copies"): vol.Any(int, None),
vol.Optional("days"): vol.Any(int, None),
# Note: We can't use cv.positive_int because it allows 0 even
# though 0 is not positive.
vol.Optional("copies"): vol.Any(vol.All(int, vol.Range(min=1)), None),
vol.Optional("days"): vol.Any(vol.All(int, vol.Range(min=1)), None),
},
),
vol.Optional("schedule"): vol.Schema(

View File

@ -10,17 +10,17 @@
},
"period": {
"name": "Period",
"description": "A period of time in days; used only with Away, DayOff, or Custom. The system will revert to Auto at midnight (up to 99 days, today is day 1)."
"description": "A period of time in days; used only with Away, DayOff, or Custom mode. The system will revert to Auto mode at midnight (up to 99 days, today is day 1)."
},
"duration": {
"name": "Duration",
"description": "The duration in hours; used only with AutoWithEco (up to 24 hours)."
"description": "The duration in hours; used only with AutoWithEco mode (up to 24 hours)."
}
}
},
"reset_system": {
"name": "Reset system",
"description": "Sets the system to Auto mode and reset all the zones to follow their schedules. Not all Evohome systems support this feature (i.e. AutoWithReset mode)."
"description": "Sets the system to Auto mode and resets all the zones to follow their schedules. Not all Evohome systems support this feature (i.e. AutoWithReset mode)."
},
"refresh_system": {
"name": "Refresh system",

View File

@ -5,5 +5,5 @@
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/rympro",
"iot_class": "cloud_polling",
"requirements": ["pyrympro==0.0.8"]
"requirements": ["pyrympro==0.0.9"]
}

View File

@ -170,6 +170,7 @@ MODELS_TV_ONLY = (
"BEAM",
"PLAYBAR",
"PLAYBASE",
"ULTRA",
)
MODELS_LINEIN_AND_TV = ("AMP",)

View File

@ -805,7 +805,7 @@ async def websocket_add_node(
]
msg[DATA_UNSUBSCRIBE] = unsubs
if controller.inclusion_state == InclusionState.INCLUDING:
if controller.inclusion_state in (InclusionState.INCLUDING, InclusionState.BUSY):
connection.send_result(
msg[ID],
True, # Inclusion is already in progress
@ -883,6 +883,11 @@ async def websocket_subscribe_s2_inclusion(
) -> None:
"""Subscribe to S2 inclusion initiated by the controller."""
@callback
def async_cleanup() -> None:
for unsub in unsubs:
unsub()
@callback
def forward_dsk(event: dict) -> None:
connection.send_message(
@ -891,9 +896,18 @@ async def websocket_subscribe_s2_inclusion(
)
)
unsub = driver.controller.on("validate dsk and enter pin", forward_dsk)
connection.subscriptions[msg["id"]] = unsub
msg[DATA_UNSUBSCRIBE] = [unsub]
@callback
def handle_requested_grant(event: dict) -> None:
"""Accept the requested security classes without user interaction."""
hass.async_create_task(
driver.controller.async_grant_security_classes(event["requested_grant"])
)
connection.subscriptions[msg["id"]] = async_cleanup
msg[DATA_UNSUBSCRIBE] = unsubs = [
driver.controller.on("grant security classes", handle_requested_grant),
driver.controller.on("validate dsk and enter pin", forward_dsk),
]
connection.send_result(msg[ID])

2
requirements_all.txt generated
View File

@ -2259,7 +2259,7 @@ pyrituals==0.0.6
pyroute2==0.7.5
# homeassistant.components.rympro
pyrympro==0.0.8
pyrympro==0.0.9
# homeassistant.components.sabnzbd
pysabnzbd==1.1.1

View File

@ -1843,7 +1843,7 @@ pyrituals==0.0.6
pyroute2==0.7.5
# homeassistant.components.rympro
pyrympro==0.0.8
pyrympro==0.0.9
# homeassistant.components.sabnzbd
pysabnzbd==1.1.1

View File

@ -39,7 +39,7 @@
}),
}),
'key': 'backup',
'minor_version': 3,
'minor_version': 4,
'version': 1,
})
# ---
@ -84,11 +84,100 @@
}),
}),
'key': 'backup',
'minor_version': 3,
'minor_version': 4,
'version': 1,
})
# ---
# name: test_store_migration[store_data1]
dict({
'data': dict({
'backups': list([
dict({
'backup_id': 'abc123',
'failed_agent_ids': list([
'test.remote',
]),
}),
]),
'config': dict({
'agents': dict({
}),
'create_backup': dict({
'agent_ids': list([
]),
'include_addons': None,
'include_all_addons': False,
'include_database': True,
'include_folders': None,
'name': None,
'password': None,
}),
'last_attempted_automatic_backup': None,
'last_completed_automatic_backup': None,
'retention': dict({
'copies': None,
'days': None,
}),
'schedule': dict({
'days': list([
]),
'recurrence': 'never',
'state': 'never',
'time': None,
}),
}),
}),
'key': 'backup',
'minor_version': 4,
'version': 1,
})
# ---
# name: test_store_migration[store_data1].1
dict({
'data': dict({
'backups': list([
dict({
'backup_id': 'abc123',
'failed_agent_ids': list([
'test.remote',
]),
}),
]),
'config': dict({
'agents': dict({
}),
'create_backup': dict({
'agent_ids': list([
'test-agent',
]),
'include_addons': None,
'include_all_addons': False,
'include_database': True,
'include_folders': None,
'name': None,
'password': None,
}),
'last_attempted_automatic_backup': None,
'last_completed_automatic_backup': None,
'retention': dict({
'copies': None,
'days': None,
}),
'schedule': dict({
'days': list([
]),
'recurrence': 'never',
'state': 'never',
'time': None,
}),
}),
}),
'key': 'backup',
'minor_version': 4,
'version': 1,
})
# ---
# name: test_store_migration[store_data2]
dict({
'data': dict({
'backups': list([
@ -131,11 +220,11 @@
}),
}),
'key': 'backup',
'minor_version': 3,
'minor_version': 4,
'version': 1,
})
# ---
# name: test_store_migration[store_data1].1
# name: test_store_migration[store_data2].1
dict({
'data': dict({
'backups': list([
@ -179,7 +268,7 @@
}),
}),
'key': 'backup',
'minor_version': 3,
'minor_version': 4,
'version': 1,
})
# ---

View File

@ -686,7 +686,7 @@
}),
}),
'key': 'backup',
'minor_version': 3,
'minor_version': 4,
'version': 1,
})
# ---
@ -800,7 +800,7 @@
}),
}),
'key': 'backup',
'minor_version': 3,
'minor_version': 4,
'version': 1,
})
# ---
@ -914,7 +914,7 @@
}),
}),
'key': 'backup',
'minor_version': 3,
'minor_version': 4,
'version': 1,
})
# ---
@ -1038,7 +1038,7 @@
}),
}),
'key': 'backup',
'minor_version': 3,
'minor_version': 4,
'version': 1,
})
# ---
@ -1205,7 +1205,7 @@
}),
}),
'key': 'backup',
'minor_version': 3,
'minor_version': 4,
'version': 1,
})
# ---
@ -1319,7 +1319,7 @@
}),
}),
'key': 'backup',
'minor_version': 3,
'minor_version': 4,
'version': 1,
})
# ---
@ -1435,7 +1435,7 @@
}),
}),
'key': 'backup',
'minor_version': 3,
'minor_version': 4,
'version': 1,
})
# ---
@ -1549,7 +1549,7 @@
}),
}),
'key': 'backup',
'minor_version': 3,
'minor_version': 4,
'version': 1,
})
# ---
@ -1667,7 +1667,7 @@
}),
}),
'key': 'backup',
'minor_version': 3,
'minor_version': 4,
'version': 1,
})
# ---
@ -1789,7 +1789,7 @@
}),
}),
'key': 'backup',
'minor_version': 3,
'minor_version': 4,
'version': 1,
})
# ---
@ -1903,7 +1903,7 @@
}),
}),
'key': 'backup',
'minor_version': 3,
'minor_version': 4,
'version': 1,
})
# ---
@ -2017,7 +2017,7 @@
}),
}),
'key': 'backup',
'minor_version': 3,
'minor_version': 4,
'version': 1,
})
# ---
@ -2131,7 +2131,7 @@
}),
}),
'key': 'backup',
'minor_version': 3,
'minor_version': 4,
'version': 1,
})
# ---
@ -2245,7 +2245,7 @@
}),
}),
'key': 'backup',
'minor_version': 3,
'minor_version': 4,
'version': 1,
})
# ---
@ -2323,6 +2323,154 @@
'type': 'result',
})
# ---
# name: test_config_update_errors[command10]
dict({
'id': 1,
'result': dict({
'config': dict({
'agents': dict({
}),
'create_backup': dict({
'agent_ids': list([
]),
'include_addons': None,
'include_all_addons': False,
'include_database': True,
'include_folders': None,
'name': None,
'password': None,
}),
'last_attempted_automatic_backup': None,
'last_completed_automatic_backup': None,
'next_automatic_backup': None,
'next_automatic_backup_additional': False,
'retention': dict({
'copies': None,
'days': None,
}),
'schedule': dict({
'days': list([
]),
'recurrence': 'never',
'time': None,
}),
}),
}),
'success': True,
'type': 'result',
})
# ---
# name: test_config_update_errors[command10].1
dict({
'id': 3,
'result': dict({
'config': dict({
'agents': dict({
}),
'create_backup': dict({
'agent_ids': list([
]),
'include_addons': None,
'include_all_addons': False,
'include_database': True,
'include_folders': None,
'name': None,
'password': None,
}),
'last_attempted_automatic_backup': None,
'last_completed_automatic_backup': None,
'next_automatic_backup': None,
'next_automatic_backup_additional': False,
'retention': dict({
'copies': None,
'days': None,
}),
'schedule': dict({
'days': list([
]),
'recurrence': 'never',
'time': None,
}),
}),
}),
'success': True,
'type': 'result',
})
# ---
# name: test_config_update_errors[command11]
dict({
'id': 1,
'result': dict({
'config': dict({
'agents': dict({
}),
'create_backup': dict({
'agent_ids': list([
]),
'include_addons': None,
'include_all_addons': False,
'include_database': True,
'include_folders': None,
'name': None,
'password': None,
}),
'last_attempted_automatic_backup': None,
'last_completed_automatic_backup': None,
'next_automatic_backup': None,
'next_automatic_backup_additional': False,
'retention': dict({
'copies': None,
'days': None,
}),
'schedule': dict({
'days': list([
]),
'recurrence': 'never',
'time': None,
}),
}),
}),
'success': True,
'type': 'result',
})
# ---
# name: test_config_update_errors[command11].1
dict({
'id': 3,
'result': dict({
'config': dict({
'agents': dict({
}),
'create_backup': dict({
'agent_ids': list([
]),
'include_addons': None,
'include_all_addons': False,
'include_database': True,
'include_folders': None,
'name': None,
'password': None,
}),
'last_attempted_automatic_backup': None,
'last_completed_automatic_backup': None,
'next_automatic_backup': None,
'next_automatic_backup_additional': False,
'retention': dict({
'copies': None,
'days': None,
}),
'schedule': dict({
'days': list([
]),
'recurrence': 'never',
'time': None,
}),
}),
}),
'success': True,
'type': 'result',
})
# ---
# name: test_config_update_errors[command1]
dict({
'id': 1,

View File

@ -57,6 +57,38 @@ def mock_delay_save() -> Generator[None]:
"key": DOMAIN,
"version": 1,
},
{
"data": {
"backups": [
{
"backup_id": "abc123",
"failed_agent_ids": ["test.remote"],
}
],
"config": {
"create_backup": {
"agent_ids": [],
"include_addons": None,
"include_all_addons": False,
"include_database": True,
"include_folders": None,
"name": None,
"password": None,
},
"last_attempted_automatic_backup": None,
"last_completed_automatic_backup": None,
"retention": {
"copies": 0,
"days": 0,
},
"schedule": {
"state": "never",
},
},
},
"key": DOMAIN,
"version": 1,
},
{
"data": {
"backups": [

View File

@ -1361,6 +1361,14 @@ async def test_config_update(
"type": "backup/config/update",
"agents": {"test-agent1": {"favorite": True}},
},
{
"type": "backup/config/update",
"retention": {"copies": 0},
},
{
"type": "backup/config/update",
"retention": {"days": 0},
},
],
)
async def test_config_update_errors(
@ -2158,7 +2166,7 @@ async def test_config_schedule_logic(
{
"type": "backup/config/update",
"create_backup": {"agent_ids": ["test.test-agent"]},
"retention": {"copies": 0, "days": None},
"retention": {"copies": 1, "days": None},
"schedule": {"recurrence": "daily"},
},
{
@ -2232,7 +2240,7 @@ async def test_config_schedule_logic(
{
"type": "backup/config/update",
"create_backup": {"agent_ids": ["test.test-agent"]},
"retention": {"copies": 0, "days": None},
"retention": {"copies": 1, "days": None},
"schedule": {"recurrence": "daily"},
},
{
@ -2301,7 +2309,7 @@ async def test_config_schedule_logic(
{
"type": "backup/config/update",
"create_backup": {"agent_ids": ["test.test-agent"]},
"retention": {"copies": 0, "days": None},
"retention": {"copies": 1, "days": None},
"schedule": {"recurrence": "daily"},
},
{
@ -3019,7 +3027,7 @@ async def test_config_retention_copies_logic_manual_backup(
{
"type": "backup/config/update",
"create_backup": {"agent_ids": ["test-agent"]},
"retention": {"copies": None, "days": 0},
"retention": {"copies": None, "days": 1},
"schedule": {"recurrence": "never"},
}
],

View File

@ -580,13 +580,19 @@ def alarm_clock_fixture_extended():
return alarm_clock
@pytest.fixture(name="speaker_model")
def speaker_model_fixture(request: pytest.FixtureRequest):
"""Create fixture for the speaker model."""
return getattr(request, "param", "Model Name")
@pytest.fixture(name="speaker_info")
def speaker_info_fixture():
def speaker_info_fixture(speaker_model):
"""Create speaker_info fixture."""
return {
"zone_name": "Zone A",
"uid": "RINCON_test",
"model_name": "Model Name",
"model_name": speaker_model,
"model_number": "S12",
"hardware_version": "1.20.1.6-1.1",
"software_version": "49.2-64250",

View File

@ -10,6 +10,7 @@ from syrupy import SnapshotAssertion
from homeassistant.components.media_player import (
ATTR_INPUT_SOURCE,
ATTR_INPUT_SOURCE_LIST,
ATTR_MEDIA_ANNOUNCE,
ATTR_MEDIA_CONTENT_ID,
ATTR_MEDIA_CONTENT_TYPE,
@ -1205,3 +1206,27 @@ async def test_media_get_queue(
)
soco_mock.get_queue.assert_called_with(max_items=0)
assert result == snapshot
@pytest.mark.parametrize(
("speaker_model", "source_list"),
[
("Sonos Arc Ultra", [SOURCE_TV]),
("Sonos Arc", [SOURCE_TV]),
("Sonos Playbar", [SOURCE_TV]),
("Sonos Connect", [SOURCE_LINEIN]),
("Sonos Play:5", [SOURCE_LINEIN]),
("Sonos Amp", [SOURCE_LINEIN, SOURCE_TV]),
("Sonos Era", None),
],
indirect=["speaker_model"],
)
async def test_media_source_list(
hass: HomeAssistant,
async_autosetup_sonos,
speaker_model: str,
source_list: list[str] | None,
) -> None:
"""Test the mapping between the speaker model name and source_list."""
state = hass.states.get("media_player.zone_a")
assert state.attributes.get(ATTR_INPUT_SOURCE_LIST) == source_list

View File

@ -5284,6 +5284,20 @@ async def test_subscribe_s2_inclusion(
assert msg["success"]
assert msg["result"] is None
# Test receiving requested grant event
event = Event(
type="grant security classes",
data={
"source": "controller",
"event": "grant security classes",
"requested": {
"securityClasses": [SecurityClass.S2_UNAUTHENTICATED],
"clientSideAuth": False,
},
},
)
client.driver.receive_event(event)
# Test receiving DSK request event
event = Event(
type="validate dsk and enter pin",