Jonh Sady 2025-02-18 18:38:53 -03:00
commit 3b4508f98f
30 changed files with 488 additions and 95 deletions


@ -16,7 +16,7 @@ if TYPE_CHECKING:
STORE_DELAY_SAVE = 30
STORAGE_KEY = DOMAIN
STORAGE_VERSION = 1
STORAGE_VERSION_MINOR = 3
STORAGE_VERSION_MINOR = 4
class StoredBackupData(TypedDict):
@ -60,6 +60,13 @@ class _BackupStore(Store[StoredBackupData]):
else:
data["config"]["schedule"]["days"] = [state]
data["config"]["schedule"]["recurrence"] = "custom_days"
if old_minor_version < 4:
# Workaround for a bug in frontend which incorrectly set days to 0
# instead of to None for unlimited retention.
if data["config"]["retention"]["copies"] == 0:
data["config"]["retention"]["copies"] = None
if data["config"]["retention"]["days"] == 0:
data["config"]["retention"]["days"] = None
# Note: We allow reading data with major version 2.
# Reject if major version is higher than 2.
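
The minor-version-4 step above only normalizes the stored retention values. Below is a minimal standalone sketch of that normalization; the dict shape is assumed from this diff rather than taken from the real StoredBackupData definition, and the helper name is made up.

def _migrate_retention_to_v4(data: dict) -> dict:
    """Map the frontend's erroneous 0 values back to None (unlimited)."""
    retention = data["config"]["retention"]
    if retention["copies"] == 0:
        retention["copies"] = None
    if retention["days"] == 0:
        retention["days"] = None
    return data

migrated = _migrate_retention_to_v4({"config": {"retention": {"copies": 0, "days": 3}}})
assert migrated["config"]["retention"] == {"copies": None, "days": 3}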


@ -368,8 +368,10 @@ async def handle_config_info(
),
vol.Optional("retention"): vol.Schema(
{
vol.Optional("copies"): vol.Any(int, None),
vol.Optional("days"): vol.Any(int, None),
# Note: We can't use cv.positive_int because it allows 0 even
# though 0 is not positive.
vol.Optional("copies"): vol.Any(vol.All(int, vol.Range(min=1)), None),
vol.Optional("days"): vol.Any(vol.All(int, vol.Range(min=1)), None),
},
),
vol.Optional("schedule"): vol.Schema(
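
The note above explains why cv.positive_int cannot be used here. As a quick standalone check of the replacement validator (plain voluptuous, outside the websocket schema): positive integers and None pass, 0 is rejected.

import voluptuous as vol

retention_value = vol.Any(vol.All(int, vol.Range(min=1)), None)

assert retention_value(1) == 1
assert retention_value(None) is None
try:
    retention_value(0)  # 0 is rejected; unlimited retention must be expressed as None
except vol.Invalid:
    pass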


@ -30,7 +30,7 @@
"message": "Can't set preset mode to {preset_mode} when HVAC mode is not set to auto"
},
"set_data_error": {
"message": "An error occurred while sending the data to the BSBLAN device"
"message": "An error occurred while sending the data to the BSB-Lan device"
},
"set_temperature_error": {
"message": "An error occurred while setting the temperature"


@ -16,7 +16,7 @@
"loggers": ["aioesphomeapi", "noiseprotocol", "bleak_esphome"],
"mqtt": ["esphome/discover/#"],
"requirements": [
"aioesphomeapi==29.0.2",
"aioesphomeapi==29.1.0",
"esphome-dashboard-api==1.2.3",
"bleak-esphome==2.7.1"
],


@ -10,17 +10,17 @@
},
"period": {
"name": "Period",
"description": "A period of time in days; used only with Away, DayOff, or Custom. The system will revert to Auto at midnight (up to 99 days, today is day 1)."
"description": "A period of time in days; used only with Away, DayOff, or Custom mode. The system will revert to Auto mode at midnight (up to 99 days, today is day 1)."
},
"duration": {
"name": "Duration",
"description": "The duration in hours; used only with AutoWithEco (up to 24 hours)."
"description": "The duration in hours; used only with AutoWithEco mode (up to 24 hours)."
}
}
},
"reset_system": {
"name": "Reset system",
"description": "Sets the system to Auto mode and reset all the zones to follow their schedules. Not all Evohome systems support this feature (i.e. AutoWithReset mode)."
"description": "Sets the system to Auto mode and resets all the zones to follow their schedules. Not all Evohome systems support this feature (i.e. AutoWithReset mode)."
},
"refresh_system": {
"name": "Refresh system",


@ -18,7 +18,7 @@
"data_description": {
"kamereon_account_id": "The Kamereon account ID associated with your vehicle"
},
"title": "Kamereon Account ID",
"title": "Kamereon account ID",
"description": "You have multiple Kamereon accounts associated to this email, please select one"
},
"reauth_confirm": {
@ -228,10 +228,10 @@
},
"exceptions": {
"invalid_device_id": {
"message": "No device with id {device_id} was found"
"message": "No device with ID {device_id} was found"
},
"no_config_entry_for_device": {
"message": "No loaded config entry was found for device with id {device_id}"
"message": "No loaded config entry was found for device with ID {device_id}"
}
}
}


@ -222,7 +222,7 @@ class ReolinkVODMediaSource(MediaSource):
if main_enc == "h265":
_LOGGER.debug(
"Reolink camera %s uses h265 encoding for main stream,"
"playback only possible using sub stream",
"playback at high resolution may not work in all browsers/apps",
host.api.camera_name(channel),
)
@ -236,34 +236,29 @@ class ReolinkVODMediaSource(MediaSource):
can_play=False,
can_expand=True,
),
BrowseMediaSource(
domain=DOMAIN,
identifier=f"RES|{config_entry_id}|{channel}|main",
media_class=MediaClass.CHANNEL,
media_content_type=MediaType.PLAYLIST,
title="High resolution",
can_play=False,
can_expand=True,
),
]
if main_enc != "h265":
children.append(
BrowseMediaSource(
domain=DOMAIN,
identifier=f"RES|{config_entry_id}|{channel}|main",
media_class=MediaClass.CHANNEL,
media_content_type=MediaType.PLAYLIST,
title="High resolution",
can_play=False,
can_expand=True,
),
)
if host.api.supported(channel, "autotrack_stream"):
children.append(
BrowseMediaSource(
domain=DOMAIN,
identifier=f"RES|{config_entry_id}|{channel}|autotrack_sub",
media_class=MediaClass.CHANNEL,
media_content_type=MediaType.PLAYLIST,
title="Autotrack low resolution",
can_play=False,
can_expand=True,
),
)
if main_enc != "h265":
children.append(
children.extend(
[
BrowseMediaSource(
domain=DOMAIN,
identifier=f"RES|{config_entry_id}|{channel}|autotrack_sub",
media_class=MediaClass.CHANNEL,
media_content_type=MediaType.PLAYLIST,
title="Autotrack low resolution",
can_play=False,
can_expand=True,
),
BrowseMediaSource(
domain=DOMAIN,
identifier=f"RES|{config_entry_id}|{channel}|autotrack_main",
@ -273,11 +268,7 @@ class ReolinkVODMediaSource(MediaSource):
can_play=False,
can_expand=True,
),
)
if len(children) == 1:
return await self._async_generate_camera_days(
config_entry_id, channel, "sub"
]
)
title = host.api.camera_name(channel)
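
Net effect of the restructuring above, as a rough standalone sketch (identifiers follow the RES|entry|channel|stream pattern visible in the diff; the helper name is hypothetical): the sub and main playlists are always offered, even for h265, and the autotrack pair is appended only when the camera supports it.

def browse_child_ids(config_entry_id: str, channel: int, autotrack_supported: bool) -> list[str]:
    """Hypothetical helper mirroring the children list built above."""
    ids = [
        f"RES|{config_entry_id}|{channel}|sub",   # Low resolution
        f"RES|{config_entry_id}|{channel}|main",  # High resolution, now offered for h265 too
    ]
    if autotrack_supported:
        ids += [
            f"RES|{config_entry_id}|{channel}|autotrack_sub",
            f"RES|{config_entry_id}|{channel}|autotrack_main",
        ]
    return ids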


@ -5,5 +5,5 @@
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/rympro",
"iot_class": "cloud_polling",
"requirements": ["pyrympro==0.0.8"]
"requirements": ["pyrympro==0.0.9"]
}


@ -170,6 +170,7 @@ MODELS_TV_ONLY = (
"BEAM",
"PLAYBAR",
"PLAYBASE",
"ULTRA",
)
MODELS_LINEIN_AND_TV = ("AMP",)


@ -35,7 +35,9 @@ async def async_get_config_entry_diagnostics(
vehicles = [
{
"data": async_redact_data(x.coordinator.data, VEHICLE_REDACT),
# Stream diag will go here when implemented
"stream": {
"config": x.stream_vehicle.config,
},
}
for x in entry.runtime_data.vehicles
]
@ -45,6 +47,7 @@ async def async_get_config_entry_diagnostics(
if x.live_coordinator
else None,
"info": async_redact_data(x.info_coordinator.data, ENERGY_INFO_REDACT),
"history": x.history_coordinator.data if x.history_coordinator else None,
}
for x in entry.runtime_data.energysites
]


@ -11,5 +11,5 @@
"documentation": "https://www.home-assistant.io/integrations/vicare",
"iot_class": "cloud_polling",
"loggers": ["PyViCare"],
"requirements": ["PyViCare==2.43.0"]
"requirements": ["PyViCare==2.43.1"]
}


@ -8,5 +8,5 @@
"iot_class": "local_push",
"loggers": ["zeroconf"],
"quality_scale": "internal",
"requirements": ["zeroconf==0.144.3"]
"requirements": ["zeroconf==0.145.1"]
}


@ -805,7 +805,7 @@ async def websocket_add_node(
]
msg[DATA_UNSUBSCRIBE] = unsubs
if controller.inclusion_state == InclusionState.INCLUDING:
if controller.inclusion_state in (InclusionState.INCLUDING, InclusionState.BUSY):
connection.send_result(
msg[ID],
True, # Inclusion is already in progress
@ -883,6 +883,11 @@ async def websocket_subscribe_s2_inclusion(
) -> None:
"""Subscribe to S2 inclusion initiated by the controller."""
@callback
def async_cleanup() -> None:
for unsub in unsubs:
unsub()
@callback
def forward_dsk(event: dict) -> None:
connection.send_message(
@ -891,9 +896,18 @@ async def websocket_subscribe_s2_inclusion(
)
)
unsub = driver.controller.on("validate dsk and enter pin", forward_dsk)
connection.subscriptions[msg["id"]] = unsub
msg[DATA_UNSUBSCRIBE] = [unsub]
@callback
def handle_requested_grant(event: dict) -> None:
"""Accept the requested security classes without user interaction."""
hass.async_create_task(
driver.controller.async_grant_security_classes(event["requested_grant"])
)
connection.subscriptions[msg["id"]] = async_cleanup
msg[DATA_UNSUBSCRIBE] = unsubs = [
driver.controller.on("grant security classes", handle_requested_grant),
driver.controller.on("validate dsk and enter pin", forward_dsk),
]
connection.send_result(msg[ID])
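
The subscription now registers two listeners instead of one, so a single cleanup callback has to unsubscribe both. A generic sketch of that pattern (names hypothetical, not the integration's API):

from collections.abc import Callable

def combine_unsubscribes(*unsubs: Callable[[], None]) -> Callable[[], None]:
    """Return one callback that tears down every listed subscription."""
    def cleanup() -> None:
        for unsub in unsubs:
            unsub()
    return cleanup

# Usage sketch: store it the same way a single unsubscribe callback used to be stored.
# connection.subscriptions[msg_id] = combine_unsubscribes(unsub_grant, unsub_dsk)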


@ -73,7 +73,7 @@ voluptuous-serialize==2.6.0
voluptuous==0.15.2
webrtc-models==0.3.0
yarl==1.18.3
zeroconf==0.144.3
zeroconf==0.145.1
# Constrain pycryptodome to avoid vulnerability
# see https://github.com/home-assistant/core/pull/16238


@ -82,7 +82,7 @@ dependencies = [
"voluptuous-openapi==0.0.6",
"yarl==1.18.3",
"webrtc-models==0.3.0",
"zeroconf==0.144.3"
"zeroconf==0.145.1"
]
[project.urls]

requirements.txt (generated, 2 changed lines)

@ -51,4 +51,4 @@ voluptuous-serialize==2.6.0
voluptuous-openapi==0.0.6
yarl==1.18.3
webrtc-models==0.3.0
zeroconf==0.144.3
zeroconf==0.145.1

requirements_all.txt (generated, 8 changed lines)

@ -100,7 +100,7 @@ PyTransportNSW==0.1.1
PyTurboJPEG==1.7.5
# homeassistant.components.vicare
PyViCare==2.43.0
PyViCare==2.43.1
# homeassistant.components.xiaomi_aqara
PyXiaomiGateway==0.14.3
@ -243,7 +243,7 @@ aioelectricitymaps==0.4.0
aioemonitor==1.0.5
# homeassistant.components.esphome
aioesphomeapi==29.0.2
aioesphomeapi==29.1.0
# homeassistant.components.flo
aioflo==2021.11.0
@ -2262,7 +2262,7 @@ pyrituals==0.0.6
pyroute2==0.7.5
# homeassistant.components.rympro
pyrympro==0.0.8
pyrympro==0.0.9
# homeassistant.components.sabnzbd
pysabnzbd==1.1.1
@ -3137,7 +3137,7 @@ zamg==0.3.6
zengge==0.2
# homeassistant.components.zeroconf
zeroconf==0.144.3
zeroconf==0.145.1
# homeassistant.components.zeversolar
zeversolar==0.3.2


@ -94,7 +94,7 @@ PyTransportNSW==0.1.1
PyTurboJPEG==1.7.5
# homeassistant.components.vicare
PyViCare==2.43.0
PyViCare==2.43.1
# homeassistant.components.xiaomi_aqara
PyXiaomiGateway==0.14.3
@ -231,7 +231,7 @@ aioelectricitymaps==0.4.0
aioemonitor==1.0.5
# homeassistant.components.esphome
aioesphomeapi==29.0.2
aioesphomeapi==29.1.0
# homeassistant.components.flo
aioflo==2021.11.0
@ -1843,7 +1843,7 @@ pyrituals==0.0.6
pyroute2==0.7.5
# homeassistant.components.rympro
pyrympro==0.0.8
pyrympro==0.0.9
# homeassistant.components.sabnzbd
pysabnzbd==1.1.1
@ -2520,7 +2520,7 @@ yt-dlp[default]==2025.01.26
zamg==0.3.6
# homeassistant.components.zeroconf
zeroconf==0.144.3
zeroconf==0.145.1
# homeassistant.components.zeversolar
zeversolar==0.3.2


@ -39,7 +39,7 @@
}),
}),
'key': 'backup',
'minor_version': 3,
'minor_version': 4,
'version': 1,
})
# ---
@ -84,11 +84,100 @@
}),
}),
'key': 'backup',
'minor_version': 3,
'minor_version': 4,
'version': 1,
})
# ---
# name: test_store_migration[store_data1]
dict({
'data': dict({
'backups': list([
dict({
'backup_id': 'abc123',
'failed_agent_ids': list([
'test.remote',
]),
}),
]),
'config': dict({
'agents': dict({
}),
'create_backup': dict({
'agent_ids': list([
]),
'include_addons': None,
'include_all_addons': False,
'include_database': True,
'include_folders': None,
'name': None,
'password': None,
}),
'last_attempted_automatic_backup': None,
'last_completed_automatic_backup': None,
'retention': dict({
'copies': None,
'days': None,
}),
'schedule': dict({
'days': list([
]),
'recurrence': 'never',
'state': 'never',
'time': None,
}),
}),
}),
'key': 'backup',
'minor_version': 4,
'version': 1,
})
# ---
# name: test_store_migration[store_data1].1
dict({
'data': dict({
'backups': list([
dict({
'backup_id': 'abc123',
'failed_agent_ids': list([
'test.remote',
]),
}),
]),
'config': dict({
'agents': dict({
}),
'create_backup': dict({
'agent_ids': list([
'test-agent',
]),
'include_addons': None,
'include_all_addons': False,
'include_database': True,
'include_folders': None,
'name': None,
'password': None,
}),
'last_attempted_automatic_backup': None,
'last_completed_automatic_backup': None,
'retention': dict({
'copies': None,
'days': None,
}),
'schedule': dict({
'days': list([
]),
'recurrence': 'never',
'state': 'never',
'time': None,
}),
}),
}),
'key': 'backup',
'minor_version': 4,
'version': 1,
})
# ---
# name: test_store_migration[store_data2]
dict({
'data': dict({
'backups': list([
@ -131,11 +220,11 @@
}),
}),
'key': 'backup',
'minor_version': 3,
'minor_version': 4,
'version': 1,
})
# ---
# name: test_store_migration[store_data1].1
# name: test_store_migration[store_data2].1
dict({
'data': dict({
'backups': list([
@ -179,7 +268,7 @@
}),
}),
'key': 'backup',
'minor_version': 3,
'minor_version': 4,
'version': 1,
})
# ---


@ -686,7 +686,7 @@
}),
}),
'key': 'backup',
'minor_version': 3,
'minor_version': 4,
'version': 1,
})
# ---
@ -800,7 +800,7 @@
}),
}),
'key': 'backup',
'minor_version': 3,
'minor_version': 4,
'version': 1,
})
# ---
@ -914,7 +914,7 @@
}),
}),
'key': 'backup',
'minor_version': 3,
'minor_version': 4,
'version': 1,
})
# ---
@ -1038,7 +1038,7 @@
}),
}),
'key': 'backup',
'minor_version': 3,
'minor_version': 4,
'version': 1,
})
# ---
@ -1205,7 +1205,7 @@
}),
}),
'key': 'backup',
'minor_version': 3,
'minor_version': 4,
'version': 1,
})
# ---
@ -1319,7 +1319,7 @@
}),
}),
'key': 'backup',
'minor_version': 3,
'minor_version': 4,
'version': 1,
})
# ---
@ -1435,7 +1435,7 @@
}),
}),
'key': 'backup',
'minor_version': 3,
'minor_version': 4,
'version': 1,
})
# ---
@ -1549,7 +1549,7 @@
}),
}),
'key': 'backup',
'minor_version': 3,
'minor_version': 4,
'version': 1,
})
# ---
@ -1667,7 +1667,7 @@
}),
}),
'key': 'backup',
'minor_version': 3,
'minor_version': 4,
'version': 1,
})
# ---
@ -1789,7 +1789,7 @@
}),
}),
'key': 'backup',
'minor_version': 3,
'minor_version': 4,
'version': 1,
})
# ---
@ -1903,7 +1903,7 @@
}),
}),
'key': 'backup',
'minor_version': 3,
'minor_version': 4,
'version': 1,
})
# ---
@ -2017,7 +2017,7 @@
}),
}),
'key': 'backup',
'minor_version': 3,
'minor_version': 4,
'version': 1,
})
# ---
@ -2131,7 +2131,7 @@
}),
}),
'key': 'backup',
'minor_version': 3,
'minor_version': 4,
'version': 1,
})
# ---
@ -2245,7 +2245,7 @@
}),
}),
'key': 'backup',
'minor_version': 3,
'minor_version': 4,
'version': 1,
})
# ---
@ -2323,6 +2323,154 @@
'type': 'result',
})
# ---
# name: test_config_update_errors[command10]
dict({
'id': 1,
'result': dict({
'config': dict({
'agents': dict({
}),
'create_backup': dict({
'agent_ids': list([
]),
'include_addons': None,
'include_all_addons': False,
'include_database': True,
'include_folders': None,
'name': None,
'password': None,
}),
'last_attempted_automatic_backup': None,
'last_completed_automatic_backup': None,
'next_automatic_backup': None,
'next_automatic_backup_additional': False,
'retention': dict({
'copies': None,
'days': None,
}),
'schedule': dict({
'days': list([
]),
'recurrence': 'never',
'time': None,
}),
}),
}),
'success': True,
'type': 'result',
})
# ---
# name: test_config_update_errors[command10].1
dict({
'id': 3,
'result': dict({
'config': dict({
'agents': dict({
}),
'create_backup': dict({
'agent_ids': list([
]),
'include_addons': None,
'include_all_addons': False,
'include_database': True,
'include_folders': None,
'name': None,
'password': None,
}),
'last_attempted_automatic_backup': None,
'last_completed_automatic_backup': None,
'next_automatic_backup': None,
'next_automatic_backup_additional': False,
'retention': dict({
'copies': None,
'days': None,
}),
'schedule': dict({
'days': list([
]),
'recurrence': 'never',
'time': None,
}),
}),
}),
'success': True,
'type': 'result',
})
# ---
# name: test_config_update_errors[command11]
dict({
'id': 1,
'result': dict({
'config': dict({
'agents': dict({
}),
'create_backup': dict({
'agent_ids': list([
]),
'include_addons': None,
'include_all_addons': False,
'include_database': True,
'include_folders': None,
'name': None,
'password': None,
}),
'last_attempted_automatic_backup': None,
'last_completed_automatic_backup': None,
'next_automatic_backup': None,
'next_automatic_backup_additional': False,
'retention': dict({
'copies': None,
'days': None,
}),
'schedule': dict({
'days': list([
]),
'recurrence': 'never',
'time': None,
}),
}),
}),
'success': True,
'type': 'result',
})
# ---
# name: test_config_update_errors[command11].1
dict({
'id': 3,
'result': dict({
'config': dict({
'agents': dict({
}),
'create_backup': dict({
'agent_ids': list([
]),
'include_addons': None,
'include_all_addons': False,
'include_database': True,
'include_folders': None,
'name': None,
'password': None,
}),
'last_attempted_automatic_backup': None,
'last_completed_automatic_backup': None,
'next_automatic_backup': None,
'next_automatic_backup_additional': False,
'retention': dict({
'copies': None,
'days': None,
}),
'schedule': dict({
'days': list([
]),
'recurrence': 'never',
'time': None,
}),
}),
}),
'success': True,
'type': 'result',
})
# ---
# name: test_config_update_errors[command1]
dict({
'id': 1,


@ -57,6 +57,38 @@ def mock_delay_save() -> Generator[None]:
"key": DOMAIN,
"version": 1,
},
{
"data": {
"backups": [
{
"backup_id": "abc123",
"failed_agent_ids": ["test.remote"],
}
],
"config": {
"create_backup": {
"agent_ids": [],
"include_addons": None,
"include_all_addons": False,
"include_database": True,
"include_folders": None,
"name": None,
"password": None,
},
"last_attempted_automatic_backup": None,
"last_completed_automatic_backup": None,
"retention": {
"copies": 0,
"days": 0,
},
"schedule": {
"state": "never",
},
},
},
"key": DOMAIN,
"version": 1,
},
{
"data": {
"backups": [


@ -1361,6 +1361,14 @@ async def test_config_update(
"type": "backup/config/update",
"agents": {"test-agent1": {"favorite": True}},
},
{
"type": "backup/config/update",
"retention": {"copies": 0},
},
{
"type": "backup/config/update",
"retention": {"days": 0},
},
],
)
async def test_config_update_errors(
@ -2158,7 +2166,7 @@ async def test_config_schedule_logic(
{
"type": "backup/config/update",
"create_backup": {"agent_ids": ["test.test-agent"]},
"retention": {"copies": 0, "days": None},
"retention": {"copies": 1, "days": None},
"schedule": {"recurrence": "daily"},
},
{
@ -2232,7 +2240,7 @@ async def test_config_schedule_logic(
{
"type": "backup/config/update",
"create_backup": {"agent_ids": ["test.test-agent"]},
"retention": {"copies": 0, "days": None},
"retention": {"copies": 1, "days": None},
"schedule": {"recurrence": "daily"},
},
{
@ -2301,7 +2309,7 @@ async def test_config_schedule_logic(
{
"type": "backup/config/update",
"create_backup": {"agent_ids": ["test.test-agent"]},
"retention": {"copies": 0, "days": None},
"retention": {"copies": 1, "days": None},
"schedule": {"recurrence": "daily"},
},
{
@ -3019,7 +3027,7 @@ async def test_config_retention_copies_logic_manual_backup(
{
"type": "backup/config/update",
"create_backup": {"agent_ids": ["test-agent"]},
"retention": {"copies": None, "days": 0},
"retention": {"copies": None, "days": 1},
"schedule": {"recurrence": "never"},
}
],


@ -2,6 +2,7 @@
import asyncio
from collections.abc import AsyncGenerator, Generator
from pathlib import Path
from random import getrandbits
from typing import Any
from unittest.mock import AsyncMock, patch
@ -39,13 +40,22 @@ def temp_dir_prefix() -> str:
@pytest.fixture(autouse=True)
def mock_temp_dir(temp_dir_prefix: str) -> Generator[str]:
async def mock_temp_dir(
hass: HomeAssistant, tmp_path: Path, temp_dir_prefix: str
) -> AsyncGenerator[str]:
"""Mock the certificate temp directory."""
with patch(
# Patch temp dir name to avoid tests fail running in parallel
"homeassistant.components.mqtt.util.TEMP_DIR_NAME",
f"home-assistant-mqtt-{temp_dir_prefix}-{getrandbits(10):03x}",
) as mocked_temp_dir:
mqtt_temp_dir = f"home-assistant-mqtt-{temp_dir_prefix}-{getrandbits(10):03x}"
with (
patch(
"homeassistant.components.mqtt.util.tempfile.gettempdir",
return_value=tmp_path,
),
patch(
# Patch temp dir name to avoid tests fail running in parallel
"homeassistant.components.mqtt.util.TEMP_DIR_NAME",
mqtt_temp_dir,
) as mocked_temp_dir,
):
yield mocked_temp_dir
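
For illustration, the directory the patched code ends up using looks roughly like this (a sketch; the "unittest" prefix and path layout are assumptions, not values from the fixture):

import tempfile
from pathlib import Path
from random import getrandbits

temp_dir_name = f"home-assistant-mqtt-unittest-{getrandbits(10):03x}"
# In the fixture above, gettempdir() is patched to pytest's tmp_path and the dir name
# is randomized, so each test run gets an isolated certificate directory and parallel
# runs cannot collide.
cert_dir = Path(tempfile.gettempdir()) / temp_dir_name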


@ -1034,6 +1034,7 @@ async def test_reloadable(
await help_test_reloadable(hass, mqtt_client_mock, domain, config)
@pytest.mark.usefixtures("mock_temp_dir")
@pytest.mark.parametrize(
("hass_config", "payload1", "state1", "payload2", "state2"),
[


@ -1409,6 +1409,7 @@ async def test_reloadable(
await help_test_reloadable(hass, mqtt_client_mock, domain, config)
@pytest.mark.usefixtures("mock_temp_dir")
@pytest.mark.parametrize(
"hass_config",
[


@ -235,12 +235,12 @@ async def test_browsing(
reolink_connect.model = TEST_HOST_MODEL
async def test_browsing_unsupported_encoding(
async def test_browsing_h265_encoding(
hass: HomeAssistant,
reolink_connect: MagicMock,
config_entry: MockConfigEntry,
) -> None:
"""Test browsing a Reolink camera with unsupported stream encoding."""
"""Test browsing a Reolink camera with h265 stream encoding."""
entry_id = config_entry.entry_id
with patch("homeassistant.components.reolink.PLATFORMS", [Platform.CAMERA]):
@ -249,7 +249,6 @@ async def test_browsing_unsupported_encoding(
browse_root_id = f"CAM|{entry_id}|{TEST_CHANNEL}"
# browse resolution select/camera recording days when main encoding unsupported
mock_status = MagicMock()
mock_status.year = TEST_YEAR
mock_status.month = TEST_MONTH
@ -261,6 +260,18 @@ async def test_browsing_unsupported_encoding(
browse = await async_browse_media(hass, f"{URI_SCHEME}{DOMAIN}/{browse_root_id}")
browse_resolution_id = f"RESs|{entry_id}|{TEST_CHANNEL}"
browse_res_sub_id = f"RES|{entry_id}|{TEST_CHANNEL}|sub"
browse_res_main_id = f"RES|{entry_id}|{TEST_CHANNEL}|main"
assert browse.domain == DOMAIN
assert browse.title == f"{TEST_NVR_NAME}"
assert browse.identifier == browse_resolution_id
assert browse.children[0].identifier == browse_res_sub_id
assert browse.children[1].identifier == browse_res_main_id
browse = await async_browse_media(hass, f"{URI_SCHEME}{DOMAIN}/{browse_res_sub_id}")
browse_days_id = f"DAYS|{entry_id}|{TEST_CHANNEL}|sub"
browse_day_0_id = (
f"DAY|{entry_id}|{TEST_CHANNEL}|sub|{TEST_YEAR}|{TEST_MONTH}|{TEST_DAY}"


@ -580,13 +580,19 @@ def alarm_clock_fixture_extended():
return alarm_clock
@pytest.fixture(name="speaker_model")
def speaker_model_fixture(request: pytest.FixtureRequest):
"""Create fixture for the speaker model."""
return getattr(request, "param", "Model Name")
@pytest.fixture(name="speaker_info")
def speaker_info_fixture():
def speaker_info_fixture(speaker_model):
"""Create speaker_info fixture."""
return {
"zone_name": "Zone A",
"uid": "RINCON_test",
"model_name": "Model Name",
"model_name": speaker_model,
"model_number": "S12",
"hardware_version": "1.20.1.6-1.1",
"software_version": "49.2-64250",


@ -10,6 +10,7 @@ from syrupy import SnapshotAssertion
from homeassistant.components.media_player import (
ATTR_INPUT_SOURCE,
ATTR_INPUT_SOURCE_LIST,
ATTR_MEDIA_ANNOUNCE,
ATTR_MEDIA_CONTENT_ID,
ATTR_MEDIA_CONTENT_TYPE,
@ -1205,3 +1206,27 @@ async def test_media_get_queue(
)
soco_mock.get_queue.assert_called_with(max_items=0)
assert result == snapshot
@pytest.mark.parametrize(
("speaker_model", "source_list"),
[
("Sonos Arc Ultra", [SOURCE_TV]),
("Sonos Arc", [SOURCE_TV]),
("Sonos Playbar", [SOURCE_TV]),
("Sonos Connect", [SOURCE_LINEIN]),
("Sonos Play:5", [SOURCE_LINEIN]),
("Sonos Amp", [SOURCE_LINEIN, SOURCE_TV]),
("Sonos Era", None),
],
indirect=["speaker_model"],
)
async def test_media_source_list(
hass: HomeAssistant,
async_autosetup_sonos,
speaker_model: str,
source_list: list[str] | None,
) -> None:
"""Test the mapping between the speaker model name and source_list."""
state = hass.states.get("media_player.zone_a")
assert state.attributes.get(ATTR_INPUT_SOURCE_LIST) == source_list
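
The test relies on pytest's indirect parametrization: because of indirect=["speaker_model"], each parametrized model name is delivered to the speaker_model fixture as request.param instead of being passed straight to the test. A minimal self-contained sketch of that mechanism (generic names, not the Sonos fixtures):

import pytest

@pytest.fixture
def speaker_model(request: pytest.FixtureRequest) -> str:
    # Falls back to a default when the test is not parametrized.
    return getattr(request, "param", "Model Name")

@pytest.mark.parametrize("speaker_model", ["Sonos Arc Ultra"], indirect=["speaker_model"])
def test_model_reaches_fixture(speaker_model: str) -> None:
    assert speaker_model == "Sonos Arc Ultra"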


@ -3,6 +3,29 @@
dict({
'energysites': list([
dict({
'history': dict({
'battery_energy_exported': 36,
'battery_energy_imported_from_generator': 0,
'battery_energy_imported_from_grid': 0,
'battery_energy_imported_from_solar': 684,
'consumer_energy_imported_from_battery': 36,
'consumer_energy_imported_from_generator': 0,
'consumer_energy_imported_from_grid': 0,
'consumer_energy_imported_from_solar': 38,
'generator_energy_exported': 0,
'grid_energy_exported_from_battery': 0,
'grid_energy_exported_from_generator': 0,
'grid_energy_exported_from_solar': 2,
'grid_energy_imported': 0,
'grid_services_energy_exported': 0,
'grid_services_energy_imported': 0,
'solar_energy_exported': 724,
'total_battery_charge': 684,
'total_battery_discharge': 36,
'total_grid_energy_exported': 2,
'total_home_usage': 74,
'total_solar_generation': 724,
}),
'info': dict({
'backup_reserve_percent': 0,
'battery_count': 2,
@ -432,6 +455,13 @@
'vehicle_state_webcam_available': True,
'vin': '**REDACTED**',
}),
'stream': dict({
'config': dict({
'fields': dict({
}),
'prefer_typed': None,
}),
}),
}),
]),
})


@ -5284,6 +5284,20 @@ async def test_subscribe_s2_inclusion(
assert msg["success"]
assert msg["result"] is None
# Test receiving requested grant event
event = Event(
type="grant security classes",
data={
"source": "controller",
"event": "grant security classes",
"requested": {
"securityClasses": [SecurityClass.S2_UNAUTHENTICATED],
"clientSideAuth": False,
},
},
)
client.driver.receive_event(event)
# Test receiving DSK request event
event = Event(
type="validate dsk and enter pin",