Support muting and relative-volume media_players in Google Assistant (#38651)
Support the action.devices.commands.mute intent to mute and unmute media_players that declare support for mute/unmute. For media players that support volume up/down but cannot set the volume to a specific level, allow the action.devices.commands.volumeRelative intent to step the volume up or down. This improves support for IR blasters and other open-loop media_player integrations.
This commit is contained in: parent a6f869aeee, commit 53b729a0d1
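For reference, the EXECUTE payloads that the updated VolumeTrait handles look roughly as follows. This is a sketch based on the command constants and test parameters in the diff below; the surrounding Google Assistant request envelope (requestId, devices, execution list) is omitted.

# Absolute volume: routed to media_player.volume_set (requires SUPPORT_VOLUME_SET).
{"command": "action.devices.commands.setVolume", "params": {"volumeLevel": 42}}

# Relative volume: volume_set when the player supports it, otherwise repeated
# volume_up / volume_down service calls, one per step.
{"command": "action.devices.commands.volumeRelative", "params": {"relativeSteps": -10}}

# Mute / unmute: routed to media_player.volume_mute (requires SUPPORT_VOLUME_MUTE).
{"command": "action.devices.commands.mute", "params": {"mute": True}}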
@@ -121,6 +121,7 @@ COMMAND_PREVIOUS_INPUT = f"{PREFIX_COMMANDS}PreviousInput"
 COMMAND_OPENCLOSE = f"{PREFIX_COMMANDS}OpenClose"
 COMMAND_SET_VOLUME = f"{PREFIX_COMMANDS}setVolume"
 COMMAND_VOLUME_RELATIVE = f"{PREFIX_COMMANDS}volumeRelative"
+COMMAND_MUTE = f"{PREFIX_COMMANDS}mute"
 COMMAND_ARMDISARM = f"{PREFIX_COMMANDS}ArmDisarm"
 COMMAND_MEDIA_NEXT = f"{PREFIX_COMMANDS}mediaNext"
 COMMAND_MEDIA_PAUSE = f"{PREFIX_COMMANDS}mediaPause"
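Assuming PREFIX_COMMANDS is the usual "action.devices.commands." prefix defined earlier in this file (the definition is not part of this hunk), the new constant expands to the intent name from the commit message:

PREFIX_COMMANDS = "action.devices.commands."  # assumed value, defined earlier in the file
COMMAND_MUTE = f"{PREFIX_COMMANDS}mute"       # -> "action.devices.commands.mute"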
@@ -1627,75 +1628,132 @@ class OpenCloseTrait(_Trait):
 
 @register_trait
 class VolumeTrait(_Trait):
-    """Trait to control brightness of a device.
+    """Trait to control volume of a device.
 
     https://developers.google.com/actions/smarthome/traits/volume
     """
 
     name = TRAIT_VOLUME
-    commands = [COMMAND_SET_VOLUME, COMMAND_VOLUME_RELATIVE]
+    commands = [COMMAND_SET_VOLUME, COMMAND_VOLUME_RELATIVE, COMMAND_MUTE]
 
     @staticmethod
     def supported(domain, features, device_class):
-        """Test if state is supported."""
+        """Test if trait is supported."""
         if domain == media_player.DOMAIN:
-            return features & media_player.SUPPORT_VOLUME_SET
+            return features & (
+                media_player.SUPPORT_VOLUME_SET | media_player.SUPPORT_VOLUME_STEP
+            )
 
         return False
 
     def sync_attributes(self):
-        """Return brightness attributes for a sync request."""
-        return {}
+        """Return volume attributes for a sync request."""
+        features = self.state.attributes.get(ATTR_SUPPORTED_FEATURES, 0)
+        return {
+            "volumeCanMuteAndUnmute": bool(features & media_player.SUPPORT_VOLUME_MUTE),
+            "commandOnlyVolume": self.state.attributes.get(ATTR_ASSUMED_STATE, False),
+            # Volume amounts in SET_VOLUME and VOLUME_RELATIVE are on a scale
+            # from 0 to this value.
+            "volumeMaxLevel": 100,
+            # Default change for queries like "Hey Google, volume up".
+            # 10% corresponds to the default behavior for the
+            # media_player.volume{up,down} services.
+            "levelStepSize": 10,
+        }
 
     def query_attributes(self):
-        """Return brightness query attributes."""
+        """Return volume query attributes."""
         response = {}
 
         level = self.state.attributes.get(media_player.ATTR_MEDIA_VOLUME_LEVEL)
-        muted = self.state.attributes.get(media_player.ATTR_MEDIA_VOLUME_MUTED)
         if level is not None:
             # Convert 0.0-1.0 to 0-100
             response["currentVolume"] = int(level * 100)
+
+        muted = self.state.attributes.get(media_player.ATTR_MEDIA_VOLUME_MUTED)
+        if muted is not None:
             response["isMuted"] = bool(muted)
 
         return response
 
-    async def _execute_set_volume(self, data, params):
-        level = params["volumeLevel"]
-
+    async def _set_volume_absolute(self, data, level):
         await self.hass.services.async_call(
             media_player.DOMAIN,
             media_player.SERVICE_VOLUME_SET,
             {
                 ATTR_ENTITY_ID: self.state.entity_id,
-                media_player.ATTR_MEDIA_VOLUME_LEVEL: level / 100,
+                media_player.ATTR_MEDIA_VOLUME_LEVEL: level,
             },
             blocking=True,
             context=data.context,
         )
 
+    async def _execute_set_volume(self, data, params):
+        level = max(0, min(100, params["volumeLevel"]))
+
+        if not (
+            self.state.attributes.get(ATTR_SUPPORTED_FEATURES, 0)
+            & media_player.SUPPORT_VOLUME_SET
+        ):
+            raise SmartHomeError(ERR_NOT_SUPPORTED, "Command not supported")
+
+        await self._set_volume_absolute(data, level / 100)
+
     async def _execute_volume_relative(self, data, params):
-        # This could also support up/down commands using relativeSteps
-        relative = params["volumeRelativeLevel"]
-        current = self.state.attributes.get(media_player.ATTR_MEDIA_VOLUME_LEVEL)
+        relative = params["relativeSteps"]
+        features = self.state.attributes.get(ATTR_SUPPORTED_FEATURES, 0)
 
-        await self.hass.services.async_call(
-            media_player.DOMAIN,
-            media_player.SERVICE_VOLUME_SET,
-            {
-                ATTR_ENTITY_ID: self.state.entity_id,
-                media_player.ATTR_MEDIA_VOLUME_LEVEL: current + relative / 100,
-            },
-            blocking=True,
-            context=data.context,
-        )
+        if features & media_player.SUPPORT_VOLUME_SET:
+            current = self.state.attributes.get(media_player.ATTR_MEDIA_VOLUME_LEVEL)
+            target = max(0.0, min(1.0, current + relative / 100))
+
+            await self._set_volume_absolute(data, target)
+
+        elif features & media_player.SUPPORT_VOLUME_STEP:
+            svc = media_player.SERVICE_VOLUME_UP
+            if relative < 0:
+                svc = media_player.SERVICE_VOLUME_DOWN
+                relative = -relative
+
+            for i in range(relative):
+                await self.hass.services.async_call(
+                    media_player.DOMAIN,
+                    svc,
+                    {ATTR_ENTITY_ID: self.state.entity_id},
+                    blocking=True,
+                    context=data.context,
+                )
+        else:
+            raise SmartHomeError(ERR_NOT_SUPPORTED, "Command not supported")
+
+    async def _execute_mute(self, data, params):
+        mute = params["mute"]
+
+        if not (
+            self.state.attributes.get(ATTR_SUPPORTED_FEATURES, 0)
+            & media_player.SUPPORT_VOLUME_MUTE
+        ):
+            raise SmartHomeError(ERR_NOT_SUPPORTED, "Command not supported")
+
+        await self.hass.services.async_call(
+            media_player.DOMAIN,
+            media_player.SERVICE_VOLUME_MUTE,
+            {
+                ATTR_ENTITY_ID: self.state.entity_id,
+                media_player.ATTR_MEDIA_VOLUME_MUTED: mute,
+            },
+            blocking=True,
+            context=data.context,
+        )
 
     async def execute(self, command, data, params, challenge):
-        """Execute a brightness command."""
+        """Execute a volume command."""
         if command == COMMAND_SET_VOLUME:
             await self._execute_set_volume(data, params)
         elif command == COMMAND_VOLUME_RELATIVE:
             await self._execute_volume_relative(data, params)
+        elif command == COMMAND_MUTE:
+            await self._execute_mute(data, params)
         else:
             raise SmartHomeError(ERR_NOT_SUPPORTED, "Command not supported")
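As a usage illustration, here is a minimal sketch of the kind of open-loop media_player the new code paths target, such as an IR blaster that can only step the volume and toggle mute. The IrBlasterPlayer class name and the elided method bodies are hypothetical and not part of this commit; only the feature flags and entity properties come from the Home Assistant media_player API that the trait above relies on.

# Hypothetical entity, for illustration only: an open-loop player that can step
# volume and mute via IR codes but cannot report or set an absolute level.
from homeassistant.components.media_player import MediaPlayerEntity
from homeassistant.components.media_player.const import (
    SUPPORT_VOLUME_MUTE,
    SUPPORT_VOLUME_STEP,
)


class IrBlasterPlayer(MediaPlayerEntity):
    """Open-loop media player controlled through one-way IR commands."""

    _muted = False

    @property
    def assumed_state(self):
        # Surfaces as "commandOnlyVolume": True in the trait's SYNC attributes.
        return True

    @property
    def supported_features(self):
        # SUPPORT_VOLUME_STEP alone is now enough for VolumeTrait.supported();
        # SUPPORT_VOLUME_MUTE additionally enables action.devices.commands.mute.
        return SUPPORT_VOLUME_STEP | SUPPORT_VOLUME_MUTE

    async def async_volume_up(self):
        ...  # send the "volume up" IR code; called once per relative step

    async def async_volume_down(self):
        ...  # send the "volume down" IR code

    async def async_mute_volume(self, mute):
        self._muted = mute  # send the mute or unmute IR code

With such an entity, a volumeRelative command with relativeSteps of 10 results in ten media_player.volume_up service calls, matching the loop in _execute_volume_relative above and the expectations in the tests below.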
@@ -2003,9 +2003,7 @@ async def test_volume_media_player(hass):
     """Test volume trait support for media player domain."""
     assert helpers.get_google_type(media_player.DOMAIN, None) is not None
     assert trait.VolumeTrait.supported(
-        media_player.DOMAIN,
-        media_player.SUPPORT_VOLUME_SET | media_player.SUPPORT_VOLUME_MUTE,
-        None,
+        media_player.DOMAIN, media_player.SUPPORT_VOLUME_SET, None,
     )
 
     trt = trait.VolumeTrait(
@@ -2014,16 +2012,21 @@ async def test_volume_media_player(hass):
             "media_player.bla",
             media_player.STATE_PLAYING,
             {
+                ATTR_SUPPORTED_FEATURES: media_player.SUPPORT_VOLUME_SET,
                 media_player.ATTR_MEDIA_VOLUME_LEVEL: 0.3,
-                media_player.ATTR_MEDIA_VOLUME_MUTED: False,
             },
         ),
         BASIC_CONFIG,
     )
 
-    assert trt.sync_attributes() == {}
+    assert trt.sync_attributes() == {
+        "volumeMaxLevel": 100,
+        "levelStepSize": 10,
+        "volumeCanMuteAndUnmute": False,
+        "commandOnlyVolume": False,
+    }
 
-    assert trt.query_attributes() == {"currentVolume": 30, "isMuted": False}
+    assert trt.query_attributes() == {"currentVolume": 30}
 
     calls = async_mock_service(
         hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_SET
@@ -2035,40 +2038,130 @@ async def test_volume_media_player(hass):
         media_player.ATTR_MEDIA_VOLUME_LEVEL: 0.6,
     }
 
+    calls = async_mock_service(
+        hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_SET
+    )
+    await trt.execute(
+        trait.COMMAND_VOLUME_RELATIVE, BASIC_DATA, {"relativeSteps": 10}, {}
+    )
+    assert len(calls) == 1
+    assert calls[0].data == {
+        ATTR_ENTITY_ID: "media_player.bla",
+        media_player.ATTR_MEDIA_VOLUME_LEVEL: 0.4,
+    }
+
 
 async def test_volume_media_player_relative(hass):
-    """Test volume trait support for media player domain."""
+    """Test volume trait support for relative-volume-only media players."""
+    assert trait.VolumeTrait.supported(
+        media_player.DOMAIN, media_player.SUPPORT_VOLUME_STEP, None,
+    )
     trt = trait.VolumeTrait(
         hass,
         State(
             "media_player.bla",
             media_player.STATE_PLAYING,
             {
-                media_player.ATTR_MEDIA_VOLUME_LEVEL: 0.3,
-                media_player.ATTR_MEDIA_VOLUME_MUTED: False,
+                ATTR_ASSUMED_STATE: True,
+                ATTR_SUPPORTED_FEATURES: media_player.SUPPORT_VOLUME_STEP,
+            },
+        ),
+        BASIC_CONFIG,
+    )
+
+    assert trt.sync_attributes() == {
+        "volumeMaxLevel": 100,
+        "levelStepSize": 10,
+        "volumeCanMuteAndUnmute": False,
+        "commandOnlyVolume": True,
+    }
+
+    assert trt.query_attributes() == {}
+
+    calls = async_mock_service(
+        hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_UP
+    )
+
+    await trt.execute(
+        trait.COMMAND_VOLUME_RELATIVE, BASIC_DATA, {"relativeSteps": 10}, {},
+    )
+    assert len(calls) == 10
+    for call in calls:
+        assert call.data == {
+            ATTR_ENTITY_ID: "media_player.bla",
+        }
+
+    calls = async_mock_service(
+        hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_DOWN
+    )
+    await trt.execute(
+        trait.COMMAND_VOLUME_RELATIVE, BASIC_DATA, {"relativeSteps": -10}, {},
+    )
+    assert len(calls) == 10
+    for call in calls:
+        assert call.data == {
+            ATTR_ENTITY_ID: "media_player.bla",
+        }
+
+    with pytest.raises(SmartHomeError):
+        await trt.execute(trait.COMMAND_SET_VOLUME, BASIC_DATA, {"volumeLevel": 42}, {})
+
+    with pytest.raises(SmartHomeError):
+        await trt.execute(trait.COMMAND_MUTE, BASIC_DATA, {"mute": True}, {})
+
+
+async def test_media_player_mute(hass):
+    """Test volume trait support for muting."""
+    assert trait.VolumeTrait.supported(
+        media_player.DOMAIN,
+        media_player.SUPPORT_VOLUME_STEP | media_player.SUPPORT_VOLUME_MUTE,
+        None,
+    )
+    trt = trait.VolumeTrait(
+        hass,
+        State(
+            "media_player.bla",
+            media_player.STATE_PLAYING,
+            {
+                ATTR_SUPPORTED_FEATURES: (
+                    media_player.SUPPORT_VOLUME_STEP | media_player.SUPPORT_VOLUME_MUTE
+                ),
+                media_player.ATTR_MEDIA_VOLUME_MUTED: False,
             },
         ),
         BASIC_CONFIG,
     )
 
-    assert trt.sync_attributes() == {}
+    assert trt.sync_attributes() == {
+        "volumeMaxLevel": 100,
+        "levelStepSize": 10,
+        "volumeCanMuteAndUnmute": True,
+        "commandOnlyVolume": False,
+    }
+    assert trt.query_attributes() == {"isMuted": False}
 
-    assert trt.query_attributes() == {"currentVolume": 30, "isMuted": False}
-
-    calls = async_mock_service(
-        hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_SET
+    mute_calls = async_mock_service(
+        hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_MUTE
     )
-
     await trt.execute(
-        trait.COMMAND_VOLUME_RELATIVE,
-        BASIC_DATA,
-        {"volumeRelativeLevel": 20, "relativeSteps": 2},
-        {},
+        trait.COMMAND_MUTE, BASIC_DATA, {"mute": True}, {},
     )
-    assert len(calls) == 1
-    assert calls[0].data == {
+    assert len(mute_calls) == 1
+    assert mute_calls[0].data == {
         ATTR_ENTITY_ID: "media_player.bla",
-        media_player.ATTR_MEDIA_VOLUME_LEVEL: 0.5,
+        media_player.ATTR_MEDIA_VOLUME_MUTED: True,
+    }
+
+    unmute_calls = async_mock_service(
+        hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_MUTE
+    )
+    await trt.execute(
+        trait.COMMAND_MUTE, BASIC_DATA, {"mute": False}, {},
+    )
+    assert len(unmute_calls) == 1
+    assert unmute_calls[0].data == {
+        ATTR_ENTITY_ID: "media_player.bla",
+        media_player.ATTR_MEDIA_VOLUME_MUTED: False,
     }