Mirror of https://github.com/home-assistant/core.git (synced 2025-07-23 21:27:38 +00:00)
Commit c526fcd40f
@@ -245,7 +245,7 @@ def async_api_turn_on(hass, config, request, entity):

     yield from hass.services.async_call(domain, SERVICE_TURN_ON, {
         ATTR_ENTITY_ID: entity.entity_id
-    }, blocking=True)
+    }, blocking=False)

     return api_message(request)

@@ -261,7 +261,7 @@ def async_api_turn_off(hass, config, request, entity):

     yield from hass.services.async_call(domain, SERVICE_TURN_OFF, {
         ATTR_ENTITY_ID: entity.entity_id
-    }, blocking=True)
+    }, blocking=False)

     return api_message(request)

@@ -276,7 +276,7 @@ def async_api_set_brightness(hass, config, request, entity):
     yield from hass.services.async_call(entity.domain, SERVICE_TURN_ON, {
         ATTR_ENTITY_ID: entity.entity_id,
         light.ATTR_BRIGHTNESS_PCT: brightness,
-    }, blocking=True)
+    }, blocking=False)

     return api_message(request)

@@ -300,7 +300,7 @@ def async_api_adjust_brightness(hass, config, request, entity):
     yield from hass.services.async_call(entity.domain, SERVICE_TURN_ON, {
         ATTR_ENTITY_ID: entity.entity_id,
         light.ATTR_BRIGHTNESS_PCT: brightness,
-    }, blocking=True)
+    }, blocking=False)

     return api_message(request)

@@ -321,14 +321,14 @@ def async_api_set_color(hass, config, request, entity):
         yield from hass.services.async_call(entity.domain, SERVICE_TURN_ON, {
             ATTR_ENTITY_ID: entity.entity_id,
             light.ATTR_RGB_COLOR: rgb,
-        }, blocking=True)
+        }, blocking=False)
     else:
         xyz = color_util.color_RGB_to_xy(*rgb)
         yield from hass.services.async_call(entity.domain, SERVICE_TURN_ON, {
             ATTR_ENTITY_ID: entity.entity_id,
             light.ATTR_XY_COLOR: (xyz[0], xyz[1]),
             light.ATTR_BRIGHTNESS: xyz[2],
-        }, blocking=True)
+        }, blocking=False)

     return api_message(request)

@@ -343,7 +343,7 @@ def async_api_set_color_temperature(hass, config, request, entity):
     yield from hass.services.async_call(entity.domain, SERVICE_TURN_ON, {
         ATTR_ENTITY_ID: entity.entity_id,
         light.ATTR_KELVIN: kelvin,
-    }, blocking=True)
+    }, blocking=False)

     return api_message(request)

@@ -361,7 +361,7 @@ def async_api_decrease_color_temp(hass, config, request, entity):
     yield from hass.services.async_call(entity.domain, SERVICE_TURN_ON, {
         ATTR_ENTITY_ID: entity.entity_id,
         light.ATTR_COLOR_TEMP: value,
-    }, blocking=True)
+    }, blocking=False)

     return api_message(request)

@@ -379,7 +379,7 @@ def async_api_increase_color_temp(hass, config, request, entity):
     yield from hass.services.async_call(entity.domain, SERVICE_TURN_ON, {
         ATTR_ENTITY_ID: entity.entity_id,
         light.ATTR_COLOR_TEMP: value,
-    }, blocking=True)
+    }, blocking=False)

     return api_message(request)

@@ -391,7 +391,7 @@ def async_api_activate(hass, config, request, entity):
     """Process a activate request."""
     yield from hass.services.async_call(entity.domain, SERVICE_TURN_ON, {
         ATTR_ENTITY_ID: entity.entity_id
-    }, blocking=True)
+    }, blocking=False)

     return api_message(request)

@@ -421,8 +421,8 @@ def async_api_set_percentage(hass, config, request, entity):
         service = SERVICE_SET_COVER_POSITION
         data[cover.ATTR_POSITION] = percentage

-    yield from hass.services.async_call(entity.domain, service,
-                                        data, blocking=True)
+    yield from hass.services.async_call(
+        entity.domain, service, data, blocking=False)

     return api_message(request)

@@ -469,8 +469,8 @@ def async_api_adjust_percentage(hass, config, request, entity):

         data[cover.ATTR_POSITION] = max(0, percentage_delta + current)

-    yield from hass.services.async_call(entity.domain, service,
-                                        data, blocking=True)
+    yield from hass.services.async_call(
+        entity.domain, service, data, blocking=False)

     return api_message(request)

@@ -482,7 +482,7 @@ def async_api_lock(hass, config, request, entity):
     """Process a lock request."""
     yield from hass.services.async_call(entity.domain, SERVICE_LOCK, {
         ATTR_ENTITY_ID: entity.entity_id
-    }, blocking=True)
+    }, blocking=False)

     return api_message(request)

@@ -495,7 +495,7 @@ def async_api_unlock(hass, config, request, entity):
     """Process a unlock request."""
    yield from hass.services.async_call(entity.domain, SERVICE_UNLOCK, {
         ATTR_ENTITY_ID: entity.entity_id
-    }, blocking=True)
+    }, blocking=False)

     return api_message(request)

@@ -512,8 +512,9 @@ def async_api_set_volume(hass, config, request, entity):
         media_player.ATTR_MEDIA_VOLUME_LEVEL: volume,
     }

-    yield from hass.services.async_call(entity.domain, SERVICE_VOLUME_SET,
-                                        data, blocking=True)
+    yield from hass.services.async_call(
+        entity.domain, SERVICE_VOLUME_SET,
+        data, blocking=False)

     return api_message(request)

@@ -540,9 +541,9 @@ def async_api_adjust_volume(hass, config, request, entity):
         media_player.ATTR_MEDIA_VOLUME_LEVEL: volume,
     }

-    yield from hass.services.async_call(entity.domain,
-                                        media_player.SERVICE_VOLUME_SET,
-                                        data, blocking=True)
+    yield from hass.services.async_call(
+        entity.domain, media_player.SERVICE_VOLUME_SET,
+        data, blocking=False)

     return api_message(request)

@@ -559,9 +560,9 @@ def async_api_set_mute(hass, config, request, entity):
         media_player.ATTR_MEDIA_VOLUME_MUTED: mute,
     }

-    yield from hass.services.async_call(entity.domain,
-                                        media_player.SERVICE_VOLUME_MUTE,
-                                        data, blocking=True)
+    yield from hass.services.async_call(
+        entity.domain, media_player.SERVICE_VOLUME_MUTE,
+        data, blocking=False)

     return api_message(request)

@@ -575,8 +576,9 @@ def async_api_play(hass, config, request, entity):
         ATTR_ENTITY_ID: entity.entity_id
     }

-    yield from hass.services.async_call(entity.domain, SERVICE_MEDIA_PLAY,
-                                        data, blocking=True)
+    yield from hass.services.async_call(
+        entity.domain, SERVICE_MEDIA_PLAY,
+        data, blocking=False)

     return api_message(request)

@@ -590,8 +592,9 @@ def async_api_pause(hass, config, request, entity):
         ATTR_ENTITY_ID: entity.entity_id
     }

-    yield from hass.services.async_call(entity.domain, SERVICE_MEDIA_PAUSE,
-                                        data, blocking=True)
+    yield from hass.services.async_call(
+        entity.domain, SERVICE_MEDIA_PAUSE,
+        data, blocking=False)

     return api_message(request)

@@ -605,8 +608,9 @@ def async_api_stop(hass, config, request, entity):
         ATTR_ENTITY_ID: entity.entity_id
     }

-    yield from hass.services.async_call(entity.domain, SERVICE_MEDIA_STOP,
-                                        data, blocking=True)
+    yield from hass.services.async_call(
+        entity.domain, SERVICE_MEDIA_STOP,
+        data, blocking=False)

     return api_message(request)

@@ -620,9 +624,9 @@ def async_api_next(hass, config, request, entity):
         ATTR_ENTITY_ID: entity.entity_id
     }

-    yield from hass.services.async_call(entity.domain,
-                                        SERVICE_MEDIA_NEXT_TRACK,
-                                        data, blocking=True)
+    yield from hass.services.async_call(
+        entity.domain, SERVICE_MEDIA_NEXT_TRACK,
+        data, blocking=False)

     return api_message(request)

@@ -636,8 +640,8 @@ def async_api_previous(hass, config, request, entity):
         ATTR_ENTITY_ID: entity.entity_id
     }

-    yield from hass.services.async_call(entity.domain,
-                                        SERVICE_MEDIA_PREVIOUS_TRACK,
-                                        data, blocking=True)
+    yield from hass.services.async_call(
+        entity.domain, SERVICE_MEDIA_PREVIOUS_TRACK,
+        data, blocking=False)

     return api_message(request)
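Every handler above now calls hass.services.async_call() with blocking=False, so the Alexa response is returned without waiting for the underlying service to finish. A minimal standalone sketch of the behavioural difference, using plain asyncio rather than Home Assistant's real service registry (slow_service and handle_request are invented for illustration):

import asyncio


async def slow_service():
    """Stand-in for a service handler that takes a while (e.g. a cover moving)."""
    await asyncio.sleep(2)


async def handle_request(blocking):
    """Sketch of a request handler: blocking waits, non-blocking answers at once."""
    task = asyncio.ensure_future(slow_service())
    if blocking:
        await task              # response delayed until the service completes
    return {'event': 'ok'}      # with blocking=False this returns immediately


async def main():
    print(await handle_request(blocking=False))  # fast response
    print(await handle_request(blocking=True))   # waits roughly 2 seconds


asyncio.run(main())

This is also why the test hunks further down add yield from hass.async_block_till_done(): with blocking=False the service call is only scheduled, so the tests have to drain the event loop before asserting on its effects.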
@@ -62,6 +62,13 @@ def setup_platform(hass, config, add_devices, discovery_info=None):
         _LOGGER.error("Unable to connect to Concord232: %s", str(ex))
         return False

+    # The order of zones returned by client.list_zones() can vary.
+    # When the zones are not named, this can result in the same entity
+    # name mapping to different sensors in an unpredictable way. Sort
+    # the zones by zone number to prevent this.
+
+    client.zones.sort(key=lambda zone: zone['number'])
+
     for zone in client.zones:
         _LOGGER.info("Loading Zone found: %s", zone['name'])
         if zone['number'] not in exclude:

@@ -118,7 +125,7 @@ class Concord232ZoneSensor(BinarySensorDevice):
     def is_on(self):
         """Return true if the binary sensor is on."""
         # True means "faulted" or "open" or "abnormal state"
-        return bool(self._zone['state'] == 'Normal')
+        return bool(self._zone['state'] != 'Normal')

     def update(self):
         """Get updated stats from API."""
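The comment added above explains the fix: sorting gives a deterministic zone order, so unnamed zones map to the same entities after every start. A tiny standalone illustration of the same idiom (the zone dicts are made up for the example):

zones = [
    {'number': 3, 'name': 'Zone 3', 'state': 'Normal'},
    {'number': 1, 'name': 'Zone 1', 'state': 'Tripped'},
    {'number': 2, 'name': 'Zone 2', 'state': 'Normal'},
]

# Sorting in place by zone number makes the iteration order stable,
# so the generated entities always map to the same physical zones.
zones.sort(key=lambda zone: zone['number'])
assert [zone['number'] for zone in zones] == [1, 2, 3]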
@@ -62,10 +62,9 @@ def setup_platform(hass, config: ConfigType,
                           node.nid, node.parent_nid)
         else:
             device_type = _detect_device_type(node)
-            if device_type in ['moisture', 'opening']:
-                subnode_id = int(node.nid[-1])
-                # Leak and door/window sensors work the same way with negative
-                # nodes and heartbeat nodes
+            subnode_id = int(node.nid[-1])
+            if device_type == 'opening':
+                # Door/window sensors use an optional "negative" node
                 if subnode_id == 4:
                     # Subnode 4 is the heartbeat node, which we will represent
                     # as a separate binary_sensor

@@ -74,6 +73,14 @@ def setup_platform(hass, config: ConfigType,
                     devices.append(device)
                 elif subnode_id == 2:
                     parent_device.add_negative_node(node)
+            elif device_type == 'moisture':
+                # Moisure nodes have a subnode 2, but we ignore it because it's
+                # just the inverse of the primary node.
+                if subnode_id == 4:
+                    # Heartbeat node
+                    device = ISYBinarySensorHeartbeat(node, parent_device)
+                    parent_device.add_heartbeat_device(device)
+                    devices.append(device)
             else:
                 # We don't yet have any special logic for other sensor types,
                 # so add the nodes as individual devices

@@ -165,7 +172,8 @@ class ISYBinarySensorDevice(isy.ISYDevice, BinarySensorDevice):
         """
         self._negative_node = child

-        if not _is_val_unknown(self._negative_node):
+        # pylint: disable=protected-access
+        if not _is_val_unknown(self._negative_node.status._val):
             # If the negative node has a value, it means the negative node is
             # in use for this device. Therefore, we cannot determine the state
             # of the sensor until we receive our first ON event.
@@ -25,11 +25,11 @@ CONF_DEVICE_ID = 'device_id'
 CONF_CALENDARS = 'calendars'
 CONF_CUSTOM_CALENDARS = 'custom_calendars'
 CONF_CALENDAR = 'calendar'
-CONF_ALL_DAY = 'all_day'
 CONF_SEARCH = 'search'

 PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
-    vol.Required(CONF_URL): vol.Url,
+    # pylint: disable=no-value-for-parameter
+    vol.Required(CONF_URL): vol.Url(),
     vol.Optional(CONF_CALENDARS, default=[]):
         vol.All(cv.ensure_list, vol.Schema([
             cv.string

@@ -88,7 +88,7 @@ def setup_platform(hass, config, add_devices, disc_info=None):
                 WebDavCalendarEventDevice(hass,
                                           device_data,
                                           calendar,
-                                          cust_calendar.get(CONF_ALL_DAY),
+                                          True,
                                           cust_calendar.get(CONF_SEARCH))
             )
@@ -582,9 +582,10 @@ def _is_latest(js_option, request):
     from user_agents import parse
     useragent = parse(request.headers.get('User-Agent'))

-    # on iOS every browser is a Safari which we support from version 10.
+    # on iOS every browser is a Safari which we support from version 11.
     if useragent.os.family == 'iOS':
-        return useragent.os.version[0] >= 10
+        # Was >= 10, temp setting it to 12 to work around issue #11387
+        return useragent.os.version[0] >= 12

     family_min_version = {
         'Chrome': 50,  # Probably can reduce this
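For readers unfamiliar with the user_agents package this hunk relies on, a short sketch of the fields the check reads; the User-Agent string below is only an example:

from user_agents import parse

# Example iOS 11 Safari User-Agent string (illustrative only).
ua_string = ('Mozilla/5.0 (iPhone; CPU iPhone OS 11_0 like Mac OS X) '
             'AppleWebKit/604.1.38 (KHTML, like Gecko) Version/11.0 '
             'Mobile/15A372 Safari/604.1')

useragent = parse(ua_string)
print(useragent.os.family)      # 'iOS'
print(useragent.os.version[0])  # 11, which fails the temporary ">= 12" check above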
@@ -169,6 +169,8 @@ CONFIG_SCHEMA = vol.Schema({
         vol.Optional(CONF_PATH, default=DEFAULT_PATH): cv.string,
         vol.Optional(CONF_RESOLVENAMES, default=DEFAULT_RESOLVENAMES):
             vol.In(CONF_RESOLVENAMES_OPTIONS),
+        vol.Optional(CONF_USERNAME, default=DEFAULT_USERNAME): cv.string,
+        vol.Optional(CONF_PASSWORD, default=DEFAULT_PASSWORD): cv.string,
         vol.Optional(CONF_CALLBACK_IP): cv.string,
         vol.Optional(CONF_CALLBACK_PORT): cv.port,
     }},

@@ -747,10 +749,6 @@ class HMDevice(Entity):
         """Return device specific state attributes."""
         attr = {}

-        # No data available
-        if not self.available:
-            return attr
-
         # Generate a dictionary with attributes
         for node, data in HM_ATTRIBUTE_SUPPORT.items():
             # Is an attribute and exists for this object

@@ -806,6 +804,9 @@ class HMDevice(Entity):
         if attribute == 'UNREACH':
             self._available = bool(value)
             has_changed = True
+        elif not self.available:
+            self._available = False
+            has_changed = True

         # If it has changed data point, update HASS
         if has_changed:
@@ -160,6 +160,8 @@ class HueBridge(object):
         self.allow_hue_groups = allow_hue_groups

         self.bridge = None
+        self.lights = {}
+        self.lightgroups = {}

         self.configured = False
         self.config_request_id = None
@@ -31,10 +31,6 @@ DEPENDENCIES = ['hue']

 _LOGGER = logging.getLogger(__name__)

-DATA_KEY = 'hue_lights'
-DATA_LIGHTS = 'lights'
-DATA_LIGHTGROUPS = 'lightgroups'
-
 MIN_TIME_BETWEEN_SCANS = timedelta(seconds=10)
 MIN_TIME_BETWEEN_FORCED_SCANS = timedelta(milliseconds=100)

@@ -93,8 +89,6 @@ def setup_platform(hass, config, add_devices, discovery_info=None):
     if discovery_info is None or 'bridge_id' not in discovery_info:
         return

-    setup_data(hass)
-
     if config is not None and len(config) > 0:
         # Legacy configuration, will be removed in 0.60
         config_str = yaml.dump([config])

@@ -110,12 +104,6 @@ def setup_platform(hass, config, add_devices, discovery_info=None):
     unthrottled_update_lights(hass, bridge, add_devices)


-def setup_data(hass):
-    """Initialize internal data. Useful from tests."""
-    if DATA_KEY not in hass.data:
-        hass.data[DATA_KEY] = {DATA_LIGHTS: {}, DATA_LIGHTGROUPS: {}}
-
-
 @util.Throttle(MIN_TIME_BETWEEN_SCANS, MIN_TIME_BETWEEN_FORCED_SCANS)
 def update_lights(hass, bridge, add_devices):
     """Update the Hue light objects with latest info from the bridge."""

@@ -176,18 +164,17 @@ def process_lights(hass, api, bridge, bridge_type, update_lights_cb):

     new_lights = []

-    lights = hass.data[DATA_KEY][DATA_LIGHTS]
     for light_id, info in api_lights.items():
-        if light_id not in lights:
-            lights[light_id] = HueLight(
+        if light_id not in bridge.lights:
+            bridge.lights[light_id] = HueLight(
                 int(light_id), info, bridge,
                 update_lights_cb,
                 bridge_type, bridge.allow_unreachable,
                 bridge.allow_in_emulated_hue)
-            new_lights.append(lights[light_id])
+            new_lights.append(bridge.lights[light_id])
         else:
-            lights[light_id].info = info
-            lights[light_id].schedule_update_ha_state()
+            bridge.lights[light_id].info = info
+            bridge.lights[light_id].schedule_update_ha_state()

     return new_lights

@@ -202,23 +189,22 @@ def process_groups(hass, api, bridge, bridge_type, update_lights_cb):

     new_lights = []

-    groups = hass.data[DATA_KEY][DATA_LIGHTGROUPS]
     for lightgroup_id, info in api_groups.items():
         if 'state' not in info:
             _LOGGER.warning('Group info does not contain state. '
                             'Please update your hub.')
             return []

-        if lightgroup_id not in groups:
-            groups[lightgroup_id] = HueLight(
+        if lightgroup_id not in bridge.lightgroups:
+            bridge.lightgroups[lightgroup_id] = HueLight(
                 int(lightgroup_id), info, bridge,
                 update_lights_cb,
                 bridge_type, bridge.allow_unreachable,
                 bridge.allow_in_emulated_hue, True)
-            new_lights.append(groups[lightgroup_id])
+            new_lights.append(bridge.lightgroups[lightgroup_id])
         else:
-            groups[lightgroup_id].info = info
-            groups[lightgroup_id].schedule_update_ha_state()
+            bridge.lightgroups[lightgroup_id].info = info
+            bridge.lightgroups[lightgroup_id].schedule_update_ha_state()

     return new_lights
@@ -25,46 +25,53 @@ PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({


 # pylint: disable=unused-argument
-def setup_platform(hass, config: ConfigType,
-                   add_devices: Callable[[list], None], discovery_info=None):
+def setup_platform(
+        hass, config: ConfigType,
+        add_devices: Callable[[list], None], discovery_info=None):
     """Set up the Sesame platform."""
     import pysesame

     email = config.get(CONF_EMAIL)
     password = config.get(CONF_PASSWORD)

-    add_devices([SesameDevice(sesame) for
-                 sesame in pysesame.get_sesames(email, password)])
+    add_devices([SesameDevice(sesame) for sesame in
+                 pysesame.get_sesames(email, password)],
+                update_before_add=True)


 class SesameDevice(LockDevice):
     """Representation of a Sesame device."""

-    _sesame = None
-
     def __init__(self, sesame: object) -> None:
         """Initialize the Sesame device."""
         self._sesame = sesame

+        # Cached properties from pysesame object.
+        self._device_id = None
+        self._nickname = None
+        self._is_unlocked = False
+        self._api_enabled = False
+        self._battery = -1
+
     @property
     def name(self) -> str:
         """Return the name of the device."""
-        return self._sesame.nickname
+        return self._nickname

     @property
     def available(self) -> bool:
         """Return True if entity is available."""
-        return self._sesame.api_enabled
+        return self._api_enabled

     @property
     def is_locked(self) -> bool:
         """Return True if the device is currently locked, else False."""
-        return not self._sesame.is_unlocked
+        return not self._is_unlocked

     @property
     def state(self) -> str:
         """Get the state of the device."""
-        if self._sesame.is_unlocked:
+        if self._is_unlocked:
             return STATE_UNLOCKED
         return STATE_LOCKED

@@ -79,11 +86,16 @@ class SesameDevice(LockDevice):
     def update(self) -> None:
         """Update the internal state of the device."""
         self._sesame.update_state()
+        self._nickname = self._sesame.nickname
+        self._api_enabled = self._sesame.api_enabled
+        self._is_unlocked = self._sesame.is_unlocked
+        self._device_id = self._sesame.device_id
+        self._battery = self._sesame.battery

     @property
     def device_state_attributes(self) -> dict:
         """Return the state attributes."""
         attributes = {}
-        attributes[ATTR_DEVICE_ID] = self._sesame.device_id
-        attributes[ATTR_BATTERY_LEVEL] = self._sesame.battery
+        attributes[ATTR_DEVICE_ID] = self._device_id
+        attributes[ATTR_BATTERY_LEVEL] = self._battery
         return attributes
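The rewrite caches everything read in update() so the entity properties never touch the pysesame client directly, and update_before_add=True makes sure the cache is populated before the entities are added. A minimal sketch of that caching pattern, independent of pysesame (FakeLock and CachedLock are invented for illustration):

class FakeLock:
    """Stand-in for a network-backed lock client (illustrative only)."""

    nickname = 'Front Door'
    is_unlocked = False
    battery = 80

    def update_state(self):
        """Pretend to poll the cloud API here."""


class CachedLock:
    """Entity-style wrapper: properties read cached values, update() refreshes them."""

    def __init__(self, client):
        self._client = client
        self._nickname = None
        self._is_unlocked = False
        self._battery = -1

    def update(self):
        self._client.update_state()
        self._nickname = self._client.nickname
        self._is_unlocked = self._client.is_unlocked
        self._battery = self._client.battery

    @property
    def is_locked(self):
        return not self._is_unlocked


lock = CachedLock(FakeLock())
lock.update()          # counterpart of update_before_add=True: fill the cache first
print(lock.is_locked)  # True, served from the cached value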
@@ -322,17 +322,17 @@ class LgWebOSDevice(MediaPlayerDevice):

     def select_source(self, source):
         """Select input source."""
-        source = self._source_list.get(source)
-        if source is None:
+        source_dict = self._source_list.get(source)
+        if source_dict is None:
             _LOGGER.warning("Source %s not found for %s", source, self.name)
             return
-        self._current_source_id = self._source_list[source]['id']
-        if source.get('title'):
-            self._current_source = self._source_list[source]['title']
-            self._client.launch_app(self._source_list[source]['id'])
-        elif source.get('label'):
-            self._current_source = self._source_list[source]['label']
-            self._client.set_input(self._source_list[source]['id'])
+        self._current_source_id = source_dict['id']
+        if source_dict.get('title'):
+            self._current_source = source_dict['title']
+            self._client.launch_app(source_dict['id'])
+        elif source_dict.get('label'):
+            self._current_source = source_dict['label']
+            self._client.set_input(source_dict['id'])

     def media_play(self):
         """Send play command."""
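The old code rebound source to the looked-up dict and then used that dict as a key into self._source_list again, which cannot work; keeping the result in source_dict fixes the lookups and keeps the original name available for the warning. A tiny standalone illustration of the failure mode (the source data is invented):

source_list = {'HDMI 1': {'id': 'hdmi1', 'label': 'HDMI 1'}}

source = 'HDMI 1'
source = source_list.get(source)        # old pattern: the name is lost, source is now a dict
try:
    source_list[source]                 # old lookup: a dict is not a hashable key
except TypeError as err:
    print('old approach fails:', err)

source_name = 'HDMI 1'
source_dict = source_list.get(source_name)   # fixed pattern: keep name and dict separate
print('new approach:', source_dict['id'])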
@@ -141,10 +141,17 @@ class ModbusRegisterSwitch(ModbusCoilSwitch):
         self._verify_register = (
             verify_register if verify_register else self._register)
         self._register_type = register_type
-        self._state_on = (
-            state_on if state_on else self._command_on)
-        self._state_off = (
-            state_off if state_off else self._command_off)
+
+        if state_on is not None:
+            self._state_on = state_on
+        else:
+            self._state_on = self._command_on
+
+        if state_off is not None:
+            self._state_off = state_off
+        else:
+            self._state_off = self._command_off
+
         self._is_on = None

     def turn_on(self, **kwargs):
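The replacement checks state_on and state_off against None explicitly; the old truthiness test silently fell back to the command value whenever a verify state of 0 was configured. A short illustration with plain integers (no Modbus involved):

command_on, state_on = 1, 0  # a device that reports 0 in the verify register when on

# Old pattern: 0 is falsy, so the configured state was silently ignored.
old_value = state_on if state_on else command_on
assert old_value == 1

# New pattern: only fall back when the option was genuinely not set.
new_value = state_on if state_on is not None else command_on
assert new_value == 0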
@@ -2,7 +2,7 @@
 """Constants used by Home Assistant components."""
 MAJOR_VERSION = 0
 MINOR_VERSION = 60
-PATCH_VERSION = '0'
+PATCH_VERSION = '1'
 __short_version__ = '{}.{}'.format(MAJOR_VERSION, MINOR_VERSION)
 __version__ = '{}.{}'.format(__short_version__, PATCH_VERSION)
 REQUIRED_PYTHON_VER = (3, 4, 2)
@@ -346,6 +346,7 @@ def test_exclude_filters(hass):
     ))

     msg = yield from smart_home.async_handle_message(hass, config, request)
+    yield from hass.async_block_till_done()

     msg = msg['event']

@@ -378,6 +379,7 @@ def test_include_filters(hass):
     ))

     msg = yield from smart_home.async_handle_message(hass, config, request)
+    yield from hass.async_block_till_done()

     msg = msg['event']

@@ -393,6 +395,7 @@ def test_api_entity_not_exists(hass):

     msg = yield from smart_home.async_handle_message(
         hass, DEFAULT_CONFIG, request)
+    yield from hass.async_block_till_done()

     assert 'event' in msg
     msg = msg['event']

@@ -442,6 +445,7 @@ def test_api_turn_on(hass, domain):

     msg = yield from smart_home.async_handle_message(
         hass, DEFAULT_CONFIG, request)
+    yield from hass.async_block_till_done()

     assert 'event' in msg
     msg = msg['event']

@@ -475,6 +479,7 @@ def test_api_turn_off(hass, domain):

     msg = yield from smart_home.async_handle_message(
         hass, DEFAULT_CONFIG, request)
+    yield from hass.async_block_till_done()

     assert 'event' in msg
     msg = msg['event']

@@ -501,6 +506,7 @@ def test_api_set_brightness(hass):

     msg = yield from smart_home.async_handle_message(
         hass, DEFAULT_CONFIG, request)
+    yield from hass.async_block_till_done()

     assert 'event' in msg
     msg = msg['event']

@@ -532,6 +538,7 @@ def test_api_adjust_brightness(hass, result, adjust):

     msg = yield from smart_home.async_handle_message(
         hass, DEFAULT_CONFIG, request)
+    yield from hass.async_block_till_done()

     assert 'event' in msg
     msg = msg['event']

@@ -566,6 +573,7 @@ def test_api_set_color_rgb(hass):

     msg = yield from smart_home.async_handle_message(
         hass, DEFAULT_CONFIG, request)
+    yield from hass.async_block_till_done()

     assert 'event' in msg
     msg = msg['event']

@@ -600,6 +608,7 @@ def test_api_set_color_xy(hass):

     msg = yield from smart_home.async_handle_message(
         hass, DEFAULT_CONFIG, request)
+    yield from hass.async_block_till_done()

     assert 'event' in msg
     msg = msg['event']

@@ -629,6 +638,7 @@ def test_api_set_color_temperature(hass):

     msg = yield from smart_home.async_handle_message(
         hass, DEFAULT_CONFIG, request)
+    yield from hass.async_block_till_done()

     assert 'event' in msg
     msg = msg['event']

@@ -658,6 +668,7 @@ def test_api_decrease_color_temp(hass, result, initial):

     msg = yield from smart_home.async_handle_message(
         hass, DEFAULT_CONFIG, request)
+    yield from hass.async_block_till_done()

     assert 'event' in msg
     msg = msg['event']

@@ -687,6 +698,7 @@ def test_api_increase_color_temp(hass, result, initial):

     msg = yield from smart_home.async_handle_message(
         hass, DEFAULT_CONFIG, request)
+    yield from hass.async_block_till_done()

     assert 'event' in msg
     msg = msg['event']

@@ -714,6 +726,7 @@ def test_api_activate(hass, domain):

     msg = yield from smart_home.async_handle_message(
         hass, DEFAULT_CONFIG, request)
+    yield from hass.async_block_till_done()

     assert 'event' in msg
     msg = msg['event']

@@ -740,6 +753,7 @@ def test_api_set_percentage_fan(hass):

     msg = yield from smart_home.async_handle_message(
         hass, DEFAULT_CONFIG, request)
+    yield from hass.async_block_till_done()

     assert 'event' in msg
     msg = msg['event']

@@ -769,6 +783,7 @@ def test_api_set_percentage_cover(hass):

     msg = yield from smart_home.async_handle_message(
         hass, DEFAULT_CONFIG, request)
+    yield from hass.async_block_till_done()

     assert 'event' in msg
     msg = msg['event']

@@ -800,6 +815,7 @@ def test_api_adjust_percentage_fan(hass, result, adjust):

     msg = yield from smart_home.async_handle_message(
         hass, DEFAULT_CONFIG, request)
+    yield from hass.async_block_till_done()

     assert 'event' in msg
     msg = msg['event']

@@ -832,6 +848,7 @@ def test_api_adjust_percentage_cover(hass, result, adjust):

     msg = yield from smart_home.async_handle_message(
         hass, DEFAULT_CONFIG, request)
+    yield from hass.async_block_till_done()

     assert 'event' in msg
     msg = msg['event']

@@ -859,6 +876,7 @@ def test_api_lock(hass, domain):

     msg = yield from smart_home.async_handle_message(
         hass, DEFAULT_CONFIG, request)
+    yield from hass.async_block_till_done()

     assert 'event' in msg
     msg = msg['event']

@@ -885,6 +903,7 @@ def test_api_play(hass, domain):

     msg = yield from smart_home.async_handle_message(
         hass, DEFAULT_CONFIG, request)
+    yield from hass.async_block_till_done()

     assert 'event' in msg
     msg = msg['event']

@@ -911,6 +930,7 @@ def test_api_pause(hass, domain):

     msg = yield from smart_home.async_handle_message(
         hass, DEFAULT_CONFIG, request)
+    yield from hass.async_block_till_done()

     assert 'event' in msg
     msg = msg['event']

@@ -937,6 +957,7 @@ def test_api_stop(hass, domain):

     msg = yield from smart_home.async_handle_message(
         hass, DEFAULT_CONFIG, request)
+    yield from hass.async_block_till_done()

     assert 'event' in msg
     msg = msg['event']

@@ -963,6 +984,7 @@ def test_api_next(hass, domain):

     msg = yield from smart_home.async_handle_message(
         hass, DEFAULT_CONFIG, request)
+    yield from hass.async_block_till_done()

     assert 'event' in msg
     msg = msg['event']

@@ -989,6 +1011,7 @@ def test_api_previous(hass, domain):

     msg = yield from smart_home.async_handle_message(
         hass, DEFAULT_CONFIG, request)
+    yield from hass.async_block_till_done()

     assert 'event' in msg
     msg = msg['event']

@@ -1017,6 +1040,7 @@ def test_api_set_volume(hass):

     msg = yield from smart_home.async_handle_message(
         hass, DEFAULT_CONFIG, request)
+    yield from hass.async_block_till_done()

     assert 'event' in msg
     msg = msg['event']

@@ -1048,6 +1072,7 @@ def test_api_adjust_volume(hass, result, adjust):

     msg = yield from smart_home.async_handle_message(
         hass, DEFAULT_CONFIG, request)
+    yield from hass.async_block_till_done()

     assert 'event' in msg
     msg = msg['event']

@@ -1077,6 +1102,7 @@ def test_api_mute(hass, domain):

     msg = yield from smart_home.async_handle_message(
         hass, DEFAULT_CONFIG, request)
+    yield from hass.async_block_till_done()

     assert 'event' in msg
     msg = msg['event']
@@ -121,8 +121,10 @@ class TestComponentsWebDavCalendar(unittest.TestCase):
         assert len(devices) == 2
         assert devices[0].name == "First"
         assert devices[0].dev_id == "First"
+        self.assertFalse(devices[0].data.include_all_day)
         assert devices[1].name == "Second"
         assert devices[1].dev_id == "Second"
+        self.assertFalse(devices[1].data.include_all_day)

         caldav.setup_platform(self.hass,
                               {

@@ -167,6 +169,7 @@ class TestComponentsWebDavCalendar(unittest.TestCase):
         assert len(devices) == 1
         assert devices[0].name == "HomeOffice"
         assert devices[0].dev_id == "Second HomeOffice"
+        self.assertTrue(devices[0].data.include_all_day)

         caldav.setup_platform(self.hass,
                               {
@@ -36,27 +36,45 @@ class TestSetup(unittest.TestCase):
         self.mock_lights = []
         self.mock_groups = []
         self.mock_add_devices = MagicMock()
-        hue_light.setup_data(self.hass)

     def setup_mocks_for_process_lights(self):
         """Set up all mocks for process_lights tests."""
-        self.mock_bridge = MagicMock()
+        self.mock_bridge = self.create_mock_bridge('host')
         self.mock_api = MagicMock()
         self.mock_api.get.return_value = {}
         self.mock_bridge.get_api.return_value = self.mock_api
         self.mock_bridge_type = MagicMock()
-        hue_light.setup_data(self.hass)

     def setup_mocks_for_process_groups(self):
         """Set up all mocks for process_groups tests."""
-        self.mock_bridge = MagicMock()
+        self.mock_bridge = self.create_mock_bridge('host')
         self.mock_bridge.get_group.return_value = {
             'name': 'Group 0', 'state': {'any_on': True}}

         self.mock_api = MagicMock()
         self.mock_api.get.return_value = {}
         self.mock_bridge.get_api.return_value = self.mock_api

         self.mock_bridge_type = MagicMock()
-        hue_light.setup_data(self.hass)
+
+    def create_mock_bridge(self, host, allow_hue_groups=True):
+        """Return a mock HueBridge with reasonable defaults."""
+        mock_bridge = MagicMock()
+        mock_bridge.host = host
+        mock_bridge.allow_hue_groups = allow_hue_groups
+        mock_bridge.lights = {}
+        mock_bridge.lightgroups = {}
+        return mock_bridge
+
+    def create_mock_lights(self, lights):
+        """Return a dict suitable for mocking api.get('lights')."""
+        mock_bridge_lights = lights
+
+        for light_id, info in mock_bridge_lights.items():
+            if 'state' not in info:
+                info['state'] = {'on': False}
+
+        return mock_bridge_lights

     def test_setup_platform_no_discovery_info(self):
         """Test setup_platform without discovery info."""

@@ -211,6 +229,70 @@ class TestSetup(unittest.TestCase):
         self.mock_add_devices.assert_called_once_with(
             self.mock_lights)

+    @MockDependency('phue')
+    def test_update_lights_with_two_bridges(self, mock_phue):
+        """Test the update_lights function with two bridges."""
+        self.setup_mocks_for_update_lights()
+
+        mock_bridge_one = self.create_mock_bridge('one', False)
+        mock_bridge_one_lights = self.create_mock_lights(
+            {1: {'name': 'b1l1'}, 2: {'name': 'b1l2'}})
+
+        mock_bridge_two = self.create_mock_bridge('two', False)
+        mock_bridge_two_lights = self.create_mock_lights(
+            {1: {'name': 'b2l1'}, 3: {'name': 'b2l3'}})
+
+        with patch('homeassistant.components.light.hue.get_bridge_type',
+                   return_value=self.mock_bridge_type):
+            with patch('homeassistant.components.light.hue.HueLight.'
+                       'schedule_update_ha_state'):
+                mock_api = MagicMock()
+                mock_api.get.return_value = mock_bridge_one_lights
+                with patch.object(mock_bridge_one, 'get_api',
+                                  return_value=mock_api):
+                    hue_light.unthrottled_update_lights(
+                        self.hass, mock_bridge_one, self.mock_add_devices)
+
+                mock_api = MagicMock()
+                mock_api.get.return_value = mock_bridge_two_lights
+                with patch.object(mock_bridge_two, 'get_api',
+                                  return_value=mock_api):
+                    hue_light.unthrottled_update_lights(
+                        self.hass, mock_bridge_two, self.mock_add_devices)
+
+        self.assertEquals(sorted(mock_bridge_one.lights.keys()), [1, 2])
+        self.assertEquals(sorted(mock_bridge_two.lights.keys()), [1, 3])
+
+        self.assertEquals(len(self.mock_add_devices.mock_calls), 2)
+
+        # first call
+        name, args, kwargs = self.mock_add_devices.mock_calls[0]
+        self.assertEquals(len(args), 1)
+        self.assertEquals(len(kwargs), 0)
+
+        # one argument, a list of lights in bridge one; each of them is an
+        # object of type HueLight so we can't straight up compare them
+        lights = args[0]
+        self.assertEquals(
+            lights[0].unique_id,
+            '{}.b1l1.Light.1'.format(hue_light.HueLight))
+        self.assertEquals(
+            lights[1].unique_id,
+            '{}.b1l2.Light.2'.format(hue_light.HueLight))
+
+        # second call works the same
+        name, args, kwargs = self.mock_add_devices.mock_calls[1]
+        self.assertEquals(len(args), 1)
+        self.assertEquals(len(kwargs), 0)
+
+        lights = args[0]
+        self.assertEquals(
+            lights[0].unique_id,
+            '{}.b2l1.Light.1'.format(hue_light.HueLight))
+        self.assertEquals(
+            lights[1].unique_id,
+            '{}.b2l3.Light.3'.format(hue_light.HueLight))
+
     def test_process_lights_api_error(self):
         """Test the process_lights function when the bridge errors out."""
         self.setup_mocks_for_process_lights()

@@ -221,9 +303,7 @@ class TestSetup(unittest.TestCase):
             None)

         self.assertEquals([], ret)
-        self.assertEquals(
-            {},
-            self.hass.data[hue_light.DATA_KEY][hue_light.DATA_LIGHTS])
+        self.assertEquals(self.mock_bridge.lights, {})

     def test_process_lights_no_lights(self):
         """Test the process_lights function when bridge returns no lights."""

@@ -234,9 +314,7 @@ class TestSetup(unittest.TestCase):
             None)

         self.assertEquals([], ret)
-        self.assertEquals(
-            {},
-            self.hass.data[hue_light.DATA_KEY][hue_light.DATA_LIGHTS])
+        self.assertEquals(self.mock_bridge.lights, {})

     @patch('homeassistant.components.light.hue.HueLight')
     def test_process_lights_some_lights(self, mock_hue_light):

@@ -260,9 +338,7 @@ class TestSetup(unittest.TestCase):
                 self.mock_bridge_type, self.mock_bridge.allow_unreachable,
                 self.mock_bridge.allow_in_emulated_hue),
         ])
-        self.assertEquals(
-            len(self.hass.data[hue_light.DATA_KEY][hue_light.DATA_LIGHTS]),
-            2)
+        self.assertEquals(len(self.mock_bridge.lights), 2)

     @patch('homeassistant.components.light.hue.HueLight')
     def test_process_lights_new_light(self, mock_hue_light):

@@ -274,8 +350,7 @@ class TestSetup(unittest.TestCase):
         self.setup_mocks_for_process_lights()
         self.mock_api.get.return_value = {
             1: {'state': 'on'}, 2: {'state': 'off'}}
-        self.hass.data[
-            hue_light.DATA_KEY][hue_light.DATA_LIGHTS][1] = MagicMock()
+        self.mock_bridge.lights = {1: MagicMock()}

         ret = hue_light.process_lights(
             self.hass, self.mock_api, self.mock_bridge, self.mock_bridge_type,

@@ -288,11 +363,9 @@ class TestSetup(unittest.TestCase):
                 self.mock_bridge_type, self.mock_bridge.allow_unreachable,
                 self.mock_bridge.allow_in_emulated_hue),
         ])
-        self.hass.data[hue_light.DATA_KEY][hue_light.DATA_LIGHTS][
-            1].schedule_update_ha_state.assert_called_once_with()
-        self.assertEquals(
-            len(self.hass.data[hue_light.DATA_KEY][hue_light.DATA_LIGHTS]),
-            2)
+        self.assertEquals(len(self.mock_bridge.lights), 2)
+        self.mock_bridge.lights[1]\
+            .schedule_update_ha_state.assert_called_once_with()

     def test_process_groups_api_error(self):
         """Test the process_groups function when the bridge errors out."""

@@ -304,9 +377,7 @@ class TestSetup(unittest.TestCase):
             None)

         self.assertEquals([], ret)
-        self.assertEquals(
-            {},
-            self.hass.data[hue_light.DATA_KEY][hue_light.DATA_LIGHTGROUPS])
+        self.assertEquals(self.mock_bridge.lightgroups, {})

     def test_process_groups_no_state(self):
         """Test the process_groups function when bridge returns no status."""

@@ -318,9 +389,7 @@ class TestSetup(unittest.TestCase):
             None)

         self.assertEquals([], ret)
-        self.assertEquals(
-            {},
-            self.hass.data[hue_light.DATA_KEY][hue_light.DATA_LIGHTGROUPS])
+        self.assertEquals(self.mock_bridge.lightgroups, {})

     @patch('homeassistant.components.light.hue.HueLight')
     def test_process_groups_some_groups(self, mock_hue_light):

@@ -344,10 +413,7 @@ class TestSetup(unittest.TestCase):
                 self.mock_bridge_type, self.mock_bridge.allow_unreachable,
                 self.mock_bridge.allow_in_emulated_hue, True),
         ])
-        self.assertEquals(
-            len(self.hass.data[
-                hue_light.DATA_KEY][hue_light.DATA_LIGHTGROUPS]),
-            2)
+        self.assertEquals(len(self.mock_bridge.lightgroups), 2)

     @patch('homeassistant.components.light.hue.HueLight')
     def test_process_groups_new_group(self, mock_hue_light):

@@ -359,8 +425,7 @@ class TestSetup(unittest.TestCase):
         self.setup_mocks_for_process_groups()
         self.mock_api.get.return_value = {
             1: {'state': 'on'}, 2: {'state': 'off'}}
-        self.hass.data[
-            hue_light.DATA_KEY][hue_light.DATA_LIGHTGROUPS][1] = MagicMock()
+        self.mock_bridge.lightgroups = {1: MagicMock()}

         ret = hue_light.process_groups(
             self.hass, self.mock_api, self.mock_bridge, self.mock_bridge_type,

@@ -373,12 +438,9 @@ class TestSetup(unittest.TestCase):
                 self.mock_bridge_type, self.mock_bridge.allow_unreachable,
                 self.mock_bridge.allow_in_emulated_hue, True),
         ])
-        self.hass.data[hue_light.DATA_KEY][hue_light.DATA_LIGHTGROUPS][
-            1].schedule_update_ha_state.assert_called_once_with()
-        self.assertEquals(
-            len(self.hass.data[
-                hue_light.DATA_KEY][hue_light.DATA_LIGHTGROUPS]),
-            2)
+        self.assertEquals(len(self.mock_bridge.lightgroups), 2)
+        self.mock_bridge.lightgroups[1]\
+            .schedule_update_ha_state.assert_called_once_with()


 class TestHueLight(unittest.TestCase):
tests/components/media_player/test_webostv.py — new file (60 lines)

@@ -0,0 +1,60 @@
+"""The tests for the LG webOS media player platform."""
+import unittest
+from unittest import mock
+
+from homeassistant.components.media_player import webostv
+
+
+class FakeLgWebOSDevice(webostv.LgWebOSDevice):
+    """A fake device without the client setup required for the real one."""
+
+    def __init__(self, *args, **kwargs):
+        """Initialise parameters needed for tests with fake values."""
+        self._source_list = {}
+        self._client = mock.MagicMock()
+        self._name = 'fake_device'
+        self._current_source = None
+
+
+class TestLgWebOSDevice(unittest.TestCase):
+    """Test the LgWebOSDevice class."""
+
+    def setUp(self):
+        """Configure a fake device for each test."""
+        self.device = FakeLgWebOSDevice()
+
+    def test_select_source_with_empty_source_list(self):
+        """Ensure we don't call client methods when we don't have sources."""
+        self.device.select_source('nonexistent')
+        assert 0 == self.device._client.launch_app.call_count
+        assert 0 == self.device._client.set_input.call_count
+
+    def test_select_source_with_titled_entry(self):
+        """Test that a titled source is treated as an app."""
+        self.device._source_list = {
+            'existent': {
+                'id': 'existent_id',
+                'title': 'existent_title',
+            },
+        }
+
+        self.device.select_source('existent')
+
+        assert 'existent_title' == self.device._current_source
+        assert [mock.call('existent_id')] == (
+            self.device._client.launch_app.call_args_list)
+
+    def test_select_source_with_labelled_entry(self):
+        """Test that a labelled source is treated as an input source."""
+        self.device._source_list = {
+            'existent': {
+                'id': 'existent_id',
+                'label': 'existent_label',
+            },
+        }
+
+        self.device.select_source('existent')
+
+        assert 'existent_label' == self.device._current_source
+        assert [mock.call('existent_id')] == (
+            self.device._client.set_input.call_args_list)