Merge pull request #11716 from home-assistant/release-0-61-1

0.61.1
Authored by Paulus Schoutsen on 2018-01-16 14:33:12 -08:00; committed by GitHub
commit 37eb6c90b6
13 changed files with 94 additions and 94 deletions

View File

@@ -18,9 +18,8 @@ from homeassistant.helpers import config_validation as cv
 from homeassistant.components.binary_sensor import (
     BinarySensorDevice, PLATFORM_SCHEMA)
 from homeassistant.components.rfxtrx import (
-    ATTR_NAME, ATTR_DATA_BITS, ATTR_OFF_DELAY, ATTR_FIRE_EVENT,
-    CONF_AUTOMATIC_ADD, CONF_FIRE_EVENT,
-    CONF_DATA_BITS, CONF_DEVICES)
+    ATTR_NAME, CONF_AUTOMATIC_ADD, CONF_FIRE_EVENT,
+    CONF_OFF_DELAY, CONF_DATA_BITS, CONF_DEVICES)
 from homeassistant.util import slugify
 from homeassistant.util import dt as dt_util
@@ -35,9 +34,11 @@ PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
             vol.Optional(CONF_NAME): cv.string,
             vol.Optional(CONF_DEVICE_CLASS): cv.string,
             vol.Optional(CONF_FIRE_EVENT, default=False): cv.boolean,
-            vol.Optional(CONF_DATA_BITS): cv.positive_int,
-            vol.Optional(CONF_COMMAND_ON): cv.byte,
-            vol.Optional(CONF_COMMAND_OFF): cv.byte
+            vol.Optional(CONF_OFF_DELAY, default=None):
+                vol.Any(cv.time_period, cv.positive_timedelta),
+            vol.Optional(CONF_DATA_BITS, default=None): cv.positive_int,
+            vol.Optional(CONF_COMMAND_ON, default=None): cv.byte,
+            vol.Optional(CONF_COMMAND_OFF, default=None): cv.byte
         })
     },
     vol.Optional(CONF_AUTOMATIC_ADD, default=False): cv.boolean,
@@ -59,16 +60,16 @@ def setup_platform(hass, config, add_devices_callback, discovery_info=None):
         if entity[CONF_DATA_BITS] is not None:
             _LOGGER.debug("Masked device id: %s",
                           rfxtrx.get_pt2262_deviceid(device_id,
-                                                     entity[ATTR_DATA_BITS]))
+                                                     entity[CONF_DATA_BITS]))

         _LOGGER.debug("Add %s rfxtrx.binary_sensor (class %s)",
                       entity[ATTR_NAME], entity[CONF_DEVICE_CLASS])

         device = RfxtrxBinarySensor(event, entity[ATTR_NAME],
                                     entity[CONF_DEVICE_CLASS],
-                                    entity[ATTR_FIRE_EVENT],
-                                    entity[ATTR_OFF_DELAY],
-                                    entity[ATTR_DATA_BITS],
+                                    entity[CONF_FIRE_EVENT],
+                                    entity[CONF_OFF_DELAY],
+                                    entity[CONF_DATA_BITS],
                                     entity[CONF_COMMAND_ON],
                                     entity[CONF_COMMAND_OFF])
         device.hass = hass
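
The new schema entry accepts an off_delay either as a YAML time-period mapping or as an already-built timedelta. A minimal sketch of that validator in isolation (it assumes a Home Assistant checkout is importable; the sample values are illustrative):

    from datetime import timedelta

    import voluptuous as vol
    import homeassistant.helpers.config_validation as cv

    # Same validator as in the schema above, pulled out on its own.
    OFF_DELAY = vol.Any(cv.time_period, cv.positive_timedelta)

    print(OFF_DELAY({'seconds': 5}))        # 0:00:05, from a YAML mapping
    print(OFF_DELAY(timedelta(seconds=5)))  # 0:00:05, already a timedelta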

View File

@@ -13,7 +13,7 @@ import logging
 import voluptuous as vol

 from homeassistant.components.calendar import (
-    CalendarEventDevice, PLATFORM_SCHEMA)
+    CalendarEventDevice, DOMAIN, PLATFORM_SCHEMA)
 from homeassistant.components.google import (
     CONF_DEVICE_ID)
 from homeassistant.const import (
@@ -26,7 +26,6 @@ from homeassistant.util import Throttle
 REQUIREMENTS = ['todoist-python==7.0.17']

 _LOGGER = logging.getLogger(__name__)

-DOMAIN = 'todoist'

 # Calendar Platform: Does this calendar event last all day?
 ALL_DAY = 'all_day'
@@ -78,7 +77,7 @@ SUMMARY = 'summary'
 # Todoist API: Fetch all Tasks
 TASKS = 'items'

-SERVICE_NEW_TASK = 'new_task'
+SERVICE_NEW_TASK = 'todoist_new_task'
 NEW_TASK_SERVICE_SCHEMA = vol.Schema({
     vol.Required(CONTENT): cv.string,
     vol.Optional(PROJECT_NAME, default='inbox'): vol.All(cv.string, vol.Lower),
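
This rename (and the matching ones for snapcast, soundtouch, mopar and broadlink below) prefixes the service name with the platform, because the service is now registered on the parent component's DOMAIN instead of a platform-specific one. A hedged sketch of the registration pattern this implies; the handler name and body are illustrative and not taken from this diff:

    def setup_platform(hass, config, add_devices, discovery_info=None):
        """Set up Todoist and register its service on the calendar domain."""
        def handle_new_task(call):
            # Create the Todoist task from call.data here.
            ...

        # DOMAIN now comes from homeassistant.components.calendar, so the
        # service is exposed as calendar.todoist_new_task rather than
        # todoist.new_task.
        hass.services.register(DOMAIN, SERVICE_NEW_TASK, handle_new_task,
                               schema=NEW_TASK_SERVICE_SCHEMA)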

View File

@@ -20,14 +20,19 @@ from homeassistant.components import recorder, script
 from homeassistant.components.http import HomeAssistantView
 from homeassistant.const import ATTR_HIDDEN
 from homeassistant.components.recorder.util import session_scope, execute
+import homeassistant.helpers.config_validation as cv

 _LOGGER = logging.getLogger(__name__)

 DOMAIN = 'history'
 DEPENDENCIES = ['recorder', 'http']

+CONF_ORDER = 'use_include_order'
+
 CONFIG_SCHEMA = vol.Schema({
-    DOMAIN: recorder.FILTER_SCHEMA,
+    DOMAIN: recorder.FILTER_SCHEMA.extend({
+        vol.Optional(CONF_ORDER, default=False): cv.boolean,
+    })
 }, extra=vol.ALLOW_EXTRA)

 SIGNIFICANT_DOMAINS = ('thermostat', 'climate')
@@ -242,8 +247,9 @@ def async_setup(hass, config):
     if include:
         filters.included_entities = include[CONF_ENTITIES]
         filters.included_domains = include[CONF_DOMAINS]

+    use_include_order = config[DOMAIN].get(CONF_ORDER)
+
-    hass.http.register_view(HistoryPeriodView(filters))
+    hass.http.register_view(HistoryPeriodView(filters, use_include_order))
     yield from hass.components.frontend.async_register_built_in_panel(
         'history', 'history', 'mdi:poll-box')
@@ -257,9 +263,10 @@ class HistoryPeriodView(HomeAssistantView):
     name = 'api:history:view-period'
     extra_urls = ['/api/history/period/{datetime}']

-    def __init__(self, filters):
+    def __init__(self, filters, use_include_order):
         """Initialize the history period view."""
         self.filters = filters
+        self.use_include_order = use_include_order

     @asyncio.coroutine
     def get(self, request, datetime=None):
@@ -305,19 +312,22 @@
         _LOGGER.debug(
             'Extracted %d states in %fs', sum(map(len, result)), elapsed)

-        # Reorder the result to respect the ordering given by any
-        # entities explicitly included in the configuration.
-        sorted_result = []
-        for order_entity in self.filters.included_entities:
-            for state_list in result:
-                if state_list[0].entity_id == order_entity:
-                    sorted_result.append(state_list)
-                    result.remove(state_list)
-                    break
-        sorted_result.extend(result)
+        # Optionally reorder the result to respect the ordering given
+        # by any entities explicitly included in the configuration.
+        if self.use_include_order:
+            result = list(result)
+            sorted_result = []
+            for order_entity in self.filters.included_entities:
+                for state_list in result:
+                    if state_list[0].entity_id == order_entity:
+                        sorted_result.append(state_list)
+                        result.remove(state_list)
+                        break
+            sorted_result.extend(result)
+            result = sorted_result

-        return self.json(sorted_result)
+        return self.json(result)


 class Filters(object):
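
For reference, the reordering that now only runs when use_include_order is set can be exercised on plain data; entity-id strings stand in for the real state lists:

    def reorder(result, included_entities):
        """Move lists whose first entry is an explicitly included entity up front."""
        result = list(result)
        sorted_result = []
        for order_entity in included_entities:
            for state_list in result:
                if state_list[0] == order_entity:  # real code compares .entity_id
                    sorted_result.append(state_list)
                    result.remove(state_list)
                    break
        sorted_result.extend(result)
        return sorted_result

    print(reorder([['sensor.b'], ['light.a'], ['sensor.c']],
                  ['light.a', 'sensor.c']))
    # [['light.a'], ['sensor.c'], ['sensor.b']]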

View File

@@ -31,42 +31,23 @@ def async_setup_platform(hass, config, async_add_devices, discovery_info=None):
         return

     endpoint = discovery_info['endpoint']
-    try:
-        discovery_info['color_capabilities'] \
-            = yield from endpoint.light_color['color_capabilities']
-    except (AttributeError, KeyError):
-        pass
-
-    if discovery_info.get('color_capabilities') is None:
-        # ZCL Version 4 devices don't support the color_capabilities attribute.
-        # In this version XY support is mandatory, but we need to probe to
-        # determine if the device supports color temperature.
-        discovery_info['color_capabilities'] = CAPABILITIES_COLOR_XY
-        result = yield from safe_read(
-            endpoint.light_color, ['color_temperature'])
-        if result.get('color_temperature') is not UNSUPPORTED_ATTRIBUTE:
-            discovery_info['color_capabilities'] |= CAPABILITIES_COLOR_TEMP
+    if hasattr(endpoint, 'light_color'):
+        caps = yield from zha.safe_read(
+            endpoint.light_color, ['color_capabilities'])
+        discovery_info['color_capabilities'] = caps.get('color_capabilities')
+
+    if discovery_info['color_capabilities'] is None:
+        # ZCL Version 4 devices don't support the color_capabilities
+        # attribute. In this version XY support is mandatory, but we need
+        # to probe to determine if the device supports color temperature.
+        discovery_info['color_capabilities'] = CAPABILITIES_COLOR_XY
+        result = yield from zha.safe_read(
+            endpoint.light_color, ['color_temperature'])
+        if result.get('color_temperature') is not UNSUPPORTED_ATTRIBUTE:
+            discovery_info['color_capabilities'] |= CAPABILITIES_COLOR_TEMP

     async_add_devices([Light(**discovery_info)], update_before_add=True)


-@asyncio.coroutine
-def safe_read(cluster, attributes):
-    """Swallow all exceptions from network read.
-
-    If we throw during initialization, setup fails. Rather have an
-    entity that exists, but is in a maybe wrong state, than no entity.
-    """
-    try:
-        result, _ = yield from cluster.read_attributes(
-            attributes,
-            allow_cache=False,
-        )
-        return result
-    except Exception:  # pylint: disable=broad-except
-        return {}
-
-
 class Light(zha.Entity, light.Light):
     """Representation of a ZHA or ZLL light."""
@@ -174,23 +155,23 @@ class Light(zha.Entity, light.Light):
     @asyncio.coroutine
     def async_update(self):
         """Retrieve latest state."""
-        result = yield from safe_read(self._endpoint.on_off, ['on_off'])
+        result = yield from zha.safe_read(self._endpoint.on_off, ['on_off'])
         self._state = result.get('on_off', self._state)

         if self._supported_features & light.SUPPORT_BRIGHTNESS:
-            result = yield from safe_read(self._endpoint.level,
-                                          ['current_level'])
+            result = yield from zha.safe_read(self._endpoint.level,
+                                              ['current_level'])
             self._brightness = result.get('current_level', self._brightness)

         if self._supported_features & light.SUPPORT_COLOR_TEMP:
-            result = yield from safe_read(self._endpoint.light_color,
-                                          ['color_temperature'])
+            result = yield from zha.safe_read(self._endpoint.light_color,
+                                              ['color_temperature'])
             self._color_temp = result.get('color_temperature',
                                           self._color_temp)

         if self._supported_features & light.SUPPORT_XY_COLOR:
-            result = yield from safe_read(self._endpoint.light_color,
-                                          ['current_x', 'current_y'])
+            result = yield from zha.safe_read(self._endpoint.light_color,
+                                              ['current_x', 'current_y'])
             if 'current_x' in result and 'current_y' in result:
                 self._xy_color = (result['current_x'], result['current_y'])
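
The color_capabilities value probed above is a ZCL bitmap, so the flags combine and test with bitwise operators. The concrete flag values below follow the ZCL definition (XY on bit 3, color temperature on bit 4) and are an assumption here, not something this diff shows:

    CAPABILITIES_COLOR_XY = 0x08    # assumed ZCL bit for XY color support
    CAPABILITIES_COLOR_TEMP = 0x10  # assumed ZCL bit for color temperature

    caps = CAPABILITIES_COLOR_XY      # the mandatory ZCL v4 fallback
    caps |= CAPABILITIES_COLOR_TEMP   # set when the probe finds the attribute

    print(bool(caps & CAPABILITIES_COLOR_XY))    # True
    print(bool(caps & CAPABILITIES_COLOR_TEMP))  # True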

View File

@@ -12,7 +12,7 @@ import voluptuous as vol
 from homeassistant.components.media_player import (
     SUPPORT_VOLUME_MUTE, SUPPORT_VOLUME_SET, SUPPORT_SELECT_SOURCE,
-    PLATFORM_SCHEMA, MediaPlayerDevice)
+    DOMAIN, PLATFORM_SCHEMA, MediaPlayerDevice)
 from homeassistant.const import (
     STATE_ON, STATE_OFF, STATE_IDLE, STATE_PLAYING, STATE_UNKNOWN, CONF_HOST,
     CONF_PORT, ATTR_ENTITY_ID)
@@ -22,7 +22,7 @@ REQUIREMENTS = ['snapcast==2.0.8']
 _LOGGER = logging.getLogger(__name__)

-DOMAIN = 'snapcast'
+DATA_KEY = 'snapcast'

 SERVICE_SNAPSHOT = 'snapcast_snapshot'
 SERVICE_RESTORE = 'snapcast_restore'
@@ -59,7 +59,7 @@ def async_setup_platform(hass, config, async_add_devices, discovery_info=None):
     def _handle_service(service):
         """Handle services."""
         entity_ids = service.data.get(ATTR_ENTITY_ID)
-        devices = [device for device in hass.data[DOMAIN]
+        devices = [device for device in hass.data[DATA_KEY]
                    if device.entity_id in entity_ids]
         for device in devices:
             if service.service == SERVICE_SNAPSHOT:
@@ -84,7 +84,7 @@ def async_setup_platform(hass, config, async_add_devices, discovery_info=None):
     groups = [SnapcastGroupDevice(group) for group in server.groups]
     clients = [SnapcastClientDevice(client) for client in server.clients]
     devices = groups + clients
-    hass.data[DOMAIN] = devices
+    hass.data[DATA_KEY] = devices
     async_add_devices(devices)
     return True

View File

@@ -14,7 +14,7 @@ from homeassistant.components.media_player import (
     SUPPORT_NEXT_TRACK, SUPPORT_PAUSE, SUPPORT_PREVIOUS_TRACK,
     SUPPORT_TURN_OFF, SUPPORT_VOLUME_MUTE, SUPPORT_VOLUME_STEP,
     SUPPORT_VOLUME_SET, SUPPORT_TURN_ON, SUPPORT_PLAY, MediaPlayerDevice,
-    PLATFORM_SCHEMA)
+    DOMAIN, PLATFORM_SCHEMA)
 from homeassistant.const import (CONF_HOST, CONF_NAME, STATE_OFF, CONF_PORT,
                                  STATE_PAUSED, STATE_PLAYING,
                                  STATE_UNAVAILABLE)
@@ -23,7 +23,6 @@ REQUIREMENTS = ['libsoundtouch==0.7.2']
 _LOGGER = logging.getLogger(__name__)

-DOMAIN = 'media_player'

 SERVICE_PLAY_EVERYWHERE = 'soundtouch_play_everywhere'
 SERVICE_CREATE_ZONE = 'soundtouch_create_zone'
 SERVICE_ADD_ZONE_SLAVE = 'soundtouch_add_zone_slave'

View File

@@ -22,11 +22,10 @@ def purge_old_data(instance, purge_days):
         # updated in a long time
         protected_states = session.query(States.state_id, States.event_id,
                                          func.max(States.last_updated)) \
-            .group_by(States.entity_id).subquery()
+            .group_by(States.entity_id).all()

-        protected_state_ids = session.query(States.state_id).join(
-            protected_states, States.state_id == protected_states.c.state_id)\
-            .subquery()
+        protected_state_ids = tuple((state[0] for state in protected_states))
+        protected_event_ids = tuple((state[1] for state in protected_states))

         deleted_rows = session.query(States) \
             .filter((States.last_updated < purge_before)) \
@@ -39,11 +38,6 @@ def purge_old_data(instance, purge_days):
         # Otherwise, if the SQL server has "ON DELETE CASCADE" as default, it
         # will delete the protected state when deleting its associated
         # event. Also, we would be producing NULLed foreign keys otherwise.
-        protected_event_ids = session.query(States.event_id).join(
-            protected_states, States.state_id == protected_states.c.state_id)\
-            .filter(~States.event_id is not None).subquery()
-
         deleted_rows = session.query(Events) \
             .filter((Events.time_fired < purge_before)) \
             .filter(~Events.event_id.in_(
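
The purge now materialises the protected rows with .all() and builds plain tuples of ids, which can be handed straight to an IN (...) filter instead of the previous correlated subqueries. The same extraction on stand-in rows:

    # Each row is (state_id, event_id, max_last_updated), as returned by .all().
    rows = [(11, 101, '2018-01-10 08:00:00'), (12, 102, '2018-01-12 09:30:00')]

    protected_state_ids = tuple(row[0] for row in rows)
    protected_event_ids = tuple(row[1] for row in rows)

    print(protected_state_ids)  # (11, 12)
    print(protected_event_ids)  # (101, 102)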

View File

@@ -33,9 +33,8 @@ ATTR_STATE = 'state'
 ATTR_NAME = 'name'
 ATTR_FIRE_EVENT = 'fire_event'
 ATTR_DATA_TYPE = 'data_type'
-ATTR_DATA_BITS = 'data_bits'
 ATTR_DUMMY = 'dummy'
-ATTR_OFF_DELAY = 'off_delay'
+CONF_DATA_BITS = 'data_bits'
 CONF_AUTOMATIC_ADD = 'automatic_add'
 CONF_DATA_TYPE = 'data_type'
 CONF_SIGNAL_REPETITIONS = 'signal_repetitions'
@@ -44,6 +43,7 @@ CONF_DATA_BITS = 'data_bits'
 CONF_DUMMY = 'dummy'
 CONF_DEVICE = 'device'
 CONF_DEBUG = 'debug'
+CONF_OFF_DELAY = 'off_delay'

 EVENT_BUTTON_PRESSED = 'button_pressed'

 DATA_TYPES = OrderedDict([
@@ -143,12 +143,13 @@ def get_rfx_object(packetid):
 def get_pt2262_deviceid(device_id, nb_data_bits):
     """Extract and return the address bits from a Lighting4/PT2262 packet."""
+    if nb_data_bits is None:
+        return
+
     import binascii
     try:
         data = bytearray.fromhex(device_id)
     except ValueError:
         return None
     mask = 0xFF & ~((1 << nb_data_bits) - 1)

     data[len(data)-1] &= mask
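
A worked example of the masking above, runnable outside Home Assistant; with 4 data bits the low nibble of the last byte carries command data and is cleared, leaving only the PT2262 address bits (the hex string is illustrative):

    import binascii

    device_id = '22670e'   # example Lighting4 packet id
    nb_data_bits = 4

    data = bytearray.fromhex(device_id)
    mask = 0xFF & ~((1 << nb_data_bits) - 1)   # 0xF0 for 4 data bits
    data[len(data) - 1] &= mask

    print(binascii.hexlify(data).decode())     # 226700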

View File

@@ -9,7 +9,7 @@ from datetime import timedelta
 import voluptuous as vol

-from homeassistant.components.sensor import PLATFORM_SCHEMA
+from homeassistant.components.sensor import DOMAIN, PLATFORM_SCHEMA
 from homeassistant.helpers.entity import Entity
 from homeassistant.const import (CONF_USERNAME, CONF_PASSWORD, CONF_PIN,
                                  ATTR_ATTRIBUTION, ATTR_COMMAND,
@@ -23,9 +23,8 @@ REQUIREMENTS = ['motorparts==1.0.2']
 _LOGGER = logging.getLogger(__name__)

 MIN_TIME_BETWEEN_UPDATES = timedelta(days=7)
-DOMAIN = 'mopar'
 ATTR_VEHICLE_INDEX = 'vehicle_index'
-SERVICE_REMOTE_COMMAND = 'remote_command'
+SERVICE_REMOTE_COMMAND = 'mopar_remote_command'
 COOKIE_FILE = 'mopar_cookies.pickle'

 PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({

View File

@@ -15,7 +15,8 @@ import voluptuous as vol
 from homeassistant.util.dt import utcnow
 from homeassistant.util import Throttle
-from homeassistant.components.switch import (SwitchDevice, PLATFORM_SCHEMA)
+from homeassistant.components.switch import (
+    SwitchDevice, DOMAIN, PLATFORM_SCHEMA)
 from homeassistant.const import (
     CONF_FRIENDLY_NAME, CONF_SWITCHES,
     CONF_COMMAND_OFF, CONF_COMMAND_ON,
@@ -28,12 +29,11 @@ _LOGGER = logging.getLogger(__name__)
 TIME_BETWEEN_UPDATES = timedelta(seconds=5)

-DOMAIN = 'broadlink'
 DEFAULT_NAME = 'Broadlink switch'
 DEFAULT_TIMEOUT = 10
 DEFAULT_RETRY = 3
-SERVICE_LEARN = 'learn_command'
-SERVICE_SEND = 'send_packet'
+SERVICE_LEARN = 'broadlink_learn_command'
+SERVICE_SEND = 'broadlink_send_packet'
 CONF_SLOTS = 'slots'

 RM_TYPES = ['rm', 'rm2', 'rm_mini', 'rm_pro_phicomm', 'rm2_home_plus',

View File

@@ -23,8 +23,6 @@ CONF_SCENARIO = 'scenario'
 CONF_SCS_ID = 'scs_id'

-DOMAIN = 'scsgate'
-
 PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
     vol.Required(CONF_DEVICES): vol.Schema({cv.slug: scsgate.SCSGATE_SCHEMA}),
 })

View File

@@ -315,3 +315,21 @@ def get_discovery_info(hass, discovery_info):
     all_discovery_info = hass.data.get(DISCOVERY_KEY, {})
     discovery_info = all_discovery_info.get(discovery_key, None)
     return discovery_info
+
+
+@asyncio.coroutine
+def safe_read(cluster, attributes):
+    """Swallow all exceptions from network read.
+
+    If we throw during initialization, setup fails. Rather have an entity that
+    exists, but is in a maybe wrong state, than no entity. This method should
+    probably only be used during initialization.
+    """
+    try:
+        result, _ = yield from cluster.read_attributes(
+            attributes,
+            allow_cache=False,
+        )
+        return result
+    except Exception:  # pylint: disable=broad-except
+        return {}

View File

@@ -2,7 +2,7 @@
 """Constants used by Home Assistant components."""
 MAJOR_VERSION = 0
 MINOR_VERSION = 61
-PATCH_VERSION = '0'
+PATCH_VERSION = '1'
 __short_version__ = '{}.{}'.format(MAJOR_VERSION, MINOR_VERSION)
 __version__ = '{}.{}'.format(__short_version__, PATCH_VERSION)
 REQUIRED_PYTHON_VER = (3, 4, 2)
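
With the bumped PATCH_VERSION the two format() calls above resolve as follows:

    MAJOR_VERSION = 0
    MINOR_VERSION = 61
    PATCH_VERSION = '1'

    __short_version__ = '{}.{}'.format(MAJOR_VERSION, MINOR_VERSION)
    __version__ = '{}.{}'.format(__short_version__, PATCH_VERSION)

    print(__short_version__)  # 0.61
    print(__version__)        # 0.61.1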