Mirror of https://github.com/home-assistant/core.git, synced 2025-07-23 05:07:41 +00:00
Commit 37bb626dd2

.coveragerc (25)
@@ -71,6 +74,9 @@ omit =
    homeassistant/components/envisalink.py
    homeassistant/components/*/envisalink.py
    homeassistant/components/gc100.py
    homeassistant/components/*/gc100.py
    homeassistant/components/google.py
    homeassistant/components/*/google.py
@@ -107,6 +110,9 @@ omit =
    homeassistant/components/lametric.py
    homeassistant/components/*/lametric.py
    homeassistant/components/linode.py
    homeassistant/components/*/linode.py
    homeassistant/components/lutron.py
    homeassistant/components/*/lutron.py
@@ -272,8 +278,10 @@ omit =
    homeassistant/components/camera/mjpeg.py
    homeassistant/components/camera/rpi_camera.py
    homeassistant/components/camera/onvif.py
    homeassistant/components/camera/ring.py
    homeassistant/components/camera/synology.py
    homeassistant/components/camera/yi.py
    homeassistant/components/climate/ephember.py
    homeassistant/components/climate/eq3btsmart.py
    homeassistant/components/climate/flexit.py
    homeassistant/components/climate/heatmiser.py
@@ -324,6 +332,7 @@ omit =
    homeassistant/components/emoncms_history.py
    homeassistant/components/emulated_hue/upnp.py
    homeassistant/components/fan/mqtt.py
    homeassistant/components/fan/xiaomi_miio.py
    homeassistant/components/feedreader.py
    homeassistant/components/foursquare.py
    homeassistant/components/ifttt.py
@@ -411,6 +420,7 @@ omit =
    homeassistant/components/notify/aws_sns.py
    homeassistant/components/notify/aws_sqs.py
    homeassistant/components/notify/ciscospark.py
    homeassistant/components/notify/clickatell.py
    homeassistant/components/notify/clicksend.py
    homeassistant/components/notify/clicksend_tts.py
    homeassistant/components/notify/discord.py
@@ -444,8 +454,10 @@ omit =
    homeassistant/components/notify/telstra.py
    homeassistant/components/notify/twitter.py
    homeassistant/components/notify/xmpp.py
    homeassistant/components/notify/yessssms.py
    homeassistant/components/nuimo_controller.py
    homeassistant/components/prometheus.py
    homeassistant/components/remember_the_milk/__init__.py
    homeassistant/components/remote/harmony.py
    homeassistant/components/remote/itach.py
    homeassistant/components/scene/hunterdouglas_powerview.py
@@ -462,7 +474,6 @@ omit =
    homeassistant/components/sensor/broadlink.py
    homeassistant/components/sensor/buienradar.py
    homeassistant/components/sensor/citybikes.py
    homeassistant/components/sensor/coinmarketcap.py
    homeassistant/components/sensor/cert_expiry.py
    homeassistant/components/sensor/comed_hourly_pricing.py
    homeassistant/components/sensor/cpuspeed.py
@@ -470,6 +481,7 @@ omit =
    homeassistant/components/sensor/cups.py
    homeassistant/components/sensor/currencylayer.py
    homeassistant/components/sensor/darksky.py
    homeassistant/components/sensor/deluge.py
    homeassistant/components/sensor/deutsche_bahn.py
    homeassistant/components/sensor/dht.py
    homeassistant/components/sensor/dnsip.py
@@ -497,17 +509,18 @@ omit =
    homeassistant/components/sensor/gpsd.py
    homeassistant/components/sensor/gtfs.py
    homeassistant/components/sensor/haveibeenpwned.py
    homeassistant/components/sensor/hddtemp.py
    homeassistant/components/sensor/hp_ilo.py
    homeassistant/components/sensor/htu21d.py
    homeassistant/components/sensor/hydroquebec.py
    homeassistant/components/sensor/imap.py
    homeassistant/components/sensor/imap_email_content.py
    homeassistant/components/sensor/influxdb.py
    homeassistant/components/sensor/irish_rail_transport.py
    homeassistant/components/sensor/kwb.py
    homeassistant/components/sensor/lastfm.py
    homeassistant/components/sensor/linux_battery.py
    homeassistant/components/sensor/loopenergy.py
    homeassistant/components/sensor/luftdaten.py
    homeassistant/components/sensor/lyft.py
    homeassistant/components/sensor/metoffice.py
    homeassistant/components/sensor/miflora.py
@@ -515,6 +528,7 @@ omit =
    homeassistant/components/sensor/mopar.py
    homeassistant/components/sensor/mqtt_room.py
    homeassistant/components/sensor/mvglive.py
    homeassistant/components/sensor/nederlandse_spoorwegen.py
    homeassistant/components/sensor/netdata.py
    homeassistant/components/sensor/neurio_energy.py
    homeassistant/components/sensor/nut.py
@@ -551,6 +565,7 @@ omit =
    homeassistant/components/sensor/swiss_public_transport.py
    homeassistant/components/sensor/synologydsm.py
    homeassistant/components/sensor/systemmonitor.py
    homeassistant/components/sensor/sytadin.py
    homeassistant/components/sensor/tank_utility.py
    homeassistant/components/sensor/ted5000.py
    homeassistant/components/sensor/temper.py
@@ -565,6 +580,7 @@ omit =
    homeassistant/components/sensor/ups.py
    homeassistant/components/sensor/vasttrafik.py
    homeassistant/components/sensor/waqi.py
    homeassistant/components/sensor/whois.py
    homeassistant/components/sensor/worldtidesinfo.py
    homeassistant/components/sensor/worxlandroid.py
    homeassistant/components/sensor/xbox_live.py
@@ -576,6 +592,7 @@ omit =
    homeassistant/components/switch/anel_pwrctrl.py
    homeassistant/components/switch/arest.py
    homeassistant/components/switch/broadlink.py
    homeassistant/components/switch/deluge.py
    homeassistant/components/switch/digitalloggers.py
    homeassistant/components/switch/dlink.py
    homeassistant/components/switch/edimax.py
@@ -588,17 +605,19 @@ omit =
    homeassistant/components/switch/orvibo.py
    homeassistant/components/switch/pilight.py
    homeassistant/components/switch/pulseaudio_loopback.py
    homeassistant/components/switch/rainbird.py
    homeassistant/components/switch/rainmachine.py
    homeassistant/components/switch/rest.py
    homeassistant/components/switch/rpi_rf.py
    homeassistant/components/switch/snmp.py
    homeassistant/components/switch/tplink.py
    homeassistant/components/switch/telnet.py
    homeassistant/components/switch/transmission.py
    homeassistant/components/switch/wake_on_lan.py
    homeassistant/components/switch/xiaomi_miio.py
    homeassistant/components/telegram_bot/*
    homeassistant/components/thingspeak.py
    homeassistant/components/tts/amazon_polly.py
    homeassistant/components/tts/microsoft.py
    homeassistant/components/tts/picotts.py
    homeassistant/components/vacuum/roomba.py
    homeassistant/components/weather/bom.py
.gitmodules (vendored, 3)
@@ -1,3 +0,0 @@
[submodule "homeassistant/components/frontend/www_static/home-assistant-polymer"]
    path = homeassistant/components/frontend/www_static/home-assistant-polymer
    url = https://github.com/home-assistant/home-assistant-polymer.git
CODEOWNERS (11)
@@ -41,6 +41,7 @@ homeassistant/components/*/zwave.py @home-assistant/z-wave
# Indiviudal components
homeassistant/components/alarm_control_panel/egardia.py @jeroenterheerdt
homeassistant/components/camera/yi.py @bachya
homeassistant/components/climate/ephember.py @ttroy50
homeassistant/components/climate/eq3btsmart.py @rytilahti
homeassistant/components/climate/sensibo.py @andrey-git
homeassistant/components/cover/template.py @PhracturedBlue
@@ -50,17 +51,21 @@ homeassistant/components/light/tplink.py @rytilahti
homeassistant/components/light/yeelight.py @rytilahti
homeassistant/components/media_player/kodi.py @armills
homeassistant/components/media_player/monoprice.py @etsinko
homeassistant/components/media_player/yamaha_musiccast.py @jalmeroth
homeassistant/components/sensor/airvisual.py @bachya
homeassistant/components/sensor/irish_rail_transport.py @ttroy50
homeassistant/components/sensor/miflora.py @danielhiversen
homeassistant/components/sensor/sytadin.py @gautric
homeassistant/components/sensor/tibber.py @danielhiversen
homeassistant/components/sensor/waqi.py @andrey-git
homeassistant/components/switch/rainmachine.py @bachya
homeassistant/components/switch/tplink.py @rytilahti
homeassistant/components/xiaomi_aqara.py @danielhiversen @syssi

homeassistant/components/*/axis.py @Kane610
homeassistant/components/*/broadlink.py @danielhiversen
homeassistant/components/*/rfxtrx.py @danielhiversen
homeassistant/components/tesla.py @zabuldon
homeassistant/components/*/tesla.py @zabuldon
homeassistant/components/*/xiaomi_aqara.py @danielhiversen
homeassistant/components/*/xiaomi_miio.py @rytilahti
homeassistant/components/*/tradfri.py @ggravlingen
homeassistant/components/*/xiaomi_aqara.py @danielhiversen @syssi
homeassistant/components/*/xiaomi_miio.py @rytilahti @syssi
@@ -3,7 +3,7 @@
# This way, the development image and the production image are kept in sync.

FROM python:3.6
MAINTAINER Paulus Schoutsen <Paulus@PaulusSchoutsen.nl>
LABEL maintainer="Paulus Schoutsen <Paulus@PaulusSchoutsen.nl>"

# Uncomment any of the following lines to disable the installation.
#ENV INSTALL_TELLSTICK no
@@ -1,5 +1,4 @@
include README.rst
include LICENSE.md
graft homeassistant
prune homeassistant/components/frontend/www_static/home-assistant-polymer
recursive-exclude * *.py[co]
docs/screenshot-components.png (BIN, executable file → normal file). Binary file not shown. Before: 46 KiB, after: 205 KiB.
@@ -88,7 +88,7 @@ def async_from_config_dict(config: Dict[str, Any],
    if sys.version_info[:2] < (3, 5):
        _LOGGER.warning(
            'Python 3.4 support has been deprecated and will be removed in '
            'the begining of 2018. Please upgrade Python or your operating '
            'the beginning of 2018. Please upgrade Python or your operating '
            'system. More info: https://home-assistant.io/blog/2017/10/06/'
            'deprecating-python-3.4-support/'
        )
@@ -1,65 +1,61 @@
alarm_disarm:
  description: Send the alarm the command for disarm
# Describes the format for available alarm control panel services

alarm_disarm:
  description: Send the alarm the command for disarm.
  fields:
    entity_id:
      description: Name of alarm control panel to disarm
      description: Name of alarm control panel to disarm.
      example: 'alarm_control_panel.downstairs'
    code:
      description: An optional code to disarm the alarm control panel with
      description: An optional code to disarm the alarm control panel with.
      example: 1234

alarm_arm_home:
  description: Send the alarm the command for arm home

  description: Send the alarm the command for arm home.
  fields:
    entity_id:
      description: Name of alarm control panel to arm home
      description: Name of alarm control panel to arm home.
      example: 'alarm_control_panel.downstairs'
    code:
      description: An optional code to arm home the alarm control panel with
      description: An optional code to arm home the alarm control panel with.
      example: 1234

alarm_arm_away:
  description: Send the alarm the command for arm away

  description: Send the alarm the command for arm away.
  fields:
    entity_id:
      description: Name of alarm control panel to arm away
      description: Name of alarm control panel to arm away.
      example: 'alarm_control_panel.downstairs'
    code:
      description: An optional code to arm away the alarm control panel with
      description: An optional code to arm away the alarm control panel with.
      example: 1234

alarm_arm_night:
  description: Send the alarm the command for arm night

  description: Send the alarm the command for arm night.
  fields:
    entity_id:
      description: Name of alarm control panel to arm night
      description: Name of alarm control panel to arm night.
      example: 'alarm_control_panel.downstairs'
    code:
      description: An optional code to arm night the alarm control panel with
      description: An optional code to arm night the alarm control panel with.
      example: 1234

alarm_trigger:
  description: Send the alarm the command for trigger

  description: Send the alarm the command for trigger.
  fields:
    entity_id:
      description: Name of alarm control panel to trigger
      description: Name of alarm control panel to trigger.
      example: 'alarm_control_panel.downstairs'
    code:
      description: An optional code to trigger the alarm control panel with
      description: An optional code to trigger the alarm control panel with.
      example: 1234

envisalink_alarm_keypress:
  description: Send custom keypresses to the alarm

  description: Send custom keypresses to the alarm.
  fields:
    entity_id:
      description: Name of the alarm control panel to trigger
      description: Name of the alarm control panel to trigger.
      example: 'alarm_control_panel.downstairs'
    keypress:
      description: 'String to send to the alarm panel (1-6 characters)'
      description: 'String to send to the alarm panel (1-6 characters).'
      example: '*71'
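The entries above only document service parameters. As an illustration (not part of this commit), a Python helper that invokes one of the documented services through the service registry could look like the sketch below; the entity_id and code values are example placeholders.

def arm_home_downstairs(hass):
    """Illustrative helper: call the documented alarm_arm_home service."""
    # 'hass' is a running Home Assistant instance; values are examples only.
    hass.services.call('alarm_control_panel', 'alarm_arm_home', {
        'entity_id': 'alarm_control_panel.downstairs',
        'code': '1234',
    }, blocking=True)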
@@ -16,7 +16,7 @@ from homeassistant.const import (
    STATE_ALARM_ARMED_HOME, STATE_ALARM_ARMED_NIGHT, STATE_ALARM_DISARMED,
    STATE_ALARM_ARMING, STATE_ALARM_DISARMING, STATE_UNKNOWN, CONF_NAME)

REQUIREMENTS = ['total_connect_client==0.11']
REQUIREMENTS = ['total_connect_client==0.12']

_LOGGER = logging.getLogger(__name__)
@@ -1,11 +1,13 @@
"""Support for alexa Smart Home Skill API."""
import asyncio
import logging
import math
from uuid import uuid4

from homeassistant.const import (
    ATTR_SUPPORTED_FEATURES, ATTR_ENTITY_ID, SERVICE_TURN_ON, SERVICE_TURN_OFF)
from homeassistant.components import switch, light
import homeassistant.util.color as color_util
from homeassistant.util.decorator import Registry

HANDLERS = Registry()
@@ -22,7 +24,10 @@ MAPPING_COMPONENT = {
    switch.DOMAIN: ['SWITCH', ('Alexa.PowerController',), None],
    light.DOMAIN: [
        'LIGHT', ('Alexa.PowerController',), {
            light.SUPPORT_BRIGHTNESS: 'Alexa.BrightnessController'
            light.SUPPORT_BRIGHTNESS: 'Alexa.BrightnessController',
            light.SUPPORT_RGB_COLOR: 'Alexa.ColorController',
            light.SUPPORT_XY_COLOR: 'Alexa.ColorController',
            light.SUPPORT_COLOR_TEMP: 'Alexa.ColorTemperatureController',
        }
    ],
}
@@ -193,11 +198,114 @@ def async_api_turn_off(hass, request, entity):
@asyncio.coroutine
def async_api_set_brightness(hass, request, entity):
    """Process a set brightness request."""
    brightness = request[API_PAYLOAD]['brightness']
    brightness = int(request[API_PAYLOAD]['brightness'])

    yield from hass.services.async_call(entity.domain, SERVICE_TURN_ON, {
        ATTR_ENTITY_ID: entity.entity_id,
        light.ATTR_BRIGHTNESS: brightness,
        light.ATTR_BRIGHTNESS_PCT: brightness,
    }, blocking=True)

    return api_message(request)


@HANDLERS.register(('Alexa.BrightnessController', 'AdjustBrightness'))
@extract_entity
@asyncio.coroutine
def async_api_adjust_brightness(hass, request, entity):
    """Process a adjust brightness request."""
    brightness_delta = int(request[API_PAYLOAD]['brightnessDelta'])

    # read current state
    try:
        current = math.floor(
            int(entity.attributes.get(light.ATTR_BRIGHTNESS)) / 255 * 100)
    except ZeroDivisionError:
        current = 0

    # set brightness
    brightness = max(0, brightness_delta + current)
    yield from hass.services.async_call(entity.domain, SERVICE_TURN_ON, {
        ATTR_ENTITY_ID: entity.entity_id,
        light.ATTR_BRIGHTNESS_PCT: brightness,
    }, blocking=True)

    return api_message(request)


@HANDLERS.register(('Alexa.ColorController', 'SetColor'))
@extract_entity
@asyncio.coroutine
def async_api_set_color(hass, request, entity):
    """Process a set color request."""
    supported = entity.attributes.get(ATTR_SUPPORTED_FEATURES)
    rgb = color_util.color_hsb_to_RGB(
        float(request[API_PAYLOAD]['color']['hue']),
        float(request[API_PAYLOAD]['color']['saturation']),
        float(request[API_PAYLOAD]['color']['brightness'])
    )

    if supported & light.SUPPORT_RGB_COLOR > 0:
        yield from hass.services.async_call(entity.domain, SERVICE_TURN_ON, {
            ATTR_ENTITY_ID: entity.entity_id,
            light.ATTR_RGB_COLOR: rgb,
        }, blocking=True)
    else:
        xyz = color_util.color_RGB_to_xy(*rgb)
        yield from hass.services.async_call(entity.domain, SERVICE_TURN_ON, {
            ATTR_ENTITY_ID: entity.entity_id,
            light.ATTR_XY_COLOR: (xyz[0], xyz[1]),
            light.ATTR_BRIGHTNESS: xyz[2],
        }, blocking=True)

    return api_message(request)


@HANDLERS.register(('Alexa.ColorTemperatureController', 'SetColorTemperature'))
@extract_entity
@asyncio.coroutine
def async_api_set_color_temperature(hass, request, entity):
    """Process a set color temperature request."""
    kelvin = int(request[API_PAYLOAD]['colorTemperatureInKelvin'])

    yield from hass.services.async_call(entity.domain, SERVICE_TURN_ON, {
        ATTR_ENTITY_ID: entity.entity_id,
        light.ATTR_KELVIN: kelvin,
    }, blocking=True)

    return api_message(request)


@HANDLERS.register(
    ('Alexa.ColorTemperatureController', 'DecreaseColorTemperature'))
@extract_entity
@asyncio.coroutine
def async_api_decrease_color_temp(hass, request, entity):
    """Process a decrease color temperature request."""
    current = int(entity.attributes.get(light.ATTR_COLOR_TEMP))
    max_mireds = int(entity.attributes.get(light.ATTR_MAX_MIREDS))

    value = min(max_mireds, current + 50)
    yield from hass.services.async_call(entity.domain, SERVICE_TURN_ON, {
        ATTR_ENTITY_ID: entity.entity_id,
        light.ATTR_COLOR_TEMP: value,
    }, blocking=True)

    return api_message(request)


@HANDLERS.register(
    ('Alexa.ColorTemperatureController', 'IncreaseColorTemperature'))
@extract_entity
@asyncio.coroutine
def async_api_increase_color_temp(hass, request, entity):
    """Process a increase color temperature request."""
    current = int(entity.attributes.get(light.ATTR_COLOR_TEMP))
    min_mireds = int(entity.attributes.get(light.ATTR_MIN_MIREDS))

    value = max(min_mireds, current - 50)
    yield from hass.services.async_call(entity.domain, SERVICE_TURN_ON, {
        ATTR_ENTITY_ID: entity.entity_id,
        light.ATTR_COLOR_TEMP: value,
    }, blocking=True)

    return api_message(request)
@@ -262,7 +262,11 @@ class APIEventView(HomeAssistantView):
    def post(self, request, event_type):
        """Fire events."""
        body = yield from request.text()
        event_data = json.loads(body) if body else None
        try:
            event_data = json.loads(body) if body else None
        except ValueError:
            return self.json_message('Event data should be valid JSON',
                                     HTTP_BAD_REQUEST)

        if event_data is not None and not isinstance(event_data, dict):
            return self.json_message('Event data should be a JSON object',
@@ -309,7 +313,11 @@ class APIDomainServicesView(HomeAssistantView):
        """
        hass = request.app['hass']
        body = yield from request.text()
        data = json.loads(body) if body else None
        try:
            data = json.loads(body) if body else None
        except ValueError:
            return self.json_message('Data should be valid JSON',
                                     HTTP_BAD_REQUEST)

        with AsyncTrackStates(hass) as changed_states:
            yield from hass.services.async_call(domain, service, data, True)
@@ -21,7 +21,7 @@ _LOGGER = logging.getLogger(__name__)
TRIGGER_SCHEMA = vol.Schema({
    vol.Required(CONF_PLATFORM): 'event',
    vol.Required(CONF_EVENT_TYPE): cv.string,
    vol.Optional(CONF_EVENT_DATA, default={}): dict,
    vol.Optional(CONF_EVENT_DATA): dict,
})
@@ -37,6 +37,8 @@ def async_trigger(hass, config, action):
    def handle_event(event):
        """Listen for events and calls the action when data matches."""
        if event_data_schema:
            # Check that the event data matches the configured
            # schema if one was provided
            try:
                event_data_schema(event.data)
            except vol.Invalid:
@@ -38,13 +38,14 @@ def async_trigger(hass, config, action):
    time_delta = config.get(CONF_FOR)
    value_template = config.get(CONF_VALUE_TEMPLATE)
    async_remove_track_same = None
    already_triggered = False

    if value_template is not None:
        value_template.hass = hass

    @callback
    def check_numeric_state(entity, from_s, to_s):
        """Return True if they should trigger."""
        """Return True if criteria are now met."""
        if to_s is None:
            return False
@@ -56,51 +57,39 @@ def async_trigger(hass, config, action):
                'above': above,
            }
        }

        # If new one doesn't match, nothing to do
        if not condition.async_numeric_state(
                hass, to_s, below, above, value_template, variables):
            return False

        return True
        return condition.async_numeric_state(
            hass, to_s, below, above, value_template, variables)

    @callback
    def state_automation_listener(entity, from_s, to_s):
        """Listen for state changes and calls action."""
        nonlocal async_remove_track_same

        if not check_numeric_state(entity, from_s, to_s):
            return

        variables = {
            'trigger': {
                'platform': 'numeric_state',
                'entity_id': entity,
                'below': below,
                'above': above,
                'from_state': from_s,
                'to_state': to_s,
            }
        }

        # Only match if old didn't exist or existed but didn't match
        # Written as: skip if old one did exist and matched
        if from_s is not None and condition.async_numeric_state(
                hass, from_s, below, above, value_template, variables):
            return
        nonlocal already_triggered, async_remove_track_same

        @callback
        def call_action():
            """Call action with right context."""
            hass.async_run_job(action, variables)
            hass.async_run_job(action, {
                'trigger': {
                    'platform': 'numeric_state',
                    'entity_id': entity,
                    'below': below,
                    'above': above,
                    'from_state': from_s,
                    'to_state': to_s,
                }
            })

        if not time_delta:
            call_action()
            return
        matching = check_numeric_state(entity, from_s, to_s)

        async_remove_track_same = async_track_same_state(
            hass, time_delta, call_action, entity_ids=entity_id,
            async_check_same_func=check_numeric_state)
        if matching and not already_triggered:
            if time_delta:
                async_remove_track_same = async_track_same_state(
                    hass, time_delta, call_action, entity_ids=entity_id,
                    async_check_same_func=check_numeric_state)
            else:
                call_action()

        already_triggered = matching

    unsub = async_track_state_change(
        hass, entity_id, state_automation_listener)
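The rework above replaces the old "did the previous state already match?" comparison with an explicit already_triggered flag, so the trigger fires only on the transition into the matching range. The sketch below isolates that edge-trigger pattern outside Home Assistant; all names are illustrative.

# Minimal sketch of the edge-trigger pattern introduced above: fire only when the
# numeric condition flips from not-matching to matching.
def make_edge_trigger(matches, fire):
    """Return a listener that calls fire(value) on the False -> True transition."""
    already_triggered = False

    def listener(value):
        nonlocal already_triggered
        matching = matches(value)
        if matching and not already_triggered:
            fire(value)
        already_triggered = matching

    return listener


events = []
listener = make_edge_trigger(lambda value: value > 20, events.append)
for value in (18, 21, 25, 19, 22):
    listener(value)
assert events == [21, 22]  # fires once per crossing, not on every matching value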
@@ -1,6 +1,7 @@
# Describes the format for available automation services

turn_on:
  description: Enable an automation.

  fields:
    entity_id:
      description: Name of the automation to turn on.
@@ -8,7 +9,6 @@ turn_on:

turn_off:
  description: Disable an automation.

  fields:
    entity_id:
      description: Name of the automation to turn off.
@@ -16,7 +16,6 @@ turn_off:

toggle:
  description: Toggle an automation.

  fields:
    entity_id:
      description: Name of the automation to toggle on/off.
@@ -24,7 +23,6 @@ toggle:

trigger:
  description: Trigger the action of an automation.

  fields:
    entity_id:
      description: Name of the automation to trigger.
@@ -11,19 +11,20 @@ import os

import voluptuous as vol

from homeassistant.components.discovery import SERVICE_AXIS
from homeassistant.config import load_yaml_config_file
from homeassistant.const import (ATTR_LOCATION, ATTR_TRIPPED,
                                 CONF_HOST, CONF_INCLUDE, CONF_NAME,
                                 CONF_PASSWORD, CONF_PORT, CONF_TRIGGER_TIME,
                                 CONF_USERNAME, EVENT_HOMEASSISTANT_STOP)
from homeassistant.components.discovery import SERVICE_AXIS
                                 CONF_EVENT, CONF_HOST, CONF_INCLUDE,
                                 CONF_NAME, CONF_PASSWORD, CONF_PORT,
                                 CONF_TRIGGER_TIME, CONF_USERNAME,
                                 EVENT_HOMEASSISTANT_STOP)
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers import discovery
from homeassistant.helpers.dispatcher import async_dispatcher_send
from homeassistant.helpers.dispatcher import dispatcher_send
from homeassistant.helpers.entity import Entity


REQUIREMENTS = ['axis==12']
REQUIREMENTS = ['axis==14']

_LOGGER = logging.getLogger(__name__)

@@ -87,10 +88,13 @@ def request_configuration(hass, config, name, host, serialnumber):
        configurator.notify_errors(request_id,
                                   "Functionality mandatory.")
        return False

    callback_data[CONF_INCLUDE] = callback_data[CONF_INCLUDE].split()
    callback_data[CONF_HOST] = host

    if CONF_NAME not in callback_data:
        callback_data[CONF_NAME] = name

    try:
        device_config = DEVICE_SCHEMA(callback_data)
    except vol.Invalid:
@@ -101,7 +105,6 @@ def request_configuration(hass, config, name, host, serialnumber):
    if setup_device(hass, config, device_config):
        config_file = _read_config(hass)
        config_file[serialnumber] = dict(device_config)
        del config_file[serialnumber]['hass']
        _write_config(hass, config_file)
        configurator.request_done(request_id)
    else:
@@ -146,10 +149,10 @@ def request_configuration(hass, config, name, host, serialnumber):
def setup(hass, config):
    """Common setup for Axis devices."""
    def _shutdown(call):  # pylint: disable=unused-argument
        """Stop the metadatastream on shutdown."""
        """Stop the event stream on shutdown."""
        for serialnumber, device in AXIS_DEVICES.items():
            _LOGGER.info("Stopping metadatastream for %s.", serialnumber)
            device.stop_metadatastream()
            _LOGGER.info("Stopping event stream for %s.", serialnumber)
            device.stop()

    hass.bus.listen_once(EVENT_HOMEASSISTANT_STOP, _shutdown)

@@ -162,7 +165,7 @@ def setup(hass, config):
        if serialnumber not in AXIS_DEVICES:
            config_file = _read_config(hass)
            if serialnumber in config_file:
                # Device config saved to file
                # Device config previously saved to file
                try:
                    device_config = DEVICE_SCHEMA(config_file[serialnumber])
                    device_config[CONF_HOST] = host
@@ -178,10 +181,8 @@ def setup(hass, config):
        else:
            # Device already registered, but on a different IP
            device = AXIS_DEVICES[serialnumber]
            device.url = host
            async_dispatcher_send(hass,
                                  DOMAIN + '_' + device.name + '_new_ip',
                                  host)
            device.config.host = host
            dispatcher_send(hass, DOMAIN + '_' + device.name + '_new_ip', host)

    # Register discovery service
    discovery.listen(hass, SERVICE_AXIS, axis_device_discovered)
@@ -202,10 +203,11 @@ def setup(hass, config):
        """Service to send a message."""
        for _, device in AXIS_DEVICES.items():
            if device.name == call.data[CONF_NAME]:
                response = device.do_request(call.data[SERVICE_CGI],
                                             call.data[SERVICE_ACTION],
                                             call.data[SERVICE_PARAM])
                hass.bus.async_fire(SERVICE_VAPIX_CALL_RESPONSE, response)
                response = device.vapix.do_request(
                    call.data[SERVICE_CGI],
                    call.data[SERVICE_ACTION],
                    call.data[SERVICE_PARAM])
                hass.bus.fire(SERVICE_VAPIX_CALL_RESPONSE, response)
                return True
        _LOGGER.info("Couldn\'t find device %s", call.data[CONF_NAME])
        return False
@@ -216,7 +218,6 @@ def setup(hass, config):
                           vapix_service,
                           descriptions[DOMAIN][SERVICE_VAPIX_CALL],
                           schema=SERVICE_SCHEMA)

    return True
@@ -224,9 +225,28 @@ def setup_device(hass, config, device_config):
    """Set up device."""
    from axis import AxisDevice

    device_config['hass'] = hass
    device = AxisDevice(device_config)  # Initialize device
    enable_metadatastream = False
    def signal_callback(action, event):
        """Callback to configure events when initialized on event stream."""
        if action == 'add':
            event_config = {
                CONF_EVENT: event,
                CONF_NAME: device_config[CONF_NAME],
                ATTR_LOCATION: device_config[ATTR_LOCATION],
                CONF_TRIGGER_TIME: device_config[CONF_TRIGGER_TIME]
            }
            component = event.event_platform
            discovery.load_platform(hass,
                                    component,
                                    DOMAIN,
                                    event_config,
                                    config)

    event_types = list(filter(lambda x: x in device_config[CONF_INCLUDE],
                              EVENT_TYPES))
    device_config['events'] = event_types
    device_config['signal'] = signal_callback
    device = AxisDevice(hass.loop, **device_config)
    device.name = device_config[CONF_NAME]

    if device.serial_number is None:
        # If there is no serial number a connection could not be made
@@ -234,16 +254,10 @@ def setup_device(hass, config, device_config):
        return False

    for component in device_config[CONF_INCLUDE]:
        if component in EVENT_TYPES:
            # Sensors are created by device calling event_initialized
            # when receiving initialize messages on metadatastream
            device.add_event_topic(convert(component, 'type', 'subscribe'))
            if not enable_metadatastream:
                enable_metadatastream = True
        else:
        if component == 'camera':
            camera_config = {
                CONF_HOST: device_config[CONF_HOST],
                CONF_NAME: device_config[CONF_NAME],
                CONF_HOST: device_config[CONF_HOST],
                CONF_PORT: device_config[CONF_PORT],
                CONF_USERNAME: device_config[CONF_USERNAME],
                CONF_PASSWORD: device_config[CONF_PASSWORD]
@@ -254,17 +268,8 @@ def setup_device(hass, config, device_config):
                                    camera_config,
                                    config)

    if enable_metadatastream:
        device.initialize_new_event = event_initialized
        if not device.initiate_metadatastream():
            hass.components.persistent_notification.create(
                'Dependency missing for sensors, '
                'please check documentation',
                title=DOMAIN,
                notification_id='axis_notification')

    AXIS_DEVICES[device.serial_number] = device

    hass.add_job(device.start)
    return True
@@ -287,25 +292,16 @@ def _write_config(hass, config):
        outfile.write(data)


def event_initialized(event):
    """Register event initialized on metadatastream here."""
    hass = event.device_config('hass')
    discovery.load_platform(hass,
                            convert(event.topic, 'topic', 'platform'),
                            DOMAIN, {'axis_event': event})


class AxisDeviceEvent(Entity):
    """Representation of a Axis device event."""

    def __init__(self, axis_event):
    def __init__(self, event_config):
        """Initialize the event."""
        self.axis_event = axis_event
        self._event_class = convert(self.axis_event.topic, 'topic', 'class')
        self._name = '{}_{}_{}'.format(self.axis_event.device_name,
                                       convert(self.axis_event.topic,
                                               'topic', 'type'),
        self.axis_event = event_config[CONF_EVENT]
        self._name = '{}_{}_{}'.format(event_config[CONF_NAME],
                                       self.axis_event.event_type,
                                       self.axis_event.id)
        self.location = event_config[ATTR_LOCATION]
        self.axis_event.callback = self._update_callback

    def _update_callback(self):
@@ -321,7 +317,7 @@ class AxisDeviceEvent(Entity):
    @property
    def device_class(self):
        """Return the class of the event."""
        return self._event_class
        return self.axis_event.event_class

    @property
    def should_poll(self):
@@ -336,52 +332,6 @@ class AxisDeviceEvent(Entity):
        tripped = self.axis_event.is_tripped
        attr[ATTR_TRIPPED] = 'True' if tripped else 'False'

        location = self.axis_event.device_config(ATTR_LOCATION)
        if location:
            attr[ATTR_LOCATION] = location
        attr[ATTR_LOCATION] = self.location

        return attr


def convert(item, from_key, to_key):
    """Translate between Axis and HASS syntax."""
    for entry in REMAP:
        if entry[from_key] == item:
            return entry[to_key]


REMAP = [{'type': 'motion',
          'class': 'motion',
          'topic': 'tns1:VideoAnalytics/tnsaxis:MotionDetection',
          'subscribe': 'onvif:VideoAnalytics/axis:MotionDetection',
          'platform': 'binary_sensor'},
         {'type': 'vmd3',
          'class': 'motion',
          'topic': 'tns1:RuleEngine/tnsaxis:VMD3/vmd3_video_1',
          'subscribe': 'onvif:RuleEngine/axis:VMD3/vmd3_video_1',
          'platform': 'binary_sensor'},
         {'type': 'pir',
          'class': 'motion',
          'topic': 'tns1:Device/tnsaxis:Sensor/PIR',
          'subscribe': 'onvif:Device/axis:Sensor/axis:PIR',
          'platform': 'binary_sensor'},
         {'type': 'sound',
          'class': 'sound',
          'topic': 'tns1:AudioSource/tnsaxis:TriggerLevel',
          'subscribe': 'onvif:AudioSource/axis:TriggerLevel',
          'platform': 'binary_sensor'},
         {'type': 'daynight',
          'class': 'light',
          'topic': 'tns1:VideoSource/tnsaxis:DayNightVision',
          'subscribe': 'onvif:VideoSource/axis:DayNightVision',
          'platform': 'binary_sensor'},
         {'type': 'tampering',
          'class': 'safety',
          'topic': 'tns1:VideoSource/tnsaxis:Tampering',
          'subscribe': 'onvif:VideoSource/axis:Tampering',
          'platform': 'binary_sensor'},
         {'type': 'input',
          'class': 'input',
          'topic': 'tns1:Device/tnsaxis:IO/Port',
          'subscribe': 'onvif:Device/axis:IO/Port',
          'platform': 'binary_sensor'}, ]
@@ -21,19 +21,19 @@ _LOGGER = logging.getLogger(__name__)

def setup_platform(hass, config, add_devices, discovery_info=None):
    """Setup Axis device event."""
    add_devices([AxisBinarySensor(discovery_info['axis_event'], hass)], True)
    add_devices([AxisBinarySensor(hass, discovery_info)], True)


class AxisBinarySensor(AxisDeviceEvent, BinarySensorDevice):
    """Representation of a binary Axis event."""

    def __init__(self, axis_event, hass):
    def __init__(self, hass, event_config):
        """Initialize the binary sensor."""
        self.hass = hass
        self._state = False
        self._delay = axis_event.device_config(CONF_TRIGGER_TIME)
        self._delay = event_config[CONF_TRIGGER_TIME]
        self._timer = None
        AxisDeviceEvent.__init__(self, axis_event)
        AxisDeviceEvent.__init__(self, event_config)

    @property
    def is_on(self):
@@ -22,6 +22,10 @@ from homeassistant.helpers.event import async_track_state_change

_LOGGER = logging.getLogger(__name__)

ATTR_OBSERVATIONS = 'observations'
ATTR_PROBABILITY = 'probability'
ATTR_PROBABILITY_THRESHOLD = 'probability_threshold'

CONF_OBSERVATIONS = 'observations'
CONF_PRIOR = 'prior'
CONF_PROBABILITY_THRESHOLD = 'probability_threshold'
@@ -29,7 +33,8 @@ CONF_P_GIVEN_F = 'prob_given_false'
CONF_P_GIVEN_T = 'prob_given_true'
CONF_TO_STATE = 'to_state'

DEFAULT_NAME = 'BayesianBinary'
DEFAULT_NAME = "Bayesian Binary Sensor"
DEFAULT_PROBABILITY_THRESHOLD = 0.5

NUMERIC_STATE_SCHEMA = vol.Schema({
    CONF_PLATFORM: 'numeric_state',
@@ -49,16 +54,14 @@ STATE_SCHEMA = vol.Schema({
}, required=True)

PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
    vol.Optional(CONF_NAME, default=DEFAULT_NAME):
        cv.string,
    vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
    vol.Optional(CONF_DEVICE_CLASS): cv.string,
    vol.Required(CONF_OBSERVATIONS): vol.Schema(
        vol.All(cv.ensure_list, [vol.Any(NUMERIC_STATE_SCHEMA,
                                         STATE_SCHEMA)])
    ),
    vol.Required(CONF_OBSERVATIONS):
        vol.Schema(vol.All(cv.ensure_list,
                           [vol.Any(NUMERIC_STATE_SCHEMA, STATE_SCHEMA)])),
    vol.Required(CONF_PRIOR): vol.Coerce(float),
    vol.Optional(CONF_PROBABILITY_THRESHOLD):
        vol.Coerce(float),
    vol.Optional(CONF_PROBABILITY_THRESHOLD,
                 default=DEFAULT_PROBABILITY_THRESHOLD): vol.Coerce(float),
})


@@ -73,16 +76,16 @@ def update_probability(prior, prob_true, prob_false):

@asyncio.coroutine
def async_setup_platform(hass, config, async_add_devices, discovery_info=None):
    """Set up the Threshold sensor."""
    """Set up the Bayesian Binary sensor."""
    name = config.get(CONF_NAME)
    observations = config.get(CONF_OBSERVATIONS)
    prior = config.get(CONF_PRIOR)
    probability_threshold = config.get(CONF_PROBABILITY_THRESHOLD, 0.5)
    probability_threshold = config.get(CONF_PROBABILITY_THRESHOLD)
    device_class = config.get(CONF_DEVICE_CLASS)

    async_add_devices([
        BayesianBinarySensor(name, prior, observations, probability_threshold,
                             device_class)
        BayesianBinarySensor(
            name, prior, observations, probability_threshold, device_class)
    ], True)


@@ -107,7 +110,7 @@ class BayesianBinarySensor(BinarySensorDevice):
        self.entity_obs = dict.fromkeys(to_observe, [])

        for ind, obs in enumerate(self._observations):
            obs["id"] = ind
            obs['id'] = ind
            self.entity_obs[obs['entity_id']].append(obs)

        self.watchers = {
@@ -117,7 +120,7 @@ class BayesianBinarySensor(BinarySensorDevice):

    @asyncio.coroutine
    def async_added_to_hass(self):
        """Call when entity about to be added to hass."""
        """Call when entity about to be added."""
        @callback
        # pylint: disable=invalid-name
        def async_threshold_sensor_state_listener(entity, old_state,
@@ -135,8 +138,8 @@ class BayesianBinarySensor(BinarySensorDevice):

        prior = self.prior
        for obs in self.current_obs.values():
            prior = update_probability(prior, obs['prob_true'],
                                       obs['prob_false'])
            prior = update_probability(
                prior, obs['prob_true'], obs['prob_false'])
        self.probability = prior

        self.hass.async_add_job(self.async_update_ha_state, True)
@@ -206,9 +209,9 @@ class BayesianBinarySensor(BinarySensorDevice):
    def device_state_attributes(self):
        """Return the state attributes of the sensor."""
        return {
            'observations': [val for val in self.current_obs.values()],
            'probability': round(self.probability, 2),
            'probability_threshold': self._probability_threshold
            ATTR_OBSERVATIONS: [val for val in self.current_obs.values()],
            ATTR_PROBABILITY: round(self.probability, 2),
            ATTR_PROBABILITY_THRESHOLD: self._probability_threshold,
        }

    @asyncio.coroutine
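The hunks above call update_probability(prior, prob_true, prob_false) but the page does not show its body. As an assumption (the standard two-hypothesis Bayes rule, not copied from the component source), the update it performs can be sketched as follows.

# Hedged sketch of the Bayesian update the sensor chains over its observations.
def update_probability(prior, prob_given_true, prob_given_false):
    """Return P(hypothesis | observation) from the prior and the two likelihoods."""
    numerator = prob_given_true * prior
    denominator = numerator + prob_given_false * (1 - prior)
    return numerator / denominator


# With a 0.2 prior, an observation that is four times more likely when the
# hypothesis is true (0.8 vs 0.2) raises the posterior to 0.5.
assert update_probability(0.2, 0.8, 0.2) == 0.5

The sensor applies this update once per active observation and turns on when the resulting probability reaches the configured probability_threshold.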
homeassistant/components/binary_sensor/gc100.py (new file, 69 lines)
@@ -0,0 +1,69 @@
"""
Support for binary sensor using GC100.

For more details about this platform, please refer to the documentation at
https://home-assistant.io/components/binary_sensor.gc100/
"""
import voluptuous as vol

from homeassistant.components.gc100 import DATA_GC100, CONF_PORTS
from homeassistant.components.binary_sensor import (
    BinarySensorDevice, PLATFORM_SCHEMA)
from homeassistant.const import DEVICE_DEFAULT_NAME
import homeassistant.helpers.config_validation as cv

DEPENDENCIES = ['gc100']

_SENSORS_SCHEMA = vol.Schema({
    cv.string: cv.string,
})

PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
    vol.Required(CONF_PORTS): vol.All(cv.ensure_list, [_SENSORS_SCHEMA])
})


# pylint: disable=unused-argument
def setup_platform(hass, config, add_devices, discovery_info=None):
    """Set up the GC100 devices."""
    binary_sensors = []
    ports = config.get(CONF_PORTS)
    for port in ports:
        for port_addr, port_name in port.items():
            binary_sensors.append(GC100BinarySensor(
                port_name, port_addr, hass.data[DATA_GC100]))
    add_devices(binary_sensors, True)


class GC100BinarySensor(BinarySensorDevice):
    """Representation of a binary sensor from GC100."""

    def __init__(self, name, port_addr, gc100):
        """Initialize the GC100 binary sensor."""
        # pylint: disable=no-member
        self._name = name or DEVICE_DEFAULT_NAME
        self._port_addr = port_addr
        self._gc100 = gc100
        self._state = None

        # Subscribe to be notified about state changes (PUSH)
        self._gc100.subscribe(self._port_addr, self.set_state)

    @property
    def name(self):
        """Return the name of the sensor."""
        return self._name

    @property
    def is_on(self):
        """Return the state of the entity."""
        return self._state

    def update(self):
        """Update the sensor state."""
        self._gc100.read_sensor(self._port_addr, self.set_state)

    def set_state(self, state):
        """Set the current state."""
        self._state = state == 1
        self.schedule_update_ha_state()
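The platform above relies on a hub object stored in hass.data[DATA_GC100] that exposes callback-style subscribe() and read_sensor() methods. The fake hub below is purely illustrative (the real hub lives in the gc100 component and talks to the hardware); it just makes the push/poll contract concrete.

# Illustrative stand-in for the hub object GC100BinarySensor expects.
class FakeGC100Hub:
    def __init__(self):
        self._subscribers = {}

    def subscribe(self, port_addr, callback):
        # Push path: remember the callback so a later state change can be delivered.
        self._subscribers[port_addr] = callback

    def read_sensor(self, port_addr, callback):
        # Poll path: report the current value (hard-coded here) via the callback.
        callback(1)

    def push(self, port_addr, value):
        # Simulate the device pushing a new value to the subscriber.
        self._subscribers[port_addr](value)

With such a hub, a pushed value of 1 drives set_state(1), which turns the sensor on and schedules a state write; update() covers the polling case.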
homeassistant/components/binary_sensor/linode.py (new file, 96 lines)
@@ -0,0 +1,96 @@
"""
Support for monitoring the state of Linode Nodes.

For more details about this component, please refer to the documentation at
https://home-assistant.io/components/binary_sensor.linode/
"""
import logging

import voluptuous as vol

import homeassistant.helpers.config_validation as cv
from homeassistant.components.binary_sensor import (
    BinarySensorDevice, PLATFORM_SCHEMA)
from homeassistant.components.linode import (
    CONF_NODES, ATTR_CREATED, ATTR_NODE_ID, ATTR_NODE_NAME,
    ATTR_IPV4_ADDRESS, ATTR_IPV6_ADDRESS, ATTR_MEMORY,
    ATTR_REGION, ATTR_VCPUS, DATA_LINODE)

_LOGGER = logging.getLogger(__name__)

DEFAULT_NAME = 'Node'
DEFAULT_DEVICE_CLASS = 'moving'
DEPENDENCIES = ['linode']

PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
    vol.Required(CONF_NODES): vol.All(cv.ensure_list, [cv.string]),
})


def setup_platform(hass, config, add_devices, discovery_info=None):
    """Set up the Linode droplet sensor."""
    linode = hass.data.get(DATA_LINODE)
    nodes = config.get(CONF_NODES)

    dev = []
    for node in nodes:
        node_id = linode.get_node_id(node)
        if node_id is None:
            _LOGGER.error("Node %s is not available", node)
            return
        dev.append(LinodeBinarySensor(linode, node_id))

    add_devices(dev, True)


class LinodeBinarySensor(BinarySensorDevice):
    """Representation of a Linode droplet sensor."""

    def __init__(self, li, node_id):
        """Initialize a new Linode sensor."""
        self._linode = li
        self._node_id = node_id
        self._state = None
        self.data = None

    @property
    def name(self):
        """Return the name of the sensor."""
        if self.data is not None:
            return self.data.label

    @property
    def is_on(self):
        """Return true if the binary sensor is on."""
        if self.data is not None:
            return self.data.status == 'running'
        return False

    @property
    def device_class(self):
        """Return the class of this sensor."""
        return DEFAULT_DEVICE_CLASS

    @property
    def device_state_attributes(self):
        """Return the state attributes of the Linode Node."""
        if self.data:
            return {
                ATTR_CREATED: self.data.created,
                ATTR_NODE_ID: self.data.id,
                ATTR_NODE_NAME: self.data.label,
                ATTR_IPV4_ADDRESS: self.data.ipv4,
                ATTR_IPV6_ADDRESS: self.data.ipv6,
                ATTR_MEMORY: self.data.specs.memory,
                ATTR_REGION: self.data.region.country,
                ATTR_VCPUS: self.data.specs.vcpus,
            }
        return {}

    def update(self):
        """Update state of sensor."""
        self._linode.update()
        if self._linode.data is not None:
            for node in self._linode.data:
                if node.id == self._node_id:
                    self.data = node
homeassistant/components/binary_sensor/random.py (new file, 64 lines)
@@ -0,0 +1,64 @@
"""
Support for showing random states.

For more details about this platform, please refer to the documentation at
https://home-assistant.io/components/binary_sensor.random/
"""
import asyncio
import logging

import voluptuous as vol

import homeassistant.helpers.config_validation as cv
from homeassistant.components.binary_sensor import (
    BinarySensorDevice, PLATFORM_SCHEMA, DEVICE_CLASSES_SCHEMA)
from homeassistant.const import CONF_NAME, CONF_DEVICE_CLASS

_LOGGER = logging.getLogger(__name__)

DEFAULT_NAME = 'Random Binary Sensor'

PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
    vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
    vol.Optional(CONF_DEVICE_CLASS): DEVICE_CLASSES_SCHEMA,
})


@asyncio.coroutine
def async_setup_platform(hass, config, async_add_devices, discovery_info=None):
    """Set up the Random binary sensor."""
    name = config.get(CONF_NAME)
    device_class = config.get(CONF_DEVICE_CLASS)

    async_add_devices([RandomSensor(name, device_class)], True)


class RandomSensor(BinarySensorDevice):
    """Representation of a Random binary sensor."""

    def __init__(self, name, device_class):
        """Initialize the Random binary sensor."""
        self._name = name
        self._device_class = device_class
        self._state = None

    @property
    def name(self):
        """Return the name of the sensor."""
        return self._name

    @property
    def is_on(self):
        """Return true if sensor is on."""
        return self._state

    @property
    def device_class(self):
        """Return the sensor class of the sensor."""
        return self._device_class

    @asyncio.coroutine
    def async_update(self):
        """Get new state and update the sensor's state."""
        from random import getrandbits
        self._state = bool(getrandbits(1))
@@ -62,7 +62,6 @@ def setup_platform(hass, config, add_devices_callback, discovery_info=None):
                entity[CONF_COMMAND_ON],
                entity[CONF_COMMAND_OFF])
            device.hass = hass
            device.is_lighting4 = (packet_id[2:4] == '13')
            sensors.append(device)
            rfxtrx.RFX_DEVICES[device_id] = device

@@ -86,17 +85,16 @@ def setup_platform(hass, config, add_devices_callback, discovery_info=None):
        if not config[ATTR_AUTOMATIC_ADD]:
            return

        poss_dev = rfxtrx.find_possible_pt2262_device(device_id)

        if poss_dev is not None:
            poss_id = slugify(poss_dev.event.device.id_string.lower())
            _LOGGER.info("Found possible matching deviceid %s.",
                         poss_id)
        if event.device.packettype == 0x13:
            poss_dev = rfxtrx.find_possible_pt2262_device(device_id)
            if poss_dev is not None:
                poss_id = slugify(poss_dev.event.device.id_string.lower())
                _LOGGER.info("Found possible matching deviceid %s.",
                             poss_id)

        pkt_id = "".join("{0:02x}".format(x) for x in event.data)
        sensor = RfxtrxBinarySensor(event, pkt_id)
        sensor.hass = hass
        sensor.is_lighting4 = (pkt_id[2:4] == '13')
        rfxtrx.RFX_DEVICES[device_id] = sensor
        add_devices_callback([sensor])
        _LOGGER.info("Added binary sensor %s "
@@ -114,6 +112,7 @@ def setup_platform(hass, config, add_devices_callback, discovery_info=None):
                     slugify(event.device.id_string.lower()),
                     event.device.__class__.__name__,
                     event.device.subtype)

        if sensor.is_lighting4:
        if sensor.data_bits is not None:
            cmd = rfxtrx.get_pt2262_cmd(device_id, sensor.data_bits)
@@ -154,7 +153,7 @@ class RfxtrxBinarySensor(BinarySensorDevice):
        self._device_class = device_class
        self._off_delay = off_delay
        self._state = False
        self.is_lighting4 = False
        self.is_lighting4 = (event.device.packettype == 0x13)
        self.delay_listener = None
        self._data_bits = data_bits
        self._cmd_on = cmd_on
@@ -11,7 +11,7 @@ import voluptuous as vol
import homeassistant.helpers.config_validation as cv

from homeassistant.components.ring import (
    CONF_ATTRIBUTION, DEFAULT_ENTITY_NAMESPACE)
    CONF_ATTRIBUTION, DEFAULT_ENTITY_NAMESPACE, DATA_RING)

from homeassistant.const import (
    ATTR_ATTRIBUTION, CONF_ENTITY_NAMESPACE, CONF_MONITORED_CONDITIONS)
@@ -27,21 +27,21 @@ SCAN_INTERVAL = timedelta(seconds=5)

# Sensor types: Name, category, device_class
SENSOR_TYPES = {
    'ding': ['Ding', ['doorbell'], 'occupancy'],
    'motion': ['Motion', ['doorbell'], 'motion'],
    'ding': ['Ding', ['doorbell', 'stickup_cams'], 'occupancy'],
    'motion': ['Motion', ['doorbell', 'stickup_cams'], 'motion'],
}

PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
    vol.Optional(CONF_ENTITY_NAMESPACE, default=DEFAULT_ENTITY_NAMESPACE):
        cv.string,
    vol.Required(CONF_MONITORED_CONDITIONS, default=[]):
    vol.Required(CONF_MONITORED_CONDITIONS, default=list(SENSOR_TYPES)):
        vol.All(cv.ensure_list, [vol.In(SENSOR_TYPES)]),
})


def setup_platform(hass, config, add_devices, discovery_info=None):
    """Set up a sensor for a Ring device."""
    ring = hass.data.get('ring')
    ring = hass.data[DATA_RING]

    sensors = []
    for sensor_type in config.get(CONF_MONITORED_CONDITIONS):
@@ -50,6 +50,12 @@ def setup_platform(hass, config, add_devices, discovery_info=None):
            sensors.append(RingBinarySensor(hass,
                                            device,
                                            sensor_type))

        for device in ring.stickup_cams:
            if 'stickup_cams' in SENSOR_TYPES[sensor_type][1]:
                sensors.append(RingBinarySensor(hass,
                                                device,
                                                sensor_type))
    add_devices(sensors, True)
    return True
homeassistant/components/binary_sensor/tellduslive.py (new file, 34 lines)
@@ -0,0 +1,34 @@
"""
Support for binary sensors using Tellstick Net.

This platform uses the Telldus Live online service.

For more details about this platform, please refer to the documentation at
https://home-assistant.io/components/binary_sensor.tellduslive/

"""
import logging

from homeassistant.components.tellduslive import TelldusLiveEntity
from homeassistant.components.binary_sensor import BinarySensorDevice

_LOGGER = logging.getLogger(__name__)


def setup_platform(hass, config, add_devices, discovery_info=None):
    """Set up Tellstick sensors."""
    if discovery_info is None:
        return
    add_devices(
        TelldusLiveSensor(hass, binary_sensor)
        for binary_sensor in discovery_info
    )


class TelldusLiveSensor(TelldusLiveEntity, BinarySensorDevice):
    """Representation of a Tellstick sensor."""

    @property
    def is_on(self):
        """Return true if switch is on."""
        return self.device.is_on
@ -1,11 +1,13 @@
"""
A sensor that monitors trands in other components.
A sensor that monitors trends in other components.

For more details about this platform, please refer to the documentation at
https://home-assistant.io/components/sensor.trend/
"""
import asyncio
from collections import deque
import logging
import math

import voluptuous as vol

@ -16,21 +18,40 @@ from homeassistant.components.binary_sensor import (
    BinarySensorDevice, ENTITY_ID_FORMAT, PLATFORM_SCHEMA,
    DEVICE_CLASSES_SCHEMA)
from homeassistant.const import (
    ATTR_FRIENDLY_NAME, ATTR_ENTITY_ID, CONF_DEVICE_CLASS, STATE_UNKNOWN)
    ATTR_ENTITY_ID, ATTR_FRIENDLY_NAME,
    CONF_DEVICE_CLASS, CONF_ENTITY_ID, CONF_FRIENDLY_NAME,
    STATE_UNKNOWN)
from homeassistant.helpers.entity import generate_entity_id
from homeassistant.helpers.event import track_state_change
from homeassistant.helpers.event import async_track_state_change
from homeassistant.util import utcnow

REQUIREMENTS = ['numpy==1.13.3']

_LOGGER = logging.getLogger(__name__)

ATTR_ATTRIBUTE = 'attribute'
ATTR_GRADIENT = 'gradient'
ATTR_MIN_GRADIENT = 'min_gradient'
ATTR_INVERT = 'invert'
ATTR_SAMPLE_DURATION = 'sample_duration'
ATTR_SAMPLE_COUNT = 'sample_count'

CONF_SENSORS = 'sensors'
CONF_ATTRIBUTE = 'attribute'
CONF_MAX_SAMPLES = 'max_samples'
CONF_MIN_GRADIENT = 'min_gradient'
CONF_INVERT = 'invert'
CONF_SAMPLE_DURATION = 'sample_duration'

SENSOR_SCHEMA = vol.Schema({
    vol.Required(ATTR_ENTITY_ID): cv.entity_id,
    vol.Required(CONF_ENTITY_ID): cv.entity_id,
    vol.Optional(CONF_ATTRIBUTE): cv.string,
    vol.Optional(ATTR_FRIENDLY_NAME): cv.string,
    vol.Optional(CONF_INVERT, default=False): cv.boolean,
    vol.Optional(CONF_DEVICE_CLASS): DEVICE_CLASSES_SCHEMA,
    vol.Optional(CONF_FRIENDLY_NAME): cv.string,
    vol.Optional(CONF_MAX_SAMPLES, default=2): cv.positive_int,
    vol.Optional(CONF_MIN_GRADIENT, default=0.0): vol.Coerce(float),
    vol.Optional(CONF_INVERT, default=False): cv.boolean,
    vol.Optional(CONF_SAMPLE_DURATION, default=0): cv.positive_int,
})

PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
@ -43,17 +64,21 @@ def setup_platform(hass, config, add_devices, discovery_info=None):
    """Set up the trend sensors."""
    sensors = []

    for device, device_config in config[CONF_SENSORS].items():
    for device_id, device_config in config[CONF_SENSORS].items():
        entity_id = device_config[ATTR_ENTITY_ID]
        attribute = device_config.get(CONF_ATTRIBUTE)
        friendly_name = device_config.get(ATTR_FRIENDLY_NAME, device)
        device_class = device_config.get(CONF_DEVICE_CLASS)
        friendly_name = device_config.get(ATTR_FRIENDLY_NAME, device_id)
        invert = device_config[CONF_INVERT]
        max_samples = device_config[CONF_MAX_SAMPLES]
        min_gradient = device_config[CONF_MIN_GRADIENT]
        sample_duration = device_config[CONF_SAMPLE_DURATION]

        sensors.append(
            SensorTrend(
                hass, device, friendly_name, entity_id, attribute,
                device_class, invert)
                hass, device_id, friendly_name, entity_id, attribute,
                device_class, invert, max_samples, min_gradient,
                sample_duration)
            )
    if not sensors:
        _LOGGER.error("No sensors added")
@ -65,30 +90,23 @@ def setup_platform(hass, config, add_devices, discovery_info=None):
class SensorTrend(BinarySensorDevice):
    """Representation of a trend Sensor."""

    def __init__(self, hass, device_id, friendly_name,
                 target_entity, attribute, device_class, invert):
    def __init__(self, hass, device_id, friendly_name, entity_id,
                 attribute, device_class, invert, max_samples,
                 min_gradient, sample_duration):
        """Initialize the sensor."""
        self._hass = hass
        self.entity_id = generate_entity_id(
            ENTITY_ID_FORMAT, device_id, hass=hass)
        self._name = friendly_name
        self._target_entity = target_entity
        self._entity_id = entity_id
        self._attribute = attribute
        self._device_class = device_class
        self._invert = invert
        self._sample_duration = sample_duration
        self._min_gradient = min_gradient
        self._gradient = None
        self._state = None
        self.from_state = None
        self.to_state = None

        @callback
        def trend_sensor_state_listener(entity, old_state, new_state):
            """Handle the target device state changes."""
            self.from_state = old_state
            self.to_state = new_state
            hass.async_add_job(self.async_update_ha_state(True))

        track_state_change(hass, target_entity,
                           trend_sensor_state_listener)
        self.samples = deque(maxlen=max_samples)

    @property
    def name(self):
@ -105,33 +123,77 @@ class SensorTrend(BinarySensorDevice):
        """Return the sensor class of the sensor."""
        return self._device_class

    @property
    def device_state_attributes(self):
        """Return the state attributes of the sensor."""
        return {
            ATTR_ENTITY_ID: self._entity_id,
            ATTR_FRIENDLY_NAME: self._name,
            ATTR_INVERT: self._invert,
            ATTR_GRADIENT: self._gradient,
            ATTR_MIN_GRADIENT: self._min_gradient,
            ATTR_SAMPLE_DURATION: self._sample_duration,
            ATTR_SAMPLE_COUNT: len(self.samples),
        }

    @property
    def should_poll(self):
        """No polling needed."""
        return False

    @asyncio.coroutine
    def async_added_to_hass(self):
        """Complete device setup after being added to hass."""
        @callback
        def trend_sensor_state_listener(entity, old_state, new_state):
            """Handle state changes on the observed device."""
            try:
                if self._attribute:
                    state = new_state.attributes.get(self._attribute)
                else:
                    state = new_state.state
                if state != STATE_UNKNOWN:
                    sample = (utcnow().timestamp(), float(state))
                    self.samples.append(sample)
                    self.async_schedule_update_ha_state(True)
            except (ValueError, TypeError) as ex:
                _LOGGER.error(ex)

        async_track_state_change(
            self.hass, self._entity_id,
            trend_sensor_state_listener)

    @asyncio.coroutine
    def async_update(self):
        """Get the latest data and update the states."""
        if self.from_state is None or self.to_state is None:
            return
        if (self.from_state.state == STATE_UNKNOWN or
                self.to_state.state == STATE_UNKNOWN):
            return
        try:
            if self._attribute:
                from_value = float(
                    self.from_state.attributes.get(self._attribute))
                to_value = float(
                    self.to_state.attributes.get(self._attribute))
            else:
                from_value = float(self.from_state.state)
                to_value = float(self.to_state.state)
        # Remove outdated samples
        if self._sample_duration > 0:
            cutoff = utcnow().timestamp() - self._sample_duration
            while self.samples and self.samples[0][0] < cutoff:
                self.samples.popleft()

            self._state = to_value > from_value
            if self._invert:
                self._state = not self._state
        if len(self.samples) < 2:
            return

        except (ValueError, TypeError) as ex:
            self._state = None
            _LOGGER.error(ex)
        # Calculate gradient of linear trend
        yield from self.hass.async_add_job(self._calculate_gradient)

        # Update state
        self._state = (
            abs(self._gradient) > abs(self._min_gradient) and
            math.copysign(self._gradient, self._min_gradient) == self._gradient
        )

        if self._invert:
            self._state = not self._state

    def _calculate_gradient(self):
        """Compute the linear trend gradient of the current samples.

        This needs to run inside the executor.
        """
        import numpy as np
        timestamps = np.array([t for t, _ in self.samples])
        values = np.array([s for _, s in self.samples])
        coeffs = np.polyfit(timestamps, values, 1)
        self._gradient = coeffs[0]
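
For reference, a minimal configuration sketch for this platform built from the options in the schema above; the entity name, thresholds and durations are illustrative, not taken from the commit:

binary_sensor:
  - platform: trend
    sensors:
      cpu_speed_rising:
        entity_id: sensor.cpu_speed
        max_samples: 10
        min_gradient: 0.5
        sample_duration: 600
        invert: false
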
@ -25,13 +25,9 @@ def setup_platform(hass, config, add_devices, discovery_info=None):
    for (_, gateway) in hass.data[PY_XIAOMI_GATEWAY].gateways.items():
        for device in gateway.devices['binary_sensor']:
            model = device['model']
            if model == 'motion':
            if model in ['motion', 'sensor_motion.aq2']:
                devices.append(XiaomiMotionSensor(device, hass, gateway))
            elif model == 'sensor_motion.aq2':
                devices.append(XiaomiMotionSensor(device, hass, gateway))
            elif model == 'magnet':
                devices.append(XiaomiDoorSensor(device, gateway))
            elif model == 'sensor_magnet.aq2':
            elif model in ['magnet', 'sensor_magnet.aq2']:
                devices.append(XiaomiDoorSensor(device, gateway))
            elif model == 'sensor_wleak.aq1':
                devices.append(XiaomiWaterLeakSensor(device, gateway))
@ -39,10 +35,7 @@ def setup_platform(hass, config, add_devices, discovery_info=None):
                devices.append(XiaomiSmokeSensor(device, gateway))
            elif model == 'natgas':
                devices.append(XiaomiNatgasSensor(device, gateway))
            elif model == 'switch':
                devices.append(XiaomiButton(device, 'Switch', 'status',
                                            hass, gateway))
            elif model == 'sensor_switch.aq2':
            elif model in ['switch', 'sensor_switch.aq2', 'sensor_switch.aq3']:
                devices.append(XiaomiButton(device, 'Switch', 'status',
                                            hass, gateway))
            elif model == '86sw1':
@ -289,9 +282,17 @@ class XiaomiButton(XiaomiBinarySensor):
    def __init__(self, device, name, data_key, hass, xiaomi_hub):
        """Initialize the XiaomiButton."""
        self._hass = hass
        self._last_action = None
        XiaomiBinarySensor.__init__(self, device, name, xiaomi_hub,
                                    data_key, None)

    @property
    def device_state_attributes(self):
        """Return the state attributes."""
        attrs = {ATTR_LAST_ACTION: self._last_action}
        attrs.update(super().device_state_attributes)
        return attrs

    def parse_data(self, data):
        """Parse data sent by gateway."""
        value = data.get(self._data_key)
@ -317,6 +318,8 @@ class XiaomiButton(XiaomiBinarySensor):
            'entity_id': self.entity_id,
            'click_type': click_type
        })
        self._last_action = click_type

        if value in ['long_click_press', 'long_click_release']:
            return True
        return False
@ -1,19 +1,21 @@
# Describes the format for available calendar services

todoist:
  new_task:
    description: Create a new task and add it to a project.
    fields:
      content:
        description: The name of the task. [Required]
        description: The name of the task (Required).
        example: Pick up the mail
      project:
        description: The name of the project this task should belong to. Defaults to Inbox. [Optional]
        description: The name of the project this task should belong to. Defaults to Inbox (Optional).
        example: Errands
      labels:
        description: Any labels that you want to apply to this task, separated by a comma. [Optional]
        description: Any labels that you want to apply to this task, separated by a comma (Optional).
        example: Chores,Deliveries
      priority:
        description: The priority of this task, from 1 (normal) to 4 (urgent). [Optional]
        description: The priority of this task, from 1 (normal) to 4 (urgent) (Optional).
        example: 2
      due_date:
        description: The day this task is due, in format YYYY-MM-DD. [Optional]
        description: The day this task is due, in format YYYY-MM-DD (Optional).
        example: "2018-04-01"
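
Taken together, a service data payload for new_task built from the field examples above would look like the following sketch (assembled for illustration, not part of the commit):

content: Pick up the mail
project: Errands
labels: Chores,Deliveries
priority: 2
due_date: "2018-04-01"
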
@ -29,18 +29,22 @@ from homeassistant.helpers.entity import Entity
from homeassistant.helpers.entity_component import EntityComponent
from homeassistant.helpers.config_validation import PLATFORM_SCHEMA  # noqa
from homeassistant.components.http import HomeAssistantView, KEY_AUTHENTICATED
from homeassistant.helpers.event import async_track_time_interval
import homeassistant.helpers.config_validation as cv

DOMAIN = 'camera'
DEPENDENCIES = ['http']

_LOGGER = logging.getLogger(__name__)

SERVICE_EN_MOTION = 'enable_motion_detection'
SERVICE_DISEN_MOTION = 'disable_motion_detection'
DOMAIN = 'camera'
DEPENDENCIES = ['http']
SERVICE_ENABLE_MOTION = 'enable_motion_detection'
SERVICE_DISABLE_MOTION = 'disable_motion_detection'
SERVICE_SNAPSHOT = 'snapshot'

SCAN_INTERVAL = timedelta(seconds=30)
ENTITY_ID_FORMAT = DOMAIN + '.{}'

ATTR_FILENAME = 'filename'

STATE_RECORDING = 'recording'
STATE_STREAMING = 'streaming'
STATE_IDLE = 'idle'
@ -55,13 +59,17 @@ CAMERA_SERVICE_SCHEMA = vol.Schema({
    vol.Optional(ATTR_ENTITY_ID): cv.entity_ids,
})

CAMERA_SERVICE_SNAPSHOT = CAMERA_SERVICE_SCHEMA.extend({
    vol.Required(ATTR_FILENAME): cv.template
})


@bind_hass
def enable_motion_detection(hass, entity_id=None):
    """Enable Motion Detection."""
    data = {ATTR_ENTITY_ID: entity_id} if entity_id else None
    hass.async_add_job(hass.services.async_call(
        DOMAIN, SERVICE_EN_MOTION, data))
        DOMAIN, SERVICE_ENABLE_MOTION, data))


@bind_hass
@ -69,9 +77,20 @@ def disable_motion_detection(hass, entity_id=None):
    """Disable Motion Detection."""
    data = {ATTR_ENTITY_ID: entity_id} if entity_id else None
    hass.async_add_job(hass.services.async_call(
        DOMAIN, SERVICE_DISEN_MOTION, data))
        DOMAIN, SERVICE_DISABLE_MOTION, data))


@bind_hass
def async_snapshot(hass, filename, entity_id=None):
    """Make a snapshot from a camera."""
    data = {ATTR_ENTITY_ID: entity_id} if entity_id else {}
    data[ATTR_FILENAME] = filename

    hass.async_add_job(hass.services.async_call(
        DOMAIN, SERVICE_SNAPSHOT, data))


@bind_hass
@asyncio.coroutine
def async_get_image(hass, entity_id, timeout=10):
    """Fetch an image from a camera entity."""
@ -119,7 +138,8 @@ def async_setup(hass, config):
            entity.async_update_token()
            hass.async_add_job(entity.async_update_ha_state())

    async_track_time_interval(hass, update_tokens, TOKEN_CHANGE_INTERVAL)
    hass.helpers.event.async_track_time_interval(
        update_tokens, TOKEN_CHANGE_INTERVAL)

    @asyncio.coroutine
    def async_handle_camera_service(service):
@ -128,9 +148,9 @@ def async_setup(hass, config):

        update_tasks = []
        for camera in target_cameras:
            if service.service == SERVICE_EN_MOTION:
            if service.service == SERVICE_ENABLE_MOTION:
                yield from camera.async_enable_motion_detection()
            elif service.service == SERVICE_DISEN_MOTION:
            elif service.service == SERVICE_DISABLE_MOTION:
                yield from camera.async_disable_motion_detection()

            if not camera.should_poll:
@ -140,16 +160,50 @@ def async_setup(hass, config):
        if update_tasks:
            yield from asyncio.wait(update_tasks, loop=hass.loop)

    @asyncio.coroutine
    def async_handle_snapshot_service(service):
        """Handle snapshot services calls."""
        target_cameras = component.async_extract_from_service(service)
        filename = service.data[ATTR_FILENAME]
        filename.hass = hass

        for camera in target_cameras:
            snapshot_file = filename.async_render(
                variables={ATTR_ENTITY_ID: camera})

            # check if we are allowed to access that file
            if not hass.config.is_allowed_path(snapshot_file):
                _LOGGER.error(
                    "Can't write %s, no access to path!", snapshot_file)
                continue

            image = yield from camera.async_camera_image()

            def _write_image(to_file, image_data):
                """Executor helper to write image."""
                with open(to_file, 'wb') as img_file:
                    img_file.write(image_data)

            try:
                yield from hass.async_add_job(
                    _write_image, snapshot_file, image)
            except OSError as err:
                _LOGGER.error("Can't write image to file: %s", err)

    descriptions = yield from hass.async_add_job(
        load_yaml_config_file, os.path.join(
            os.path.dirname(__file__), 'services.yaml'))

    hass.services.async_register(
        DOMAIN, SERVICE_EN_MOTION, async_handle_camera_service,
        descriptions.get(SERVICE_EN_MOTION), schema=CAMERA_SERVICE_SCHEMA)
        DOMAIN, SERVICE_ENABLE_MOTION, async_handle_camera_service,
        descriptions.get(SERVICE_ENABLE_MOTION), schema=CAMERA_SERVICE_SCHEMA)
    hass.services.async_register(
        DOMAIN, SERVICE_DISEN_MOTION, async_handle_camera_service,
        descriptions.get(SERVICE_DISEN_MOTION), schema=CAMERA_SERVICE_SCHEMA)
        DOMAIN, SERVICE_DISABLE_MOTION, async_handle_camera_service,
        descriptions.get(SERVICE_DISABLE_MOTION), schema=CAMERA_SERVICE_SCHEMA)
    hass.services.async_register(
        DOMAIN, SERVICE_SNAPSHOT, async_handle_snapshot_service,
        descriptions.get(SERVICE_SNAPSHOT),
        schema=CAMERA_SERVICE_SNAPSHOT)

    return True
@ -11,7 +11,7 @@ from homeassistant.const import (
    CONF_AUTHENTICATION, HTTP_DIGEST_AUTHENTICATION)
from homeassistant.components.camera.mjpeg import (
    CONF_MJPEG_URL, CONF_STILL_IMAGE_URL, MjpegCamera)
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from homeassistant.helpers.dispatcher import dispatcher_connect

_LOGGER = logging.getLogger(__name__)

@ -52,9 +52,9 @@ class AxisCamera(MjpegCamera):
        """Initialize Axis Communications camera component."""
        super().__init__(hass, config)
        self.port = port
        async_dispatcher_connect(hass,
                                 DOMAIN + '_' + config[CONF_NAME] + '_new_ip',
                                 self._new_ip)
        dispatcher_connect(hass,
                           DOMAIN + '_' + config[CONF_NAME] + '_new_ip',
                           self._new_ip)

    def _new_ip(self, host):
        """Set new IP for video stream."""
141
homeassistant/components/camera/ring.py
Normal file
@ -0,0 +1,141 @@
"""
This component provides support to the Ring Door Bell camera.

For more details about this platform, please refer to the documentation at
https://home-assistant.io/components/camera.ring/
"""
import asyncio
import logging

from datetime import datetime, timedelta

import voluptuous as vol

from homeassistant.helpers import config_validation as cv
from homeassistant.components.ring import DATA_RING, CONF_ATTRIBUTION
from homeassistant.components.camera import Camera, PLATFORM_SCHEMA
from homeassistant.components.ffmpeg import DATA_FFMPEG
from homeassistant.const import ATTR_ATTRIBUTION, CONF_SCAN_INTERVAL
from homeassistant.helpers.aiohttp_client import async_aiohttp_proxy_stream
from homeassistant.util import dt as dt_util

CONF_FFMPEG_ARGUMENTS = 'ffmpeg_arguments'

DEPENDENCIES = ['ring', 'ffmpeg']

_LOGGER = logging.getLogger(__name__)

SCAN_INTERVAL = timedelta(seconds=90)

PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
    vol.Optional(CONF_FFMPEG_ARGUMENTS): cv.string,
    vol.Optional(CONF_SCAN_INTERVAL, default=SCAN_INTERVAL):
        cv.time_period,
})


@asyncio.coroutine
def async_setup_platform(hass, config, async_add_devices, discovery_info=None):
    """Set up a Ring Door Bell and StickUp Camera."""
    ring = hass.data[DATA_RING]

    cams = []
    for camera in ring.doorbells:
        cams.append(RingCam(hass, camera, config))

    for camera in ring.stickup_cams:
        cams.append(RingCam(hass, camera, config))

    async_add_devices(cams, True)
    return True


class RingCam(Camera):
    """An implementation of a Ring Door Bell camera."""

    def __init__(self, hass, camera, device_info):
        """Initialize a Ring Door Bell camera."""
        super(RingCam, self).__init__()
        self._camera = camera
        self._hass = hass
        self._name = self._camera.name
        self._ffmpeg = hass.data[DATA_FFMPEG]
        self._ffmpeg_arguments = device_info.get(CONF_FFMPEG_ARGUMENTS)
        self._last_video_id = self._camera.last_recording_id
        self._video_url = self._camera.recording_url(self._last_video_id)
        self._expires_at = None
        self._utcnow = None

    @property
    def name(self):
        """Return the name of this camera."""
        return self._name

    @property
    def device_state_attributes(self):
        """Return the state attributes."""
        return {
            ATTR_ATTRIBUTION: CONF_ATTRIBUTION,
            'device_id': self._camera.id,
            'firmware': self._camera.firmware,
            'kind': self._camera.kind,
            'timezone': self._camera.timezone,
            'type': self._camera.family,
            'video_url': self._video_url,
            'video_id': self._last_video_id
        }

    @asyncio.coroutine
    def async_camera_image(self):
        """Return a still image response from the camera."""
        from haffmpeg import ImageFrame, IMAGE_JPEG
        ffmpeg = ImageFrame(self._ffmpeg.binary, loop=self.hass.loop)

        if self._video_url is None:
            return

        image = yield from asyncio.shield(ffmpeg.get_image(
            self._video_url, output_format=IMAGE_JPEG,
            extra_cmd=self._ffmpeg_arguments), loop=self.hass.loop)
        return image

    @asyncio.coroutine
    def handle_async_mjpeg_stream(self, request):
        """Generate an HTTP MJPEG stream from the camera."""
        from haffmpeg import CameraMjpeg

        if self._video_url is None:
            return

        stream = CameraMjpeg(self._ffmpeg.binary, loop=self.hass.loop)
        yield from stream.open_camera(
            self._video_url, extra_cmd=self._ffmpeg_arguments)

        yield from async_aiohttp_proxy_stream(
            self.hass, request, stream,
            'multipart/x-mixed-replace;boundary=ffserver')
        yield from stream.close()

    @property
    def should_poll(self):
        """Update the image periodically."""
        return True

    def update(self):
        """Update camera entity and refresh attributes."""
        # extract the video expiration from URL
        x_amz_expires = int(self._video_url.split('&')[0].split('=')[-1])
        x_amz_date = self._video_url.split('&')[1].split('=')[-1]

        self._utcnow = dt_util.utcnow()
        self._expires_at = \
            timedelta(seconds=x_amz_expires) + \
            dt_util.as_utc(datetime.strptime(x_amz_date, "%Y%m%dT%H%M%SZ"))

        if self._last_video_id != self._camera.last_recording_id:
            _LOGGER.debug("Updated Ring DoorBell last_video_id")
            self._last_video_id = self._camera.last_recording_id

        if self._utcnow >= self._expires_at:
            _LOGGER.debug("Updated Ring DoorBell video_url")
            self._video_url = self._camera.recording_url(self._last_video_id)
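
For context, a minimal platform configuration sketch for this new camera, assuming the ring component is already configured; the ffmpeg argument value is only illustrative and optional:

camera:
  - platform: ring
    ffmpeg_arguments: '-pred 1'
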
@ -1,17 +1,25 @@
# Describes the format for available camera services

enable_motion_detection:
  description: Enable the motion detection in a camera

  description: Enable the motion detection in a camera.
  fields:
    entity_id:
      description: Name(s) of entities to enable motion detection
      description: Name(s) of entities to enable motion detection.
      example: 'camera.living_room_camera'

disable_motion_detection:
  description: Disable the motion detection in a camera

  description: Disable the motion detection in a camera.
  fields:
    entity_id:
      description: Name(s) of entities to disable motion detection
      description: Name(s) of entities to disable motion detection.
      example: 'camera.living_room_camera'

snapshot:
  description: Take a snapshot from a camera.
  fields:
    entity_id:
      description: Name(s) of entities to create snapshots from.
      example: 'camera.living_room_camera'
    filename:
      description: Template of a Filename. Variable is entity_id.
      example: '/tmp/snapshot_{{ entity_id }}'
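
A sample call to the new camera.snapshot service using the field examples above (the entity and target path are illustrative):

service: camera.snapshot
data:
  entity_id: camera.living_room_camera
  filename: '/tmp/snapshot_{{ entity_id }}'
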
117
homeassistant/components/climate/ephember.py
Normal file
@ -0,0 +1,117 @@
"""
Support for the EPH Controls Ember thermostats.

For more details about this platform, please refer to the documentation at
https://home-assistant.io/components/climate.ephember/
"""
import logging
from datetime import timedelta
import voluptuous as vol

from homeassistant.components.climate import (
    ClimateDevice, PLATFORM_SCHEMA, STATE_HEAT, STATE_IDLE)
from homeassistant.const import (
    TEMP_CELSIUS, CONF_USERNAME, CONF_PASSWORD)
import homeassistant.helpers.config_validation as cv

REQUIREMENTS = ['pyephember==0.1.1']

_LOGGER = logging.getLogger(__name__)

# Return cached results if last scan was less than this time ago
SCAN_INTERVAL = timedelta(seconds=120)

PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
    vol.Required(CONF_USERNAME): cv.string,
    vol.Required(CONF_PASSWORD): cv.string
})


def setup_platform(hass, config, add_devices, discovery_info=None):
    """Set up the ephember thermostat."""
    from pyephember.pyephember import EphEmber

    username = config.get(CONF_USERNAME)
    password = config.get(CONF_PASSWORD)

    try:
        ember = EphEmber(username, password)
        zones = ember.get_zones()
        for zone in zones:
            add_devices([EphEmberThermostat(ember, zone)])
    except RuntimeError:
        _LOGGER.error("Cannot connect to EphEmber")
        return

    return


class EphEmberThermostat(ClimateDevice):
    """Representation of an EphEmber thermostat."""

    def __init__(self, ember, zone):
        """Initialize the thermostat."""
        self._ember = ember
        self._zone_name = zone['name']
        self._zone = zone
        self._hot_water = zone['isHotWater']

    @property
    def name(self):
        """Return the name of the thermostat, if any."""
        return self._zone_name

    @property
    def temperature_unit(self):
        """Return the unit of measurement which this thermostat uses."""
        return TEMP_CELSIUS

    @property
    def current_temperature(self):
        """Return the current temperature."""
        return self._zone['currentTemperature']

    @property
    def target_temperature(self):
        """Return the temperature we try to reach."""
        return self._zone['targetTemperature']

    @property
    def current_operation(self):
        """Return current operation ie. heat, cool, idle."""
        if self._zone['isCurrentlyActive']:
            return STATE_HEAT
        else:
            return STATE_IDLE

    @property
    def is_aux_heat_on(self):
        """Return true if aux heater."""
        return self._zone['isBoostActive']

    def turn_aux_heat_on(self):
        """Turn auxiliary heater on."""
        self._ember.activate_boost_by_name(
            self._zone_name, self._zone['targetTemperature'])

    def turn_aux_heat_off(self):
        """Turn auxiliary heater off."""
        self._ember.deactivate_boost_by_name(self._zone_name)

    def set_temperature(self, **kwargs):
        """Set new target temperature."""
        return

    @property
    def min_temp(self):
        """Return the minimum temperature."""
        return self._zone['targetTemperature']

    @property
    def max_temp(self):
        """Return the maximum temperature."""
        return self._zone['targetTemperature']

    def update(self):
        """Get the latest data."""
        self._zone = self._ember.get_zone(self._zone_name)
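
A configuration sketch for this new platform, following the schema above; the credentials are placeholders:

climate:
  - platform: ephember
    username: YOUR_EPHEMBER_EMAIL
    password: YOUR_EPHEMBER_PASSWORD
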
@ -36,7 +36,8 @@ CONF_MAX_TEMP = 'max_temp'
CONF_TARGET_TEMP = 'target_temp'
CONF_AC_MODE = 'ac_mode'
CONF_MIN_DUR = 'min_cycle_duration'
CONF_TOLERANCE = 'tolerance'
CONF_COLD_TOLERANCE = 'cold_tolerance'
CONF_HOT_TOLERANCE = 'hot_tolerance'
CONF_KEEP_ALIVE = 'keep_alive'


@ -48,7 +49,10 @@ PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
    vol.Optional(CONF_MIN_DUR): vol.All(cv.time_period, cv.positive_timedelta),
    vol.Optional(CONF_MIN_TEMP): vol.Coerce(float),
    vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
    vol.Optional(CONF_TOLERANCE, default=DEFAULT_TOLERANCE): vol.Coerce(float),
    vol.Optional(CONF_COLD_TOLERANCE, default=DEFAULT_TOLERANCE): vol.Coerce(
        float),
    vol.Optional(CONF_HOT_TOLERANCE, default=DEFAULT_TOLERANCE): vol.Coerce(
        float),
    vol.Optional(CONF_TARGET_TEMP): vol.Coerce(float),
    vol.Optional(CONF_KEEP_ALIVE): vol.All(
        cv.time_period, cv.positive_timedelta),
@ -66,12 +70,14 @@ def async_setup_platform(hass, config, async_add_devices, discovery_info=None):
    target_temp = config.get(CONF_TARGET_TEMP)
    ac_mode = config.get(CONF_AC_MODE)
    min_cycle_duration = config.get(CONF_MIN_DUR)
    tolerance = config.get(CONF_TOLERANCE)
    cold_tolerance = config.get(CONF_COLD_TOLERANCE)
    hot_tolerance = config.get(CONF_HOT_TOLERANCE)
    keep_alive = config.get(CONF_KEEP_ALIVE)

    async_add_devices([GenericThermostat(
        hass, name, heater_entity_id, sensor_entity_id, min_temp, max_temp,
        target_temp, ac_mode, min_cycle_duration, tolerance, keep_alive)])
        target_temp, ac_mode, min_cycle_duration, cold_tolerance,
        hot_tolerance, keep_alive)])


class GenericThermostat(ClimateDevice):
@ -79,14 +85,15 @@ class GenericThermostat(ClimateDevice):

    def __init__(self, hass, name, heater_entity_id, sensor_entity_id,
                 min_temp, max_temp, target_temp, ac_mode, min_cycle_duration,
                 tolerance, keep_alive):
                 cold_tolerance, hot_tolerance, keep_alive):
        """Initialize the thermostat."""
        self.hass = hass
        self._name = name
        self.heater_entity_id = heater_entity_id
        self.ac_mode = ac_mode
        self.min_cycle_duration = min_cycle_duration
        self._tolerance = tolerance
        self._cold_tolerance = cold_tolerance
        self._hot_tolerance = hot_tolerance
        self._keep_alive = keep_alive
        self._enabled = True

@ -261,25 +268,29 @@ class GenericThermostat(ClimateDevice):
        if self.ac_mode:
            is_cooling = self._is_device_active
            if is_cooling:
                too_cold = self._target_temp - self._cur_temp > self._tolerance
                too_cold = self._target_temp - self._cur_temp >= \
                    self._cold_tolerance
                if too_cold:
                    _LOGGER.info('Turning off AC %s', self.heater_entity_id)
                    switch.async_turn_off(self.hass, self.heater_entity_id)
            else:
                too_hot = self._cur_temp - self._target_temp > self._tolerance
                too_hot = self._cur_temp - self._target_temp >= \
                    self._hot_tolerance
                if too_hot:
                    _LOGGER.info('Turning on AC %s', self.heater_entity_id)
                    switch.async_turn_on(self.hass, self.heater_entity_id)
        else:
            is_heating = self._is_device_active
            if is_heating:
                too_hot = self._cur_temp - self._target_temp > self._tolerance
                too_hot = self._cur_temp - self._target_temp >= \
                    self._hot_tolerance
                if too_hot:
                    _LOGGER.info('Turning off heater %s',
                                 self.heater_entity_id)
                    switch.async_turn_off(self.hass, self.heater_entity_id)
            else:
                too_cold = self._target_temp - self._cur_temp > self._tolerance
                too_cold = self._target_temp - self._cur_temp >= \
                    self._cold_tolerance
                if too_cold:
                    _LOGGER.info('Turning on heater %s', self.heater_entity_id)
                    switch.async_turn_on(self.hass, self.heater_entity_id)
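
For illustration, a configuration sketch using the new split tolerances; the heater and target_sensor keys come from the platform's existing schema (not shown in this hunk) and the entity names and values are placeholders. With these values the heater turns on once the temperature falls 0.3 degrees below the target and turns off once it rises 0.2 degrees above it:

climate:
  - platform: generic_thermostat
    name: Study
    heater: switch.study_heater
    target_sensor: sensor.study_temperature
    cold_tolerance: 0.3
    hot_tolerance: 0.2
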
@ -11,16 +11,15 @@ import datetime
import requests
import voluptuous as vol

import homeassistant.helpers.config_validation as cv
from homeassistant.components.climate import (
    ClimateDevice, PLATFORM_SCHEMA, ATTR_FAN_MODE, ATTR_FAN_LIST,
    ATTR_OPERATION_MODE, ATTR_OPERATION_LIST)
from homeassistant.const import (
    CONF_PASSWORD, CONF_USERNAME, TEMP_CELSIUS, TEMP_FAHRENHEIT,
    ATTR_TEMPERATURE)
import homeassistant.helpers.config_validation as cv
    ATTR_TEMPERATURE, CONF_REGION)

REQUIREMENTS = ['evohomeclient==0.2.5',
                'somecomfort==0.4.1']
REQUIREMENTS = ['evohomeclient==0.2.5', 'somecomfort==0.4.1']

_LOGGER = logging.getLogger(__name__)

@ -31,7 +30,6 @@ ATTR_CURRENT_OPERATION = 'equipment_output_status'
CONF_AWAY_TEMPERATURE = 'away_temperature'
CONF_COOL_AWAY_TEMPERATURE = 'away_cool_temperature'
CONF_HEAT_AWAY_TEMPERATURE = 'away_heat_temperature'
CONF_REGION = 'region'

DEFAULT_AWAY_TEMPERATURE = 16
DEFAULT_COOL_AWAY_TEMPERATURE = 30
@ -1,132 +1,102 @@
# Describes the format for available climate services

set_aux_heat:
  description: Turn auxiliary heater on/off for climate device

  description: Turn auxiliary heater on/off for climate device.
  fields:
    entity_id:
      description: Name(s) of entities to change
      description: Name(s) of entities to change.
      example: 'climate.kitchen'

    aux_heat:
      description: New value of axillary heater
      description: New value of auxiliary heater.
      example: true

set_away_mode:
  description: Turn away mode on/off for climate device

  description: Turn away mode on/off for climate device.
  fields:
    entity_id:
      description: Name(s) of entities to change
      description: Name(s) of entities to change.
      example: 'climate.kitchen'

    away_mode:
      description: New value of away mode
      description: New value of away mode.
      example: true

set_hold_mode:
  description: Turn hold mode for climate device

  description: Turn hold mode for climate device.
  fields:
    entity_id:
      description: Name(s) of entities to change
      description: Name(s) of entities to change.
      example: 'climate.kitchen'

    hold_mode:
      description: New value of hold mode
      example: 'away'

set_temperature:
  description: Set target temperature of climate device

  description: Set target temperature of climate device.
  fields:
    entity_id:
      description: Name(s) of entities to change
      description: Name(s) of entities to change.
      example: 'climate.kitchen'

    temperature:
      description: New target temperature for hvac
      description: New target temperature for HVAC.
      example: 25

    target_temp_high:
      description: New target high tempereature for hvac
      description: New target high temperature for HVAC.
      example: 26

    target_temp_low:
      description: New target low temperature for hvac
      description: New target low temperature for HVAC.
      example: 20

    operation_mode:
      description: Operation mode to set temperature to. This defaults to current_operation mode if not set, or set incorrectly.
      example: 'Heat'

set_humidity:
  description: Set target humidity of climate device

  description: Set target humidity of climate device.
  fields:
    entity_id:
      description: Name(s) of entities to change
      description: Name(s) of entities to change.
      example: 'climate.kitchen'

    humidity:
      description: New target humidity for climate device
      description: New target humidity for climate device.
      example: 60

set_fan_mode:
  description: Set fan operation for climate device

  description: Set fan operation for climate device.
  fields:
    entity_id:
      description: Name(s) of entities to change
      description: Name(s) of entities to change.
      example: 'climate.nest'

    fan_mode:
      description: New value of fan mode
      description: New value of fan mode.
      example: On Low

set_operation_mode:
  description: Set operation mode for climate device

  description: Set operation mode for climate device.
  fields:
    entity_id:
      description: Name(s) of entities to change
      description: Name(s) of entities to change.
      example: 'climate.nest'

    operation_mode:
      description: New value of operation mode
      description: New value of operation mode.
      example: Heat


set_swing_mode:
  description: Set swing operation for climate device

  description: Set swing operation for climate device.
  fields:
    entity_id:
      description: Name(s) of entities to change
      description: Name(s) of entities to change.
      example: 'climate.nest'

    swing_mode:
      description: New value of swing mode
      description: New value of swing mode.
      example: 1

ecobee_set_fan_min_on_time:
  description: Set the minimum fan on time

  description: Set the minimum fan on time.
  fields:
    entity_id:
      description: Name(s) of entities to change
      description: Name(s) of entities to change.
      example: 'climate.kitchen'

    fan_min_on_time:
      description: New value of fan min on time
      description: New value of fan min on time.
      example: 5

ecobee_resume_program:
  description: Resume the programmed schedule

  description: Resume the programmed schedule.
  fields:
    entity_id:
      description: Name(s) of entities to change
      description: Name(s) of entities to change.
      example: 'climate.kitchen'

    resume_all:
      description: Resume all events and return to the scheduled program. This defaults to false, which removes only the top event.
      example: true
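
As a usage example, a set_temperature call assembled from the fields above; the entity and values are the documented examples:

service: climate.set_temperature
data:
  entity_id: climate.kitchen
  temperature: 25
  operation_mode: Heat
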
@ -3,23 +3,20 @@ Toon van Eneco Thermostat Support.

This provides a component for the rebranded Quby thermostat as provided by
Eneco.

For more details about this platform, please refer to the documentation at
https://home-assistant.io/components/climate.toon/
"""

from homeassistant.components.climate import (ClimateDevice,
                                               ATTR_TEMPERATURE,
                                               STATE_PERFORMANCE,
                                               STATE_HEAT,
                                               STATE_ECO,
                                               STATE_COOL)
from homeassistant.const import TEMP_CELSIUS

import homeassistant.components.toon as toon_main
from homeassistant.components.climate import (
    ClimateDevice, ATTR_TEMPERATURE, STATE_PERFORMANCE, STATE_HEAT, STATE_ECO,
    STATE_COOL)
from homeassistant.const import TEMP_CELSIUS


def setup_platform(hass, config, add_devices, discovery_info=None):
    """Setup thermostat."""
    # Add toon
    add_devices((ThermostatDevice(hass), ), True)
    """Set up the Toon thermostat."""
    add_devices([ThermostatDevice(hass)], True)


class ThermostatDevice(ClimateDevice):
@ -31,25 +28,21 @@ class ThermostatDevice(ClimateDevice):
        self.hass = hass
        self.thermos = hass.data[toon_main.TOON_HANDLE]

        # set up internal state vars
        self._state = None
        self._temperature = None
        self._setpoint = None
        self._operation_list = [STATE_PERFORMANCE,
                                STATE_HEAT,
                                STATE_ECO,
                                STATE_COOL]
        self._operation_list = [
            STATE_PERFORMANCE,
            STATE_HEAT,
            STATE_ECO,
            STATE_COOL,
        ]

    @property
    def name(self):
        """Name of this Thermostat."""
        return self._name

    @property
    def should_poll(self):
        """Polling is required."""
        return True

    @property
    def temperature_unit(self):
        """The unit of measurement used by the platform."""
@ -83,10 +76,12 @@ class ThermostatDevice(ClimateDevice):

    def set_operation_mode(self, operation_mode):
        """Set new operation mode as toonlib requires it."""
        toonlib_values = {STATE_PERFORMANCE: 'Comfort',
                          STATE_HEAT: 'Home',
                          STATE_ECO: 'Away',
                          STATE_COOL: 'Sleep'}
        toonlib_values = {
            STATE_PERFORMANCE: 'Comfort',
            STATE_HEAT: 'Home',
            STATE_ECO: 'Away',
            STATE_COOL: 'Sleep',
        }

        self.thermos.set_state(toonlib_values[operation_mode])
@ -6,22 +6,23 @@ import os

import voluptuous as vol

from homeassistant.const import EVENT_HOMEASSISTANT_START
from homeassistant.const import (
    EVENT_HOMEASSISTANT_START, CONF_REGION, CONF_MODE)

from . import http_api, iot
from .const import CONFIG_DIR, DOMAIN, SERVERS


REQUIREMENTS = ['warrant==0.5.0']
DEPENDENCIES = ['http']
CONF_MODE = 'mode'

_LOGGER = logging.getLogger(__name__)

CONF_COGNITO_CLIENT_ID = 'cognito_client_id'
CONF_USER_POOL_ID = 'user_pool_id'
CONF_REGION = 'region'
CONF_RELAYER = 'relayer'
CONF_USER_POOL_ID = 'user_pool_id'

MODE_DEV = 'development'
DEFAULT_MODE = MODE_DEV
_LOGGER = logging.getLogger(__name__)
DEPENDENCIES = ['http']

CONFIG_SCHEMA = vol.Schema({
    DOMAIN: vol.Schema({
@ -62,7 +62,7 @@ class CloudIoT:
        self.client = client = yield from session.ws_connect(
            self.cloud.relayer, headers={
                hdrs.AUTHORIZATION:
                    'Bearer {}'.format(self.cloud.access_token)
                    'Bearer {}'.format(self.cloud.id_token)
            })
        self.tries = 0
@ -8,7 +8,6 @@ from homeassistant.core import callback
from homeassistant.const import EVENT_COMPONENT_LOADED, CONF_ID
from homeassistant.setup import (
    async_prepare_setup_platform, ATTR_COMPONENT)
from homeassistant.components.frontend import register_built_in_panel
from homeassistant.components.http import HomeAssistantView
from homeassistant.util.yaml import load_yaml, dump

@ -21,7 +20,8 @@ ON_DEMAND = ('zwave')
@asyncio.coroutine
def async_setup(hass, config):
    """Set up the config component."""
    register_built_in_panel(hass, 'config', 'Configuration', 'mdi:settings')
    yield from hass.components.frontend.async_register_built_in_panel(
        'config', 'config', 'mdi:settings')

    @asyncio.coroutine
    def setup_panel(panel_name):
@ -1,71 +1,63 @@
open_cover:
  description: Open all or specified cover
# Describes the format for available cover services

open_cover:
  description: Open all or specified cover.
  fields:
    entity_id:
      description: Name(s) of cover(s) to open
      description: Name(s) of cover(s) to open.
      example: 'cover.living_room'

close_cover:
  description: Close all or specified cover

  description: Close all or specified cover.
  fields:
    entity_id:
      description: Name(s) of cover(s) to close
      description: Name(s) of cover(s) to close.
      example: 'cover.living_room'

set_cover_position:
  description: Move to specific position all or specified cover

  description: Move to specific position all or specified cover.
  fields:
    entity_id:
      description: Name(s) of cover(s) to set cover position
      description: Name(s) of cover(s) to set cover position.
      example: 'cover.living_room'

    position:
      description: Position of the cover (0 to 100)
      description: Position of the cover (0 to 100).
      example: 30

stop_cover:
  description: Stop all or specified cover

  description: Stop all or specified cover.
  fields:
    entity_id:
      description: Name(s) of cover(s) to stop
      description: Name(s) of cover(s) to stop.
      example: 'cover.living_room'

open_cover_tilt:
  description: Open all or specified cover tilt

  description: Open all or specified cover tilt.
  fields:
    entity_id:
      description: Name(s) of cover(s) tilt to open
      description: Name(s) of cover(s) tilt to open.
      example: 'cover.living_room'

close_cover_tilt:
  description: Close all or specified cover tilt

  description: Close all or specified cover tilt.
  fields:
    entity_id:
      description: Name(s) of cover(s) to close tilt
      description: Name(s) of cover(s) to close tilt.
      example: 'cover.living_room'

set_cover_tilt_position:
  description: Move to specific position all or specified cover tilt

  description: Move to specific position all or specified cover tilt.
  fields:
    entity_id:
      description: Name(s) of cover(s) to set cover tilt position
      description: Name(s) of cover(s) to set cover tilt position.
      example: 'cover.living_room'

    position:
      description: Position of the cover (0 to 100)
      description: Position of the cover (0 to 100).
      example: 30

stop_cover_tilt:
  description: Stop all or specified cover

  description: Stop all or specified cover.
  fields:
    entity_id:
      description: Name(s) of cover(s) to stop
      description: Name(s) of cover(s) to stop.
      example: 'cover.living_room'
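
As a usage example, a set_cover_position call built from the documented fields; the entity and position are the examples above:

service: cover.set_cover_position
data:
  entity_id: cover.living_room
  position: 30
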
@ -19,7 +19,8 @@ from homeassistant.const import (
    CONF_FRIENDLY_NAME, CONF_ENTITY_ID,
    EVENT_HOMEASSISTANT_START, MATCH_ALL,
    CONF_VALUE_TEMPLATE, CONF_ICON_TEMPLATE,
    CONF_OPTIMISTIC, STATE_OPEN, STATE_CLOSED)
    CONF_ENTITY_PICTURE_TEMPLATE, CONF_OPTIMISTIC,
    STATE_OPEN, STATE_CLOSED)
from homeassistant.exceptions import TemplateError
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity import async_generate_entity_id
@ -57,6 +58,7 @@ COVER_SCHEMA = vol.Schema({
    vol.Optional(CONF_POSITION_TEMPLATE): cv.template,
    vol.Optional(CONF_TILT_TEMPLATE): cv.template,
    vol.Optional(CONF_ICON_TEMPLATE): cv.template,
    vol.Optional(CONF_ENTITY_PICTURE_TEMPLATE): cv.template,
    vol.Optional(CONF_OPTIMISTIC): cv.boolean,
    vol.Optional(CONF_TILT_OPTIMISTIC): cv.boolean,
    vol.Optional(POSITION_ACTION): cv.SCRIPT_SCHEMA,
@ -81,6 +83,8 @@ def async_setup_platform(hass, config, async_add_devices, discovery_info=None):
        position_template = device_config.get(CONF_POSITION_TEMPLATE)
        tilt_template = device_config.get(CONF_TILT_TEMPLATE)
        icon_template = device_config.get(CONF_ICON_TEMPLATE)
        entity_picture_template = device_config.get(
            CONF_ENTITY_PICTURE_TEMPLATE)
        open_action = device_config.get(OPEN_ACTION)
        close_action = device_config.get(CLOSE_ACTION)
        stop_action = device_config.get(STOP_ACTION)
@ -114,6 +118,11 @@ def async_setup_platform(hass, config, async_add_devices, discovery_info=None):
            if str(temp_ids) != MATCH_ALL:
                template_entity_ids |= set(temp_ids)

        if entity_picture_template is not None:
            temp_ids = entity_picture_template.extract_entities()
            if str(temp_ids) != MATCH_ALL:
                template_entity_ids |= set(temp_ids)

        if not template_entity_ids:
            template_entity_ids = MATCH_ALL

@ -124,8 +133,8 @@ def async_setup_platform(hass, config, async_add_devices, discovery_info=None):
                hass,
                device, friendly_name, state_template,
                position_template, tilt_template, icon_template,
                open_action, close_action, stop_action,
                position_action, tilt_action,
                entity_picture_template, open_action, close_action,
                stop_action, position_action, tilt_action,
                optimistic, tilt_optimistic, entity_ids
            )
        )
@ -142,8 +151,8 @@ class CoverTemplate(CoverDevice):

    def __init__(self, hass, device_id, friendly_name, state_template,
                 position_template, tilt_template, icon_template,
                 open_action, close_action, stop_action,
                 position_action, tilt_action,
                 entity_picture_template, open_action, close_action,
                 stop_action, position_action, tilt_action,
                 optimistic, tilt_optimistic, entity_ids):
        """Initialize the Template cover."""
        self.hass = hass
@ -154,6 +163,7 @@ class CoverTemplate(CoverDevice):
        self._position_template = position_template
        self._tilt_template = tilt_template
        self._icon_template = icon_template
        self._entity_picture_template = entity_picture_template
        self._open_script = None
        if open_action is not None:
            self._open_script = Script(hass, open_action)
@ -173,6 +183,7 @@ class CoverTemplate(CoverDevice):
            (not state_template and not position_template))
        self._tilt_optimistic = tilt_optimistic or not tilt_template
        self._icon = None
        self._entity_picture = None
        self._position = None
        self._tilt_value = None
        self._entities = entity_ids
@ -185,6 +196,8 @@ class CoverTemplate(CoverDevice):
            self._tilt_template.hass = self.hass
        if self._icon_template is not None:
            self._icon_template.hass = self.hass
        if self._entity_picture_template is not None:
            self._entity_picture_template.hass = self.hass

    @asyncio.coroutine
    def async_added_to_hass(self):
@ -236,6 +249,11 @@ class CoverTemplate(CoverDevice):
        """Return the icon to use in the frontend, if any."""
        return self._icon

    @property
    def entity_picture(self):
        """Return the entity picture to use in the frontend, if any."""
        return self._entity_picture

    @property
    def supported_features(self):
        """Flag supported features."""
@ -283,7 +301,7 @@ class CoverTemplate(CoverDevice):
    def async_stop_cover(self, **kwargs):
        """Fire the stop action."""
        if self._stop_script:
            self.hass.async_add_job(self._stop_script.async_run())
            yield from self._stop_script.async_run()

    @asyncio.coroutine
    def async_set_cover_position(self, **kwargs):
@ -369,16 +387,28 @@ class CoverTemplate(CoverDevice):
        except ValueError as ex:
            _LOGGER.error(ex)
            self._tilt_value = None
        if self._icon_template is not None:

        for property_name, template in (
                ('_icon', self._icon_template),
                ('_entity_picture', self._entity_picture_template)):
            if template is None:
                continue

            try:
                self._icon = self._icon_template.async_render()
                setattr(self, property_name, template.async_render())
            except TemplateError as ex:
                friendly_property_name = property_name[1:].replace('_', ' ')
                if ex.args and ex.args[0].startswith(
                        "UndefinedError: 'None' has no attribute"):
                    # Common during HA startup - so just a warning
                    _LOGGER.warning('Could not render icon template %s,'
                                    ' the state is unknown.', self._name)
                    _LOGGER.warning('Could not render %s template %s,'
                                    ' the state is unknown.',
                                    friendly_property_name, self._name)
                    return
                self._icon = super().icon
                _LOGGER.error('Could not render icon template %s: %s',
                              self._name, ex)

                try:
                    setattr(self, property_name,
                            getattr(super(), property_name))
                except AttributeError:
                    _LOGGER.error('Could not render %s template %s: %s',
                                  friendly_property_name, self._name, ex)
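
A configuration sketch exercising the new entity_picture_template option alongside icon_template; the surrounding keys (covers, value_template, open_cover, close_cover) come from the platform's existing schema, and the entity names, templates and image paths are purely illustrative:

cover:
  - platform: template
    covers:
      garage_door:
        friendly_name: Garage Door
        value_template: "{{ is_state('sensor.garage_door', 'open') }}"
        open_cover:
          service: switch.turn_on
          entity_id: switch.garage_door
        close_cover:
          service: switch.turn_off
          entity_id: switch.garage_door
        icon_template: >-
          {% if is_state('sensor.garage_door', 'open') %}mdi:garage-open{% else %}mdi:garage{% endif %}
        entity_picture_template: >-
          {% if is_state('sensor.garage_door', 'open') %}/local/garage_open.png{% else %}/local/garage_closed.png{% endif %}
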
@ -21,7 +21,7 @@ from homeassistant.components import group, zone
from homeassistant.config import load_yaml_config_file, async_log_exception
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers import config_per_platform
from homeassistant.helpers import config_per_platform, discovery
from homeassistant.helpers.entity import Entity
from homeassistant.helpers.event import async_track_time_interval
from homeassistant.helpers.restore_state import async_get_last_state
@ -175,6 +175,13 @@ def async_setup(hass: HomeAssistantType, config: ConfigType):

    tracker.async_setup_group()

    @asyncio.coroutine
    def async_platform_discovered(platform, info):
        """Load a platform."""
        yield from async_setup_platform(platform, {}, disc_info=info)

    discovery.async_listen_platform(hass, DOMAIN, async_platform_discovered)

    # Clean up stale devices
    async_track_utc_time_change(
        hass, tracker.async_update_stale, second=range(0, 60, 5))
@ -12,18 +12,17 @@ from collections import namedtuple

import voluptuous as vol

import homeassistant.helpers.config_validation as cv
from homeassistant.components.device_tracker import (
    DOMAIN, PLATFORM_SCHEMA, DeviceScanner)
from homeassistant.const import (
    CONF_HOST, CONF_PASSWORD, CONF_USERNAME, CONF_PORT)
import homeassistant.helpers.config_validation as cv
    CONF_HOST, CONF_PASSWORD, CONF_USERNAME, CONF_PORT, CONF_MODE,
    CONF_PROTOCOL)

REQUIREMENTS = ['pexpect==4.0.1']

_LOGGER = logging.getLogger(__name__)

CONF_MODE = 'mode'
CONF_PROTOCOL = 'protocol'
CONF_PUB_KEY = 'pub_key'
CONF_SSH_KEY = 'ssh_key'

@ -36,10 +35,8 @@ PLATFORM_SCHEMA = vol.All(
    PLATFORM_SCHEMA.extend({
        vol.Required(CONF_HOST): cv.string,
        vol.Required(CONF_USERNAME): cv.string,
        vol.Optional(CONF_PROTOCOL, default='ssh'):
            vol.In(['ssh', 'telnet']),
        vol.Optional(CONF_MODE, default='router'):
            vol.In(['router', 'ap']),
        vol.Optional(CONF_PROTOCOL, default='ssh'): vol.In(['ssh', 'telnet']),
        vol.Optional(CONF_MODE, default='router'): vol.In(['router', 'ap']),
        vol.Optional(CONF_PORT, default=DEFAULT_SSH_PORT): cv.port,
        vol.Exclusive(CONF_PASSWORD, SECRET_GROUP): cv.string,
        vol.Exclusive(CONF_SSH_KEY, SECRET_GROUP): cv.isfile,
@ -102,21 +99,18 @@ class AsusWrtDeviceScanner(DeviceScanner):
                self.success_init = False
                return

            self.connection = SshConnection(self.host, self.port,
                                            self.username,
                                            self.password,
                                            self.ssh_key,
                                            self.mode == "ap")
            self.connection = SshConnection(
                self.host, self.port, self.username, self.password,
                self.ssh_key, self.mode == 'ap')
        else:
            if not self.password:
                _LOGGER.error("No password specified")
                self.success_init = False
                return

            self.connection = TelnetConnection(self.host, self.port,
                                               self.username,
                                               self.password,
                                               self.mode == "ap")
            self.connection = TelnetConnection(
                self.host, self.port, self.username, self.password,
                self.mode == 'ap')

        self.last_results = {}
@ -23,7 +23,7 @@ from homeassistant.helpers.aiohttp_client import async_get_clientsession
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.event import async_track_time_interval

REQUIREMENTS = ['aioautomatic==0.6.3']
REQUIREMENTS = ['aioautomatic==0.6.4']
DEPENDENCIES = ['http']

_LOGGER = logging.getLogger(__name__)
@ -35,7 +35,7 @@ CONF_CURRENT_LOCATION = 'current_location'

DEFAULT_TIMEOUT = 5

DEFAULT_SCOPE = ['location', 'vehicle:profile', 'trip']
DEFAULT_SCOPE = ['location', 'trip', 'vehicle:events', 'vehicle:profile']
FULL_SCOPE = DEFAULT_SCOPE + ['current_location']

ATTR_FUEL_LEVEL = 'fuel_level'
@ -21,6 +21,9 @@ _LOGGER = logging.getLogger(__name__)

DEPENDENCIES = ['http']

ATTR_CURRENT_LATITUDE = 'currentLatitude'
ATTR_CURRENT_LONGITUDE = 'currentLongitude'

BEACON_DEV_PREFIX = 'beacon'
CONF_MOBILE_BEACONS = 'mobile_beacons'

@ -72,6 +75,9 @@ class GeofencyView(HomeAssistantView):
            location_name = data['name']
        else:
            location_name = STATE_NOT_HOME
        if ATTR_CURRENT_LATITUDE in data:
            data[ATTR_LATITUDE] = data[ATTR_CURRENT_LATITUDE]
            data[ATTR_LONGITUDE] = data[ATTR_CURRENT_LONGITUDE]

        return (yield from self._set_location(hass, data, location_name))

@ -96,8 +102,12 @@ class GeofencyView(HomeAssistantView):
        data['device'] = slugify(data['device'])
        data['name'] = slugify(data['name'])

        data[ATTR_LATITUDE] = float(data[ATTR_LATITUDE])
        data[ATTR_LONGITUDE] = float(data[ATTR_LONGITUDE])
        gps_attributes = [ATTR_LATITUDE, ATTR_LONGITUDE,
                          ATTR_CURRENT_LATITUDE, ATTR_CURRENT_LONGITUDE]

        for attribute in gps_attributes:
            if attribute in data:
                data[attribute] = float(data[attribute])

        return data
@ -76,25 +76,47 @@ class MikrotikScanner(DeviceScanner):
|
||||
port=int(self.port)
|
||||
)
|
||||
|
||||
routerboard_info = self.client(cmd='/system/routerboard/getall')
|
||||
try:
|
||||
routerboard_info = self.client(
|
||||
cmd='/system/routerboard/getall')
|
||||
except (librouteros.exceptions.TrapError,
|
||||
librouteros.exceptions.MultiTrapError,
|
||||
librouteros.exceptions.ConnectionError):
|
||||
routerboard_info = None
|
||||
raise
|
||||
|
||||
if routerboard_info:
|
||||
_LOGGER.info("Connected to Mikrotik %s with IP %s",
|
||||
routerboard_info[0].get('model', 'Router'),
|
||||
self.host)
|
||||
|
||||
self.connected = True
|
||||
self.capsman_exist = self.client(
|
||||
cmd='/capsman/interface/getall'
|
||||
)
|
||||
|
||||
try:
|
||||
self.capsman_exist = self.client(
|
||||
cmd='/caps-man/interface/getall'
|
||||
)
|
||||
except (librouteros.exceptions.TrapError,
|
||||
librouteros.exceptions.MultiTrapError,
|
||||
librouteros.exceptions.ConnectionError):
|
||||
self.capsman_exist = False
|
||||
|
||||
if not self.capsman_exist:
|
||||
_LOGGER.info(
|
||||
'Mikrotik %s: Not a CAPSman controller. Trying '
|
||||
'local interfaces ',
|
||||
self.host
|
||||
)
|
||||
self.wireless_exist = self.client(
|
||||
cmd='/interface/wireless/getall'
|
||||
)
|
||||
|
||||
try:
|
||||
self.wireless_exist = self.client(
|
||||
cmd='/interface/wireless/getall'
|
||||
)
|
||||
except (librouteros.exceptions.TrapError,
|
||||
librouteros.exceptions.MultiTrapError,
|
||||
librouteros.exceptions.ConnectionError):
|
||||
self.wireless_exist = False
|
||||
|
||||
if not self.wireless_exist:
|
||||
_LOGGER.info(
|
||||
'Mikrotik %s: Wireless adapters not found. Try to '
|
||||
@ -104,6 +126,7 @@ class MikrotikScanner(DeviceScanner):
|
||||
)
|
||||
|
||||
except (librouteros.exceptions.TrapError,
|
||||
librouteros.exceptions.MultiTrapError,
|
||||
librouteros.exceptions.ConnectionError) as api_error:
|
||||
_LOGGER.error("Connection error: %s", api_error)
|
||||
|
||||
|
@ -20,7 +20,7 @@ from homeassistant.const import STATE_HOME
|
||||
from homeassistant.core import callback
|
||||
from homeassistant.util import slugify, decorator
|
||||
|
||||
REQUIREMENTS = ['libnacl==1.6.0']
|
||||
REQUIREMENTS = ['libnacl==1.6.1']
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
@ -199,7 +199,7 @@ class OwnTracksContext:
|
||||
self.async_see = async_see
|
||||
self.secret = secret
|
||||
self.max_gps_accuracy = max_gps_accuracy
|
||||
self.mobile_beacons_active = defaultdict(list)
|
||||
self.mobile_beacons_active = defaultdict(set)
|
||||
self.regions_entered = defaultdict(list)
|
||||
self.import_waypoints = import_waypoints
|
||||
self.waypoint_whitelist = waypoint_whitelist
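A minimal sketch of why the switch from defaultdict(list) to defaultdict(set) matters here, assuming repeated enter events for the same beacon (device and beacon names invented):

from collections import defaultdict

mobile_beacons_active = defaultdict(set)
mobile_beacons_active['phone_dave'].add('car_beacon')
mobile_beacons_active['phone_dave'].add('car_beacon')  # duplicate enter event is a no-op
assert mobile_beacons_active['phone_dave'] == {'car_beacon'}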
|
||||
@ -234,10 +234,25 @@ class OwnTracksContext:
|
||||
return True
|
||||
|
||||
@asyncio.coroutine
|
||||
def async_see_beacons(self, dev_id, kwargs_param):
|
||||
def async_see_beacons(self, hass, dev_id, kwargs_param):
|
||||
"""Set active beacons to the current location."""
|
||||
kwargs = kwargs_param.copy()
|
||||
|
||||
# Mobile beacons should always be set to the location of the
|
||||
# tracking device. I get the device state and make the necessary
|
||||
# changes to kwargs.
|
||||
device_tracker_state = hass.states.get(
|
||||
"device_tracker.{}".format(dev_id))
|
||||
|
||||
if device_tracker_state is not None:
|
||||
acc = device_tracker_state.attributes.get("gps_accuracy")
|
||||
lat = device_tracker_state.attributes.get("latitude")
|
||||
lon = device_tracker_state.attributes.get("longitude")
|
||||
kwargs['gps_accuracy'] = acc
|
||||
kwargs['gps'] = (lat, lon)
|
||||
|
||||
# the battery state applies to the tracking device, not the beacon
|
||||
# kwargs location is the beacon's configured lat/lon
|
||||
kwargs.pop('battery', None)
|
||||
for beacon in self.mobile_beacons_active[dev_id]:
|
||||
kwargs['dev_id'] = "{}_{}".format(BEACON_DEV_ID, beacon)
|
||||
@ -261,7 +276,7 @@ def async_handle_location_message(hass, context, message):
|
||||
return
|
||||
|
||||
yield from context.async_see(**kwargs)
|
||||
yield from context.async_see_beacons(dev_id, kwargs)
|
||||
yield from context.async_see_beacons(hass, dev_id, kwargs)
|
||||
|
||||
|
||||
@asyncio.coroutine
|
||||
@ -271,11 +286,15 @@ def _async_transition_message_enter(hass, context, message, location):
|
||||
dev_id, kwargs = _parse_see_args(message)
|
||||
|
||||
if zone is None and message.get('t') == 'b':
|
||||
# Not a HA zone, and a beacon so assume mobile
|
||||
# Not a HA zone, and a beacon, so treat it as a mobile beacon.
|
||||
# kwargs will contain the lat/lon of the beacon
|
||||
# which is not where the beacon actually is
|
||||
# and is probably set to 0/0
|
||||
beacons = context.mobile_beacons_active[dev_id]
|
||||
if location not in beacons:
|
||||
beacons.append(location)
|
||||
beacons.add(location)
|
||||
_LOGGER.info("Added beacon %s", location)
|
||||
yield from context.async_see_beacons(hass, dev_id, kwargs)
|
||||
else:
|
||||
# Normal region
|
||||
regions = context.regions_entered[dev_id]
|
||||
@ -283,9 +302,8 @@ def _async_transition_message_enter(hass, context, message, location):
|
||||
regions.append(location)
|
||||
_LOGGER.info("Enter region %s", location)
|
||||
_set_gps_from_zone(kwargs, location, zone)
|
||||
|
||||
yield from context.async_see(**kwargs)
|
||||
yield from context.async_see_beacons(dev_id, kwargs)
|
||||
yield from context.async_see(**kwargs)
|
||||
yield from context.async_see_beacons(hass, dev_id, kwargs)
|
||||
|
||||
|
||||
@asyncio.coroutine
|
||||
@ -297,30 +315,29 @@ def _async_transition_message_leave(hass, context, message, location):
|
||||
if location in regions:
|
||||
regions.remove(location)
|
||||
|
||||
new_region = regions[-1] if regions else None
|
||||
|
||||
if new_region:
|
||||
# Exit to previous region
|
||||
zone = hass.states.get(
|
||||
"zone.{}".format(slugify(new_region)))
|
||||
_set_gps_from_zone(kwargs, new_region, zone)
|
||||
_LOGGER.info("Exit to %s", new_region)
|
||||
yield from context.async_see(**kwargs)
|
||||
yield from context.async_see_beacons(dev_id, kwargs)
|
||||
return
|
||||
|
||||
beacons = context.mobile_beacons_active[dev_id]
|
||||
if location in beacons:
|
||||
beacons.remove(location)
|
||||
_LOGGER.info("Remove beacon %s", location)
|
||||
yield from context.async_see_beacons(hass, dev_id, kwargs)
|
||||
else:
|
||||
new_region = regions[-1] if regions else None
|
||||
if new_region:
|
||||
# Exit to previous region
|
||||
zone = hass.states.get(
|
||||
"zone.{}".format(slugify(new_region)))
|
||||
_set_gps_from_zone(kwargs, new_region, zone)
|
||||
_LOGGER.info("Exit to %s", new_region)
|
||||
yield from context.async_see(**kwargs)
|
||||
yield from context.async_see_beacons(hass, dev_id, kwargs)
|
||||
return
|
||||
|
||||
_LOGGER.info("Exit to GPS")
|
||||
|
||||
# Check for GPS accuracy
|
||||
if context.async_valid_accuracy(message):
|
||||
yield from context.async_see(**kwargs)
|
||||
yield from context.async_see_beacons(dev_id, kwargs)
|
||||
|
||||
beacons = context.mobile_beacons_active[dev_id]
|
||||
if location in beacons:
|
||||
beacons.remove(location)
|
||||
_LOGGER.info("Remove beacon %s", location)
|
||||
yield from context.async_see_beacons(hass, dev_id, kwargs)
|
||||
|
||||
|
||||
@HANDLERS.register('transition')
|
||||
|
@ -1,41 +1,33 @@
|
||||
# Describes the format for available device tracker services
|
||||
|
||||
see:
|
||||
description: Control tracked device
|
||||
|
||||
description: Control tracked device.
|
||||
fields:
|
||||
mac:
|
||||
description: MAC address of device
|
||||
example: 'FF:FF:FF:FF:FF:FF'
|
||||
|
||||
dev_id:
|
||||
description: Id of device (find id in known_devices.yaml)
|
||||
description: Id of device (find id in known_devices.yaml).
|
||||
example: 'phonedave'
|
||||
|
||||
host_name:
|
||||
description: Hostname of device
|
||||
example: 'Dave'
|
||||
|
||||
location_name:
|
||||
description: Name of location where device is located (not_home is away)
|
||||
description: Name of location where device is located (not_home is away).
|
||||
example: 'home'
|
||||
|
||||
gps:
|
||||
description: GPS coordinates where device is located (latitude, longitude)
|
||||
description: GPS coordinates where device is located (latitude, longitude).
|
||||
example: '[51.509802, -0.086692]'
|
||||
|
||||
gps_accuracy:
|
||||
description: Accuracy of GPS coordinates
|
||||
description: Accuracy of GPS coordinates.
|
||||
example: '80'
|
||||
|
||||
battery:
|
||||
description: Battery level of device
|
||||
description: Battery level of device.
|
||||
example: '100'
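A hypothetical call matching the fields documented above, assuming a hass instance is in scope (the values reuse the examples given):

hass.services.call('device_tracker', 'see', {
    'dev_id': 'phonedave',
    'gps': [51.509802, -0.086692],
    'gps_accuracy': 80,
    'battery': 100,
})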
|
||||
|
||||
icloud:
|
||||
icloud_lost_iphone:
|
||||
description: Service to play the lost iphone sound on an iDevice
|
||||
|
||||
description: Service to play the lost iphone sound on an iDevice.
|
||||
fields:
|
||||
account_name:
|
||||
description: Name of the account in the config that will be used to look for the device. This is optional, if it isn't given it will use all accounts.
|
||||
@ -43,10 +35,8 @@ icloud:
|
||||
device_name:
|
||||
description: Name of the device that will play the sound. This is optional, if it isn't given it will play on all devices for the given account.
|
||||
example: 'iphonebart'
|
||||
|
||||
icloud_set_interval:
|
||||
description: Service to set the interval of an iDevice
|
||||
|
||||
description: Service to set the interval of an iDevice.
|
||||
fields:
|
||||
account_name:
|
||||
description: Name of the account in the config that will be used to look for the device. This is optional, if it isn't given it will use all accounts.
|
||||
@ -57,10 +47,8 @@ icloud:
|
||||
interval:
|
||||
description: The interval (in minutes) that the iDevice will have until the according device_tracker entity changes from zone or until this service is used again. This is optional, if it isn't given the interval of the device will revert back to the original interval based on the current state.
|
||||
example: 1
|
||||
|
||||
icloud_update:
|
||||
description: Service to ask for an update of an iDevice.
|
||||
|
||||
fields:
|
||||
account_name:
|
||||
description: Name of the account in the config that will be used to look for the device. This is optional, if it isn't given it will use all accounts.
|
||||
@ -68,10 +56,8 @@ icloud:
|
||||
device_name:
|
||||
description: Name of the device that will be updated. This is optional, if it isn't given it will update all devices for the given account.
|
||||
example: 'iphonebart'
|
||||
|
||||
icloud_reset_account:
|
||||
description: Service to restart an iCloud account. Helpful when not all devices are found after initializing or when you add a new device.
|
||||
|
||||
fields:
|
||||
account_name:
|
||||
description: Name of the account in the config that will be restarted. This is optional, if it isn't given it will restart all accounts.
|
||||
|
@ -16,7 +16,7 @@ from homeassistant.const import CONF_HOST
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
REQUIREMENTS = ['pysnmp==4.3.10']
|
||||
REQUIREMENTS = ['pysnmp==4.4.1']
|
||||
|
||||
CONF_COMMUNITY = 'community'
|
||||
CONF_AUTHKEY = 'authkey'
|
||||
@ -26,11 +26,11 @@ CONF_BASEOID = 'baseoid'
|
||||
DEFAULT_COMMUNITY = 'public'
|
||||
|
||||
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
|
||||
vol.Required(CONF_BASEOID): cv.string,
|
||||
vol.Required(CONF_HOST): cv.string,
|
||||
vol.Optional(CONF_COMMUNITY, default=DEFAULT_COMMUNITY): cv.string,
|
||||
vol.Inclusive(CONF_AUTHKEY, 'keys'): cv.string,
|
||||
vol.Inclusive(CONF_PRIVKEY, 'keys'): cv.string,
|
||||
vol.Required(CONF_BASEOID): cv.string
|
||||
})
|
||||
|
||||
|
||||
|
@ -19,16 +19,29 @@ from homeassistant.exceptions import HomeAssistantError
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
CONF_DHCP_SOFTWARE = 'dhcp_software'
|
||||
DEFAULT_DHCP_SOFTWARE = 'dnsmasq'
|
||||
DHCP_SOFTWARES = [
|
||||
'dnsmasq',
|
||||
'odhcpd'
|
||||
]
|
||||
|
||||
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
|
||||
vol.Required(CONF_HOST): cv.string,
|
||||
vol.Required(CONF_PASSWORD): cv.string,
|
||||
vol.Required(CONF_USERNAME): cv.string
|
||||
vol.Required(CONF_USERNAME): cv.string,
|
||||
vol.Optional(CONF_DHCP_SOFTWARE,
|
||||
default=DEFAULT_DHCP_SOFTWARE): vol.In(DHCP_SOFTWARES)
|
||||
})
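A small sketch of what the new dhcp_software option accepts; vol.In simply restricts the value to the listed choices:

import voluptuous as vol

DHCP_SOFTWARES = ['dnsmasq', 'odhcpd']
validator = vol.In(DHCP_SOFTWARES)

validator('odhcpd')    # returns 'odhcpd'
# validator('kea')     # would raise vol.Invalid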
|
||||
|
||||
|
||||
def get_scanner(hass, config):
|
||||
"""Validate the configuration and return an ubus scanner."""
|
||||
scanner = UbusDeviceScanner(config[DOMAIN])
|
||||
dhcp_sw = config[DOMAIN][CONF_DHCP_SOFTWARE]
|
||||
if dhcp_sw == 'dnsmasq':
|
||||
scanner = DnsmasqUbusDeviceScanner(config[DOMAIN])
|
||||
else:
|
||||
scanner = OdhcpdUbusDeviceScanner(config[DOMAIN])
|
||||
|
||||
return scanner if scanner.success_init else None
|
||||
|
||||
@ -70,7 +83,6 @@ class UbusDeviceScanner(DeviceScanner):
|
||||
self.session_id = _get_session_id(self.url, self.username,
|
||||
self.password)
|
||||
self.hostapd = []
|
||||
self.leasefile = None
|
||||
self.mac2name = None
|
||||
self.success_init = self.session_id is not None
|
||||
|
||||
@ -79,44 +91,29 @@ class UbusDeviceScanner(DeviceScanner):
|
||||
self._update_info()
|
||||
return self.last_results
|
||||
|
||||
def _generate_mac2name(self):
|
||||
"""Must be implemented depending on the software."""
|
||||
raise NotImplementedError
|
||||
|
||||
@_refresh_on_acccess_denied
|
||||
def get_device_name(self, mac):
|
||||
"""Return the name of the given device or None if we don't know."""
|
||||
if self.leasefile is None:
|
||||
result = _req_json_rpc(
|
||||
self.url, self.session_id, 'call', 'uci', 'get',
|
||||
config="dhcp", type="dnsmasq")
|
||||
if result:
|
||||
values = result["values"].values()
|
||||
self.leasefile = next(iter(values))["leasefile"]
|
||||
else:
|
||||
return
|
||||
|
||||
if self.mac2name is None:
|
||||
result = _req_json_rpc(
|
||||
self.url, self.session_id, 'call', 'file', 'read',
|
||||
path=self.leasefile)
|
||||
if result:
|
||||
self.mac2name = dict()
|
||||
for line in result["data"].splitlines():
|
||||
hosts = line.split(" ")
|
||||
self.mac2name[hosts[1].upper()] = hosts[3]
|
||||
else:
|
||||
# Error, handled in the _req_json_rpc
|
||||
return
|
||||
|
||||
return self.mac2name.get(mac.upper(), None)
|
||||
self._generate_mac2name()
|
||||
name = self.mac2name.get(mac.upper(), None)
|
||||
self.mac2name = None
|
||||
return name
|
||||
|
||||
@_refresh_on_acccess_denied
|
||||
def _update_info(self):
|
||||
"""Ensure the information from the Luci router is up to date.
|
||||
"""Ensure the information from the router is up to date.
|
||||
|
||||
Returns boolean if scanning successful.
|
||||
"""
|
||||
if not self.success_init:
|
||||
return False
|
||||
|
||||
_LOGGER.info("Checking ARP")
|
||||
_LOGGER.info("Checking hostapd")
|
||||
|
||||
if not self.hostapd:
|
||||
hostapd = _req_json_rpc(
|
||||
@ -136,6 +133,57 @@ class UbusDeviceScanner(DeviceScanner):
|
||||
return bool(results)
|
||||
|
||||
|
||||
class DnsmasqUbusDeviceScanner(UbusDeviceScanner):
|
||||
"""Implement the Ubus device scanning for the dnsmasq DHCP server."""
|
||||
|
||||
def __init__(self, config):
|
||||
"""Initialize the scanner."""
|
||||
super(DnsmasqUbusDeviceScanner, self).__init__(config)
|
||||
self.leasefile = None
|
||||
|
||||
def _generate_mac2name(self):
|
||||
if self.leasefile is None:
|
||||
result = _req_json_rpc(
|
||||
self.url, self.session_id, 'call', 'uci', 'get',
|
||||
config="dhcp", type="dnsmasq")
|
||||
if result:
|
||||
values = result["values"].values()
|
||||
self.leasefile = next(iter(values))["leasefile"]
|
||||
else:
|
||||
return
|
||||
|
||||
result = _req_json_rpc(
|
||||
self.url, self.session_id, 'call', 'file', 'read',
|
||||
path=self.leasefile)
|
||||
if result:
|
||||
self.mac2name = dict()
|
||||
for line in result["data"].splitlines():
|
||||
hosts = line.split(" ")
|
||||
self.mac2name[hosts[1].upper()] = hosts[3]
|
||||
else:
|
||||
# Error, handled in the _req_json_rpc
|
||||
return
|
||||
|
||||
|
||||
class OdhcpdUbusDeviceScanner(UbusDeviceScanner):
|
||||
"""Implement the Ubus device scanning for the odhcp DHCP server."""
|
||||
|
||||
def _generate_mac2name(self):
|
||||
result = _req_json_rpc(
|
||||
self.url, self.session_id, 'call', 'dhcp', 'ipv4leases')
|
||||
if result:
|
||||
self.mac2name = dict()
|
||||
for device in result["device"].values():
|
||||
for lease in device['leases']:
|
||||
mac = lease['mac'] # mac = aabbccddeeff
|
||||
# Convert it to expected format with colon
|
||||
mac = ":".join(mac[i:i+2] for i in range(0, len(mac), 2))
|
||||
self.mac2name[mac.upper()] = lease['hostname']
|
||||
else:
|
||||
# Error, handled in the _req_json_rpc
|
||||
return
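The MAC reformatting above, shown on a hypothetical lease value:

mac = 'aabbccddeeff'                              # hypothetical lease value
mac = ":".join(mac[i:i + 2] for i in range(0, len(mac), 2))
assert mac == 'aa:bb:cc:dd:ee:ff'                 # upper-cased before being stored above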
|
||||
|
||||
|
||||
def _req_json_rpc(url, session_id, rpcmethod, subsystem, method, **params):
|
||||
"""Perform one JSON RPC operation."""
|
||||
data = json.dumps({"jsonrpc": "2.0",
|
||||
|
@ -1,8 +1,8 @@
|
||||
"""
|
||||
Support for API.AI webhook.
|
||||
Support for Dialogflow webhook.
|
||||
|
||||
For more details about this component, please refer to the documentation at
|
||||
https://home-assistant.io/components/apiai/
|
||||
https://home-assistant.io/components/dialogflow/
|
||||
"""
|
||||
import asyncio
|
||||
import logging
|
||||
@ -15,17 +15,16 @@ from homeassistant.components.http import HomeAssistantView
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
INTENTS_API_ENDPOINT = '/api/apiai'
|
||||
|
||||
CONF_INTENTS = 'intents'
|
||||
CONF_SPEECH = 'speech'
|
||||
CONF_ACTION = 'action'
|
||||
CONF_ASYNC_ACTION = 'async_action'
|
||||
|
||||
DEFAULT_CONF_ASYNC_ACTION = False
|
||||
|
||||
DOMAIN = 'apiai'
|
||||
DEPENDENCIES = ['http']
|
||||
DOMAIN = 'dialogflow'
|
||||
|
||||
INTENTS_API_ENDPOINT = '/api/dialogflow'
|
||||
|
||||
CONFIG_SCHEMA = vol.Schema({
|
||||
DOMAIN: {}
|
||||
@ -34,30 +33,30 @@ CONFIG_SCHEMA = vol.Schema({
|
||||
|
||||
@asyncio.coroutine
|
||||
def async_setup(hass, config):
|
||||
"""Activate API.AI component."""
|
||||
hass.http.register_view(ApiaiIntentsView)
|
||||
"""Set up Dialogflow component."""
|
||||
hass.http.register_view(DialogflowIntentsView)
|
||||
|
||||
return True
|
||||
|
||||
|
||||
class ApiaiIntentsView(HomeAssistantView):
|
||||
"""Handle API.AI requests."""
|
||||
class DialogflowIntentsView(HomeAssistantView):
|
||||
"""Handle Dialogflow requests."""
|
||||
|
||||
url = INTENTS_API_ENDPOINT
|
||||
name = 'api:apiai'
|
||||
name = 'api:dialogflow'
|
||||
|
||||
@asyncio.coroutine
|
||||
def post(self, request):
|
||||
"""Handle API.AI."""
|
||||
"""Handle Dialogflow."""
|
||||
hass = request.app['hass']
|
||||
data = yield from request.json()
|
||||
|
||||
_LOGGER.debug("Received api.ai request: %s", data)
|
||||
_LOGGER.debug("Received Dialogflow request: %s", data)
|
||||
|
||||
req = data.get('result')
|
||||
|
||||
if req is None:
|
||||
_LOGGER.error("Received invalid data from api.ai: %s", data)
|
||||
_LOGGER.error("Received invalid data from Dialogflow: %s", data)
|
||||
return self.json_message(
|
||||
"Expected result value not received", HTTP_BAD_REQUEST)
|
||||
|
||||
@ -68,13 +67,13 @@ class ApiaiIntentsView(HomeAssistantView):
|
||||
|
||||
action = req.get('action')
|
||||
parameters = req.get('parameters')
|
||||
apiai_response = ApiaiResponse(parameters)
|
||||
dialogflow_response = DialogflowResponse(parameters)
|
||||
|
||||
if action == "":
|
||||
_LOGGER.warning("Received intent with empty action")
|
||||
apiai_response.add_speech(
|
||||
"You have not defined an action in your api.ai intent.")
|
||||
return self.json(apiai_response)
|
||||
dialogflow_response.add_speech(
|
||||
"You have not defined an action in your Dialogflow intent.")
|
||||
return self.json(dialogflow_response)
|
||||
|
||||
try:
|
||||
intent_response = yield from intent.async_handle(
|
||||
@ -83,31 +82,31 @@ class ApiaiIntentsView(HomeAssistantView):
|
||||
in parameters.items()})
|
||||
|
||||
except intent.UnknownIntent as err:
|
||||
_LOGGER.warning('Received unknown intent %s', action)
|
||||
apiai_response.add_speech(
|
||||
_LOGGER.warning("Received unknown intent %s", action)
|
||||
dialogflow_response.add_speech(
|
||||
"This intent is not yet configured within Home Assistant.")
|
||||
return self.json(apiai_response)
|
||||
return self.json(dialogflow_response)
|
||||
|
||||
except intent.InvalidSlotInfo as err:
|
||||
_LOGGER.error('Received invalid slot data: %s', err)
|
||||
_LOGGER.error("Received invalid slot data: %s", err)
|
||||
return self.json_message('Invalid slot data received',
|
||||
HTTP_BAD_REQUEST)
|
||||
except intent.IntentError:
|
||||
_LOGGER.exception('Error handling request for %s', action)
|
||||
_LOGGER.exception("Error handling request for %s", action)
|
||||
return self.json_message('Error handling intent', HTTP_BAD_REQUEST)
|
||||
|
||||
if 'plain' in intent_response.speech:
|
||||
apiai_response.add_speech(
|
||||
dialogflow_response.add_speech(
|
||||
intent_response.speech['plain']['speech'])
|
||||
|
||||
return self.json(apiai_response)
|
||||
return self.json(dialogflow_response)
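DialogflowResponse.as_dict() (see further down) emits 'speech' and 'displayText'; a successful response therefore looks roughly like this, with invented speech text:

response = {
    'speech': 'Turning on the living room lights.',
    'displayText': 'Turning on the living room lights.',
}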
|
||||
|
||||
|
||||
class ApiaiResponse(object):
|
||||
"""Help generating the response for API.AI."""
|
||||
class DialogflowResponse(object):
|
||||
"""Help generating the response for Dialogflow."""
|
||||
|
||||
def __init__(self, parameters):
|
||||
"""Initialize the response."""
|
||||
"""Initialize the Dialogflow response."""
|
||||
self.speech = None
|
||||
self.parameters = {}
|
||||
# Parameter names replace '.' and '-' for '_'
|
||||
@ -125,7 +124,7 @@ class ApiaiResponse(object):
|
||||
self.speech = text
|
||||
|
||||
def as_dict(self):
|
||||
"""Return response in an API.AI valid dict."""
|
||||
"""Return response in a Dialogflow valid dictionary."""
|
||||
return {
|
||||
'speech': self.speech,
|
||||
'displayText': self.speech,
|
@ -21,7 +21,7 @@ from homeassistant.helpers.event import async_track_point_in_utc_time
|
||||
from homeassistant.helpers.discovery import async_load_platform, async_discover
|
||||
import homeassistant.util.dt as dt_util
|
||||
|
||||
REQUIREMENTS = ['netdisco==1.2.2']
|
||||
REQUIREMENTS = ['netdisco==1.2.3']
|
||||
|
||||
DOMAIN = 'discovery'
|
||||
|
||||
|
@ -17,6 +17,7 @@ from homeassistant.util import sanitize_filename
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
ATTR_FILENAME = 'filename'
|
||||
ATTR_SUBDIR = 'subdir'
|
||||
ATTR_URL = 'url'
|
||||
|
||||
@ -29,6 +30,7 @@ SERVICE_DOWNLOAD_FILE = 'download_file'
|
||||
SERVICE_DOWNLOAD_FILE_SCHEMA = vol.Schema({
|
||||
vol.Required(ATTR_URL): cv.url,
|
||||
vol.Optional(ATTR_SUBDIR): cv.string,
|
||||
vol.Optional(ATTR_FILENAME): cv.string,
|
||||
})
|
||||
|
||||
CONFIG_SCHEMA = vol.Schema({
|
||||
@ -62,6 +64,8 @@ def setup(hass, config):
|
||||
|
||||
subdir = service.data.get(ATTR_SUBDIR)
|
||||
|
||||
filename = service.data.get(ATTR_FILENAME)
|
||||
|
||||
if subdir:
|
||||
subdir = sanitize_filename(subdir)
|
||||
|
||||
@ -70,9 +74,9 @@ def setup(hass, config):
|
||||
req = requests.get(url, stream=True, timeout=10)
|
||||
|
||||
if req.status_code == 200:
|
||||
filename = None
|
||||
|
||||
if 'content-disposition' in req.headers:
|
||||
if filename is None and \
|
||||
'content-disposition' in req.headers:
|
||||
match = re.findall(r"filename=(\S+)",
|
||||
req.headers['content-disposition'])
|
||||
|
||||
@ -80,8 +84,7 @@ def setup(hass, config):
|
||||
filename = match[0].strip("'\" ")
|
||||
|
||||
if not filename:
|
||||
filename = os.path.basename(
|
||||
url).strip()
|
||||
filename = os.path.basename(url).strip()
|
||||
|
||||
if not filename:
|
||||
filename = 'ha_download'
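The filename fallback chain above, sketched with a hypothetical response header and URL:

import os
import re

headers = {'content-disposition': 'attachment; filename="report.pdf"'}  # hypothetical
url = 'https://example.com/files/report.pdf'

filename = None
if filename is None and 'content-disposition' in headers:
    match = re.findall(r"filename=(\S+)", headers['content-disposition'])
    if match:
        filename = match[0].strip("'\" ")
if not filename:
    filename = os.path.basename(url).strip()
if not filename:
    filename = 'ha_download'
# filename == 'report.pdf'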
|
||||
|
@ -1,4 +1,9 @@
|
||||
"""Integrate with DuckDNS."""
|
||||
"""
|
||||
Integrate with DuckDNS.
|
||||
|
||||
For more details about this component, please refer to the documentation at
|
||||
https://home-assistant.io/components/duckdns/
|
||||
"""
|
||||
import asyncio
|
||||
from datetime import timedelta
|
||||
import logging
|
||||
@ -11,13 +16,18 @@ import homeassistant.helpers.config_validation as cv
|
||||
from homeassistant.helpers.event import async_track_time_interval
|
||||
from homeassistant.helpers.aiohttp_client import async_get_clientsession
|
||||
|
||||
DOMAIN = 'duckdns'
|
||||
UPDATE_URL = 'https://www.duckdns.org/update'
|
||||
INTERVAL = timedelta(minutes=5)
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
SERVICE_SET_TXT = 'set_txt'
|
||||
|
||||
ATTR_TXT = 'txt'
|
||||
|
||||
DOMAIN = 'duckdns'
|
||||
|
||||
INTERVAL = timedelta(minutes=5)
|
||||
|
||||
SERVICE_SET_TXT = 'set_txt'
|
||||
|
||||
UPDATE_URL = 'https://www.duckdns.org/update'
|
||||
|
||||
CONFIG_SCHEMA = vol.Schema({
|
||||
DOMAIN: vol.Schema({
|
||||
vol.Required(CONF_DOMAIN): cv.string,
|
||||
@ -59,8 +69,8 @@ def async_setup(hass, config):
|
||||
@asyncio.coroutine
|
||||
def update_domain_service(call):
|
||||
"""Update the DuckDNS entry."""
|
||||
yield from _update_duckdns(session, domain, token,
|
||||
txt=call.data[ATTR_TXT])
|
||||
yield from _update_duckdns(
|
||||
session, domain, token, txt=call.data[ATTR_TXT])
|
||||
|
||||
async_track_time_interval(hass, update_domain_interval, INTERVAL)
|
||||
hass.services.async_register(
|
||||
@ -96,7 +106,7 @@ def _update_duckdns(session, domain, token, *, txt=_SENTINEL, clear=False):
|
||||
body = yield from resp.text()
|
||||
|
||||
if body != 'OK':
|
||||
_LOGGER.warning('Updating DuckDNS domain %s failed', domain)
|
||||
_LOGGER.warning("Updating DuckDNS domain failed: %s", domain)
|
||||
return False
|
||||
|
||||
return True
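The request itself sits outside this hunk; as a rough sketch, the parameter names below follow the public DuckDNS update API and are an assumption, not taken from this diff:

params = {
    'domains': 'myhome',             # assumption: the bare subdomain goes here
    'token': '0123-example-token',   # placeholder token
    'txt': 'acme-challenge-value',
}
# session.get(UPDATE_URL, params=params); the update counts as successful
# only when the response body is exactly 'OK', as checked above.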
|
||||
|
@ -76,7 +76,6 @@ def setup(hass, yaml_config):
|
||||
|
||||
server = HomeAssistantWSGI(
|
||||
hass,
|
||||
development=False,
|
||||
server_host=config.host_ip_addr,
|
||||
server_port=config.listen_port,
|
||||
api_password=None,
|
||||
|
@ -287,6 +287,11 @@ def parse_hue_api_put_light_body(request_json, entity):
|
||||
report_brightness = True
|
||||
result = (brightness > 0)
|
||||
|
||||
elif entity.domain == "scene":
|
||||
brightness = None
|
||||
report_brightness = False
|
||||
result = True
|
||||
|
||||
elif (entity.domain == "script" or
|
||||
entity.domain == "media_player" or
|
||||
entity.domain == "fan"):
|
||||
|
@ -186,7 +186,7 @@ class MqttFan(FanEntity):
|
||||
yield from mqtt.async_subscribe(
|
||||
self.hass, self._topic[CONF_SPEED_STATE_TOPIC], speed_received,
|
||||
self._qos)
|
||||
self._speed = SPEED_OFF
|
||||
self._speed = SPEED_OFF
|
||||
|
||||
@callback
|
||||
def oscillation_received(topic, payload, qos):
|
||||
@ -202,7 +202,7 @@ class MqttFan(FanEntity):
|
||||
yield from mqtt.async_subscribe(
|
||||
self.hass, self._topic[CONF_OSCILLATION_STATE_TOPIC],
|
||||
oscillation_received, self._qos)
|
||||
self._oscillation = False
|
||||
self._oscillation = False
|
||||
|
||||
@property
|
||||
def should_poll(self):
|
||||
|
@ -1,60 +1,51 @@
|
||||
# Describes the format for available fan services
|
||||
|
||||
set_speed:
|
||||
description: Sets fan speed
|
||||
|
||||
description: Sets fan speed.
|
||||
fields:
|
||||
entity_id:
|
||||
description: Name(s) of the entities to set
|
||||
example: 'fan.living_room'
|
||||
|
||||
speed:
|
||||
description: Speed setting
|
||||
example: 'low'
|
||||
|
||||
turn_on:
|
||||
description: Turns fan on
|
||||
|
||||
description: Turns fan on.
|
||||
fields:
|
||||
entity_id:
|
||||
description: Name(s) of the entities to turn on
|
||||
example: 'fan.living_room'
|
||||
|
||||
speed:
|
||||
description: Speed setting
|
||||
example: 'high'
|
||||
|
||||
turn_off:
|
||||
description: Turns fan off
|
||||
|
||||
description: Turns fan off.
|
||||
fields:
|
||||
entity_id:
|
||||
description: Name(s) of the entities to turn off
|
||||
example: 'fan.living_room'
|
||||
|
||||
oscillate:
|
||||
description: Oscillates the fan
|
||||
|
||||
description: Oscillates the fan.
|
||||
fields:
|
||||
entity_id:
|
||||
description: Name(s) of the entities to oscillate
|
||||
example: 'fan.desk_fan'
|
||||
|
||||
oscillating:
|
||||
description: Flag to turn on/off oscillation
|
||||
example: True
|
||||
|
||||
toggle:
|
||||
description: Toggle the fan on/off
|
||||
|
||||
description: Toggle the fan on/off.
|
||||
fields:
|
||||
entity_id:
|
||||
description: Name(s) of the entities to toggle
|
||||
example: 'fan.living_room'
|
||||
|
||||
set_direction:
|
||||
description: Set the fan rotation direction
|
||||
|
||||
description: Set the fan rotation.
|
||||
fields:
|
||||
entity_id:
|
||||
description: Name(s) of the entities to toggle
|
||||
@ -64,8 +55,7 @@ set_direction:
|
||||
example: 'left'
|
||||
|
||||
dyson_set_night_mode:
|
||||
description: Set the fan in night mode
|
||||
|
||||
description: Set the fan in night mode.
|
||||
fields:
|
||||
entity_id:
|
||||
description: Name(s) of the entities to enable/disable night mode
|
||||
|
homeassistant/components/fan/xiaomi_miio.py (new file, 332 lines)
@ -0,0 +1,332 @@
|
||||
"""
|
||||
Support for Xiaomi Mi Air Purifier 2.
|
||||
|
||||
For more details about this platform, please refer to the documentation at
|
||||
https://home-assistant.io/components/fan.xiaomi_miio/
|
||||
"""
|
||||
import asyncio
|
||||
from functools import partial
|
||||
import logging
|
||||
import os
|
||||
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.helpers.entity import ToggleEntity
|
||||
from homeassistant.components.fan import (FanEntity, PLATFORM_SCHEMA,
|
||||
SUPPORT_SET_SPEED, DOMAIN)
|
||||
from homeassistant.config import load_yaml_config_file
|
||||
from homeassistant.const import (CONF_NAME, CONF_HOST, CONF_TOKEN,
|
||||
ATTR_ENTITY_ID, )
|
||||
from homeassistant.exceptions import PlatformNotReady
|
||||
import homeassistant.helpers.config_validation as cv
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
DEFAULT_NAME = 'Xiaomi Air Purifier'
|
||||
PLATFORM = 'xiaomi_miio'
|
||||
|
||||
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
|
||||
vol.Required(CONF_HOST): cv.string,
|
||||
vol.Required(CONF_TOKEN): vol.All(cv.string, vol.Length(min=32, max=32)),
|
||||
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
|
||||
})
|
||||
|
||||
REQUIREMENTS = ['python-miio==0.3.0']
|
||||
|
||||
ATTR_TEMPERATURE = 'temperature'
|
||||
ATTR_HUMIDITY = 'humidity'
|
||||
ATTR_AIR_QUALITY_INDEX = 'aqi'
|
||||
ATTR_MODE = 'mode'
|
||||
ATTR_FILTER_HOURS_USED = 'filter_hours_used'
|
||||
ATTR_FILTER_LIFE = 'filter_life_remaining'
|
||||
ATTR_FAVORITE_LEVEL = 'favorite_level'
|
||||
ATTR_BUZZER = 'buzzer'
|
||||
ATTR_CHILD_LOCK = 'child_lock'
|
||||
ATTR_LED = 'led'
|
||||
ATTR_LED_BRIGHTNESS = 'led_brightness'
|
||||
ATTR_MOTOR_SPEED = 'motor_speed'
|
||||
|
||||
ATTR_BRIGHTNESS = 'brightness'
|
||||
ATTR_LEVEL = 'level'
|
||||
|
||||
SUCCESS = ['ok']
|
||||
|
||||
SERVICE_SET_BUZZER_ON = 'xiaomi_miio_set_buzzer_on'
|
||||
SERVICE_SET_BUZZER_OFF = 'xiaomi_miio_set_buzzer_off'
|
||||
SERVICE_SET_LED_ON = 'xiaomi_miio_set_led_on'
|
||||
SERVICE_SET_LED_OFF = 'xiaomi_miio_set_led_off'
|
||||
SERVICE_SET_FAVORITE_LEVEL = 'xiaomi_miio_set_favorite_level'
|
||||
SERVICE_SET_LED_BRIGHTNESS = 'xiaomi_miio_set_led_brightness'
|
||||
|
||||
AIRPURIFIER_SERVICE_SCHEMA = vol.Schema({
|
||||
vol.Optional(ATTR_ENTITY_ID): cv.entity_ids,
|
||||
})
|
||||
|
||||
SERVICE_SCHEMA_LED_BRIGHTNESS = AIRPURIFIER_SERVICE_SCHEMA.extend({
|
||||
vol.Required(ATTR_BRIGHTNESS):
|
||||
vol.All(vol.Coerce(int), vol.Clamp(min=0, max=2))
|
||||
})
|
||||
|
||||
SERVICE_SCHEMA_FAVORITE_LEVEL = AIRPURIFIER_SERVICE_SCHEMA.extend({
|
||||
vol.Required(ATTR_LEVEL):
|
||||
vol.All(vol.Coerce(int), vol.Clamp(min=0, max=16))
|
||||
})
|
||||
|
||||
SERVICE_TO_METHOD = {
|
||||
SERVICE_SET_BUZZER_ON: {'method': 'async_set_buzzer_on'},
|
||||
SERVICE_SET_BUZZER_OFF: {'method': 'async_set_buzzer_off'},
|
||||
SERVICE_SET_LED_ON: {'method': 'async_set_led_on'},
|
||||
SERVICE_SET_LED_OFF: {'method': 'async_set_led_off'},
|
||||
SERVICE_SET_FAVORITE_LEVEL: {
|
||||
'method': 'async_set_favorite_level',
|
||||
'schema': SERVICE_SCHEMA_FAVORITE_LEVEL},
|
||||
SERVICE_SET_LED_BRIGHTNESS: {
|
||||
'method': 'async_set_led_brightness',
|
||||
'schema': SERVICE_SCHEMA_LED_BRIGHTNESS},
|
||||
}
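A reduced sketch of the dispatch pattern this table feeds: look up the handler method by name and call it with the remaining service data (dispatch is a hypothetical helper):

SERVICE_TO_METHOD = {
    'xiaomi_miio_set_led_brightness': {'method': 'async_set_led_brightness'},
}

def dispatch(entity, service_name, service_data):
    """Resolve the handler by name and invoke it on the entity."""
    method = SERVICE_TO_METHOD[service_name]['method']
    return getattr(entity, method)(**service_data)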
|
||||
|
||||
|
||||
# pylint: disable=unused-argument
|
||||
@asyncio.coroutine
|
||||
def async_setup_platform(hass, config, async_add_devices, discovery_info=None):
|
||||
"""Set up the air purifier from config."""
|
||||
from miio import AirPurifier, DeviceException
|
||||
if PLATFORM not in hass.data:
|
||||
hass.data[PLATFORM] = {}
|
||||
|
||||
host = config.get(CONF_HOST)
|
||||
name = config.get(CONF_NAME)
|
||||
token = config.get(CONF_TOKEN)
|
||||
|
||||
_LOGGER.info("Initializing with host %s (token %s...)", host, token[:5])
|
||||
|
||||
try:
|
||||
air_purifier = AirPurifier(host, token)
|
||||
|
||||
xiaomi_air_purifier = XiaomiAirPurifier(name, air_purifier)
|
||||
hass.data[PLATFORM][host] = xiaomi_air_purifier
|
||||
except DeviceException:
|
||||
raise PlatformNotReady
|
||||
|
||||
async_add_devices([xiaomi_air_purifier], update_before_add=True)
|
||||
|
||||
@asyncio.coroutine
|
||||
def async_service_handler(service):
|
||||
"""Map services to methods on XiaomiAirPurifier."""
|
||||
method = SERVICE_TO_METHOD.get(service.service)
|
||||
params = {key: value for key, value in service.data.items()
|
||||
if key != ATTR_ENTITY_ID}
|
||||
entity_ids = service.data.get(ATTR_ENTITY_ID)
|
||||
if entity_ids:
|
||||
target_air_purifiers = [air for air in hass.data[PLATFORM].values()
|
||||
if air.entity_id in entity_ids]
|
||||
else:
|
||||
target_air_purifiers = hass.data[PLATFORM].values()
|
||||
|
||||
update_tasks = []
|
||||
for air_purifier in target_air_purifiers:
|
||||
yield from getattr(air_purifier, method['method'])(**params)
|
||||
update_tasks.append(air_purifier.async_update_ha_state(True))
|
||||
|
||||
if update_tasks:
|
||||
yield from asyncio.wait(update_tasks, loop=hass.loop)
|
||||
|
||||
descriptions = yield from hass.async_add_job(
|
||||
load_yaml_config_file, os.path.join(
|
||||
os.path.dirname(__file__), 'xiaomi_miio_services.yaml'))
|
||||
|
||||
for air_purifier_service in SERVICE_TO_METHOD:
|
||||
schema = SERVICE_TO_METHOD[air_purifier_service].get(
|
||||
'schema', AIRPURIFIER_SERVICE_SCHEMA)
|
||||
hass.services.async_register(
|
||||
DOMAIN, air_purifier_service, async_service_handler,
|
||||
description=descriptions.get(air_purifier_service), schema=schema)
|
||||
|
||||
|
||||
class XiaomiAirPurifier(FanEntity):
|
||||
"""Representation of a Xiaomi Air Purifier."""
|
||||
|
||||
def __init__(self, name, air_purifier):
|
||||
"""Initialize the air purifier."""
|
||||
self._name = name
|
||||
|
||||
self._air_purifier = air_purifier
|
||||
self._state = None
|
||||
self._state_attrs = {
|
||||
ATTR_AIR_QUALITY_INDEX: None,
|
||||
ATTR_TEMPERATURE: None,
|
||||
ATTR_HUMIDITY: None,
|
||||
ATTR_MODE: None,
|
||||
ATTR_FILTER_HOURS_USED: None,
|
||||
ATTR_FILTER_LIFE: None,
|
||||
ATTR_FAVORITE_LEVEL: None,
|
||||
ATTR_BUZZER: None,
|
||||
ATTR_CHILD_LOCK: None,
|
||||
ATTR_LED: None,
|
||||
ATTR_LED_BRIGHTNESS: None,
|
||||
ATTR_MOTOR_SPEED: None
|
||||
}
|
||||
|
||||
@property
|
||||
def supported_features(self):
|
||||
"""Flag supported features."""
|
||||
return SUPPORT_SET_SPEED
|
||||
|
||||
@property
|
||||
def should_poll(self):
|
||||
"""Poll the fan."""
|
||||
return True
|
||||
|
||||
@property
|
||||
def name(self):
|
||||
"""Return the name of the device if any."""
|
||||
return self._name
|
||||
|
||||
@property
|
||||
def available(self):
|
||||
"""Return true when state is known."""
|
||||
return self._state is not None
|
||||
|
||||
@property
|
||||
def device_state_attributes(self):
|
||||
"""Return the state attributes of the device."""
|
||||
return self._state_attrs
|
||||
|
||||
@property
|
||||
def is_on(self):
|
||||
"""Return true if fan is on."""
|
||||
return self._state
|
||||
|
||||
@asyncio.coroutine
|
||||
def _try_command(self, mask_error, func, *args, **kwargs):
|
||||
"""Call a air purifier command handling error messages."""
|
||||
from miio import DeviceException
|
||||
try:
|
||||
result = yield from self.hass.async_add_job(
|
||||
partial(func, *args, **kwargs))
|
||||
|
||||
_LOGGER.debug("Response received from air purifier: %s", result)
|
||||
|
||||
return result == SUCCESS
|
||||
except DeviceException as exc:
|
||||
_LOGGER.error(mask_error, exc)
|
||||
return False
|
||||
|
||||
@asyncio.coroutine
|
||||
def async_turn_on(self: ToggleEntity, speed: str=None, **kwargs) -> None:
|
||||
"""Turn the fan on."""
|
||||
if speed:
|
||||
# If a speed (operation mode) is given, setting it is enough; skip the explicit turn-on.
|
||||
yield from self.async_set_speed(speed)
|
||||
return
|
||||
|
||||
yield from self._try_command(
|
||||
"Turning the air purifier on failed.", self._air_purifier.on)
|
||||
|
||||
@asyncio.coroutine
|
||||
def async_turn_off(self: ToggleEntity, **kwargs) -> None:
|
||||
"""Turn the fan off."""
|
||||
yield from self._try_command(
|
||||
"Turning the air purifier off failed.", self._air_purifier.off)
|
||||
|
||||
@asyncio.coroutine
|
||||
def async_update(self):
|
||||
"""Fetch state from the device."""
|
||||
from miio import DeviceException
|
||||
|
||||
try:
|
||||
state = yield from self.hass.async_add_job(
|
||||
self._air_purifier.status)
|
||||
_LOGGER.debug("Got new state: %s", state)
|
||||
|
||||
self._state = state.is_on
|
||||
self._state_attrs = {
|
||||
ATTR_TEMPERATURE: state.temperature,
|
||||
ATTR_HUMIDITY: state.humidity,
|
||||
ATTR_AIR_QUALITY_INDEX: state.aqi,
|
||||
ATTR_MODE: state.mode.value,
|
||||
ATTR_FILTER_HOURS_USED: state.filter_hours_used,
|
||||
ATTR_FILTER_LIFE: state.filter_life_remaining,
|
||||
ATTR_FAVORITE_LEVEL: state.favorite_level,
|
||||
ATTR_BUZZER: state.buzzer,
|
||||
ATTR_CHILD_LOCK: state.child_lock,
|
||||
ATTR_LED: state.led,
|
||||
ATTR_MOTOR_SPEED: state.motor_speed
|
||||
}
|
||||
|
||||
if state.led_brightness:
|
||||
self._state_attrs[
|
||||
ATTR_LED_BRIGHTNESS] = state.led_brightness.value
|
||||
|
||||
except DeviceException as ex:
|
||||
_LOGGER.error("Got exception while fetching the state: %s", ex)
|
||||
|
||||
@property
|
||||
def speed_list(self: ToggleEntity) -> list:
|
||||
"""Get the list of available speeds."""
|
||||
from miio.airpurifier import OperationMode
|
||||
return [mode.name for mode in OperationMode]
|
||||
|
||||
@property
|
||||
def speed(self):
|
||||
"""Return the current speed."""
|
||||
if self._state:
|
||||
from miio.airpurifier import OperationMode
|
||||
|
||||
return OperationMode(self._state_attrs[ATTR_MODE]).name
|
||||
|
||||
return None
|
||||
|
||||
@asyncio.coroutine
|
||||
def async_set_speed(self: ToggleEntity, speed: str) -> None:
|
||||
"""Set the speed of the fan."""
|
||||
_LOGGER.debug("Setting the operation mode to: " + speed)
|
||||
from miio.airpurifier import OperationMode
|
||||
|
||||
yield from self._try_command(
|
||||
"Setting operation mode of the air purifier failed.",
|
||||
self._air_purifier.set_mode, OperationMode[speed])
|
||||
|
||||
@asyncio.coroutine
|
||||
def async_set_buzzer_on(self):
|
||||
"""Turn the buzzer on."""
|
||||
yield from self._try_command(
|
||||
"Turning the buzzer of air purifier on failed.",
|
||||
self._air_purifier.set_buzzer, True)
|
||||
|
||||
@asyncio.coroutine
|
||||
def async_set_buzzer_off(self):
|
||||
"""Turn the buzzer on."""
|
||||
yield from self._try_command(
|
||||
"Turning the buzzer of air purifier off failed.",
|
||||
self._air_purifier.set_buzzer, False)
|
||||
|
||||
@asyncio.coroutine
|
||||
def async_set_led_on(self):
|
||||
"""Turn the led on."""
|
||||
yield from self._try_command(
|
||||
"Turning the led of air purifier off failed.",
|
||||
self._air_purifier.set_led, True)
|
||||
|
||||
@asyncio.coroutine
|
||||
def async_set_led_off(self):
|
||||
"""Turn the led off."""
|
||||
yield from self._try_command(
|
||||
"Turning the led of air purifier off failed.",
|
||||
self._air_purifier.set_led, False)
|
||||
|
||||
@asyncio.coroutine
|
||||
def async_set_led_brightness(self, brightness: int=2):
|
||||
"""Set the led brightness."""
|
||||
from miio.airpurifier import LedBrightness
|
||||
|
||||
yield from self._try_command(
|
||||
"Setting the led brightness of the air purifier failed.",
|
||||
self._air_purifier.set_led_brightness, LedBrightness(brightness))
|
||||
|
||||
@asyncio.coroutine
|
||||
def async_set_favorite_level(self, level: int=1):
|
||||
"""Set the favorite level."""
|
||||
yield from self._try_command(
|
||||
"Setting the favorite level of the air purifier failed.",
|
||||
self._air_purifier.set_favorite_level, level)
|
homeassistant/components/fan/xiaomi_miio_services.yaml (new file, 56 lines)
@ -0,0 +1,56 @@
|
||||
|
||||
xiaomi_miio_set_buzzer_on:
|
||||
description: Turn the buzzer on.
|
||||
|
||||
fields:
|
||||
entity_id:
|
||||
description: Name of the air purifier entity.
|
||||
example: 'fan.xiaomi_air_purifier'
|
||||
|
||||
xiaomi_miio_set_buzzer_off:
|
||||
description: Turn the buzzer off.
|
||||
|
||||
fields:
|
||||
entity_id:
|
||||
description: Name of the air purifier entity.
|
||||
example: 'fan.xiaomi_air_purifier'
|
||||
|
||||
xiaomi_miio_set_led_on:
|
||||
description: Turn the led on.
|
||||
|
||||
fields:
|
||||
entity_id:
|
||||
description: Name of the air purifier entity.
|
||||
example: 'fan.xiaomi_air_purifier'
|
||||
|
||||
xiaomi_miio_set_led_off:
|
||||
description: Turn the led off.
|
||||
|
||||
fields:
|
||||
entity_id:
|
||||
description: Name of the air purifier entity.
|
||||
example: 'fan.xiaomi_air_purifier'
|
||||
|
||||
xiaomi_miio_set_favorite_level:
|
||||
description: Set the favorite level.
|
||||
|
||||
fields:
|
||||
entity_id:
|
||||
description: Name of the air purifier entity.
|
||||
example: 'fan.xiaomi_air_purifier'
|
||||
|
||||
level:
|
||||
description: Level, between 0 and 16.
|
||||
example: '1'
|
||||
|
||||
xiaomi_miio_set_led_brightness:
|
||||
description: Set the led brightness.
|
||||
|
||||
fields:
|
||||
entity_id:
|
||||
description: Name of the air purifier entity.
|
||||
example: 'fan.xiaomi_air_purifier'
|
||||
|
||||
brightness:
|
||||
description: Brightness (0 = Bright, 1 = Dim, 2 = Off)
|
||||
example: '1'
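A hypothetical call of one of the services documented above, assuming a hass instance is in scope (entity id and value are illustrative):

hass.services.call('fan', 'xiaomi_miio_set_led_brightness', {
    'entity_id': 'fan.xiaomi_air_purifier',
    'brightness': 1,
})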
|
@ -1,4 +1,9 @@
|
||||
"""Handle the frontend for Home Assistant."""
|
||||
"""
|
||||
Handle the frontend for Home Assistant.
|
||||
|
||||
For more details about this component, please refer to the documentation at
|
||||
https://home-assistant.io/components/frontend/
|
||||
"""
|
||||
import asyncio
|
||||
import hashlib
|
||||
import json
|
||||
@ -7,17 +12,16 @@ import os
|
||||
|
||||
from aiohttp import web
|
||||
import voluptuous as vol
|
||||
import homeassistant.helpers.config_validation as cv
|
||||
|
||||
import homeassistant.helpers.config_validation as cv
|
||||
from homeassistant.components.http import HomeAssistantView
|
||||
from homeassistant.components.http.auth import is_trusted_ip
|
||||
from homeassistant.config import find_config_file, load_yaml_config_file
|
||||
from homeassistant.const import CONF_NAME, EVENT_THEMES_UPDATED
|
||||
from homeassistant.core import callback
|
||||
from homeassistant.loader import bind_hass
|
||||
from homeassistant.components import api
|
||||
from homeassistant.components.http import HomeAssistantView
|
||||
from homeassistant.components.http.auth import is_trusted_ip
|
||||
from homeassistant.components.http.const import KEY_DEVELOPMENT
|
||||
from .version import FINGERPRINTS
|
||||
|
||||
REQUIREMENTS = ['home-assistant-frontend==20171103.0']
|
||||
|
||||
DOMAIN = 'frontend'
|
||||
DEPENDENCIES = ['api', 'websocket_api']
|
||||
@ -25,11 +29,16 @@ DEPENDENCIES = ['api', 'websocket_api']
|
||||
URL_PANEL_COMPONENT = '/frontend/panels/{}.html'
|
||||
URL_PANEL_COMPONENT_FP = '/frontend/panels/{}-{}.html'
|
||||
|
||||
STATIC_PATH = os.path.join(os.path.dirname(__file__), 'www_static/')
|
||||
POLYMER_PATH = os.path.join(os.path.dirname(__file__),
|
||||
'home-assistant-polymer/')
|
||||
FINAL_PATH = os.path.join(POLYMER_PATH, 'final')
|
||||
|
||||
CONF_THEMES = 'themes'
|
||||
CONF_EXTRA_HTML_URL = 'extra_html_url'
|
||||
CONF_FRONTEND_REPO = 'development_repo'
|
||||
|
||||
ATTR_THEMES = 'themes'
|
||||
ATTR_EXTRA_HTML_URL = 'extra_html_url'
|
||||
DEFAULT_THEME_COLOR = '#03A9F4'
|
||||
|
||||
MANIFEST_JSON = {
|
||||
'background_color': '#FFFFFF',
|
||||
'description': 'Open-source home automation platform running on Python 3.',
|
||||
@ -50,9 +59,9 @@ for size in (192, 384, 512, 1024):
|
||||
'type': 'image/png'
|
||||
})
|
||||
|
||||
DATA_FINALIZE_PANEL = 'frontend_finalize_panel'
|
||||
DATA_PANELS = 'frontend_panels'
|
||||
DATA_EXTRA_HTML_URL = 'frontend_extra_html_url'
|
||||
DATA_INDEX_VIEW = 'frontend_index_view'
|
||||
DATA_THEMES = 'frontend_themes'
|
||||
DATA_DEFAULT_THEME = 'frontend_default_theme'
|
||||
DEFAULT_THEME = 'default'
|
||||
@ -60,15 +69,16 @@ DEFAULT_THEME = 'default'
|
||||
PRIMARY_COLOR = 'primary-color'
|
||||
|
||||
# To keep track we don't register a component twice (gives a warning)
|
||||
_REGISTERED_COMPONENTS = set()
|
||||
# _REGISTERED_COMPONENTS = set()
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
CONFIG_SCHEMA = vol.Schema({
|
||||
DOMAIN: vol.Schema({
|
||||
vol.Optional(ATTR_THEMES): vol.Schema({
|
||||
vol.Optional(CONF_FRONTEND_REPO): cv.isdir,
|
||||
vol.Optional(CONF_THEMES): vol.Schema({
|
||||
cv.string: {cv.string: cv.string}
|
||||
}),
|
||||
vol.Optional(ATTR_EXTRA_HTML_URL):
|
||||
vol.Optional(CONF_EXTRA_HTML_URL):
|
||||
vol.All(cv.ensure_list, [cv.string]),
|
||||
}),
|
||||
}, extra=vol.ALLOW_EXTRA)
|
||||
@ -80,101 +90,175 @@ SERVICE_SET_THEME_SCHEMA = vol.Schema({
|
||||
})
|
||||
|
||||
|
||||
class AbstractPanel:
|
||||
"""Abstract class for panels."""
|
||||
|
||||
# Name of the webcomponent
|
||||
component_name = None
|
||||
|
||||
# Icon to show in the sidebar (optional)
|
||||
sidebar_icon = None
|
||||
|
||||
# Title to show in the sidebar (optional)
|
||||
sidebar_title = None
|
||||
|
||||
# Url to the webcomponent
|
||||
webcomponent_url = None
|
||||
|
||||
# Url to show the panel in the frontend
|
||||
frontend_url_path = None
|
||||
|
||||
# Config to pass to the webcomponent
|
||||
config = None
|
||||
|
||||
@asyncio.coroutine
|
||||
def async_register(self, hass):
|
||||
"""Register panel with HASS."""
|
||||
panels = hass.data.get(DATA_PANELS)
|
||||
if panels is None:
|
||||
panels = hass.data[DATA_PANELS] = {}
|
||||
|
||||
if self.frontend_url_path in panels:
|
||||
_LOGGER.warning("Overwriting component %s", self.frontend_url_path)
|
||||
|
||||
if DATA_FINALIZE_PANEL in hass.data:
|
||||
yield from hass.data[DATA_FINALIZE_PANEL](self)
|
||||
|
||||
panels[self.frontend_url_path] = self
|
||||
|
||||
@callback
|
||||
def async_register_index_routes(self, router, index_view):
|
||||
"""Register routes for panel to be served by index view."""
|
||||
router.add_route(
|
||||
'get', '/{}'.format(self.frontend_url_path), index_view.get)
|
||||
router.add_route(
|
||||
'get', '/{}/{{extra:.+}}'.format(self.frontend_url_path),
|
||||
index_view.get)
|
||||
|
||||
def as_dict(self):
|
||||
"""Panel as dictionary."""
|
||||
return {
|
||||
'component_name': self.component_name,
|
||||
'icon': self.sidebar_icon,
|
||||
'title': self.sidebar_title,
|
||||
'url': self.webcomponent_url,
|
||||
'url_path': self.frontend_url_path,
|
||||
'config': self.config,
|
||||
}
|
||||
|
||||
|
||||
class BuiltInPanel(AbstractPanel):
|
||||
"""Panel that is part of hass_frontend."""
|
||||
|
||||
def __init__(self, component_name, sidebar_title, sidebar_icon,
|
||||
frontend_url_path, config):
|
||||
"""Initialize a built-in panel."""
|
||||
self.component_name = component_name
|
||||
self.sidebar_title = sidebar_title
|
||||
self.sidebar_icon = sidebar_icon
|
||||
self.frontend_url_path = frontend_url_path or component_name
|
||||
self.config = config
|
||||
|
||||
@asyncio.coroutine
|
||||
def async_finalize(self, hass, frontend_repository_path):
|
||||
"""Finalize this panel for usage.
|
||||
|
||||
If frontend_repository_path is set, will be prepended to path of
|
||||
built-in components.
|
||||
"""
|
||||
panel_path = 'panels/ha-panel-{}.html'.format(self.component_name)
|
||||
|
||||
if frontend_repository_path is None:
|
||||
import hass_frontend
|
||||
|
||||
self.webcomponent_url = \
|
||||
'/static/panels/ha-panel-{}-{}.html'.format(
|
||||
self.component_name,
|
||||
hass_frontend.FINGERPRINTS[panel_path])
|
||||
|
||||
else:
|
||||
# Dev mode
|
||||
self.webcomponent_url = \
|
||||
'/home-assistant-polymer/panels/{}/ha-panel-{}.html'.format(
|
||||
self.component_name, self.component_name)
|
||||
|
||||
|
||||
class ExternalPanel(AbstractPanel):
|
||||
"""Panel that is added by a custom component."""
|
||||
|
||||
REGISTERED_COMPONENTS = set()
|
||||
|
||||
def __init__(self, component_name, path, md5, sidebar_title, sidebar_icon,
|
||||
frontend_url_path, config):
|
||||
"""Initialize an external panel."""
|
||||
self.component_name = component_name
|
||||
self.path = path
|
||||
self.md5 = md5
|
||||
self.sidebar_title = sidebar_title
|
||||
self.sidebar_icon = sidebar_icon
|
||||
self.frontend_url_path = frontend_url_path or component_name
|
||||
self.config = config
|
||||
|
||||
@asyncio.coroutine
|
||||
def async_finalize(self, hass, frontend_repository_path):
|
||||
"""Finalize this panel for usage.
|
||||
|
||||
If frontend_repository_path is set, it will be prepended to the path of built-in
|
||||
components.
|
||||
"""
|
||||
try:
|
||||
if self.md5 is None:
|
||||
yield from hass.async_add_job(_fingerprint, self.path)
|
||||
except OSError:
|
||||
_LOGGER.error('Cannot find or access %s at %s',
|
||||
self.component_name, self.path)
|
||||
hass.data[DATA_PANELS].pop(self.frontend_url_path)
|
||||
|
||||
self.webcomponent_url = \
|
||||
URL_PANEL_COMPONENT_FP.format(self.component_name, self.md5)
|
||||
|
||||
if self.component_name not in self.REGISTERED_COMPONENTS:
|
||||
hass.http.register_static_path(
|
||||
self.webcomponent_url, self.path,
|
||||
# if path is None, we're in prod mode, so cache static assets
|
||||
frontend_repository_path is None)
|
||||
self.REGISTERED_COMPONENTS.add(self.component_name)
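A sketch of the md5-based cache busting used above; fingerprinted_url is a hypothetical helper that combines the two pieces shown in this file:

import hashlib

URL_PANEL_COMPONENT_FP = '/frontend/panels/{}-{}.html'

def fingerprinted_url(component_name, path):
    """Return a cache-busting URL for a panel's HTML file."""
    with open(path) as fil:
        md5 = hashlib.md5(fil.read().encode('utf-8')).hexdigest()
    return URL_PANEL_COMPONENT_FP.format(component_name, md5)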
|
||||
|
||||
|
||||
@bind_hass
|
||||
def register_built_in_panel(hass, component_name, sidebar_title=None,
|
||||
sidebar_icon=None, url_path=None, config=None):
|
||||
@asyncio.coroutine
|
||||
def async_register_built_in_panel(hass, component_name, sidebar_title=None,
|
||||
sidebar_icon=None, frontend_url_path=None,
|
||||
config=None):
|
||||
"""Register a built-in panel."""
|
||||
nondev_path = 'panels/ha-panel-{}.html'.format(component_name)
|
||||
|
||||
if hass.http.development:
|
||||
url = ('/static/home-assistant-polymer/panels/'
|
||||
'{0}/ha-panel-{0}.html'.format(component_name))
|
||||
path = os.path.join(
|
||||
STATIC_PATH, 'home-assistant-polymer/panels/',
|
||||
'{0}/ha-panel-{0}.html'.format(component_name))
|
||||
else:
|
||||
url = None # use default url generate mechanism
|
||||
path = os.path.join(STATIC_PATH, nondev_path)
|
||||
|
||||
# Fingerprint doesn't exist when adding new built-in panel
|
||||
register_panel(hass, component_name, path,
|
||||
FINGERPRINTS.get(nondev_path, 'dev'), sidebar_title,
|
||||
sidebar_icon, url_path, url, config)
|
||||
panel = BuiltInPanel(component_name, sidebar_title, sidebar_icon,
|
||||
frontend_url_path, config)
|
||||
yield from panel.async_register(hass)
|
||||
|
||||
|
||||
@bind_hass
|
||||
def register_panel(hass, component_name, path, md5=None, sidebar_title=None,
|
||||
sidebar_icon=None, url_path=None, url=None, config=None):
|
||||
@asyncio.coroutine
|
||||
def async_register_panel(hass, component_name, path, md5=None,
|
||||
sidebar_title=None, sidebar_icon=None,
|
||||
frontend_url_path=None, config=None):
|
||||
"""Register a panel for the frontend.
|
||||
|
||||
component_name: name of the web component
|
||||
path: path to the HTML of the web component
|
||||
(required unless url is provided)
|
||||
md5: the md5 hash of the web component (for versioning, optional)
|
||||
md5: the md5 hash of the web component (for versioning in url, optional)
|
||||
sidebar_title: title to show in the sidebar (optional)
|
||||
sidebar_icon: icon to show next to title in sidebar (optional)
|
||||
url_path: name to use in the url (defaults to component_name)
|
||||
url: for the web component (optional)
|
||||
config: config to be passed into the web component
|
||||
"""
|
||||
panels = hass.data.get(DATA_PANELS)
|
||||
if panels is None:
|
||||
panels = hass.data[DATA_PANELS] = {}
|
||||
|
||||
if url_path is None:
|
||||
url_path = component_name
|
||||
|
||||
if url_path in panels:
|
||||
_LOGGER.warning("Overwriting component %s", url_path)
|
||||
|
||||
if url is None:
|
||||
if not os.path.isfile(path):
|
||||
_LOGGER.error(
|
||||
"Panel %s component does not exist: %s", component_name, path)
|
||||
return
|
||||
|
||||
if md5 is None:
|
||||
with open(path) as fil:
|
||||
md5 = hashlib.md5(fil.read().encode('utf-8')).hexdigest()
|
||||
|
||||
data = {
|
||||
'url_path': url_path,
|
||||
'component_name': component_name,
|
||||
}
|
||||
|
||||
if sidebar_title:
|
||||
data['title'] = sidebar_title
|
||||
if sidebar_icon:
|
||||
data['icon'] = sidebar_icon
|
||||
if config is not None:
|
||||
data['config'] = config
|
||||
|
||||
if url is not None:
|
||||
data['url'] = url
|
||||
else:
|
||||
url = URL_PANEL_COMPONENT.format(component_name)
|
||||
|
||||
if url not in _REGISTERED_COMPONENTS:
|
||||
hass.http.register_static_path(url, path)
|
||||
_REGISTERED_COMPONENTS.add(url)
|
||||
|
||||
fprinted_url = URL_PANEL_COMPONENT_FP.format(component_name, md5)
|
||||
data['url'] = fprinted_url
|
||||
|
||||
panels[url_path] = data
|
||||
|
||||
# Register index view for this route if IndexView already loaded
|
||||
# Otherwise it will be done during setup.
|
||||
index_view = hass.data.get(DATA_INDEX_VIEW)
|
||||
|
||||
if index_view:
|
||||
hass.http.app.router.add_route(
|
||||
'get', '/{}'.format(url_path), index_view.get)
|
||||
hass.http.app.router.add_route(
|
||||
'get', '/{}/{{extra:.+}}'.format(url_path), index_view.get)
|
||||
panel = ExternalPanel(component_name, path, md5, sidebar_title,
|
||||
sidebar_icon, frontend_url_path, config)
|
||||
yield from panel.async_register(hass)
|
||||
|
||||
|
||||
@bind_hass
|
||||
@callback
|
||||
def add_extra_html_url(hass, url):
|
||||
"""Register extra html url to load."""
|
||||
url_set = hass.data.get(DATA_EXTRA_HTML_URL)
|
||||
@ -188,57 +272,74 @@ def add_manifest_json_key(key, val):
|
||||
MANIFEST_JSON[key] = val
|
||||
|
||||
|
||||
def setup(hass, config):
|
||||
@asyncio.coroutine
|
||||
def async_setup(hass, config):
|
||||
"""Set up the serving of the frontend."""
|
||||
hass.http.register_view(BootstrapView)
|
||||
hass.http.register_view(ManifestJSONView)
|
||||
|
||||
if hass.http.development:
|
||||
sw_path = "home-assistant-polymer/build/service_worker.js"
|
||||
else:
|
||||
sw_path = "service_worker.js"
|
||||
conf = config.get(DOMAIN, {})
|
||||
|
||||
hass.http.register_static_path("/service_worker.js",
|
||||
os.path.join(STATIC_PATH, sw_path), False)
|
||||
hass.http.register_static_path("/robots.txt",
|
||||
os.path.join(STATIC_PATH, "robots.txt"))
|
||||
hass.http.register_static_path("/static", STATIC_PATH)
|
||||
repo_path = conf.get(CONF_FRONTEND_REPO)
|
||||
is_dev = repo_path is not None
|
||||
|
||||
if is_dev:
|
||||
hass.http.register_static_path(
|
||||
"/home-assistant-polymer", repo_path, False)
|
||||
hass.http.register_static_path(
|
||||
"/static/translations",
|
||||
os.path.join(repo_path, "build/translations"), False)
|
||||
sw_path = os.path.join(repo_path, "build/service_worker.js")
|
||||
static_path = os.path.join(repo_path, 'hass_frontend')
|
||||
else:
|
||||
import hass_frontend
|
||||
frontend_path = hass_frontend.where()
|
||||
sw_path = os.path.join(frontend_path, "service_worker.js")
|
||||
static_path = frontend_path
|
||||
|
||||
hass.http.register_static_path("/service_worker.js", sw_path, False)
|
||||
hass.http.register_static_path(
|
||||
"/robots.txt", os.path.join(static_path, "robots.txt"), not is_dev)
|
||||
hass.http.register_static_path("/static", static_path, not is_dev)
|
||||
|
||||
local = hass.config.path('www')
|
||||
if os.path.isdir(local):
|
||||
hass.http.register_static_path("/local", local)
|
||||
hass.http.register_static_path("/local", local, not is_dev)
|
||||
|
||||
index_view = hass.data[DATA_INDEX_VIEW] = IndexView()
|
||||
index_view = IndexView(is_dev)
|
||||
hass.http.register_view(index_view)
|
||||
|
||||
# Components have registered panels before frontend got setup.
|
||||
# Now register their urls.
|
||||
if DATA_PANELS in hass.data:
|
||||
for url_path in hass.data[DATA_PANELS]:
|
||||
hass.http.app.router.add_route(
|
||||
'get', '/{}'.format(url_path), index_view.get)
|
||||
hass.http.app.router.add_route(
|
||||
'get', '/{}/{{extra:.+}}'.format(url_path), index_view.get)
|
||||
else:
|
||||
hass.data[DATA_PANELS] = {}
|
||||
@asyncio.coroutine
|
||||
def finalize_panel(panel):
|
||||
"""Finalize setup of a panel."""
|
||||
yield from panel.async_finalize(hass, repo_path)
|
||||
panel.async_register_index_routes(hass.http.app.router, index_view)
|
||||
|
||||
yield from asyncio.wait([
|
||||
async_register_built_in_panel(hass, panel)
|
||||
for panel in ('dev-event', 'dev-info', 'dev-service', 'dev-state',
|
||||
'dev-template', 'dev-mqtt', 'kiosk')], loop=hass.loop)
|
||||
|
||||
hass.data[DATA_FINALIZE_PANEL] = finalize_panel
|
||||
|
||||
# Finalize registration of panels that registered before frontend was setup
|
||||
# This includes the built-in panels from line above.
|
||||
yield from asyncio.wait(
|
||||
[finalize_panel(panel) for panel in hass.data[DATA_PANELS].values()],
|
||||
loop=hass.loop)
|
||||
|
||||
if DATA_EXTRA_HTML_URL not in hass.data:
|
||||
hass.data[DATA_EXTRA_HTML_URL] = set()
|
||||
|
||||
for panel in ('dev-event', 'dev-info', 'dev-service', 'dev-state',
|
||||
'dev-template', 'dev-mqtt', 'kiosk'):
|
||||
register_built_in_panel(hass, panel)
|
||||
|
||||
themes = config.get(DOMAIN, {}).get(ATTR_THEMES)
|
||||
setup_themes(hass, themes)
|
||||
|
||||
for url in config.get(DOMAIN, {}).get(ATTR_EXTRA_HTML_URL, []):
|
||||
for url in conf.get(CONF_EXTRA_HTML_URL, []):
|
||||
add_extra_html_url(hass, url)
|
||||
|
||||
yield from async_setup_themes(hass, conf.get(CONF_THEMES))
|
||||
|
||||
return True
|
||||
|
||||
|
||||
def setup_themes(hass, themes):
|
||||
@asyncio.coroutine
|
||||
def async_setup_themes(hass, themes):
|
||||
"""Set up themes data and services."""
|
||||
hass.http.register_view(ThemesView)
|
||||
hass.data[DATA_DEFAULT_THEME] = DEFAULT_THEME
|
||||
@ -278,40 +379,22 @@ def setup_themes(hass, themes):
|
||||
def reload_themes(_):
|
||||
"""Reload themes."""
|
||||
path = find_config_file(hass.config.config_dir)
|
||||
new_themes = load_yaml_config_file(path)[DOMAIN].get(ATTR_THEMES, {})
|
||||
new_themes = load_yaml_config_file(path)[DOMAIN].get(CONF_THEMES, {})
|
||||
hass.data[DATA_THEMES] = new_themes
|
||||
if hass.data[DATA_DEFAULT_THEME] not in new_themes:
|
||||
hass.data[DATA_DEFAULT_THEME] = DEFAULT_THEME
|
||||
update_theme_and_fire_event()
|
||||
|
||||
descriptions = load_yaml_config_file(
|
||||
descriptions = yield from hass.async_add_job(
|
||||
load_yaml_config_file,
|
||||
os.path.join(os.path.dirname(__file__), 'services.yaml'))
|
||||
hass.services.register(DOMAIN, SERVICE_SET_THEME,
|
||||
set_theme,
|
||||
descriptions[SERVICE_SET_THEME],
|
||||
SERVICE_SET_THEME_SCHEMA)
|
||||
hass.services.register(DOMAIN, SERVICE_RELOAD_THEMES, reload_themes,
|
||||
descriptions[SERVICE_RELOAD_THEMES])
|
||||
|
||||
|
||||
class BootstrapView(HomeAssistantView):
|
||||
"""View to bootstrap frontend with all needed data."""
|
||||
|
||||
url = '/api/bootstrap'
|
||||
name = 'api:bootstrap'
|
||||
|
||||
@callback
|
||||
def get(self, request):
|
||||
"""Return all data needed to bootstrap Home Assistant."""
|
||||
hass = request.app['hass']
|
||||
|
||||
return self.json({
|
||||
'config': hass.config.as_dict(),
|
||||
'states': hass.states.async_all(),
|
||||
'events': api.async_events_json(hass),
|
||||
'services': api.async_services_json(hass),
|
||||
'panels': hass.data[DATA_PANELS],
|
||||
})
|
||||
hass.services.async_register(DOMAIN, SERVICE_SET_THEME,
|
||||
set_theme,
|
||||
descriptions[SERVICE_SET_THEME],
|
||||
SERVICE_SET_THEME_SCHEMA)
|
||||
hass.services.async_register(DOMAIN, SERVICE_RELOAD_THEMES, reload_themes,
|
||||
descriptions[SERVICE_RELOAD_THEMES])
|
||||
|
||||
|
||||
class IndexView(HomeAssistantView):
|
||||
@ -322,10 +405,11 @@ class IndexView(HomeAssistantView):
|
||||
requires_auth = False
|
||||
extra_urls = ['/states', '/states/{extra}']
|
||||
|
||||
def __init__(self):
|
||||
def __init__(self, use_repo):
|
||||
"""Initialize the frontend view."""
|
||||
from jinja2 import FileSystemLoader, Environment
|
||||
|
||||
self.use_repo = use_repo
|
||||
self.templates = Environment(
|
||||
autoescape=True,
|
||||
loader=FileSystemLoader(
|
||||
@ -338,18 +422,23 @@ class IndexView(HomeAssistantView):
|
||||
"""Serve the index view."""
|
||||
hass = request.app['hass']
|
||||
|
||||
if request.app[KEY_DEVELOPMENT]:
|
||||
core_url = '/static/home-assistant-polymer/build/core.js'
|
||||
if self.use_repo:
|
||||
core_url = '/home-assistant-polymer/build/core.js'
|
||||
compatibility_url = \
|
||||
'/static/home-assistant-polymer/build/compatibility.js'
|
||||
ui_url = '/static/home-assistant-polymer/src/home-assistant.html'
|
||||
'/home-assistant-polymer/build/compatibility.js'
|
||||
ui_url = '/home-assistant-polymer/src/home-assistant.html'
|
||||
icons_fp = ''
|
||||
icons_url = '/static/mdi.html'
|
||||
else:
|
||||
import hass_frontend
|
||||
core_url = '/static/core-{}.js'.format(
|
||||
FINGERPRINTS['core.js'])
|
||||
hass_frontend.FINGERPRINTS['core.js'])
|
||||
compatibility_url = '/static/compatibility-{}.js'.format(
|
||||
FINGERPRINTS['compatibility.js'])
|
||||
hass_frontend.FINGERPRINTS['compatibility.js'])
|
||||
ui_url = '/static/frontend-{}.html'.format(
|
||||
FINGERPRINTS['frontend.html'])
|
||||
hass_frontend.FINGERPRINTS['frontend.html'])
|
||||
icons_fp = '-{}'.format(hass_frontend.FINGERPRINTS['mdi.html'])
|
||||
icons_url = '/static/mdi{}.html'.format(icons_fp)
|
||||
|
||||
if request.path == '/':
|
||||
panel = 'states'
|
||||
@ -359,17 +448,13 @@ class IndexView(HomeAssistantView):
|
||||
if panel == 'states':
|
||||
panel_url = ''
|
||||
else:
|
||||
panel_url = hass.data[DATA_PANELS][panel]['url']
|
||||
panel_url = hass.data[DATA_PANELS][panel].webcomponent_url
|
||||
|
||||
no_auth = 'true'
|
||||
if hass.config.api.api_password:
|
||||
# require password if set
|
||||
if hass.config.api.api_password and not is_trusted_ip(request):
|
||||
# do not try to auto connect on load
|
||||
no_auth = 'false'
|
||||
if is_trusted_ip(request):
|
||||
# bypass for trusted networks
|
||||
no_auth = 'true'
|
||||
|
||||
icons_url = '/static/mdi-{}.html'.format(FINGERPRINTS['mdi.html'])
|
||||
template = yield from hass.async_add_job(
|
||||
self.templates.get_template, 'index.html')
|
||||
|
||||
@ -379,9 +464,9 @@ class IndexView(HomeAssistantView):
|
||||
resp = template.render(
|
||||
core_url=core_url, ui_url=ui_url,
|
||||
compatibility_url=compatibility_url, no_auth=no_auth,
|
||||
icons_url=icons_url, icons=FINGERPRINTS['mdi.html'],
|
||||
icons_url=icons_url, icons=icons_fp,
|
||||
panel_url=panel_url, panels=hass.data[DATA_PANELS],
|
||||
dev_mode=request.app[KEY_DEVELOPMENT],
|
||||
dev_mode=self.use_repo,
|
||||
theme_color=MANIFEST_JSON['theme_color'],
|
||||
extra_urls=hass.data[DATA_EXTRA_HTML_URL])
|
||||
|
||||
@ -418,3 +503,9 @@ class ThemesView(HomeAssistantView):
|
||||
'themes': hass.data[DATA_THEMES],
|
||||
'default_theme': hass.data[DATA_DEFAULT_THEME],
|
||||
})
|
||||
|
||||
|
||||
def _fingerprint(path):
|
||||
"""Fingerprint a file."""
|
||||
with open(path) as fil:
|
||||
return hashlib.md5(fil.read().encode('utf-8')).hexdigest()
|
||||
|
@ -8,4 +8,4 @@ set_theme:
|
||||
example: 'light'
|
||||
|
||||
reload_themes:
|
||||
description: Reload themes from yaml config.
|
||||
description: Reload themes from yaml configuration.
|
||||
|
@ -8,11 +8,13 @@
|
||||
<link rel='icon' href='/static/icons/favicon.ico'>
|
||||
<link rel='apple-touch-icon' sizes='180x180'
|
||||
href='/static/icons/favicon-apple-180x180.png'>
|
||||
<link rel="mask-icon" href="/static/icons/home-assistant-icon.svg" color="#3fbbf4">
|
||||
<link rel='preload' href='{{ core_url }}' as='script'/>
|
||||
{% for panel in panels.values() -%}
|
||||
<link rel='prefetch' href='{{ panel.url }}'>
|
||||
{% endfor -%}
|
||||
<link rel="mask-icon" href="/static/icons/mask-icon.svg" color="#3fbbf4">
|
||||
{% if not dev_mode %}
|
||||
<link rel='preload' href='{{ core_url }}' as='script'/>
|
||||
{% for panel in panels.values() -%}
|
||||
<link rel='prefetch' href='{{ panel.webcomponent_url }}'>
|
||||
{% endfor -%}
|
||||
{% endif %}
|
||||
<meta name='apple-mobile-web-app-capable' content='yes'>
|
||||
<meta name="msapplication-square70x70logo" content="/static/icons/tile-win-70x70.png"/>
|
||||
<meta name="msapplication-square150x150logo" content="/static/icons/tile-win-150x150.png"/>
|
||||
@ -36,7 +38,7 @@
|
||||
display: block;
|
||||
content: "";
|
||||
height: 48px;
|
||||
background-color: #03A9F4;
|
||||
background-color: {{ theme_color }};
|
||||
}
|
||||
|
||||
#ha-init-skeleton .message {
|
||||
@ -50,7 +52,7 @@
|
||||
}
|
||||
|
||||
#ha-init-skeleton a {
|
||||
color: #03A9F4;
|
||||
color: {{ theme_color }};
|
||||
text-decoration: none;
|
||||
font-weight: bold;
|
||||
}
|
||||
|
@ -1,24 +0,0 @@
|
||||
"""DO NOT MODIFY. Auto-generated by script/fingerprint_frontend."""
|
||||
|
||||
FINGERPRINTS = {
|
||||
"compatibility.js": "1686167ff210e001f063f5c606b2e74b",
|
||||
"core.js": "2a7d01e45187c7d4635da05065b5e54e",
|
||||
"frontend.html": "2de1bde3b4a6c6c47dd95504fc098906",
|
||||
"mdi.html": "2e848b4da029bf73d426d5ba058a088d",
|
||||
"micromarkdown-js.html": "93b5ec4016f0bba585521cf4d18dec1a",
|
||||
"panels/ha-panel-config.html": "52e2e1d477bfd6dc3708d65b8337f0af",
|
||||
"panels/ha-panel-dev-event.html": "d409e7ab537d9fe629126d122345279c",
|
||||
"panels/ha-panel-dev-info.html": "b0e55eb657fd75f21aba2426ac0cedc0",
|
||||
"panels/ha-panel-dev-mqtt.html": "94b222b013a98583842de3e72d5888c6",
|
||||
"panels/ha-panel-dev-service.html": "422b2c181ee0713fa31d45a64e605baf",
|
||||
"panels/ha-panel-dev-state.html": "7948d3dba058f31517d880df8ed0e857",
|
||||
"panels/ha-panel-dev-template.html": "928e7b81b9c113b70edc9f4a1d051827",
|
||||
"panels/ha-panel-hassio.html": "b46e7619f3c355f872d5370741d89f6a",
|
||||
"panels/ha-panel-history.html": "fe2daac10a14f51fa3eb7d23978df1f7",
|
||||
"panels/ha-panel-iframe.html": "56930204d6e067a3d600cf030f4b34c8",
|
||||
"panels/ha-panel-kiosk.html": "b40aa5cb52dd7675bea744afcf9eebf8",
|
||||
"panels/ha-panel-logbook.html": "771afdcf48dc7e308b0282417d2e02d8",
|
||||
"panels/ha-panel-mailbox.html": "a8cca44ca36553e91565e3c894ea6323",
|
||||
"panels/ha-panel-map.html": "565db019147162080c21af962afc097f",
|
||||
"panels/ha-panel-shopping-list.html": "d8cfd0ecdb3aa6214c0f6908c34c7141"
|
||||
}
|
@ -1 +0,0 @@
|
||||
!function(){"use strict";function e(e,t){if(void 0===e||null===e)throw new TypeError("Cannot convert first argument to object");for(var r=Object(e),n=1;n<arguments.length;n++){var o=arguments[n];if(void 0!==o&&null!==o)for(var i=Object.keys(Object(o)),l=0,c=i.length;l<c;l++){var a=i[l],b=Object.getOwnPropertyDescriptor(o,a);void 0!==b&&b.enumerable&&(r[a]=o[a])}}return r}({assign:e,polyfill:function(){Object.assign||Object.defineProperty(Object,"assign",{enumerable:!1,configurable:!0,writable:!0,value:e})}}).polyfill()}();
|
Binary file not shown.
File diff suppressed because one or more lines are too long
Binary file not shown.
@ -1,16 +0,0 @@
|
||||
(function () {
|
||||
'use strict';
|
||||
|
||||
(()=>{'use strict';if(!window.customElements)return;const a=window.HTMLElement,b=window.customElements.define,c=window.customElements.get,d=new Map,e=new Map;let f=!1,g=!1;window.HTMLElement=function(){if(!f){const a=d.get(this.constructor),b=c.call(window.customElements,a);g=!0;const e=new b;return e}f=!1;},window.HTMLElement.prototype=a.prototype;Object.defineProperty(window,'customElements',{value:window.customElements,configurable:!0,writable:!0}),Object.defineProperty(window.customElements,'define',{value:(c,h)=>{const i=h.prototype,j=class extends a{constructor(){super(),Object.setPrototypeOf(this,i),g||(f=!0,h.call(this)),g=!1;}},k=j.prototype;j.observedAttributes=h.observedAttributes,k.connectedCallback=i.connectedCallback,k.disconnectedCallback=i.disconnectedCallback,k.attributeChangedCallback=i.attributeChangedCallback,k.adoptedCallback=i.adoptedCallback,d.set(h,c),e.set(c,h),b.call(window.customElements,c,j);},configurable:!0,writable:!0}),Object.defineProperty(window.customElements,'get',{value:(a)=>e.get(a),configurable:!0,writable:!0});})();
|
||||
|
||||
/**
|
||||
@license
|
||||
Copyright (c) 2017 The Polymer Project Authors. All rights reserved.
|
||||
This code may only be used under the BSD style license found at http://polymer.github.io/LICENSE.txt
|
||||
The complete set of authors may be found at http://polymer.github.io/AUTHORS.txt
|
||||
The complete set of contributors may be found at http://polymer.github.io/CONTRIBUTORS.txt
|
||||
Code distributed by Google as part of the polymer project is also
|
||||
subject to an additional IP rights grant found at http://polymer.github.io/PATENTS.txt
|
||||
*/
|
||||
|
||||
}());
|
Binary file not shown.
@ -1 +0,0 @@
|
||||
Copyright 2011 Google Inc. All Rights Reserved.
|
@ -1,17 +0,0 @@
|
||||
<p>Roboto has a dual nature. It has a mechanical skeleton and the forms are
|
||||
largely geometric. At the same time, the font features friendly and open
|
||||
curves. While some grotesks distort their letterforms to force a rigid rhythm,
|
||||
Roboto doesn’t compromise, allowing letters to be settled into their natural
|
||||
width. This makes for a more natural reading rhythm more commonly found in
|
||||
humanist and serif types.</p>
|
||||
|
||||
<p>This is the normal family, which can be used alongside the
|
||||
<a href="http://www.google.com/fonts/specimen/Roboto+Condensed">Roboto Condensed</a> family and the
|
||||
<a href="http://www.google.com/fonts/specimen/Roboto+Slab">Roboto Slab</a> family.</p>
|
||||
|
||||
<p>
|
||||
<b>Updated January 14 2015:</b>
|
||||
Christian Robertson and the Material Design team unveiled the latest version of Roboto at Google I/O last year, and it is now available from Google Fonts.
|
||||
Existing websites using Roboto via Google Fonts will start using the latest version automatically.
|
||||
If you have installed the fonts on your computer, please download them again and re-install.
|
||||
</p>
|
@ -1,202 +0,0 @@
|
||||
|
||||
Apache License
|
||||
Version 2.0, January 2004
|
||||
http://www.apache.org/licenses/
|
||||
|
||||
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
|
||||
|
||||
1. Definitions.
|
||||
|
||||
"License" shall mean the terms and conditions for use, reproduction,
|
||||
and distribution as defined by Sections 1 through 9 of this document.
|
||||
|
||||
"Licensor" shall mean the copyright owner or entity authorized by
|
||||
the copyright owner that is granting the License.
|
||||
|
||||
"Legal Entity" shall mean the union of the acting entity and all
|
||||
other entities that control, are controlled by, or are under common
|
||||
control with that entity. For the purposes of this definition,
|
||||
"control" means (i) the power, direct or indirect, to cause the
|
||||
direction or management of such entity, whether by contract or
|
||||
otherwise, or (ii) ownership of fifty percent (50%) or more of the
|
||||
outstanding shares, or (iii) beneficial ownership of such entity.
|
||||
|
||||
"You" (or "Your") shall mean an individual or Legal Entity
|
||||
exercising permissions granted by this License.
|
||||
|
||||
"Source" form shall mean the preferred form for making modifications,
|
||||
including but not limited to software source code, documentation
|
||||
source, and configuration files.
|
||||
|
||||
"Object" form shall mean any form resulting from mechanical
|
||||
transformation or translation of a Source form, including but
|
||||
not limited to compiled object code, generated documentation,
|
||||
and conversions to other media types.
|
||||
|
||||
"Work" shall mean the work of authorship, whether in Source or
|
||||
Object form, made available under the License, as indicated by a
|
||||
copyright notice that is included in or attached to the work
|
||||
(an example is provided in the Appendix below).
|
||||
|
||||
"Derivative Works" shall mean any work, whether in Source or Object
|
||||
form, that is based on (or derived from) the Work and for which the
|
||||
editorial revisions, annotations, elaborations, or other modifications
|
||||
represent, as a whole, an original work of authorship. For the purposes
|
||||
of this License, Derivative Works shall not include works that remain
|
||||
separable from, or merely link (or bind by name) to the interfaces of,
|
||||
the Work and Derivative Works thereof.
|
||||
|
||||
"Contribution" shall mean any work of authorship, including
|
||||
the original version of the Work and any modifications or additions
|
||||
to that Work or Derivative Works thereof, that is intentionally
|
||||
submitted to Licensor for inclusion in the Work by the copyright owner
|
||||
or by an individual or Legal Entity authorized to submit on behalf of
|
||||
the copyright owner. For the purposes of this definition, "submitted"
|
||||
means any form of electronic, verbal, or written communication sent
|
||||
to the Licensor or its representatives, including but not limited to
|
||||
communication on electronic mailing lists, source code control systems,
|
||||
and issue tracking systems that are managed by, or on behalf of, the
|
||||
Licensor for the purpose of discussing and improving the Work, but
|
||||
excluding communication that is conspicuously marked or otherwise
|
||||
designated in writing by the copyright owner as "Not a Contribution."
|
||||
|
||||
"Contributor" shall mean Licensor and any individual or Legal Entity
|
||||
on behalf of whom a Contribution has been received by Licensor and
|
||||
subsequently incorporated within the Work.
|
||||
|
||||
2. Grant of Copyright License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
copyright license to reproduce, prepare Derivative Works of,
|
||||
publicly display, publicly perform, sublicense, and distribute the
|
||||
Work and such Derivative Works in Source or Object form.
|
||||
|
||||
3. Grant of Patent License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
(except as stated in this section) patent license to make, have made,
|
||||
use, offer to sell, sell, import, and otherwise transfer the Work,
|
||||
where such license applies only to those patent claims licensable
|
||||
by such Contributor that are necessarily infringed by their
|
||||
Contribution(s) alone or by combination of their Contribution(s)
|
||||
with the Work to which such Contribution(s) was submitted. If You
|
||||
institute patent litigation against any entity (including a
|
||||
cross-claim or counterclaim in a lawsuit) alleging that the Work
|
||||
or a Contribution incorporated within the Work constitutes direct
|
||||
or contributory patent infringement, then any patent licenses
|
||||
granted to You under this License for that Work shall terminate
|
||||
as of the date such litigation is filed.
|
||||
|
||||
4. Redistribution. You may reproduce and distribute copies of the
|
||||
Work or Derivative Works thereof in any medium, with or without
|
||||
modifications, and in Source or Object form, provided that You
|
||||
meet the following conditions:
|
||||
|
||||
(a) You must give any other recipients of the Work or
|
||||
Derivative Works a copy of this License; and
|
||||
|
||||
(b) You must cause any modified files to carry prominent notices
|
||||
stating that You changed the files; and
|
||||
|
||||
(c) You must retain, in the Source form of any Derivative Works
|
||||
that You distribute, all copyright, patent, trademark, and
|
||||
attribution notices from the Source form of the Work,
|
||||
excluding those notices that do not pertain to any part of
|
||||
the Derivative Works; and
|
||||
|
||||
(d) If the Work includes a "NOTICE" text file as part of its
|
||||
distribution, then any Derivative Works that You distribute must
|
||||
include a readable copy of the attribution notices contained
|
||||
within such NOTICE file, excluding those notices that do not
|
||||
pertain to any part of the Derivative Works, in at least one
|
||||
of the following places: within a NOTICE text file distributed
|
||||
as part of the Derivative Works; within the Source form or
|
||||
documentation, if provided along with the Derivative Works; or,
|
||||
within a display generated by the Derivative Works, if and
|
||||
wherever such third-party notices normally appear. The contents
|
||||
of the NOTICE file are for informational purposes only and
|
||||
do not modify the License. You may add Your own attribution
|
||||
notices within Derivative Works that You distribute, alongside
|
||||
or as an addendum to the NOTICE text from the Work, provided
|
||||
that such additional attribution notices cannot be construed
|
||||
as modifying the License.
|
||||
|
||||
You may add Your own copyright statement to Your modifications and
|
||||
may provide additional or different license terms and conditions
|
||||
for use, reproduction, or distribution of Your modifications, or
|
||||
for any such Derivative Works as a whole, provided Your use,
|
||||
reproduction, and distribution of the Work otherwise complies with
|
||||
the conditions stated in this License.
|
||||
|
||||
5. Submission of Contributions. Unless You explicitly state otherwise,
|
||||
any Contribution intentionally submitted for inclusion in the Work
|
||||
by You to the Licensor shall be under the terms and conditions of
|
||||
this License, without any additional terms or conditions.
|
||||
Notwithstanding the above, nothing herein shall supersede or modify
|
||||
the terms of any separate license agreement you may have executed
|
||||
with Licensor regarding such Contributions.
|
||||
|
||||
6. Trademarks. This License does not grant permission to use the trade
|
||||
names, trademarks, service marks, or product names of the Licensor,
|
||||
except as required for reasonable and customary use in describing the
|
||||
origin of the Work and reproducing the content of the NOTICE file.
|
||||
|
||||
7. Disclaimer of Warranty. Unless required by applicable law or
|
||||
agreed to in writing, Licensor provides the Work (and each
|
||||
Contributor provides its Contributions) on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||
implied, including, without limitation, any warranties or conditions
|
||||
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
|
||||
PARTICULAR PURPOSE. You are solely responsible for determining the
|
||||
appropriateness of using or redistributing the Work and assume any
|
||||
risks associated with Your exercise of permissions under this License.
|
||||
|
||||
8. Limitation of Liability. In no event and under no legal theory,
|
||||
whether in tort (including negligence), contract, or otherwise,
|
||||
unless required by applicable law (such as deliberate and grossly
|
||||
negligent acts) or agreed to in writing, shall any Contributor be
|
||||
liable to You for damages, including any direct, indirect, special,
|
||||
incidental, or consequential damages of any character arising as a
|
||||
result of this License or out of the use or inability to use the
|
||||
Work (including but not limited to damages for loss of goodwill,
|
||||
work stoppage, computer failure or malfunction, or any and all
|
||||
other commercial damages or losses), even if such Contributor
|
||||
has been advised of the possibility of such damages.
|
||||
|
||||
9. Accepting Warranty or Additional Liability. While redistributing
|
||||
the Work or Derivative Works thereof, You may choose to offer,
|
||||
and charge a fee for, acceptance of support, warranty, indemnity,
|
||||
or other liability obligations and/or rights consistent with this
|
||||
License. However, in accepting such obligations, You may act only
|
||||
on Your own behalf and on Your sole responsibility, not on behalf
|
||||
of any other Contributor, and only if You agree to indemnify,
|
||||
defend, and hold each Contributor harmless for any liability
|
||||
incurred by, or claims asserted against, such Contributor by reason
|
||||
of your accepting any such warranty or additional liability.
|
||||
|
||||
END OF TERMS AND CONDITIONS
|
||||
|
||||
APPENDIX: How to apply the Apache License to your work.
|
||||
|
||||
To apply the Apache License to your work, attach the following
|
||||
boilerplate notice, with the fields enclosed by brackets "[]"
|
||||
replaced with your own identifying information. (Don't include
|
||||
the brackets!) The text should be enclosed in the appropriate
|
||||
comment syntax for the file format. We also recommend that a
|
||||
file or class name and description of purpose be included on the
|
||||
same "printed page" as the copyright notice for easier
|
||||
identification within third-party archives.
|
||||
|
||||
Copyright [yyyy] [name of copyright owner]
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
@ -1,129 +0,0 @@
|
||||
{
|
||||
"name": "Roboto",
|
||||
"designer": "Christian Robertson",
|
||||
"license": "Apache2",
|
||||
"visibility": "External",
|
||||
"category": "Sans Serif",
|
||||
"size": 86523,
|
||||
"fonts": [
|
||||
{
|
||||
"name": "Roboto",
|
||||
"style": "normal",
|
||||
"weight": 100,
|
||||
"filename": "Roboto-Thin.ttf",
|
||||
"postScriptName": "Roboto-Thin",
|
||||
"fullName": "Roboto Thin",
|
||||
"copyright": "Copyright 2011 Google Inc. All Rights Reserved."
|
||||
},
|
||||
{
|
||||
"name": "Roboto",
|
||||
"style": "italic",
|
||||
"weight": 100,
|
||||
"filename": "Roboto-ThinItalic.ttf",
|
||||
"postScriptName": "Roboto-ThinItalic",
|
||||
"fullName": "Roboto Thin Italic",
|
||||
"copyright": "Copyright 2011 Google Inc. All Rights Reserved."
|
||||
},
|
||||
{
|
||||
"name": "Roboto",
|
||||
"style": "normal",
|
||||
"weight": 300,
|
||||
"filename": "Roboto-Light.ttf",
|
||||
"postScriptName": "Roboto-Light",
|
||||
"fullName": "Roboto Light",
|
||||
"copyright": "Copyright 2011 Google Inc. All Rights Reserved."
|
||||
},
|
||||
{
|
||||
"name": "Roboto",
|
||||
"style": "italic",
|
||||
"weight": 300,
|
||||
"filename": "Roboto-LightItalic.ttf",
|
||||
"postScriptName": "Roboto-LightItalic",
|
||||
"fullName": "Roboto Light Italic",
|
||||
"copyright": "Copyright 2011 Google Inc. All Rights Reserved."
|
||||
},
|
||||
{
|
||||
"name": "Roboto",
|
||||
"style": "normal",
|
||||
"weight": 400,
|
||||
"filename": "Roboto-Regular.ttf",
|
||||
"postScriptName": "Roboto-Regular",
|
||||
"fullName": "Roboto",
|
||||
"copyright": "Copyright 2011 Google Inc. All Rights Reserved."
|
||||
},
|
||||
{
|
||||
"name": "Roboto",
|
||||
"style": "italic",
|
||||
"weight": 400,
|
||||
"filename": "Roboto-Italic.ttf",
|
||||
"postScriptName": "Roboto-Italic",
|
||||
"fullName": "Roboto Italic",
|
||||
"copyright": "Copyright 2011 Google Inc. All Rights Reserved."
|
||||
},
|
||||
{
|
||||
"name": "Roboto",
|
||||
"style": "normal",
|
||||
"weight": 500,
|
||||
"filename": "Roboto-Medium.ttf",
|
||||
"postScriptName": "Roboto-Medium",
|
||||
"fullName": "Roboto Medium",
|
||||
"copyright": "Copyright 2011 Google Inc. All Rights Reserved."
|
||||
},
|
||||
{
|
||||
"name": "Roboto",
|
||||
"style": "italic",
|
||||
"weight": 500,
|
||||
"filename": "Roboto-MediumItalic.ttf",
|
||||
"postScriptName": "Roboto-MediumItalic",
|
||||
"fullName": "Roboto Medium Italic",
|
||||
"copyright": "Copyright 2011 Google Inc. All Rights Reserved."
|
||||
},
|
||||
{
|
||||
"name": "Roboto",
|
||||
"style": "normal",
|
||||
"weight": 700,
|
||||
"filename": "Roboto-Bold.ttf",
|
||||
"postScriptName": "Roboto-Bold",
|
||||
"fullName": "Roboto Bold",
|
||||
"copyright": "Copyright 2011 Google Inc. All Rights Reserved."
|
||||
},
|
||||
{
|
||||
"name": "Roboto",
|
||||
"style": "italic",
|
||||
"weight": 700,
|
||||
"filename": "Roboto-BoldItalic.ttf",
|
||||
"postScriptName": "Roboto-BoldItalic",
|
||||
"fullName": "Roboto Bold Italic",
|
||||
"copyright": "Copyright 2011 Google Inc. All Rights Reserved."
|
||||
},
|
||||
{
|
||||
"name": "Roboto",
|
||||
"style": "normal",
|
||||
"weight": 900,
|
||||
"filename": "Roboto-Black.ttf",
|
||||
"postScriptName": "Roboto-Black",
|
||||
"fullName": "Roboto Black",
|
||||
"copyright": "Copyright 2011 Google Inc. All Rights Reserved."
|
||||
},
|
||||
{
|
||||
"name": "Roboto",
|
||||
"style": "italic",
|
||||
"weight": 900,
|
||||
"filename": "Roboto-BlackItalic.ttf",
|
||||
"postScriptName": "Roboto-BlackItalic",
|
||||
"fullName": "Roboto Black Italic",
|
||||
"copyright": "Copyright 2011 Google Inc. All Rights Reserved."
|
||||
}
|
||||
],
|
||||
"subsets": [
|
||||
"cyrillic",
|
||||
"cyrillic-ext",
|
||||
"greek",
|
||||
"greek-ext",
|
||||
"latin",
|
||||
"latin-ext",
|
||||
"menu",
|
||||
"vietnamese"
|
||||
],
|
||||
"dateAdded": "2013-01-09"
|
||||
}
|
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
@ -1,17 +0,0 @@
|
||||
<p>
|
||||
Roboto Mono is a monospaced addition to the <a href="https://www.google.com/fonts/specimen/Roboto">Roboto</a> type family.
|
||||
Like the other members of the Roboto family, the fonts are optimized for readability on screens across a wide variety of devices and reading environments.
|
||||
While the monospaced version is related to its variable width cousin, it doesn’t hesitate to change forms to better fit the constraints of a monospaced environment.
|
||||
For example, narrow glyphs like ‘I’, ‘l’ and ‘i’ have added serifs for more even texture while wider glyphs are adjusted for weight.
|
||||
Curved caps like ‘C’ and ‘O’ take on the straighter sides from Roboto Condensed.
|
||||
</p>
|
||||
|
||||
<p>
|
||||
Special consideration is given to glyphs important for reading and writing software source code.
|
||||
Letters with similar shapes are easy to tell apart.
|
||||
Digit ‘1’, lowercase ‘l’ and capital ‘I’ are easily differentiated as are zero and the letter ‘O’.
|
||||
Punctuation important for code has also been considered.
|
||||
For example, the curly braces ‘{ }’ have exaggerated points to clearly differentiate them from parenthesis ‘( )’ and braces ‘[ ]’.
|
||||
Periods and commas are also exaggerated to identify them more quickly.
|
||||
The scale and weight of symbols commonly used as operators have also been optimized.
|
||||
</p>
|
@ -1,202 +0,0 @@
|
||||
|
||||
Apache License
|
||||
Version 2.0, January 2004
|
||||
http://www.apache.org/licenses/
|
||||
|
||||
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
|
||||
|
||||
1. Definitions.
|
||||
|
||||
"License" shall mean the terms and conditions for use, reproduction,
|
||||
and distribution as defined by Sections 1 through 9 of this document.
|
||||
|
||||
"Licensor" shall mean the copyright owner or entity authorized by
|
||||
the copyright owner that is granting the License.
|
||||
|
||||
"Legal Entity" shall mean the union of the acting entity and all
|
||||
other entities that control, are controlled by, or are under common
|
||||
control with that entity. For the purposes of this definition,
|
||||
"control" means (i) the power, direct or indirect, to cause the
|
||||
direction or management of such entity, whether by contract or
|
||||
otherwise, or (ii) ownership of fifty percent (50%) or more of the
|
||||
outstanding shares, or (iii) beneficial ownership of such entity.
|
||||
|
||||
"You" (or "Your") shall mean an individual or Legal Entity
|
||||
exercising permissions granted by this License.
|
||||
|
||||
"Source" form shall mean the preferred form for making modifications,
|
||||
including but not limited to software source code, documentation
|
||||
source, and configuration files.
|
||||
|
||||
"Object" form shall mean any form resulting from mechanical
|
||||
transformation or translation of a Source form, including but
|
||||
not limited to compiled object code, generated documentation,
|
||||
and conversions to other media types.
|
||||
|
||||
"Work" shall mean the work of authorship, whether in Source or
|
||||
Object form, made available under the License, as indicated by a
|
||||
copyright notice that is included in or attached to the work
|
||||
(an example is provided in the Appendix below).
|
||||
|
||||
"Derivative Works" shall mean any work, whether in Source or Object
|
||||
form, that is based on (or derived from) the Work and for which the
|
||||
editorial revisions, annotations, elaborations, or other modifications
|
||||
represent, as a whole, an original work of authorship. For the purposes
|
||||
of this License, Derivative Works shall not include works that remain
|
||||
separable from, or merely link (or bind by name) to the interfaces of,
|
||||
the Work and Derivative Works thereof.
|
||||
|
||||
"Contribution" shall mean any work of authorship, including
|
||||
the original version of the Work and any modifications or additions
|
||||
to that Work or Derivative Works thereof, that is intentionally
|
||||
submitted to Licensor for inclusion in the Work by the copyright owner
|
||||
or by an individual or Legal Entity authorized to submit on behalf of
|
||||
the copyright owner. For the purposes of this definition, "submitted"
|
||||
means any form of electronic, verbal, or written communication sent
|
||||
to the Licensor or its representatives, including but not limited to
|
||||
communication on electronic mailing lists, source code control systems,
|
||||
and issue tracking systems that are managed by, or on behalf of, the
|
||||
Licensor for the purpose of discussing and improving the Work, but
|
||||
excluding communication that is conspicuously marked or otherwise
|
||||
designated in writing by the copyright owner as "Not a Contribution."
|
||||
|
||||
"Contributor" shall mean Licensor and any individual or Legal Entity
|
||||
on behalf of whom a Contribution has been received by Licensor and
|
||||
subsequently incorporated within the Work.
|
||||
|
||||
2. Grant of Copyright License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
copyright license to reproduce, prepare Derivative Works of,
|
||||
publicly display, publicly perform, sublicense, and distribute the
|
||||
Work and such Derivative Works in Source or Object form.
|
||||
|
||||
3. Grant of Patent License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
(except as stated in this section) patent license to make, have made,
|
||||
use, offer to sell, sell, import, and otherwise transfer the Work,
|
||||
where such license applies only to those patent claims licensable
|
||||
by such Contributor that are necessarily infringed by their
|
||||
Contribution(s) alone or by combination of their Contribution(s)
|
||||
with the Work to which such Contribution(s) was submitted. If You
|
||||
institute patent litigation against any entity (including a
|
||||
cross-claim or counterclaim in a lawsuit) alleging that the Work
|
||||
or a Contribution incorporated within the Work constitutes direct
|
||||
or contributory patent infringement, then any patent licenses
|
||||
granted to You under this License for that Work shall terminate
|
||||
as of the date such litigation is filed.
|
||||
|
||||
4. Redistribution. You may reproduce and distribute copies of the
|
||||
Work or Derivative Works thereof in any medium, with or without
|
||||
modifications, and in Source or Object form, provided that You
|
||||
meet the following conditions:
|
||||
|
||||
(a) You must give any other recipients of the Work or
|
||||
Derivative Works a copy of this License; and
|
||||
|
||||
(b) You must cause any modified files to carry prominent notices
|
||||
stating that You changed the files; and
|
||||
|
||||
(c) You must retain, in the Source form of any Derivative Works
|
||||
that You distribute, all copyright, patent, trademark, and
|
||||
attribution notices from the Source form of the Work,
|
||||
excluding those notices that do not pertain to any part of
|
||||
the Derivative Works; and
|
||||
|
||||
(d) If the Work includes a "NOTICE" text file as part of its
|
||||
distribution, then any Derivative Works that You distribute must
|
||||
include a readable copy of the attribution notices contained
|
||||
within such NOTICE file, excluding those notices that do not
|
||||
pertain to any part of the Derivative Works, in at least one
|
||||
of the following places: within a NOTICE text file distributed
|
||||
as part of the Derivative Works; within the Source form or
|
||||
documentation, if provided along with the Derivative Works; or,
|
||||
within a display generated by the Derivative Works, if and
|
||||
wherever such third-party notices normally appear. The contents
|
||||
of the NOTICE file are for informational purposes only and
|
||||
do not modify the License. You may add Your own attribution
|
||||
notices within Derivative Works that You distribute, alongside
|
||||
or as an addendum to the NOTICE text from the Work, provided
|
||||
that such additional attribution notices cannot be construed
|
||||
as modifying the License.
|
||||
|
||||
You may add Your own copyright statement to Your modifications and
|
||||
may provide additional or different license terms and conditions
|
||||
for use, reproduction, or distribution of Your modifications, or
|
||||
for any such Derivative Works as a whole, provided Your use,
|
||||
reproduction, and distribution of the Work otherwise complies with
|
||||
the conditions stated in this License.
|
||||
|
||||
5. Submission of Contributions. Unless You explicitly state otherwise,
|
||||
any Contribution intentionally submitted for inclusion in the Work
|
||||
by You to the Licensor shall be under the terms and conditions of
|
||||
this License, without any additional terms or conditions.
|
||||
Notwithstanding the above, nothing herein shall supersede or modify
|
||||
the terms of any separate license agreement you may have executed
|
||||
with Licensor regarding such Contributions.
|
||||
|
||||
6. Trademarks. This License does not grant permission to use the trade
|
||||
names, trademarks, service marks, or product names of the Licensor,
|
||||
except as required for reasonable and customary use in describing the
|
||||
origin of the Work and reproducing the content of the NOTICE file.
|
||||
|
||||
7. Disclaimer of Warranty. Unless required by applicable law or
|
||||
agreed to in writing, Licensor provides the Work (and each
|
||||
Contributor provides its Contributions) on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||
implied, including, without limitation, any warranties or conditions
|
||||
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
|
||||
PARTICULAR PURPOSE. You are solely responsible for determining the
|
||||
appropriateness of using or redistributing the Work and assume any
|
||||
risks associated with Your exercise of permissions under this License.
|
||||
|
||||
8. Limitation of Liability. In no event and under no legal theory,
|
||||
whether in tort (including negligence), contract, or otherwise,
|
||||
unless required by applicable law (such as deliberate and grossly
|
||||
negligent acts) or agreed to in writing, shall any Contributor be
|
||||
liable to You for damages, including any direct, indirect, special,
|
||||
incidental, or consequential damages of any character arising as a
|
||||
result of this License or out of the use or inability to use the
|
||||
Work (including but not limited to damages for loss of goodwill,
|
||||
work stoppage, computer failure or malfunction, or any and all
|
||||
other commercial damages or losses), even if such Contributor
|
||||
has been advised of the possibility of such damages.
|
||||
|
||||
9. Accepting Warranty or Additional Liability. While redistributing
|
||||
the Work or Derivative Works thereof, You may choose to offer,
|
||||
and charge a fee for, acceptance of support, warranty, indemnity,
|
||||
or other liability obligations and/or rights consistent with this
|
||||
License. However, in accepting such obligations, You may act only
|
||||
on Your own behalf and on Your sole responsibility, not on behalf
|
||||
of any other Contributor, and only if You agree to indemnify,
|
||||
defend, and hold each Contributor harmless for any liability
|
||||
incurred by, or claims asserted against, such Contributor by reason
|
||||
of your accepting any such warranty or additional liability.
|
||||
|
||||
END OF TERMS AND CONDITIONS
|
||||
|
||||
APPENDIX: How to apply the Apache License to your work.
|
||||
|
||||
To apply the Apache License to your work, attach the following
|
||||
boilerplate notice, with the fields enclosed by brackets "[]"
|
||||
replaced with your own identifying information. (Don't include
|
||||
the brackets!) The text should be enclosed in the appropriate
|
||||
comment syntax for the file format. We also recommend that a
|
||||
file or class name and description of purpose be included on the
|
||||
same "printed page" as the copyright notice for easier
|
||||
identification within third-party archives.
|
||||
|
||||
Copyright [yyyy] [name of copyright owner]
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
@ -1,111 +0,0 @@
|
||||
{
|
||||
"name": "Roboto Mono",
|
||||
"designer": "Christian Robertson",
|
||||
"license": "Apache2",
|
||||
"visibility": "External",
|
||||
"category": "Monospace",
|
||||
"size": 51290,
|
||||
"fonts": [
|
||||
{
|
||||
"name": "Roboto Mono",
|
||||
"postScriptName": "RobotoMono-Thin",
|
||||
"fullName": "Roboto Mono Thin",
|
||||
"style": "normal",
|
||||
"weight": 100,
|
||||
"filename": "RobotoMono-Thin.ttf",
|
||||
"copyright": "Copyright 2015 Google Inc. All Rights Reserved."
|
||||
},
|
||||
{
|
||||
"name": "Roboto Mono",
|
||||
"postScriptName": "RobotoMono-ThinItalic",
|
||||
"fullName": "Roboto Mono Thin Italic",
|
||||
"style": "italic",
|
||||
"weight": 100,
|
||||
"filename": "RobotoMono-ThinItalic.ttf",
|
||||
"copyright": "Copyright 2015 Google Inc. All Rights Reserved."
|
||||
},
|
||||
{
|
||||
"name": "Roboto Mono",
|
||||
"postScriptName": "RobotoMono-Light",
|
||||
"fullName": "Roboto Mono Light",
|
||||
"style": "normal",
|
||||
"weight": 300,
|
||||
"filename": "RobotoMono-Light.ttf",
|
||||
"copyright": "Copyright 2015 Google Inc. All Rights Reserved."
|
||||
},
|
||||
{
|
||||
"name": "Roboto Mono",
|
||||
"postScriptName": "RobotoMono-LightItalic",
|
||||
"fullName": "Roboto Mono Light Italic",
|
||||
"style": "italic",
|
||||
"weight": 300,
|
||||
"filename": "RobotoMono-LightItalic.ttf",
|
||||
"copyright": "Copyright 2015 Google Inc. All Rights Reserved."
|
||||
},
|
||||
{
|
||||
"name": "Roboto Mono",
|
||||
"postScriptName": "RobotoMono-Regular",
|
||||
"fullName": "Roboto Mono",
|
||||
"style": "normal",
|
||||
"weight": 400,
|
||||
"filename": "RobotoMono-Regular.ttf",
|
||||
"copyright": "Copyright 2015 Google Inc. All Rights Reserved."
|
||||
},
|
||||
{
|
||||
"name": "Roboto Mono",
|
||||
"postScriptName": "RobotoMono-Italic",
|
||||
"fullName": "Roboto Mono Italic",
|
||||
"style": "italic",
|
||||
"weight": 400,
|
||||
"filename": "RobotoMono-Italic.ttf",
|
||||
"copyright": "Copyright 2015 Google Inc. All Rights Reserved."
|
||||
},
|
||||
{
|
||||
"name": "Roboto Mono",
|
||||
"postScriptName": "RobotoMono-Medium",
|
||||
"fullName": "Roboto Mono Medium",
|
||||
"style": "normal",
|
||||
"weight": 500,
|
||||
"filename": "RobotoMono-Medium.ttf",
|
||||
"copyright": "Copyright 2015 Google Inc. All Rights Reserved."
|
||||
},
|
||||
{
|
||||
"name": "Roboto Mono",
|
||||
"postScriptName": "RobotoMono-MediumItalic",
|
||||
"fullName": "Roboto Mono Medium Italic",
|
||||
"style": "italic",
|
||||
"weight": 500,
|
||||
"filename": "RobotoMono-MediumItalic.ttf",
|
||||
"copyright": "Copyright 2015 Google Inc. All Rights Reserved."
|
||||
},
|
||||
{
|
||||
"name": "Roboto Mono",
|
||||
"postScriptName": "RobotoMono-Bold",
|
||||
"fullName": "Roboto Mono Bold",
|
||||
"style": "normal",
|
||||
"weight": 700,
|
||||
"filename": "RobotoMono-Bold.ttf",
|
||||
"copyright": "Copyright 2015 Google Inc. All Rights Reserved."
|
||||
},
|
||||
{
|
||||
"name": "Roboto Mono",
|
||||
"postScriptName": "RobotoMono-BoldItalic",
|
||||
"fullName": "Roboto Mono Bold Italic",
|
||||
"style": "italic",
|
||||
"weight": 700,
|
||||
"filename": "RobotoMono-BoldItalic.ttf",
|
||||
"copyright": "Copyright 2015 Google Inc. All Rights Reserved."
|
||||
}
|
||||
],
|
||||
"subsets": [
|
||||
"cyrillic",
|
||||
"cyrillic-ext",
|
||||
"greek",
|
||||
"greek-ext",
|
||||
"latin",
|
||||
"latin-ext",
|
||||
"menu",
|
||||
"vietnamese"
|
||||
],
|
||||
"dateAdded": "2015-05-13"
|
||||
}
|
Some files were not shown because too many files have changed in this diff Show More
Loading…
x
Reference in New Issue
Block a user