Mirror of https://github.com/home-assistant/core.git
Synced 2025-07-21 12:17:07 +00:00
Commit 9b1ed4e79b

.coveragerc (21 changes)
@@ -65,6 +65,9 @@ omit =
    homeassistant/components/isy994.py
    homeassistant/components/*/isy994.py

    homeassistant/components/juicenet.py
    homeassistant/components/*/juicenet.py

    homeassistant/components/kira.py
    homeassistant/components/*/kira.py

@@ -74,6 +77,9 @@ omit =
    homeassistant/components/lutron_caseta.py
    homeassistant/components/*/lutron_caseta.py

    homeassistant/components/mailgun.py
    homeassistant/components/*/mailgun.py

    homeassistant/components/modbus.py
    homeassistant/components/*/modbus.py

@@ -197,6 +203,7 @@ omit =
    homeassistant/components/binary_sensor/pilight.py
    homeassistant/components/binary_sensor/ping.py
    homeassistant/components/binary_sensor/rest.py
    homeassistant/components/binary_sensor/tapsaff.py
    homeassistant/components/browser.py
    homeassistant/components/camera/amcrest.py
    homeassistant/components/camera/bloomsky.py
@@ -204,8 +211,10 @@ omit =
    homeassistant/components/camera/foscam.py
    homeassistant/components/camera/mjpeg.py
    homeassistant/components/camera/rpi_camera.py
    homeassistant/components/camera/onvif.py
    homeassistant/components/camera/synology.py
    homeassistant/components/climate/eq3btsmart.py
    homeassistant/components/climate/flexit.py
    homeassistant/components/climate/heatmiser.py
    homeassistant/components/climate/homematic.py
    homeassistant/components/climate/knx.py
@@ -286,6 +295,7 @@ omit =
    homeassistant/components/lirc.py
    homeassistant/components/lock/nuki.py
    homeassistant/components/lock/lockitron.py
    homeassistant/components/lock/sesame.py
    homeassistant/components/media_player/anthemav.py
    homeassistant/components/media_player/apple_tv.py
    homeassistant/components/media_player/aquostv.py
@@ -310,6 +320,7 @@ omit =
    homeassistant/components/media_player/mpchc.py
    homeassistant/components/media_player/mpd.py
    homeassistant/components/media_player/nad.py
    homeassistant/components/media_player/nadtcp.py
    homeassistant/components/media_player/onkyo.py
    homeassistant/components/media_player/openhome.py
    homeassistant/components/media_player/panasonic_viera.py
@@ -341,7 +352,6 @@ omit =
    homeassistant/components/notify/kodi.py
    homeassistant/components/notify/lannouncer.py
    homeassistant/components/notify/llamalab_automate.py
    homeassistant/components/notify/mailgun.py
    homeassistant/components/notify/matrix.py
    homeassistant/components/notify/message_bird.py
    homeassistant/components/notify/nfandroidtv.py
@@ -370,8 +380,10 @@ omit =
    homeassistant/components/sensor/arwn.py
    homeassistant/components/sensor/bbox.py
    homeassistant/components/sensor/bitcoin.py
    homeassistant/components/sensor/blockchain.py
    homeassistant/components/sensor/bom.py
    homeassistant/components/sensor/broadlink.py
    homeassistant/components/sensor/buienradar.py
    homeassistant/components/sensor/dublin_bus_transport.py
    homeassistant/components/sensor/coinmarketcap.py
    homeassistant/components/sensor/cert_expiry.py
@@ -391,6 +403,7 @@ omit =
    homeassistant/components/sensor/eliqonline.py
    homeassistant/components/sensor/emoncms.py
    homeassistant/components/sensor/envirophat.py
    homeassistant/components/sensor/etherscan.py
    homeassistant/components/sensor/fastdotcom.py
    homeassistant/components/sensor/fedex.py
    homeassistant/components/sensor/fido.py
@@ -398,6 +411,7 @@ omit =
    homeassistant/components/sensor/fixer.py
    homeassistant/components/sensor/fritzbox_callmonitor.py
    homeassistant/components/sensor/fritzbox_netmonitor.py
    homeassistant/components/sensor/gitter.py
    homeassistant/components/sensor/glances.py
    homeassistant/components/sensor/google_travel_time.py
    homeassistant/components/sensor/gpsd.py
@@ -435,6 +449,8 @@ omit =
    homeassistant/components/sensor/pushbullet.py
    homeassistant/components/sensor/pvoutput.py
    homeassistant/components/sensor/qnap.py
    homeassistant/components/sensor/radarr.py
    homeassistant/components/sensor/ripple.py
    homeassistant/components/sensor/sabnzbd.py
    homeassistant/components/sensor/scrape.py
    homeassistant/components/sensor/sensehat.py
@@ -464,6 +480,7 @@ omit =
    homeassistant/components/sensor/xbox_live.py
    homeassistant/components/sensor/yweather.py
    homeassistant/components/sensor/zamg.py
    homeassistant/components/spc.py
    homeassistant/components/switch/acer_projector.py
    homeassistant/components/switch/anel_pwrctrl.py
    homeassistant/components/switch/arest.py
@@ -492,8 +509,10 @@ omit =
    homeassistant/components/tts/picotts.py
    homeassistant/components/upnp.py
    homeassistant/components/weather/bom.py
    homeassistant/components/weather/buienradar.py
    homeassistant/components/weather/metoffice.py
    homeassistant/components/weather/openweathermap.py
    homeassistant/components/weather/yweather.py
    homeassistant/components/weather/zamg.py
    homeassistant/components/zeroconf.py
    homeassistant/components/zwave/util.py
@@ -8,6 +8,7 @@ MAINTAINER Paulus Schoutsen <Paulus@PaulusSchoutsen.nl>
#ENV INSTALL_LIBCEC no
#ENV INSTALL_PHANTOMJS no
#ENV INSTALL_COAP_CLIENT no
#ENV INSTALL_SSOCR no

VOLUME /config
@@ -31,50 +31,8 @@ def attempt_use_uvloop():
        pass


def monkey_patch_asyncio():
    """Replace weakref.WeakSet to address Python 3 bug.

    Under heavy threading operations that schedule calls into
    the asyncio event loop, Task objects are created. Due to
    a bug in Python, GC may have an issue when switching between
    the threads and objects with __del__ (which various components
    in HASS have).

    This monkey-patch removes the weakref.Weakset, and replaces it
    with an object that ignores the only call utilizing it (the
    Task.__init__ which calls _all_tasks.add(self)). It also removes
    the __del__ which could trigger the future objects __del__ at
    unpredictable times.

    The side-effect of this manipulation of the Task is that
    Task.all_tasks() is no longer accurate, and there will be no
    warning emitted if a Task is GC'd while in use.

    On Python 3.6, after the bug is fixed, this monkey-patch can be
    disabled.

    See https://bugs.python.org/issue26617 for details of the Python
    bug.
    """
    # pylint: disable=no-self-use, protected-access, bare-except
    import asyncio.tasks

    class IgnoreCalls:
        """Ignore add calls."""

        def add(self, other):
            """No-op add."""
            return

    asyncio.tasks.Task._all_tasks = IgnoreCalls()
    try:
        del asyncio.tasks.Task.__del__
    except:
        pass


def validate_python() -> None:
    """Validate we're running the right Python version."""
    """Validate that the right Python version is running."""
    if sys.platform == "win32" and \
            sys.version_info[:3] < REQUIRED_PYTHON_VER_WIN:
        print("Home Assistant requires at least Python {}.{}.{}".format(
@@ -215,7 +173,7 @@ def daemonize() -> None:


def check_pid(pid_file: str) -> None:
    """Check that HA is not already running."""
    """Check that Home Assistant is not already running."""
    # Check pid file
    try:
        pid = int(open(pid_file, 'r').readline())
@@ -329,7 +287,7 @@ def setup_and_run_hass(config_dir: str,


def try_to_restart() -> None:
    """Attempt to clean up state and start a new homeassistant instance."""
    """Attempt to clean up state and start a new Home Assistant instance."""
    # Things should be mostly shut down already at this point, now just try
    # to clean up things that may have been left behind.
    sys.stderr.write('Home Assistant attempting to restart.\n')
@@ -361,11 +319,11 @@ def try_to_restart() -> None:
    else:
        os.closerange(3, max_fd)

    # Now launch into a new instance of Home-Assistant. If this fails we
    # Now launch into a new instance of Home Assistant. If this fails we
    # fall through and exit with error 100 (RESTART_EXIT_CODE) in which case
    # systemd will restart us when RestartForceExitStatus=100 is set in the
    # systemd.service file.
    sys.stderr.write("Restarting Home-Assistant\n")
    sys.stderr.write("Restarting Home Assistant\n")
    args = cmdline()
    os.execv(args[0], args)

@@ -374,18 +332,13 @@ def main() -> int:
    """Start Home Assistant."""
    validate_python()

    if os.environ.get('HASS_MONKEYPATCH_ASYNCIO') == '1':
        if sys.version_info[:3] >= (3, 6):
    if os.environ.get('HASS_NO_MONKEY') != '1':
        if sys.version_info[:2] >= (3, 6):
            monkey_patch.disable_c_asyncio()
            monkey_patch.patch_weakref_tasks()
        elif sys.version_info[:3] < (3, 5, 3):
            monkey_patch.patch_weakref_tasks()

    attempt_use_uvloop()

    if sys.version_info[:3] < (3, 5, 3):
        monkey_patch_asyncio()

    args = get_arguments()

    if args.script is not None:
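The in-file monkey_patch_asyncio() helper removed above is replaced by calls into a shared homeassistant.monkey_patch module. Based purely on the removed code and the calls now made in main(), a minimal sketch of what patch_weakref_tasks() presumably does (module name and layout are assumptions, not confirmed by this diff):

def patch_weakref_tasks():
    """Replace weakref.WeakSet to address the Python GC bug (sketch).

    Mirrors the removed monkey_patch_asyncio() above; assumed layout.
    """
    import asyncio.tasks

    class IgnoreCalls:
        """Ignore add calls."""

        def add(self, other):
            """No-op add."""
            return

    # Tasks are no longer tracked, so Task.all_tasks() becomes inaccurate.
    asyncio.tasks.Task._all_tasks = IgnoreCalls()
    try:
        del asyncio.tasks.Task.__del__
    except AttributeError:
        pass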
homeassistant/components/alarm_control_panel/spc.py (new file, 96 lines)
@@ -0,0 +1,96 @@
"""
Support for Vanderbilt (formerly Siemens) SPC alarm systems.

For more details about this platform, please refer to the documentation at
https://home-assistant.io/components/alarm_control_panel.spc/
"""
import asyncio
import logging

import homeassistant.components.alarm_control_panel as alarm
from homeassistant.components.spc import (
    SpcWebGateway, ATTR_DISCOVER_AREAS, DATA_API, DATA_REGISTRY)
from homeassistant.const import (
    STATE_ALARM_ARMED_AWAY, STATE_ALARM_ARMED_HOME, STATE_ALARM_DISARMED,
    STATE_UNKNOWN)


_LOGGER = logging.getLogger(__name__)

SPC_AREA_MODE_TO_STATE = {'0': STATE_ALARM_DISARMED,
                          '1': STATE_ALARM_ARMED_HOME,
                          '3': STATE_ALARM_ARMED_AWAY}


def _get_alarm_state(spc_mode):
    return SPC_AREA_MODE_TO_STATE.get(spc_mode, STATE_UNKNOWN)


@asyncio.coroutine
def async_setup_platform(hass, config, async_add_entities,
                         discovery_info=None):
    """Set up the SPC alarm control panel platform."""
    if (discovery_info is None or
            discovery_info[ATTR_DISCOVER_AREAS] is None):
        return

    entities = [SpcAlarm(hass=hass,
                         area_id=area['id'],
                         name=area['name'],
                         state=_get_alarm_state(area['mode']))
                for area in discovery_info[ATTR_DISCOVER_AREAS]]

    async_add_entities(entities)


class SpcAlarm(alarm.AlarmControlPanel):
    """Represents the SPC alarm panel."""

    def __init__(self, hass, area_id, name, state):
        """Initialize the SPC alarm panel."""
        self._hass = hass
        self._area_id = area_id
        self._name = name
        self._state = state
        self._api = hass.data[DATA_API]

        hass.data[DATA_REGISTRY].register_alarm_device(area_id, self)

    @asyncio.coroutine
    def async_update_from_spc(self, state):
        """Update the alarm panel with a new state."""
        self._state = state
        yield from self.async_update_ha_state()

    @property
    def should_poll(self):
        """No polling needed."""
        return False

    @property
    def name(self):
        """Return the name of the device."""
        return self._name

    @property
    def state(self):
        """Return the state of the device."""
        return self._state

    @asyncio.coroutine
    def async_alarm_disarm(self, code=None):
        """Send disarm command."""
        yield from self._api.send_area_command(
            self._area_id, SpcWebGateway.AREA_COMMAND_UNSET)

    @asyncio.coroutine
    def async_alarm_arm_home(self, code=None):
        """Send arm home command."""
        yield from self._api.send_area_command(
            self._area_id, SpcWebGateway.AREA_COMMAND_PART_SET)

    @asyncio.coroutine
    def async_alarm_arm_away(self, code=None):
        """Send arm away command."""
        yield from self._api.send_area_command(
            self._area_id, SpcWebGateway.AREA_COMMAND_SET)
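A quick illustration of the mode-to-state mapping above, evaluated in the module's context (values taken directly from SPC_AREA_MODE_TO_STATE; any unlisted mode falls back to STATE_UNKNOWN):

# Assumes the names defined in the file above are in scope.
assert _get_alarm_state('1') == STATE_ALARM_ARMED_HOME   # part set
assert _get_alarm_state('3') == STATE_ALARM_ARMED_AWAY   # full set
assert _get_alarm_state('9') == STATE_UNKNOWN            # unmapped mode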
@@ -29,6 +29,7 @@ import homeassistant.helpers.config_validation as cv
from homeassistant.components.frontend import register_built_in_panel

DOMAIN = 'automation'
DEPENDENCIES = ['group']
ENTITY_ID_FORMAT = DOMAIN + '.{}'

GROUP_NAME_ALL_AUTOMATIONS = 'all automations'

@@ -80,6 +80,12 @@ class EnOceanBinarySensor(enocean.EnOceanDevice, BinarySensorDevice):
        elif value2 == 0x10:
            self.which = 1
            self.onoff = 1
        elif value2 == 0x37:
            self.which = 10
            self.onoff = 0
        elif value2 == 0x15:
            self.which = 10
            self.onoff = 1
        self.hass.bus.fire('button_pressed', {'id': self.dev_id,
                                              'pushed': value,
                                              'which': self.which,
@@ -1,5 +1,5 @@
"""
Support for Homematic binary sensors.
Support for HomeMatic binary sensors.

For more details about this platform, please refer to the documentation at
https://home-assistant.io/components/binary_sensor.homematic/
@@ -29,7 +29,7 @@ SENSOR_TYPES_CLASS = {


def setup_platform(hass, config, add_devices, discovery_info=None):
    """Set up the Homematic binary sensor platform."""
    """Set up the HomeMatic binary sensor platform."""
    if discovery_info is None:
        return

@@ -43,7 +43,7 @@ def setup_platform(hass, config, add_devices, discovery_info=None):


class HMBinarySensor(HMDevice, BinarySensorDevice):
    """Representation of a binary Homematic device."""
    """Representation of a binary HomeMatic device."""

    @property
    def is_on(self):
@@ -54,16 +54,14 @@ class HMBinarySensor(HMDevice, BinarySensorDevice):

    @property
    def device_class(self):
        """Return the class of this sensor, from DEVICE_CLASSES."""
        # If state is MOTION (RemoteMotion works only)
        """Return the class of this sensor from DEVICE_CLASSES."""
        # If state is MOTION (Only RemoteMotion working)
        if self._state == 'MOTION':
            return 'motion'
        return SENSOR_TYPES_CLASS.get(self._hmdevice.__class__.__name__, None)

    def _init_data_struct(self):
        """Generate a data struct (self._data) from the Homematic metadata."""
        # add state to data struct
        """Generate the data dictionary (self._data) from metadata."""
        # Add state to data struct
        if self._state:
            _LOGGER.debug("%s init datastruct with main node '%s'", self._name,
                          self._state)
            self._data.update({self._state: STATE_UNKNOWN})
homeassistant/components/binary_sensor/spc.py (new file, 99 lines)
@@ -0,0 +1,99 @@
"""
Support for Vanderbilt (formerly Siemens) SPC alarm systems.

For more details about this platform, please refer to the documentation at
https://home-assistant.io/components/binary_sensor.spc/
"""
import logging
import asyncio

from homeassistant.components.spc import (
    ATTR_DISCOVER_DEVICES, DATA_REGISTRY)
from homeassistant.components.binary_sensor import BinarySensorDevice
from homeassistant.const import (STATE_UNAVAILABLE, STATE_ON, STATE_OFF)


_LOGGER = logging.getLogger(__name__)

SPC_TYPE_TO_DEVICE_CLASS = {'0': 'motion',
                            '1': 'opening',
                            '3': 'smoke'}


SPC_INPUT_TO_SENSOR_STATE = {'0': STATE_OFF,
                             '1': STATE_ON}


def _get_device_class(spc_type):
    return SPC_TYPE_TO_DEVICE_CLASS.get(spc_type, None)


def _get_sensor_state(spc_input):
    return SPC_INPUT_TO_SENSOR_STATE.get(spc_input, STATE_UNAVAILABLE)


def _create_sensor(hass, zone):
    return SpcBinarySensor(zone_id=zone['id'],
                           name=zone['zone_name'],
                           state=_get_sensor_state(zone['input']),
                           device_class=_get_device_class(zone['type']),
                           spc_registry=hass.data[DATA_REGISTRY])


@asyncio.coroutine
def async_setup_platform(hass, config, async_add_entities,
                         discovery_info=None):
    """Initialize the platform."""
    if (discovery_info is None or
            discovery_info[ATTR_DISCOVER_DEVICES] is None):
        return

    async_add_entities(
        _create_sensor(hass, zone)
        for zone in discovery_info[ATTR_DISCOVER_DEVICES]
        if _get_device_class(zone['type']))


class SpcBinarySensor(BinarySensorDevice):
    """Represents a sensor based on an SPC zone."""

    def __init__(self, zone_id, name, state, device_class, spc_registry):
        """Initialize the sensor device."""
        self._zone_id = zone_id
        self._name = name
        self._state = state
        self._device_class = device_class

        spc_registry.register_sensor_device(zone_id, self)

    @asyncio.coroutine
    def async_update_from_spc(self, state):
        """Update the state of the device."""
        self._state = state
        yield from self.async_update_ha_state()

    @property
    def name(self):
        """The name of the device."""
        return self._name

    @property
    def is_on(self):
        """Whether the device is switched on."""
        return self._state == STATE_ON

    @property
    def hidden(self) -> bool:
        """Whether the device is hidden by default."""
        # these type of sensors are probably mainly used for automations
        return True

    @property
    def should_poll(self):
        """No polling needed."""
        return False

    @property
    def device_class(self):
        """The device class."""
        return self._device_class
homeassistant/components/binary_sensor/tapsaff.py (new file, 86 lines)
@@ -0,0 +1,86 @@
"""
Support for Taps Affs.

For more details about this platform, please refer to the documentation at
https://home-assistant.io/components/binary_sensor.tapsaff/
"""
import logging
from datetime import timedelta

import voluptuous as vol

import homeassistant.helpers.config_validation as cv
from homeassistant.components.binary_sensor import (
    BinarySensorDevice, PLATFORM_SCHEMA)
from homeassistant.const import (CONF_NAME)

REQUIREMENTS = ['tapsaff==0.1.3']

_LOGGER = logging.getLogger(__name__)

CONF_LOCATION = 'location'

DEFAULT_NAME = 'Taps Aff'

SCAN_INTERVAL = timedelta(minutes=30)

PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
    vol.Required(CONF_LOCATION): cv.string,
    vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
})


def setup_platform(hass, config, add_devices, discovery_info=None):
    """Set up the Taps Aff binary sensor."""
    name = config.get(CONF_NAME)
    location = config.get(CONF_LOCATION)

    taps_aff_data = TapsAffData(location)

    add_devices([TapsAffSensor(taps_aff_data, name)], True)


class TapsAffSensor(BinarySensorDevice):
    """Implementation of a Taps Aff binary sensor."""

    def __init__(self, taps_aff_data, name):
        """Initialize the Taps Aff sensor."""
        self.data = taps_aff_data
        self._name = name

    @property
    def name(self):
        """Return the name of the sensor."""
        return '{}'.format(self._name)

    @property
    def is_on(self):
        """Return true if taps aff."""
        return self.data.is_taps_aff

    def update(self):
        """Get the latest data."""
        self.data.update()


class TapsAffData(object):
    """Class for handling the data retrieval for pins."""

    def __init__(self, location):
        """Initialize the sensor."""
        from tapsaff import TapsAff

        self._is_taps_aff = None
        self.taps_aff = TapsAff(location)

    @property
    def is_taps_aff(self):
        """Return true if taps aff."""
        return self._is_taps_aff

    def update(self):
        """Get the latest data from the Taps Aff API and updates the states."""
        try:
            self._is_taps_aff = self.taps_aff.is_taps_aff
        except RuntimeError:
            _LOGGER.error("Update failed. Check configured location")
@@ -39,7 +39,6 @@ def setup_platform(hass, config, add_devices, disc_info=None):
        for data in disc_info[CONF_ENTITIES] if data[CONF_TRACK]])


# pylint: disable=too-many-instance-attributes
class GoogleCalendarEventDevice(CalendarEventDevice):
    """A calendar event device."""
homeassistant/components/camera/onvif.py (new file, 102 lines)
@@ -0,0 +1,102 @@
"""
Support for ONVIF Cameras with FFmpeg as decoder.

For more details about this platform, please refer to the documentation at
https://home-assistant.io/components/camera.onvif/
"""
import asyncio
import logging

import voluptuous as vol

from homeassistant.const import (
    CONF_NAME, CONF_HOST, CONF_USERNAME, CONF_PASSWORD, CONF_PORT)
from homeassistant.components.camera import Camera, PLATFORM_SCHEMA
from homeassistant.components.ffmpeg import (
    DATA_FFMPEG)
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.aiohttp_client import (
    async_aiohttp_proxy_stream)

_LOGGER = logging.getLogger(__name__)

REQUIREMENTS = ['onvif-py3==0.1.3',
                'suds-py3==1.3.3.0',
                'http://github.com/tgaugry/suds-passworddigest-py3'
                '/archive/86fc50e39b4d2b8997481967d6a7fe1c57118999.zip'
                '#suds-passworddigest-py3==0.1.2a']
DEPENDENCIES = ['ffmpeg']
DEFAULT_NAME = 'ONVIF Camera'
DEFAULT_PORT = 5000
DEFAULT_USERNAME = 'admin'
DEFAULT_PASSWORD = '888888'

PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
    vol.Required(CONF_HOST): cv.string,
    vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
    vol.Optional(CONF_PASSWORD, default=DEFAULT_PASSWORD): cv.string,
    vol.Optional(CONF_USERNAME, default=DEFAULT_USERNAME): cv.string,
    vol.Optional(CONF_PORT, default=DEFAULT_PORT): cv.port,
})


@asyncio.coroutine
def async_setup_platform(hass, config, async_add_devices, discovery_info=None):
    """Set up a ONVIF camera."""
    if not hass.data[DATA_FFMPEG].async_run_test(config.get(CONF_HOST)):
        return
    async_add_devices([ONVIFCamera(hass, config)])


class ONVIFCamera(Camera):
    """An implementation of an ONVIF camera."""

    def __init__(self, hass, config):
        """Initialize a ONVIF camera."""
        from onvif import ONVIFService
        super().__init__()

        self._name = config.get(CONF_NAME)
        self._ffmpeg_arguments = '-q:v 2'
        media = ONVIFService(
            'http://{}:{}/onvif/device_service'.format(
                config.get(CONF_HOST), config.get(CONF_PORT)),
            config.get(CONF_USERNAME),
            config.get(CONF_PASSWORD),
            '{}/deps/onvif/wsdl/media.wsdl'.format(hass.config.config_dir)
        )
        self._input = media.GetStreamUri().Uri
        _LOGGER.debug("ONVIF Camera Using the following URL for %s: %s",
                      self._name, self._input)

    @asyncio.coroutine
    def async_camera_image(self):
        """Return a still image response from the camera."""
        from haffmpeg import ImageFrame, IMAGE_JPEG
        ffmpeg = ImageFrame(
            self.hass.data[DATA_FFMPEG].binary, loop=self.hass.loop)

        image = yield from ffmpeg.get_image(
            self._input, output_format=IMAGE_JPEG,
            extra_cmd=self._ffmpeg_arguments)
        return image

    @asyncio.coroutine
    def handle_async_mjpeg_stream(self, request):
        """Generate an HTTP MJPEG stream from the camera."""
        from haffmpeg import CameraMjpeg

        stream = CameraMjpeg(self.hass.data[DATA_FFMPEG].binary,
                             loop=self.hass.loop)
        yield from stream.open_camera(
            self._input, extra_cmd=self._ffmpeg_arguments)

        yield from async_aiohttp_proxy_stream(
            self.hass, request, stream,
            'multipart/x-mixed-replace;boundary=ffserver')
        yield from stream.close()

    @property
    def name(self):
        """Return the name of this camera."""
        return self._name
homeassistant/components/climate/flexit.py (new file, 148 lines)
@@ -0,0 +1,148 @@
"""
Platform for Flexit AC units with CI66 Modbus adapter.

Example configuration:

climate:
  - platform: flexit
    name: Main AC
    slave: 21

For more details about this platform, please refer to the documentation
https://home-assistant.io/components/climate.flexit/
"""
import logging
import voluptuous as vol

from homeassistant.const import (
    CONF_NAME, CONF_SLAVE, TEMP_CELSIUS,
    ATTR_TEMPERATURE, DEVICE_DEFAULT_NAME)
from homeassistant.components.climate import (ClimateDevice, PLATFORM_SCHEMA)
import homeassistant.components.modbus as modbus
import homeassistant.helpers.config_validation as cv

REQUIREMENTS = ['pyflexit==0.3']
DEPENDENCIES = ['modbus']

PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
    vol.Required(CONF_SLAVE): vol.All(int, vol.Range(min=0, max=32)),
    vol.Optional(CONF_NAME, default=DEVICE_DEFAULT_NAME): cv.string
})

_LOGGER = logging.getLogger(__name__)


def setup_platform(hass, config, add_devices, discovery_info=None):
    """Set up the Flexit Platform."""
    modbus_slave = config.get(CONF_SLAVE, None)
    name = config.get(CONF_NAME, None)
    add_devices([Flexit(modbus_slave, name)], True)


class Flexit(ClimateDevice):
    """Representation of a Flexit AC unit."""

    def __init__(self, modbus_slave, name):
        """Initialize the unit."""
        from pyflexit import pyflexit
        self._name = name
        self._slave = modbus_slave
        self._target_temperature = None
        self._current_temperature = None
        self._current_fan_mode = None
        self._current_operation = None
        self._fan_list = ['Off', 'Low', 'Medium', 'High']
        self._current_operation = None
        self._filter_hours = None
        self._filter_alarm = None
        self._heat_recovery = None
        self._heater_enabled = False
        self._heating = None
        self._cooling = None
        self._alarm = False
        self.unit = pyflexit.pyflexit(modbus.HUB, modbus_slave)

    def update(self):
        """Update unit attributes."""
        if not self.unit.update():
            _LOGGER.warning("Modbus read failed")

        self._target_temperature = self.unit.get_target_temp
        self._current_temperature = self.unit.get_temp
        self._current_fan_mode =\
            self._fan_list[self.unit.get_fan_speed]
        self._filter_hours = self.unit.get_filter_hours
        # Mechanical heat recovery, 0-100%
        self._heat_recovery = self.unit.get_heat_recovery
        # Heater active 0-100%
        self._heating = self.unit.get_heating
        # Cooling active 0-100%
        self._cooling = self.unit.get_cooling
        # Filter alarm 0/1
        self._filter_alarm = self.unit.get_filter_alarm
        # Heater enabled or not. Does not mean it's necessarily heating
        self._heater_enabled = self.unit.get_heater_enabled
        # Current operation mode
        self._current_operation = self.unit.get_operation

    @property
    def device_state_attributes(self):
        """Return device specific state attributes."""
        return {
            'filter_hours': self._filter_hours,
            'filter_alarm': self._filter_alarm,
            'heat_recovery': self._heat_recovery,
            'heating': self._heating,
            'heater_enabled': self._heater_enabled,
            'cooling': self._cooling
        }

    @property
    def should_poll(self):
        """Return the polling state."""
        return True

    @property
    def name(self):
        """Return the name of the climate device."""
        return self._name

    @property
    def temperature_unit(self):
        """Return the unit of measurement."""
        return TEMP_CELSIUS

    @property
    def current_temperature(self):
        """Return the current temperature."""
        return self._current_temperature

    @property
    def target_temperature(self):
        """Return the temperature we try to reach."""
        return self._target_temperature

    @property
    def current_operation(self):
        """Return current operation ie. heat, cool, idle."""
        return self._current_operation

    @property
    def current_fan_mode(self):
        """Return the fan setting."""
        return self._current_fan_mode

    @property
    def fan_list(self):
        """Return the list of available fan modes."""
        return self._fan_list

    def set_temperature(self, **kwargs):
        """Set new target temperature."""
        if kwargs.get(ATTR_TEMPERATURE) is not None:
            self._target_temperature = kwargs.get(ATTR_TEMPERATURE)
        self.unit.set_temp(self._target_temperature)

    def set_fan_mode(self, fan):
        """Set new fan mode."""
        self.unit.set_fan_speed(fan)
@@ -45,7 +45,7 @@ def setup_platform(hass, config, add_devices, discovery_info=None):
    add_devices([WinkAC(climate, hass, temp_unit)])


# pylint: disable=abstract-method,too-many-public-methods, too-many-branches
# pylint: disable=abstract-method
class WinkThermostat(WinkDevice, ClimateDevice):
    """Representation of a Wink thermostat."""
@@ -14,11 +14,13 @@ from homeassistant import core
from homeassistant.const import (
    ATTR_ENTITY_ID, SERVICE_TURN_OFF, SERVICE_TURN_ON)
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers import script


REQUIREMENTS = ['fuzzywuzzy==0.15.0']

ATTR_TEXT = 'text'

ATTR_SENTENCE = 'sentence'
DOMAIN = 'conversation'

REGEX_TURN_COMMAND = re.compile(r'turn (?P<name>(?: |\w)+) (?P<command>\w+)')
@@ -29,9 +31,12 @@ SERVICE_PROCESS_SCHEMA = vol.Schema({
    vol.Required(ATTR_TEXT): vol.All(cv.string, vol.Lower),
})

CONFIG_SCHEMA = vol.Schema({
    DOMAIN: vol.Schema({}),
}, extra=vol.ALLOW_EXTRA)
CONFIG_SCHEMA = vol.Schema({DOMAIN: vol.Schema({
    cv.string: vol.Schema({
        vol.Required(ATTR_SENTENCE): cv.string,
        vol.Required('action'): cv.SCRIPT_SCHEMA,
    })
})}, extra=vol.ALLOW_EXTRA)


def setup(hass, config):
@@ -40,9 +45,30 @@ def setup(hass, config):
    from fuzzywuzzy import process as fuzzyExtract

    logger = logging.getLogger(__name__)
    config = config.get(DOMAIN, {})

    choices = {attrs[ATTR_SENTENCE]: script.Script(
        hass,
        attrs['action'],
        name)
               for name, attrs in config.items()}

    def process(service):
        """Parse text into commands."""
        # if actually configured
        if choices:
            text = service.data[ATTR_TEXT]
            match = fuzzyExtract.extractOne(text, choices.keys())
            scorelimit = 60  # arbitrary value
            logging.info(
                'matched up text %s and found %s',
                text,
                [match[0] if match[1] > scorelimit else 'nothing']
            )
            if match[1] > scorelimit:
                choices[match[0]].run()  # run respective script
                return

        text = service.data[ATTR_TEXT]
        match = REGEX_TURN_COMMAND.match(text)
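A minimal sketch of the fuzzy matching used in process() above, assuming the fuzzywuzzy release pinned in REQUIREMENTS (process.extractOne returns a (choice, score) pair; the sentences here are hypothetical, stand-ins for user-configured ATTR_SENTENCE values):

from fuzzywuzzy import process as fuzzyExtract

# Keys mirror the sentences a user would configure under conversation:.
choices = {'turn on the kitchen lights': None,
           'start the morning routine': None}

match = fuzzyExtract.extractOne('please turn on kitchen lights',
                                choices.keys())
# match is e.g. ('turn on the kitchen lights', 86); any score above the
# scorelimit of 60 triggers the script configured for that sentence.
print(match)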
@@ -27,6 +27,7 @@ from homeassistant.const import (
_LOGGER = logging.getLogger(__name__)

DOMAIN = 'cover'
DEPENDENCIES = ['group']
SCAN_INTERVAL = timedelta(seconds=15)

GROUP_NAME_ALL_COVERS = 'all covers'
@@ -1,5 +1,5 @@
"""
The homematic cover platform.
The HomeMatic cover platform.

For more details about this platform, please refer to the documentation at
https://home-assistant.io/components/cover.homematic/
@@ -29,7 +29,7 @@ def setup_platform(hass, config, add_devices, discovery_info=None):


class HMCover(HMDevice, CoverDevice):
    """Representation a Homematic Cover."""
    """Representation a HomeMatic Cover."""

    @property
    def current_cover_position(self):
@@ -70,7 +70,6 @@ class HMCover(HMDevice, CoverDevice):
        self._hmdevice.stop(self._channel)

    def _init_data_struct(self):
        """Generate a data dict (self._data) from hm metadata."""
        # Add state to data dict
        """Generate a data dictoinary (self._data) from metadata."""
        self._state = "LEVEL"
        self._data.update({self._state: STATE_UNKNOWN})
@@ -14,9 +14,7 @@ from homeassistant.const import (
import homeassistant.helpers.config_validation as cv
import homeassistant.loader as loader

REQUIREMENTS = [
    'https://github.com/arraylabs/pymyq/archive/v0.0.8.zip'
    '#pymyq==0.0.8']
REQUIREMENTS = ['pymyq==0.0.8']

_LOGGER = logging.getLogger(__name__)
@@ -27,6 +27,7 @@ from homeassistant.helpers.event import async_track_time_interval
from homeassistant.helpers.restore_state import async_get_last_state
from homeassistant.helpers.typing import GPSType, ConfigType, HomeAssistantType
import homeassistant.helpers.config_validation as cv
from homeassistant.loader import get_component
import homeassistant.util as util
from homeassistant.util.async import run_coroutine_threadsafe
import homeassistant.util.dt as dt_util
@@ -41,7 +42,7 @@ from homeassistant.const import (
_LOGGER = logging.getLogger(__name__)

DOMAIN = 'device_tracker'
DEPENDENCIES = ['zone']
DEPENDENCIES = ['zone', 'group']

GROUP_NAME_ALL_DEVICES = 'all devices'
ENTITY_ID_ALL_DEVICES = group.ENTITY_ID_FORMAT.format('all_devices')
@@ -122,12 +123,7 @@ def async_setup(hass: HomeAssistantType, config: ConfigType):
    """Set up the device tracker."""
    yaml_path = hass.config.path(YAML_DEVICES)

    try:
        conf = config.get(DOMAIN, [])
    except vol.Invalid as ex:
        async_log_exception(ex, DOMAIN, config, hass)
        return False
    else:
        conf = conf[0] if conf else {}
    consider_home = conf.get(CONF_CONSIDER_HOME, DEFAULT_CONSIDER_HOME)
    track_new = conf.get(CONF_TRACK_NEW, DEFAULT_TRACK_NEW)
@@ -180,7 +176,7 @@ def async_setup(hass: HomeAssistantType, config: ConfigType):
    if setup_tasks:
        yield from asyncio.wait(setup_tasks, loop=hass.loop)

    yield from tracker.async_setup_group()
    tracker.async_setup_group()

    @callback
    def async_device_tracker_discovered(service, info):
@@ -233,7 +229,7 @@ class DeviceTracker(object):
        self.mac_to_dev = {dev.mac: dev for dev in devices if dev.mac}
        self.consider_home = consider_home
        self.track_new = track_new
        self.group = None  # type: group.Group
        self.group = None
        self._is_updating = asyncio.Lock(loop=hass.loop)

        for dev in devices:
@@ -246,18 +242,21 @@ class DeviceTracker(object):
    def see(self, mac: str=None, dev_id: str=None, host_name: str=None,
            location_name: str=None, gps: GPSType=None, gps_accuracy=None,
            battery: str=None, attributes: dict=None,
            source_type: str=SOURCE_TYPE_GPS):
            source_type: str=SOURCE_TYPE_GPS, picture: str=None,
            icon: str=None):
        """Notify the device tracker that you see a device."""
        self.hass.add_job(
            self.async_see(mac, dev_id, host_name, location_name, gps,
                           gps_accuracy, battery, attributes, source_type)
                           gps_accuracy, battery, attributes, source_type,
                           picture, icon)
        )

    @asyncio.coroutine
    def async_see(self, mac: str=None, dev_id: str=None, host_name: str=None,
                  location_name: str=None, gps: GPSType=None,
                  gps_accuracy=None, battery: str=None, attributes: dict=None,
                  source_type: str=SOURCE_TYPE_GPS):
                  source_type: str=SOURCE_TYPE_GPS, picture: str=None,
                  icon: str=None):
        """Notify the device tracker that you see a device.

        This method is a coroutine.
@@ -285,7 +284,8 @@ class DeviceTracker(object):
            dev_id = util.ensure_unique_string(dev_id, self.devices.keys())
            device = Device(
                self.hass, self.consider_home, self.track_new,
                dev_id, mac, (host_name or dev_id).replace('_', ' '))
                dev_id, mac, (host_name or dev_id).replace('_', ' '),
                picture=picture, icon=icon)
            self.devices[dev_id] = device
            if mac is not None:
                self.mac_to_dev[mac] = device
@@ -303,9 +303,10 @@ class DeviceTracker(object):
            })

        # During init, we ignore the group
        if self.group is not None:
            yield from self.group.async_update_tracked_entity_ids(
                list(self.group.tracking) + [device.entity_id])
        if self.group and self.track_new:
            self.group.async_set_group(
                self.hass, util.slugify(GROUP_NAME_ALL_DEVICES), visible=False,
                name=GROUP_NAME_ALL_DEVICES, add=[device.entity_id])

        # lookup mac vendor string to be stored in config
        yield from device.set_vendor_for_mac()
@@ -327,16 +328,19 @@ class DeviceTracker(object):
            update_config, self.hass.config.path(YAML_DEVICES),
            dev_id, device)

    @asyncio.coroutine
    @callback
    def async_setup_group(self):
        """Initialize group for all tracked devices.

        This method is a coroutine.
        This method must be run in the event loop.
        """
        entity_ids = (dev.entity_id for dev in self.devices.values()
                      if dev.track)
        self.group = yield from group.Group.async_create_group(
            self.hass, GROUP_NAME_ALL_DEVICES, entity_ids, False)
        entity_ids = [dev.entity_id for dev in self.devices.values()
                      if dev.track]

        self.group = get_component('group')
        self.group.async_set_group(
            self.hass, util.slugify(GROUP_NAME_ALL_DEVICES), visible=False,
            name=GROUP_NAME_ALL_DEVICES, entity_ids=entity_ids)

    @callback
    def async_update_stale(self, now: dt_util.dt.datetime):
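For illustration, the new picture and icon parameters on see()/async_see() can presumably be passed through from a platform's scan callback like this (the dev_id, coordinates, and image path here are hypothetical):

# Inside a device_tracker platform; 'see' is the callback handed to
# setup_scanner(). Only picture and icon are new in this change.
see(dev_id='volvo_abc123',
    host_name='ABC123',
    gps=(59.3293, 18.0686),        # example coordinates
    source_type=SOURCE_TYPE_GPS,
    picture='/local/car.png',      # hypothetical local image
    icon='mdi:car')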
@@ -116,7 +116,6 @@ def async_setup_scanner(hass, config, async_see, discovery_info=None):
                      "key for topic %s", topic)
        return None

    # pylint: disable=too-many-return-statements
    def validate_payload(topic, payload, data_type):
        """Validate the OwnTracks payload."""
        try:
@@ -57,7 +57,7 @@ class Host(object):
    def update(self, see):
        """Update device state by sending one or more ping messages."""
        failed = 0
        while failed < self._count:  # check more times if host in unreachable
        while failed < self._count:  # check more times if host is unreachable
            if self.ping():
                see(dev_id=self.dev_id, source_type=SOURCE_TYPE_ROUTER)
                return True
@@ -19,7 +19,7 @@ from homeassistant.util import Throttle

_LOGGER = logging.getLogger(__name__)

REQUIREMENTS = ['pysnmp==4.3.7']
REQUIREMENTS = ['pysnmp==4.3.8']

CONF_COMMUNITY = 'community'
CONF_AUTHKEY = 'authkey'
@@ -5,7 +5,6 @@ For more details about this platform, please refer to the documentation at
https://home-assistant.io/components/device_tracker.unifi/
"""
import logging
import urllib
import voluptuous as vol

import homeassistant.helpers.config_validation as cv
@@ -15,7 +14,7 @@ from homeassistant.components.device_tracker import (
from homeassistant.const import CONF_HOST, CONF_USERNAME, CONF_PASSWORD
from homeassistant.const import CONF_VERIFY_SSL

REQUIREMENTS = ['pyunifi==2.12']
REQUIREMENTS = ['pyunifi==2.13']

_LOGGER = logging.getLogger(__name__)
CONF_PORT = 'port'
@@ -40,7 +39,7 @@ PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({

def get_scanner(hass, config):
    """Set up the Unifi device_tracker."""
    from pyunifi.controller import Controller
    from pyunifi.controller import Controller, APIError

    host = config[DOMAIN].get(CONF_HOST)
    username = config[DOMAIN].get(CONF_USERNAME)
@@ -53,7 +52,7 @@ def get_scanner(hass, config):
    try:
        ctrl = Controller(host, username, password, port, version='v4',
                          site_id=site_id, ssl_verify=verify_ssl)
    except urllib.error.HTTPError as ex:
    except APIError as ex:
        _LOGGER.error("Failed to connect to Unifi: %s", ex)
        persistent_notification.create(
            hass, 'Failed to connect to Unifi. '
@@ -77,9 +76,10 @@ class UnifiScanner(DeviceScanner):

    def _update(self):
        """Get the clients from the device."""
        from pyunifi.controller import APIError
        try:
            clients = self._controller.get_clients()
        except urllib.error.HTTPError as ex:
        except APIError as ex:
            _LOGGER.error("Failed to scan clients: %s", ex)
            clients = []
@@ -7,7 +7,10 @@ https://home-assistant.io/components/device_tracker.volvooncall/
import logging

from homeassistant.util import slugify
from homeassistant.components.volvooncall import DOMAIN
from homeassistant.helpers.dispatcher import (
    dispatcher_connect, dispatcher_send)
from homeassistant.components.volvooncall import (
    DATA_KEY, SIGNAL_VEHICLE_SEEN)

_LOGGER = logging.getLogger(__name__)

@@ -18,19 +21,19 @@ def setup_scanner(hass, config, see, discovery_info=None):
        return

    vin, _ = discovery_info
    vehicle = hass.data[DOMAIN].vehicles[vin]

    host_name = vehicle.registration_number
    dev_id = 'volvo_' + slugify(host_name)
    vehicle = hass.data[DATA_KEY].vehicles[vin]

    def see_vehicle(vehicle):
        """Handle the reporting of the vehicle position."""
        host_name = vehicle.registration_number
        dev_id = 'volvo_{}'.format(slugify(host_name))
        see(dev_id=dev_id,
            host_name=host_name,
            gps=(vehicle.position['latitude'],
                 vehicle.position['longitude']))
                 vehicle.position['longitude']),
            icon='mdi:car')

    hass.data[DOMAIN].entities[vin].append(see_vehicle)
    see_vehicle(vehicle)
    dispatcher_connect(hass, SIGNAL_VEHICLE_SEEN, see_vehicle)
    dispatcher_send(hass, SIGNAL_VEHICLE_SEEN, vehicle)

    return True
homeassistant/components/dyson.py (new file, 98 lines)
@@ -0,0 +1,98 @@
"""Parent component for Dyson Pure Cool Link devices."""

import logging

import voluptuous as vol

import homeassistant.helpers.config_validation as cv
from homeassistant.helpers import discovery
from homeassistant.const import CONF_USERNAME, CONF_PASSWORD, CONF_TIMEOUT, \
    CONF_DEVICES

REQUIREMENTS = ['libpurecoollink==0.1.5']

_LOGGER = logging.getLogger(__name__)

CONF_LANGUAGE = "language"
CONF_RETRY = "retry"

DEFAULT_TIMEOUT = 5
DEFAULT_RETRY = 10

DOMAIN = "dyson"

CONFIG_SCHEMA = vol.Schema({
    DOMAIN: vol.Schema({
        vol.Required(CONF_USERNAME): cv.string,
        vol.Required(CONF_PASSWORD): cv.string,
        vol.Required(CONF_LANGUAGE): cv.string,
        vol.Optional(CONF_TIMEOUT, default=DEFAULT_TIMEOUT): cv.positive_int,
        vol.Optional(CONF_RETRY, default=DEFAULT_RETRY): cv.positive_int,
        vol.Optional(CONF_DEVICES, default=[]):
            vol.All(cv.ensure_list, [dict]),
    })
}, extra=vol.ALLOW_EXTRA)

DYSON_DEVICES = "dyson_devices"


def setup(hass, config):
    """Set up the Dyson parent component."""
    _LOGGER.info("Creating new Dyson component")

    if DYSON_DEVICES not in hass.data:
        hass.data[DYSON_DEVICES] = []

    from libpurecoollink.dyson import DysonAccount
    dyson_account = DysonAccount(config[DOMAIN].get(CONF_USERNAME),
                                 config[DOMAIN].get(CONF_PASSWORD),
                                 config[DOMAIN].get(CONF_LANGUAGE))

    logged = dyson_account.login()

    timeout = config[DOMAIN].get(CONF_TIMEOUT)
    retry = config[DOMAIN].get(CONF_RETRY)

    if not logged:
        _LOGGER.error("Not connected to Dyson account. Unable to add devices")
        return False

    _LOGGER.info("Connected to Dyson account")
    dyson_devices = dyson_account.devices()
    if CONF_DEVICES in config[DOMAIN] and config[DOMAIN].get(CONF_DEVICES):
        configured_devices = config[DOMAIN].get(CONF_DEVICES)
        for device in configured_devices:
            dyson_device = next((d for d in dyson_devices if
                                 d.serial == device["device_id"]), None)
            if dyson_device:
                connected = dyson_device.connect(None, device["device_ip"],
                                                 timeout, retry)
                if connected:
                    _LOGGER.info("Connected to device %s", dyson_device)
                    hass.data[DYSON_DEVICES].append(dyson_device)
                else:
                    _LOGGER.warning("Unable to connect to device %s",
                                    dyson_device)
            else:
                _LOGGER.warning(
                    "Unable to find device %s in Dyson account",
                    device["device_id"])
    else:
        # Not yet reliable
        for device in dyson_devices:
            _LOGGER.info("Trying to connect to device %s with timeout=%i "
                         "and retry=%i", device, timeout, retry)
            connected = device.connect(None, None, timeout, retry)
            if connected:
                _LOGGER.info("Connected to device %s", device)
                hass.data[DYSON_DEVICES].append(device)
            else:
                _LOGGER.warning("Unable to connect to device %s", device)

    # Start fan/sensors components
    if hass.data[DYSON_DEVICES]:
        _LOGGER.debug("Starting sensor/fan components")
        discovery.load_platform(hass, "sensor", DOMAIN, {}, config)
        discovery.load_platform(hass, "fan", DOMAIN, {}, config)

    return True
@@ -24,7 +24,7 @@ from homeassistant.helpers.entity import Entity
from homeassistant.helpers.event import async_track_point_in_utc_time
from homeassistant.util.dt import utcnow

REQUIREMENTS = ['pyeight==0.0.6']
REQUIREMENTS = ['pyeight==0.0.7']

_LOGGER = logging.getLogger(__name__)
@@ -25,7 +25,7 @@ import homeassistant.helpers.config_validation as cv
_LOGGER = logging.getLogger(__name__)

DOMAIN = 'fan'

DEPENDENCIES = ['group']
SCAN_INTERVAL = timedelta(seconds=30)

GROUP_NAME_ALL_FANS = 'all fans'
@@ -73,7 +73,7 @@ FAN_TURN_ON_SCHEMA = vol.Schema({
})  # type: dict

FAN_TURN_OFF_SCHEMA = vol.Schema({
    vol.Required(ATTR_ENTITY_ID): cv.entity_ids
    vol.Optional(ATTR_ENTITY_ID): cv.entity_ids
})  # type: dict

FAN_OSCILLATE_SCHEMA = vol.Schema({
@@ -139,9 +139,7 @@ def turn_on(hass, entity_id: str=None, speed: str=None) -> None:

def turn_off(hass, entity_id: str=None) -> None:
    """Turn all or specified fan off."""
    data = {
        ATTR_ENTITY_ID: entity_id,
    }
    data = {ATTR_ENTITY_ID: entity_id} if entity_id else {}

    hass.services.call(DOMAIN, SERVICE_TURN_OFF, data)

@@ -218,8 +216,7 @@ def async_setup(hass, config: dict):
        if not fan.should_poll:
            continue

        update_coro = hass.async_add_job(
            fan.async_update_ha_state(True))
        update_coro = hass.async_add_job(fan.async_update_ha_state(True))
        if hasattr(fan, 'async_update'):
            update_tasks.append(update_coro)
        else:
@@ -9,31 +9,36 @@ from homeassistant.components.fan import (SPEED_LOW, SPEED_MEDIUM, SPEED_HIGH,
                                           SUPPORT_OSCILLATE, SUPPORT_DIRECTION)
from homeassistant.const import STATE_OFF

FAN_NAME = 'Living Room Fan'
FAN_ENTITY_ID = 'fan.living_room_fan'

DEMO_SUPPORT = SUPPORT_SET_SPEED | SUPPORT_OSCILLATE | SUPPORT_DIRECTION
FULL_SUPPORT = SUPPORT_SET_SPEED | SUPPORT_OSCILLATE | SUPPORT_DIRECTION
LIMITED_SUPPORT = SUPPORT_SET_SPEED


# pylint: disable=unused-argument
def setup_platform(hass, config, add_devices_callback, discovery_info=None):
    """Set up the demo fan platform."""
    add_devices_callback([
        DemoFan(hass, FAN_NAME, STATE_OFF),
        DemoFan(hass, "Living Room Fan", FULL_SUPPORT),
        DemoFan(hass, "Ceiling Fan", LIMITED_SUPPORT),
    ])


class DemoFan(FanEntity):
    """A demonstration fan component."""

    def __init__(self, hass, name: str, initial_state: str) -> None:
    def __init__(self, hass, name: str, supported_features: int) -> None:
        """Initialize the entity."""
        self.hass = hass
        self._speed = initial_state
        self.oscillating = False
        self.direction = "forward"
        self._supported_features = supported_features
        self._speed = STATE_OFF
        self.oscillating = None
        self.direction = None
        self._name = name

        if supported_features & SUPPORT_OSCILLATE:
            self.oscillating = False
        if supported_features & SUPPORT_DIRECTION:
            self.direction = "forward"

    @property
    def name(self) -> str:
        """Get entity name."""
@@ -88,4 +93,4 @@ class DemoFan(FanEntity):
    @property
    def supported_features(self) -> int:
        """Flag supported features."""
        return DEMO_SUPPORT
        return self._supported_features
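A short illustration of the feature-flag bitmask pattern the demo fan now uses (the SUPPORT_* constants come from homeassistant.components.fan, as in the diff above; the checks mirror the ones added in __init__):

from homeassistant.components.fan import (
    SUPPORT_SET_SPEED, SUPPORT_OSCILLATE, SUPPORT_DIRECTION)

LIMITED_SUPPORT = SUPPORT_SET_SPEED

# A fan created with LIMITED_SUPPORT advertises speed control only:
assert LIMITED_SUPPORT & SUPPORT_SET_SPEED         # non-zero -> supported
assert not LIMITED_SUPPORT & SUPPORT_OSCILLATE     # zero -> oscillation hidden
assert not LIMITED_SUPPORT & SUPPORT_DIRECTION     # zero -> direction hidden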
homeassistant/components/fan/dyson.py (new file, 218 lines)
@@ -0,0 +1,218 @@
"""Support for Dyson Pure Cool link fan."""
import logging
import asyncio
from os import path
import voluptuous as vol
import homeassistant.helpers.config_validation as cv
from homeassistant.components.fan import (FanEntity, SUPPORT_OSCILLATE,
                                          SUPPORT_SET_SPEED,
                                          DOMAIN)
from homeassistant.helpers.entity import ToggleEntity
from homeassistant.components.dyson import DYSON_DEVICES
from homeassistant.config import load_yaml_config_file

DEPENDENCIES = ['dyson']

_LOGGER = logging.getLogger(__name__)


DYSON_FAN_DEVICES = "dyson_fan_devices"
SERVICE_SET_NIGHT_MODE = 'dyson_set_night_mode'

DYSON_SET_NIGHT_MODE_SCHEMA = vol.Schema({
    vol.Required('entity_id'): cv.entity_id,
    vol.Required('night_mode'): cv.boolean
})


def setup_platform(hass, config, add_devices, discovery_info=None):
    """Setup the Dyson fan components."""
    _LOGGER.info("Creating new Dyson fans")
    if DYSON_FAN_DEVICES not in hass.data:
        hass.data[DYSON_FAN_DEVICES] = []

    # Get Dyson Devices from parent component
    for device in hass.data[DYSON_DEVICES]:
        dyson_entity = DysonPureCoolLinkDevice(hass, device)
        hass.data[DYSON_FAN_DEVICES].append(dyson_entity)

    add_devices(hass.data[DYSON_FAN_DEVICES])

    descriptions = load_yaml_config_file(
        path.join(path.dirname(__file__), 'services.yaml'))

    def service_handle(service):
        """Handle dyson services."""
        entity_id = service.data.get('entity_id')
        night_mode = service.data.get('night_mode')
        fan_device = next([fan for fan in hass.data[DYSON_FAN_DEVICES] if
                           fan.entity_id == entity_id].__iter__(), None)
        if fan_device is None:
            _LOGGER.warning("Unable to find Dyson fan device %s",
                            str(entity_id))
            return

        if service.service == SERVICE_SET_NIGHT_MODE:
            fan_device.night_mode(night_mode)

    # Register dyson service(s)
    hass.services.register(DOMAIN, SERVICE_SET_NIGHT_MODE,
                           service_handle,
                           descriptions.get(SERVICE_SET_NIGHT_MODE),
                           schema=DYSON_SET_NIGHT_MODE_SCHEMA)


class DysonPureCoolLinkDevice(FanEntity):
    """Representation of a Dyson fan."""

    def __init__(self, hass, device):
        """Initialize the fan."""
        _LOGGER.info("Creating device %s", device.name)
        self.hass = hass
        self._device = device

    @asyncio.coroutine
    def async_added_to_hass(self):
        """Callback when entity is added to hass."""
        self.hass.async_add_job(
            self._device.add_message_listener(self.on_message))

    def on_message(self, message):
        """Called when new messages received from the fan."""
        _LOGGER.debug(
            "Message received for fan device %s : %s", self.name, message)
        self.schedule_update_ha_state()

    @property
    def should_poll(self):
        """No polling needed."""
        return False

    @property
    def name(self):
        """Return the display name of this fan."""
        return self._device.name

    def set_speed(self: ToggleEntity, speed: str) -> None:
        """Set the speed of the fan. Never called ??."""
        _LOGGER.debug("Set fan speed to: " + speed)
        from libpurecoollink.const import FanSpeed, FanMode
        if speed == FanSpeed.FAN_SPEED_AUTO.value:
            self._device.set_configuration(fan_mode=FanMode.AUTO)
        else:
            fan_speed = FanSpeed('{0:04d}'.format(int(speed)))
            self._device.set_configuration(fan_mode=FanMode.FAN,
                                           fan_speed=fan_speed)

    def turn_on(self: ToggleEntity, speed: str=None, **kwargs) -> None:
        """Turn on the fan."""
        _LOGGER.debug("Turn on fan %s with speed %s", self.name, speed)
        from libpurecoollink.const import FanSpeed, FanMode
        if speed:
            if speed == FanSpeed.FAN_SPEED_AUTO.value:
                self._device.set_configuration(fan_mode=FanMode.AUTO)
            else:
                fan_speed = FanSpeed('{0:04d}'.format(int(speed)))
                self._device.set_configuration(fan_mode=FanMode.FAN,
                                               fan_speed=fan_speed)
        else:
            # Speed not set, just turn on
            self._device.set_configuration(fan_mode=FanMode.FAN)

    def turn_off(self: ToggleEntity, **kwargs) -> None:
        """Turn off the fan."""
        _LOGGER.debug("Turn off fan %s", self.name)
        from libpurecoollink.const import FanMode
        self._device.set_configuration(fan_mode=FanMode.OFF)

    def oscillate(self: ToggleEntity, oscillating: bool) -> None:
        """Turn on/off oscillating."""
        _LOGGER.debug("Turn oscillation %s for device %s", oscillating,
                      self.name)
        from libpurecoollink.const import Oscillation

        if oscillating:
            self._device.set_configuration(
                oscillation=Oscillation.OSCILLATION_ON)
        else:
            self._device.set_configuration(
                oscillation=Oscillation.OSCILLATION_OFF)

    @property
    def oscillating(self):
        """Return the oscillation state."""
        return self._device.state and self._device.state.oscillation == "ON"

    @property
    def is_on(self):
        """Return true if the entity is on."""
        if self._device.state:
            return self._device.state.fan_state == "FAN"
        return False

    @property
    def speed(self) -> str:
        """Return the current speed."""
        if self._device.state:
            from libpurecoollink.const import FanSpeed
            if self._device.state.speed == FanSpeed.FAN_SPEED_AUTO.value:
                return self._device.state.speed
            else:
                return int(self._device.state.speed)
        return None

    @property
    def current_direction(self):
        """Return direction of the fan [forward, reverse]."""
        return None

    @property
    def is_night_mode(self):
        """Return Night mode."""
        return self._device.state.night_mode == "ON"

    def night_mode(self: ToggleEntity, night_mode: bool) -> None:
        """Turn fan in night mode."""
        _LOGGER.debug("Set %s night mode %s", self.name, night_mode)
        from libpurecoollink.const import NightMode
        if night_mode:
            self._device.set_configuration(night_mode=NightMode.NIGHT_MODE_ON)
        else:
            self._device.set_configuration(night_mode=NightMode.NIGHT_MODE_OFF)

    @property
    def is_auto_mode(self):
        """Return auto mode."""
        return self._device.state.fan_mode == "AUTO"

    def auto_mode(self: ToggleEntity, auto_mode: bool) -> None:
        """Turn fan in auto mode."""
        _LOGGER.debug("Set %s auto mode %s", self.name, auto_mode)
        from libpurecoollink.const import FanMode
        if auto_mode:
            self._device.set_configuration(fan_mode=FanMode.AUTO)
        else:
            self._device.set_configuration(fan_mode=FanMode.FAN)

    @property
    def speed_list(self: ToggleEntity) -> list:
        """Get the list of available speeds."""
        from libpurecoollink.const import FanSpeed
        supported_speeds = [FanSpeed.FAN_SPEED_AUTO.value,
                            int(FanSpeed.FAN_SPEED_1.value),
                            int(FanSpeed.FAN_SPEED_2.value),
                            int(FanSpeed.FAN_SPEED_3.value),
                            int(FanSpeed.FAN_SPEED_4.value),
                            int(FanSpeed.FAN_SPEED_5.value),
                            int(FanSpeed.FAN_SPEED_6.value),
                            int(FanSpeed.FAN_SPEED_7.value),
                            int(FanSpeed.FAN_SPEED_8.value),
                            int(FanSpeed.FAN_SPEED_9.value),
                            int(FanSpeed.FAN_SPEED_10.value)]

        return supported_speeds

    @property
    def supported_features(self: ToggleEntity) -> int:
        """Flag supported features."""
        return SUPPORT_OSCILLATE | SUPPORT_SET_SPEED
@ -58,7 +58,18 @@ set_direction:
  fields:
    entity_id:
      description: Name(s) of the entities to toggle
      exampl: 'fan.living_room'
      example: 'fan.living_room'
    direction:
      description: The direction to rotate
      example: 'left'

dyson_set_night_mode:
  description: Set the fan in night mode

  fields:
    entity_id:
      description: Name(s) of the entities to enable/disable night mode
      example: 'fan.living_room'
    night_mode:
      description: Night mode status
      example: true

@ -36,7 +36,7 @@ SPEED_TO_VALUE = {


def get_device(values, **kwargs):
    """Create zwave entity device."""
    """Create Z-Wave entity device."""
    return ZwaveFan(values)


@ -3,8 +3,8 @@
FINGERPRINTS = {
    "compatibility.js": "8e4c44b5f4288cc48ec1ba94a9bec812",
    "core.js": "d4a7cb8c80c62b536764e0e81385f6aa",
    "frontend.html": "ed18c05632c071eb4f7b012382d0f810",
    "mdi.html": "f407a5a57addbe93817ee1b244d33fbe",
    "frontend.html": "cca45decbed803e7f0ec0b4f6e18fe53",
    "mdi.html": "1a5ad9654c1f0e57440e30afd92846a5",
    "micromarkdown-js.html": "93b5ec4016f0bba585521cf4d18dec1a",
    "panels/ha-panel-automation.html": "21cba0a4fee9d2b45dda47f7a1dd82d8",
    "panels/ha-panel-config.html": "59d9eb28758b497a4d9b2428f978b9b1",
@ -18,6 +18,6 @@ FINGERPRINTS = {
    "panels/ha-panel-iframe.html": "d920f0aa3c903680f2f8795e2255daab",
    "panels/ha-panel-logbook.html": "6dd6a16f52117318b202e60f98400163",
    "panels/ha-panel-map.html": "31c592c239636f91e07c7ac232a5ebc4",
    "panels/ha-panel-zwave.html": "780a792213e98510b475f752c40ef0f9",
    "panels/ha-panel-zwave.html": "92edac58dd52c297c761fd9acec7f436",
    "websocket_test.html": "575de64b431fe11c3785bf96d7813450"
}

File diff suppressed because one or more lines are too long
Binary file not shown.
@ -1 +1 @@
Subproject commit 75679e90f2aa11bc1b42188965746217feef0ea6
Subproject commit 81ab4ff8a8ef7cc4b96b60f63c16472b0427adc7
File diff suppressed because one or more lines are too long
Binary file not shown.
@ -31,7 +31,7 @@
|
||||
});
|
||||
this.selectedNodeAttrs = att.sort();
|
||||
},
|
||||
});</script><dom-module id="zwave-values" assetpath="./"><template><style include="iron-flex ha-style">.content{margin-top:24px}paper-card{display:block;margin:0 auto;max-width:600px}.device-picker{@apply(--layout-horizontal);@apply(--layout-center-center);padding-left:24px;padding-right:24px;padding-bottom:24px}.help-text{padding-left:24px;padding-right:24px}</style><div class="content"><paper-card heading="Node Values"><div class="device-picker"><paper-dropdown-menu label="Value" class="flex"><paper-listbox class="dropdown-content" selected="{{selectedValue}}"><template is="dom-repeat" items="[[values]]" as="item"><paper-item>[[computeSelectCaption(item)]]</paper-item></template></paper-listbox></paper-dropdown-menu></div><template is="dom-if" if="[[!computeIsValueSelected(selectedValue)]]"><paper-input float-label="Value Name" type="text" value="{{newValueNameInput}}" placeholder="[[computeGetValueName(selectedValue)]]"></paper-input><ha-call-service-button hass="[[hass]]" domain="zwave" service="rename_value" service-data="[[computeValueNameServiceData(newValueNameInput)]]">Rename Value</ha-call-service-button></template></paper-card></div></template></dom-module><script>Polymer({
|
||||
});</script><dom-module id="zwave-values" assetpath="./"><template><style include="iron-flex ha-style">.content{margin-top:24px}paper-card{display:block;margin:0 auto;max-width:600px}.device-picker{@apply(--layout-horizontal);@apply(--layout-center-center);padding-left:24px;padding-right:24px;padding-bottom:24px}.help-text{padding-left:24px;padding-right:24px}</style><div class="content"><paper-card heading="Node Values"><div class="device-picker"><paper-dropdown-menu label="Value" class="flex"><paper-listbox class="dropdown-content" selected="{{selectedValue}}"><template is="dom-repeat" items="[[values]]" as="item"><paper-item>[[computeSelectCaption(item)]]</paper-item></template></paper-listbox></paper-dropdown-menu></div><template is="dom-if" if="[[!computeIsValueSelected(selectedValue)]]"><div class="card-actions"><paper-input float-label="Value Name" type="text" value="{{newValueNameInput}}" placeholder="[[computeGetValueName(selectedValue)]]"></paper-input><ha-call-service-button hass="[[hass]]" domain="zwave" service="rename_value" service-data="[[computeValueNameServiceData(newValueNameInput)]]">Rename Value</ha-call-service-button></div></template></paper-card></div></template></dom-module><script>Polymer({
|
||||
is: 'zwave-values',
|
||||
|
||||
properties: {
|
||||
@ -71,7 +71,7 @@
|
||||
},
|
||||
|
||||
computeSelectCaption: function (item) {
|
||||
return item.value.label;
|
||||
return item.value.label + ' (Instance: ' + item.value.instance + ', Index: ' + item.value.index + ')';
|
||||
},
|
||||
|
||||
computeGetValueName: function (selectedValue) {
|
||||
|
Binary file not shown.
File diff suppressed because one or more lines are too long
@ -14,7 +14,8 @@ from homeassistant import config as conf_util, core as ha
|
||||
from homeassistant.const import (
|
||||
ATTR_ENTITY_ID, CONF_ICON, CONF_NAME, STATE_CLOSED, STATE_HOME,
|
||||
STATE_NOT_HOME, STATE_OFF, STATE_ON, STATE_OPEN, STATE_LOCKED,
|
||||
STATE_UNLOCKED, STATE_UNKNOWN, ATTR_ASSUMED_STATE, SERVICE_RELOAD)
|
||||
STATE_UNLOCKED, STATE_OK, STATE_PROBLEM, STATE_UNKNOWN,
|
||||
ATTR_ASSUMED_STATE, SERVICE_RELOAD)
|
||||
from homeassistant.core import callback
|
||||
from homeassistant.helpers.entity import Entity, async_generate_entity_id
|
||||
from homeassistant.helpers.entity_component import EntityComponent
|
||||
@ -30,13 +31,23 @@ CONF_ENTITIES = 'entities'
|
||||
CONF_VIEW = 'view'
|
||||
CONF_CONTROL = 'control'
|
||||
|
||||
ATTR_ADD_ENTITIES = 'add_entities'
|
||||
ATTR_AUTO = 'auto'
|
||||
ATTR_CONTROL = 'control'
|
||||
ATTR_ENTITIES = 'entities'
|
||||
ATTR_ICON = 'icon'
|
||||
ATTR_NAME = 'name'
|
||||
ATTR_OBJECT_ID = 'object_id'
|
||||
ATTR_ORDER = 'order'
|
||||
ATTR_VIEW = 'view'
|
||||
ATTR_VISIBLE = 'visible'
|
||||
ATTR_CONTROL = 'control'
|
||||
|
||||
SERVICE_SET_VISIBILITY = 'set_visibility'
|
||||
SERVICE_SET = 'set'
|
||||
SERVICE_REMOVE = 'remove'
|
||||
|
||||
CONTROL_TYPES = vol.In(['hidden', None])
|
||||
|
||||
SET_VISIBILITY_SERVICE_SCHEMA = vol.Schema({
|
||||
vol.Optional(ATTR_ENTITY_ID): cv.entity_ids,
|
||||
vol.Required(ATTR_VISIBLE): cv.boolean
|
||||
@ -44,6 +55,21 @@ SET_VISIBILITY_SERVICE_SCHEMA = vol.Schema({
|
||||
|
||||
RELOAD_SERVICE_SCHEMA = vol.Schema({})
|
||||
|
||||
SET_SERVICE_SCHEMA = vol.Schema({
|
||||
vol.Required(ATTR_OBJECT_ID): cv.slug,
|
||||
vol.Optional(ATTR_NAME): cv.string,
|
||||
vol.Optional(ATTR_VIEW): cv.boolean,
|
||||
vol.Optional(ATTR_ICON): cv.string,
|
||||
vol.Optional(ATTR_CONTROL): CONTROL_TYPES,
|
||||
vol.Optional(ATTR_VISIBLE): cv.boolean,
|
||||
vol.Exclusive(ATTR_ENTITIES, 'entities'): cv.entity_ids,
|
||||
vol.Exclusive(ATTR_ADD_ENTITIES, 'entities'): cv.entity_ids,
|
||||
})
|
||||
|
||||
REMOVE_SERVICE_SCHEMA = vol.Schema({
|
||||
vol.Required(ATTR_OBJECT_ID): cv.slug,
|
||||
})
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
||||
@ -60,7 +86,7 @@ GROUP_SCHEMA = vol.Schema({
|
||||
CONF_VIEW: cv.boolean,
|
||||
CONF_NAME: cv.string,
|
||||
CONF_ICON: cv.icon,
|
||||
CONF_CONTROL: cv.string,
|
||||
CONF_CONTROL: CONTROL_TYPES,
|
||||
})
|
||||
|
||||
CONFIG_SCHEMA = vol.Schema({
|
||||
@ -69,7 +95,8 @@ CONFIG_SCHEMA = vol.Schema({
|
||||
|
||||
# List of ON/OFF state tuples for groupable states
|
||||
_GROUP_TYPES = [(STATE_ON, STATE_OFF), (STATE_HOME, STATE_NOT_HOME),
|
||||
(STATE_OPEN, STATE_CLOSED), (STATE_LOCKED, STATE_UNLOCKED)]
|
||||
(STATE_OPEN, STATE_CLOSED), (STATE_LOCKED, STATE_UNLOCKED),
|
||||
(STATE_PROBLEM, STATE_OK)]
|
||||
|
||||
|
||||
def _get_group_on_off(state):
|
||||
@ -99,10 +126,10 @@ def reload(hass):
|
||||
hass.add_job(async_reload, hass)
|
||||
|
||||
|
||||
@asyncio.coroutine
|
||||
@callback
|
||||
def async_reload(hass):
|
||||
"""Reload the automation from config."""
|
||||
yield from hass.services.async_call(DOMAIN, SERVICE_RELOAD)
|
||||
hass.async_add_job(hass.services.async_call(DOMAIN, SERVICE_RELOAD))
|
||||
|
||||
|
||||
def set_visibility(hass, entity_id=None, visible=True):
|
||||
@ -111,6 +138,46 @@ def set_visibility(hass, entity_id=None, visible=True):
|
||||
hass.services.call(DOMAIN, SERVICE_SET_VISIBILITY, data)
|
||||
|
||||
|
||||
def set_group(hass, object_id, name=None, entity_ids=None, visible=None,
|
||||
icon=None, view=None, control=None, add=None):
|
||||
"""Create a new user group."""
|
||||
hass.add_job(
|
||||
async_set_group, hass, object_id, name, entity_ids, visible, icon,
|
||||
view, control, add)
|
||||
|
||||
|
||||
@callback
|
||||
def async_set_group(hass, object_id, name=None, entity_ids=None, visible=None,
|
||||
icon=None, view=None, control=None, add=None):
|
||||
"""Create a new user group."""
|
||||
data = {
|
||||
key: value for key, value in [
|
||||
(ATTR_OBJECT_ID, object_id),
|
||||
(ATTR_NAME, name),
|
||||
(ATTR_ENTITIES, entity_ids),
|
||||
(ATTR_VISIBLE, visible),
|
||||
(ATTR_ICON, icon),
|
||||
(ATTR_VIEW, view),
|
||||
(ATTR_CONTROL, control),
|
||||
(ATTR_ADD_ENTITIES, add),
|
||||
] if value is not None
|
||||
}
|
||||
|
||||
hass.async_add_job(hass.services.async_call(DOMAIN, SERVICE_SET, data))
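The helper above simply forwards to the new group.set service; a minimal sketch of creating a dynamic group and then extending it (entity ids are illustrative):

# Illustrative only: create a dynamic group, then add one more entity to it.
set_group(hass, 'my_lights', name='My Lights',
          entity_ids=['light.kitchen', 'light.hall'])  # hypothetical entities
set_group(hass, 'my_lights', add=['light.porch'])      # maps to ATTR_ADD_ENTITIES
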
|
||||
|
||||
|
||||
def remove(hass, name):
|
||||
"""Remove a user group."""
|
||||
hass.add_job(async_remove, hass, name)
|
||||
|
||||
|
||||
@callback
|
||||
def async_remove(hass, object_id):
|
||||
"""Remove a user group."""
|
||||
data = {ATTR_OBJECT_ID: object_id}
|
||||
hass.async_add_job(hass.services.async_call(DOMAIN, SERVICE_REMOVE, data))
|
||||
|
||||
|
||||
def expand_entity_ids(hass, entity_ids):
|
||||
"""Return entity_ids with group entity ids replaced by their members.
|
||||
|
||||
@ -170,6 +237,7 @@ def get_entity_ids(hass, entity_id, domain_filter=None):
|
||||
def async_setup(hass, config):
|
||||
"""Set up all groups found definded in the configuration."""
|
||||
component = EntityComponent(_LOGGER, DOMAIN, hass)
|
||||
service_groups = {}
|
||||
|
||||
yield from _async_process_config(hass, config, component)
|
||||
|
||||
@ -179,29 +247,116 @@ def async_setup(hass, config):
|
||||
)
|
||||
|
||||
@asyncio.coroutine
|
||||
def reload_service_handler(service_call):
|
||||
def reload_service_handler(service):
|
||||
"""Remove all groups and load new ones from config."""
|
||||
conf = yield from component.async_prepare_reload()
|
||||
if conf is None:
|
||||
return
|
||||
yield from _async_process_config(hass, conf, component)
|
||||
|
||||
hass.services.async_register(
|
||||
DOMAIN, SERVICE_RELOAD, reload_service_handler,
|
||||
descriptions[DOMAIN][SERVICE_RELOAD], schema=RELOAD_SERVICE_SCHEMA)
|
||||
|
||||
@asyncio.coroutine
|
||||
def groups_service_handler(service):
|
||||
"""Handle dynamic group service functions."""
|
||||
object_id = service.data[ATTR_OBJECT_ID]
|
||||
|
||||
# new group
|
||||
if service.service == SERVICE_SET and object_id not in service_groups:
|
||||
entity_ids = service.data.get(ATTR_ENTITIES) or \
|
||||
service.data.get(ATTR_ADD_ENTITIES) or None
|
||||
|
||||
extra_arg = {attr: service.data[attr] for attr in (
|
||||
ATTR_VISIBLE, ATTR_ICON, ATTR_VIEW, ATTR_CONTROL
|
||||
) if service.data.get(attr) is not None}
|
||||
|
||||
new_group = yield from Group.async_create_group(
|
||||
hass, service.data.get(ATTR_NAME, object_id),
|
||||
object_id=object_id,
|
||||
entity_ids=entity_ids,
|
||||
user_defined=False,
|
||||
**extra_arg
|
||||
)
|
||||
|
||||
service_groups[object_id] = new_group
|
||||
return
|
||||
|
||||
# update group
|
||||
if service.service == SERVICE_SET:
|
||||
group = service_groups[object_id]
|
||||
need_update = False
|
||||
|
||||
if ATTR_ADD_ENTITIES in service.data:
|
||||
delta = service.data[ATTR_ADD_ENTITIES]
|
||||
entity_ids = set(group.tracking) | set(delta)
|
||||
yield from group.async_update_tracked_entity_ids(entity_ids)
|
||||
|
||||
if ATTR_ENTITIES in service.data:
|
||||
entity_ids = service.data[ATTR_ENTITIES]
|
||||
yield from group.async_update_tracked_entity_ids(entity_ids)
|
||||
|
||||
if ATTR_NAME in service.data:
|
||||
group.name = service.data[ATTR_NAME]
|
||||
need_update = True
|
||||
|
||||
if ATTR_VISIBLE in service.data:
|
||||
group.visible = service.data[ATTR_VISIBLE]
|
||||
need_update = True
|
||||
|
||||
if ATTR_ICON in service.data:
|
||||
group.icon = service.data[ATTR_ICON]
|
||||
need_update = True
|
||||
|
||||
if ATTR_CONTROL in service.data:
|
||||
group.control = service.data[ATTR_CONTROL]
|
||||
need_update = True
|
||||
|
||||
if ATTR_VIEW in service.data:
|
||||
group.view = service.data[ATTR_VIEW]
|
||||
need_update = True
|
||||
|
||||
if need_update:
|
||||
yield from group.async_update_ha_state()
|
||||
|
||||
return
|
||||
|
||||
# remove group
|
||||
if service.service == SERVICE_REMOVE:
|
||||
if object_id not in service_groups:
|
||||
_LOGGER.warning("Group '%s' not exists!", object_id)
|
||||
return
|
||||
|
||||
del_group = service_groups.pop(object_id)
|
||||
yield from del_group.async_stop()
|
||||
|
||||
hass.services.async_register(
|
||||
DOMAIN, SERVICE_SET, groups_service_handler,
|
||||
descriptions[DOMAIN][SERVICE_SET], schema=SET_SERVICE_SCHEMA)
|
||||
|
||||
hass.services.async_register(
|
||||
DOMAIN, SERVICE_REMOVE, groups_service_handler,
|
||||
descriptions[DOMAIN][SERVICE_REMOVE], schema=REMOVE_SERVICE_SCHEMA)
|
||||
|
||||
@asyncio.coroutine
|
||||
def visibility_service_handler(service):
|
||||
"""Change visibility of a group."""
|
||||
visible = service.data.get(ATTR_VISIBLE)
|
||||
tasks = [group.async_set_visible(visible) for group
|
||||
in component.async_extract_from_service(service,
|
||||
expand_group=False)]
|
||||
|
||||
tasks = []
|
||||
for group in component.async_extract_from_service(service,
|
||||
expand_group=False):
|
||||
group.visible = visible
|
||||
tasks.append(group.async_update_ha_state())
|
||||
|
||||
if tasks:
|
||||
yield from asyncio.wait(tasks, loop=hass.loop)
|
||||
|
||||
hass.services.async_register(
|
||||
DOMAIN, SERVICE_SET_VISIBILITY, visibility_service_handler,
|
||||
descriptions[DOMAIN][SERVICE_SET_VISIBILITY],
|
||||
schema=SET_VISIBILITY_SERVICE_SCHEMA)
|
||||
hass.services.async_register(
|
||||
DOMAIN, SERVICE_RELOAD, reload_service_handler,
|
||||
descriptions[DOMAIN][SERVICE_RELOAD], schema=RELOAD_SERVICE_SCHEMA)
|
||||
|
||||
return True
|
||||
|
||||
@ -231,8 +386,8 @@ def _async_process_config(hass, config, component):
|
||||
class Group(Entity):
|
||||
"""Track a group of entity ids."""
|
||||
|
||||
def __init__(self, hass, name, order=None, user_defined=True, icon=None,
|
||||
view=False, control=None):
|
||||
def __init__(self, hass, name, order=None, visible=True, icon=None,
|
||||
view=False, control=None, user_defined=True):
|
||||
"""Initialize a group.
|
||||
|
||||
This Object has factory function for creation.
|
||||
@ -240,31 +395,33 @@ class Group(Entity):
|
||||
self.hass = hass
|
||||
self._name = name
|
||||
self._state = STATE_UNKNOWN
|
||||
self._user_defined = user_defined
|
||||
self._order = order
|
||||
self._icon = icon
|
||||
self._view = view
|
||||
self.view = view
|
||||
self.tracking = []
|
||||
self.group_on = None
|
||||
self.group_off = None
|
||||
self.visible = visible
|
||||
self.control = control
|
||||
self._user_defined = user_defined
|
||||
self._order = order
|
||||
self._assumed_state = False
|
||||
self._async_unsub_state_changed = None
|
||||
self._visible = True
|
||||
self._control = control
|
||||
|
||||
@staticmethod
|
||||
def create_group(hass, name, entity_ids=None, user_defined=True,
|
||||
icon=None, view=False, control=None, object_id=None):
|
||||
visible=True, icon=None, view=False, control=None,
|
||||
object_id=None):
|
||||
"""Initialize a group."""
|
||||
return run_coroutine_threadsafe(
|
||||
Group.async_create_group(hass, name, entity_ids, user_defined,
|
||||
icon, view, control, object_id),
|
||||
Group.async_create_group(
|
||||
hass, name, entity_ids, user_defined, visible, icon, view,
|
||||
control, object_id),
|
||||
hass.loop).result()
|
||||
|
||||
@staticmethod
|
||||
@asyncio.coroutine
|
||||
def async_create_group(hass, name, entity_ids=None, user_defined=True,
|
||||
icon=None, view=False, control=None,
|
||||
visible=True, icon=None, view=False, control=None,
|
||||
object_id=None):
|
||||
"""Initialize a group.
|
||||
|
||||
@ -273,8 +430,9 @@ class Group(Entity):
|
||||
group = Group(
|
||||
hass, name,
|
||||
order=len(hass.states.async_entity_ids(DOMAIN)),
|
||||
user_defined=user_defined, icon=icon, view=view,
|
||||
control=control)
|
||||
visible=visible, icon=icon, view=view, control=control,
|
||||
user_defined=user_defined
|
||||
)
|
||||
|
||||
group.entity_id = async_generate_entity_id(
|
||||
ENTITY_ID_FORMAT, object_id or name, hass=hass)
|
||||
@ -297,6 +455,11 @@ class Group(Entity):
|
||||
"""Return the name of the group."""
|
||||
return self._name
|
||||
|
||||
@name.setter
|
||||
def name(self, value):
|
||||
"""Set Group name."""
|
||||
self._name = value
|
||||
|
||||
@property
|
||||
def state(self):
|
||||
"""Return the state of the group."""
|
||||
@ -307,19 +470,16 @@ class Group(Entity):
|
||||
"""Return the icon of the group."""
|
||||
return self._icon
|
||||
|
||||
@asyncio.coroutine
|
||||
def async_set_visible(self, visible):
|
||||
"""Change visibility of the group."""
|
||||
if self._visible != visible:
|
||||
self._visible = visible
|
||||
yield from self.async_update_ha_state()
|
||||
@icon.setter
|
||||
def icon(self, value):
|
||||
"""Set Icon for group."""
|
||||
self._icon = value
|
||||
|
||||
@property
|
||||
def hidden(self):
|
||||
"""If group should be hidden or not."""
|
||||
# Visibility from set_visibility service overrides
|
||||
if self._visible:
|
||||
return not self._user_defined or self._view
|
||||
if self.visible and not self.view:
|
||||
return False
|
||||
return True
|
||||
|
||||
@property
|
||||
@ -331,10 +491,10 @@ class Group(Entity):
|
||||
}
|
||||
if not self._user_defined:
|
||||
data[ATTR_AUTO] = True
|
||||
if self._view:
|
||||
if self.view:
|
||||
data[ATTR_VIEW] = True
|
||||
if self._control:
|
||||
data[ATTR_CONTROL] = self._control
|
||||
if self.control:
|
||||
data[ATTR_CONTROL] = self.control
|
||||
return data
|
||||
|
||||
@property
|
||||
|
@ -1,5 +1,5 @@
|
||||
"""
|
||||
Support for Homematic devices.
|
||||
Support for HomeMatic devices.
|
||||
|
||||
For more details about this component, please refer to the documentation at
|
||||
https://home-assistant.io/components/homematic/
|
||||
@ -21,7 +21,7 @@ from homeassistant.helpers.entity import Entity
|
||||
from homeassistant.helpers.event import track_time_interval
|
||||
from homeassistant.config import load_yaml_config_file
|
||||
|
||||
REQUIREMENTS = ['pyhomematic==0.1.27']
|
||||
REQUIREMENTS = ['pyhomematic==0.1.28']
|
||||
|
||||
DOMAIN = 'homematic'
|
||||
|
||||
@ -228,7 +228,7 @@ def set_var_value(hass, entity_id, value):
|
||||
|
||||
|
||||
def set_dev_value(hass, address, channel, param, value, proxy=None):
|
||||
"""Send virtual keypress to the Homematic controlller."""
|
||||
"""Call setValue XML-RPC method of supplied proxy."""
|
||||
data = {
|
||||
ATTR_ADDRESS: address,
|
||||
ATTR_CHANNEL: channel,
|
||||
@ -245,16 +245,15 @@ def reconnect(hass):
|
||||
hass.services.call(DOMAIN, SERVICE_RECONNECT, {})
|
||||
|
||||
|
||||
# pylint: disable=unused-argument
|
||||
def setup(hass, config):
|
||||
"""Set up the Homematic component."""
|
||||
from pyhomematic import HMConnection
|
||||
|
||||
hass.data[DATA_DELAY] = config[DOMAIN].get(CONF_DELAY)
|
||||
hass.data[DATA_DEVINIT] = {}
|
||||
hass.data[DATA_STORE] = []
|
||||
hass.data[DATA_STORE] = set()
|
||||
|
||||
# Create hosts list for pyhomematic
|
||||
# Create hosts-dictionary for pyhomematic
|
||||
remotes = {}
|
||||
hosts = {}
|
||||
for rname, rconfig in config[DOMAIN][CONF_HOSTS].items():
|
||||
@ -286,10 +285,10 @@ def setup(hass, config):
|
||||
interface_id='homeassistant'
|
||||
)
|
||||
|
||||
# Start server thread, connect to peer, initialize to receive events
|
||||
# Start server thread, connect to hosts, initialize to receive events
|
||||
hass.data[DATA_HOMEMATIC].start()
|
||||
|
||||
# Stops server when Homeassistant is shutting down
|
||||
# Stops server when HASS is shutting down
|
||||
hass.bus.listen_once(
|
||||
EVENT_HOMEASSISTANT_STOP, hass.data[DATA_HOMEMATIC].stop)
|
||||
|
||||
@ -299,12 +298,12 @@ def setup(hass, config):
|
||||
entity_hubs.append(HMHub(
|
||||
hass, hub_data[CONF_NAME], hub_data[CONF_VARIABLES]))
|
||||
|
||||
# Register Homematic services
|
||||
# Register HomeMatic services
|
||||
descriptions = load_yaml_config_file(
|
||||
os.path.join(os.path.dirname(__file__), 'services.yaml'))
|
||||
|
||||
def _hm_service_virtualkey(service):
|
||||
"""Service handle virtualkey services."""
|
||||
"""Service to handle virtualkey servicecalls."""
|
||||
address = service.data.get(ATTR_ADDRESS)
|
||||
channel = service.data.get(ATTR_CHANNEL)
|
||||
param = service.data.get(ATTR_PARAM)
|
||||
@ -315,18 +314,18 @@ def setup(hass, config):
|
||||
_LOGGER.error("%s not found for service virtualkey!", address)
|
||||
return
|
||||
|
||||
# If param exists for this device
|
||||
# Parameter doesn't exist for device
|
||||
if param not in hmdevice.ACTIONNODE:
|
||||
_LOGGER.error("%s not datapoint in hm device %s", param, address)
|
||||
return
|
||||
|
||||
# Channel exists?
|
||||
# Channel doesn't exist for device
|
||||
if channel not in hmdevice.ACTIONNODE[param]:
|
||||
_LOGGER.error("%i is not a channel in hm device %s",
|
||||
channel, address)
|
||||
return
|
||||
|
||||
# Call key
|
||||
# Call parameter
|
||||
hmdevice.actionNodeData(param, True, channel)
|
||||
|
||||
hass.services.register(
|
||||
@ -335,7 +334,7 @@ def setup(hass, config):
|
||||
schema=SCHEMA_SERVICE_VIRTUALKEY)
|
||||
|
||||
def _service_handle_value(service):
|
||||
"""Set value on homematic variable."""
|
||||
"""Service to call setValue method for HomeMatic system variable."""
|
||||
entity_ids = service.data.get(ATTR_ENTITY_ID)
|
||||
name = service.data[ATTR_NAME]
|
||||
value = service.data[ATTR_VALUE]
|
||||
@ -347,7 +346,7 @@ def setup(hass, config):
|
||||
entities = entity_hubs
|
||||
|
||||
if not entities:
|
||||
_LOGGER.error("Homematic controller not found!")
|
||||
_LOGGER.error("No HomeMatic hubs available")
|
||||
return
|
||||
|
||||
for hub in entities:
|
||||
@ -359,7 +358,7 @@ def setup(hass, config):
|
||||
schema=SCHEMA_SERVICE_SET_VAR_VALUE)
|
||||
|
||||
def _service_handle_reconnect(service):
|
||||
"""Reconnect to all homematic hubs."""
|
||||
"""Service to reconnect all HomeMatic hubs."""
|
||||
hass.data[DATA_HOMEMATIC].reconnect()
|
||||
|
||||
hass.services.register(
|
||||
@ -368,7 +367,7 @@ def setup(hass, config):
|
||||
schema=SCHEMA_SERVICE_RECONNECT)
|
||||
|
||||
def _service_handle_device(service):
|
||||
"""Service handle set_dev_value services."""
|
||||
"""Service to call setValue method for HomeMatic devices."""
|
||||
address = service.data.get(ATTR_ADDRESS)
|
||||
channel = service.data.get(ATTR_CHANNEL)
|
||||
param = service.data.get(ATTR_PARAM)
|
||||
@ -380,7 +379,6 @@ def setup(hass, config):
|
||||
_LOGGER.error("%s not found!", address)
|
||||
return
|
||||
|
||||
# Call key
|
||||
hmdevice.setValue(param, value, channel)
|
||||
|
||||
hass.services.register(
|
||||
@ -392,10 +390,9 @@ def setup(hass, config):
|
||||
|
||||
|
||||
def _system_callback_handler(hass, config, src, *args):
|
||||
"""Handle the callback."""
|
||||
"""System callback handler."""
|
||||
# New devices available at hub
|
||||
if src == 'newDevices':
|
||||
_LOGGER.debug("newDevices with: %s", args)
|
||||
# pylint: disable=unused-variable
|
||||
(interface_id, dev_descriptions) = args
|
||||
proxy = interface_id.split('-')[-1]
|
||||
|
||||
@ -403,34 +400,25 @@ def _system_callback_handler(hass, config, src, *args):
|
||||
if not hass.data[DATA_DEVINIT][proxy]:
|
||||
return
|
||||
|
||||
# Get list of all keys of the devices (ignoring channels)
|
||||
key_dict = {}
|
||||
addresses = []
|
||||
for dev in dev_descriptions:
|
||||
key_dict[dev['ADDRESS'].split(':')[0]] = True
|
||||
|
||||
# Remove device they allready init by HA
|
||||
tmp_devs = key_dict.copy()
|
||||
for dev in tmp_devs:
|
||||
if dev in hass.data[DATA_STORE]:
|
||||
del key_dict[dev]
|
||||
else:
|
||||
hass.data[DATA_STORE].append(dev)
|
||||
address = dev['ADDRESS'].split(':')[0]
|
||||
if address not in hass.data[DATA_STORE]:
|
||||
hass.data[DATA_STORE].add(address)
|
||||
addresses.append(address)
|
||||
|
||||
# Register EVENTS
|
||||
# Search all device with a EVENTNODE that include data
|
||||
# Search all devices with an EVENTNODE that includes data
|
||||
bound_event_callback = partial(_hm_event_handler, hass, proxy)
|
||||
for dev in key_dict:
|
||||
for dev in addresses:
|
||||
hmdevice = hass.data[DATA_HOMEMATIC].devices[proxy].get(dev)
|
||||
|
||||
# Have events?
|
||||
if hmdevice.EVENTNODE:
|
||||
_LOGGER.debug("Register Events from %s", dev)
|
||||
hmdevice.setEventCallback(
|
||||
callback=bound_event_callback, bequeath=True)
|
||||
|
||||
# If configuration allows autodetection of devices,
|
||||
# all devices not configured are added.
|
||||
if key_dict:
|
||||
# Create HASS entities
|
||||
if addresses:
|
||||
for component_name, discovery_type in (
|
||||
('switch', DISCOVER_SWITCHES),
|
||||
('light', DISCOVER_LIGHTS),
|
||||
@ -440,18 +428,18 @@ def _system_callback_handler(hass, config, src, *args):
|
||||
('climate', DISCOVER_CLIMATE)):
|
||||
# Get all devices of a specific type
|
||||
found_devices = _get_devices(
|
||||
hass, discovery_type, key_dict, proxy)
|
||||
hass, discovery_type, addresses, proxy)
|
||||
|
||||
# When devices of this type are found
|
||||
# they are setup in HA and an event is fired
|
||||
# they are setup in HASS and an discovery event is fired
|
||||
if found_devices:
|
||||
# Fire discovery event
|
||||
discovery.load_platform(hass, component_name, DOMAIN, {
|
||||
ATTR_DISCOVER_DEVICES: found_devices
|
||||
}, config)
|
||||
|
||||
# Homegear error message
|
||||
elif src == 'error':
|
||||
_LOGGER.debug("Error: %s", args)
|
||||
_LOGGER.error("Error: %s", args)
|
||||
(interface_id, errorcode, message) = args
|
||||
hass.bus.fire(EVENT_ERROR, {
|
||||
ATTR_ERRORCODE: errorcode,
|
||||
@ -460,7 +448,7 @@ def _system_callback_handler(hass, config, src, *args):
|
||||
|
||||
|
||||
def _get_devices(hass, discovery_type, keys, proxy):
|
||||
"""Get the Homematic devices for given discovery_type."""
|
||||
"""Get the HomeMatic devices for given discovery_type."""
|
||||
device_arr = []
|
||||
|
||||
for key in keys:
|
||||
@ -468,11 +456,11 @@ def _get_devices(hass, discovery_type, keys, proxy):
|
||||
class_name = device.__class__.__name__
|
||||
metadata = {}
|
||||
|
||||
# Class supported by discovery type
|
||||
# Class not supported by discovery type
|
||||
if class_name not in HM_DEVICE_TYPES[discovery_type]:
|
||||
continue
|
||||
|
||||
# Load metadata if needed to generate a param list
|
||||
# Load metadata needed to generate a parameter list
|
||||
if discovery_type == DISCOVER_SENSORS:
|
||||
metadata.update(device.SENSORNODE)
|
||||
elif discovery_type == DISCOVER_BINARY_SENSORS:
|
||||
@ -480,8 +468,7 @@ def _get_devices(hass, discovery_type, keys, proxy):
|
||||
else:
|
||||
metadata.update({None: device.ELEMENT})
|
||||
|
||||
if metadata:
|
||||
# Generate options for 1...n elements with 1...n params
|
||||
# Generate options for 1...n elements with 1...n parameters
|
||||
for param, channels in metadata.items():
|
||||
if param in HM_IGNORE_DISCOVERY_NODE:
|
||||
continue
|
||||
@ -511,14 +498,11 @@ def _get_devices(hass, discovery_type, keys, proxy):
|
||||
except vol.MultipleInvalid as err:
|
||||
_LOGGER.error("Invalid device config: %s",
|
||||
str(err))
|
||||
else:
|
||||
_LOGGER.debug("Got no params for %s", key)
|
||||
_LOGGER.debug("%s autodiscovery done: %s", discovery_type, str(device_arr))
|
||||
return device_arr
|
||||
|
||||
|
||||
def _create_ha_name(name, channel, param, count):
|
||||
"""Generate a unique object name."""
|
||||
"""Generate a unique entity id."""
|
||||
# HMDevice is a simple device
|
||||
if count == 1 and param is None:
|
||||
return name
|
||||
@ -527,11 +511,11 @@ def _create_ha_name(name, channel, param, count):
|
||||
if count > 1 and param is None:
|
||||
return "{} {}".format(name, channel)
|
||||
|
||||
# With multiple param first elements
|
||||
# With multiple parameters on first channel
|
||||
if count == 1 and param is not None:
|
||||
return "{} {}".format(name, param)
|
||||
|
||||
# Multiple param on object with multiple elements
|
||||
# Multiple parameters with multiple channels
|
||||
if count > 1 and param is not None:
|
||||
return "{} {} {}".format(name, channel, param)
|
||||
|
||||
@ -546,14 +530,14 @@ def _hm_event_handler(hass, proxy, device, caller, attribute, value):
|
||||
_LOGGER.error("Event handling channel convert error!")
|
||||
return
|
||||
|
||||
# is not a event?
|
||||
# Return if not an event supported by device
|
||||
if attribute not in hmdevice.EVENTNODE:
|
||||
return
|
||||
|
||||
_LOGGER.debug("Event %s for %s channel %i", attribute,
|
||||
hmdevice.NAME, channel)
|
||||
|
||||
# keypress event
|
||||
# Keypress event
|
||||
if attribute in HM_PRESS_EVENTS:
|
||||
hass.bus.fire(EVENT_KEYPRESS, {
|
||||
ATTR_NAME: hmdevice.NAME,
|
||||
@ -562,7 +546,7 @@ def _hm_event_handler(hass, proxy, device, caller, attribute, value):
|
||||
})
|
||||
return
|
||||
|
||||
# impulse event
|
||||
# Impulse event
|
||||
if attribute in HM_IMPULSE_EVENTS:
|
||||
hass.bus.fire(EVENT_IMPULSE, {
|
||||
ATTR_NAME: hmdevice.NAME,
|
||||
@ -574,7 +558,7 @@ def _hm_event_handler(hass, proxy, device, caller, attribute, value):
|
||||
|
||||
|
||||
def _device_from_servicecall(hass, service):
|
||||
"""Extract homematic device from service call."""
|
||||
"""Extract HomeMatic device from service call."""
|
||||
address = service.data.get(ATTR_ADDRESS)
|
||||
proxy = service.data.get(ATTR_PROXY)
|
||||
if address == 'BIDCOS-RF':
|
||||
@ -589,10 +573,10 @@ def _device_from_servicecall(hass, service):
|
||||
|
||||
|
||||
class HMHub(Entity):
|
||||
"""The Homematic hub. I.e. CCU2/HomeGear."""
|
||||
"""The HomeMatic hub. (CCU2/HomeGear)."""
|
||||
|
||||
def __init__(self, hass, name, use_variables):
|
||||
"""Initialize Homematic hub."""
|
||||
"""Initialize HomeMatic hub."""
|
||||
self.hass = hass
|
||||
self.entity_id = "{}.{}".format(DOMAIN, name.lower())
|
||||
self._homematic = hass.data[DATA_HOMEMATIC]
|
||||
@ -601,7 +585,7 @@ class HMHub(Entity):
|
||||
self._state = STATE_UNKNOWN
|
||||
self._use_variables = use_variables
|
||||
|
||||
# load data
|
||||
# Load data
|
||||
track_time_interval(hass, self._update_hub, SCAN_INTERVAL_HUB)
|
||||
self._update_hub(None)
|
||||
|
||||
@ -617,7 +601,7 @@ class HMHub(Entity):
|
||||
|
||||
@property
|
||||
def should_poll(self):
|
||||
"""Return false. Homematic Hub object update variable."""
|
||||
"""Return false. HomeMatic Hub object updates variables."""
|
||||
return False
|
||||
|
||||
@property
|
||||
@ -660,7 +644,7 @@ class HMHub(Entity):
|
||||
self.schedule_update_ha_state()
|
||||
|
||||
def hm_set_variable(self, name, value):
|
||||
"""Set variable on homematic controller."""
|
||||
"""Set variable value on CCU/Homegear."""
|
||||
if name not in self._variables:
|
||||
_LOGGER.error("Variable %s not found on %s", name, self.name)
|
||||
return
|
||||
@ -676,10 +660,10 @@ class HMHub(Entity):
|
||||
|
||||
|
||||
class HMDevice(Entity):
|
||||
"""The Homematic device base object."""
|
||||
"""The HomeMatic device base object."""
|
||||
|
||||
def __init__(self, hass, config):
|
||||
"""Initialize a generic Homematic device."""
|
||||
"""Initialize a generic HomeMatic device."""
|
||||
self.hass = hass
|
||||
self._homematic = hass.data[DATA_HOMEMATIC]
|
||||
self._name = config.get(ATTR_NAME)
|
||||
@ -692,13 +676,13 @@ class HMDevice(Entity):
|
||||
self._connected = False
|
||||
self._available = False
|
||||
|
||||
# Set param to uppercase
|
||||
# Set parameter to uppercase
|
||||
if self._state:
|
||||
self._state = self._state.upper()
|
||||
|
||||
@property
|
||||
def should_poll(self):
|
||||
"""Return false. Homematic states are pushed by the XML RPC Server."""
|
||||
"""Return false. HomeMatic states are pushed by the XML-RPC Server."""
|
||||
return False
|
||||
|
||||
@property
|
||||
@ -721,49 +705,44 @@ class HMDevice(Entity):
|
||||
"""Return device specific state attributes."""
|
||||
attr = {}
|
||||
|
||||
# no data available to create
|
||||
# No data available
|
||||
if not self.available:
|
||||
return attr
|
||||
|
||||
# Generate an attributes list
|
||||
# Generate a dictionary with attributes
|
||||
for node, data in HM_ATTRIBUTE_SUPPORT.items():
|
||||
# Is an attributes and exists for this object
|
||||
# Is an attribute and exists for this object
|
||||
if node in self._data:
|
||||
value = data[1].get(self._data[node], self._data[node])
|
||||
attr[data[0]] = value
|
||||
|
||||
# static attributes
|
||||
# Static attributes
|
||||
attr['id'] = self._hmdevice.ADDRESS
|
||||
attr['proxy'] = self._proxy
|
||||
|
||||
return attr
|
||||
|
||||
def link_homematic(self):
|
||||
"""Connect to Homematic."""
|
||||
# Device is already linked
|
||||
"""Connect to HomeMatic."""
|
||||
if self._connected:
|
||||
return True
|
||||
|
||||
# Init
|
||||
# Initialize
|
||||
self._hmdevice = self._homematic.devices[self._proxy][self._address]
|
||||
self._connected = True
|
||||
|
||||
# Check if Homematic class is okay for HA class
|
||||
_LOGGER.info("Start linking %s to %s", self._address, self._name)
|
||||
try:
|
||||
# Init datapoints of this object
|
||||
# Initialize datapoints of this object
|
||||
self._init_data()
|
||||
if self.hass.data[DATA_DELAY]:
|
||||
# We delay / pause loading of data to avoid overloading
|
||||
# of CCU / Homegear when doing auto detection
|
||||
# We optionally delay / pause loading of data to avoid
|
||||
# overloading of CCU / Homegear
|
||||
time.sleep(self.hass.data[DATA_DELAY])
|
||||
self._load_data_from_hm()
|
||||
_LOGGER.debug("%s datastruct: %s", self._name, str(self._data))
|
||||
|
||||
# Link events from pyhomatic
|
||||
# Link events from pyhomematic
|
||||
self._subscribe_homematic_events()
|
||||
self._available = not self._hmdevice.UNREACH
|
||||
_LOGGER.debug("%s linking done", self._name)
|
||||
# pylint: disable=broad-except
|
||||
except Exception as err:
|
||||
self._connected = False
|
||||
@ -774,29 +753,28 @@ class HMDevice(Entity):
|
||||
"""Handle all pyhomematic device events."""
|
||||
_LOGGER.debug("%s received event '%s' value: %s", self._name,
|
||||
attribute, value)
|
||||
have_change = False
|
||||
has_changed = False
|
||||
|
||||
# Is data needed for this instance?
|
||||
if attribute in self._data:
|
||||
# Did data change?
|
||||
if self._data[attribute] != value:
|
||||
self._data[attribute] = value
|
||||
have_change = True
|
||||
has_changed = True
|
||||
|
||||
# If available it has changed
|
||||
# Availability has changed
|
||||
if attribute == 'UNREACH':
|
||||
self._available = bool(value)
|
||||
have_change = True
|
||||
has_changed = True
|
||||
|
||||
# If it has changed data point, update HA
|
||||
if have_change:
|
||||
_LOGGER.debug("%s update_ha_state after '%s'", self._name,
|
||||
attribute)
|
||||
# If it has changed data point, update HASS
|
||||
if has_changed:
|
||||
self.schedule_update_ha_state()
|
||||
|
||||
def _subscribe_homematic_events(self):
|
||||
"""Subscribe all required events to handle job."""
|
||||
channels_to_sub = {0: True} # add channel 0 for UNREACH
|
||||
channels_to_sub = set()
|
||||
channels_to_sub.add(0) # Add channel 0 for UNREACH
|
||||
|
||||
# Push data to channels_to_sub from hmdevice metadata
|
||||
for metadata in (self._hmdevice.SENSORNODE, self._hmdevice.BINARYNODE,
|
||||
@ -814,8 +792,7 @@ class HMDevice(Entity):
|
||||
|
||||
# Prepare for subscription
|
||||
try:
|
||||
if int(channel) >= 0:
|
||||
channels_to_sub.update({int(channel): True})
|
||||
channels_to_sub.add(int(channel))
|
||||
except (ValueError, TypeError):
|
||||
_LOGGER.error("Invalid channel in metadata from %s",
|
||||
self._name)
|
||||
@ -858,14 +835,14 @@ class HMDevice(Entity):
|
||||
return None
|
||||
|
||||
def _init_data(self):
|
||||
"""Generate a data dict (self._data) from the Homematic metadata."""
|
||||
# Add all attributes to data dict
|
||||
"""Generate a data dict (self._data) from the HomeMatic metadata."""
|
||||
# Add all attributes to data dictionary
|
||||
for data_note in self._hmdevice.ATTRIBUTENODE:
|
||||
self._data.update({data_note: STATE_UNKNOWN})
|
||||
|
||||
# init device specified data
|
||||
# Initialize device specific data
|
||||
self._init_data_struct()
|
||||
|
||||
def _init_data_struct(self):
|
||||
"""Generate a data dict from the Homematic device metadata."""
|
||||
"""Generate a data dictionary from the HomeMatic device metadata."""
|
||||
raise NotImplementedError
|
||||
|
@ -51,7 +51,7 @@ CONF_TRUSTED_NETWORKS = 'trusted_networks'
|
||||
CONF_LOGIN_ATTEMPTS_THRESHOLD = 'login_attempts_threshold'
|
||||
CONF_IP_BAN_ENABLED = 'ip_ban_enabled'
|
||||
|
||||
# TLS configuation follows the best-practice guidelines specified here:
|
||||
# TLS configuration follows the best-practice guidelines specified here:
|
||||
# https://wiki.mozilla.org/Security/Server_Side_TLS
|
||||
# Intermediate guidelines are followed.
|
||||
SSL_VERSION = ssl.PROTOCOL_SSLv23
|
||||
@ -339,7 +339,7 @@ class HomeAssistantWSGI(object):
|
||||
|
||||
@asyncio.coroutine
|
||||
def stop(self):
|
||||
"""Stop the wsgi server."""
|
||||
"""Stop the WSGI server."""
|
||||
if self.server:
|
||||
self.server.close()
|
||||
yield from self.server.wait_closed()
|
||||
|
@ -19,6 +19,8 @@ from .const import (
|
||||
KEY_FAILED_LOGIN_ATTEMPTS)
|
||||
from .util import get_real_ip
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
NOTIFICATION_ID_BAN = 'ip-ban'
|
||||
NOTIFICATION_ID_LOGIN = 'http-login'
|
||||
|
||||
@ -29,8 +31,6 @@ SCHEMA_IP_BAN_ENTRY = vol.Schema({
|
||||
vol.Optional('banned_at'): vol.Any(None, cv.datetime)
|
||||
})
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
||||
@asyncio.coroutine
|
||||
def ban_middleware(app, handler):
|
||||
|
@ -6,7 +6,7 @@ KEY_REAL_IP = 'ha_real_ip'
|
||||
KEY_BANS_ENABLED = 'ha_bans_enabled'
|
||||
KEY_BANNED_IPS = 'ha_banned_ips'
|
||||
KEY_FAILED_LOGIN_ATTEMPTS = 'ha_failed_login_attempts'
|
||||
KEY_LOGIN_THRESHOLD = 'ha_login_treshold'
|
||||
KEY_LOGIN_THRESHOLD = 'ha_login_threshold'
|
||||
KEY_DEVELOPMENT = 'ha_development'
|
||||
|
||||
HTTP_HEADER_X_FORWARDED_FOR = 'X-Forwarded-For'
|
||||
|
@ -7,22 +7,56 @@ https://home-assistant.io/components/image_processing.opencv/
|
||||
from datetime import timedelta
|
||||
import logging
|
||||
|
||||
import requests
|
||||
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.core import split_entity_id
|
||||
from homeassistant.components.image_processing import (
|
||||
ImageProcessingEntity, PLATFORM_SCHEMA)
|
||||
from homeassistant.components.opencv import (
|
||||
ATTR_MATCHES, CLASSIFIER_GROUP_CONFIG, CONF_CLASSIFIER, CONF_ENTITY_ID,
|
||||
CONF_NAME, process_image)
|
||||
CONF_SOURCE, CONF_ENTITY_ID, CONF_NAME, PLATFORM_SCHEMA,
|
||||
ImageProcessingEntity)
|
||||
import homeassistant.helpers.config_validation as cv
|
||||
|
||||
REQUIREMENTS = ['numpy==1.13.0']
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
DEPENDENCIES = ['opencv']
|
||||
ATTR_MATCHES = 'matches'
|
||||
ATTR_TOTAL_MATCHES = 'total_matches'
|
||||
|
||||
CASCADE_URL = \
|
||||
'https://raw.githubusercontent.com/opencv/opencv/master/data/' + \
|
||||
'lbpcascades/lbpcascade_frontalface.xml'
|
||||
|
||||
CONF_CLASSIFIER = 'classifer'
|
||||
CONF_FILE = 'file'
|
||||
CONF_MIN_SIZE = 'min_size'
|
||||
CONF_NEIGHBORS = 'neighbors'
|
||||
CONF_SCALE = 'scale'
|
||||
|
||||
DEFAULT_CLASSIFIER_PATH = 'lbp_frontalface.xml'
|
||||
DEFAULT_MIN_SIZE = (30, 30)
|
||||
DEFAULT_NEIGHBORS = 4
|
||||
DEFAULT_SCALE = 1.1
|
||||
DEFAULT_TIMEOUT = 10
|
||||
|
||||
SCAN_INTERVAL = timedelta(seconds=2)
|
||||
|
||||
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(CLASSIFIER_GROUP_CONFIG)
|
||||
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
|
||||
vol.Optional(CONF_CLASSIFIER, default=None): {
|
||||
cv.string: vol.Any(
|
||||
cv.isfile,
|
||||
vol.Schema({
|
||||
vol.Required(CONF_FILE): cv.isfile,
|
||||
vol.Optional(CONF_SCALE, DEFAULT_SCALE): float,
|
||||
vol.Optional(CONF_NEIGHBORS, DEFAULT_NEIGHBORS):
|
||||
cv.positive_int,
|
||||
vol.Optional(CONF_MIN_SIZE, DEFAULT_MIN_SIZE):
|
||||
vol.Schema((int, int))
|
||||
})
|
||||
)
|
||||
}
|
||||
})
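The schema above accepts either a bare cascade file path or a per-classifier dict with tuning options; a minimal sketch of both forms (paths and classifier names are placeholders):

# Illustrative only: the two classifier forms accepted by the schema above.
classifiers = {
    'Face': '/config/lbp_frontalface.xml',   # plain cascade file path
    'Profile': {                             # dict with tuning options
        'file': '/config/lbp_profileface.xml',
        'scale': 1.2,
        'neighbors': 4,
        'min_size': (40, 40),
    },
}
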
|
||||
|
||||
|
||||
def _create_processor_from_config(hass, camera_entity, config):
|
||||
@ -37,41 +71,63 @@ def _create_processor_from_config(hass, camera_entity, config):
|
||||
return processor
|
||||
|
||||
|
||||
def _get_default_classifier(dest_path):
|
||||
"""Download the default OpenCV classifier."""
|
||||
_LOGGER.info('Downloading default classifier')
|
||||
req = requests.get(CASCADE_URL, stream=True)
|
||||
with open(dest_path, 'wb') as fil:
|
||||
for chunk in req.iter_content(chunk_size=1024):
|
||||
if chunk: # filter out keep-alive new chunks
|
||||
fil.write(chunk)
|
||||
|
||||
|
||||
def setup_platform(hass, config, add_devices, discovery_info=None):
|
||||
"""Set up the OpenCV image processing platform."""
|
||||
if discovery_info is None:
|
||||
try:
|
||||
# Verify opencv python package is preinstalled
|
||||
# pylint: disable=unused-import,unused-variable
|
||||
import cv2 # noqa
|
||||
except ImportError:
|
||||
_LOGGER.error("No opencv library found! " +
|
||||
"Install or compile for your system " +
|
||||
"following instructions here: " +
|
||||
"http://opencv.org/releases.html")
|
||||
return
|
||||
|
||||
devices = []
|
||||
for camera_entity in discovery_info[CONF_ENTITY_ID]:
|
||||
devices.append(
|
||||
_create_processor_from_config(hass, camera_entity, discovery_info))
|
||||
entities = []
|
||||
if config[CONF_CLASSIFIER] is None:
|
||||
dest_path = hass.config.path(DEFAULT_CLASSIFIER_PATH)
|
||||
_get_default_classifier(dest_path)
|
||||
config[CONF_CLASSIFIER] = {
|
||||
'Face': dest_path
|
||||
}
|
||||
|
||||
add_devices(devices)
|
||||
for camera in config[CONF_SOURCE]:
|
||||
entities.append(OpenCVImageProcessor(
|
||||
hass, camera[CONF_ENTITY_ID], camera.get(CONF_NAME),
|
||||
config[CONF_CLASSIFIER]
|
||||
))
|
||||
|
||||
add_devices(entities)
|
||||
|
||||
|
||||
class OpenCVImageProcessor(ImageProcessingEntity):
|
||||
"""Representation of an OpenCV image processor."""
|
||||
|
||||
def __init__(self, hass, camera_entity, name, classifier_configs):
|
||||
def __init__(self, hass, camera_entity, name, classifiers):
|
||||
"""Initialize the OpenCV entity."""
|
||||
self.hass = hass
|
||||
self._camera_entity = camera_entity
|
||||
if name:
|
||||
self._name = name
|
||||
self._classifier_configs = classifier_configs
|
||||
else:
|
||||
self._name = "OpenCV {0}".format(
|
||||
split_entity_id(camera_entity)[1])
|
||||
self._classifiers = classifiers
|
||||
self._matches = {}
|
||||
self._total_matches = 0
|
||||
self._last_image = None
|
||||
|
||||
@property
|
||||
def last_image(self):
|
||||
"""Return the last image."""
|
||||
return self._last_image
|
||||
|
||||
@property
|
||||
def matches(self):
|
||||
"""Return the matches it found."""
|
||||
return self._matches
|
||||
|
||||
@property
|
||||
def camera_entity(self):
|
||||
"""Return camera entity id from process pictures."""
|
||||
@ -85,20 +141,54 @@ class OpenCVImageProcessor(ImageProcessingEntity):
|
||||
@property
|
||||
def state(self):
|
||||
"""Return the state of the entity."""
|
||||
total_matches = 0
|
||||
for group in self._matches.values():
|
||||
total_matches += len(group)
|
||||
return total_matches
|
||||
return self._total_matches
|
||||
|
||||
@property
|
||||
def state_attributes(self):
|
||||
"""Return device specific state attributes."""
|
||||
return {
|
||||
ATTR_MATCHES: self._matches
|
||||
ATTR_MATCHES: self._matches,
|
||||
ATTR_TOTAL_MATCHES: self._total_matches
|
||||
}
|
||||
|
||||
def process_image(self, image):
|
||||
"""Process the image."""
|
||||
self._last_image = image
|
||||
self._matches = process_image(
|
||||
image, self._classifier_configs, False)
|
||||
import cv2 # pylint: disable=import-error
|
||||
import numpy
|
||||
|
||||
# pylint: disable=no-member
|
||||
cv_image = cv2.imdecode(numpy.asarray(bytearray(image)),
|
||||
cv2.IMREAD_UNCHANGED)
|
||||
|
||||
for name, classifier in self._classifiers.items():
|
||||
scale = DEFAULT_SCALE
|
||||
neighbors = DEFAULT_NEIGHBORS
|
||||
min_size = DEFAULT_MIN_SIZE
|
||||
if isinstance(classifier, dict):
|
||||
path = classifier[CONF_FILE]
|
||||
scale = classifier.get(CONF_SCALE, scale)
|
||||
neighbors = classifier.get(CONF_NEIGHBORS, neighbors)
|
||||
min_size = classifier.get(CONF_MIN_SIZE, min_size)
|
||||
else:
|
||||
path = classifier
|
||||
|
||||
# pylint: disable=no-member
|
||||
cascade = cv2.CascadeClassifier(path)
|
||||
|
||||
detections = cascade.detectMultiScale(
|
||||
cv_image,
|
||||
scaleFactor=scale,
|
||||
minNeighbors=neighbors,
|
||||
minSize=min_size)
|
||||
matches = {}
|
||||
total_matches = 0
|
||||
regions = []
|
||||
# pylint: disable=invalid-name
|
||||
for (x, y, w, h) in detections:
|
||||
regions.append((int(x), int(y), int(w), int(h)))
|
||||
total_matches += 1
|
||||
|
||||
matches[name] = regions
|
||||
|
||||
self._matches = matches
|
||||
self._total_matches = total_matches
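With the attributes defined earlier, the entity then exposes roughly the following structure (region boxes are illustrative x, y, w, h pixel tuples):

# Illustrative only: shape of the state attributes produced above.
{
    'matches': {'Face': [(10, 20, 100, 100)]},
    'total_matches': 1,
}
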
|
||||
|
@ -20,7 +20,9 @@ from homeassistant.components.image_processing import (
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
CONF_DIGITS = 'digits'
|
||||
CONF_EXTRA_ARGUMENTS = 'extra_arguments'
|
||||
CONF_HEIGHT = 'height'
|
||||
CONF_ROTATE = 'rotate'
|
||||
CONF_SSOCR_BIN = 'ssocr_bin'
|
||||
CONF_THRESHOLD = 'threshold'
|
||||
CONF_WIDTH = 'width'
|
||||
@ -30,10 +32,12 @@ CONF_Y_POS = 'y_position'
|
||||
DEFAULT_BINARY = 'ssocr'
|
||||
|
||||
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
|
||||
vol.Optional(CONF_EXTRA_ARGUMENTS, default=''): cv.string,
|
||||
vol.Optional(CONF_DIGITS, default=-1): cv.positive_int,
|
||||
vol.Optional(CONF_HEIGHT, default=0): cv.positive_int,
|
||||
vol.Optional(CONF_SSOCR_BIN, default=DEFAULT_BINARY): cv.string,
|
||||
vol.Optional(CONF_THRESHOLD, default=0): cv.positive_int,
|
||||
vol.Optional(CONF_ROTATE, default=0): cv.positive_int,
|
||||
vol.Optional(CONF_WIDTH, default=0): cv.positive_int,
|
||||
vol.Optional(CONF_X_POS, default=0): cv.string,
|
||||
vol.Optional(CONF_Y_POS, default=0): cv.positive_int,
|
||||
@ -65,14 +69,18 @@ class ImageProcessingSsocr(ImageProcessingEntity):
|
||||
self._name = "SevenSegement OCR {0}".format(
|
||||
split_entity_id(camera_entity)[1])
|
||||
self._state = None
|
||||
|
||||
self.filepath = os.path.join(self.hass.config.config_dir, 'ocr.png')
|
||||
self._command = [
|
||||
config[CONF_SSOCR_BIN], 'erosion', 'make_mono', 'crop',
|
||||
str(config[CONF_X_POS]), str(config[CONF_Y_POS]),
|
||||
str(config[CONF_WIDTH]), str(config[CONF_HEIGHT]), '-t',
|
||||
str(config[CONF_THRESHOLD]), '-d', str(config[CONF_DIGITS]),
|
||||
self.filepath
|
||||
]
|
||||
crop = ['crop', str(config[CONF_X_POS]), str(config[CONF_Y_POS]),
|
||||
str(config[CONF_WIDTH]), str(config[CONF_HEIGHT])]
|
||||
digits = ['-d', str(config[CONF_DIGITS])]
|
||||
rotate = ['rotate', str(config[CONF_ROTATE])]
|
||||
threshold = ['-t', str(config[CONF_THRESHOLD])]
|
||||
extra_arguments = config[CONF_EXTRA_ARGUMENTS].split(' ')
|
||||
|
||||
self._command = [config[CONF_SSOCR_BIN]] + crop + digits + threshold +\
|
||||
rotate + extra_arguments
|
||||
self._command.append(self.filepath)
|
||||
|
||||
@property
|
||||
def device_class(self):
|
||||
|
@ -6,6 +6,8 @@ https://home-assistant.io/components/influxdb/
|
||||
"""
|
||||
import logging
|
||||
|
||||
import re
|
||||
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.const import (
|
||||
@ -96,7 +98,7 @@ def setup(hass, config):
|
||||
|
||||
try:
|
||||
influx = InfluxDBClient(**kwargs)
|
||||
influx.query("SHOW DIAGNOSTICS;", database=conf[CONF_DB_NAME])
|
||||
influx.query("SHOW SERIES LIMIT 1;", database=conf[CONF_DB_NAME])
|
||||
except exceptions.InfluxDBClientError as exc:
|
||||
_LOGGER.error("Database host is not accessible due to '%s', please "
|
||||
"check your entries in the configuration file and that "
|
||||
@ -147,6 +149,8 @@ def setup(hass, config):
|
||||
}
|
||||
]
|
||||
|
||||
non_digit_tail = re.compile(r'[\d.]+')
|
||||
non_decimal = re.compile(r'[^\d.]+')
|
||||
for key, value in state.attributes.items():
|
||||
if key != 'unit_of_measurement':
|
||||
# If the key is already in fields
|
||||
@ -161,6 +165,9 @@ def setup(hass, config):
|
||||
except (ValueError, TypeError):
|
||||
new_key = "{}_str".format(key)
|
||||
json_body[0]['fields'][new_key] = str(value)
|
||||
if non_digit_tail.match(json_body[0]['fields'][new_key]):
|
||||
json_body[0]['fields'][key] = float(
|
||||
non_decimal.sub('', value))
|
||||
|
||||
json_body[0]['tags'].update(tags)
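A quick sketch of what the two regular expressions above do to a mixed attribute value such as '15.5 dBm' (the value is made up for illustration; a value that does not start with a digit is stored as a string only):

# Illustrative only: how the regexes above extract a float from a string.
import re
non_digit_tail = re.compile(r'[\d.]+')  # value must start with digits to match
non_decimal = re.compile(r'[^\d.]+')    # strips everything but digits and dots
value = '15.5 dBm'
if non_digit_tail.match(value):
    print(float(non_decimal.sub('', value)))  # -> 15.5
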
|
||||
|
||||
|
74 homeassistant/components/juicenet.py (new file)
@ -0,0 +1,74 @@
|
||||
"""
|
||||
Support for Juicenet cloud.
|
||||
|
||||
For more details about this component, please refer to the documentation at
|
||||
https://home-assistant.io/components/juicenet
|
||||
"""
|
||||
|
||||
import logging
|
||||
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.helpers import discovery
|
||||
from homeassistant.const import CONF_ACCESS_TOKEN
|
||||
from homeassistant.helpers.entity import Entity
|
||||
import homeassistant.helpers.config_validation as cv
|
||||
|
||||
REQUIREMENTS = ['python-juicenet==0.0.5']
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
DOMAIN = 'juicenet'
|
||||
|
||||
CONFIG_SCHEMA = vol.Schema({
|
||||
DOMAIN: vol.Schema({
|
||||
vol.Required(CONF_ACCESS_TOKEN): cv.string
|
||||
})
|
||||
}, extra=vol.ALLOW_EXTRA)
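A minimal sketch of the configuration dict the schema above accepts (the token value is a placeholder):

# Illustrative only: validate a juicenet config against CONFIG_SCHEMA.
conf = CONFIG_SCHEMA({'juicenet': {'access_token': 'abc123'}})
assert conf['juicenet']['access_token'] == 'abc123'
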
|
||||
|
||||
|
||||
def setup(hass, config):
|
||||
"""Set up the Juicenet component."""
|
||||
import pyjuicenet
|
||||
|
||||
hass.data[DOMAIN] = {}
|
||||
|
||||
access_token = config[DOMAIN].get(CONF_ACCESS_TOKEN)
|
||||
hass.data[DOMAIN]['api'] = pyjuicenet.Api(access_token)
|
||||
|
||||
discovery.load_platform(hass, 'sensor', DOMAIN, {}, config)
|
||||
return True
|
||||
|
||||
|
||||
class JuicenetDevice(Entity):
|
||||
"""Represent a base Juicenet device."""
|
||||
|
||||
def __init__(self, device, sensor_type, hass):
|
||||
"""Initialise the sensor."""
|
||||
self.hass = hass
|
||||
self.device = device
|
||||
self.type = sensor_type
|
||||
|
||||
@property
|
||||
def name(self):
|
||||
"""Return the name of the device."""
|
||||
return self.device.name()
|
||||
|
||||
def update(self):
|
||||
"""Update state of the device."""
|
||||
self.device.update_state()
|
||||
|
||||
@property
|
||||
def _manufacturer_device_id(self):
|
||||
"""Return the manufacturer device id."""
|
||||
return self.device.id()
|
||||
|
||||
@property
|
||||
def _token(self):
|
||||
"""Return the device API token."""
|
||||
return self.device.token()
|
||||
|
||||
@property
|
||||
def unique_id(self):
|
||||
"""Return an unique ID."""
|
||||
return "{}-{}".format(self.device.id(), self.type)
|
@ -26,6 +26,7 @@ from homeassistant.helpers.restore_state import async_restore_state
|
||||
import homeassistant.util.color as color_util
|
||||
|
||||
DOMAIN = "light"
|
||||
DEPENDENCIES = ['group']
|
||||
SCAN_INTERVAL = timedelta(seconds=30)
|
||||
|
||||
GROUP_NAME_ALL_LIGHTS = 'all lights'
|
||||
|
@@ -4,7 +4,6 @@ Support for the LIFX platform that implements lights.
For more details about this platform, please refer to the documentation at
https://home-assistant.io/components/light.lifx/
"""
import colorsys
import logging
import asyncio
import sys
@@ -24,8 +23,6 @@ from homeassistant.components.light import (
    SUPPORT_XY_COLOR, SUPPORT_TRANSITION, SUPPORT_EFFECT,
    preprocess_turn_on_alternatives)
from homeassistant.config import load_yaml_config_file
from homeassistant.util.color import (
    color_temperature_mired_to_kelvin, color_temperature_kelvin_to_mired)
from homeassistant import util
from homeassistant.core import callback
from homeassistant.helpers.event import async_track_point_in_utc_time
@@ -37,7 +34,7 @@ from . import effects as lifx_effects

_LOGGER = logging.getLogger(__name__)

REQUIREMENTS = ['aiolifx==0.4.7']
REQUIREMENTS = ['aiolifx==0.4.8']

UDP_BROADCAST_PORT = 56700

@@ -49,16 +46,15 @@ CONF_SERVER = 'server'
SERVICE_LIFX_SET_STATE = 'lifx_set_state'

ATTR_HSBK = 'hsbk'
ATTR_INFRARED = 'infrared'
ATTR_POWER = 'power'

BYTE_MAX = 255
SHORT_MAX = 65535

PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
    vol.Optional(CONF_SERVER, default='0.0.0.0'): cv.string,
})

LIFX_SET_STATE_SCHEMA = LIGHT_TURN_ON_SCHEMA.extend({
    ATTR_INFRARED: vol.All(vol.Coerce(int), vol.Clamp(min=0, max=255)),
    ATTR_POWER: cv.boolean,
})

@@ -200,15 +196,14 @@ class AwaitAioLIFX:
        return self.message


def convert_rgb_to_hsv(rgb):
    """Convert Home Assistant RGB values to HSV values."""
    red, green, blue = [_ / BYTE_MAX for _ in rgb]
def convert_8_to_16(value):
    """Scale an 8 bit level into 16 bits."""
    return (value << 8) | value

    hue, saturation, brightness = colorsys.rgb_to_hsv(red, green, blue)

    return [int(hue * SHORT_MAX),
            int(saturation * SHORT_MAX),
            int(brightness * SHORT_MAX)]
def convert_16_to_8(value):
    """Scale a 16 bit level into 8 bits."""
    return value >> 8


class LIFXLight(Light):
@@ -260,14 +255,14 @@ class LIFXLight(Light):
    @property
    def brightness(self):
        """Return the brightness of this light between 0..255."""
        brightness = int(self._bri / (BYTE_MAX + 1))
        brightness = convert_16_to_8(self._bri)
        _LOGGER.debug("brightness: %d", brightness)
        return brightness

    @property
    def color_temp(self):
        """Return the color temperature."""
        temperature = color_temperature_kelvin_to_mired(self._kel)
        temperature = color_util.color_temperature_kelvin_to_mired(self._kel)

        _LOGGER.debug("color_temp: %d", temperature)
        return temperature
@@ -280,7 +275,7 @@ class LIFXLight(Light):
            kelvin = 6500
        else:
            kelvin = 9000
        return math.floor(color_temperature_kelvin_to_mired(kelvin))
        return math.floor(color_util.color_temperature_kelvin_to_mired(kelvin))

    @property
    def max_mireds(self):
@@ -290,7 +285,7 @@ class LIFXLight(Light):
            kelvin = 2700
        else:
            kelvin = 2500
        return math.ceil(color_temperature_kelvin_to_mired(kelvin))
        return math.ceil(color_util.color_temperature_kelvin_to_mired(kelvin))

    @property
    def is_on(self):
@@ -370,6 +365,9 @@ class LIFXLight(Light):
            yield from lifx_effects.default_effect(self, **kwargs)
            return

        if ATTR_INFRARED in kwargs:
            self.device.set_infrared(convert_8_to_16(kwargs[ATTR_INFRARED]))

        if ATTR_TRANSITION in kwargs:
            fade = int(kwargs[ATTR_TRANSITION] * 1000)
        else:
@@ -446,7 +444,9 @@ class LIFXLight(Light):

        if ATTR_RGB_COLOR in kwargs:
            hue, saturation, brightness = \
                convert_rgb_to_hsv(kwargs[ATTR_RGB_COLOR])
                color_util.color_RGB_to_hsv(*kwargs[ATTR_RGB_COLOR])
            saturation = convert_8_to_16(saturation)
            brightness = convert_8_to_16(brightness)
            changed_color = True
        else:
            hue = self._hue
@@ -455,12 +455,12 @@ class LIFXLight(Light):

        if ATTR_XY_COLOR in kwargs:
            hue, saturation = color_util.color_xy_to_hs(*kwargs[ATTR_XY_COLOR])
            saturation = saturation * (BYTE_MAX + 1)
            saturation = convert_8_to_16(saturation)
            changed_color = True

        # When color or temperature is set, use a default value for the other
        if ATTR_COLOR_TEMP in kwargs:
            kelvin = int(color_temperature_mired_to_kelvin(
            kelvin = int(color_util.color_temperature_mired_to_kelvin(
                kwargs[ATTR_COLOR_TEMP]))
            if not changed_color:
                saturation = 0
@@ -472,7 +472,7 @@ class LIFXLight(Light):
            kelvin = self._kel

        if ATTR_BRIGHTNESS in kwargs:
            brightness = kwargs[ATTR_BRIGHTNESS] * (BYTE_MAX + 1)
            brightness = convert_8_to_16(kwargs[ATTR_BRIGHTNESS])
            changed_color = True
        else:
            brightness = self._bri
@@ -491,12 +491,8 @@ class LIFXLight(Light):
        self._bri = bri
        self._kel = kel

        red, green, blue = colorsys.hsv_to_rgb(
            hue / SHORT_MAX, sat / SHORT_MAX, bri / SHORT_MAX)

        red = int(red * BYTE_MAX)
        green = int(green * BYTE_MAX)
        blue = int(blue * BYTE_MAX)
        red, green, blue = color_util.color_hsv_to_RGB(
            hue, convert_16_to_8(sat), convert_16_to_8(bri))

        _LOGGER.debug("set_color: %d %d %d %d [%d %d %d]",
                      hue, sat, bri, kel, red, green, blue)
@@ -22,9 +22,18 @@ SERVICE_EFFECT_STOP = 'lifx_effect_stop'
ATTR_POWER_ON = 'power_on'
ATTR_PERIOD = 'period'
ATTR_CYCLES = 'cycles'
ATTR_MODE = 'mode'
ATTR_SPREAD = 'spread'
ATTR_CHANGE = 'change'

MODE_BLINK = 'blink'
MODE_BREATHE = 'breathe'
MODE_PING = 'ping'
MODE_STROBE = 'strobe'
MODE_SOLID = 'solid'

MODES = [MODE_BLINK, MODE_BREATHE, MODE_PING, MODE_STROBE, MODE_SOLID]

# aiolifx waveform modes
WAVEFORM_SINE = 1
WAVEFORM_PULSE = 4
@@ -44,13 +53,13 @@ LIFX_EFFECT_BREATHE_SCHEMA = LIFX_EFFECT_SCHEMA.extend({
                            vol.Coerce(tuple)),
    ATTR_COLOR_TEMP: vol.All(vol.Coerce(int), vol.Range(min=1)),
    ATTR_KELVIN: vol.All(vol.Coerce(int), vol.Range(min=0)),
    vol.Optional(ATTR_PERIOD, default=1.0):
        vol.All(vol.Coerce(float), vol.Range(min=0.05)),
    vol.Optional(ATTR_CYCLES, default=1.0):
        vol.All(vol.Coerce(float), vol.Range(min=1)),
    ATTR_PERIOD: vol.All(vol.Coerce(float), vol.Range(min=0.05)),
    ATTR_CYCLES: vol.All(vol.Coerce(float), vol.Range(min=1)),
})

LIFX_EFFECT_PULSE_SCHEMA = LIFX_EFFECT_BREATHE_SCHEMA
LIFX_EFFECT_PULSE_SCHEMA = LIFX_EFFECT_BREATHE_SCHEMA.extend({
    vol.Optional(ATTR_MODE, default=MODE_BLINK): vol.In(MODES),
})

LIFX_EFFECT_COLORLOOP_SCHEMA = LIFX_EFFECT_SCHEMA.extend({
    ATTR_BRIGHTNESS: VALID_BRIGHTNESS,
@@ -217,14 +226,13 @@ class LIFXEffect(object):
        return [random.randint(0, 65535), 65535, 0, NEUTRAL_WHITE]


class LIFXEffectBreathe(LIFXEffect):
    """Representation of a breathe effect."""
class LIFXEffectPulse(LIFXEffect):
    """Representation of a pulse effect."""

    def __init__(self, hass, lights):
        """Initialize the breathe effect."""
        super(LIFXEffectBreathe, self).__init__(hass, lights)
        self.name = SERVICE_EFFECT_BREATHE
        self.waveform = WAVEFORM_SINE
        """Initialize the pulse effect."""
        super().__init__(hass, lights)
        self.name = SERVICE_EFFECT_PULSE

    @asyncio.coroutine
    def async_play(self, **kwargs):
@@ -235,13 +243,42 @@ class LIFXEffectBreathe(LIFXEffect):
    @asyncio.coroutine
    def async_light_play(self, light, **kwargs):
        """Play a light effect on the bulb."""
        period = kwargs[ATTR_PERIOD]
        cycles = kwargs[ATTR_CYCLES]
        hsbk, color_changed = light.find_hsbk(**kwargs)

        if kwargs[ATTR_MODE] == MODE_STROBE:
            # Strobe must flash from a dark color
            light.device.set_color([0, 0, 0, NEUTRAL_WHITE])
            yield from asyncio.sleep(0.1)
            default_period = 0.1
            default_cycles = 10
        else:
            default_period = 1.0
            default_cycles = 1

        period = kwargs.get(ATTR_PERIOD, default_period)
        cycles = kwargs.get(ATTR_CYCLES, default_cycles)

        # Breathe has a special waveform
        if kwargs[ATTR_MODE] == MODE_BREATHE:
            waveform = WAVEFORM_SINE
        else:
            waveform = WAVEFORM_PULSE

        # Ping and solid have special duty cycles
        if kwargs[ATTR_MODE] == MODE_PING:
            ping_duration = int(5000 - min(2500, 300*period))
            duty_cycle = 2**15 - ping_duration
        elif kwargs[ATTR_MODE] == MODE_SOLID:
            duty_cycle = -2**15
        else:
            duty_cycle = 0

        # Set default effect color based on current setting
        if not color_changed:
            if light.lifxwhite or hsbk[1] < 65536/2:
            if kwargs[ATTR_MODE] == MODE_STROBE:
                # Strobe: cold white
                hsbk = [hsbk[0], 0, 65535, 5600]
            elif light.lifxwhite or hsbk[1] < 65536/2:
                # White: toggle brightness
                hsbk[2] = 65535 if hsbk[2] < 65536/2 else 0
            else:
@@ -254,8 +291,8 @@ class LIFXEffectBreathe(LIFXEffect):
            'color': hsbk,
            'period': int(period*1000),
            'cycles': cycles,
            'duty_cycle': 0,
            'waveform': self.waveform,
            'duty_cycle': duty_cycle,
            'waveform': waveform,
        }
        light.device.set_waveform(args)

@@ -269,14 +306,21 @@ class LIFXEffectBreathe(LIFXEffect):
        return [hsbk[0], hsbk[1], 0, hsbk[2]]


class LIFXEffectPulse(LIFXEffectBreathe):
    """Representation of a pulse effect."""
class LIFXEffectBreathe(LIFXEffectPulse):
    """Representation of a breathe effect."""

    def __init__(self, hass, lights):
        """Initialize the pulse effect."""
        super(LIFXEffectPulse, self).__init__(hass, lights)
        self.name = SERVICE_EFFECT_PULSE
        self.waveform = WAVEFORM_PULSE
        """Initialize the breathe effect."""
        super().__init__(hass, lights)
        self.name = SERVICE_EFFECT_BREATHE
        _LOGGER.warning("'lifx_effect_breathe' is deprecated. Please use "
                        "'lifx_effect_pulse' with 'mode: breathe'")

    @asyncio.coroutine
    def async_perform(self, **kwargs):
        """Prepare all lights for the effect."""
        kwargs[ATTR_MODE] = MODE_BREATHE
        yield from super().async_perform(**kwargs)


class LIFXEffectColorloop(LIFXEffect):
@@ -284,7 +328,7 @@ class LIFXEffectColorloop(LIFXEffect):

    def __init__(self, hass, lights):
        """Initialize the colorloop effect."""
        super(LIFXEffectColorloop, self).__init__(hass, lights)
        super().__init__(hass, lights)
        self.name = SERVICE_EFFECT_COLORLOOP

    @asyncio.coroutine
@@ -335,7 +379,7 @@ class LIFXEffectStop(LIFXEffect):

    def __init__(self, hass, lights):
        """Initialize the stop effect."""
        super(LIFXEffectStop, self).__init__(hass, lights)
        super().__init__(hass, lights)
        self.name = SERVICE_EFFECT_STOP

    @asyncio.coroutine
@@ -9,6 +9,10 @@ lifx_set_state:
    '...':
      description: All turn_on parameters can be used to specify a color

    infrared:
      description: Automatic infrared level (0..255) when light brightness is low
      example: 255

    transition:
      description: Duration in seconds it takes to get to the final state
      example: 10
@@ -19,36 +23,7 @@ lifx_set_state:


lifx_effect_breathe:
  description: Run a breathe effect by fading to a color and back.

  fields:
    entity_id:
      description: Name(s) of entities to run the effect on
      example: 'light.kitchen'

    brightness:
      description: Number between 0..255 indicating brightness when the effect peaks
      example: 120

    color_name:
      description: A human readable color name
      example: 'red'

    rgb_color:
      description: Color for the fade in RGB-format
      example: '[255, 100, 100]'

    period:
      description: Duration of the effect in seconds (default 1.0)
      example: 3

    cycles:
      description: Number of times the effect should run (default 1.0)
      example: 2

    power_on:
      description: Powered off lights are temporarily turned on during the effect (default True)
      example: False
  description: Deprecated, use lifx_effect_pulse

lifx_effect_pulse:
  description: Run a flash effect by changing to a color and back.
@@ -58,6 +33,10 @@ lifx_effect_pulse:
      description: Name(s) of entities to run the effect on
      example: 'light.kitchen'

    mode:
      description: 'Decides how colors are changed. Possible values: blink, breathe, ping, strobe, solid'
      example: strobe

    brightness:
      description: Number between 0..255 indicating brightness of the temporary color
      example: 120
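As a usage sketch (not part of the commit): the pulse effect documented above is called like any other light service from an automation. The entity id below is a placeholder, and the service is assumed to be registered under the light domain as described in the fields above.

    - service: light.lifx_effect_pulse
      data:
        entity_id: light.kitchen
        mode: strobe
        brightness: 120
        period: 0.1
        cycles: 10
        power_on: true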
@@ -11,14 +11,14 @@ from homeassistant.components.light import (
from homeassistant.components.lutron import (
    LutronDevice, LUTRON_DEVICES, LUTRON_CONTROLLER)

DEPENDENCIES = ['lutron']

_LOGGER = logging.getLogger(__name__)

DEPENDENCIES = ['lutron']


# pylint: disable=unused-argument
def setup_platform(hass, config, add_devices, discovery_info=None):
    """Set up Lutron lights."""
    """Set up the Lutron lights."""
    devs = []
    for (area_name, device) in hass.data[LUTRON_DEVICES]['light']:
        dev = LutronLight(area_name, device, hass.data[LUTRON_CONTROLLER])
@@ -157,8 +157,6 @@ class Luminary(Light):

    def turn_on(self, **kwargs):
        """Turn the device on."""
        self._luminary.set_onoff(1)

        if ATTR_TRANSITION in kwargs:
            transition = int(kwargs[ATTR_TRANSITION] * 10)
            _LOGGER.debug("turn_on requested transition time for light: "
@@ -168,6 +166,16 @@ class Luminary(Light):
            _LOGGER.debug("turn_on requested transition time for light: "
                          "%s is: %s", self._name, transition)

        if ATTR_BRIGHTNESS in kwargs:
            self._brightness = kwargs[ATTR_BRIGHTNESS]
            _LOGGER.debug("turn_on requested brightness for light: %s is: %s ",
                          self._name, self._brightness)
            self._luminary.set_luminance(
                int(self._brightness / 2.55),
                transition)
        else:
            self._luminary.set_onoff(1)

        if ATTR_RGB_COLOR in kwargs:
            red, green, blue = kwargs[ATTR_RGB_COLOR]
            _LOGGER.debug("turn_on requested ATTR_RGB_COLOR for light:"
@@ -191,14 +199,6 @@ class Luminary(Light):
                          "%s: %s", self._name, kelvin)
            self._luminary.set_temperature(kelvin, transition)

        if ATTR_BRIGHTNESS in kwargs:
            self._brightness = kwargs[ATTR_BRIGHTNESS]
            _LOGGER.debug("turn_on requested brightness for light: %s is: %s ",
                          self._name, self._brightness)
            self._luminary.set_luminance(
                int(self._brightness / 2.55),
                transition)

        if ATTR_EFFECT in kwargs:
            effect = kwargs.get(ATTR_EFFECT)
            if effect == EFFECT_RANDOM:
@@ -59,7 +59,7 @@ def async_setup_platform(hass, config, async_add_devices, discovery_info=None):
        state_template = device_config[CONF_VALUE_TEMPLATE]
        on_action = device_config[CONF_ON_ACTION]
        off_action = device_config[CONF_OFF_ACTION]
        level_action = device_config[CONF_LEVEL_ACTION]
        level_action = device_config.get(CONF_LEVEL_ACTION)
        level_template = device_config[CONF_LEVEL_TEMPLATE]

        template_entity_ids = set()
@@ -108,6 +108,8 @@ class LightTemplate(Light):
        self._template = state_template
        self._on_script = Script(hass, on_action)
        self._off_script = Script(hass, off_action)
        self._level_script = None
        if level_action is not None:
            self._level_script = Script(hass, level_action)
        self._level_template = level_template
@@ -7,7 +7,8 @@ https://home-assistant.io/components/light.vera/
import logging

from homeassistant.components.light import (
    ATTR_BRIGHTNESS, ENTITY_ID_FORMAT, Light, SUPPORT_BRIGHTNESS)
    ATTR_BRIGHTNESS, ATTR_RGB_COLOR, ENTITY_ID_FORMAT,
    SUPPORT_BRIGHTNESS, SUPPORT_RGB_COLOR, Light)
from homeassistant.components.vera import (
    VERA_CONTROLLER, VERA_DEVICES, VeraDevice)

@@ -15,8 +16,6 @@ _LOGGER = logging.getLogger(__name__)

DEPENDENCIES = ['vera']

SUPPORT_VERA = SUPPORT_BRIGHTNESS


# pylint: disable=unused-argument
def setup_platform(hass, config, add_devices, discovery_info=None):
@@ -31,23 +30,34 @@ class VeraLight(VeraDevice, Light):
    def __init__(self, vera_device, controller):
        """Initialize the light."""
        self._state = False
        self._color = None
        self._brightness = None
        VeraDevice.__init__(self, vera_device, controller)
        self.entity_id = ENTITY_ID_FORMAT.format(self.vera_id)

    @property
    def brightness(self):
        """Return the brightness of the light."""
        if self.vera_device.is_dimmable:
            return self.vera_device.get_brightness()
        return self._brightness

    @property
    def rgb_color(self):
        """Return the color of the light."""
        return self._color

    @property
    def supported_features(self):
        """Flag supported features."""
        return SUPPORT_VERA
        if self._color:
            return SUPPORT_BRIGHTNESS | SUPPORT_RGB_COLOR
        else:
            return SUPPORT_BRIGHTNESS

    def turn_on(self, **kwargs):
        """Turn the light on."""
        if ATTR_BRIGHTNESS in kwargs and self.vera_device.is_dimmable:
        if ATTR_RGB_COLOR in kwargs and self._color:
            self.vera_device.set_color(kwargs[ATTR_RGB_COLOR])
        elif ATTR_BRIGHTNESS in kwargs and self.vera_device.is_dimmable:
            self.vera_device.set_brightness(kwargs[ATTR_BRIGHTNESS])
        else:
            self.vera_device.switch_on()
@@ -69,3 +79,5 @@ class VeraLight(VeraDevice, Light):
    def update(self):
        """Call to update state."""
        self._state = self.vera_device.is_switched_on()
        self._brightness = self.vera_device.get_brightness()
        self._color = self.vera_device.get_color()
@@ -16,13 +16,13 @@ from homeassistant.util.color import (
from homeassistant.const import CONF_DEVICES, CONF_NAME
from homeassistant.components.light import (
    ATTR_BRIGHTNESS, ATTR_RGB_COLOR, ATTR_TRANSITION, ATTR_COLOR_TEMP,
    ATTR_FLASH, FLASH_SHORT, FLASH_LONG,
    ATTR_FLASH, FLASH_SHORT, FLASH_LONG, ATTR_EFFECT,
    SUPPORT_BRIGHTNESS, SUPPORT_RGB_COLOR, SUPPORT_TRANSITION,
    SUPPORT_COLOR_TEMP, SUPPORT_FLASH,
    SUPPORT_COLOR_TEMP, SUPPORT_FLASH, SUPPORT_EFFECT,
    Light, PLATFORM_SCHEMA)
import homeassistant.helpers.config_validation as cv

REQUIREMENTS = ['yeelight==0.2.2']
REQUIREMENTS = ['yeelight==0.3.0']

_LOGGER = logging.getLogger(__name__)

@@ -50,8 +50,44 @@ SUPPORT_YEELIGHT = (SUPPORT_BRIGHTNESS |

SUPPORT_YEELIGHT_RGB = (SUPPORT_YEELIGHT |
                        SUPPORT_RGB_COLOR |
                        SUPPORT_EFFECT |
                        SUPPORT_COLOR_TEMP)

EFFECT_DISCO = "Disco"
EFFECT_TEMP = "Slow Temp"
EFFECT_STROBE = "Strobe epilepsy!"
EFFECT_STROBE_COLOR = "Strobe color"
EFFECT_ALARM = "Alarm"
EFFECT_POLICE = "Police"
EFFECT_POLICE2 = "Police2"
EFFECT_CHRISTMAS = "Christmas"
EFFECT_RGB = "RGB"
EFFECT_RANDOM_LOOP = "Random Loop"
EFFECT_FAST_RANDOM_LOOP = "Fast Random Loop"
EFFECT_SLOWDOWN = "Slowdown"
EFFECT_WHATSAPP = "WhatsApp"
EFFECT_FACEBOOK = "Facebook"
EFFECT_TWITTER = "Twitter"
EFFECT_STOP = "Stop"

YEELIGHT_EFFECT_LIST = [
    EFFECT_DISCO,
    EFFECT_TEMP,
    EFFECT_STROBE,
    EFFECT_STROBE_COLOR,
    EFFECT_ALARM,
    EFFECT_POLICE,
    EFFECT_POLICE2,
    EFFECT_CHRISTMAS,
    EFFECT_RGB,
    EFFECT_RANDOM_LOOP,
    EFFECT_FAST_RANDOM_LOOP,
    EFFECT_SLOWDOWN,
    EFFECT_WHATSAPP,
    EFFECT_FACEBOOK,
    EFFECT_TWITTER,
    EFFECT_STOP]


def _cmd(func):
    """Define a wrapper to catch exceptions from the bulb."""
@@ -116,6 +152,11 @@ class YeelightLight(Light):
        """Flag supported features."""
        return self._supported_features

    @property
    def effect_list(self):
        """Return the list of supported effects."""
        return YEELIGHT_EFFECT_LIST

    @property
    def unique_id(self) -> str:
        """Return the ID of this light."""
@@ -286,6 +327,54 @@ class YeelightLight(Light):
        except BulbException as ex:
            _LOGGER.error("Unable to set flash: %s", ex)

    @_cmd
    def set_effect(self, effect) -> None:
        """Activate effect."""
        if effect:
            from yeelight import (Flow, BulbException)
            from yeelight.transitions import (disco, temp, strobe, pulse,
                                              strobe_color, alarm, police,
                                              police2, christmas, rgb,
                                              randomloop, slowdown)
            if effect == EFFECT_STOP:
                self._bulb.stop_flow()
                return
            if effect == EFFECT_DISCO:
                flow = Flow(count=0, transitions=disco())
            if effect == EFFECT_TEMP:
                flow = Flow(count=0, transitions=temp())
            if effect == EFFECT_STROBE:
                flow = Flow(count=0, transitions=strobe())
            if effect == EFFECT_STROBE_COLOR:
                flow = Flow(count=0, transitions=strobe_color())
            if effect == EFFECT_ALARM:
                flow = Flow(count=0, transitions=alarm())
            if effect == EFFECT_POLICE:
                flow = Flow(count=0, transitions=police())
            if effect == EFFECT_POLICE2:
                flow = Flow(count=0, transitions=police2())
            if effect == EFFECT_CHRISTMAS:
                flow = Flow(count=0, transitions=christmas())
            if effect == EFFECT_RGB:
                flow = Flow(count=0, transitions=rgb())
            if effect == EFFECT_RANDOM_LOOP:
                flow = Flow(count=0, transitions=randomloop())
            if effect == EFFECT_FAST_RANDOM_LOOP:
                flow = Flow(count=0, transitions=randomloop(duration=250))
            if effect == EFFECT_SLOWDOWN:
                flow = Flow(count=0, transitions=slowdown())
            if effect == EFFECT_WHATSAPP:
                flow = Flow(count=2, transitions=pulse(37, 211, 102))
            if effect == EFFECT_FACEBOOK:
                flow = Flow(count=2, transitions=pulse(59, 89, 152))
            if effect == EFFECT_TWITTER:
                flow = Flow(count=2, transitions=pulse(0, 172, 237))

            try:
                self._bulb.start_flow(flow)
            except BulbException as ex:
                _LOGGER.error("Unable to set effect: %s", ex)

    def turn_on(self, **kwargs) -> None:
        """Turn the bulb on."""
        import yeelight
@@ -293,6 +382,7 @@ class YeelightLight(Light):
        colortemp = kwargs.get(ATTR_COLOR_TEMP)
        rgb = kwargs.get(ATTR_RGB_COLOR)
        flash = kwargs.get(ATTR_FLASH)
        effect = kwargs.get(ATTR_EFFECT)

        duration = int(self.config[CONF_TRANSITION])  # in ms
        if ATTR_TRANSITION in kwargs:  # passed kwarg overrides config
@@ -317,6 +407,7 @@ class YeelightLight(Light):
            self.set_colortemp(colortemp, duration)
            self.set_brightness(brightness, duration)
            self.set_flash(flash)
            self.set_effect(effect)
        except yeelight.BulbException as ex:
            _LOGGER.error("Unable to set bulb properties: %s", ex)
            return
@@ -47,11 +47,11 @@ TEMP_COLD_HASS = (TEMP_COLOR_MAX - TEMP_COLOR_MIN) / 3 + TEMP_COLOR_MIN

def get_device(node, values, node_config, **kwargs):
    """Create Z-Wave entity device."""
    name = '{}.{}'.format(DOMAIN, zwave.object_id(values.primary))
    refresh = node_config.get(zwave.CONF_REFRESH_VALUE)
    delay = node_config.get(zwave.CONF_REFRESH_DELAY)
    _LOGGER.debug("name=%s node_config=%s CONF_REFRESH_VALUE=%s"
                  " CONF_REFRESH_DELAY=%s", name, node_config, refresh, delay)
    _LOGGER.debug("node=%d value=%d node_config=%s CONF_REFRESH_VALUE=%s"
                  " CONF_REFRESH_DELAY=%s", node.node_id,
                  values.primary.value_id, node_config, refresh, delay)

    if node.has_command_class(zwave.const.COMMAND_CLASS_SWITCH_COLOR):
        return ZwaveColorLight(values, refresh, delay)
@@ -25,6 +25,8 @@ from homeassistant.components import group
ATTR_CHANGED_BY = 'changed_by'

DOMAIN = 'lock'
DEPENDENCIES = ['group']
SCAN_INTERVAL = timedelta(seconds=30)

ENTITY_ID_ALL_LOCKS = group.ENTITY_ID_FORMAT.format('all_locks')
ENTITY_ID_FORMAT = DOMAIN + '.{}'
@@ -33,8 +35,6 @@ GROUP_NAME_ALL_LOCKS = 'all locks'

MIN_TIME_BETWEEN_SCANS = timedelta(seconds=10)

SCAN_INTERVAL = timedelta(seconds=30)

LOCK_SERVICE_SCHEMA = vol.Schema({
    vol.Optional(ATTR_ENTITY_ID): cv.entity_ids,
    vol.Optional(ATTR_CODE): cv.string,
homeassistant/components/lock/sesame.py (new file, 89 lines)
@@ -0,0 +1,89 @@
"""
Support for Sesame, by CANDY HOUSE.

For more details about this platform, please refer to the documentation
https://home-assistant.io/components/lock.sesame/
"""
from typing import Callable  # noqa
import voluptuous as vol

import homeassistant.helpers.config_validation as cv
from homeassistant.components.lock import LockDevice, PLATFORM_SCHEMA
from homeassistant.const import (
    ATTR_BATTERY_LEVEL, CONF_EMAIL, CONF_PASSWORD,
    STATE_LOCKED, STATE_UNLOCKED)
from homeassistant.helpers.typing import ConfigType

REQUIREMENTS = ['pysesame==0.1.0']

ATTR_DEVICE_ID = 'device_id'

PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
    vol.Required(CONF_EMAIL): cv.string,
    vol.Required(CONF_PASSWORD): cv.string
})


# pylint: disable=unused-argument
def setup_platform(hass, config: ConfigType,
                   add_devices: Callable[[list], None], discovery_info=None):
    """Set up the Sesame platform."""
    import pysesame

    email = config.get(CONF_EMAIL)
    password = config.get(CONF_PASSWORD)

    add_devices([SesameDevice(sesame) for
                 sesame in pysesame.get_sesames(email, password)])


class SesameDevice(LockDevice):
    """Representation of a Sesame device."""

    _sesame = None

    def __init__(self, sesame: object) -> None:
        """Initialize the Sesame device."""
        self._sesame = sesame

    @property
    def name(self) -> str:
        """Return the name of the device."""
        return self._sesame.nickname

    @property
    def available(self) -> bool:
        """Return True if entity is available."""
        return self._sesame.api_enabled

    @property
    def is_locked(self) -> bool:
        """Return True if the device is currently locked, else False."""
        return not self._sesame.is_unlocked

    @property
    def state(self) -> str:
        """Get the state of the device."""
        if self._sesame.is_unlocked:
            return STATE_UNLOCKED
        return STATE_LOCKED

    def lock(self, **kwargs) -> None:
        """Lock the device."""
        self._sesame.lock()

    def unlock(self, **kwargs) -> None:
        """Unlock the device."""
        self._sesame.unlock()

    def update(self) -> None:
        """Update the internal state of the device."""
        self._sesame.update_state()

    @property
    def device_state_attributes(self) -> dict:
        """Return the state attributes."""
        attributes = {}
        attributes[ATTR_DEVICE_ID] = self._sesame.device_id
        attributes[ATTR_BATTERY_LEVEL] = self._sesame.battery
        return attributes
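A minimal configuration sketch for the new platform, following the PLATFORM_SCHEMA above (the credentials are placeholders):

    lock:
      - platform: sesame
        email: user@example.com
        password: YOUR_PASSWORD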
@@ -7,6 +7,10 @@ https://home-assistant.io/components/lutron/
import asyncio
import logging

import voluptuous as vol

import homeassistant.helpers.config_validation as cv
from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_USERNAME
from homeassistant.helpers import discovery
from homeassistant.helpers.entity import Entity

@@ -19,6 +23,14 @@ _LOGGER = logging.getLogger(__name__)
LUTRON_CONTROLLER = 'lutron_controller'
LUTRON_DEVICES = 'lutron_devices'

CONFIG_SCHEMA = vol.Schema({
    DOMAIN: vol.Schema({
        vol.Required(CONF_HOST): cv.string,
        vol.Required(CONF_PASSWORD): cv.string,
        vol.Required(CONF_USERNAME): cv.string,
    })
}, extra=vol.ALLOW_EXTRA)


def setup(hass, base_config):
    """Set up the Lutron component."""
@@ -29,13 +41,11 @@ def setup(hass, base_config):

    config = base_config.get(DOMAIN)
    hass.data[LUTRON_CONTROLLER] = Lutron(
        config['lutron_host'],
        config['lutron_user'],
        config['lutron_password']
    )
        config[CONF_HOST], config[CONF_USERNAME], config[CONF_PASSWORD])

    hass.data[LUTRON_CONTROLLER].load_xml_db()
    hass.data[LUTRON_CONTROLLER].connect()
    _LOGGER.info("Connected to Main Repeater at %s", config['lutron_host'])
    _LOGGER.info("Connected to main repeater at %s", config[CONF_HOST])

    # Sort our devices into types
    for area in hass.data[LUTRON_CONTROLLER].areas:
homeassistant/components/mailgun.py (new file, 51 lines)
@@ -0,0 +1,51 @@
"""
Support for Mailgun.

For more details about this component, please refer to the documentation at
https://home-assistant.io/components/mailgun/
"""
import voluptuous as vol

import homeassistant.helpers.config_validation as cv
from homeassistant.const import CONF_API_KEY, CONF_DOMAIN
from homeassistant.core import callback
from homeassistant.components.http import HomeAssistantView


DOMAIN = 'mailgun'
API_PATH = '/api/{}'.format(DOMAIN)
DATA_MAILGUN = DOMAIN
DEPENDENCIES = ['http']
MESSAGE_RECEIVED = '{}_message_received'.format(DOMAIN)
CONF_SANDBOX = 'sandbox'
DEFAULT_SANDBOX = False

CONFIG_SCHEMA = vol.Schema({
    DOMAIN: vol.Schema({
        vol.Required(CONF_API_KEY): cv.string,
        vol.Required(CONF_DOMAIN): cv.string,
        vol.Optional(CONF_SANDBOX, default=DEFAULT_SANDBOX): cv.boolean
    }),
}, extra=vol.ALLOW_EXTRA)


def setup(hass, config):
    """Set up the Mailgun component."""
    hass.data[DATA_MAILGUN] = config[DOMAIN]
    hass.http.register_view(MailgunReceiveMessageView())
    return True


class MailgunReceiveMessageView(HomeAssistantView):
    """Handle data from Mailgun inbound messages."""

    url = API_PATH
    name = 'api:{}'.format(DOMAIN)

    @callback
    def post(self, request):  # pylint: disable=no-self-use
        """Handle Mailgun message POST."""
        hass = request.app['hass']
        data = yield from request.post()
        hass.bus.async_fire(MESSAGE_RECEIVED, dict(data))
        return
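A configuration sketch for the new component, based on the CONFIG_SCHEMA above (api key and domain are placeholders):

    mailgun:
      api_key: YOUR_MAILGUN_API_KEY
      domain: mg.example.com
      sandbox: false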
@@ -108,7 +108,6 @@ def _retry(func):
class SharpAquosTVDevice(MediaPlayerDevice):
    """Representation of a Aquos TV."""

    # pylint: disable=too-many-public-methods
    def __init__(self, name, remote, power_on_enabled=False):
        """Initialize the aquos device."""
        global SUPPORT_SHARPTV
@@ -19,11 +19,13 @@ from homeassistant.const import (
    CONF_NAME, STATE_ON)
import homeassistant.helpers.config_validation as cv

REQUIREMENTS = ['denonavr==0.4.2']
REQUIREMENTS = ['denonavr==0.4.4']

_LOGGER = logging.getLogger(__name__)

DEFAULT_NAME = None
DEFAULT_SHOW_SOURCES = False
CONF_SHOW_ALL_SOURCES = 'show_all_sources'
KEY_DENON_CACHE = 'denonavr_hosts'

SUPPORT_DENON = SUPPORT_VOLUME_STEP | SUPPORT_VOLUME_MUTE | \
@@ -37,6 +39,8 @@ SUPPORT_MEDIA_MODES = SUPPORT_PLAY_MEDIA | \
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
    vol.Optional(CONF_HOST): cv.string,
    vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
    vol.Optional(CONF_SHOW_ALL_SOURCES, default=DEFAULT_SHOW_SOURCES):
        cv.boolean,
})


@@ -52,6 +56,7 @@ def setup_platform(hass, config, add_devices, discovery_info=None):
    cache = hass.data[KEY_DENON_CACHE] = set()

    # Start assignment of host and name
    show_all_sources = config.get(CONF_SHOW_ALL_SOURCES)
    # 1. option: manual setting
    if config.get(CONF_HOST) is not None:
        host = config.get(CONF_HOST)
@@ -60,7 +65,7 @@ def setup_platform(hass, config, add_devices, discovery_info=None):
        if host not in cache:
            cache.add(host)
            receivers.append(
                DenonDevice(denonavr.DenonAVR(host, name)))
                DenonDevice(denonavr.DenonAVR(host, name, show_all_sources)))
            _LOGGER.info("Denon receiver at host %s initialized", host)
    # 2. option: discovery using netdisco
    if discovery_info is not None:
@@ -70,7 +75,7 @@ def setup_platform(hass, config, add_devices, discovery_info=None):
        if host not in cache:
            cache.add(host)
            receivers.append(
                DenonDevice(denonavr.DenonAVR(host, name)))
                DenonDevice(denonavr.DenonAVR(host, name, show_all_sources)))
            _LOGGER.info("Denon receiver at host %s initialized", host)
    # 3. option: discovery using denonavr library
    if config.get(CONF_HOST) is None and discovery_info is None:
@@ -85,7 +90,8 @@ def setup_platform(hass, config, add_devices, discovery_info=None):
            if host not in cache:
                cache.add(host)
                receivers.append(
                    DenonDevice(denonavr.DenonAVR(host, name)))
                    DenonDevice(
                        denonavr.DenonAVR(host, name, show_all_sources)))
                _LOGGER.info("Denon receiver at host %s initialized", host)

    # Add all freshly discovered receivers
@@ -15,7 +15,8 @@ from homeassistant.components.media_player import (
    MediaPlayerDevice)
from homeassistant.const import (
    STATE_IDLE, STATE_OFF, STATE_PAUSED, STATE_PLAYING, STATE_STANDBY,
    STATE_UNKNOWN, CONF_HOST, CONF_PORT, CONF_NAME, CONF_DEVICE, CONF_DEVICES)
    STATE_UNKNOWN, CONF_HOST, CONF_PORT, CONF_SSL, CONF_NAME, CONF_DEVICE,
    CONF_DEVICES)
import homeassistant.helpers.config_validation as cv

_LOGGER = logging.getLogger(__name__)
@@ -24,19 +25,21 @@ SUPPORT_FIRETV = SUPPORT_PAUSE | \
    SUPPORT_TURN_ON | SUPPORT_TURN_OFF | SUPPORT_PREVIOUS_TRACK | \
    SUPPORT_NEXT_TRACK | SUPPORT_VOLUME_SET | SUPPORT_PLAY

DEFAULT_SSL = False
DEFAULT_DEVICE = 'default'
DEFAULT_HOST = 'localhost'
DEFAULT_NAME = 'Amazon Fire TV'
DEFAULT_PORT = 5556
DEVICE_ACTION_URL = 'http://{0}:{1}/devices/action/{2}/{3}'
DEVICE_LIST_URL = 'http://{0}:{1}/devices/list'
DEVICE_STATE_URL = 'http://{0}:{1}/devices/state/{2}'
DEVICE_ACTION_URL = '{0}://{1}:{2}/devices/action/{3}/{4}'
DEVICE_LIST_URL = '{0}://{1}:{2}/devices/list'
DEVICE_STATE_URL = '{0}://{1}:{2}/devices/state/{3}'

PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
    vol.Optional(CONF_DEVICE, default=DEFAULT_DEVICE): cv.string,
    vol.Optional(CONF_HOST, default=DEFAULT_HOST): cv.string,
    vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
    vol.Optional(CONF_PORT, default=DEFAULT_PORT): cv.port,
    vol.Optional(CONF_SSL, default=DEFAULT_SSL): cv.boolean,
})


@@ -44,14 +47,17 @@ PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
def setup_platform(hass, config, add_devices, discovery_info=None):
    """Set up the FireTV platform."""
    name = config.get(CONF_NAME)
    ssl = config.get(CONF_SSL)
    proto = 'https' if ssl else 'http'
    host = config.get(CONF_HOST)
    port = config.get(CONF_PORT)
    device_id = config.get(CONF_DEVICE)

    try:
        response = requests.get(DEVICE_LIST_URL.format(host, port)).json()
        response = requests.get(
            DEVICE_LIST_URL.format(proto, host, port)).json()
        if device_id in response[CONF_DEVICES].keys():
            add_devices([FireTVDevice(host, port, device_id, name)])
            add_devices([FireTVDevice(proto, host, port, device_id, name)])
            _LOGGER.info("Device %s accessible and ready for control",
                         device_id)
        else:
@@ -72,8 +78,9 @@ class FireTV(object):
    be running via Python 2).
    """

    def __init__(self, host, port, device_id):
    def __init__(self, proto, host, port, device_id):
        """Initialize the FireTV server."""
        self.proto = proto
        self.host = host
        self.port = port
        self.device_id = device_id
@@ -84,7 +91,8 @@ class FireTV(object):
        try:
            response = requests.get(
                DEVICE_STATE_URL.format(
                    self.host, self.port, self.device_id), timeout=10).json()
                    self.proto, self.host, self.port, self.device_id
                ), timeout=10).json()
            return response.get('state', STATE_UNKNOWN)
        except requests.exceptions.RequestException:
            _LOGGER.error(
@@ -95,7 +103,8 @@ class FireTV(object):
        """Perform an action on the device."""
        try:
            requests.get(DEVICE_ACTION_URL.format(
                self.host, self.port, self.device_id, action_id), timeout=10)
                self.proto, self.host, self.port, self.device_id, action_id
            ), timeout=10)
        except requests.exceptions.RequestException:
            _LOGGER.error(
                "Action request for %s was not accepted for device %s",
@@ -105,9 +114,9 @@ class FireTV(object):
class FireTVDevice(MediaPlayerDevice):
    """Representation of an Amazon Fire TV device on the network."""

    def __init__(self, host, port, device, name):
    def __init__(self, proto, host, port, device, name):
        """Initialize the FireTV device."""
        self._firetv = FireTV(host, port, device)
        self._firetv = FireTV(proto, host, port, device)
        self._name = name
        self._state = STATE_UNKNOWN
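With the new CONF_SSL option, a configuration sketch might look like this (host, port and device id are placeholders; the defaults defined above are shown explicitly):

    media_player:
      - platform: firetv
        host: 192.168.1.10
        port: 5556
        device: default
        ssl: true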
@@ -17,8 +17,7 @@ from homeassistant.const import (
    CONF_NAME, STATE_OFF, STATE_ON)
import homeassistant.helpers.config_validation as cv

REQUIREMENTS = ['https://github.com/joopert/nad_receiver/archive/'
                '0.0.3.zip#nad_receiver==0.0.3']
REQUIREMENTS = ['nad_receiver==0.0.6']

_LOGGER = logging.getLogger(__name__)
homeassistant/components/media_player/nadtcp.py (new file, 181 lines)
@@ -0,0 +1,181 @@
"""
Support for NAD digital amplifiers which can be remote controlled via tcp/ip.

For more details about this platform, please refer to the documentation at
https://home-assistant.io/components/media_player.nadtcp/
"""
import logging
import voluptuous as vol
from homeassistant.components.media_player import (
    SUPPORT_VOLUME_SET,
    SUPPORT_VOLUME_MUTE, SUPPORT_TURN_ON, SUPPORT_TURN_OFF,
    SUPPORT_VOLUME_STEP, SUPPORT_SELECT_SOURCE, MediaPlayerDevice,
    PLATFORM_SCHEMA)
from homeassistant.const import (
    CONF_NAME, STATE_OFF, STATE_ON)
import homeassistant.helpers.config_validation as cv

REQUIREMENTS = ['nad_receiver==0.0.6']

_LOGGER = logging.getLogger(__name__)

DEFAULT_NAME = 'NAD amplifier'
DEFAULT_MIN_VOLUME = -60
DEFAULT_MAX_VOLUME = -10
DEFAULT_VOLUME_STEP = 4

SUPPORT_NAD = SUPPORT_VOLUME_SET | SUPPORT_VOLUME_MUTE | SUPPORT_TURN_ON | \
    SUPPORT_TURN_OFF | SUPPORT_VOLUME_STEP | SUPPORT_SELECT_SOURCE

CONF_MIN_VOLUME = 'min_volume'
CONF_MAX_VOLUME = 'max_volume'
CONF_VOLUME_STEP = 'volume_step'
CONF_HOST = 'host'

PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
    vol.Required(CONF_HOST): cv.string,
    vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
    vol.Optional(CONF_MIN_VOLUME, default=DEFAULT_MIN_VOLUME): int,
    vol.Optional(CONF_MAX_VOLUME, default=DEFAULT_MAX_VOLUME): int,
    vol.Optional(CONF_VOLUME_STEP, default=DEFAULT_VOLUME_STEP): int,
})


def setup_platform(hass, config, add_devices, discovery_info=None):
    """Setup the NAD platform."""
    from nad_receiver import NADReceiverTCP
    add_devices([NADtcp(
        NADReceiverTCP(config.get(CONF_HOST)),
        config.get(CONF_NAME),
        config.get(CONF_MIN_VOLUME),
        config.get(CONF_MAX_VOLUME),
        config.get(CONF_VOLUME_STEP),
    )])


class NADtcp(MediaPlayerDevice):
    """Representation of a NAD Digital amplifier."""

    def __init__(self, nad_device, name, min_volume, max_volume, volume_step):
        """Initialize the amplifier."""
        self._name = name
        self.nad_device = nad_device
        self._min_vol = (min_volume + 90) * 2  # from dB to nad vol (0-200)
        self._max_vol = (max_volume + 90) * 2  # from dB to nad vol (0-200)
        self._volume_step = volume_step
        self._state = None
        self._mute = None
        self._nad_volume = None
        self._volume = None
        self._source = None
        self._source_list = self.nad_device.available_sources()

        self.update()

    @property
    def name(self):
        """Return the name of the device."""
        return self._name

    @property
    def state(self):
        """Return the state of the device."""
        return self._state

    def update(self):
        """Get the latest details from the device."""
        try:
            nad_status = self.nad_device.status()
        except OSError:
            return
        if nad_status is None:
            return

        # Update on/off state
        if nad_status['power']:
            self._state = STATE_ON
        else:
            self._state = STATE_OFF

        # Update current volume
        self._volume = self.nad_vol_to_internal_vol(nad_status['volume'])
        self._nad_volume = nad_status['volume']

        # Update muted state
        self._mute = nad_status['muted']

        # Update current source
        self._source = nad_status['source']

    def nad_vol_to_internal_vol(self, nad_volume):
        """Convert nad volume range (0-200) to internal volume range.

        Takes into account configured min and max volume.
        """
        if nad_volume < self._min_vol:
            volume_internal = 0.0
        if nad_volume > self._max_vol:
            volume_internal = 1.0
        else:
            volume_internal = (nad_volume - self._min_vol) / \
                (self._max_vol - self._min_vol)
        return volume_internal

    @property
    def supported_features(self):
        """Flag media player features that are supported."""
        return SUPPORT_NAD

    def turn_off(self):
        """Turn the media player off."""
        self.nad_device.power_off()

    def turn_on(self):
        """Turn the media player on."""
        self.nad_device.power_on()

    def volume_up(self):
        """Step volume up in the configured increments."""
        self.nad_device.set_volume(self._nad_volume + 2 * self._volume_step)

    def volume_down(self):
        """Step volume down in the configured increments."""
        self.nad_device.set_volume(self._nad_volume - 2 * self._volume_step)

    def set_volume_level(self, volume):
        """Set volume level, range 0..1."""
        nad_volume_to_set = \
            int(round(volume * (self._max_vol - self._min_vol) +
                      self._min_vol))
        self.nad_device.set_volume(nad_volume_to_set)

    def mute_volume(self, mute):
        """Mute (true) or unmute (false) media player."""
        if mute:
            self.nad_device.mute()
        else:
            self.nad_device.unmute()

    def select_source(self, source):
        """Select input source."""
        self.nad_device.select_source(source)

    @property
    def source(self):
        """Name of the current input source."""
        return self._source

    @property
    def source_list(self):
        """List of available input sources."""
        return self.nad_device.available_sources()

    @property
    def volume_level(self):
        """Volume level of the media player (0..1)."""
        return self._volume

    @property
    def is_volume_muted(self):
        """Boolean if volume is currently muted."""
        return self._mute
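A configuration sketch for the new platform, using the defaults defined in the PLATFORM_SCHEMA above (the host is a placeholder):

    media_player:
      - platform: nadtcp
        host: 192.168.1.20
        name: NAD amplifier
        min_volume: -60
        max_volume: -10
        volume_step: 4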
@@ -134,7 +134,6 @@ def setup_plexserver(host, token, hass, config, add_devices_callback):
    track_utc_time_change(hass, lambda now: update_devices(), second=30)

    @util.Throttle(MIN_TIME_BETWEEN_SCANS, MIN_TIME_BETWEEN_FORCED_SCANS)
    # pylint: disable=too-many-branches
    def update_devices():
        """Update the devices objects."""
        try:
@@ -231,11 +230,9 @@ def request_configuration(host, hass, config, add_devices_callback):
        }])


# pylint: disable=too-many-instance-attributes, too-many-public-methods
class PlexClient(MediaPlayerDevice):
    """Representation of a Plex device."""

    # pylint: disable=too-many-arguments
    def __init__(self, config, device, session, plex_sessions,
                 update_devices, update_sessions):
        """Initialize the Plex device."""
@@ -299,7 +296,6 @@ class PlexClient(MediaPlayerDevice):
            'media_player', prefix,
            self.name.lower().replace('-', '_'))

    # pylint: disable=too-many-branches, too-many-statements
    def refresh(self, device, session):
        """Refresh key device data."""
        # new data refresh
@@ -22,14 +22,13 @@ from homeassistant.components.media_player import (
    SUPPORT_PLAY)
from homeassistant.const import (
    STATE_IDLE, STATE_PAUSED, STATE_PLAYING, STATE_OFF, ATTR_ENTITY_ID,
    CONF_HOSTS)
    CONF_HOSTS, ATTR_TIME)
from homeassistant.config import load_yaml_config_file
import homeassistant.helpers.config_validation as cv
from homeassistant.util.dt import utcnow

REQUIREMENTS = ['SoCo==0.12']


_LOGGER = logging.getLogger(__name__)

# The soco library is excessively chatty when it comes to logging and
@@ -68,7 +67,6 @@ ATTR_ALARM_ID = 'alarm_id'
ATTR_VOLUME = 'volume'
ATTR_ENABLED = 'enabled'
ATTR_INCLUDE_LINKED_ZONES = 'include_linked_zones'
ATTR_TIME = 'time'
ATTR_MASTER = 'master'
ATTR_WITH_GROUP = 'with_group'

@@ -526,7 +524,7 @@ class SonosDevice(MediaPlayerDevice):
            support_previous_track = False
            support_next_track = False
            support_play = False
            support_stop = False
            support_stop = True
            support_pause = False

        if is_playing_tv:
@@ -927,8 +925,8 @@ class SonosDevice(MediaPlayerDevice):
    @soco_error
    def turn_off(self):
        """Turn off media player."""
        if self._support_pause:
            self.media_pause()
        if self._support_stop:
            self.media_stop()

    @soco_error
    @soco_filter_upnperror(UPNP_ERRORS_TO_IGNORE)
@@ -33,7 +33,7 @@ SUPPORT_SPOTIFY = SUPPORT_VOLUME_SET | SUPPORT_PAUSE | SUPPORT_PLAY |\
    SUPPORT_NEXT_TRACK | SUPPORT_PREVIOUS_TRACK | SUPPORT_SELECT_SOURCE |\
    SUPPORT_PLAY_MEDIA | SUPPORT_SHUFFLE_SET

SCOPE = 'user-read-playback-state user-modify-playback-state'
SCOPE = 'user-read-playback-state user-modify-playback-state user-read-private'
DEFAULT_CACHE_PATH = '.spotify-token-cache'
AUTH_CALLBACK_PATH = '/api/spotify'
AUTH_CALLBACK_NAME = 'api:spotify'
@@ -135,6 +135,7 @@ class SpotifyMediaPlayer(MediaPlayerDevice):
        self._volume = None
        self._shuffle = False
        self._player = None
        self._user = None
        self._aliases = aliases
        self._token_info = self._oauth.get_cached_token()

@@ -153,6 +154,7 @@ class SpotifyMediaPlayer(MediaPlayerDevice):
        if self._player is None or token_refreshed:
            self._player = \
                spotipy.Spotify(auth=self._token_info.get('access_token'))
            self._user = self._player.me()

    def update(self):
        """Update state and attributes."""
@@ -308,4 +310,7 @@ class SpotifyMediaPlayer(MediaPlayerDevice):
    @property
    def supported_features(self):
        """Return the media player features that are supported."""
        if self._user is not None and self._user['product'] == 'premium':
            return SUPPORT_SPOTIFY
        else:
            return None
@@ -95,7 +95,7 @@ class LogitechMediaServer(object):
        result = []
        data = yield from self.async_query('players', 'status')

        for players in data['players_loop']:
        for players in data.get('players_loop', []):
            player = SqueezeBoxDevice(
                self, players['playerid'], players['name'])
            yield from player.async_update()
@@ -102,7 +102,7 @@ def valid_discovery_topic(value):
_VALID_QOS_SCHEMA = vol.All(vol.Coerce(int), vol.In([0, 1, 2]))

CLIENT_KEY_AUTH_MSG = 'client_key and client_cert must both be present in ' \
                      'the mqtt broker config'
                      'the MQTT broker configuration'

MQTT_WILL_BIRTH_SCHEMA = vol.Schema({
    vol.Required(ATTR_TOPIC): valid_publish_topic,
@@ -126,9 +126,8 @@ CONFIG_SCHEMA = vol.Schema({
        vol.Inclusive(CONF_CLIENT_CERT, 'client_key_auth',
                      msg=CLIENT_KEY_AUTH_MSG): cv.isfile,
        vol.Optional(CONF_TLS_INSECURE): cv.boolean,
        vol.Optional(CONF_TLS_VERSION,
                     default=DEFAULT_TLS_PROTOCOL): vol.Any('auto', '1.0',
                                                            '1.1', '1.2'),
        vol.Optional(CONF_TLS_VERSION, default=DEFAULT_TLS_PROTOCOL):
            vol.Any('auto', '1.0', '1.1', '1.2'),
        vol.Optional(CONF_PROTOCOL, default=DEFAULT_PROTOCOL):
            vol.All(cv.string, vol.In([PROTOCOL_31, PROTOCOL_311])),
        vol.Optional(CONF_EMBEDDED): HBMQTT_CONFIG_SCHEMA,
@@ -237,9 +236,7 @@ def subscribe(hass, topic, msg_callback, qos=DEFAULT_QOS,
              encoding='utf-8'):
    """Subscribe to an MQTT topic."""
    async_remove = run_coroutine_threadsafe(
        async_subscribe(hass, topic, msg_callback,
                        qos, encoding),
        hass.loop
        async_subscribe(hass, topic, msg_callback, qos, encoding), hass.loop
    ).result()

    def remove():
@@ -649,7 +646,7 @@ def _match_topic(subscription, topic):
        if sub_part == "+":
            reg_ex_parts.append(r"([^\/]+)")
        else:
            reg_ex_parts.append(sub_part)
            reg_ex_parts.append(re.escape(sub_part))

    reg_ex = "^" + (r'\/'.join(reg_ex_parts)) + suffix + "$"
@@ -29,10 +29,18 @@ NEST_CONFIG_FILE = 'nest.conf'
CONF_CLIENT_ID = 'client_id'
CONF_CLIENT_SECRET = 'client_secret'

ATTR_HOME_MODE = 'home_mode'
ATTR_STRUCTURE = 'structure'

SENSOR_SCHEMA = vol.Schema({
    vol.Optional(CONF_MONITORED_CONDITIONS): vol.All(cv.ensure_list)
})

AWAY_SCHEMA = vol.Schema({
    vol.Required(ATTR_HOME_MODE): cv.string,
    vol.Optional(ATTR_STRUCTURE): vol.All(cv.ensure_list, cv.string)
})

CONFIG_SCHEMA = vol.Schema({
    DOMAIN: vol.Schema({
        vol.Required(CONF_CLIENT_ID): cv.string,
@@ -126,6 +134,24 @@ def setup(hass, config):
        client_id=client_id, client_secret=client_secret)
    setup_nest(hass, nest, config)

    def set_mode(service):
        """Set the home/away mode for a Nest structure."""
        if ATTR_STRUCTURE in service.data:
            structures = service.data[ATTR_STRUCTURE]
        else:
            structures = hass.data[DATA_NEST].local_structure

        for structure in nest.structures:
            if structure.name in structures:
                _LOGGER.info("Setting mode for %s", structure.name)
                structure.away = service.data[ATTR_HOME_MODE]
            else:
                _LOGGER.error("Invalid structure %s",
                              service.data[ATTR_STRUCTURE])

    hass.services.register(
        DOMAIN, 'set_mode', set_mode, schema=AWAY_SCHEMA)

    return True


@@ -138,21 +164,21 @@ class NestDevice(object):
        self.nest = nest

        if CONF_STRUCTURE not in conf:
            self._structure = [s.name for s in nest.structures]
            self.local_structure = [s.name for s in nest.structures]
        else:
            self._structure = conf[CONF_STRUCTURE]
        _LOGGER.debug("Structures to include: %s", self._structure)
            self.local_structure = conf[CONF_STRUCTURE]
        _LOGGER.debug("Structures to include: %s", self.local_structure)

    def thermostats(self):
        """Generate a list of thermostats and their location."""
        try:
            for structure in self.nest.structures:
                if structure.name in self._structure:
                if structure.name in self.local_structure:
                    for device in structure.thermostats:
                        yield (structure, device)
                else:
                    _LOGGER.debug("Ignoring structure %s, not in %s",
                                  structure.name, self._structure)
                                  structure.name, self.local_structure)
        except socket.error:
            _LOGGER.error(
                "Connection error logging into the nest web service.")
@@ -161,12 +187,12 @@ class NestDevice(object):
        """Generate a list of smoke co alarams."""
        try:
            for structure in self.nest.structures:
                if structure.name in self._structure:
                if structure.name in self.local_structure:
                    for device in structure.smoke_co_alarms:
                        yield(structure, device)
                else:
                    _LOGGER.info("Ignoring structure %s, not in %s",
                                 structure.name, self._structure)
                                 structure.name, self.local_structure)
        except socket.error:
            _LOGGER.error(
                "Connection error logging into the nest web service.")
@@ -175,12 +201,12 @@ class NestDevice(object):
        """Generate a list of cameras."""
        try:
            for structure in self.nest.structures:
                if structure.name in self._structure:
                if structure.name in self.local_structure:
                    for device in structure.cameras:
                        yield(structure, device)
                else:
                    _LOGGER.info("Ignoring structure %s, not in %s",
                                 structure.name, self._structure)
                                 structure.name, self.local_structure)
        except socket.error:
            _LOGGER.error(
                "Connection error logging into the nest web service.")
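A sketch of calling the new nest.set_mode service according to AWAY_SCHEMA above; the structure name and the 'away' mode value are assumptions about the underlying python-nest API, not part of the commit:

    - service: nest.set_mode
      data:
        home_mode: away
        structure:
          - Home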
@@ -18,7 +18,7 @@ from homeassistant.util import Throttle

REQUIREMENTS = [
    'https://github.com/jabesq/netatmo-api-python/archive/'
    'v0.9.1.zip#lnetatmo==0.9.1']
    'v0.9.2.zip#lnetatmo==0.9.2']

_LOGGER = logging.getLogger(__name__)
@@ -25,7 +25,7 @@ from homeassistant.components.http import HomeAssistantView
from homeassistant.components.frontend import add_manifest_json_key
from homeassistant.helpers import config_validation as cv

REQUIREMENTS = ['pywebpush==1.0.0', 'PyJWT==1.4.2']
REQUIREMENTS = ['pywebpush==1.0.4', 'PyJWT==1.5.0']

DEPENDENCIES = ['frontend']

@@ -48,6 +48,7 @@ ATTR_ENDPOINT = 'endpoint'
ATTR_KEYS = 'keys'
ATTR_AUTH = 'auth'
ATTR_P256DH = 'p256dh'
ATTR_EXPIRATIONTIME = 'expirationTime'

ATTR_TAG = 'tag'
ATTR_ACTION = 'action'
@@ -71,7 +72,9 @@ SUBSCRIPTION_SCHEMA = vol.All(dict,
                              vol.Schema({
                                  # pylint: disable=no-value-for-parameter
                                  vol.Required(ATTR_ENDPOINT): vol.Url(),
                                  vol.Required(ATTR_KEYS): KEYS_SCHEMA
                                  vol.Required(ATTR_KEYS): KEYS_SCHEMA,
                                  vol.Optional(ATTR_EXPIRATIONTIME):
                                      vol.Any(None, cv.positive_int)
                              }))

REGISTER_SCHEMA = vol.Schema({
@@ -115,7 +118,7 @@ def get_service(hass, config, discovery_info=None):
        add_manifest_json_key(
            ATTR_GCM_SENDER_ID, config.get(ATTR_GCM_SENDER_ID))

    return HTML5NotificationService(gcm_api_key, registrations)
    return HTML5NotificationService(gcm_api_key, registrations, json_path)


def _load_config(filename):
@@ -327,10 +330,11 @@ class HTML5PushCallbackView(HomeAssistantView):
class HTML5NotificationService(BaseNotificationService):
    """Implement the notification service for HTML5."""

    def __init__(self, gcm_key, registrations):
    def __init__(self, gcm_key, registrations, json_path):
        """Initialize the service."""
        self._gcm_key = gcm_key
        self.registrations = registrations
        self.registrations_json_path = json_path

    @property
    def targets(self):
@@ -383,7 +387,7 @@ class HTML5NotificationService(BaseNotificationService):
        if not targets:
            targets = self.registrations.keys()

        for target in targets:
        for target in list(targets):
            info = self.registrations.get(target)
            if info is None:
                _LOGGER.error("%s is not a valid HTML5 push notification"
@@ -399,5 +403,16 @@ class HTML5NotificationService(BaseNotificationService):
            jwt_token = jwt.encode(jwt_claims, jwt_secret).decode('utf-8')
            payload[ATTR_DATA][ATTR_JWT] = jwt_token

            WebPusher(info[ATTR_SUBSCRIPTION]).send(
            response = WebPusher(info[ATTR_SUBSCRIPTION]).send(
                json.dumps(payload), gcm_key=self._gcm_key, ttl='86400')

            # pylint: disable=no-member
            if response.status_code == 410:
                _LOGGER.info("Notification channel has expired")
                reg = self.registrations.pop(target)
                if not _save_config(self.registrations_json_path,
                                    self.registrations):
                    self.registrations[target] = reg
                    _LOGGER.error("Error saving registration.")
                else:
                    _LOGGER.info("Configuration saved")
@ -8,40 +8,37 @@ import logging
|
||||
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.components.mailgun import CONF_SANDBOX, DATA_MAILGUN
|
||||
from homeassistant.components.notify import (
|
||||
PLATFORM_SCHEMA, BaseNotificationService, ATTR_TITLE, ATTR_TITLE_DEFAULT,
|
||||
ATTR_DATA)
|
||||
from homeassistant.const import (
|
||||
CONF_TOKEN, CONF_DOMAIN, CONF_RECIPIENT, CONF_SENDER)
|
||||
import homeassistant.helpers.config_validation as cv
|
||||
CONF_API_KEY, CONF_DOMAIN, CONF_RECIPIENT, CONF_SENDER)
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
DEPENDENCIES = ['mailgun']
|
||||
REQUIREMENTS = ['pymailgunner==1.4']
|
||||
|
||||
# Images to attach to notification
|
||||
ATTR_IMAGES = 'images'
|
||||
|
||||
CONF_SANDBOX = 'sandbox'
|
||||
|
||||
DEFAULT_SENDER = 'hass@{domain}'
|
||||
DEFAULT_SANDBOX = False
|
||||
|
||||
# pylint: disable=no-value-for-parameter
|
||||
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
|
||||
vol.Required(CONF_TOKEN): cv.string,
|
||||
vol.Required(CONF_RECIPIENT): vol.Email(),
|
||||
vol.Optional(CONF_DOMAIN): cv.string,
|
||||
vol.Optional(CONF_SENDER): vol.Email(),
|
||||
vol.Optional(CONF_SANDBOX, default=DEFAULT_SANDBOX): cv.boolean,
|
||||
vol.Optional(CONF_SENDER): vol.Email()
|
||||
})
|
||||
|
||||
|
||||
def get_service(hass, config, discovery_info=None):
|
||||
"""Get the Mailgun notification service."""
|
||||
data = hass.data[DATA_MAILGUN]
|
||||
mailgun_service = MailgunNotificationService(
|
||||
config.get(CONF_DOMAIN), config.get(CONF_SANDBOX),
|
||||
config.get(CONF_TOKEN), config.get(CONF_SENDER),
|
||||
data.get(CONF_DOMAIN), data.get(CONF_SANDBOX),
|
||||
data.get(CONF_API_KEY), config.get(CONF_SENDER),
|
||||
config.get(CONF_RECIPIENT))
|
||||
if mailgun_service.connection_is_valid():
|
||||
return mailgun_service
|
||||
@ -52,19 +49,19 @@ def get_service(hass, config, discovery_info=None):
|
||||
class MailgunNotificationService(BaseNotificationService):
|
||||
"""Implement a notification service for the Mailgun mail service."""
|
||||
|
||||
def __init__(self, domain, sandbox, token, sender, recipient):
|
||||
def __init__(self, domain, sandbox, api_key, sender, recipient):
|
||||
"""Initialize the service."""
|
||||
self._client = None # Mailgun API client
|
||||
self._domain = domain
|
||||
self._sandbox = sandbox
|
||||
self._token = token
|
||||
self._api_key = api_key
|
||||
self._sender = sender
|
||||
self._recipient = recipient
|
||||
|
||||
def initialize_client(self):
|
||||
"""Initialize the connection to Mailgun."""
|
||||
from pymailgunner import Client
|
||||
self._client = Client(self._token, self._domain, self._sandbox)
|
||||
self._client = Client(self._api_key, self._domain, self._sandbox)
|
||||
_LOGGER.debug("Mailgun domain: %s", self._client.domain)
|
||||
self._domain = self._client.domain
|
||||
if not self._sender:
|
||||
|
@ -13,7 +13,7 @@ from homeassistant.components.notify import (
|
||||
from homeassistant.const import (CONF_API_KEY, CONF_SENDER, CONF_RECIPIENT)
|
||||
import homeassistant.helpers.config_validation as cv
|
||||
|
||||
REQUIREMENTS = ['sendgrid==4.1.0']
|
||||
REQUIREMENTS = ['sendgrid==4.2.0']
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
@ -16,7 +16,7 @@ from homeassistant.const import CONF_PASSWORD, CONF_SENDER, CONF_RECIPIENT
|
||||
REQUIREMENTS = ['sleekxmpp==1.3.2',
|
||||
'dnspython3==1.15.0',
|
||||
'pyasn1==0.2.3',
|
||||
'pyasn1-modules==0.0.8']
|
||||
'pyasn1-modules==0.0.9']
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
@ -33,8 +33,8 @@ PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
|
||||
def get_service(hass, config, discovery_info=None):
|
||||
"""Get the Jabber (XMPP) notification service."""
|
||||
return XmppNotificationService(
|
||||
config.get('sender'), config.get('password'), config.get('recipient'),
|
||||
config.get('tls'))
|
||||
config.get(CONF_SENDER), config.get(CONF_PASSWORD),
|
||||
config.get(CONF_RECIPIENT), config.get(CONF_TLS))
|
||||
|
||||
|
||||
class XmppNotificationService(BaseNotificationService):
|
||||
@ -52,7 +52,7 @@ class XmppNotificationService(BaseNotificationService):
|
||||
title = kwargs.get(ATTR_TITLE, ATTR_TITLE_DEFAULT)
|
||||
data = '{}: {}'.format(title, message) if title else message
|
||||
|
||||
send_message(self._sender + '/home-assistant', self._password,
|
||||
send_message('{}/home-assistant'.format(self._sender), self._password,
|
||||
self._recipient, self._tls, data)
|
||||
|
||||
|
||||
|
@ -1,183 +0,0 @@
|
||||
"""
|
||||
Support for OpenCV image/video processing.
|
||||
|
||||
For more details about this component, please refer to the documentation at
|
||||
https://home-assistant.io/components/opencv/
|
||||
"""
|
||||
import logging
|
||||
import os
|
||||
import voluptuous as vol
|
||||
|
||||
import requests
|
||||
|
||||
from homeassistant.const import (
|
||||
CONF_NAME,
|
||||
CONF_ENTITY_ID,
|
||||
CONF_FILE_PATH
|
||||
)
|
||||
from homeassistant.helpers import (
|
||||
discovery,
|
||||
config_validation as cv,
|
||||
)
|
||||
|
||||
REQUIREMENTS = ['opencv-python==3.2.0.6', 'numpy==1.12.0']
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
ATTR_MATCHES = 'matches'
|
||||
|
||||
BASE_PATH = os.path.realpath(__file__)
|
||||
|
||||
CASCADE_URL = \
|
||||
'https://raw.githubusercontent.com/opencv/opencv/master/data/' +\
|
||||
'lbpcascades/lbpcascade_frontalface.xml'
|
||||
|
||||
CONF_CLASSIFIER = 'classifier'
|
||||
CONF_COLOR = 'color'
|
||||
CONF_GROUPS = 'classifier_group'
|
||||
CONF_MIN_SIZE = 'min_size'
|
||||
CONF_NEIGHBORS = 'neighbors'
|
||||
CONF_SCALE = 'scale'
|
||||
|
||||
DATA_CLASSIFIER_GROUPS = 'classifier_groups'
|
||||
|
||||
DEFAULT_COLOR = (255, 255, 0)
|
||||
DEFAULT_CLASSIFIER_PATH = 'lbp_frontalface.xml'
|
||||
DEFAULT_NAME = 'OpenCV'
|
||||
DEFAULT_MIN_SIZE = (30, 30)
|
||||
DEFAULT_NEIGHBORS = 4
|
||||
DEFAULT_SCALE = 1.1
|
||||
|
||||
DOMAIN = 'opencv'
|
||||
|
||||
CLASSIFIER_GROUP_CONFIG = {
|
||||
vol.Required(CONF_CLASSIFIER): vol.All(
|
||||
cv.ensure_list,
|
||||
[vol.Schema({
|
||||
vol.Optional(CONF_COLOR, default=DEFAULT_COLOR):
|
||||
vol.Schema((int, int, int)),
|
||||
vol.Optional(CONF_FILE_PATH, default=None): cv.isfile,
|
||||
vol.Optional(CONF_NAME, default=DEFAULT_NAME):
|
||||
cv.string,
|
||||
vol.Optional(CONF_MIN_SIZE, default=DEFAULT_MIN_SIZE):
|
||||
vol.Schema((int, int)),
|
||||
vol.Optional(CONF_NEIGHBORS, default=DEFAULT_NEIGHBORS):
|
||||
cv.positive_int,
|
||||
vol.Optional(CONF_SCALE, default=DEFAULT_SCALE):
|
||||
float
|
||||
})]),
|
||||
vol.Required(CONF_ENTITY_ID): cv.entity_ids,
|
||||
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
|
||||
}
|
||||
CLASSIFIER_GROUP_SCHEMA = vol.Schema(CLASSIFIER_GROUP_CONFIG)
|
||||
|
||||
CONFIG_SCHEMA = vol.Schema({
|
||||
DOMAIN: vol.Schema({
|
||||
vol.Required(CONF_GROUPS): vol.All(
|
||||
cv.ensure_list,
|
||||
[CLASSIFIER_GROUP_SCHEMA]
|
||||
),
|
||||
})
|
||||
}, extra=vol.ALLOW_EXTRA)
|
||||
|
||||
|
||||
# NOTE:
|
||||
# pylint cannot find any of the members of cv2, using disable=no-member
|
||||
# to pass linting
|
||||
|
||||
|
||||
def cv_image_to_bytes(cv_image):
|
||||
"""Convert OpenCV image to bytes."""
|
||||
import cv2 # pylint: disable=import-error
|
||||
|
||||
# pylint: disable=no-member
|
||||
encode_param = [int(cv2.IMWRITE_JPEG_QUALITY), 90]
|
||||
# pylint: disable=no-member
|
||||
success, data = cv2.imencode('.jpg', cv_image, encode_param)
|
||||
|
||||
if success:
|
||||
return data.tobytes()
|
||||
|
||||
return None
|
||||
|
||||
|
||||
def cv_image_from_bytes(image):
|
||||
"""Convert image bytes to OpenCV image."""
|
||||
import cv2 # pylint: disable=import-error
|
||||
import numpy
|
||||
|
||||
# pylint: disable=no-member
|
||||
return cv2.imdecode(numpy.asarray(bytearray(image)), cv2.IMREAD_UNCHANGED)
|
||||
|
||||
|
||||
def process_image(image, classifier_group, is_camera):
|
||||
"""Process the image given a classifier group."""
|
||||
import cv2 # pylint: disable=import-error
|
||||
import numpy
|
||||
|
||||
# pylint: disable=no-member
|
||||
cv_image = cv2.imdecode(numpy.asarray(bytearray(image)),
|
||||
cv2.IMREAD_UNCHANGED)
|
||||
group_matches = {}
|
||||
for classifier_config in classifier_group:
|
||||
classifier_path = classifier_config[CONF_FILE_PATH]
|
||||
classifier_name = classifier_config[CONF_NAME]
|
||||
color = classifier_config[CONF_COLOR]
|
||||
scale = classifier_config[CONF_SCALE]
|
||||
neighbors = classifier_config[CONF_NEIGHBORS]
|
||||
min_size = classifier_config[CONF_MIN_SIZE]
|
||||
|
||||
# pylint: disable=no-member
|
||||
classifier = cv2.CascadeClassifier(classifier_path)
|
||||
|
||||
detections = classifier.detectMultiScale(cv_image,
|
||||
scaleFactor=scale,
|
||||
minNeighbors=neighbors,
|
||||
minSize=min_size)
|
||||
regions = []
|
||||
# pylint: disable=invalid-name
|
||||
for (x, y, w, h) in detections:
|
||||
if is_camera:
|
||||
# pylint: disable=no-member
|
||||
cv2.rectangle(cv_image,
|
||||
(x, y),
|
||||
(x + w, y + h),
|
||||
color,
|
||||
2)
|
||||
else:
|
||||
regions.append((int(x), int(y), int(w), int(h)))
|
||||
group_matches[classifier_name] = regions
|
||||
|
||||
if is_camera:
|
||||
return cv_image_to_bytes(cv_image)
|
||||
else:
|
||||
return group_matches
|
||||
|
||||
|
||||
def setup(hass, config):
|
||||
"""Set up the OpenCV platform entities."""
|
||||
default_classifier = hass.config.path(DEFAULT_CLASSIFIER_PATH)
|
||||
|
||||
if not os.path.isfile(default_classifier):
|
||||
_LOGGER.info('Downloading default classifier')
|
||||
|
||||
req = requests.get(CASCADE_URL, stream=True)
|
||||
with open(default_classifier, 'wb') as fil:
|
||||
for chunk in req.iter_content(chunk_size=1024):
|
||||
if chunk: # filter out keep-alive new chunks
|
||||
fil.write(chunk)
|
||||
|
||||
for group in config[DOMAIN][CONF_GROUPS]:
|
||||
grp = {}
|
||||
|
||||
for classifier, config in group.items():
|
||||
config = dict(config)
|
||||
|
||||
if config[CONF_FILE_PATH] is None:
|
||||
config[CONF_FILE_PATH] = default_classifier
|
||||
|
||||
grp[classifier] = config
|
||||
|
||||
discovery.load_platform(hass, 'image_processing', DOMAIN, grp)
|
||||
|
||||
return True
|
@ -26,6 +26,7 @@ DOMAIN = 'persistent_notification'
|
||||
ENTITY_ID_FORMAT = DOMAIN + '.{}'
|
||||
|
||||
SERVICE_CREATE = 'create'
|
||||
SERVICE_DISMISS = 'dismiss'
|
||||
|
||||
SCHEMA_SERVICE_CREATE = vol.Schema({
|
||||
vol.Required(ATTR_MESSAGE): cv.template,
|
||||
@ -33,6 +34,10 @@ SCHEMA_SERVICE_CREATE = vol.Schema({
|
||||
vol.Optional(ATTR_NOTIFICATION_ID): cv.string,
|
||||
})
|
||||
|
||||
SCHEMA_SERVICE_DISMISS = vol.Schema({
|
||||
vol.Required(ATTR_NOTIFICATION_ID): cv.string,
|
||||
})
|
||||
|
||||
|
||||
DEFAULT_OBJECT_ID = 'notification'
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
@ -43,6 +48,11 @@ def create(hass, message, title=None, notification_id=None):
|
||||
hass.add_job(async_create, hass, message, title, notification_id)
|
||||
|
||||
|
||||
def dismiss(hass, notification_id):
|
||||
"""Remove a notification."""
|
||||
hass.add_job(async_dismiss, hass, notification_id)
|
||||
|
||||
|
||||
@callback
|
||||
def async_create(hass, message, title=None, notification_id=None):
|
||||
"""Generate a notification."""
|
||||
@ -57,6 +67,14 @@ def async_create(hass, message, title=None, notification_id=None):
|
||||
hass.async_add_job(hass.services.async_call(DOMAIN, SERVICE_CREATE, data))
|
||||
|
||||
|
||||
@callback
|
||||
def async_dismiss(hass, notification_id):
|
||||
"""Remove a notification."""
|
||||
data = {ATTR_NOTIFICATION_ID: notification_id}
|
||||
|
||||
hass.async_add_job(hass.services.async_call(DOMAIN, SERVICE_DISMISS, data))
|
||||
|
||||
|
||||
@asyncio.coroutine
|
||||
def async_setup(hass, config):
|
||||
"""Set up the persistent notification component."""
|
||||
@ -92,12 +110,25 @@ def async_setup(hass, config):
|
||||
|
||||
hass.states.async_set(entity_id, message, attr)
|
||||
|
||||
@callback
|
||||
def dismiss_service(call):
|
||||
"""Handle the dismiss notification service call."""
|
||||
notification_id = call.data.get(ATTR_NOTIFICATION_ID)
|
||||
entity_id = ENTITY_ID_FORMAT.format(slugify(notification_id))
|
||||
|
||||
hass.states.async_remove(entity_id)
|
||||
|
||||
descriptions = yield from hass.async_add_job(
|
||||
load_yaml_config_file, os.path.join(
|
||||
os.path.dirname(__file__), 'services.yaml')
|
||||
)
|
||||
|
||||
hass.services.async_register(DOMAIN, SERVICE_CREATE, create_service,
|
||||
descriptions[DOMAIN][SERVICE_CREATE],
|
||||
SCHEMA_SERVICE_CREATE)
|
||||
|
||||
hass.services.async_register(DOMAIN, SERVICE_DISMISS, dismiss_service,
|
||||
descriptions[DOMAIN][SERVICE_DISMISS],
|
||||
SCHEMA_SERVICE_DISMISS)
|
||||
|
||||
return True
|
||||
|
@ -10,8 +10,9 @@ import asyncio
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.const import (
|
||||
STATE_UNKNOWN, TEMP_CELSIUS, ATTR_TEMPERATURE, CONF_SENSORS,
|
||||
ATTR_UNIT_OF_MEASUREMENT, ATTR_ICON)
|
||||
STATE_OK, STATE_PROBLEM, STATE_UNKNOWN, TEMP_CELSIUS, ATTR_TEMPERATURE,
|
||||
CONF_SENSORS, ATTR_UNIT_OF_MEASUREMENT, ATTR_ICON)
|
||||
from homeassistant.components import group
|
||||
import homeassistant.helpers.config_validation as cv
|
||||
from homeassistant.helpers.entity import Entity
|
||||
from homeassistant.helpers.entity_component import EntityComponent
|
||||
@ -69,6 +70,10 @@ PLANT_SCHEMA = vol.Schema({
|
||||
})
|
||||
|
||||
DOMAIN = 'plant'
|
||||
DEPENDENCIES = ['zone', 'group']
|
||||
|
||||
GROUP_NAME_ALL_PLANTS = 'all plants'
|
||||
ENTITY_ID_ALL_PLANTS = group.ENTITY_ID_FORMAT.format('all_plants')
|
||||
|
||||
CONFIG_SCHEMA = vol.Schema({
|
||||
DOMAIN: {
|
||||
@ -80,7 +85,8 @@ CONFIG_SCHEMA = vol.Schema({
|
||||
@asyncio.coroutine
|
||||
def async_setup(hass, config):
|
||||
"""Set up the Plant component."""
|
||||
component = EntityComponent(_LOGGER, DOMAIN, hass)
|
||||
component = EntityComponent(_LOGGER, DOMAIN, hass,
|
||||
group_name=GROUP_NAME_ALL_PLANTS)
|
||||
|
||||
entities = []
|
||||
for plant_name, plant_config in config[DOMAIN].items():
|
||||
@ -199,11 +205,11 @@ class Plant(Entity):
|
||||
self._icon = params['icon']
|
||||
|
||||
if len(result) == 0:
|
||||
self._state = 'ok'
|
||||
self._state = STATE_OK
|
||||
self._icon = 'mdi:thumb-up'
|
||||
self._problems = PROBLEM_NONE
|
||||
else:
|
||||
self._state = 'problem'
|
||||
self._state = STATE_PROBLEM
|
||||
self._problems = ','.join(result)
|
||||
_LOGGER.debug("New data processed")
|
||||
self.hass.async_add_job(self.async_update_ha_state())
|
||||
|
124
homeassistant/components/python_script.py
Normal file
124
homeassistant/components/python_script.py
Normal file
@ -0,0 +1,124 @@
|
||||
"""Component to allow running Python scripts."""
|
||||
import glob
|
||||
import os
|
||||
import logging
|
||||
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.exceptions import HomeAssistantError
|
||||
from homeassistant.util import sanitize_filename
|
||||
|
||||
DOMAIN = 'python_script'
|
||||
REQUIREMENTS = ['restrictedpython==4.0a2']
|
||||
FOLDER = 'python_scripts'
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
CONFIG_SCHEMA = vol.Schema({
|
||||
DOMAIN: vol.Schema(dict)
|
||||
}, extra=vol.ALLOW_EXTRA)
|
||||
|
||||
ALLOWED_HASS = set(['bus', 'services', 'states'])
|
||||
ALLOWED_EVENTBUS = set(['fire'])
|
||||
ALLOWED_STATEMACHINE = set(['entity_ids', 'all', 'get', 'is_state',
|
||||
'is_state_attr', 'remove', 'set'])
|
||||
ALLOWED_SERVICEREGISTRY = set(['services', 'has_service', 'call'])
|
||||
|
||||
|
||||
class ScriptError(HomeAssistantError):
|
||||
"""When a script error occurs."""
|
||||
|
||||
pass
|
||||
|
||||
|
||||
def setup(hass, config):
|
||||
"""Initialize the python_script component."""
|
||||
path = hass.config.path(FOLDER)
|
||||
|
||||
if not os.path.isdir(path):
|
||||
_LOGGER.warning('Folder %s not found in config folder', FOLDER)
|
||||
return False
|
||||
|
||||
def python_script_service_handler(call):
|
||||
"""Handle python script service calls."""
|
||||
execute_script(hass, call.service, call.data)
|
||||
|
||||
for fil in glob.iglob(os.path.join(path, '*.py')):
|
||||
name = os.path.splitext(os.path.basename(fil))[0]
|
||||
hass.services.register(DOMAIN, name, python_script_service_handler)
|
||||
|
||||
return True
|
||||
|
||||
|
||||
def execute_script(hass, name, data=None):
|
||||
"""Execute a script."""
|
||||
filename = '{}.py'.format(name)
|
||||
with open(hass.config.path(FOLDER, sanitize_filename(filename))) as fil:
|
||||
source = fil.read()
|
||||
execute(hass, filename, source, data)
|
||||
|
||||
|
||||
def execute(hass, filename, source, data=None):
|
||||
"""Execute Python source."""
|
||||
from RestrictedPython import compile_restricted_exec
|
||||
from RestrictedPython.Guards import safe_builtins, full_write_guard
|
||||
|
||||
compiled = compile_restricted_exec(source, filename=filename)
|
||||
|
||||
if compiled.errors:
|
||||
_LOGGER.error('Error loading script %s: %s', filename,
|
||||
', '.join(compiled.errors))
|
||||
return
|
||||
|
||||
if compiled.warnings:
|
||||
_LOGGER.warning('Warning loading script %s: %s', filename,
|
||||
', '.join(compiled.warnings))
|
||||
|
||||
def protected_getattr(obj, name, default=None):
|
||||
"""Restricted method to get attributes."""
|
||||
# pylint: disable=too-many-boolean-expressions
|
||||
if name.startswith('async_'):
|
||||
raise ScriptError('Not allowed to access async methods')
|
||||
elif (obj is hass and name not in ALLOWED_HASS or
|
||||
obj is hass.bus and name not in ALLOWED_EVENTBUS or
|
||||
obj is hass.states and name not in ALLOWED_STATEMACHINE or
|
||||
obj is hass.services and name not in ALLOWED_SERVICEREGISTRY):
|
||||
raise ScriptError('Not allowed to access {}.{}'.format(
|
||||
obj.__class__.__name__, name))
|
||||
|
||||
return getattr(obj, name, default)
|
||||
|
||||
restricted_globals = {
|
||||
'__builtins__': safe_builtins,
|
||||
'_print_': StubPrinter,
|
||||
'_getattr_': protected_getattr,
|
||||
'_write_': full_write_guard,
|
||||
}
|
||||
logger = logging.getLogger('{}.{}'.format(__name__, filename))
|
||||
local = {
|
||||
'hass': hass,
|
||||
'data': data or {},
|
||||
'logger': logger
|
||||
}
|
||||
|
||||
try:
|
||||
_LOGGER.info('Executing %s: %s', filename, data)
|
||||
# pylint: disable=exec-used
|
||||
exec(compiled.code, restricted_globals, local)
|
||||
except ScriptError as err:
|
||||
logger.error('Error executing script: %s', err)
|
||||
except Exception as err: # pylint: disable=broad-except
|
||||
logger.exception('Error executing script: %s', err)
|
||||
|
||||
|
||||
class StubPrinter:
|
||||
"""Class to handle printing inside scripts."""
|
||||
|
||||
def __init__(self, _getattr_):
|
||||
"""Initialize our printer."""
|
||||
pass
|
||||
|
||||
def _call_print(self, *objects, **kwargs):
|
||||
"""Print text."""
|
||||
# pylint: disable=no-self-use
|
||||
_LOGGER.warning(
|
||||
"Don't use print() inside scripts. Use logger.info() instead.")
|
@ -30,6 +30,8 @@ ATTR_NUM_REPEATS = 'num_repeats'
|
||||
ATTR_DELAY_SECS = 'delay_secs'
|
||||
|
||||
DOMAIN = 'remote'
|
||||
DEPENDENCIES = ['group']
|
||||
SCAN_INTERVAL = timedelta(seconds=30)
|
||||
|
||||
ENTITY_ID_ALL_REMOTES = group.ENTITY_ID_FORMAT.format('all_remotes')
|
||||
ENTITY_ID_FORMAT = DOMAIN + '.{}'
|
||||
@ -38,7 +40,6 @@ GROUP_NAME_ALL_REMOTES = 'all remotes'
|
||||
|
||||
MIN_TIME_BETWEEN_SCANS = timedelta(seconds=10)
|
||||
|
||||
SCAN_INTERVAL = timedelta(seconds=30)
|
||||
SERVICE_SEND_COMMAND = 'send_command'
|
||||
SERVICE_SYNC = 'sync'
|
||||
|
||||
|
@ -24,6 +24,9 @@ from homeassistant.helpers.script import Script
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
DOMAIN = 'script'
|
||||
DEPENDENCIES = ['group']
|
||||
|
||||
ATTR_CAN_CANCEL = 'can_cancel'
|
||||
ATTR_LAST_ACTION = 'last_action'
|
||||
ATTR_LAST_TRIGGERED = 'last_triggered'
|
||||
@ -31,8 +34,6 @@ ATTR_VARIABLES = 'variables'
|
||||
|
||||
CONF_SEQUENCE = 'sequence'
|
||||
|
||||
DOMAIN = 'script'
|
||||
|
||||
ENTITY_ID_FORMAT = DOMAIN + '.{}'
|
||||
|
||||
GROUP_NAME_ALL_SCRIPTS = 'all scripts'
|
||||
|
@ -10,23 +10,22 @@ from datetime import timedelta
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.components.sensor import PLATFORM_SCHEMA
|
||||
from homeassistant.const import (CONF_DISPLAY_OPTIONS, ATTR_ATTRIBUTION)
|
||||
from homeassistant.const import (
|
||||
CONF_DISPLAY_OPTIONS, ATTR_ATTRIBUTION, CONF_CURRENCY)
|
||||
import homeassistant.helpers.config_validation as cv
|
||||
from homeassistant.helpers.entity import Entity
|
||||
from homeassistant.util import Throttle
|
||||
|
||||
REQUIREMENTS = ['blockchain==1.3.3']
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
CONF_ATTRIBUTION = "Data provided by blockchain.info"
|
||||
CONF_CURRENCY = 'currency'
|
||||
|
||||
DEFAULT_CURRENCY = 'USD'
|
||||
|
||||
ICON = 'mdi:currency-btc'
|
||||
|
||||
MIN_TIME_BETWEEN_UPDATES = timedelta(minutes=5)
|
||||
SCAN_INTERVAL = timedelta(minutes=5)
|
||||
|
||||
OPTION_TYPES = {
|
||||
'exchangerate': ['Exchange rate (1 BTC)', None],
|
||||
@ -74,7 +73,7 @@ def setup_platform(hass, config, add_devices, discovery_info=None):
|
||||
for variable in config[CONF_DISPLAY_OPTIONS]:
|
||||
dev.append(BitcoinSensor(data, variable, currency))
|
||||
|
||||
add_devices(dev)
|
||||
add_devices(dev, True)
|
||||
|
||||
|
||||
class BitcoinSensor(Entity):
|
||||
@ -88,7 +87,6 @@ class BitcoinSensor(Entity):
|
||||
self._currency = currency
|
||||
self.type = option_type
|
||||
self._state = None
|
||||
self.update()
|
||||
|
||||
@property
|
||||
def name(self):
|
||||
@ -154,8 +152,8 @@ class BitcoinSensor(Entity):
|
||||
elif self.type == 'total_btc_sent':
|
||||
self._state = '{0:.2f}'.format(stats.total_btc_sent * 0.00000001)
|
||||
elif self.type == 'estimated_btc_sent':
|
||||
self._state = '{0:.2f}'.format(stats.estimated_btc_sent *
|
||||
0.00000001)
|
||||
self._state = '{0:.2f}'.format(
|
||||
stats.estimated_btc_sent * 0.00000001)
|
||||
elif self.type == 'total_btc':
|
||||
self._state = '{0:.2f}'.format(stats.total_btc * 0.00000001)
|
||||
elif self.type == 'total_blocks':
|
||||
@ -166,8 +164,8 @@ class BitcoinSensor(Entity):
|
||||
self._state = '{0:.2f}'.format(
|
||||
stats.estimated_transaction_volume_usd)
|
||||
elif self.type == 'miners_revenue_btc':
|
||||
self._state = '{0:.1f}'.format(stats.miners_revenue_btc *
|
||||
0.00000001)
|
||||
self._state = '{0:.1f}'.format(
|
||||
stats.miners_revenue_btc * 0.00000001)
|
||||
elif self.type == 'market_price_usd':
|
||||
self._state = '{0:.2f}'.format(stats.market_price_usd)
|
||||
|
||||
@ -180,7 +178,6 @@ class BitcoinData(object):
|
||||
self.stats = None
|
||||
self.ticker = None
|
||||
|
||||
@Throttle(MIN_TIME_BETWEEN_UPDATES)
|
||||
def update(self):
|
||||
"""Get the latest data from blockchain.info."""
|
||||
from blockchain import statistics, exchangerates
|
||||
|
91
homeassistant/components/sensor/blockchain.py
Normal file
91
homeassistant/components/sensor/blockchain.py
Normal file
@ -0,0 +1,91 @@
|
||||
"""
|
||||
Support for Blockchain.info sensors.
|
||||
|
||||
For more details about this platform, please refer to the documentation at
|
||||
https://home-assistant.io/components/sensor.blockchain/
|
||||
"""
|
||||
import logging
|
||||
from datetime import timedelta
|
||||
|
||||
import voluptuous as vol
|
||||
|
||||
import homeassistant.helpers.config_validation as cv
|
||||
from homeassistant.components.sensor import PLATFORM_SCHEMA
|
||||
from homeassistant.const import (CONF_NAME, ATTR_ATTRIBUTION)
|
||||
from homeassistant.helpers.entity import Entity
|
||||
|
||||
REQUIREMENTS = ['python-blockchain-api==0.0.2']
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
CONF_ADDRESSES = 'addresses'
|
||||
CONF_ATTRIBUTION = "Data provided by blockchain.info"
|
||||
|
||||
DEFAULT_NAME = 'Bitcoin Balance'
|
||||
|
||||
ICON = 'mdi:currency-btc'
|
||||
|
||||
SCAN_INTERVAL = timedelta(minutes=5)
|
||||
|
||||
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
|
||||
vol.Required(CONF_ADDRESSES): [cv.string],
|
||||
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
|
||||
})
|
||||
|
||||
|
||||
def setup_platform(hass, config, add_devices, discovery_info=None):
|
||||
"""Set up the Blockchain.info sensors."""
|
||||
from pyblockchain import validate_address
|
||||
|
||||
addresses = config.get(CONF_ADDRESSES)
|
||||
name = config.get(CONF_NAME)
|
||||
|
||||
for address in addresses:
|
||||
if not validate_address(address):
|
||||
_LOGGER.error("Bitcoin address is not valid: %s", address)
|
||||
return False
|
||||
|
||||
add_devices([BlockchainSensor(name, addresses)], True)
|
||||
|
||||
|
||||
class BlockchainSensor(Entity):
|
||||
"""Representation of a Blockchain.info sensor."""
|
||||
|
||||
def __init__(self, name, addresses):
|
||||
"""Initialize the sensor."""
|
||||
self._name = name
|
||||
self.addresses = addresses
|
||||
self._state = None
|
||||
self._unit_of_measurement = 'BTC'
|
||||
|
||||
@property
|
||||
def name(self):
|
||||
"""Return the name of the sensor."""
|
||||
return self._name
|
||||
|
||||
@property
|
||||
def state(self):
|
||||
"""Return the state of the sensor."""
|
||||
return self._state
|
||||
|
||||
@property
|
||||
def unit_of_measurement(self):
|
||||
"""Return the unit of measurement this sensor expresses itself in."""
|
||||
return self._unit_of_measurement
|
||||
|
||||
@property
|
||||
def icon(self):
|
||||
"""Return the icon to use in the frontend, if any."""
|
||||
return ICON
|
||||
|
||||
@property
|
||||
def device_state_attributes(self):
|
||||
"""Return the state attributes of the sensor."""
|
||||
return {
|
||||
ATTR_ATTRIBUTION: CONF_ATTRIBUTION,
|
||||
}
|
||||
|
||||
def update(self):
|
||||
"""Get the latest state of the sensor."""
|
||||
from pyblockchain import get_balance
|
||||
self._state = get_balance(self.addresses)
|
327
homeassistant/components/sensor/buienradar.py
Executable file
327
homeassistant/components/sensor/buienradar.py
Executable file
@ -0,0 +1,327 @@
|
||||
"""
|
||||
Support for Buienradar.nl weather service.
|
||||
|
||||
For more details about this platform, please refer to the documentation at
|
||||
https://home-assistant.io/components/sensor.buienradar/
|
||||
"""
|
||||
import asyncio
|
||||
from datetime import timedelta
|
||||
import logging
|
||||
|
||||
import async_timeout
|
||||
import aiohttp
|
||||
import voluptuous as vol
|
||||
|
||||
import homeassistant.helpers.config_validation as cv
|
||||
from homeassistant.components.sensor import PLATFORM_SCHEMA
|
||||
from homeassistant.const import (
|
||||
ATTR_ATTRIBUTION, CONF_LATITUDE, CONF_LONGITUDE,
|
||||
CONF_MONITORED_CONDITIONS, CONF_NAME, TEMP_CELSIUS)
|
||||
from homeassistant.helpers.aiohttp_client import async_get_clientsession
|
||||
from homeassistant.helpers.entity import Entity
|
||||
from homeassistant.helpers.event import (
|
||||
async_track_point_in_utc_time)
|
||||
from homeassistant.util import dt as dt_util
|
||||
|
||||
REQUIREMENTS = ['buienradar==0.4']
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
# Supported sensor types:
|
||||
SENSOR_TYPES = {
|
||||
'stationname': ['Stationname', None, None],
|
||||
'symbol': ['Symbol', None, None],
|
||||
'humidity': ['Humidity', '%', 'mdi:water-percent'],
|
||||
'temperature': ['Temperature', TEMP_CELSIUS, 'mdi:thermometer'],
|
||||
'groundtemperature': ['Ground Temperature', TEMP_CELSIUS,
|
||||
'mdi:thermometer'],
|
||||
'windspeed': ['Wind speed', 'm/s', 'mdi:weather-windy'],
|
||||
'windforce': ['Wind force', 'Bft', 'mdi:weather-windy'],
|
||||
'winddirection': ['Wind direction', '°', 'mdi:compass-outline'],
|
||||
'windazimuth': ['Wind direction azimuth', None, 'mdi:compass-outline'],
|
||||
'pressure': ['Pressure', 'hPa', 'mdi:gauge'],
|
||||
'visibility': ['Visibility', 'm', None],
|
||||
'windgust': ['Wind gust', 'm/s', 'mdi:weather-windy'],
|
||||
'precipitation': ['Precipitation', 'mm/h', 'mdi:weather-pouring'],
|
||||
'irradiance': ['Irradiance', 'W/m2', 'mdi:sunglasses'],
|
||||
}
|
||||
|
||||
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
|
||||
vol.Optional(CONF_MONITORED_CONDITIONS,
|
||||
default=['symbol', 'temperature']): vol.All(
|
||||
cv.ensure_list, vol.Length(min=1),
|
||||
[vol.In(SENSOR_TYPES.keys())]),
|
||||
vol.Optional(CONF_LATITUDE): cv.latitude,
|
||||
vol.Optional(CONF_LONGITUDE): cv.longitude,
|
||||
})
|
||||
|
||||
|
||||
@asyncio.coroutine
|
||||
def async_setup_platform(hass, config, async_add_devices, discovery_info=None):
|
||||
"""Setup the buienradar sensor."""
|
||||
latitude = config.get(CONF_LATITUDE, hass.config.latitude)
|
||||
longitude = config.get(CONF_LONGITUDE, hass.config.longitude)
|
||||
|
||||
if None in (latitude, longitude):
|
||||
_LOGGER.error("Latitude or longitude not set in HomeAssistant config")
|
||||
return False
|
||||
|
||||
coordinates = {CONF_LATITUDE: float(latitude),
|
||||
CONF_LONGITUDE: float(longitude)}
|
||||
|
||||
dev = []
|
||||
for sensor_type in config[CONF_MONITORED_CONDITIONS]:
|
||||
dev.append(BrSensor(sensor_type, config.get(CONF_NAME, 'br')))
|
||||
async_add_devices(dev)
|
||||
|
||||
data = BrData(hass, coordinates, dev)
|
||||
# schedule the first update in 1 minute from now:
|
||||
_LOGGER.debug("Start running....")
|
||||
yield from data.schedule_update(1)
|
||||
|
||||
|
||||
class BrSensor(Entity):
|
||||
"""Representation of an Buienradar sensor."""
|
||||
|
||||
def __init__(self, sensor_type, client_name):
|
||||
"""Initialize the sensor."""
|
||||
self.client_name = client_name
|
||||
self._name = SENSOR_TYPES[sensor_type][0]
|
||||
self.type = sensor_type
|
||||
self._state = None
|
||||
self._unit_of_measurement = SENSOR_TYPES[self.type][1]
|
||||
self._entity_picture = None
|
||||
self._attribution = None
|
||||
self._stationname = None
|
||||
|
||||
def load_data(self, data):
|
||||
"""Load the sensor with relevant data."""
|
||||
# Find sensor
|
||||
from buienradar.buienradar import (ATTRIBUTION, IMAGE,
|
||||
STATIONNAME, SYMBOL)
|
||||
|
||||
self._attribution = data.get(ATTRIBUTION)
|
||||
self._stationname = data.get(STATIONNAME)
|
||||
if self.type == SYMBOL:
|
||||
# update weather symbol & status text
|
||||
new_state = data.get(self.type)
|
||||
img = data.get(IMAGE)
|
||||
|
||||
# pylint: disable=protected-access
|
||||
if new_state != self._state or img != self._entity_picture:
|
||||
self._state = new_state
|
||||
self._entity_picture = img
|
||||
return True
|
||||
else:
|
||||
# update all other sensors
|
||||
new_state = data.get(self.type)
|
||||
# pylint: disable=protected-access
|
||||
if new_state != self._state:
|
||||
self._state = new_state
|
||||
return True
|
||||
|
||||
@property
|
||||
def attribution(self):
|
||||
"""Return the attribution."""
|
||||
return self._attribution
|
||||
|
||||
@property
|
||||
def name(self):
|
||||
"""Return the name of the sensor."""
|
||||
return '{} {}'.format(self.client_name, self._name)
|
||||
|
||||
@property
|
||||
def state(self):
|
||||
"""Return the state of the device."""
|
||||
return self._state
|
||||
|
||||
@property
|
||||
def should_poll(self): # pylint: disable=no-self-use
|
||||
"""No polling needed."""
|
||||
return False
|
||||
|
||||
@property
|
||||
def entity_picture(self):
|
||||
"""Weather symbol if type is symbol."""
|
||||
from buienradar.buienradar import SYMBOL
|
||||
|
||||
if self.type != SYMBOL:
|
||||
return None
|
||||
else:
|
||||
return self._entity_picture
|
||||
|
||||
@property
|
||||
def device_state_attributes(self):
|
||||
"""Return the state attributes."""
|
||||
return {
|
||||
ATTR_ATTRIBUTION: self._attribution,
|
||||
SENSOR_TYPES['stationname'][0]: self._stationname,
|
||||
}
|
||||
|
||||
@property
|
||||
def unit_of_measurement(self):
|
||||
"""Return the unit of measurement of this entity, if any."""
|
||||
return self._unit_of_measurement
|
||||
|
||||
@property
|
||||
def icon(self):
|
||||
"""Return possible sensor specific icon."""
|
||||
return SENSOR_TYPES[self.type][2]
|
||||
|
||||
|
||||
class BrData(object):
|
||||
"""Get the latest data and updates the states."""
|
||||
|
||||
def __init__(self, hass, coordinates, devices):
|
||||
"""Initialize the data object."""
|
||||
self.devices = devices
|
||||
self.data = {}
|
||||
self.hass = hass
|
||||
self.coordinates = coordinates
|
||||
|
||||
@asyncio.coroutine
|
||||
def update_devices(self):
|
||||
"""Update all devices/sensors."""
|
||||
if self.devices:
|
||||
tasks = []
|
||||
# Update all devices
|
||||
for dev in self.devices:
|
||||
if dev.load_data(self.data):
|
||||
tasks.append(dev.async_update_ha_state())
|
||||
|
||||
if tasks:
|
||||
yield from asyncio.wait(tasks, loop=self.hass.loop)
|
||||
|
||||
@asyncio.coroutine
|
||||
def schedule_update(self, minute=1):
|
||||
"""Schedule an update after minute minutes."""
|
||||
_LOGGER.debug("Scheduling next update in %s minutes.", minute)
|
||||
nxt = dt_util.utcnow() + timedelta(minutes=minute)
|
||||
async_track_point_in_utc_time(self.hass, self.async_update,
|
||||
nxt)
|
||||
|
||||
@asyncio.coroutine
|
||||
def get_data(self, url):
|
||||
"""Load xmpl data from specified url."""
|
||||
from buienradar.buienradar import (CONTENT,
|
||||
MESSAGE, STATUS_CODE, SUCCESS)
|
||||
|
||||
_LOGGER.debug("Calling url: %s...", url)
|
||||
result = {SUCCESS: False, MESSAGE: None}
|
||||
resp = None
|
||||
try:
|
||||
websession = async_get_clientsession(self.hass)
|
||||
with async_timeout.timeout(10, loop=self.hass.loop):
|
||||
resp = yield from websession.get(url)
|
||||
|
||||
result[SUCCESS] = (resp.status == 200)
|
||||
result[STATUS_CODE] = resp.status
|
||||
result[CONTENT] = yield from resp.text()
|
||||
|
||||
return result
|
||||
except (asyncio.TimeoutError, aiohttp.ClientError) as err:
|
||||
result[MESSAGE] = "%s" % err
|
||||
return result
|
||||
finally:
|
||||
if resp is not None:
|
||||
yield from resp.release()
|
||||
|
||||
@asyncio.coroutine
|
||||
def async_update(self, *_):
|
||||
"""Update the data from buienradar."""
|
||||
from buienradar.buienradar import (parse_data, CONTENT,
|
||||
DATA, MESSAGE, STATUS_CODE, SUCCESS)
|
||||
|
||||
result = yield from self.get_data('http://xml.buienradar.nl')
|
||||
if result.get(SUCCESS, False) is False:
|
||||
result = yield from self.get_data('http://api.buienradar.nl')
|
||||
|
||||
if result.get(SUCCESS):
|
||||
result = parse_data(result.get(CONTENT),
|
||||
latitude=self.coordinates[CONF_LATITUDE],
|
||||
longitude=self.coordinates[CONF_LONGITUDE])
|
||||
if result.get(SUCCESS):
|
||||
self.data = result.get(DATA)
|
||||
|
||||
yield from self.update_devices()
|
||||
|
||||
yield from self.schedule_update(10)
|
||||
else:
|
||||
yield from self.schedule_update(2)
|
||||
else:
|
||||
# unable to get the data
|
||||
_LOGGER.warning("Unable to retrieve data from Buienradar."
|
||||
"(Msg: %s, status: %s,)",
|
||||
result.get(MESSAGE),
|
||||
result.get(STATUS_CODE),)
|
||||
# schedule new call
|
||||
yield from self.schedule_update(2)
|
||||
|
||||
@property
|
||||
def attribution(self):
|
||||
"""Return the attribution."""
|
||||
from buienradar.buienradar import ATTRIBUTION
|
||||
return self.data.get(ATTRIBUTION)
|
||||
|
||||
@property
|
||||
def stationname(self):
|
||||
"""Return the name of the selected weatherstation."""
|
||||
from buienradar.buienradar import STATIONNAME
|
||||
return self.data.get(STATIONNAME)
|
||||
|
||||
@property
|
||||
def condition(self):
|
||||
"""Return the condition."""
|
||||
from buienradar.buienradar import SYMBOL
|
||||
return self.data.get(SYMBOL)
|
||||
|
||||
@property
|
||||
def temperature(self):
|
||||
"""Return the temperature, or None."""
|
||||
from buienradar.buienradar import TEMPERATURE
|
||||
try:
|
||||
return float(self.data.get(TEMPERATURE))
|
||||
except (ValueError, TypeError):
|
||||
return None
|
||||
|
||||
@property
|
||||
def pressure(self):
|
||||
"""Return the pressure, or None."""
|
||||
from buienradar.buienradar import PRESSURE
|
||||
try:
|
||||
return float(self.data.get(PRESSURE))
|
||||
except (ValueError, TypeError):
|
||||
return None
|
||||
|
||||
@property
|
||||
def humidity(self):
|
||||
"""Return the humidity, or None."""
|
||||
from buienradar.buienradar import HUMIDITY
|
||||
try:
|
||||
return int(self.data.get(HUMIDITY))
|
||||
except (ValueError, TypeError):
|
||||
return None
|
||||
|
||||
@property
|
||||
def wind_speed(self):
|
||||
"""Return the windspeed, or None."""
|
||||
from buienradar.buienradar import WINDSPEED
|
||||
try:
|
||||
return float(self.data.get(WINDSPEED))
|
||||
except (ValueError, TypeError):
|
||||
return None
|
||||
|
||||
@property
|
||||
def wind_bearing(self):
|
||||
"""Return the wind bearing, or None."""
|
||||
from buienradar.buienradar import WINDDIRECTION
|
||||
try:
|
||||
return int(self.data.get(WINDDIRECTION))
|
||||
except (ValueError, TypeError):
|
||||
return None
|
||||
|
||||
@property
|
||||
def forecast(self):
|
||||
"""Return the forecast data."""
|
||||
from buienradar.buienradar import FORECAST
|
||||
return self.data.get(FORECAST)
|
@ -6,18 +6,16 @@ https://home-assistant.io/components/sensor.coinmarketcap/
|
||||
"""
|
||||
import logging
|
||||
from datetime import timedelta
|
||||
import json
|
||||
from urllib.error import HTTPError
|
||||
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.components.sensor import PLATFORM_SCHEMA
|
||||
from homeassistant.const import ATTR_ATTRIBUTION
|
||||
from homeassistant.helpers.entity import Entity
|
||||
from homeassistant.util import Throttle
|
||||
import homeassistant.helpers.config_validation as cv
|
||||
from homeassistant.components.sensor import PLATFORM_SCHEMA
|
||||
from homeassistant.const import ATTR_ATTRIBUTION, CONF_CURRENCY
|
||||
from homeassistant.helpers.entity import Entity
|
||||
|
||||
REQUIREMENTS = ['coinmarketcap==2.0.1']
|
||||
REQUIREMENTS = ['coinmarketcap==3.0.1']
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
@ -32,13 +30,12 @@ ATTR_SYMBOL = 'symbol'
|
||||
ATTR_TOTAL_SUPPLY = 'total_supply'
|
||||
|
||||
CONF_ATTRIBUTION = "Data provided by CoinMarketCap"
|
||||
CONF_CURRENCY = 'currency'
|
||||
|
||||
DEFAULT_CURRENCY = 'bitcoin'
|
||||
|
||||
ICON = 'mdi:currency-usd'
|
||||
|
||||
MIN_TIME_BETWEEN_UPDATES = timedelta(minutes=15)
|
||||
SCAN_INTERVAL = timedelta(minutes=15)
|
||||
|
||||
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
|
||||
vol.Optional(CONF_CURRENCY, default=DEFAULT_CURRENCY): cv.string,
|
||||
@ -56,7 +53,7 @@ def setup_platform(hass, config, add_devices, discovery_info=None):
|
||||
currency)
|
||||
currency = DEFAULT_CURRENCY
|
||||
|
||||
add_devices([CoinMarketCapSensor(CoinMarketCapData(currency))])
|
||||
add_devices([CoinMarketCapSensor(CoinMarketCapData(currency))], True)
|
||||
|
||||
|
||||
class CoinMarketCapSensor(Entity):
|
||||
@ -67,7 +64,6 @@ class CoinMarketCapSensor(Entity):
|
||||
self.data = data
|
||||
self._ticker = None
|
||||
self._unit_of_measurement = 'USD'
|
||||
self.update()
|
||||
|
||||
@property
|
||||
def name(self):
|
||||
@ -106,8 +102,7 @@ class CoinMarketCapSensor(Entity):
|
||||
def update(self):
|
||||
"""Get the latest data and updates the states."""
|
||||
self.data.update()
|
||||
self._ticker = json.loads(
|
||||
self.data.ticker.decode('utf-8').strip('\n '))[0]
|
||||
self._ticker = self.data.ticker[0]
|
||||
|
||||
|
||||
class CoinMarketCapData(object):
|
||||
@ -118,9 +113,7 @@ class CoinMarketCapData(object):
|
||||
self.currency = currency
|
||||
self.ticker = None
|
||||
|
||||
@Throttle(MIN_TIME_BETWEEN_UPDATES)
|
||||
def update(self):
|
||||
"""Get the latest data from blockchain.info."""
|
||||
from coinmarketcap import Market
|
||||
|
||||
self.ticker = Market().ticker(self.currency)
|
||||
self.ticker = Market().ticker(self.currency, limit=1)
|
||||
|
@ -8,12 +8,12 @@ import logging
|
||||
|
||||
import voluptuous as vol
|
||||
|
||||
import homeassistant.helpers.config_validation as cv
|
||||
from homeassistant.components.sensor import PLATFORM_SCHEMA
|
||||
from homeassistant.const import CONF_NAME
|
||||
import homeassistant.helpers.config_validation as cv
|
||||
from homeassistant.helpers.entity import Entity
|
||||
|
||||
REQUIREMENTS = ['py-cpuinfo==3.2.0']
|
||||
REQUIREMENTS = ['py-cpuinfo==3.3.0']
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
@ -22,6 +22,7 @@ ATTR_HZ = 'GHz Advertised'
|
||||
ATTR_ARCH = 'arch'
|
||||
|
||||
DEFAULT_NAME = 'CPU speed'
|
||||
|
||||
ICON = 'mdi:pulse'
|
||||
|
||||
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
|
||||
@ -34,7 +35,7 @@ def setup_platform(hass, config, add_devices, discovery_info=None):
|
||||
"""Set up the CPU speed sensor."""
|
||||
name = config.get(CONF_NAME)
|
||||
|
||||
add_devices([CpuSpeedSensor(name)])
|
||||
add_devices([CpuSpeedSensor(name)], True)
|
||||
|
||||
|
||||
class CpuSpeedSensor(Entity):
|
||||
@ -44,8 +45,8 @@ class CpuSpeedSensor(Entity):
|
||||
"""Initialize the sensor."""
|
||||
self._name = name
|
||||
self._state = None
|
||||
self.info = None
|
||||
self._unit_of_measurement = 'GHz'
|
||||
self.update()
|
||||
|
||||
@property
|
||||
def name(self):
|
||||
|
@ -13,7 +13,6 @@ import homeassistant.helpers.config_validation as cv
|
||||
from homeassistant.components.sensor import PLATFORM_SCHEMA
|
||||
from homeassistant.const import CONF_HOST, CONF_PORT
|
||||
from homeassistant.helpers.entity import Entity
|
||||
from homeassistant.util import Throttle
|
||||
|
||||
REQUIREMENTS = ['pycups==1.9.73']
|
||||
|
||||
@ -36,7 +35,7 @@ DEFAULT_PORT = 631
|
||||
|
||||
ICON = 'mdi:printer'
|
||||
|
||||
MIN_TIME_BETWEEN_UPDATES = timedelta(seconds=60)
|
||||
SCAN_INTERVAL = timedelta(minutes=1)
|
||||
|
||||
PRINTER_STATES = {
|
||||
3: 'idle',
|
||||
@ -72,7 +71,7 @@ def setup_platform(hass, config, add_devices, discovery_info=None):
|
||||
_LOGGER.error("Printer is not present: %s", printer)
|
||||
continue
|
||||
|
||||
add_devices(dev)
|
||||
add_devices(dev, True)
|
||||
|
||||
|
||||
class CupsSensor(Entity):
|
||||
@ -83,7 +82,6 @@ class CupsSensor(Entity):
|
||||
self.data = data
|
||||
self._name = printer
|
||||
self._printer = None
|
||||
self.update()
|
||||
|
||||
@property
|
||||
def name(self):
|
||||
@ -140,7 +138,6 @@ class CupsData(object):
|
||||
self._port = port
|
||||
self.printers = None
|
||||
|
||||
@Throttle(MIN_TIME_BETWEEN_UPDATES)
|
||||
def update(self):
|
||||
"""Get the latest data from CUPS."""
|
||||
from cups import Connection
|
||||
|
@ -10,12 +10,11 @@ import logging
|
||||
import requests
|
||||
import voluptuous as vol
|
||||
|
||||
import homeassistant.helpers.config_validation as cv
|
||||
from homeassistant.components.sensor import PLATFORM_SCHEMA
|
||||
from homeassistant.const import (
|
||||
CONF_API_KEY, CONF_NAME, CONF_BASE, CONF_QUOTE, ATTR_ATTRIBUTION)
|
||||
from homeassistant.helpers.entity import Entity
|
||||
from homeassistant.util import Throttle
|
||||
import homeassistant.helpers.config_validation as cv
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
_RESOURCE = 'http://apilayer.net/api/live'
|
||||
@ -27,7 +26,7 @@ DEFAULT_NAME = 'CurrencyLayer Sensor'
|
||||
|
||||
ICON = 'mdi:currency'
|
||||
|
||||
MIN_TIME_BETWEEN_UPDATES = timedelta(hours=2)
|
||||
SCAN_INTERVAL = timedelta(hours=2)
|
||||
|
||||
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
|
||||
vol.Required(CONF_API_KEY): cv.string,
|
||||
@ -56,8 +55,7 @@ def setup_platform(hass, config, add_devices, discovery_info=None):
|
||||
if 'error' in response.json():
|
||||
return False
|
||||
else:
|
||||
add_devices(sensors)
|
||||
rest.update()
|
||||
add_devices(sensors, True)
|
||||
|
||||
|
||||
class CurrencylayerSensor(Entity):
|
||||
@ -68,7 +66,7 @@ class CurrencylayerSensor(Entity):
|
||||
self.rest = rest
|
||||
self._quote = quote
|
||||
self._base = base
|
||||
self.update()
|
||||
self._state = None
|
||||
|
||||
@property
|
||||
def name(self):
|
||||
@ -110,7 +108,6 @@ class CurrencylayerData(object):
|
||||
self._parameters = parameters
|
||||
self.data = None
|
||||
|
||||
@Throttle(MIN_TIME_BETWEEN_UPDATES)
|
||||
def update(self):
|
||||
"""Get the latest data from Currencylayer."""
|
||||
try:
|
||||
|
@ -9,11 +9,10 @@ from datetime import timedelta
|
||||
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.components.sensor import PLATFORM_SCHEMA
|
||||
import homeassistant.helpers.config_validation as cv
|
||||
from homeassistant.util import Throttle
|
||||
from homeassistant.helpers.entity import Entity
|
||||
import homeassistant.util.dt as dt_util
|
||||
from homeassistant.components.sensor import PLATFORM_SCHEMA
|
||||
from homeassistant.helpers.entity import Entity
|
||||
|
||||
REQUIREMENTS = ['schiene==0.18']
|
||||
|
||||
@ -24,7 +23,7 @@ CONF_START = 'from'
|
||||
|
||||
ICON = 'mdi:train'
|
||||
|
||||
MIN_TIME_BETWEEN_UPDATES = timedelta(seconds=120)
|
||||
SCAN_INTERVAL = timedelta(minutes=2)
|
||||
|
||||
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
|
||||
vol.Required(CONF_DESTINATION): cv.string,
|
||||
@ -37,7 +36,7 @@ def setup_platform(hass, config, add_devices, discovery_info=None):
|
||||
start = config.get(CONF_START)
|
||||
destination = config.get(CONF_DESTINATION)
|
||||
|
||||
add_devices([DeutscheBahnSensor(start, destination)])
|
||||
add_devices([DeutscheBahnSensor(start, destination)], True)
|
||||
|
||||
|
||||
class DeutscheBahnSensor(Entity):
|
||||
@ -47,7 +46,7 @@ class DeutscheBahnSensor(Entity):
|
||||
"""Initialize the sensor."""
|
||||
self._name = '{} to {}'.format(start, goal)
|
||||
self.data = SchieneData(start, goal)
|
||||
self.update()
|
||||
self._state = None
|
||||
|
||||
@property
|
||||
def name(self):
|
||||
@ -92,7 +91,6 @@ class SchieneData(object):
|
||||
self.schiene = schiene.Schiene()
|
||||
self.connections = [{}]
|
||||
|
||||
@Throttle(MIN_TIME_BETWEEN_UPDATES)
|
||||
def update(self):
|
||||
"""Update the connection data."""
|
||||
self.connections = self.schiene.connections(
|
||||
|
@ -32,7 +32,7 @@ def setup_platform(hass, config, add_devices, discovery_info=None):
|
||||
name = config.get(CONF_NAME)
|
||||
ip_address = config.get(CONF_IP_ADDRESS)
|
||||
|
||||
add_devices([DteEnergyBridgeSensor(ip_address, name)])
|
||||
add_devices([DteEnergyBridgeSensor(ip_address, name)], True)
|
||||
|
||||
|
||||
class DteEnergyBridgeSensor(Entity):
|
||||
|
@ -13,12 +13,11 @@ from datetime import timedelta, datetime
|
||||
import requests
|
||||
import voluptuous as vol
|
||||
|
||||
import homeassistant.helpers.config_validation as cv
|
||||
import homeassistant.util.dt as dt_util
|
||||
from homeassistant.components.sensor import PLATFORM_SCHEMA
|
||||
from homeassistant.const import CONF_NAME, ATTR_ATTRIBUTION
|
||||
import homeassistant.util.dt as dt_util
|
||||
from homeassistant.helpers.entity import Entity
|
||||
from homeassistant.util import Throttle
|
||||
import homeassistant.helpers.config_validation as cv
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
_RESOURCE = 'https://data.dublinked.ie/cgi-bin/rtpi/realtimebusinformation'
|
||||
@ -36,7 +35,7 @@ CONF_ROUTE = 'route'
|
||||
DEFAULT_NAME = 'Next Bus'
|
||||
ICON = 'mdi:bus'
|
||||
|
||||
MIN_TIME_BETWEEN_UPDATES = timedelta(seconds=60)
|
||||
SCAN_INTERVAL = timedelta(minutes=1)
|
||||
TIME_STR_FORMAT = '%H:%M'
|
||||
|
||||
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
|
||||
@ -64,7 +63,7 @@ def setup_platform(hass, config, add_devices, discovery_info=None):
|
||||
route = config.get(CONF_ROUTE)
|
||||
|
||||
data = PublicTransportData(stop, route)
|
||||
add_devices([DublinPublicTransportSensor(data, stop, route, name)])
|
||||
add_devices([DublinPublicTransportSensor(data, stop, route, name)], True)
|
||||
|
||||
|
||||
class DublinPublicTransportSensor(Entity):
|
||||
@ -76,7 +75,7 @@ class DublinPublicTransportSensor(Entity):
|
||||
self._name = name
|
||||
self._stop = stop
|
||||
self._route = route
|
||||
self.update()
|
||||
self._times = self._state = None
|
||||
|
||||
@property
|
||||
def name(self):
|
||||
@ -137,7 +136,6 @@ class PublicTransportData(object):
|
||||
ATTR_ROUTE: self.route,
|
||||
ATTR_DUE_IN: 'n/a'}]
|
||||
|
||||
@Throttle(MIN_TIME_BETWEEN_UPDATES)
|
||||
def update(self):
|
||||
"""Get the latest data from opendata.ch."""
|
||||
params = {}
|
||||
@ -149,10 +147,7 @@ class PublicTransportData(object):
|
||||
params['maxresults'] = 2
|
||||
params['format'] = 'json'
|
||||
|
||||
response = requests.get(
|
||||
_RESOURCE,
|
||||
params,
|
||||
timeout=10)
|
||||
response = requests.get(_RESOURCE, params, timeout=10)
|
||||
|
||||
if response.status_code != 200:
|
||||
self.info = [{ATTR_DUE_AT: 'n/a',
|
||||
@ -175,8 +170,7 @@ class PublicTransportData(object):
|
||||
if due_at is not None and route is not None:
|
||||
bus_data = {ATTR_DUE_AT: due_at,
|
||||
ATTR_ROUTE: route,
|
||||
ATTR_DUE_IN:
|
||||
due_in_minutes(due_at)}
|
||||
ATTR_DUE_IN: due_in_minutes(due_at)}
|
||||
self.info.append(bus_data)
|
||||
|
||||
if not self.info:
|
||||
|
@ -10,12 +10,11 @@ from datetime import timedelta
|
||||
|
||||
import voluptuous as vol
|
||||
|
||||
import homeassistant.helpers.config_validation as cv
|
||||
from homeassistant.components.sensor import PLATFORM_SCHEMA
|
||||
from homeassistant.const import (
|
||||
CONF_NAME, CONF_VALUE_TEMPLATE, STATE_UNKNOWN, CONF_UNIT_OF_MEASUREMENT)
|
||||
import homeassistant.helpers.config_validation as cv
|
||||
from homeassistant.helpers.entity import Entity
|
||||
from homeassistant.util import Throttle
|
||||
|
||||
REQUIREMENTS = ['dweepy==0.3.0']
|
||||
|
||||
@ -25,7 +24,7 @@ CONF_DEVICE = 'device'
|
||||
|
||||
DEFAULT_NAME = 'Dweet.io Sensor'
|
||||
|
||||
MIN_TIME_BETWEEN_UPDATES = timedelta(seconds=60)
|
||||
SCAN_INTERVAL = timedelta(minutes=1)
|
||||
|
||||
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
|
||||
vol.Required(CONF_DEVICE): cv.string,
|
||||
@ -109,7 +108,6 @@ class DweetData(object):
|
||||
self._device = device
|
||||
self.data = None
|
||||
|
||||
@Throttle(MIN_TIME_BETWEEN_UPDATES)
|
||||
def update(self):
|
||||
"""Get the latest data from Dweet.io."""
|
||||
import dweepy
|
||||
|
73
homeassistant/components/sensor/dyson.py
Normal file
73
homeassistant/components/sensor/dyson.py
Normal file
@ -0,0 +1,73 @@
|
||||
"""Support for Dyson Pure Cool Link Sensors."""
|
||||
import logging
|
||||
import asyncio
|
||||
|
||||
from homeassistant.const import STATE_UNKNOWN
|
||||
from homeassistant.components.dyson import DYSON_DEVICES
|
||||
|
||||
from homeassistant.helpers.entity import Entity
|
||||
|
||||
DEPENDENCIES = ['dyson']
|
||||
|
||||
SENSOR_UNITS = {'filter_life': 'hours'}
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def setup_platform(hass, config, add_devices, discovery_info=None):
|
||||
"""Set up the Dyson Sensors."""
|
||||
_LOGGER.info("Creating new Dyson fans")
|
||||
devices = []
|
||||
# Get Dyson Devices from parent component
|
||||
for device in hass.data[DYSON_DEVICES]:
|
||||
devices.append(DysonFilterLifeSensor(hass, device))
|
||||
add_devices(devices)
|
||||
|
||||
|
||||
class DysonFilterLifeSensor(Entity):
|
||||
"""Representation of Dyson filter life sensor (in hours)."""
|
||||
|
||||
def __init__(self, hass, device):
|
||||
"""Create a new Dyson filter life sensor."""
|
||||
self.hass = hass
|
||||
self._device = device
|
||||
self._name = "{} filter life".format(self._device.name)
|
||||
self._old_value = None
|
||||
|
||||
@asyncio.coroutine
|
||||
def async_added_to_hass(self):
|
||||
"""Callback when entity is added to hass."""
|
||||
self.hass.async_add_job(
|
||||
self._device.add_message_listener(self.on_message))
|
||||
|
||||
def on_message(self, message):
|
||||
"""Called when new messages received from the fan."""
|
||||
_LOGGER.debug(
|
||||
"Message received for %s device: %s", self.name, message)
|
||||
# Prevent refreshing if not needed
|
||||
if self._old_value is None or self._old_value != self.state:
|
||||
self._old_value = self.state
|
||||
self.schedule_update_ha_state()
|
||||
|
||||
@property
|
||||
def should_poll(self):
|
||||
"""No polling needed."""
|
||||
return False
|
||||
|
||||
@property
|
||||
def state(self):
|
||||
"""Return filter life in hours.."""
|
||||
if self._device.state:
|
||||
return self._device.state.filter_life
|
||||
else:
|
||||
return STATE_UNKNOWN
|
||||
|
||||
@property
|
||||
def name(self):
|
||||
"""Return the name of the dyson sensor name."""
|
||||
return self._name
|
||||
|
||||
@property
|
||||
def unit_of_measurement(self):
|
||||
"""Return the unit the value is expressed in."""
|
||||
return SENSOR_UNITS['filter_life']
|
@ -12,13 +12,12 @@ from datetime import timedelta
|
||||
import requests
|
||||
import voluptuous as vol
|
||||
|
||||
import homeassistant.helpers.config_validation as cv
|
||||
from homeassistant.components.sensor import PLATFORM_SCHEMA
|
||||
from homeassistant.const import (
|
||||
CONF_USERNAME, CONF_PASSWORD,
|
||||
CONF_NAME, CONF_MONITORED_VARIABLES)
|
||||
from homeassistant.helpers.entity import Entity
|
||||
from homeassistant.util import Throttle
|
||||
import homeassistant.helpers.config_validation as cv
|
||||
|
||||
REQUIREMENTS = ['pyebox==0.1.0']
|
||||
|
||||
@ -32,35 +31,25 @@ PERCENT = '%' # type: str
|
||||
DEFAULT_NAME = 'EBox'
|
||||
|
||||
REQUESTS_TIMEOUT = 15
|
||||
MIN_TIME_BETWEEN_UPDATES = timedelta(minutes=5)
|
||||
SCAN_INTERVAL = timedelta(minutes=5)
|
||||
|
||||
SENSOR_TYPES = {
|
||||
'usage': ['Usage',
|
||||
PERCENT, 'mdi:percent'],
|
||||
'balance': ['Balance',
|
||||
PRICE, 'mdi:square-inc-cash'],
|
||||
'limit': ['Data limit',
|
||||
GIGABITS, 'mdi:download'],
|
||||
'days_left': ['Days left',
|
||||
DAYS, 'mdi:calendar-today'],
|
||||
'before_offpeak_download': ['Download before offpeak',
|
||||
GIGABITS, 'mdi:download'],
|
||||
'before_offpeak_upload': ['Upload before offpeak',
|
||||
GIGABITS, 'mdi:upload'],
|
||||
'before_offpeak_total': ['Total before offpeak',
|
||||
GIGABITS, 'mdi:download'],
|
||||
'offpeak_download': ['Offpeak download',
|
||||
GIGABITS, 'mdi:download'],
|
||||
'offpeak_upload': ['Offpeak Upload',
|
||||
GIGABITS, 'mdi:upload'],
|
||||
'offpeak_total': ['Offpeak Total',
|
||||
GIGABITS, 'mdi:download'],
|
||||
'download': ['Download',
|
||||
GIGABITS, 'mdi:download'],
|
||||
'upload': ['Upload',
|
||||
GIGABITS, 'mdi:upload'],
|
||||
'total': ['Total',
|
||||
GIGABITS, 'mdi:download'],
|
||||
'usage': ['Usage', PERCENT, 'mdi:percent'],
|
||||
'balance': ['Balance', PRICE, 'mdi:square-inc-cash'],
|
||||
'limit': ['Data limit', GIGABITS, 'mdi:download'],
|
||||
'days_left': ['Days left', DAYS, 'mdi:calendar-today'],
|
||||
'before_offpeak_download':
|
||||
['Download before offpeak', GIGABITS, 'mdi:download'],
|
||||
'before_offpeak_upload':
|
||||
['Upload before offpeak', GIGABITS, 'mdi:upload'],
|
||||
'before_offpeak_total':
|
||||
['Total before offpeak', GIGABITS, 'mdi:download'],
|
||||
'offpeak_download': ['Offpeak download', GIGABITS, 'mdi:download'],
|
||||
'offpeak_upload': ['Offpeak Upload', GIGABITS, 'mdi:upload'],
|
||||
'offpeak_total': ['Offpeak Total', GIGABITS, 'mdi:download'],
|
||||
'download': ['Download', GIGABITS, 'mdi:download'],
|
||||
'upload': ['Upload', GIGABITS, 'mdi:upload'],
|
||||
'total': ['Total', GIGABITS, 'mdi:download'],
|
||||
}
|
||||
|
||||
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
|
||||
@ -142,7 +131,6 @@ class EBoxData(object):
|
||||
self.client = EboxClient(username, password, REQUESTS_TIMEOUT)
|
||||
self.data = {}
|
||||
|
||||
@Throttle(MIN_TIME_BETWEEN_UPDATES)
|
||||
def update(self):
|
||||
"""Get the latest data from Ebox."""
|
||||
from pyebox.client import PyEboxError
|
||||
@ -151,5 +139,4 @@ class EBoxData(object):
|
||||
except PyEboxError as exp:
|
||||
_LOGGER.error("Error on receive last EBox data: %s", exp)
|
||||
return
|
||||
# Update data
|
||||
self.data = self.client.get_data()
|
||||
|
homeassistant/components/sensor/ecobee.py
@@ -9,13 +9,14 @@ from homeassistant.const import TEMP_FAHRENHEIT
from homeassistant.helpers.entity import Entity

DEPENDENCIES = ['ecobee']

ECOBEE_CONFIG_FILE = 'ecobee.conf'

SENSOR_TYPES = {
    'temperature': ['Temperature', TEMP_FAHRENHEIT],
    'humidity': ['Humidity', '%']
}

ECOBEE_CONFIG_FILE = 'ecobee.conf'


def setup_platform(hass, config, add_devices, discovery_info=None):
    """Set up the Ecobee sensors."""

@@ -31,7 +32,7 @@ def setup_platform(hass, config, add_devices, discovery_info=None):

                dev.append(EcobeeSensor(sensor['name'], item['type'], index))

    add_devices(dev)
    add_devices(dev, True)


class EcobeeSensor(Entity):

@@ -39,13 +40,12 @@ class EcobeeSensor(Entity):

    def __init__(self, sensor_name, sensor_type, sensor_index):
        """Initialize the sensor."""
        self._name = sensor_name + ' ' + SENSOR_TYPES[sensor_type][0]
        self._name = '{} {}'.format(sensor_name, SENSOR_TYPES[sensor_type][0])
        self.sensor_name = sensor_name
        self.type = sensor_type
        self.index = sensor_index
        self._state = None
        self._unit_of_measurement = SENSOR_TYPES[sensor_type][1]
        self.update()

    @property
    def name(self):

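The two ecobee hunks work together: passing True as the second argument to add_devices asks Home Assistant to call each entity's update() once before adding it, which is why the explicit self.update() call is no longer needed in __init__. A minimal sketch of that pattern (DemoSensor and its literal state value are hypothetical):

# Sketch only: the update_before_add pattern shown in the ecobee diff.
from homeassistant.helpers.entity import Entity


class DemoSensor(Entity):
    """Hypothetical sensor, not part of the commit."""

    def __init__(self, name):
        self._name = name
        self._state = None  # no self.update() here; see add_devices() below

    def update(self):
        self._state = 42  # placeholder for a real data fetch


def setup_platform(hass, config, add_devices, discovery_info=None):
    """The second positional argument True means update_before_add."""
    add_devices([DemoSensor('demo')], True)
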
homeassistant/components/sensor/eliqonline.py
@@ -6,7 +6,6 @@ https://home-assistant.io/components/sensor.eliqonline/
"""
from datetime import timedelta
import logging
from urllib.error import URLError

import voluptuous as vol


@@ -49,9 +48,9 @@ def setup_platform(hass, config, add_devices, discovery_info=None):
    try:
        _LOGGER.debug("Probing for access to ELIQ Online API")
        api.get_data_now(channelid=channel_id)
    except URLError:
    except OSError as error:
        _LOGGER.error("Could not access the ELIQ Online API. "
                      "Is the configuration valid?")
                      "Is the configuration valid? %s", error)
        return False

    add_devices([EliqSensor(api, channel_id, name)])

@@ -94,5 +93,6 @@ class EliqSensor(Entity):
            response = self._api.get_data_now(channelid=self._channel_id)
            self._state = int(response.power)
            _LOGGER.debug("Updated power from server %d W", self._state)
        except URLError:
            _LOGGER.warning("Could not connect to the ELIQ Online API")
        except OSError as error:
            _LOGGER.warning("Could not connect to the ELIQ Online API: %s",
                            error)

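Catching OSError instead of URLError is a strict broadening: in Python 3, urllib.error.URLError subclasses OSError, so the failure the old code handled is still caught, along with lower-level socket errors. A standard-library-only check:

# Sketch only: URLError is still caught by the broader except clause.
from urllib.error import URLError

try:
    raise URLError("connection refused")
except OSError as error:
    print("caught:", error)  # prints: caught: <urlopen error connection refused>
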
homeassistant/components/sensor/etherscan.py (new file, 74 lines)
@@ -0,0 +1,74 @@
"""
Support for Etherscan sensors.

For more details about this platform, please refer to the documentation at
https://home-assistant.io/components/sensor.etherscan/
"""
from datetime import timedelta

import voluptuous as vol

import homeassistant.helpers.config_validation as cv
from homeassistant.components.sensor import PLATFORM_SCHEMA
from homeassistant.const import (CONF_NAME, ATTR_ATTRIBUTION)
from homeassistant.helpers.entity import Entity

REQUIREMENTS = ['python-etherscan-api==0.0.1']

CONF_ADDRESS = 'address'
CONF_ATTRIBUTION = "Data provided by etherscan.io"

DEFAULT_NAME = 'Ethereum Balance'

SCAN_INTERVAL = timedelta(minutes=5)

PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
    vol.Required(CONF_ADDRESS): cv.string,
    vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
})


def setup_platform(hass, config, add_devices, discovery_info=None):
    """Set up the Etherscan.io sensors."""
    address = config.get(CONF_ADDRESS)
    name = config.get(CONF_NAME)

    add_devices([EtherscanSensor(name, address)], True)


class EtherscanSensor(Entity):
    """Representation of an Etherscan.io sensor."""

    def __init__(self, name, address):
        """Initialize the sensor."""
        self._name = name
        self.address = address
        self._state = None
        self._unit_of_measurement = 'ETH'

    @property
    def name(self):
        """Return the name of the sensor."""
        return self._name

    @property
    def state(self):
        """Return the state of the sensor."""
        return self._state

    @property
    def unit_of_measurement(self):
        """Return the unit of measurement this sensor expresses itself in."""
        return self._unit_of_measurement

    @property
    def device_state_attributes(self):
        """Return the state attributes of the sensor."""
        return {
            ATTR_ATTRIBUTION: CONF_ATTRIBUTION,
        }

    def update(self):
        """Get the latest state of the sensor."""
        from pyetherscan import get_balance
        self._state = get_balance(self.address)

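The new platform defers all fetching to pyetherscan.get_balance, which the pinned python-etherscan-api==0.0.1 requirement is expected to provide, as imported in update() above. A hedged way to exercise that same call outside Home Assistant (the address below is a placeholder, not a real account):

# Sketch only: calls the same helper the platform's update() uses,
# assuming the pinned package exposes get_balance as imported above.
from pyetherscan import get_balance

ADDRESS = '0x0000000000000000000000000000000000000000'  # placeholder address

print('ETH balance:', get_balance(ADDRESS))
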
Some files were not shown because too many files have changed in this diff.