Don't use len(SEQUENCE) as condition value (#7249)
* Don't use len(SEQUENCE) as condition value
* Update volvooncall.py
parent 15b2473224
commit cfc023e128
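The change applied throughout this commit is the standard Python truthiness idiom: empty sequences, mappings, and strings evaluate as false, so PEP 8 recommends testing the object directly rather than comparing its length. A minimal sketch of the before/after pattern (illustrative only; the variable name is hypothetical and not taken from any file touched by the commit):

    devices = []  # hypothetical empty list

    # Before: explicit length comparison
    if len(devices) > 0:
        print("have devices")

    # After: rely on the truthiness of the sequence itself
    if devices:
        print("have devices")

The same rewrite covers `if len(x):`, `if len(x) != 0:`, and `while len(x):`, which all become `if x:` or `while x:`.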
@@ -26,7 +26,7 @@ def setup_platform(hass, config, add_devices, discovery_info=None):
 if cube.is_windowshutter(device):
 devices.append(MaxCubeShutter(hass, name, device.rf_address))

-if len(devices):
+if devices:
 add_devices(devices)


@@ -28,7 +28,7 @@ class VolvoSensor(VolvoEntity, BinarySensorDevice):
 """Return True if the binary sensor is on."""
 val = getattr(self.vehicle, self._attribute)
 if self._attribute == 'bulb_failures':
-return len(val) > 0
+return bool(val)
 elif self._attribute in ['doors', 'windows']:
 return any([val[key] for key in val if 'Open' in key])
 else:
@@ -34,7 +34,7 @@ def setup_platform(hass, config, add_devices, discovery_info=None):
 # Add device to HASS
 devices.append(MaxCubeClimate(hass, name, device.rf_address))

-if len(devices):
+if devices:
 add_devices(devices)


@@ -48,7 +48,7 @@ def setup_platform(hass, config, add_devices, discovery_info=None):
 climate_devices.append(create_climate_device(
 tado, hass, zone, zone['name'], zone['id']))

-if len(climate_devices) > 0:
+if climate_devices:
 add_devices(climate_devices, True)
 return True
 else:
@@ -37,6 +37,8 @@ from homeassistant.const import (
 ATTR_GPS_ACCURACY, ATTR_LATITUDE, ATTR_LONGITUDE,
 DEVICE_DEFAULT_NAME, STATE_HOME, STATE_NOT_HOME, ATTR_ENTITY_ID)

+_LOGGER = logging.getLogger(__name__)
+
 DOMAIN = 'device_tracker'
 DEPENDENCIES = ['zone']

@@ -86,7 +88,6 @@ PLATFORM_SCHEMA = cv.PLATFORM_SCHEMA.extend({
 DISCOVERY_PLATFORMS = {
 SERVICE_NETGEAR: 'netgear',
 }
-_LOGGER = logging.getLogger(__name__)


 def is_on(hass: HomeAssistantType, entity_id: str=None):
@@ -125,7 +126,7 @@ def async_setup(hass: HomeAssistantType, config: ConfigType):
 async_log_exception(ex, DOMAIN, config, hass)
 return False
 else:
-conf = conf[0] if len(conf) > 0 else {}
+conf = conf[0] if conf else {}
 consider_home = conf.get(CONF_CONSIDER_HOME, DEFAULT_CONSIDER_HOME)
 track_new = conf.get(CONF_TRACK_NEW, DEFAULT_TRACK_NEW)

@@ -11,21 +11,19 @@ from collections import namedtuple
 import asyncio
 import aiohttp
 import async_timeout

 import voluptuous as vol
-import homeassistant.helpers.config_validation as cv
-
+import homeassistant.helpers.config_validation as cv
 from homeassistant.const import CONF_USERNAME, CONF_PASSWORD
 from homeassistant.util import Throttle
 from homeassistant.components.device_tracker import (
 DOMAIN, PLATFORM_SCHEMA, DeviceScanner)
 from homeassistant.helpers.aiohttp_client import async_create_clientsession

-# Configuration constant specific for tado
-CONF_HOME_ID = 'home_id'
-
 _LOGGER = logging.getLogger(__name__)

+CONF_HOME_ID = 'home_id'
+
 MIN_TIME_BETWEEN_SCANS = timedelta(seconds=30)

 PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
@@ -48,7 +48,7 @@ def setup(hass, config):

 if not os.path.isdir(download_path):
 _LOGGER.error(
-"Download path %s does not exist. File Downloader not active.",
+"Download path %s does not exist. File Downloader not active",
 download_path)

 return False
@@ -76,7 +76,7 @@ def setup(hass, config):
 match = re.findall(r"filename=(\S+)",
 req.headers['content-disposition'])

-if len(match) > 0:
+if match:
 filename = match[0].strip("'\" ")

 if not filename:
@@ -84,7 +84,7 @@ def setup(hass, config):
 url).strip()

 if not filename:
-filename = "ha_download"
+filename = 'ha_download'

 # Remove stuff to ruin paths
 filename = sanitize_filename(filename)
@@ -7,13 +7,13 @@ https://home-assistant.io/components/emoncms_history/
 import logging
 from datetime import timedelta

-import voluptuous as vol
 import requests
+import voluptuous as vol

+import homeassistant.helpers.config_validation as cv
 from homeassistant.const import (
 CONF_API_KEY, CONF_WHITELIST, CONF_URL, STATE_UNKNOWN, STATE_UNAVAILABLE,
 CONF_SCAN_INTERVAL)
-import homeassistant.helpers.config_validation as cv
 from homeassistant.helpers import state as state_helper
 from homeassistant.helpers.event import track_point_in_time
 from homeassistant.util import dt as dt_util
@@ -50,14 +50,13 @@ def setup(hass, config):
 timeout=5)

 except requests.exceptions.RequestException:
-_LOGGER.error("Error saving data '%s' to '%s'",
-payload, fullurl)
+_LOGGER.error("Error saving data '%s' to '%s'", payload, fullurl)

 else:
 if req.status_code != 200:
-_LOGGER.error("Error saving data '%s' to '%s'" +
-"(http status code = %d)", payload,
-fullurl, req.status_code)
+_LOGGER.error(
+"Error saving data %s to %s (http status code = %d)",
+payload, fullurl, req.status_code)

 def update_emoncms(time):
 """Send whitelisted entities states reguarly to Emoncms."""
@@ -71,12 +70,11 @@ def setup(hass, config):
 continue

 try:
-payload_dict[entity_id] = state_helper.state_as_number(
-state)
+payload_dict[entity_id] = state_helper.state_as_number(state)
 except ValueError:
 continue

-if len(payload_dict) > 0:
+if payload_dict:
 payload = "{%s}" % ",".join("{}:{}".format(key, val)
 for key, val in
 payload_dict.items())
@@ -36,7 +36,7 @@ CONFIG_SCHEMA = vol.Schema({


 def setup(hass, config):
-"""Setup the feedreader component."""
+"""Set up the Feedreader component."""
 urls = config.get(DOMAIN)[CONF_URLS]
 data_file = hass.config.path("{}.pickle".format(DOMAIN))
 storage = StoredData(data_file)
@@ -45,7 +45,7 @@ def setup(hass, config):


 class FeedManager(object):
-"""Abstraction over feedparser module."""
+"""Abstraction over Feedparser module."""

 def __init__(self, url, hass, storage):
 """Initialize the FeedManager object, poll every hour."""
@@ -56,46 +56,45 @@ class FeedManager(object):
 self._storage = storage
 self._last_entry_timestamp = None
 self._has_published_parsed = False
-hass.bus.listen_once(EVENT_HOMEASSISTANT_START,
-lambda _: self._update())
-track_utc_time_change(hass, lambda now: self._update(),
-minute=0, second=0)
+hass.bus.listen_once(
+EVENT_HOMEASSISTANT_START, lambda _: self._update())
+track_utc_time_change(
+hass, lambda now: self._update(), minute=0, second=0)

 def _log_no_entries(self):
 """Send no entries log at debug level."""
-_LOGGER.debug('No new entries to be published in feed "%s"', self._url)
+_LOGGER.debug("No new entries to be published in feed %s", self._url)

 def _update(self):
 """Update the feed and publish new entries to the event bus."""
 import feedparser
-_LOGGER.info('Fetching new data from feed "%s"', self._url)
+_LOGGER.info("Fetching new data from feed %s", self._url)
 self._feed = feedparser.parse(self._url,
 etag=None if not self._feed
 else self._feed.get('etag'),
 modified=None if not self._feed
 else self._feed.get('modified'))
 if not self._feed:
-_LOGGER.error('Error fetching feed data from "%s"', self._url)
+_LOGGER.error("Error fetching feed data from %s", self._url)
 else:
 if self._feed.bozo != 0:
-_LOGGER.error('Error parsing feed "%s"', self._url)
+_LOGGER.error("Error parsing feed %s", self._url)
 # Using etag and modified, if there's no new data available,
 # the entries list will be empty
-elif len(self._feed.entries) > 0:
-_LOGGER.debug('%s entri(es) available in feed "%s"',
-len(self._feed.entries),
-self._url)
+elif self._feed.entries:
+_LOGGER.debug("%s entri(es) available in feed %s",
+len(self._feed.entries), self._url)
 if len(self._feed.entries) > MAX_ENTRIES:
-_LOGGER.debug('Processing only the first %s entries '
-'in feed "%s"', MAX_ENTRIES, self._url)
+_LOGGER.debug("Processing only the first %s entries "
+"in feed %s", MAX_ENTRIES, self._url)
 self._feed.entries = self._feed.entries[0:MAX_ENTRIES]
 self._publish_new_entries()
 if self._has_published_parsed:
-self._storage.put_timestamp(self._url,
-self._last_entry_timestamp)
+self._storage.put_timestamp(
+self._url, self._last_entry_timestamp)
 else:
 self._log_no_entries()
-_LOGGER.info('Fetch from feed "%s" completed', self._url)
+_LOGGER.info("Fetch from feed %s completed", self._url)

 def _update_and_fire_entry(self, entry):
 """Update last_entry_timestamp and fire entry."""
@@ -103,12 +102,12 @@ class FeedManager(object):
 # it to publish only new available entries since the last run
 if 'published_parsed' in entry.keys():
 self._has_published_parsed = True
-self._last_entry_timestamp = max(entry.published_parsed,
-self._last_entry_timestamp)
+self._last_entry_timestamp = max(
+entry.published_parsed, self._last_entry_timestamp)
 else:
 self._has_published_parsed = False
-_LOGGER.debug('No `published_parsed` info available '
-'for entry "%s"', entry.title)
+_LOGGER.debug("No published_parsed info available for entry %s",
+entry.title)
 entry.update({'feed_url': self._url})
 self._hass.bus.fire(EVENT_FEEDREADER, entry)

@@ -129,7 +128,7 @@ class FeedManager(object):
 self._update_and_fire_entry(entry)
 new_entries = True
 else:
-_LOGGER.debug('Entry "%s" already processed', entry.title)
+_LOGGER.debug("Entry %s already processed", entry.title)
 if not new_entries:
 self._log_no_entries()
 self._firstrun = False
@@ -150,13 +149,13 @@ class StoredData(object):
 """Fetch data stored into pickle file."""
 if self._cache_outdated and exists(self._data_file):
 try:
-_LOGGER.debug('Fetching data from file %s', self._data_file)
+_LOGGER.debug("Fetching data from file %s", self._data_file)
 with self._lock, open(self._data_file, 'rb') as myfile:
 self._data = pickle.load(myfile) or {}
 self._cache_outdated = False
 # pylint: disable=bare-except
 except:
-_LOGGER.error('Error loading data from pickled file %s',
+_LOGGER.error("Error loading data from pickled file %s",
 self._data_file)

 def get_timestamp(self, url):
@@ -165,16 +164,16 @@ class StoredData(object):
 return self._data.get(url)

 def put_timestamp(self, url, timestamp):
-"""Update timestamp for given url."""
+"""Update timestamp for given URL."""
 self._fetch_data()
 with self._lock, open(self._data_file, 'wb') as myfile:
 self._data.update({url: timestamp})
-_LOGGER.debug('Overwriting feed "%s" timestamp in storage file %s',
+_LOGGER.debug("Overwriting feed %s timestamp in storage file %s",
 url, self._data_file)
 try:
 pickle.dump(self._data, myfile)
 # pylint: disable=bare-except
 except:
-_LOGGER.error('Error saving pickled data to %s',
-self._data_file)
+_LOGGER.error(
+"Error saving pickled data to %s", self._data_file)
 self._cache_outdated = True
@@ -21,9 +21,10 @@ from homeassistant.helpers.entity import Entity
 from homeassistant.helpers.event import track_time_interval
 from homeassistant.config import load_yaml_config_file

-DOMAIN = 'homematic'
 REQUIREMENTS = ["pyhomematic==0.1.24"]

+DOMAIN = 'homematic'
+
 SCAN_INTERVAL_HUB = timedelta(seconds=300)
 SCAN_INTERVAL_VARIABLES = timedelta(seconds=30)

@@ -124,20 +125,20 @@ CONF_LOCAL_IP = 'local_ip'
 CONF_LOCAL_PORT = 'local_port'
 CONF_IP = 'ip'
 CONF_PORT = 'port'
-CONF_CALLBACK_IP = "callback_ip"
-CONF_CALLBACK_PORT = "callback_port"
+CONF_CALLBACK_IP = 'callback_ip'
+CONF_CALLBACK_PORT = 'callback_port'
 CONF_RESOLVENAMES = 'resolvenames'
 CONF_VARIABLES = 'variables'
 CONF_DEVICES = 'devices'
 CONF_DELAY = 'delay'
 CONF_PRIMARY = 'primary'

-DEFAULT_LOCAL_IP = "0.0.0.0"
+DEFAULT_LOCAL_IP = '0.0.0.0'
 DEFAULT_LOCAL_PORT = 0
 DEFAULT_RESOLVENAMES = False
 DEFAULT_PORT = 2001
-DEFAULT_USERNAME = "Admin"
-DEFAULT_PASSWORD = ""
+DEFAULT_USERNAME = 'Admin'
+DEFAULT_PASSWORD = ''
 DEFAULT_VARIABLES = False
 DEFAULT_DEVICES = True
 DEFAULT_DELAY = 0.5
@@ -145,7 +146,7 @@ DEFAULT_PRIMARY = False


 DEVICE_SCHEMA = vol.Schema({
-vol.Required(CONF_PLATFORM): "homematic",
+vol.Required(CONF_PLATFORM): 'homematic',
 vol.Required(ATTR_NAME): cv.string,
 vol.Required(ATTR_ADDRESS): cv.string,
 vol.Required(ATTR_PROXY): cv.string,
@@ -249,7 +250,7 @@ def setup(hass, config):
 hass.data[DATA_DEVINIT] = {}
 hass.data[DATA_STORE] = []

-# create hosts list for pyhomematic
+# Create hosts list for pyhomematic
 remotes = {}
 hosts = {}
 for rname, rconfig in config[DOMAIN][CONF_HOSTS].items():
@@ -278,7 +279,7 @@ def setup(hass, config):
 localport=config[DOMAIN].get(CONF_LOCAL_PORT),
 remotes=remotes,
 systemcallback=bound_system_callback,
-interface_id="homeassistant"
+interface_id='homeassistant'
 )

 # Start server thread, connect to peer, initialize to receive events
@@ -288,13 +289,13 @@ def setup(hass, config):
 hass.bus.listen_once(
 EVENT_HOMEASSISTANT_STOP, hass.data[DATA_HOMEMATIC].stop)

-# init homematic hubs
+# Init homematic hubs
 entity_hubs = []
 for _, hub_data in hosts.items():
 entity_hubs.append(HMHub(
 hass, hub_data[CONF_NAME], hub_data[CONF_VARIABLES]))

-# regeister homematic services
+# Regeister homematic services
 descriptions = load_yaml_config_file(
 os.path.join(os.path.dirname(__file__), 'services.yaml'))

@@ -304,24 +305,24 @@ def setup(hass, config):
 channel = service.data.get(ATTR_CHANNEL)
 param = service.data.get(ATTR_PARAM)

-# device not found
+# Device not found
 hmdevice = _device_from_servicecall(hass, service)
 if hmdevice is None:
 _LOGGER.error("%s not found for service virtualkey!", address)
 return

-# if param exists for this device
+# If param exists for this device
 if param not in hmdevice.ACTIONNODE:
 _LOGGER.error("%s not datapoint in hm device %s", param, address)
 return

-# channel exists?
+# Channel exists?
 if channel not in hmdevice.ACTIONNODE[param]:
 _LOGGER.error("%i is not a channel in hm device %s",
 channel, address)
 return

-# call key
+# Call key
 hmdevice.actionNodeData(param, True, channel)

 hass.services.register(
@@ -369,13 +370,13 @@ def setup(hass, config):
 param = service.data.get(ATTR_PARAM)
 value = service.data.get(ATTR_VALUE)

-# device not found
+# Device not found
 hmdevice = _device_from_servicecall(hass, service)
 if hmdevice is None:
 _LOGGER.error("%s not found!", address)
 return

-# call key
+# Call key
 hmdevice.setValue(param, value, channel)

 hass.services.register(
@@ -394,18 +395,16 @@ def _system_callback_handler(hass, config, src, *args):
 (interface_id, dev_descriptions) = args
 proxy = interface_id.split('-')[-1]

-# device support active?
+# Device support active?
 if not hass.data[DATA_DEVINIT][proxy]:
 return

-##
 # Get list of all keys of the devices (ignoring channels)
 key_dict = {}
 for dev in dev_descriptions:
 key_dict[dev['ADDRESS'].split(':')[0]] = True

-##
-# remove device they allready init by HA
+# Remove device they allready init by HA
 tmp_devs = key_dict.copy()
 for dev in tmp_devs:
 if dev in hass.data[DATA_STORE]:
@@ -419,11 +418,11 @@ def _system_callback_handler(hass, config, src, *args):
 for dev in key_dict:
 hmdevice = hass.data[DATA_HOMEMATIC].devices[proxy].get(dev)

-# have events?
-if len(hmdevice.EVENTNODE) > 0:
+# Have events?
+if hmdevice.EVENTNODE:
 _LOGGER.debug("Register Events from %s", dev)
-hmdevice.setEventCallback(callback=bound_event_callback,
-bequeath=True)
+hmdevice.setEventCallback(
+callback=bound_event_callback, bequeath=True)

 # If configuration allows autodetection of devices,
 # all devices not configured are added.
@@ -559,7 +558,7 @@ def _hm_event_handler(hass, proxy, device, caller, attribute, value):
 }))
 return

-_LOGGER.warning("Event is unknown and not forwarded to HA")
+_LOGGER.warning("Event is unknown and not forwarded")


 def _device_from_servicecall(hass, service):
@@ -727,7 +726,7 @@ class HMDevice(Entity):

 def link_homematic(self):
 """Connect to Homematic."""
-# device is already linked
+# Device is already linked
 if self._connected:
 return True

@@ -42,7 +42,7 @@ PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({

 @asyncio.coroutine
 def async_setup_platform(hass, config, async_add_devices, discovery_info=None):
-"""Set up the microsoft face identify platform."""
+"""Set up the Microsoft face identify platform."""
 api = hass.data[DATA_MICROSOFT_FACE]
 face_group = config[CONF_GROUP]
 confidence = config[CONF_CONFIDENCE]
@@ -197,7 +197,7 @@ class MicrosoftFaceIdentifyEntity(ImageProcessingFaceEntity):
 {'faceIds': face_ids, 'personGroupId': self._face_group})

 except HomeAssistantError as err:
-_LOGGER.error("Can't process image on microsoft face: %s", err)
+_LOGGER.error("Can't process image on Microsoft face: %s", err)
 return

 # parse data
@@ -205,7 +205,7 @@ class MicrosoftFaceIdentifyEntity(ImageProcessingFaceEntity):
 total = 0
 for face in detect:
 total += 1
-if len(face['candidates']) == 0:
+if not face['candidates']:
 continue

 data = face['candidates'][0]
@@ -101,7 +101,7 @@ def setup(hass, config):
 return

 try:
-if len(whitelist) > 0 and state.entity_id not in whitelist:
+if whitelist and state.entity_id not in whitelist:
 return

 _state = float(state_helper.state_as_number(state))
@@ -154,7 +154,7 @@ def setup(hass, config):
 try:
 influx.write_points(json_body)
 except exceptions.InfluxDBClientError:
-_LOGGER.exception('Error saving event "%s" to InfluxDB', json_body)
+_LOGGER.exception("Error saving event %s to InfluxDB", json_body)

 hass.bus.listen(EVENT_STATE_CHANGED, influx_event_listener)

@@ -33,7 +33,7 @@ def setup_platform(hass, config, add_devices, discovery_info=None):
 insteonhub = hass.data['insteon_local']

 conf_lights = config_from_file(hass.config.path(INSTEON_LOCAL_LIGHTS_CONF))
-if len(conf_lights):
+if conf_lights:
 for device_id in conf_lights:
 setup_light(device_id, conf_lights[device_id], insteonhub, hass,
 add_devices)
@@ -101,11 +101,11 @@ def setup_platform(hass, config, add_devices, discovery_info=None):
 host = config.get(CONF_HOST)

 if host is None:
-return # if no host configured, do not continue
+return

 pin = None
 bravia_config = _config_from_file(hass.config.path(BRAVIA_CONFIG_FILE))
-while len(bravia_config):
+while bravia_config:
 # Set up a configured TV
 host_ip, host_config = bravia_config.popitem()
 if host_ip == host:
@@ -236,7 +236,7 @@ class BraviaTVDevice(MediaPlayerDevice):
 self._state = STATE_ON
 playing_info = self._braviarc.get_playing_info()
 self._reset_playing_info()
-if playing_info is None or len(playing_info) == 0:
+if playing_info is None or not playing_info:
 self._channel_name = 'App'
 else:
 self._program_name = playing_info.get('programTitle')
@@ -275,7 +275,7 @@ class BraviaTVDevice(MediaPlayerDevice):
 self._muted = volume_info.get('mute')

 def _refresh_channels(self):
-if len(self._source_list) == 0:
+if not self._source_list:
 self._content_mapping = self._braviarc. \
 load_source_list()
 self._source_list = []
@@ -250,12 +250,12 @@ class DemoMusicPlayer(AbstractDemoPlayer):
 @property
 def media_title(self):
 """Return the title of current playing media."""
-return self.tracks[self._cur_track][1] if len(self.tracks) > 0 else ""
+return self.tracks[self._cur_track][1] if self.tracks else ""

 @property
 def media_artist(self):
 """Return the artist of current playing media (Music track only)."""
-return self.tracks[self._cur_track][0] if len(self.tracks) > 0 else ""
+return self.tracks[self._cur_track][0] if self.tracks else ""

 @property
 def media_album_name(self):
@@ -156,7 +156,7 @@ def _save_config(filename, config):
 def setup_platform(hass, config, add_devices_callback, discovery_info=None):
 """Set up the GPMDP platform."""
 codeconfig = _load_config(hass.config.path(GPMDP_CONFIG_FILE))
-if len(codeconfig):
+if codeconfig:
 code = codeconfig.get('CODE')
 elif discovery_info is not None:
 if 'gpmdp' in _CONFIGURING:
@@ -124,7 +124,7 @@ class Itunes(object):
 [playlist for playlist in playlists if
 (playlist_id_or_name in [playlist["name"], playlist["id"]])]

-if len(found_playlists) > 0:
+if found_playlists:
 playlist = found_playlists[0]
 path = '/playlists/' + playlist['id'] + '/play'
 return self._request('PUT', path)
@@ -303,7 +303,7 @@ class KodiDevice(MediaPlayerDevice):
 if self._players is None:
 return STATE_OFF

-if len(self._players) == 0:
+if not self._players:
 return STATE_IDLE

 if self._properties['speed'] == 0 and not self._properties['live']:
@@ -356,7 +356,7 @@ class KodiDevice(MediaPlayerDevice):
 ['volume', 'muted']
 )

-if len(self._players) > 0:
+if self._players:
 player_id = self._players[0]['playerid']

 assert isinstance(player_id, int)
@@ -475,7 +475,7 @@ class KodiDevice(MediaPlayerDevice):
 def media_artist(self):
 """Artist of current playing media, music track only."""
 artists = self._item.get('artist', [])
-if len(artists) > 0:
+if artists:
 return artists[0]
 else:
 return None
@@ -484,7 +484,7 @@ class KodiDevice(MediaPlayerDevice):
 def media_album_artist(self):
 """Album artist of current playing media, music track only."""
 artists = self._item.get('albumartist', [])
-if len(artists) > 0:
+if artists:
 return artists[0]
 else:
 return None
@@ -551,7 +551,7 @@ class KodiDevice(MediaPlayerDevice):
 """Helper method for play/pause/toggle."""
 players = yield from self._get_players()

-if players is not None and len(players) != 0:
+if players is not None and players:
 yield from self.server.Player.PlayPause(
 players[0]['playerid'], state)

@@ -585,7 +585,7 @@ class KodiDevice(MediaPlayerDevice):
 """Stop the media player."""
 players = yield from self._get_players()

-if len(players) != 0:
+if players:
 yield from self.server.Player.Stop(players[0]['playerid'])

 @asyncio.coroutine
@@ -593,9 +593,9 @@ class KodiDevice(MediaPlayerDevice):
 """Helper method used for previous/next track."""
 players = yield from self._get_players()

-if len(players) != 0:
+if players:
 if direction == 'previous':
-# first seek to position 0. Kodi goes to the beginning of the
+# First seek to position 0. Kodi goes to the beginning of the
 # current track if the current track is not at the beginning.
 yield from self.server.Player.Seek(players[0]['playerid'], 0)

@@ -637,7 +637,7 @@ class KodiDevice(MediaPlayerDevice):

 time['hours'] = int(position)

-if len(players) != 0:
+if players:
 yield from self.server.Player.Seek(players[0]['playerid'], time)

 @cmd
@@ -12,34 +12,24 @@ from urllib.parse import urlparse

 import requests
 import voluptuous as vol

 from homeassistant import util
 from homeassistant.components.media_player import (
-MEDIA_TYPE_MUSIC,
-MEDIA_TYPE_TVSHOW,
-MEDIA_TYPE_VIDEO,
-PLATFORM_SCHEMA,
-SUPPORT_NEXT_TRACK,
-SUPPORT_PAUSE,
-SUPPORT_PLAY,
-SUPPORT_PREVIOUS_TRACK,
-SUPPORT_STOP,
-SUPPORT_TURN_OFF,
-SUPPORT_VOLUME_MUTE,
-SUPPORT_VOLUME_SET,
-MediaPlayerDevice,
-)
+MEDIA_TYPE_MUSIC, MEDIA_TYPE_TVSHOW, MEDIA_TYPE_VIDEO, PLATFORM_SCHEMA,
+SUPPORT_NEXT_TRACK, SUPPORT_PAUSE, SUPPORT_PLAY, SUPPORT_PREVIOUS_TRACK,
+SUPPORT_STOP, SUPPORT_TURN_OFF, SUPPORT_VOLUME_MUTE, SUPPORT_VOLUME_SET,
+MediaPlayerDevice)
 from homeassistant.const import (
-DEVICE_DEFAULT_NAME,
-STATE_IDLE,
-STATE_OFF,
-STATE_PAUSED,
-STATE_PLAYING,
-)
+DEVICE_DEFAULT_NAME, STATE_IDLE, STATE_OFF, STATE_PAUSED, STATE_PLAYING)
 from homeassistant.helpers import config_validation as cv
 from homeassistant.helpers.event import track_utc_time_change
 from homeassistant.loader import get_component

 REQUIREMENTS = ['plexapi==2.0.2']

+_CONFIGURING = {}
+_LOGGER = logging.getLogger(__name__)
+
 MIN_TIME_BETWEEN_SCANS = timedelta(seconds=10)
 MIN_TIME_BETWEEN_FORCED_SCANS = timedelta(seconds=1)

@@ -59,10 +49,6 @@ PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
 cv.boolean,
 })

-# Map ip to request id for configuring
-_CONFIGURING = {}
-_LOGGER = logging.getLogger(__name__)
-

 def config_from_file(filename, config=None):
 """Small configuration file management function."""
@@ -72,7 +58,7 @@ def config_from_file(filename, config=None):
 with open(filename, 'w') as fdesc:
 fdesc.write(json.dumps(config))
 except IOError as error:
-_LOGGER.error('Saving config file failed: %s', error)
+_LOGGER.error("Saving config file failed: %s", error)
 return False
 return True
 else:
@@ -82,7 +68,7 @@ def config_from_file(filename, config=None):
 with open(filename, 'r') as fdesc:
 return json.loads(fdesc.read())
 except IOError as error:
-_LOGGER.error('Reading config file failed: %s', error)
+_LOGGER.error("Reading config file failed: %s", error)
 # This won't work yet
 return False
 else:
@@ -94,7 +80,7 @@ def setup_platform(hass, config, add_devices_callback, discovery_info=None):
 # get config from plex.conf
 file_config = config_from_file(hass.config.path(PLEX_CONFIG_FILE))

-if len(file_config):
+if file_config:
 # Setup a configured PlexServer
 host, token = file_config.popitem()
 token = token['token']
@@ -102,7 +88,7 @@ def setup_platform(hass, config, add_devices_callback, discovery_info=None):
 elif discovery_info is not None:
 # Parse discovery data
 host = discovery_info.get('host')
-_LOGGER.info('Discovered PLEX server: %s', host)
+_LOGGER.info("Discovered PLEX server: %s", host)

 if host in _CONFIGURING:
 return
@@ -133,14 +119,14 @@ def setup_plexserver(host, token, hass, config, add_devices_callback):
 request_id = _CONFIGURING.pop(host)
 configurator = get_component('configurator')
 configurator.request_done(request_id)
-_LOGGER.info('Discovery configuration done!')
+_LOGGER.info("Discovery configuration done")

 # Save config
 if not config_from_file(
 hass.config.path(PLEX_CONFIG_FILE), {host: {
 'token': token
 }}):
-_LOGGER.error('failed to save config file')
+_LOGGER.error("Failed to save configuration file")

 _LOGGER.info('Connected to: http://%s', host)

@@ -158,7 +144,7 @@ def setup_plexserver(host, token, hass, config, add_devices_callback):
 _LOGGER.exception('Error listing plex devices')
 return
 except OSError:
-_LOGGER.error('Could not connect to plex server at http://%s',
+_LOGGER.error("Could not connect to plex server at http://%s",
 host)
 return

@@ -204,7 +190,7 @@ def setup_plexserver(host, token, hass, config, add_devices_callback):
 try:
 sessions = plexserver.sessions()
 except plexapi.exceptions.BadRequest:
-_LOGGER.exception('Error listing plex sessions')
+_LOGGER.exception("Error listing plex sessions")
 return

 plex_sessions.clear()
@@ -386,9 +372,8 @@ class PlexClient(MediaPlayerDevice):

 # media type
 if self._session_type == 'clip':
-_LOGGER.debug('Clip content type detected, '
-'compatibility may vary: %s',
-self.entity_id)
+_LOGGER.debug("Clip content type detected, compatibility may "
+"vary: %s", self.entity_id)
 self._media_content_type = MEDIA_TYPE_TVSHOW
 elif self._session_type == 'episode':
 self._media_content_type = MEDIA_TYPE_TVSHOW
@@ -447,9 +432,8 @@ class PlexClient(MediaPlayerDevice):
 self._session.originalTitle)
 # use album artist if track artist is missing
 if self._media_artist is None:
-_LOGGER.debug(
-'Using album artist because track artist '
-'was not found: %s', self.entity_id)
+_LOGGER.debug("Using album artist because track artist was "
+"not found: %s", self.entity_id)
 self._media_artist = self._media_album_artist
 else:
 self._media_album_name = None
@@ -475,8 +459,8 @@ class PlexClient(MediaPlayerDevice):
 self._session.grandparentThumb)

 if thumb_url is None:
-_LOGGER.debug('Using media art because media thumb '
-'was not found: %s', self.entity_id)
+_LOGGER.debug("Using media art because media thumb "
+"was not found: %s", self.entity_id)
 thumb_url = self._get_thumbnail_url(self._session.art)

 self._media_image_url = thumb_url
@@ -570,9 +554,8 @@ class PlexClient(MediaPlayerDevice):
 def media_content_type(self):
 """Content type of current playing media."""
 if self._session_type == 'clip':
-_LOGGER.debug('Clip content type detected, '
-'compatibility may vary: %s',
-self.entity_id)
+_LOGGER.debug("Clip content type detected, "
+"compatibility may vary: %s", self.entity_id)
 return MEDIA_TYPE_TVSHOW
 elif self._session_type == 'episode':
 return MEDIA_TYPE_TVSHOW
@@ -657,8 +640,8 @@ class PlexClient(MediaPlayerDevice):
 # no mute support
 elif self.make.lower() == "shield android tv":
 _LOGGER.debug(
-'Shield Android TV client detected, disabling mute '
-'controls: %s', self.entity_id)
+"Shield Android TV client detected, disabling mute "
+"controls: %s", self.entity_id)
 return (SUPPORT_PAUSE | SUPPORT_PREVIOUS_TRACK |
 SUPPORT_NEXT_TRACK | SUPPORT_STOP |
 SUPPORT_VOLUME_SET | SUPPORT_PLAY |
@@ -666,8 +649,8 @@ class PlexClient(MediaPlayerDevice):
 # Only supports play,pause,stop (and off which really is stop)
 elif self.make.lower().startswith("tivo"):
 _LOGGER.debug(
-'Tivo client detected, only enabling pause, play, '
-'stop, and off controls: %s', self.entity_id)
+"Tivo client detected, only enabling pause, play, "
+"stop, and off controls: %s", self.entity_id)
 return (SUPPORT_PAUSE | SUPPORT_PLAY | SUPPORT_STOP |
 SUPPORT_TURN_OFF)
 # Not all devices support playback functionality
@@ -693,8 +676,8 @@ class PlexClient(MediaPlayerDevice):
 # point controls to server since that's where the
 # playback is occuring
 _LOGGER.debug(
-'Local client detected, redirecting controls to '
-'Plex server: %s', self.entity_id)
+"Local client detected, redirecting controls to "
+"Plex server: %s", self.entity_id)
 server_url = self.device.server.baseurl
 client_url = self.device.baseurl
 self.device.baseurl = "{}://{}:{}".format(
@@ -830,7 +813,7 @@ class PlexClient(MediaPlayerDevice):
 break

 if target_season is None:
-_LOGGER.error('Season not found: %s\\%s - S%sE%s', library_name,
+_LOGGER.error("Season not found: %s\\%s - S%sE%s", library_name,
 show_name,
 str(season_number).zfill(2),
 str(episode_number).zfill(2))
@@ -847,7 +830,7 @@ class PlexClient(MediaPlayerDevice):
 break

 if target_episode is None:
-_LOGGER.error('Episode not found: %s\\%s - S%sE%s',
+_LOGGER.error("Episode not found: %s\\%s - S%sE%s",
 library_name, show_name,
 str(season_number).zfill(2),
 str(episode_number).zfill(2))
@@ -858,14 +841,14 @@ class PlexClient(MediaPlayerDevice):
 """Instruct Plex client to play a piece of media."""
 if not (self.device and
 'playback' in self._device_protocol_capabilities):
-_LOGGER.error('Client cannot play media: %s', self.entity_id)
+_LOGGER.error("Client cannot play media: %s", self.entity_id)
 return

 import plexapi.playqueue
-playqueue = plexapi.playqueue.PlayQueue.create(self.device.server,
-media, **params)
+playqueue = plexapi.playqueue.PlayQueue.create(
+self.device.server, media, **params)

-# delete dynamic playlists used to build playqueue (ex. play tv season)
+# Delete dynamic playlists used to build playqueue (ex. play tv season)
 if delete:
 media.delete()

@@ -873,16 +856,13 @@ class PlexClient(MediaPlayerDevice):

 server_url = self.device.server.baseurl.split(':')
 self.device.sendCommand('playback/playMedia', **dict({
-'machineIdentifier':
-self.device.server.machineIdentifier,
-'address':
-server_url[1].strip('/'),
-'port':
-server_url[-1],
-'key':
-media.key,
+'machineIdentifier': self.device.server.machineIdentifier,
+'address': server_url[1].strip('/'),
+'port': server_url[-1],
+'key': media.key,
 'containerKey':
-'/playQueues/%s?window=100&own=1' % playqueue.playQueueID,
+'/playQueues/{}?window=100&own=1'.format(
+playqueue.playQueueID),
 }, **params))

 @property
@@ -6,6 +6,7 @@ https://home-assistant.io/components/notify.apns/
 """
 import logging
 import os
+
 import voluptuous as vol

 from homeassistant.helpers.event import track_state_change
@@ -16,15 +17,17 @@ from homeassistant.const import CONF_NAME, CONF_PLATFORM
 import homeassistant.helpers.config_validation as cv
 from homeassistant.helpers import template as template_helper

-APNS_DEVICES = "apns.yaml"
-CONF_CERTFILE = "cert_file"
-CONF_TOPIC = "topic"
-CONF_SANDBOX = "sandbox"
-DEVICE_TRACKER_DOMAIN = "device_tracker"
-SERVICE_REGISTER = "apns_register"
+REQUIREMENTS = ['apns2==0.1.1']

-ATTR_PUSH_ID = "push_id"
-ATTR_NAME = "name"
+APNS_DEVICES = 'apns.yaml'
+CONF_CERTFILE = 'cert_file'
+CONF_TOPIC = 'topic'
+CONF_SANDBOX = 'sandbox'
+DEVICE_TRACKER_DOMAIN = 'device_tracker'
+SERVICE_REGISTER = 'apns_register'
+
+ATTR_PUSH_ID = 'push_id'
+ATTR_NAME = 'name'

 PLATFORM_SCHEMA = vol.Schema({
 vol.Required(CONF_PLATFORM): 'apns',
@@ -39,8 +42,6 @@ REGISTER_SERVICE_SCHEMA = vol.Schema({
 vol.Optional(ATTR_NAME, default=None): cv.string,
 })

-REQUIREMENTS = ["apns2==0.1.1"]
-

 def get_service(hass, config, discovery_info=None):
 """Return push service."""
@@ -53,17 +54,15 @@ def get_service(hass, config, discovery_info=None):
 sandbox = config.get(CONF_SANDBOX)

 service = ApnsNotificationService(hass, name, topic, sandbox, cert_file)
-hass.services.register(DOMAIN,
-'apns_{}'.format(name),
-service.register,
-descriptions.get(SERVICE_REGISTER),
-schema=REGISTER_SERVICE_SCHEMA)
+hass.services.register(
+DOMAIN, 'apns_{}'.format(name), service.register,
+descriptions.get(SERVICE_REGISTER), schema=REGISTER_SERVICE_SCHEMA)
 return service


 class ApnsDevice(object):
 """
-Apns Device class.
+APNS Device class.

 Stores information about a device that is
 registered for push notifications.
@@ -78,7 +77,7 @@ class ApnsDevice(object):

 @property
 def push_id(self):
-"""The apns id for the device."""
+"""The APNS id for the device."""
 return self.device_push_id

 @property
@@ -104,7 +103,7 @@ class ApnsDevice(object):
 The full id of a device that is tracked by the device
 tracking component.
 """
-return DEVICE_TRACKER_DOMAIN + '.' + self.tracking_id
+return '{}.{}'.format(DEVICE_TRACKER_DOMAIN, self.tracking_id)

 @property
 def disabled(self):
@@ -140,7 +139,7 @@ def _write_device(out, device):

 out.write(device.push_id)
 out.write(": {")
-if len(attributes) > 0:
+if attributes:
 separator = ", "
 out.write(separator.join(attributes))

@@ -8,7 +8,6 @@ import logging
 import functools
 import socket
 import threading
-
 from datetime import timedelta

 import voluptuous as vol

@@ -24,8 +23,7 @@ REQUIREMENTS = ['pilight==0.1.1']
 _LOGGER = logging.getLogger(__name__)

-
-CONF_SEND_DELAY = "send_delay"
+CONF_SEND_DELAY = 'send_delay'

 DEFAULT_HOST = '127.0.0.1'
 DEFAULT_PORT = 5000

@@ -60,14 +58,14 @@ def setup(hass, config):
     host = config[DOMAIN][CONF_HOST]
     port = config[DOMAIN][CONF_PORT]
-    send_throttler = CallRateDelayThrottle(hass,
-                                           config[DOMAIN][CONF_SEND_DELAY])
+    send_throttler = CallRateDelayThrottle(
+        hass, config[DOMAIN][CONF_SEND_DELAY])

     try:
         pilight_client = pilight.Client(host=host, port=port)
     except (socket.error, socket.timeout) as err:
-        _LOGGER.error("Unable to connect to %s on port %s: %s",
-                      host, port, err)
+        _LOGGER.error(
+            "Unable to connect to %s on port %s: %s", host, port, err)
         return False

     def start_pilight_client(_):

@@ -92,7 +90,7 @@ def setup(hass, config):
         try:
             pilight_client.send_code(message_data)
         except IOError:
-            _LOGGER.error('Pilight send failed for %s', str(message_data))
+            _LOGGER.error("Pilight send failed for %s", str(message_data))

     hass.services.register(
         DOMAIN, SERVICE_NAME, send_code, schema=RF_CODE_SCHEMA)

@@ -157,7 +155,7 @@ class CallRateDelayThrottle(object):
            with self._lock:
                self._next_ts = dt_util.utcnow() + self._delay

-                if len(self._queue) == 0:
+                if not self._queue:
                    self._active = False
                else:
                    next_action = self._queue.pop(0)
@@ -165,13 +165,13 @@ def setup(hass, config):
     # Load platforms
     for comp_name in ('switch', 'light'):
-        if len(QSUSB[comp_name]) > 0:
+        if QSUSB[comp_name]:
             load_platform(hass, comp_name, 'qwikswitch', {}, config)

     def qs_callback(item):
         """Typically a button press or update signal."""
         if qsusb is None:  # Shutting down
-            _LOGGER.info("Done")
+            _LOGGER.info("Button press or update signal done")
             return

         # If button pressed, fire a hass event
@@ -141,7 +141,7 @@ class SCSGate(object):
         from scsgate.tasks import GetStatusTask

         with self._devices_to_register_lock:
-            while len(self._devices_to_register) != 0:
+            while self._devices_to_register:
                 _, device = self._devices_to_register.popitem()
                 self._devices[device.scs_id] = device
                 self._device_being_registered = device.scs_id
@@ -1,4 +1,3 @@
-
 """
 Support for the Broadlink RM2 Pro (only temperature) and A1 devices.

@@ -9,12 +8,13 @@ from datetime import timedelta
 import binascii
 import logging
 import socket

 import voluptuous as vol

 from homeassistant.components.sensor import PLATFORM_SCHEMA
-from homeassistant.const import (CONF_HOST, CONF_MAC,
-                                 CONF_MONITORED_CONDITIONS,
-                                 CONF_NAME, TEMP_CELSIUS, CONF_TIMEOUT)
+from homeassistant.const import (
+    CONF_HOST, CONF_MAC, CONF_MONITORED_CONDITIONS, CONF_NAME, TEMP_CELSIUS,
+    CONF_TIMEOUT)
 from homeassistant.helpers.entity import Entity
 from homeassistant.util import Throttle
 import homeassistant.helpers.config_validation as cv

@@ -71,7 +71,7 @@ class BroadlinkSensor(Entity):
     def __init__(self, name, broadlink_data, sensor_type):
         """Initialize the sensor."""
-        self._name = "%s %s" % (name, SENSOR_TYPES[sensor_type][0])
+        self._name = '{} {}'.format(name, SENSOR_TYPES[sensor_type][0])
         self._state = None
         self._type = sensor_type
         self._broadlink_data = broadlink_data

@@ -119,7 +119,7 @@ class BroadlinkData(object):
         self.update = Throttle(interval)(self._update)
         if not self._auth():
-            _LOGGER.warning("Failed to connect to device.")
+            _LOGGER.warning("Failed to connect to device")

     def _update(self, retry=3):
         try:
@@ -1,4 +1,5 @@
-"""Support for Dublin RTPI information from data.dublinked.ie.
+"""
+Support for Dublin RTPI information from data.dublinked.ie.

 For more info on the API see :
 https://data.gov.ie/dataset/real-time-passenger-information-rtpi-for-dublin-bus-bus-eireann-luas-and-irish-rail/resource/4b9f2c4f-6bf5-4958-a43a-f12dab04cf61

@@ -22,11 +23,11 @@ import homeassistant.helpers.config_validation as cv
 _LOGGER = logging.getLogger(__name__)
 _RESOURCE = 'https://data.dublinked.ie/cgi-bin/rtpi/realtimebusinformation'

-ATTR_STOP_ID = "Stop ID"
-ATTR_ROUTE = "Route"
-ATTR_DUE_IN = "Due in"
-ATTR_DUE_AT = "Due at"
-ATTR_NEXT_UP = "Later Bus"
+ATTR_STOP_ID = 'Stop ID'
+ATTR_ROUTE = 'Route'
+ATTR_DUE_IN = 'Due in'
+ATTR_DUE_AT = 'Due at'
+ATTR_NEXT_UP = 'Later Bus'

 CONF_ATTRIBUTION = "Data provided by data.dublinked.ie"
 CONF_STOP_ID = 'stopid'

@@ -36,7 +37,7 @@ DEFAULT_NAME = 'Next Bus'
 ICON = 'mdi:bus'

 MIN_TIME_BETWEEN_UPDATES = timedelta(seconds=60)
-TIME_STR_FORMAT = "%H:%M"
+TIME_STR_FORMAT = '%H:%M'

 PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
     vol.Required(CONF_STOP_ID): cv.string,

@@ -92,7 +93,7 @@ class DublinPublicTransportSensor(Entity):
         """Return the state attributes."""
         if self._times is not None:
             next_up = "None"
             if len(self._times) > 1:
                 next_up = self._times[1][ATTR_ROUTE] + " in "
                 next_up += self._times[1][ATTR_DUE_IN]

@@ -108,7 +109,7 @@ class DublinPublicTransportSensor(Entity):
     @property
     def unit_of_measurement(self):
         """Return the unit this state is expressed in."""
-        return "min"
+        return 'min'

     @property
     def icon(self):

@@ -178,7 +179,7 @@ class PublicTransportData(object):
                             due_in_minutes(due_at)}
                 self.info.append(bus_data)

-        if len(self.info) == 0:
+        if not self.info:
             self.info = [{ATTR_DUE_AT: 'n/a',
                           ATTR_ROUTE: self.route,
                           ATTR_DUE_IN: 'n/a'}]
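Note that only emptiness checks are candidates for this cleanup: the `len(self._times) > 1` guard above must stay explicit because `self._times[1]` needs at least two entries, so a bare truthiness test would raise `IndexError` for a single-element list. A minimal sketch with illustrative names (not taken from the commit):

```python
def describe_next_departure(times):
    """Illustrate when truthiness can replace a len() check."""
    # Emptiness test: truthiness is the idiomatic form.
    if not times:                      # was: if len(times) == 0:
        return 'n/a'

    # A comparison against a specific size must stay explicit:
    # times[1] below needs at least two entries, so `if times:`
    # would raise IndexError for a single-element list.
    if len(times) > 1:
        return '{} in {}'.format(times[1]['route'], times[1]['due_in'])
    return times[0]['due_in']
```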
@@ -1,13 +1,11 @@
-"""Read temperature information from Eddystone beacons.
+"""
+Read temperature information from Eddystone beacons.

 Your beacons must be configured to transmit UID (for identification) and TLM
 (for temperature) frames.

 For more details about this platform, please refer to the documentation at
 https://home-assistant.io/components/sensor.eddystone_temperature/
-
-Original version of this code (for Skybeacons) by anpetrov.
-https://github.com/anpetrov/skybeacon
 """
 import logging

@@ -24,7 +22,6 @@ REQUIREMENTS = ['beacontools[scan]==1.0.1']
 _LOGGER = logging.getLogger(__name__)

-# constants
 CONF_BEACONS = 'beacons'
 CONF_BT_DEVICE_ID = 'bt_device_id'
 CONF_INSTANCE = 'instance'

@@ -45,8 +42,6 @@ PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
 # pylint: disable=unused-argument
 def setup_platform(hass, config, add_devices, discovery_info=None):
     """Validate configuration, create devices and start monitoring thread."""
-    _LOGGER.debug("Setting up...")
-
     bt_device_id = config.get("bt_device_id")

     beacons = config.get("beacons")

@@ -63,17 +58,17 @@ def setup_platform(hass, config, add_devices, discovery_info=None):
         else:
             devices.append(EddystoneTemp(name, namespace, instance))

-    if len(devices) > 0:
+    if devices:
         mon = Monitor(hass, devices, bt_device_id)

         def monitor_stop(_service_or_event):
             """Stop the monitor thread."""
-            _LOGGER.info("Stopping scanner for eddystone beacons")
+            _LOGGER.info("Stopping scanner for Eddystone beacons")
             mon.stop()

         def monitor_start(_service_or_event):
             """Start the monitor thread."""
-            _LOGGER.info("Starting scanner for eddystone beacons")
+            _LOGGER.info("Starting scanner for Eddystone beacons")
             mon.start()

         add_devices(devices)

@@ -88,9 +83,8 @@ def get_from_conf(config, config_key, length):
     """Retrieve value from config and validate length."""
     string = config.get(config_key)
     if len(string) != length:
-        _LOGGER.error("Error in config parameter \"%s\": Must be exactly %d "
-                      "bytes. Device will not be added.",
-                      config_key, length/2)
+        _LOGGER.error("Error in config parameter %s: Must be exactly %d "
+                      "bytes. Device will not be added", config_key, length/2)
         return None
     else:
         return string

@@ -135,9 +129,9 @@ class Monitor(object):
         """Construct interface object."""
         self.hass = hass

-        # list of beacons to monitor
+        # List of beacons to monitor
         self.devices = devices
-        # number of the bt device (hciX)
+        # Number of the bt device (hciX)
         self.bt_device_id = bt_device_id

         def callback(bt_addr, _, packet, additional_info):

@@ -147,14 +141,13 @@ class Monitor(object):
                                packet.temperature)

         # pylint: disable=import-error
-        from beacontools import (BeaconScanner, EddystoneFilter,
-                                 EddystoneTLMFrame)
-        # Create a device filter for each device
+        from beacontools import (
+            BeaconScanner, EddystoneFilter, EddystoneTLMFrame)
         device_filters = [EddystoneFilter(d.namespace, d.instance)
                           for d in devices]

-        self.scanner = BeaconScanner(callback, bt_device_id, device_filters,
-                                     EddystoneTLMFrame)
+        self.scanner = BeaconScanner(
+            callback, bt_device_id, device_filters, EddystoneTLMFrame)
         self.scanning = False

     def start(self):

@@ -163,8 +156,8 @@ class Monitor(object):
             self.scanner.start()
             self.scanning = True
         else:
-            _LOGGER.debug("Warning: start() called, but scanner is already"
-                          " running")
+            _LOGGER.debug(
+                "Warning: start() called, but scanner is already running")

     def process_packet(self, namespace, instance, temperature):
         """Assign temperature to hass device."""

@@ -185,5 +178,5 @@ class Monitor(object):
             _LOGGER.debug("Stopped")
             self.scanning = False
         else:
-            _LOGGER.debug("Warning: stop() called but scanner was not"
-                          " running.")
+            _LOGGER.debug(
+                "Warning: stop() called but scanner was not running")
@@ -1,5 +1,5 @@
 """
-EMail sensor support.
+Email sensor support.

 For more details about this platform, please refer to the documentation at
 https://home-assistant.io/components/sensor.email/

@@ -7,24 +7,26 @@ https://home-assistant.io/components/sensor.email/
 import logging
 import datetime
 import email

 from collections import deque

+import voluptuous as vol
+
 from homeassistant.helpers.entity import Entity
 from homeassistant.components.sensor import PLATFORM_SCHEMA
 from homeassistant.const import (
-    CONF_NAME, CONF_PORT, CONF_USERNAME, CONF_PASSWORD, CONF_VALUE_TEMPLATE)
+    CONF_NAME, CONF_PORT, CONF_USERNAME, CONF_PASSWORD, CONF_VALUE_TEMPLATE,
+    CONTENT_TYPE_TEXT_PLAIN)
 import homeassistant.helpers.config_validation as cv
-import voluptuous as vol

 _LOGGER = logging.getLogger(__name__)

-CONF_SERVER = "server"
-CONF_SENDERS = "senders"
+CONF_SERVER = 'server'
+CONF_SENDERS = 'senders'

-ATTR_FROM = "from"
-ATTR_BODY = "body"
-ATTR_DATE = "date"
-ATTR_SUBJECT = "subject"
+ATTR_FROM = 'from'
+ATTR_BODY = 'body'
+ATTR_DATE = 'date'
+ATTR_SUBJECT = 'subject'

 DEFAULT_PORT = 993

@@ -41,20 +43,15 @@ PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
 def setup_platform(hass, config, add_devices, discovery_info=None):
     """Setup the EMail platform."""
     reader = EmailReader(
-        config.get(CONF_USERNAME),
-        config.get(CONF_PASSWORD),
-        config.get(CONF_SERVER),
-        config.get(CONF_PORT))
+        config.get(CONF_USERNAME), config.get(CONF_PASSWORD),
+        config.get(CONF_SERVER), config.get(CONF_PORT))

     value_template = config.get(CONF_VALUE_TEMPLATE)
     if value_template is not None:
         value_template.hass = hass
     sensor = EmailContentSensor(
-        hass,
-        reader,
-        config.get(CONF_NAME, None) or config.get(CONF_USERNAME),
-        config.get(CONF_SENDERS),
-        value_template)
+        hass, reader, config.get(CONF_NAME) or config.get(CONF_USERNAME),
+        config.get(CONF_SENDERS), value_template)

     if sensor.connected:
         add_devices([sensor])

@@ -83,15 +80,13 @@ class EmailReader:
             self.connection.login(self._user, self._password)
             return True
         except imaplib.IMAP4.error:
-            _LOGGER.error("Failed to login to %s.", self._server)
+            _LOGGER.error("Failed to login to %s", self._server)
             return False

     def _fetch_message(self, message_uid):
         """Get an email message from a message id."""
         _, message_data = self.connection.uid(
-            'fetch',
-            message_uid,
-            '(RFC822)')
+            'fetch', message_uid, '(RFC822)')

         raw_email = message_data[0][1]
         email_message = email.message_from_bytes(raw_email)

@@ -103,7 +98,7 @@ class EmailReader:
         try:
             self.connection.select()

-            if len(self._unread_ids) == 0:
+            if not self._unread_ids:
                 search = "SINCE {0:%d-%b-%Y}".format(datetime.date.today())
                 if self._last_id is not None:
                     search = "UID {}:*".format(self._last_id)

@@ -111,7 +106,7 @@ class EmailReader:
             _, data = self.connection.uid("search", None, search)
             self._unread_ids = deque(data[0].split())

-            while len(self._unread_ids) > 0:
+            while self._unread_ids:
                 message_uid = self._unread_ids.popleft()
                 if self._last_id is None or int(message_uid) > self._last_id:
                     self._last_id = int(message_uid)

@@ -119,29 +114,23 @@ class EmailReader:
         except imaplib.IMAP4.error:
             _LOGGER.info(
-                "Connection to %s lost, attempting to reconnect",
-                self._server)
+                "Connection to %s lost, attempting to reconnect", self._server)
             try:
                 self.connect()
             except imaplib.IMAP4.error:
-                _LOGGER.error("Failed to reconnect.")
+                _LOGGER.error("Failed to reconnect")


 class EmailContentSensor(Entity):
     """Representation of an EMail sensor."""

-    def __init__(self,
-                 hass,
-                 email_reader,
-                 name,
-                 allowed_senders,
-                 value_template):
+    def __init__(self, hass, email_reader, name, allowed_senders,
+                 value_template):
         """Initialize the sensor."""
         self.hass = hass
         self._email_reader = email_reader
         self._name = name
-        self._allowed_senders = \
-            [sender.upper() for sender in allowed_senders]
+        self._allowed_senders = [sender.upper() for sender in allowed_senders]
         self._value_template = value_template
         self._last_id = None
         self._message = None

@@ -202,7 +191,7 @@ class EmailContentSensor(Entity):
         message_untyped_text = None

         for part in email_message.walk():
-            if part.get_content_type() == 'text/plain':
+            if part.get_content_type() == CONTENT_TYPE_TEXT_PLAIN:
                 if message_text is None:
                     message_text = part.get_payload()
             elif part.get_content_type() == 'text/html':
@@ -168,15 +168,15 @@ class InfluxSensorData(object):
     @Throttle(MIN_TIME_BETWEEN_UPDATES)
     def update(self):
         """Get the latest data with a shell command."""
-        _LOGGER.info('Running query: %s', self.query)
+        _LOGGER.info("Running query: %s", self.query)

         points = list(self.influx.query(self.query).get_points())
-        if len(points) == 0:
-            _LOGGER.warning('Query returned no points, sensor state set'
-                            ' to UNKNOWN : %s', self.query)
+        if not points:
+            _LOGGER.warning("Query returned no points, sensor state set"
+                            " to UNKNOWN : %s", self.query)
             self.value = None
         else:
             if len(points) > 1:
-                _LOGGER.warning('Query returned multiple points, only first'
-                                ' one shown : %s', self.query)
+                _LOGGER.warning("Query returned multiple points, only first"
+                                " one shown : %s", self.query)
             self.value = points[0].get('value')
@@ -1,15 +1,15 @@
 """
-Support for ISY994 binary sensors.
+Support for ISY994 sensors.

 For more details about this platform, please refer to the documentation at
-https://home-assistant.io/components/binary_sensor.isy994/
+https://home-assistant.io/components/sensor.isy994/
 """
 import logging
 from typing import Callable  # noqa

 import homeassistant.components.isy994 as isy
-from homeassistant.const import (TEMP_CELSIUS, TEMP_FAHRENHEIT, STATE_OFF,
-                                 STATE_ON)
+from homeassistant.const import (
+    TEMP_CELSIUS, TEMP_FAHRENHEIT, STATE_OFF, STATE_ON)
 from homeassistant.helpers.typing import ConfigType

 _LOGGER = logging.getLogger(__name__)

@@ -240,15 +240,15 @@ def setup_platform(hass, config: ConfigType,
                    add_devices: Callable[[list], None], discovery_info=None):
     """Set up the ISY994 sensor platform."""
     if isy.ISY is None or not isy.ISY.connected:
-        _LOGGER.error('A connection has not been made to the ISY controller.')
+        _LOGGER.error("A connection has not been made to the ISY controller")
         return False

     devices = []

     for node in isy.SENSOR_NODES:
-        if (len(node.uom) == 0 or node.uom[0] not in BINARY_UOM) and \
+        if (not node.uom or node.uom[0] not in BINARY_UOM) and \
                 STATE_OFF not in node.uom and STATE_ON not in node.uom:
-            _LOGGER.debug('LOADING %s', node.name)
+            _LOGGER.debug("Loading %s", node.name)
             devices.append(ISYSensorDevice(node))

     for node in isy.WEATHER_NODES:
@@ -77,7 +77,7 @@ def setup_platform(hass, config, add_devices, discovery_info=None):
         unit = SENSOR_TYPES[parameter][1]

         prefix = config.get(CONF_NAME)
-        if len(prefix) > 0:
+        if prefix:
             name = "{} {}".format(prefix, name)

         devs.append(MiFloraSensor(

@@ -145,7 +145,7 @@ class MiFloraSensor(Entity):
                       self.name)
         # Remove old data from median list or set sensor value to None
         # if no data is available anymore
-        if len(self.data) > 0:
+        if self.data:
             self.data = self.data[1:]
         else:
             self._state = None
@@ -5,6 +5,7 @@ For more details about this platform, please refer to the documentation at
 https://home-assistant.io/components/sensor.rfxtrx/
 """
 import logging
+
 import voluptuous as vol

 import homeassistant.components.rfxtrx as rfxtrx

@@ -13,8 +14,8 @@ from homeassistant.const import CONF_PLATFORM
 from homeassistant.helpers.entity import Entity
 from homeassistant.util import slugify
 from homeassistant.components.rfxtrx import (
-    ATTR_AUTOMATIC_ADD, ATTR_NAME, ATTR_FIREEVENT,
-    CONF_DEVICES, ATTR_DATA_TYPE, DATA_TYPES, ATTR_ENTITY_ID)
+    ATTR_AUTOMATIC_ADD, ATTR_NAME, ATTR_FIREEVENT, CONF_DEVICES, DATA_TYPES,
+    ATTR_DATA_TYPE, ATTR_ENTITY_ID)

 DEPENDENCIES = ['rfxtrx']

@@ -40,7 +41,7 @@ def setup_platform(hass, config, add_devices_callback, discovery_info=None):
        sub_sensors = {}
        data_types = entity_info[ATTR_DATA_TYPE]
-        if len(data_types) == 0:
+        if not data_types:
            data_types = ['']
        for data_type in DATA_TYPES:
            if data_type in event.values:
@@ -1,20 +1,29 @@
-"""tado component to create some sensors for each zone."""
+"""
+Tado component to create some sensors for each zone.
+
+For more details about this platform, please refer to the documentation at
+https://home-assistant.io/components/sensor.tado/
+"""
 import logging

 from homeassistant.const import TEMP_CELSIUS
 from homeassistant.helpers.entity import Entity
-from homeassistant.components.tado import (
-    DATA_TADO)
+from homeassistant.components.tado import (DATA_TADO)

 _LOGGER = logging.getLogger(__name__)

+ATTR_DATA_ID = 'data_id'
+ATTR_DEVICE = 'device'
+ATTR_ID = 'id'
+ATTR_NAME = 'name'
+ATTR_ZONE = 'zone'
+
 SENSOR_TYPES = ['temperature', 'humidity', 'power',
                 'link', 'heating', 'tado mode', 'overlay']


 def setup_platform(hass, config, add_devices, discovery_info=None):
     """Set up the sensor platform."""
-    # get the PyTado object from the hub component
     tado = hass.data[DATA_TADO]

     try:

@@ -36,7 +45,7 @@ def setup_platform(hass, config, add_devices, discovery_info=None):
             tado, me_data, me_data['homes'][0]['name'],
             me_data['homes'][0]['id'], "tado bridge status"))

-    if len(sensor_items) > 0:
+    if sensor_items:
         add_devices(sensor_items, True)
         return True
     else:

@@ -48,10 +57,10 @@ def create_zone_sensor(tado, zone, name, zone_id, variable):
     data_id = 'zone {} {}'.format(name, zone_id)

     tado.add_sensor(data_id, {
-        "zone": zone,
-        "name": name,
-        "id": zone_id,
-        "data_id": data_id
+        ATTR_ZONE: zone,
+        ATTR_NAME: name,
+        ATTR_ID: zone_id,
+        ATTR_DATA_ID: data_id
     })

     return TadoSensor(tado, name, zone_id, variable, data_id)

@@ -62,10 +71,10 @@ def create_device_sensor(tado, device, name, device_id, variable):
     data_id = 'device {} {}'.format(name, device_id)

     tado.add_sensor(data_id, {
-        "device": device,
-        "name": name,
-        "id": device_id,
-        "data_id": data_id
+        ATTR_DEVICE: device,
+        ATTR_NAME: name,
+        ATTR_ID: device_id,
+        ATTR_DATA_ID: data_id
     })

     return TadoSensor(tado, name, device_id, variable, data_id)

@@ -133,8 +142,7 @@ class TadoSensor(Entity):
         data = self._store.get_data(self._data_id)

         if data is None:
-            _LOGGER.debug('Received no data for zone %s',
-                          self.zone_name)
+            _LOGGER.debug("Received no data for zone %s", self.zone_name)
             return

         unit = TEMP_CELSIUS
@@ -8,9 +8,9 @@ import logging
 import voluptuous as vol

+import homeassistant.helpers.config_validation as cv
 from homeassistant.components.sensor import PLATFORM_SCHEMA
 from homeassistant.const import (CONF_API_KEY, STATE_UNKNOWN)
-import homeassistant.helpers.config_validation as cv
 from homeassistant.helpers.entity import Entity

 REQUIREMENTS = ['xboxapi==0.1.1']

@@ -39,7 +39,7 @@ def setup_platform(hass, config, add_devices, discovery_info=None):
         if new_device.success_init:
             devices.append(new_device)

-    if len(devices) > 0:
+    if devices:
         add_devices(devices)
     else:
         return False
|
@ -31,7 +31,7 @@ def setup_platform(hass, config, add_devices, discovery_info=None):
|
|||||||
|
|
||||||
conf_switches = config_from_file(hass.config.path(
|
conf_switches = config_from_file(hass.config.path(
|
||||||
INSTEON_LOCAL_SWITCH_CONF))
|
INSTEON_LOCAL_SWITCH_CONF))
|
||||||
if len(conf_switches):
|
if conf_switches:
|
||||||
for device_id in conf_switches:
|
for device_id in conf_switches:
|
||||||
setup_switch(
|
setup_switch(
|
||||||
device_id, conf_switches[device_id], insteonhub, hass,
|
device_id, conf_switches[device_id], insteonhub, hass,
|
||||||
@ -48,8 +48,8 @@ def setup_platform(hass, config, add_devices, discovery_info=None):
|
|||||||
hass, add_devices)
|
hass, add_devices)
|
||||||
|
|
||||||
|
|
||||||
def request_configuration(device_id, insteonhub, model, hass,
|
def request_configuration(
|
||||||
add_devices_callback):
|
device_id, insteonhub, model, hass, add_devices_callback):
|
||||||
"""Request configuration steps from the user."""
|
"""Request configuration steps from the user."""
|
||||||
configurator = get_component('configurator')
|
configurator = get_component('configurator')
|
||||||
|
|
||||||
@ -81,7 +81,7 @@ def setup_switch(device_id, name, insteonhub, hass, add_devices_callback):
|
|||||||
request_id = _CONFIGURING.pop(device_id)
|
request_id = _CONFIGURING.pop(device_id)
|
||||||
configurator = get_component('configurator')
|
configurator = get_component('configurator')
|
||||||
configurator.request_done(request_id)
|
configurator.request_done(request_id)
|
||||||
_LOGGER.info("Device configuration done!")
|
_LOGGER.info("Device configuration done")
|
||||||
|
|
||||||
conf_switch = config_from_file(hass.config.path(INSTEON_LOCAL_SWITCH_CONF))
|
conf_switch = config_from_file(hass.config.path(INSTEON_LOCAL_SWITCH_CONF))
|
||||||
if device_id not in conf_switch:
|
if device_id not in conf_switch:
|
||||||
|
@@ -29,7 +29,7 @@ def setup_platform(hass, config, add_devices, discovery_info=None):
     for robot in hass.data[NEATO_ROBOTS]:
         for type_name in SWITCH_TYPES:
             dev.append(NeatoConnectedSwitch(hass, robot, type_name))
-    _LOGGER.debug('Adding switches %s', dev)
+    _LOGGER.debug("Adding switches %s", dev)
     add_devices(dev)

@@ -41,25 +41,26 @@ class NeatoConnectedSwitch(ToggleEntity):
         self.type = switch_type
         self.robot = robot
         self.neato = hass.data[NEATO_LOGIN]
-        self._robot_name = self.robot.name + ' ' + SWITCH_TYPES[self.type][0]
+        self._robot_name = '{} {}'.format(
+            self.robot.name, SWITCH_TYPES[self.type][0])
         try:
             self._state = self.robot.state
         except (requests.exceptions.ConnectionError,
                 requests.exceptions.HTTPError) as ex:
-            _LOGGER.warning('Neato connection error: %s', ex)
+            _LOGGER.warning("Neato connection error: %s", ex)
             self._state = None
         self._schedule_state = None
         self._clean_state = None

     def update(self):
         """Update the states of Neato switches."""
-        _LOGGER.debug('Running switch update')
+        _LOGGER.debug("Running switch update")
         self.neato.update_robots()
         try:
             self._state = self.robot.state
         except (requests.exceptions.ConnectionError,
                 requests.exceptions.HTTPError) as ex:
-            _LOGGER.warning('Neato connection error: %s', ex)
+            _LOGGER.warning("Neato connection error: %s", ex)
             self._state = None
             return
         _LOGGER.debug('self._state=%s', self._state)

@@ -71,14 +72,14 @@ class NeatoConnectedSwitch(ToggleEntity):
                 self._clean_state = STATE_ON
             else:
                 self._clean_state = STATE_OFF
-            _LOGGER.debug('schedule_state=%s', self._schedule_state)
+            _LOGGER.debug("Schedule state: %s", self._schedule_state)
         if self.type == SWITCH_TYPE_SCHEDULE:
-            _LOGGER.debug('self._state=%s', self._state)
+            _LOGGER.debug("State: %s", self._state)
             if self.robot.schedule_enabled:
                 self._schedule_state = STATE_ON
             else:
                 self._schedule_state = STATE_OFF
-            _LOGGER.debug('schedule_state=%s', self._schedule_state)
+            _LOGGER.debug("Schedule state: %s", self._schedule_state)

     @property
     def name(self):
@@ -58,14 +58,14 @@ def setup_platform(hass, config, add_devices, discovery_info=None):
     password = config.get(CONF_PASSWORD)
     port = config.get(CONF_PORT)

-    if len(DEVICES) == 0:
+    if not DEVICES:
         hass.http.register_view(NetioApiView)

     dev = Netio(host, port, username, password)

     DEVICES[host] = Device(dev, [])

-    # Throttle the update for all NetioSwitches of one Netio
+    # Throttle the update for all Netio switches of one Netio
     dev.update = util.Throttle(MIN_TIME_BETWEEN_SCANS)(dev.update)

     for key in config[CONF_OUTLETS]:

@@ -123,7 +123,7 @@ class NetioApiView(HomeAssistantView):


 class NetioSwitch(SwitchDevice):
-    """Provide a netio linked switch."""
+    """Provide a Netio linked switch."""

     def __init__(self, netio, outlet, name):
         """Defined to handle throttle."""
@@ -13,17 +13,16 @@ from homeassistant.helpers import discovery
 from homeassistant.const import EVENT_HOMEASSISTANT_STOP
 from homeassistant.helpers.entity import Entity

-DOMAIN = 'tellstick'
-
 REQUIREMENTS = ['tellcore-py==1.1.2']

 _LOGGER = logging.getLogger(__name__)

-ATTR_SIGNAL_REPETITIONS = 'signal_repetitions'
-DEFAULT_SIGNAL_REPETITIONS = 1
-
-ATTR_DISCOVER_DEVICES = 'devices'
 ATTR_DISCOVER_CONFIG = 'config'
+ATTR_DISCOVER_DEVICES = 'devices'
+ATTR_SIGNAL_REPETITIONS = 'signal_repetitions'
+
+DEFAULT_SIGNAL_REPETITIONS = 1
+DOMAIN = 'tellstick'

 # Use a global tellstick domain lock to avoid getting Tellcore errors when
 # calling concurrently.

@@ -43,7 +42,7 @@ CONFIG_SCHEMA = vol.Schema({
 def _discover(hass, config, component_name, found_tellcore_devices):
     """Set up and send the discovery event."""
-    if not len(found_tellcore_devices):
+    if not found_tellcore_devices:
         return

     _LOGGER.info("Discovered %d new %s devices", len(found_tellcore_devices),

@@ -66,7 +65,7 @@ def setup(hass, config):
         tellcore_lib = TelldusCore(
             callback_dispatcher=AsyncioCallbackDispatcher(hass.loop))
     except OSError:
-        _LOGGER.exception('Could not initialize Tellstick')
+        _LOGGER.exception("Could not initialize Tellstick")
         return False

     # Get all devices, switches and lights alike
@@ -84,7 +84,7 @@ def async_setup(hass, config):
     tts = SpeechManager(hass)

     try:
-        conf = config[DOMAIN][0] if len(config.get(DOMAIN, [])) > 0 else {}
+        conf = config[DOMAIN][0] if config.get(DOMAIN, []) else {}
         use_cache = conf.get(CONF_CACHE, DEFAULT_CACHE)
         cache_dir = conf.get(CONF_CACHE_DIR, DEFAULT_CACHE_DIR)
         time_memory = conf.get(CONF_TIME_MEMORY, DEFAULT_TIME_MEMORY)
@@ -253,7 +253,7 @@ class HomeAssistant(object):
            pending = [task for task in self._pending_tasks
                       if not task.done()]
            self._pending_tasks.clear()
-            if len(pending) > 0:
+            if pending:
                yield from asyncio.wait(pending, loop=self.loop)
            else:
                yield from asyncio.sleep(0, loop=self.loop)
@@ -242,7 +242,7 @@ def slugify(value):
     if value is None:
         raise vol.Invalid('Slug should not be None')
     slg = util_slugify(str(value))
-    if len(slg) > 0:
+    if slg:
         return slg
     raise vol.Invalid('Unable to slugify {}'.format(value))
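The same idiom works for strings: an empty `str` is falsy, so `if slg:` is equivalent to the removed `len(slg) > 0` check. A rough sketch with a simplified, hypothetical slugifier (the real `util_slugify` is more involved):

```python
import re


def naive_slugify(value):
    """Toy slug helper relying on empty-string falsiness (illustrative only)."""
    slug = re.sub(r'[^a-z0-9_]+', '_', str(value).lower()).strip('_')
    if slug:            # was: if len(slug) > 0:
        return slug
    raise ValueError('Unable to slugify {}'.format(value))


naive_slugify('Living Room!')   # -> 'living_room'
```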
@@ -1,4 +1,4 @@
-"""Template helper methods for rendering strings with HA data."""
+"""Template helper methods for rendering strings with Home Assistant data."""
 from datetime import datetime
 import json
 import logging
@@ -202,15 +202,15 @@ def _load_order_component(comp_name: str, load_order: OrderedSet,
        # If we are already loading it, we have a circular dependency.
        if dependency in loading:
-            _LOGGER.error('Circular dependency detected: %s -> %s',
-                          comp_name, dependency)
+            _LOGGER.error("Circular dependency detected: %s -> %s",
+                          comp_name, dependency)
            return OrderedSet()

        dep_load_order = _load_order_component(dependency, load_order, loading)

        # length == 0 means error loading dependency or children
-        if len(dep_load_order) == 0:
-            _LOGGER.error('Error loading %s dependency: %s',
-                          comp_name, dependency)
+        if not dep_load_order:
+            _LOGGER.error("Error loading %s dependency: %s",
+                          comp_name, dependency)
            return OrderedSet()
@@ -50,7 +50,7 @@ def color(the_color, *args, reset=None):
     """Color helper."""
     from colorlog.escape_codes import escape_codes, parse_colors
     try:
-        if len(args) == 0:
+        if not args:
             assert reset is None, "You cannot reset if nothing being printed"
             return parse_colors(the_color)
         return parse_colors(the_color) + ' '.join(args) + \

@@ -106,7 +106,7 @@ def run(script_args: List) -> int:
            the_color = '' if yfn in res['yaml_files'] else 'red'
            print(color(the_color, '-', yfn))

    if len(res['except']) > 0:
-    if len(res['except']) > 0:
+    if res['except']:
        print(color('bold_white', 'Failed config'))
        for domain, config in res['except'].items():
            domain_info.append(domain)
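Taken together, the hunks apply one idiom: rely on the falsiness of empty containers instead of spelling out a length comparison. A minimal, self-contained sketch of the before and after (the function and argument names are illustrative, not taken from Home Assistant):

```python
def add_discovered(devices, add_devices):
    """Add discovered entities only when any were found."""
    # Empty lists, dicts, deques and strings are all falsy, so this single
    # truthiness test replaces the `len(devices)`, `len(devices) > 0` and
    # `len(devices) == 0` style conditions removed across these platforms.
    if devices:
        add_devices(devices)
        return True
    return False
```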