diff --git a/.coveragerc b/.coveragerc index 80bc6905c21..15fa27dd1c0 100644 --- a/.coveragerc +++ b/.coveragerc @@ -31,6 +31,9 @@ omit = homeassistant/components/insteon_hub.py homeassistant/components/*/insteon_hub.py + homeassistant/components/ios.py + homeassistant/components/*/ios.py + homeassistant/components/isy994.py homeassistant/components/*/isy994.py @@ -95,8 +98,7 @@ omit = homeassistant/components/homematic.py homeassistant/components/*/homematic.py - homeassistant/components/pilight.py - homeassistant/components/*/pilight.py + homeassistant/components/switch/pilight.py homeassistant/components/knx.py homeassistant/components/*/knx.py @@ -104,16 +106,22 @@ omit = homeassistant/components/ffmpeg.py homeassistant/components/*/ffmpeg.py + homeassistant/components/zoneminder.py + homeassistant/components/*/zoneminder.py + homeassistant/components/alarm_control_panel/alarmdotcom.py + homeassistant/components/alarm_control_panel/concord232.py homeassistant/components/alarm_control_panel/nx584.py homeassistant/components/alarm_control_panel/simplisafe.py homeassistant/components/binary_sensor/arest.py + homeassistant/components/binary_sensor/concord232.py homeassistant/components/binary_sensor/rest.py homeassistant/components/browser.py homeassistant/components/camera/bloomsky.py homeassistant/components/camera/foscam.py homeassistant/components/camera/mjpeg.py homeassistant/components/camera/rpi_camera.py + homeassistant/components/camera/synology.py homeassistant/components/climate/eq3btsmart.py homeassistant/components/climate/heatmiser.py homeassistant/components/climate/homematic.py @@ -127,6 +135,7 @@ omit = homeassistant/components/device_tracker/actiontec.py homeassistant/components/device_tracker/aruba.py homeassistant/components/device_tracker/asuswrt.py + homeassistant/components/device_tracker/bbox.py homeassistant/components/device_tracker/bluetooth_tracker.py homeassistant/components/device_tracker/bluetooth_le_tracker.py homeassistant/components/device_tracker/bt_home_hub_5.py @@ -144,6 +153,7 @@ omit = homeassistant/components/device_tracker/volvooncall.py homeassistant/components/discovery.py homeassistant/components/downloader.py + homeassistant/components/emoncms_history.py homeassistant/components/fan/mqtt.py homeassistant/components/feedreader.py homeassistant/components/foursquare.py @@ -197,6 +207,7 @@ omit = homeassistant/components/notify/joaoapps_join.py homeassistant/components/notify/kodi.py homeassistant/components/notify/llamalab_automate.py + homeassistant/components/notify/matrix.py homeassistant/components/notify/message_bird.py homeassistant/components/notify/nma.py homeassistant/components/notify/pushbullet.py @@ -209,6 +220,7 @@ omit = homeassistant/components/notify/smtp.py homeassistant/components/notify/syslog.py homeassistant/components/notify/telegram.py + homeassistant/components/notify/telstra.py homeassistant/components/notify/twilio_sms.py homeassistant/components/notify/twitter.py homeassistant/components/notify/xmpp.py @@ -216,6 +228,8 @@ omit = homeassistant/components/openalpr.py homeassistant/components/scene/hunterdouglas_powerview.py homeassistant/components/sensor/arest.py + homeassistant/components/sensor/arwn.py + homeassistant/components/sensor/bbox.py homeassistant/components/sensor/bitcoin.py homeassistant/components/sensor/bom.py homeassistant/components/sensor/coinmarketcap.py @@ -235,6 +249,7 @@ omit = homeassistant/components/sensor/google_travel_time.py homeassistant/components/sensor/gpsd.py 
homeassistant/components/sensor/gtfs.py + homeassistant/components/sensor/haveibeenpwned.py homeassistant/components/sensor/hp_ilo.py homeassistant/components/sensor/imap.py homeassistant/components/sensor/imap_email_content.py @@ -254,6 +269,7 @@ omit = homeassistant/components/sensor/plex.py homeassistant/components/sensor/rest.py homeassistant/components/sensor/sabnzbd.py + homeassistant/components/sensor/scrape.py homeassistant/components/sensor/serial_pm.py homeassistant/components/sensor/snmp.py homeassistant/components/sensor/speedtest.py @@ -272,7 +288,6 @@ omit = homeassistant/components/sensor/vasttrafik.py homeassistant/components/sensor/worldclock.py homeassistant/components/sensor/xbox_live.py - homeassistant/components/sensor/yahoo_finance.py homeassistant/components/sensor/yweather.py homeassistant/components/switch/acer_projector.py homeassistant/components/switch/anel_pwrctrl.py @@ -281,6 +296,7 @@ omit = homeassistant/components/switch/edimax.py homeassistant/components/switch/hikvisioncam.py homeassistant/components/switch/mystrom.py + homeassistant/components/switch/neato.py homeassistant/components/switch/netio.py homeassistant/components/switch/orvibo.py homeassistant/components/switch/pulseaudio_loopback.py diff --git a/.github/PULL_REQUEST_TEMPLATE.md b/.github/PULL_REQUEST_TEMPLATE.md index 998905c929b..d3106f26bae 100644 --- a/.github/PULL_REQUEST_TEMPLATE.md +++ b/.github/PULL_REQUEST_TEMPLATE.md @@ -1,9 +1,9 @@ **Description:** -**Related issue (if applicable):** fixes # +**Related issue (if applicable):** fixes # -**Pull request in [home-assistant.io](https://github.com/home-assistant/home-assistant.io) with documentation (if applicable):** home-assistant/home-assistant.io# +**Pull request in [home-assistant.github.io](https://github.com/home-assistant/home-assistant.github.io) with documentation (if applicable):** home-assistant/home-assistant.github.io# **Example entry for `configuration.yaml` (if applicable):** ```yaml @@ -13,7 +13,7 @@ **Checklist:** If user exposed functionality or configuration variables are added/changed: - - [ ] Documentation added/updated in [home-assistant.io](https://github.com/home-assistant/home-assistant.io) + - [ ] Documentation added/updated in [home-assistant.github.io](https://github.com/home-assistant/home-assistant.github.io) If the code communicates with devices, web services, or third-party tools: - [ ] Local tests with `tox` run successfully. **Your PR cannot be merged unless tests pass** diff --git a/Dockerfile b/Dockerfile index 14e70a0412c..b42d7edcc89 100644 --- a/Dockerfile +++ b/Dockerfile @@ -19,8 +19,7 @@ RUN script/build_python_openzwave && \ ln -sf /usr/src/app/build/python-openzwave/openzwave/config /usr/local/share/python-openzwave/config COPY requirements_all.txt requirements_all.txt -# certifi breaks Debian based installs -RUN pip3 install --no-cache-dir -r requirements_all.txt && pip3 uninstall -y certifi && \ +RUN pip3 install --no-cache-dir -r requirements_all.txt && \ pip3 install mysqlclient psycopg2 uvloop # Copy source diff --git a/docs/source/api/core.rst b/docs/source/api/core.rst index a32bdc24d11..bbaf591052c 100644 --- a/docs/source/api/core.rst +++ b/docs/source/api/core.rst @@ -8,11 +8,31 @@ .. autoclass:: Config :members: +.. autoclass:: Event + :members: + .. autoclass:: EventBus :members: +.. autoclass:: HomeAssistant + :members: + +.. autoclass:: State + :members: + .. autoclass:: StateMachine :members: +.. autoclass:: ServiceCall + :members: + .. 
autoclass:: ServiceRegistry :members: + +Module contents +--------------- + +.. automodule:: homeassistant.core + :members: + :undoc-members: + :show-inheritance: diff --git a/docs/source/api/util.rst b/docs/source/api/util.rst index 7d6a22dbc0b..e31a1c98129 100644 --- a/docs/source/api/util.rst +++ b/docs/source/api/util.rst @@ -4,6 +4,14 @@ homeassistant.util package Submodules ---------- +homeassistant.util.async module +------------------------------- + +.. automodule:: homeassistant.util.async + :members: + :undoc-members: + :show-inheritance: + homeassistant.util.color module ------------------------------- diff --git a/docs/swagger.yaml b/docs/swagger.yaml index b0d765be361..488d6bddd46 100644 --- a/docs/swagger.yaml +++ b/docs/swagger.yaml @@ -2,7 +2,7 @@ swagger: '2.0' info: title: Home Assistant description: Home Assistant REST API - version: "1.0.0" + version: "1.0.1" # the domain of the service host: localhost:8123 @@ -12,17 +12,17 @@ schemes: - https securityDefinitions: - api_key: - type: apiKey - description: API password - name: api_password - in: query - - # api_key: + #api_key: # type: apiKey # description: API password - # name: x-ha-access - # in: header + # name: api_password + # in: query + + api_key: + type: apiKey + description: API password + name: x-ha-access + in: header # will be prefixed to all paths basePath: /api @@ -38,6 +38,8 @@ paths: description: Returns message if API is up and running. tags: - Core + security: + - api_key: [] responses: 200: description: API is up and running @@ -53,6 +55,8 @@ paths: description: Returns the current configuration as JSON. tags: - Core + security: + - api_key: [] responses: 200: description: Current configuration @@ -81,6 +85,8 @@ paths: summary: Returns all data needed to bootstrap Home Assistant. tags: - Core + security: + - api_key: [] responses: 200: description: Bootstrap information @@ -96,6 +102,8 @@ paths: description: Returns an array of event objects. Each event object contain event name and listener count. tags: - Events + security: + - api_key: [] responses: 200: description: Events @@ -113,6 +121,8 @@ paths: description: Returns an array of service objects. Each object contains the domain and which services it contains. tags: - Services + security: + - api_key: [] responses: 200: description: Services @@ -130,6 +140,8 @@ paths: description: Returns an array of state changes in the past. Each object contains further detail for the entities. tags: - State + security: + - api_key: [] responses: 200: description: State changes @@ -148,6 +160,8 @@ paths: Returns an array of state objects. Each state has the following attributes: entity_id, state, last_changed and attributes. tags: - State + security: + - api_key: [] responses: 200: description: States @@ -166,6 +180,8 @@ paths: Returns a state object for specified entity_id. tags: - State + security: + - api_key: [] parameters: - name: entity_id in: path @@ -223,6 +239,8 @@ paths: Retrieve all errors logged during the current session of Home Assistant as a plaintext response. tags: - Core + security: + - api_key: [] produces: - text/plain responses: @@ -239,6 +257,8 @@ paths: Returns the data (image) from the specified camera entity_id. tags: - Camera + security: + - api_key: [] produces: - image/jpeg parameters: @@ -262,6 +282,8 @@ paths: Fires an event with event_type tags: - Events + security: + - api_key: [] consumes: - application/json parameters: @@ -286,6 +308,8 @@ paths: Calls a service within a specific domain. 
Will return when the service has been executed or 10 seconds has past, whichever comes first. tags: - Services + security: + - api_key: [] consumes: - application/json parameters: @@ -317,6 +341,8 @@ paths: Render a Home Assistant template. tags: - Template + security: + - api_key: [] consumes: - application/json produces: @@ -338,6 +364,8 @@ paths: Setup event forwarding to another Home Assistant instance. tags: - Core + security: + - api_key: [] consumes: - application/json parameters: @@ -376,6 +404,8 @@ paths: tags: - Core - Events + security: + - api_key: [] produces: - text/event-stream parameters: @@ -420,8 +450,16 @@ definitions: location_name: type: string unit_system: - type: string - description: The system for measurement units + type: object + properties: + length: + type: string + mass: + type: string + temperature: + type: string + volume: + type: string time_zone: type: string version: diff --git a/homeassistant/bootstrap.py b/homeassistant/bootstrap.py index 21c56f55bad..8ad4e16c8cd 100644 --- a/homeassistant/bootstrap.py +++ b/homeassistant/bootstrap.py @@ -32,12 +32,15 @@ _CURRENT_SETUP = [] ATTR_COMPONENT = 'component' ERROR_LOG_FILENAME = 'home-assistant.log' +_PERSISTENT_PLATFORMS = set() +_PERSISTENT_VALIDATION = set() def setup_component(hass: core.HomeAssistant, domain: str, config: Optional[Dict]=None) -> bool: """Setup a component and all its dependencies.""" if domain in hass.config.components: + _LOGGER.debug('Component %s already set up.', domain) return True _ensure_loader_prepared(hass) @@ -53,6 +56,7 @@ def setup_component(hass: core.HomeAssistant, domain: str, for component in components: if not _setup_component(hass, component, config): + _LOGGER.error('Component %s failed to setup', component) return False return True @@ -147,7 +151,7 @@ def prepare_setup_component(hass: core.HomeAssistant, config: dict, try: config = component.CONFIG_SCHEMA(config) except vol.Invalid as ex: - log_exception(ex, domain, config) + log_exception(ex, domain, config, hass) return None elif hasattr(component, 'PLATFORM_SCHEMA'): @@ -157,8 +161,8 @@ def prepare_setup_component(hass: core.HomeAssistant, config: dict, try: p_validated = component.PLATFORM_SCHEMA(p_config) except vol.Invalid as ex: - log_exception(ex, domain, config) - return None + log_exception(ex, domain, config, hass) + continue # Not all platform components follow same pattern for platforms # So if p_name is None we are not going to validate platform @@ -171,7 +175,7 @@ def prepare_setup_component(hass: core.HomeAssistant, config: dict, p_name) if platform is None: - return None + continue # Validate platform specific schema if hasattr(platform, 'PLATFORM_SCHEMA'): @@ -179,8 +183,8 @@ def prepare_setup_component(hass: core.HomeAssistant, config: dict, p_validated = platform.PLATFORM_SCHEMA(p_validated) except vol.Invalid as ex: log_exception(ex, '{}.{}'.format(domain, p_name), - p_validated) - return None + p_validated, hass) + continue platforms.append(p_validated) @@ -209,6 +213,13 @@ def prepare_setup_platform(hass: core.HomeAssistant, config, domain: str, # Not found if platform is None: _LOGGER.error('Unable to find platform %s', platform_path) + + _PERSISTENT_PLATFORMS.add(platform_path) + message = ('Unable to find the following platforms: ' + + ', '.join(list(_PERSISTENT_PLATFORMS)) + + '(please check your configuration)') + persistent_notification.create( + hass, message, 'Invalid platforms', 'platform_errors') return None # Already loaded @@ -255,7 +266,7 @@ def from_config_dict(config: Dict[str, 
Any], try: conf_util.process_ha_core_config(hass, core_config) except vol.Invalid as ex: - log_exception(ex, 'homeassistant', core_config) + log_exception(ex, 'homeassistant', core_config, hass) return None conf_util.process_ha_config_upgrade(hass) @@ -303,6 +314,7 @@ def from_config_dict(config: Dict[str, Any], hass.loop.run_until_complete( hass.loop.run_in_executor(None, component_setup) ) + return hass @@ -343,6 +355,11 @@ def enable_logging(hass: core.HomeAssistant, verbose: bool=False, logging.basicConfig(level=logging.INFO) fmt = ("%(log_color)s%(asctime)s %(levelname)s (%(threadName)s) " "[%(name)s] %(message)s%(reset)s") + + # suppress overly verbose logs from libraries that aren't helpful + logging.getLogger("requests").setLevel(logging.WARNING) + logging.getLogger("urllib3").setLevel(logging.WARNING) + try: from colorlog import ColoredFormatter logging.getLogger().handlers[0].setFormatter(ColoredFormatter( @@ -395,9 +412,16 @@ def _ensure_loader_prepared(hass: core.HomeAssistant) -> None: loader.prepare(hass) -def log_exception(ex, domain, config): +def log_exception(ex, domain, config, hass=None): """Generate log exception for config validation.""" message = 'Invalid config for [{}]: '.format(domain) + if hass is not None: + _PERSISTENT_VALIDATION.add(domain) + message = ('The following platforms contain invalid configuration: ' + + ', '.join(list(_PERSISTENT_VALIDATION)) + + ' (please check your configuration)') + persistent_notification.create( + hass, message, 'Invalid config', 'invalid_config') if 'extra keys not allowed' in ex.error_message: message += '[{}] is an invalid option for [{}]. Check: {}->{}.'\ diff --git a/homeassistant/components/alarm_control_panel/concord232.py b/homeassistant/components/alarm_control_panel/concord232.py new file mode 100755 index 00000000000..0e0fd026b60 --- /dev/null +++ b/homeassistant/components/alarm_control_panel/concord232.py @@ -0,0 +1,136 @@ +""" +Support for Concord232 alarm control panels. 
+
+For more details about this platform, please refer to the documentation at
+https://home-assistant.io/components/alarm_control_panel.concord232/
+"""
+
+import datetime
+
+import logging
+
+import homeassistant.components.alarm_control_panel as alarm
+from homeassistant.components.alarm_control_panel import PLATFORM_SCHEMA
+from homeassistant.const import (
+    CONF_HOST, CONF_NAME, CONF_PORT,
+    STATE_ALARM_ARMED_AWAY, STATE_ALARM_ARMED_HOME,
+    STATE_ALARM_DISARMED, STATE_UNKNOWN)
+import homeassistant.helpers.config_validation as cv
+
+import requests
+
+import voluptuous as vol
+
+REQUIREMENTS = ['concord232==0.14']
+
+_LOGGER = logging.getLogger(__name__)
+
+DEFAULT_HOST = 'localhost'
+DEFAULT_NAME = 'CONCORD232'
+DEFAULT_PORT = 5007
+
+PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
+    vol.Optional(CONF_PORT, default=DEFAULT_PORT): cv.port,
+    vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
+    vol.Optional(CONF_HOST, default=DEFAULT_HOST): cv.string,
+})
+
+SCAN_INTERVAL = 1
+
+
+def setup_platform(hass, config, add_devices, discovery_info=None):
+    """Setup concord232 platform."""
+    name = config.get(CONF_NAME)
+    host = config.get(CONF_HOST)
+    port = config.get(CONF_PORT)
+
+    url = 'http://{}:{}'.format(host, port)
+
+    try:
+        add_devices([Concord232Alarm(hass, url, name)])
+    except requests.exceptions.ConnectionError as ex:
+        _LOGGER.error('Unable to connect to Concord232: %s', str(ex))
+        return False
+
+
+class Concord232Alarm(alarm.AlarmControlPanel):
+    """Represents the Concord232-based alarm panel."""
+
+    def __init__(self, hass, url, name):
+        """Initialize the concord232 alarm panel."""
+        from concord232 import client as concord232_client
+
+        self._state = STATE_UNKNOWN
+        self._hass = hass
+        self._name = name
+        self._url = url
+
+        try:
+            client = concord232_client.Client(self._url)
+        except requests.exceptions.ConnectionError as ex:
+            _LOGGER.error('Unable to connect to Concord232: %s', str(ex))
+            raise  # re-raise so setup_platform() can handle the failure
+
+        self._alarm = client
+        self._alarm.partitions = self._alarm.list_partitions()
+        self._alarm.last_partition_update = datetime.datetime.now()
+        self.update()
+
+    @property
+    def should_poll(self):
+        """Polling needed."""
+        return True
+
+    @property
+    def name(self):
+        """Return the name of the device."""
+        return self._name
+
+    @property
+    def code_format(self):
+        """Return the characters if a code is defined."""
+        return '[0-9]{4}([0-9]{2})?'
+
+    @property
+    def state(self):
+        """Return the state of the device."""
+        return self._state
+
+    def update(self):
+        """Update values from API."""
+        try:
+            part = self._alarm.list_partitions()[0]
+        except requests.exceptions.ConnectionError as ex:
+            _LOGGER.error('Unable to connect to %(host)s: %(reason)s',
+                          dict(host=self._url, reason=ex))
+            self._state = STATE_UNKNOWN
+            return self._state
+        except IndexError:
+            _LOGGER.error('concord232 reports no partitions')
+            self._state = STATE_UNKNOWN
+            return self._state
+
+        if part['arming_level'] == "Off":
+            newstate = STATE_ALARM_DISARMED
+        elif "Home" in part['arming_level']:
+            newstate = STATE_ALARM_ARMED_HOME
+        else:
+            newstate = STATE_ALARM_ARMED_AWAY
+
+        if newstate != self._state:
+            _LOGGER.info("State change from %s to %s", self._state, newstate)
+            self._state = newstate
+        return self._state
+
+    def alarm_disarm(self, code=None):
+        """Send disarm command."""
+        self._alarm.disarm(code)
+
+    def alarm_arm_home(self, code=None):
+        """Send arm home command."""
+        self._alarm.arm('home')
+
+    def alarm_arm_away(self, code=None):
+        """Send arm away command."""
+        self._alarm.arm('auto')
+
+    def alarm_trigger(self, code=None):
+        """Alarm trigger command."""
+        raise NotImplementedError()
diff --git a/homeassistant/components/alarm_control_panel/nx584.py b/homeassistant/components/alarm_control_panel/nx584.py
index 45857f3ef29..8e3b327aecb 100644
--- a/homeassistant/components/alarm_control_panel/nx584.py
+++ b/homeassistant/components/alarm_control_panel/nx584.py
@@ -60,6 +60,7 @@ class NX584Alarm(alarm.AlarmControlPanel):
         # talk to the API and trigger a requests exception for setup_platform()
         # to catch
         self._alarm.list_zones()
+        self._state = STATE_UNKNOWN

     @property
     def should_poll(self):
@@ -79,16 +80,22 @@ class NX584Alarm(alarm.AlarmControlPanel):
     @property
     def state(self):
         """Return the state of the device."""
+        return self._state
+
+    def update(self):
+        """Process new events from panel."""
         try:
             part = self._alarm.list_partitions()[0]
             zones = self._alarm.list_zones()
         except requests.exceptions.ConnectionError as ex:
             _LOGGER.error('Unable to connect to %(host)s: %(reason)s',
                           dict(host=self._url, reason=ex))
-            return STATE_UNKNOWN
+            self._state = STATE_UNKNOWN
+            return
         except IndexError:
             _LOGGER.error('nx584 reports no partitions')
-            return STATE_UNKNOWN
+            self._state = STATE_UNKNOWN
+            return

         bypassed = False
         for zone in zones:
@@ -100,11 +107,11 @@ class NX584Alarm(alarm.AlarmControlPanel):
                 break

         if not part['armed']:
-            return STATE_ALARM_DISARMED
+            self._state = STATE_ALARM_DISARMED
         elif bypassed:
-            return STATE_ALARM_ARMED_HOME
+            self._state = STATE_ALARM_ARMED_HOME
         else:
-            return STATE_ALARM_ARMED_AWAY
+            self._state = STATE_ALARM_ARMED_AWAY

     def alarm_disarm(self, code=None):
         """Send disarm command."""
diff --git a/homeassistant/components/alexa.py b/homeassistant/components/alexa.py
index 94d5b24cbf0..64ff50af323 100644
--- a/homeassistant/components/alexa.py
+++ b/homeassistant/components/alexa.py
@@ -7,16 +7,20 @@ https://home-assistant.io/components/alexa/
 import copy
 import enum
 import logging
+import uuid
+from datetime import datetime

 import voluptuous as vol

 from homeassistant.const import HTTP_BAD_REQUEST
 from homeassistant.helpers import template, script, config_validation as cv
 from homeassistant.components.http import HomeAssistantView
+import homeassistant.util.dt as dt_util

 _LOGGER = logging.getLogger(__name__)

-API_ENDPOINT = '/api/alexa'
+INTENTS_API_ENDPOINT = '/api/alexa'
+FLASH_BRIEFINGS_API_ENDPOINT = '/api/alexa/flash_briefings/'

 CONF_ACTION = 'action'
 CONF_CARD = 'card'
@@ -28,6
+32,23 @@ CONF_TITLE = 'title' CONF_CONTENT = 'content' CONF_TEXT = 'text' +CONF_FLASH_BRIEFINGS = 'flash_briefings' +CONF_UID = 'uid' +CONF_DATE = 'date' +CONF_TITLE = 'title' +CONF_AUDIO = 'audio' +CONF_TEXT = 'text' +CONF_DISPLAY_URL = 'display_url' + +ATTR_UID = 'uid' +ATTR_UPDATE_DATE = 'updateDate' +ATTR_TITLE_TEXT = 'titleText' +ATTR_STREAM_URL = 'streamUrl' +ATTR_MAIN_TEXT = 'mainText' +ATTR_REDIRECTION_URL = 'redirectionURL' + +DATE_FORMAT = '%Y-%m-%dT%H:%M:%S.0Z' + DOMAIN = 'alexa' DEPENDENCIES = ['http'] @@ -61,6 +82,16 @@ CONFIG_SCHEMA = vol.Schema({ vol.Required(CONF_TEXT): cv.template, } } + }, + CONF_FLASH_BRIEFINGS: { + cv.string: vol.All(cv.ensure_list, [{ + vol.Required(CONF_UID, default=str(uuid.uuid4())): cv.string, + vol.Optional(CONF_DATE, default=datetime.utcnow()): cv.string, + vol.Required(CONF_TITLE): cv.template, + vol.Optional(CONF_AUDIO): cv.template, + vol.Required(CONF_TEXT, default=""): cv.template, + vol.Optional(CONF_DISPLAY_URL): cv.template, + }]), } } }, extra=vol.ALLOW_EXTRA) @@ -68,16 +99,19 @@ CONFIG_SCHEMA = vol.Schema({ def setup(hass, config): """Activate Alexa component.""" - hass.wsgi.register_view(AlexaView(hass, - config[DOMAIN].get(CONF_INTENTS, {}))) + intents = config[DOMAIN].get(CONF_INTENTS, {}) + flash_briefings = config[DOMAIN].get(CONF_FLASH_BRIEFINGS, {}) + + hass.wsgi.register_view(AlexaIntentsView(hass, intents)) + hass.wsgi.register_view(AlexaFlashBriefingView(hass, flash_briefings)) return True -class AlexaView(HomeAssistantView): +class AlexaIntentsView(HomeAssistantView): """Handle Alexa requests.""" - url = API_ENDPOINT + url = INTENTS_API_ENDPOINT name = 'api:alexa' def __init__(self, hass, intents): @@ -235,3 +269,69 @@ class AlexaResponse(object): 'sessionAttributes': self.session_attributes, 'response': response, } + + +class AlexaFlashBriefingView(HomeAssistantView): + """Handle Alexa Flash Briefing skill requests.""" + + url = FLASH_BRIEFINGS_API_ENDPOINT + name = 'api:alexa:flash_briefings' + + def __init__(self, hass, flash_briefings): + """Initialize Alexa view.""" + super().__init__(hass) + self.flash_briefings = copy.deepcopy(flash_briefings) + template.attach(hass, self.flash_briefings) + + # pylint: disable=too-many-branches + def get(self, request, briefing_id): + """Handle Alexa Flash Briefing request.""" + _LOGGER.debug('Received Alexa flash briefing request for: %s', + briefing_id) + + if self.flash_briefings.get(briefing_id) is None: + err = 'No configured Alexa flash briefing was found for: %s' + _LOGGER.error(err, briefing_id) + return self.Response(status=404) + + briefing = [] + + for item in self.flash_briefings.get(briefing_id, []): + output = {} + if item.get(CONF_TITLE) is not None: + if isinstance(item.get(CONF_TITLE), template.Template): + output[ATTR_TITLE_TEXT] = item[CONF_TITLE].render() + else: + output[ATTR_TITLE_TEXT] = item.get(CONF_TITLE) + + if item.get(CONF_TEXT) is not None: + if isinstance(item.get(CONF_TEXT), template.Template): + output[ATTR_MAIN_TEXT] = item[CONF_TEXT].render() + else: + output[ATTR_MAIN_TEXT] = item.get(CONF_TEXT) + + if item.get(CONF_UID) is not None: + output[ATTR_UID] = item.get(CONF_UID) + + if item.get(CONF_AUDIO) is not None: + if isinstance(item.get(CONF_AUDIO), template.Template): + output[ATTR_STREAM_URL] = item[CONF_AUDIO].render() + else: + output[ATTR_STREAM_URL] = item.get(CONF_AUDIO) + + if item.get(CONF_DISPLAY_URL) is not None: + if isinstance(item.get(CONF_DISPLAY_URL), + template.Template): + output[ATTR_REDIRECTION_URL] = \ + 
item[CONF_DISPLAY_URL].render() + else: + output[ATTR_REDIRECTION_URL] = item.get(CONF_DISPLAY_URL) + + if isinstance(item[CONF_DATE], str): + item[CONF_DATE] = dt_util.parse_datetime(item[CONF_DATE]) + + output[ATTR_UPDATE_DATE] = item[CONF_DATE].strftime(DATE_FORMAT) + + briefing.append(output) + + return self.json(briefing) diff --git a/homeassistant/components/automation/__init__.py b/homeassistant/components/automation/__init__.py index 355e19f7fa0..244887ca10a 100644 --- a/homeassistant/components/automation/__init__.py +++ b/homeassistant/components/automation/__init__.py @@ -11,6 +11,7 @@ import os import voluptuous as vol +from homeassistant.core import callback from homeassistant.bootstrap import prepare_setup_platform from homeassistant import config as conf_util from homeassistant.const import ( @@ -157,24 +158,24 @@ def setup(hass, config): descriptions = conf_util.load_yaml_config_file( os.path.join(os.path.dirname(__file__), 'services.yaml')) - @asyncio.coroutine + @callback def trigger_service_handler(service_call): """Handle automation triggers.""" - for entity in component.extract_from_service(service_call): + for entity in component.async_extract_from_service(service_call): hass.loop.create_task(entity.async_trigger( service_call.data.get(ATTR_VARIABLES), True)) - @asyncio.coroutine + @callback def turn_onoff_service_handler(service_call): """Handle automation turn on/off service calls.""" method = 'async_{}'.format(service_call.service) - for entity in component.extract_from_service(service_call): + for entity in component.async_extract_from_service(service_call): hass.loop.create_task(getattr(entity, method)()) - @asyncio.coroutine + @callback def toggle_service_handler(service_call): """Handle automation toggle service calls.""" - for entity in component.extract_from_service(service_call): + for entity in component.async_extract_from_service(service_call): if entity.is_on: hass.loop.create_task(entity.async_turn_off()) else: @@ -183,8 +184,7 @@ def setup(hass, config): @asyncio.coroutine def reload_service_handler(service_call): """Remove all automations and load new ones from config.""" - conf = yield from hass.loop.run_in_executor( - None, component.prepare_reload) + conf = yield from component.async_prepare_reload() if conf is None: return hass.loop.create_task(_async_process_config(hass, conf, component)) @@ -271,7 +271,9 @@ class AutomationEntity(ToggleEntity): self._async_detach_triggers() self._async_detach_triggers = None self._enabled = False - self.hass.loop.create_task(self.async_update_ha_state()) + # It's important that the update is finished before this method + # ends because async_remove depends on it. + yield from self.async_update_ha_state() @asyncio.coroutine def async_trigger(self, variables, skip_condition=False): @@ -280,15 +282,15 @@ class AutomationEntity(ToggleEntity): This method is a coroutine. 
""" if skip_condition or self._cond_func(variables): - yield from self._async_action(variables) + yield from self._async_action(self.entity_id, variables) self._last_triggered = utcnow() self.hass.loop.create_task(self.async_update_ha_state()) - def remove(self): + @asyncio.coroutine + def async_remove(self): """Remove automation from HASS.""" - run_coroutine_threadsafe(self.async_turn_off(), - self.hass.loop).result() - super().remove() + yield from self.async_turn_off() + yield from super().async_remove() @asyncio.coroutine def async_enable(self): @@ -341,12 +343,11 @@ def _async_process_config(hass, config, component): entity = AutomationEntity(name, async_attach_triggers, cond_func, action, hidden) if config_block[CONF_INITIAL_STATE]: - tasks.append(hass.loop.create_task(entity.async_enable())) + tasks.append(entity.async_enable()) entities.append(entity) yield from asyncio.gather(*tasks, loop=hass.loop) - yield from hass.loop.run_in_executor( - None, component.add_entities, entities) + hass.loop.create_task(component.async_add_entities(entities)) return len(entities) > 0 @@ -356,10 +357,11 @@ def _async_get_action(hass, config, name): script_obj = script.Script(hass, config, name) @asyncio.coroutine - def action(variables=None): + def action(entity_id, variables): """Action to be executed.""" _LOGGER.info('Executing %s', name) - logbook.async_log_entry(hass, name, 'has been triggered', DOMAIN) + logbook.async_log_entry( + hass, name, 'has been triggered', DOMAIN, entity_id) hass.loop.create_task(script_obj.async_run(variables)) return action diff --git a/homeassistant/components/binary_sensor/concord232.py b/homeassistant/components/binary_sensor/concord232.py new file mode 100755 index 00000000000..bc1eab4694a --- /dev/null +++ b/homeassistant/components/binary_sensor/concord232.py @@ -0,0 +1,143 @@ +""" +Support for exposing Concord232 elements as sensors. 
+ +For more details about this platform, please refer to the documentation at +https://home-assistant.io/components/binary_sensor.concord232/ +""" +import datetime + +import logging + +from homeassistant.components.binary_sensor import ( + BinarySensorDevice, PLATFORM_SCHEMA, SENSOR_CLASSES) +from homeassistant.const import (CONF_HOST, CONF_PORT) + +import homeassistant.helpers.config_validation as cv + +import requests + +import voluptuous as vol + + +REQUIREMENTS = ['concord232==0.14'] + +_LOGGER = logging.getLogger(__name__) + +CONF_EXCLUDE_ZONES = 'exclude_zones' +CONF_ZONE_TYPES = 'zone_types' + +DEFAULT_HOST = 'localhost' +DEFAULT_PORT = '5007' +DEFAULT_SSL = False + +ZONE_TYPES_SCHEMA = vol.Schema({ + cv.positive_int: vol.In(SENSOR_CLASSES), +}) + +PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({ + vol.Optional(CONF_EXCLUDE_ZONES, default=[]): + vol.All(cv.ensure_list, [cv.positive_int]), + vol.Optional(CONF_HOST, default=DEFAULT_HOST): cv.string, + vol.Optional(CONF_PORT, default=DEFAULT_PORT): cv.port, + vol.Optional(CONF_ZONE_TYPES, default={}): ZONE_TYPES_SCHEMA, +}) + +SCAN_INTERVAL = 1 + +DEFAULT_NAME = "Alarm" + + +# pylint: disable=too-many-locals +def setup_platform(hass, config, add_devices, discovery_info=None): + """Setup the Concord232 binary sensor platform.""" + from concord232 import client as concord232_client + + host = config.get(CONF_HOST) + port = config.get(CONF_PORT) + exclude = config.get(CONF_EXCLUDE_ZONES) + zone_types = config.get(CONF_ZONE_TYPES) + sensors = [] + + try: + _LOGGER.debug('Initializing Client.') + client = concord232_client.Client('http://{}:{}' + .format(host, port)) + client.zones = client.list_zones() + client.last_zone_update = datetime.datetime.now() + + except requests.exceptions.ConnectionError as ex: + _LOGGER.error('Unable to connect to Concord232: %s', str(ex)) + return False + + for zone in client.zones: + _LOGGER.info('Loading Zone found: %s', zone['name']) + if zone['number'] not in exclude: + sensors.append(Concord232ZoneSensor( + hass, + client, + zone, + zone_types.get(zone['number'], get_opening_type(zone)))) + + add_devices(sensors) + + return True + + +def get_opening_type(zone): + """Helper function to try to guess sensor type frm name.""" + if "MOTION" in zone["name"]: + return "motion" + if "KEY" in zone["name"]: + return "safety" + if "SMOKE" in zone["name"]: + return "smoke" + if "WATER" in zone["name"]: + return "water" + return "opening" + + +class Concord232ZoneSensor(BinarySensorDevice): + """Representation of a Concord232 zone as a sensor.""" + + def __init__(self, hass, client, zone, zone_type): + """Initialize the Concord232 binary sensor.""" + self._hass = hass + self._client = client + self._zone = zone + self._number = zone['number'] + self._zone_type = zone_type + self.update() + + @property + def sensor_class(self): + """Return the class of this sensor, from SENSOR_CLASSES.""" + return self._zone_type + + @property + def should_poll(self): + """No polling needed.""" + return True + + @property + def name(self): + """Return the name of the binary sensor.""" + return self._zone['name'] + + @property + def is_on(self): + """Return true if the binary sensor is on.""" + # True means "faulted" or "open" or "abnormal state" + return bool(self._zone['state'] == 'Normal') + + def update(self): + """"Get updated stats from API.""" + last_update = datetime.datetime.now() - self._client.last_zone_update + _LOGGER.debug("Zone: %s ", self._zone) + if last_update > datetime.timedelta(seconds=1): + self._client.zones = 
self._client.list_zones() + self._client.last_zone_update = datetime.datetime.now() + _LOGGER.debug("Updated from Zone: %s", self._zone['name']) + + if hasattr(self._client, 'zones'): + self._zone = next((x for x in self._client.zones + if x['number'] == self._number), None) diff --git a/homeassistant/components/binary_sensor/netatmo.py b/homeassistant/components/binary_sensor/netatmo.py new file mode 100644 index 00000000000..e5004db0a4b --- /dev/null +++ b/homeassistant/components/binary_sensor/netatmo.py @@ -0,0 +1,127 @@ +""" +Support for the Netatmo binary sensors. + +The binary sensors based on events seen by the NetatmoCamera + +For more details about this platform, please refer to the documentation at +https://home-assistant.io/components/binary_sensor.netatmo/ +""" +import logging +import voluptuous as vol + +from homeassistant.components.binary_sensor import ( + BinarySensorDevice, PLATFORM_SCHEMA) +from homeassistant.components.netatmo import WelcomeData +from homeassistant.loader import get_component +from homeassistant.const import CONF_MONITORED_CONDITIONS +from homeassistant.helpers import config_validation as cv + +DEPENDENCIES = ["netatmo"] + +_LOGGER = logging.getLogger(__name__) + + +# These are the available sensors mapped to binary_sensor class +SENSOR_TYPES = { + "Someone known": "motion", + "Someone unknown": "motion", + "Motion": "motion", +} + +CONF_HOME = 'home' +CONF_CAMERAS = 'cameras' + +PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({ + vol.Optional(CONF_HOME): cv.string, + vol.Optional(CONF_CAMERAS, default=[]): + vol.All(cv.ensure_list, [cv.string]), + vol.Optional(CONF_MONITORED_CONDITIONS, default=SENSOR_TYPES.keys()): + vol.All(cv.ensure_list, [vol.In(SENSOR_TYPES)]), +}) + + +# pylint: disable=unused-argument +def setup_platform(hass, config, add_devices, discovery_info=None): + """Setup access to Netatmo binary sensor.""" + netatmo = get_component('netatmo') + home = config.get(CONF_HOME, None) + + import lnetatmo + try: + data = WelcomeData(netatmo.NETATMO_AUTH, home) + if data.get_camera_names() == []: + return None + except lnetatmo.NoDevice: + return None + + sensors = config.get(CONF_MONITORED_CONDITIONS, SENSOR_TYPES) + + for camera_name in data.get_camera_names(): + if CONF_CAMERAS in config: + if config[CONF_CAMERAS] != [] and \ + camera_name not in config[CONF_CAMERAS]: + continue + for variable in sensors: + add_devices([WelcomeBinarySensor(data, camera_name, home, + variable)]) + + +class WelcomeBinarySensor(BinarySensorDevice): + """Represent a single binary sensor in a Netatmo Welcome device.""" + + def __init__(self, data, camera_name, home, sensor): + """Setup for access to the Netatmo camera events.""" + self._data = data + self._camera_name = camera_name + self._home = home + if home: + self._name = home + ' / ' + camera_name + else: + self._name = camera_name + self._sensor_name = sensor + self._name += ' ' + sensor + camera_id = data.welcomedata.cameraByName(camera=camera_name, + home=home)['id'] + self._unique_id = "Welcome_binary_sensor {0} - {1}".format(self._name, + camera_id) + self.update() + + @property + def name(self): + """The name of the Netatmo device and this sensor.""" + return self._name + + @property + def unique_id(self): + """Return the unique ID for this sensor.""" + return self._unique_id + + @property + def sensor_class(self): + """Return the class of this sensor, from SENSOR_CLASSES.""" + return SENSOR_TYPES.get(self._sensor_name) + + @property + def is_on(self): + """Return true if binary sensor is on.""" + return 
self._state + + def update(self): + """Request an update from the Netatmo API.""" + self._data.update() + self._data.welcomedata.updateEvent(home=self._data.home) + + if self._sensor_name == "Someone known": + self._state =\ + self._data.welcomedata.someoneKnownSeen(self._home, + self._camera_name) + elif self._sensor_name == "Someone unknown": + self._state =\ + self._data.welcomedata.someoneUnknownSeen(self._home, + self._camera_name) + elif self._sensor_name == "Motion": + self._state =\ + self._data.welcomedata.motionDetected(self._home, + self._camera_name) + else: + return None diff --git a/homeassistant/components/binary_sensor/rest.py b/homeassistant/components/binary_sensor/rest.py index 3e22150a4fd..36b455f9dfe 100644 --- a/homeassistant/components/binary_sensor/rest.py +++ b/homeassistant/components/binary_sensor/rest.py @@ -5,7 +5,6 @@ For more details about this platform, please refer to the documentation at https://home-assistant.io/components/binary_sensor.rest/ """ import logging -import json import voluptuous as vol from requests.auth import HTTPBasicAuth, HTTPDigestAuth @@ -30,7 +29,7 @@ PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({ vol.Required(CONF_RESOURCE): cv.url, vol.Optional(CONF_AUTHENTICATION): vol.In([HTTP_BASIC_AUTHENTICATION, HTTP_DIGEST_AUTHENTICATION]), - vol.Optional(CONF_HEADERS): cv.string, + vol.Optional(CONF_HEADERS): {cv.string: cv.string}, vol.Optional(CONF_METHOD, default=DEFAULT_METHOD): vol.In(['POST', 'GET']), vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string, vol.Optional(CONF_PASSWORD): cv.string, @@ -52,7 +51,7 @@ def setup_platform(hass, config, add_devices, discovery_info=None): verify_ssl = config.get(CONF_VERIFY_SSL) username = config.get(CONF_USERNAME) password = config.get(CONF_PASSWORD) - headers = json.loads(config.get(CONF_HEADERS, '{}')) + headers = config.get(CONF_HEADERS) sensor_class = config.get(CONF_SENSOR_CLASS) value_template = config.get(CONF_VALUE_TEMPLATE) if value_template is not None: @@ -70,7 +69,7 @@ def setup_platform(hass, config, add_devices, discovery_info=None): rest.update() if rest.data is None: - _LOGGER.error('Unable to fetch REST data') + _LOGGER.error("Unable to fetch REST data from %s", resource) return False add_devices([RestBinarySensor( @@ -109,8 +108,8 @@ class RestBinarySensor(BinarySensorDevice): return False if self._value_template is not None: - response = self._value_template.render_with_possible_json_value( - self.rest.data, False) + response = self._value_template.\ + async_render_with_possible_json_value(self.rest.data, False) try: return bool(int(response)) diff --git a/homeassistant/components/binary_sensor/tcp.py b/homeassistant/components/binary_sensor/tcp.py index dcf4c3dff7e..12a96a5492f 100644 --- a/homeassistant/components/binary_sensor/tcp.py +++ b/homeassistant/components/binary_sensor/tcp.py @@ -7,21 +7,20 @@ https://home-assistant.io/components/binary_sensor.tcp/ import logging from homeassistant.components.binary_sensor import BinarySensorDevice -from homeassistant.components.sensor.tcp import Sensor, CONF_VALUE_ON - +from homeassistant.components.sensor.tcp import ( + TcpSensor, CONF_VALUE_ON, PLATFORM_SCHEMA) _LOGGER = logging.getLogger(__name__) - -def setup_platform(hass, config, add_entities, discovery_info=None): - """Create the binary sensor.""" - if not BinarySensor.validate_config(config): - return False - - add_entities((BinarySensor(hass, config),)) +PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({}) -class BinarySensor(BinarySensorDevice, Sensor): +def setup_platform(hass, 
config, add_devices, discovery_info=None): + """Set up the TCP binary sensor.""" + add_devices([TcpBinarySensor(hass, config)]) + + +class TcpBinarySensor(BinarySensorDevice, TcpSensor): """A binary sensor which is on when its state == CONF_VALUE_ON.""" required = (CONF_VALUE_ON,) diff --git a/homeassistant/components/binary_sensor/template.py b/homeassistant/components/binary_sensor/template.py index d179edfc1d8..365b29eb308 100644 --- a/homeassistant/components/binary_sensor/template.py +++ b/homeassistant/components/binary_sensor/template.py @@ -17,8 +17,8 @@ from homeassistant.const import ( ATTR_FRIENDLY_NAME, ATTR_ENTITY_ID, CONF_VALUE_TEMPLATE, CONF_SENSOR_CLASS, CONF_SENSORS) from homeassistant.exceptions import TemplateError -from homeassistant.helpers.entity import generate_entity_id -from homeassistant.helpers.event import track_state_change +from homeassistant.helpers.entity import async_generate_entity_id +from homeassistant.helpers.event import async_track_state_change import homeassistant.helpers.config_validation as cv _LOGGER = logging.getLogger(__name__) @@ -35,7 +35,8 @@ PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({ }) -def setup_platform(hass, config, add_devices, discovery_info=None): +@asyncio.coroutine +def async_setup_platform(hass, config, async_add_devices, discovery_info=None): """Setup template binary sensors.""" sensors = [] @@ -61,8 +62,8 @@ def setup_platform(hass, config, add_devices, discovery_info=None): if not sensors: _LOGGER.error('No sensors added') return False - add_devices(sensors) + hass.loop.create_task(async_add_devices(sensors)) return True @@ -74,21 +75,22 @@ class BinarySensorTemplate(BinarySensorDevice): value_template, entity_ids): """Initialize the Template binary sensor.""" self.hass = hass - self.entity_id = generate_entity_id(ENTITY_ID_FORMAT, device, - hass=hass) + self.entity_id = async_generate_entity_id(ENTITY_ID_FORMAT, device, + hass=hass) self._name = friendly_name self._sensor_class = sensor_class self._template = value_template self._state = None - self.update() + self._async_render() @callback def template_bsensor_state_listener(entity, old_state, new_state): """Called when the target device changes state.""" hass.loop.create_task(self.async_update_ha_state(True)) - track_state_change(hass, entity_ids, template_bsensor_state_listener) + async_track_state_change( + hass, entity_ids, template_bsensor_state_listener) @property def name(self): @@ -112,7 +114,11 @@ class BinarySensorTemplate(BinarySensorDevice): @asyncio.coroutine def async_update(self): - """Get the latest data and update the state.""" + """Update the state from the template.""" + self._async_render() + + def _async_render(self): + """Render the state from the template.""" try: self._state = self._template.async_render().lower() == 'true' except TemplateError as ex: diff --git a/homeassistant/components/camera/netatmo.py b/homeassistant/components/camera/netatmo.py index 9069a5c6c28..47808de02b9 100644 --- a/homeassistant/components/camera/netatmo.py +++ b/homeassistant/components/camera/netatmo.py @@ -5,12 +5,11 @@ For more details about this platform, please refer to the documentation at https://home-assistant.io/components/camera.netatmo/ """ import logging -from datetime import timedelta import requests import voluptuous as vol -from homeassistant.util import Throttle +from homeassistant.components.netatmo import WelcomeData from homeassistant.components.camera import (Camera, PLATFORM_SCHEMA) from homeassistant.loader import get_component from 
homeassistant.helpers import config_validation as cv @@ -22,8 +21,6 @@ _LOGGER = logging.getLogger(__name__) CONF_HOME = 'home' CONF_CAMERAS = 'cameras' -MIN_TIME_BETWEEN_UPDATES = timedelta(minutes=10) - PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({ vol.Optional(CONF_HOME): cv.string, vol.Optional(CONF_CAMERAS, default=[]): @@ -39,15 +36,15 @@ def setup_platform(hass, config, add_devices, discovery_info=None): import lnetatmo try: data = WelcomeData(netatmo.NETATMO_AUTH, home) + for camera_name in data.get_camera_names(): + if CONF_CAMERAS in config: + if config[CONF_CAMERAS] != [] and \ + camera_name not in config[CONF_CAMERAS]: + continue + add_devices([WelcomeCamera(data, camera_name, home)]) except lnetatmo.NoDevice: return None - for camera_name in data.get_camera_names(): - if config[CONF_CAMERAS] != []: - if camera_name not in config[CONF_CAMERAS]: - continue - add_devices([WelcomeCamera(data, camera_name, home)]) - class WelcomeCamera(Camera): """Representation of the images published from Welcome camera.""" @@ -61,6 +58,10 @@ class WelcomeCamera(Camera): self._name = home + ' / ' + camera_name else: self._name = camera_name + camera_id = data.welcomedata.cameraByName(camera=camera_name, + home=home)['id'] + self._unique_id = "Welcome_camera {0} - {1}".format(self._name, + camera_id) self._vpnurl, self._localurl = self._data.welcomedata.cameraUrls( camera=camera_name ) @@ -87,31 +88,7 @@ class WelcomeCamera(Camera): """Return the name of this Netatmo Welcome device.""" return self._name - -class WelcomeData(object): - """Get the latest data from NetAtmo.""" - - def __init__(self, auth, home=None): - """Initialize the data object.""" - self.auth = auth - self.welcomedata = None - self.camera_names = [] - self.home = home - - def get_camera_names(self): - """Return all module available on the API as a list.""" - self.update() - if not self.home: - for home in self.welcomedata.cameras: - for camera in self.welcomedata.cameras[home].values(): - self.camera_names.append(camera['name']) - else: - for camera in self.welcomedata.cameras[self.home].values(): - self.camera_names.append(camera['name']) - return self.camera_names - - @Throttle(MIN_TIME_BETWEEN_UPDATES) - def update(self): - """Call the NetAtmo API to update the data.""" - import lnetatmo - self.welcomedata = lnetatmo.WelcomeData(self.auth) + @property + def unique_id(self): + """Return the unique ID for this sensor.""" + return self._unique_id diff --git a/homeassistant/components/camera/synology.py b/homeassistant/components/camera/synology.py new file mode 100644 index 00000000000..dedf91a0031 --- /dev/null +++ b/homeassistant/components/camera/synology.py @@ -0,0 +1,223 @@ +""" +Support for Synology Surveillance Station Cameras. 
+ +For more details about this platform, please refer to the documentation at +https://home-assistant.io/components/camera.synology/ +""" +import logging + +import voluptuous as vol + +import requests + +from homeassistant.const import ( + CONF_NAME, CONF_USERNAME, CONF_PASSWORD, + CONF_URL, CONF_WHITELIST) +from homeassistant.components.camera import ( + Camera, PLATFORM_SCHEMA) +import homeassistant.helpers.config_validation as cv + +_LOGGER = logging.getLogger(__name__) + +# pylint: disable=too-many-locals +DEFAULT_NAME = 'Synology Camera' +DEFAULT_STREAM_ID = '0' +TIMEOUT = 5 +CONF_CAMERA_NAME = 'camera_name' +CONF_STREAM_ID = 'stream_id' +CONF_VALID_CERT = 'valid_cert' + +QUERY_CGI = 'query.cgi' +QUERY_API = 'SYNO.API.Info' +AUTH_API = 'SYNO.API.Auth' +CAMERA_API = 'SYNO.SurveillanceStation.Camera' +STREAMING_API = 'SYNO.SurveillanceStation.VideoStream' +SESSION_ID = '0' + +WEBAPI_PATH = '/webapi/' +AUTH_PATH = 'auth.cgi' +CAMERA_PATH = 'camera.cgi' +STREAMING_PATH = 'SurveillanceStation/videoStreaming.cgi' + +SYNO_API_URL = '{0}{1}{2}' + +PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({ + vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string, + vol.Required(CONF_USERNAME): cv.string, + vol.Required(CONF_PASSWORD): cv.string, + vol.Required(CONF_URL): cv.string, + vol.Optional(CONF_WHITELIST, default=[]): cv.ensure_list, + vol.Optional(CONF_VALID_CERT, default=True): cv.boolean, +}) + + +def setup_platform(hass, config, add_devices, discovery_info=None): + """Setup a Synology IP Camera.""" + # Determine API to use for authentication + syno_api_url = SYNO_API_URL.format(config.get(CONF_URL), + WEBAPI_PATH, + QUERY_CGI) + query_payload = {'api': QUERY_API, + 'method': 'Query', + 'version': '1', + 'query': 'SYNO.'} + query_req = requests.get(syno_api_url, + params=query_payload, + verify=config.get(CONF_VALID_CERT), + timeout=TIMEOUT) + query_resp = query_req.json() + auth_path = query_resp['data'][AUTH_API]['path'] + camera_api = query_resp['data'][CAMERA_API]['path'] + camera_path = query_resp['data'][CAMERA_API]['path'] + streaming_path = query_resp['data'][STREAMING_API]['path'] + + # Authticate to NAS to get a session id + syno_auth_url = SYNO_API_URL.format(config.get(CONF_URL), + WEBAPI_PATH, + auth_path) + session_id = get_session_id(config.get(CONF_USERNAME), + config.get(CONF_PASSWORD), + syno_auth_url, + config.get(CONF_VALID_CERT)) + + # Use SessionID to get cameras in system + syno_camera_url = SYNO_API_URL.format(config.get(CONF_URL), + WEBAPI_PATH, + camera_api) + camera_payload = {'api': CAMERA_API, + 'method': 'List', + 'version': '1'} + camera_req = requests.get(syno_camera_url, + params=camera_payload, + verify=config.get(CONF_VALID_CERT), + timeout=TIMEOUT, + cookies={'id': session_id}) + camera_resp = camera_req.json() + cameras = camera_resp['data']['cameras'] + for camera in cameras: + if not config.get(CONF_WHITELIST): + camera_id = camera['id'] + snapshot_path = camera['snapshot_path'] + + add_devices([SynologyCamera(config, + camera_id, + camera['name'], + snapshot_path, + streaming_path, + camera_path, + auth_path)]) + + +def get_session_id(username, password, login_url, valid_cert): + """Get a session id.""" + auth_payload = {'api': AUTH_API, + 'method': 'Login', + 'version': '2', + 'account': username, + 'passwd': password, + 'session': 'SurveillanceStation', + 'format': 'sid'} + auth_req = requests.get(login_url, + params=auth_payload, + verify=valid_cert, + timeout=TIMEOUT) + auth_resp = auth_req.json() + return auth_resp['data']['sid'] + + +# pylint: 
disable=too-many-instance-attributes
+class SynologyCamera(Camera):
+    """An implementation of a Synology NAS based IP camera."""
+
+# pylint: disable=too-many-arguments
+    def __init__(self, config, camera_id, camera_name,
+                 snapshot_path, streaming_path, camera_path, auth_path):
+        """Initialize a Synology Surveillance Station camera."""
+        super().__init__()
+        self._name = camera_name
+        self._username = config.get(CONF_USERNAME)
+        self._password = config.get(CONF_PASSWORD)
+        self._synology_url = config.get(CONF_URL)
+        self._api_url = config.get(CONF_URL) + 'webapi/'
+        self._login_url = config.get(CONF_URL) + '/webapi/' + 'auth.cgi'
+        self._camera_name = config.get(CONF_CAMERA_NAME)
+        self._stream_id = config.get(CONF_STREAM_ID)
+        self._valid_cert = config.get(CONF_VALID_CERT)
+        self._camera_id = camera_id
+        self._snapshot_path = snapshot_path
+        self._streaming_path = streaming_path
+        self._camera_path = camera_path
+        self._auth_path = auth_path
+
+        self._session_id = get_session_id(self._username,
+                                          self._password,
+                                          self._login_url,
+                                          self._valid_cert)
+
+    def get_sid(self):
+        """Get a session id."""
+        auth_payload = {'api': AUTH_API,
+                        'method': 'Login',
+                        'version': '2',
+                        'account': self._username,
+                        'passwd': self._password,
+                        'session': 'SurveillanceStation',
+                        'format': 'sid'}
+        auth_req = requests.get(self._login_url,
+                                params=auth_payload,
+                                verify=self._valid_cert,
+                                timeout=TIMEOUT)
+        auth_resp = auth_req.json()
+        self._session_id = auth_resp['data']['sid']
+
+    def camera_image(self):
+        """Return a still image response from the camera."""
+        image_url = SYNO_API_URL.format(self._synology_url,
+                                        WEBAPI_PATH,
+                                        self._camera_path)
+        image_payload = {'api': CAMERA_API,
+                         'method': 'GetSnapshot',
+                         'version': '1',
+                         'cameraId': self._camera_id}
+        try:
+            response = requests.get(image_url,
+                                    params=image_payload,
+                                    timeout=TIMEOUT,
+                                    verify=self._valid_cert,
+                                    cookies={'id': self._session_id})
+        except requests.exceptions.RequestException as error:
+            _LOGGER.error('Error getting camera image: %s', error)
+            return None
+
+        return response.content
+
+    def camera_stream(self):
+        """Return a MJPEG stream image response directly from the camera."""
+        streaming_url = SYNO_API_URL.format(self._synology_url,
+                                            WEBAPI_PATH,
+                                            self._streaming_path)
+        streaming_payload = {'api': STREAMING_API,
+                             'method': 'Stream',
+                             'version': '1',
+                             'cameraId': self._camera_id,
+                             'format': 'mjpeg'}
+        response = requests.get(streaming_url,
+                                params=streaming_payload,
+                                stream=True,
+                                timeout=TIMEOUT,
+                                cookies={'id': self._session_id})
+        return response
+
+    def mjpeg_steam(self, response):
+        """Generate an HTTP MJPEG Stream from the Synology NAS."""
+        stream = self.camera_stream()
+        return response(
+            stream.iter_content(chunk_size=1024),
+            mimetype=stream.headers['Content-Type'],
+            direct_passthrough=True
+        )
+
+    @property
+    def name(self):
+        """Return the name of this device."""
+        return self._name
diff --git a/homeassistant/components/camera/verisure.py b/homeassistant/components/camera/verisure.py
new file mode 100644
index 00000000000..6e613b72298
--- /dev/null
+++ b/homeassistant/components/camera/verisure.py
@@ -0,0 +1,103 @@
+"""
+Camera that loads a picture from a local file.
+ +For more details about this platform, please refer to the documentation at +https://home-assistant.io/components/camera.verisure/ +""" +import errno +import logging +import os + +from homeassistant.components.camera import Camera +from homeassistant.const import EVENT_HOMEASSISTANT_STOP +from homeassistant.components.verisure import HUB as hub +from homeassistant.components.verisure import CONF_SMARTCAM + +_LOGGER = logging.getLogger(__name__) + + +def setup_platform(hass, config, add_devices, discovery_info=None): + """Setup the Camera.""" + if not int(hub.config.get(CONF_SMARTCAM, 1)): + return False + directory_path = hass.config.config_dir + if not os.access(directory_path, os.R_OK): + _LOGGER.error("file path %s is not readable", directory_path) + return False + hub.update_smartcam() + smartcams = [] + smartcams.extend([ + VerisureSmartcam(hass, value.deviceLabel, directory_path) + for value in hub.smartcam_status.values()]) + add_devices(smartcams) + + +class VerisureSmartcam(Camera): + """Local camera.""" + + def __init__(self, hass, device_id, directory_path): + """Initialize Verisure File Camera component.""" + super().__init__() + + self._device_id = device_id + self._directory_path = directory_path + self._image = None + self._image_id = None + hass.bus.listen_once(EVENT_HOMEASSISTANT_STOP, + self.delete_image) + + def camera_image(self): + """Return image response.""" + self.check_imagelist() + if not self._image: + _LOGGER.debug('No image to display') + return + _LOGGER.debug('Trying to open %s', self._image) + with open(self._image, 'rb') as file: + return file.read() + + def check_imagelist(self): + """Check the contents of the image list.""" + hub.update_smartcam_imagelist() + if (self._device_id not in hub.smartcam_dict or + not hub.smartcam_dict[self._device_id]): + return + images = hub.smartcam_dict[self._device_id] + new_image_id = images[0] + _LOGGER.debug('self._device_id=%s, self._images=%s, ' + 'self._new_image_id=%s', self._device_id, + images, new_image_id) + if (new_image_id == '-1' or + self._image_id == new_image_id): + _LOGGER.debug('The image is the same, or loading image_id') + return + _LOGGER.debug('Download new image %s', new_image_id) + hub.my_pages.smartcam.download_image(self._device_id, + new_image_id, + self._directory_path) + _LOGGER.debug('Old image_id=%s', self._image_id) + self.delete_image(self) + + self._image_id = new_image_id + self._image = os.path.join(self._directory_path, + '{}{}'.format( + self._image_id, + '.jpg')) + + def delete_image(self, event): + """Delete an old image.""" + remove_image = os.path.join(self._directory_path, + '{}{}'.format( + self._image_id, + '.jpg')) + try: + os.remove(remove_image) + _LOGGER.debug('Deleting old image %s', remove_image) + except OSError as error: + if error.errno != errno.ENOENT: + raise + + @property + def name(self): + """Return the name of this camera.""" + return hub.smartcam_status[self._device_id].location diff --git a/homeassistant/components/climate/__init__.py b/homeassistant/components/climate/__init__.py index e8047093cc8..714581ba331 100644 --- a/homeassistant/components/climate/__init__.py +++ b/homeassistant/components/climate/__init__.py @@ -253,7 +253,7 @@ def setup(hass, config): kwargs[value] = convert_temperature( temp, hass.config.units.temperature_unit, - climate.unit_of_measurement + climate.temperature_unit ) else: kwargs[value] = temp @@ -368,7 +368,10 @@ class ClimateDevice(Entity): @property def state(self): """Return the current state.""" - return 
self.current_operation or STATE_UNKNOWN + if self.current_operation: + return self.current_operation + else: + return STATE_UNKNOWN @property def state_attributes(self): @@ -398,17 +401,20 @@ class ClimateDevice(Entity): fan_mode = self.current_fan_mode if fan_mode is not None: data[ATTR_FAN_MODE] = fan_mode - data[ATTR_FAN_LIST] = self.fan_list + if self.fan_list: + data[ATTR_FAN_LIST] = self.fan_list operation_mode = self.current_operation if operation_mode is not None: data[ATTR_OPERATION_MODE] = operation_mode - data[ATTR_OPERATION_LIST] = self.operation_list + if self.operation_list: + data[ATTR_OPERATION_LIST] = self.operation_list swing_mode = self.current_swing_mode if swing_mode is not None: data[ATTR_SWING_MODE] = swing_mode - data[ATTR_SWING_LIST] = self.swing_list + if self.swing_list: + data[ATTR_SWING_LIST] = self.swing_list is_away = self.is_away_mode_on if is_away is not None: @@ -422,7 +428,12 @@ class ClimateDevice(Entity): @property def unit_of_measurement(self): - """Return the unit of measurement.""" + """The unit of measurement to display.""" + return self.hass.config.units.temperature_unit + + @property + def temperature_unit(self): + """The unit of measurement used by the platform.""" raise NotImplementedError @property @@ -534,12 +545,12 @@ class ClimateDevice(Entity): @property def min_temp(self): """Return the minimum temperature.""" - return convert_temperature(7, TEMP_CELSIUS, self.unit_of_measurement) + return convert_temperature(7, TEMP_CELSIUS, self.temperature_unit) @property def max_temp(self): """Return the maximum temperature.""" - return convert_temperature(35, TEMP_CELSIUS, self.unit_of_measurement) + return convert_temperature(35, TEMP_CELSIUS, self.temperature_unit) @property def min_humidity(self): @@ -556,10 +567,10 @@ class ClimateDevice(Entity): if temp is None or not isinstance(temp, Number): return temp - value = convert_temperature(temp, self.unit_of_measurement, - self.hass.config.units.temperature_unit) + value = convert_temperature(temp, self.temperature_unit, + self.unit_of_measurement) - if self.hass.config.units.temperature_unit is TEMP_CELSIUS: + if self.unit_of_measurement is TEMP_CELSIUS: decimal_count = 1 else: # Users of fahrenheit generally expect integer units. 
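The climate/__init__.py hunks above split the unit a platform reports (temperature_unit) from the unit Home Assistant displays (unit_of_measurement, now always taken from hass.config.units), with min_temp, max_temp and the rounding helper converting between the two. A minimal sketch of what a platform implements after this change; the class name and the 68 °F reading are made-up illustration values, not part of this PR:

from homeassistant.components.climate import ClimateDevice
from homeassistant.const import TEMP_FAHRENHEIT


class ExampleThermostat(ClimateDevice):
    """Hypothetical platform whose backend reports Fahrenheit."""

    @property
    def temperature_unit(self):
        """Unit used by the backend; the base class converts for display."""
        return TEMP_FAHRENHEIT

    @property
    def current_temperature(self):
        """Raw backend value; shown in the user's configured unit system."""
        return 68.0

With this in place, a Celsius-configured instance displays 20.0, because the conversion now happens once in ClimateDevice instead of in every platform.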
diff --git a/homeassistant/components/climate/demo.py b/homeassistant/components/climate/demo.py index 51346e62269..0104d9d01af 100644 --- a/homeassistant/components/climate/demo.py +++ b/homeassistant/components/climate/demo.py @@ -59,7 +59,7 @@ class DemoClimate(ClimateDevice): return self._name @property - def unit_of_measurement(self): + def temperature_unit(self): """Return the unit of measurement.""" return self._unit_of_measurement diff --git a/homeassistant/components/climate/ecobee.py b/homeassistant/components/climate/ecobee.py index 10e56490c84..1ed826e55dc 100644 --- a/homeassistant/components/climate/ecobee.py +++ b/homeassistant/components/climate/ecobee.py @@ -14,7 +14,7 @@ from homeassistant.components.climate import ( DOMAIN, STATE_COOL, STATE_HEAT, STATE_IDLE, ClimateDevice, ATTR_TARGET_TEMP_LOW, ATTR_TARGET_TEMP_HIGH) from homeassistant.const import ( - ATTR_ENTITY_ID, STATE_OFF, STATE_ON, TEMP_FAHRENHEIT, TEMP_CELSIUS) + ATTR_ENTITY_ID, STATE_OFF, STATE_ON, TEMP_FAHRENHEIT) from homeassistant.config import load_yaml_config_file import homeassistant.helpers.config_validation as cv @@ -105,12 +105,9 @@ class Thermostat(ClimateDevice): return self.thermostat['name'] @property - def unit_of_measurement(self): + def temperature_unit(self): """Return the unit of measurement.""" - if self.thermostat['settings']['useCelsius']: - return TEMP_CELSIUS - else: - return TEMP_FAHRENHEIT + return TEMP_FAHRENHEIT @property def current_temperature(self): diff --git a/homeassistant/components/climate/eq3btsmart.py b/homeassistant/components/climate/eq3btsmart.py index 646bf7f2aa8..87d9e322405 100644 --- a/homeassistant/components/climate/eq3btsmart.py +++ b/homeassistant/components/climate/eq3btsmart.py @@ -1,24 +1,38 @@ """ -Support for eq3 Bluetooth Smart thermostats. +Support for eQ-3 Bluetooth Smart thermostats. 
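The ecobee hunk above hard-codes TEMP_FAHRENHEIT as the platform unit, which is the unit the ecobee API itself reports, and leaves display conversion to the shared temperature helper. A quick illustration of that helper, using the convert(value, from_unit, to_unit) signature already used in the climate hunks (imported here under the convert_temperature alias); the 72 °F reading is an arbitrary example:

from homeassistant.const import TEMP_CELSIUS, TEMP_FAHRENHEIT
from homeassistant.util.temperature import convert as convert_temperature

# A platform reporting 72 °F, displayed on a Celsius-configured instance.
print(round(convert_temperature(72, TEMP_FAHRENHEIT, TEMP_CELSIUS), 1))  # 22.2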
For more details about this platform, please refer to the documentation at https://home-assistant.io/components/climate.eq3btsmart/ """ import logging -from homeassistant.components.climate import ClimateDevice -from homeassistant.const import TEMP_CELSIUS, CONF_DEVICES, ATTR_TEMPERATURE +import voluptuous as vol + +from homeassistant.components.climate import ClimateDevice, PLATFORM_SCHEMA +from homeassistant.const import ( + CONF_MAC, TEMP_CELSIUS, CONF_DEVICES, ATTR_TEMPERATURE) from homeassistant.util.temperature import convert +import homeassistant.helpers.config_validation as cv REQUIREMENTS = ['bluepy_devices==0.2.0'] -CONF_MAC = 'mac' - _LOGGER = logging.getLogger(__name__) +ATTR_MODE = 'mode' +ATTR_MODE_READABLE = 'mode_readable' + +DEVICE_SCHEMA = vol.Schema({ + vol.Required(CONF_MAC): cv.string, +}) + +PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({ + vol.Required(CONF_DEVICES): + vol.Schema({cv.string: DEVICE_SCHEMA}), +}) + def setup_platform(hass, config, add_devices, discovery_info=None): - """Setup the eq3 BLE thermostats.""" + """Setup the eQ-3 BLE thermostats.""" devices = [] for name, device_cfg in config[CONF_DEVICES].items(): @@ -30,14 +44,13 @@ def setup_platform(hass, config, add_devices, discovery_info=None): # pylint: disable=too-many-instance-attributes, import-error, abstract-method class EQ3BTSmartThermostat(ClimateDevice): - """Representation of a EQ3 Bluetooth Smart thermostat.""" + """Representation of a eQ-3 Bluetooth Smart thermostat.""" def __init__(self, _mac, _name): """Initialize the thermostat.""" from bluepy_devices.devices import eq3btsmart self._name = _name - self._thermostat = eq3btsmart.EQ3BTSmartThermostat(_mac) @property @@ -46,7 +59,7 @@ class EQ3BTSmartThermostat(ClimateDevice): return self._name @property - def unit_of_measurement(self): + def temperature_unit(self): """Return the unit of measurement that is used.""" return TEMP_CELSIUS @@ -70,8 +83,10 @@ class EQ3BTSmartThermostat(ClimateDevice): @property def device_state_attributes(self): """Return the device specific state attributes.""" - return {"mode": self._thermostat.mode, - "mode_readable": self._thermostat.mode_readable} + return { + ATTR_MODE: self._thermostat.mode, + ATTR_MODE_READABLE: self._thermostat.mode_readable, + } @property def min_temp(self): diff --git a/homeassistant/components/climate/generic_thermostat.py b/homeassistant/components/climate/generic_thermostat.py index 97ca7fe012f..c5c38d624f5 100644 --- a/homeassistant/components/climate/generic_thermostat.py +++ b/homeassistant/components/climate/generic_thermostat.py @@ -100,7 +100,7 @@ class GenericThermostat(ClimateDevice): return self._name @property - def unit_of_measurement(self): + def temperature_unit(self): """Return the unit of measurement.""" return self._unit diff --git a/homeassistant/components/climate/heatmiser.py b/homeassistant/components/climate/heatmiser.py index 941f211c411..a6dd01af4ab 100644 --- a/homeassistant/components/climate/heatmiser.py +++ b/homeassistant/components/climate/heatmiser.py @@ -1,56 +1,54 @@ """ Support for the PRT Heatmiser themostats using the V3 protocol. -See https://github.com/andylockran/heatmiserV3 for more info on the -heatmiserV3 module dependency. 
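Both the eQ-3 and Heatmiser platforms in this diff gain voluptuous schemas whose top-level key maps device names to per-device dictionaries. A small sketch of how such a schema accepts and rejects input; it is rebuilt locally with plain 'devices'/'mac' string keys so it runs on its own, and the device name and MAC address are made up:

import voluptuous as vol
import homeassistant.helpers.config_validation as cv

DEVICE_SCHEMA = vol.Schema({vol.Required('mac'): cv.string})
DEVICES_SCHEMA = vol.Schema({
    vol.Required('devices'): vol.Schema({cv.string: DEVICE_SCHEMA}),
})

# Accepted: each named device only needs a 'mac' string.
DEVICES_SCHEMA({'devices': {'bedroom': {'mac': '00:11:22:33:44:55'}}})

# Rejected: 'mac' is required for every device entry.
try:
    DEVICES_SCHEMA({'devices': {'bedroom': {}}})
except vol.MultipleInvalid as err:
    print('invalid config:', err)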
- For more details about this platform, please refer to the documentation at https://home-assistant.io/components/climate.heatmiser/ """ import logging -from homeassistant.components.climate import ClimateDevice -from homeassistant.const import TEMP_CELSIUS, ATTR_TEMPERATURE +import voluptuous as vol -CONF_IPADDRESS = 'ipaddress' -CONF_PORT = 'port' -CONF_TSTATS = 'tstats' +from homeassistant.components.climate import ClimateDevice, PLATFORM_SCHEMA +from homeassistant.const import ( + TEMP_CELSIUS, ATTR_TEMPERATURE, CONF_PORT, CONF_NAME, CONF_ID) +import homeassistant.helpers.config_validation as cv -REQUIREMENTS = ["heatmiserV3==0.9.1"] +REQUIREMENTS = ['heatmiserV3==0.9.1'] _LOGGER = logging.getLogger(__name__) +CONF_IPADDRESS = 'ipaddress' +CONF_TSTATS = 'tstats' +TSTATS_SCHEMA = vol.Schema({ + vol.Required(CONF_ID): cv.string, + vol.Required(CONF_NAME): cv.string, +}) + +PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({ + vol.Required(CONF_IPADDRESS): cv.string, + vol.Required(CONF_PORT): cv.port, + vol.Required(CONF_TSTATS, default={}): + vol.Schema({cv.string: TSTATS_SCHEMA}), +}) + + +# pylint: disable=unused-variable def setup_platform(hass, config, add_devices, discovery_info=None): """Setup the heatmiser thermostat.""" from heatmiserV3 import heatmiser, connection - ipaddress = str(config[CONF_IPADDRESS]) - port = str(config[CONF_PORT]) - - if ipaddress is None or port is None: - _LOGGER.error("Missing required configuration items %s or %s", - CONF_IPADDRESS, CONF_PORT) - return False + ipaddress = config.get(CONF_IPADDRESS) + port = str(config.get(CONF_PORT)) + tstats = config.get(CONF_TSTATS) serport = connection.connection(ipaddress, port) serport.open() - tstats = [] - if CONF_TSTATS in config: - tstats = config[CONF_TSTATS] - - if tstats is None: - _LOGGER.error("No thermostats configured.") - return False - - for tstat in tstats: + for thermostat, tstat in tstats.items(): add_devices([ HeatmiserV3Thermostat( - heatmiser, - tstat.get("id"), - tstat.get("name"), - serport) + heatmiser, tstat.get(CONF_ID), tstat.get(CONF_NAME), serport) ]) return @@ -69,7 +67,7 @@ class HeatmiserV3Thermostat(ClimateDevice): self._id = device self.dcb = None self.update() - self._target_temperature = int(self.dcb.get("roomset")) + self._target_temperature = int(self.dcb.get('roomset')) @property def name(self): @@ -77,7 +75,7 @@ class HeatmiserV3Thermostat(ClimateDevice): return self._name @property - def unit_of_measurement(self): + def temperature_unit(self): """Return the unit of measurement which this thermostat uses.""" return TEMP_CELSIUS @@ -85,9 +83,9 @@ class HeatmiserV3Thermostat(ClimateDevice): def current_temperature(self): """Return the current temperature.""" if self.dcb is not None: - low = self.dcb.get("floortemplow ") - high = self.dcb.get("floortemphigh") - temp = (high*256 + low)/10.0 + low = self.dcb.get('floortemplow ') + high = self.dcb.get('floortemphigh') + temp = (high * 256 + low) / 10.0 self._current_temperature = temp else: self._current_temperature = None diff --git a/homeassistant/components/climate/homematic.py b/homeassistant/components/climate/homematic.py index c9901c40aea..7113779eb57 100644 --- a/homeassistant/components/climate/homematic.py +++ b/homeassistant/components/climate/homematic.py @@ -41,7 +41,7 @@ class HMThermostat(homematic.HMDevice, ClimateDevice): """Representation of a Homematic thermostat.""" @property - def unit_of_measurement(self): + def temperature_unit(self): """Return the unit of measurement that is used.""" return TEMP_CELSIUS diff --git 
a/homeassistant/components/climate/honeywell.py b/homeassistant/components/climate/honeywell.py index fb7b2887344..3af4f62246d 100644 --- a/homeassistant/components/climate/honeywell.py +++ b/homeassistant/components/climate/honeywell.py @@ -120,7 +120,7 @@ class RoundThermostat(ClimateDevice): return self._name @property - def unit_of_measurement(self): + def temperature_unit(self): """Return the unit of measurement.""" return TEMP_CELSIUS @@ -217,7 +217,7 @@ class HoneywellUSThermostat(ClimateDevice): return self._device.name @property - def unit_of_measurement(self): + def temperature_unit(self): """Return the unit of measurement.""" return (TEMP_CELSIUS if self._device.temperature_unit == 'C' else TEMP_FAHRENHEIT) diff --git a/homeassistant/components/climate/knx.py b/homeassistant/components/climate/knx.py index 5ea932ab8f5..ef7445c35fd 100644 --- a/homeassistant/components/climate/knx.py +++ b/homeassistant/components/climate/knx.py @@ -63,7 +63,7 @@ class KNXThermostat(KNXMultiAddressDevice, ClimateDevice): return True @property - def unit_of_measurement(self): + def temperature_unit(self): """Return the unit of measurement.""" return self._unit_of_measurement diff --git a/homeassistant/components/climate/mysensors.py b/homeassistant/components/climate/mysensors.py index 9b997954889..c93a69ac0b9 100755 --- a/homeassistant/components/climate/mysensors.py +++ b/homeassistant/components/climate/mysensors.py @@ -47,7 +47,7 @@ class MySensorsHVAC(mysensors.MySensorsDeviceEntity, ClimateDevice): return self.gateway.optimistic @property - def unit_of_measurement(self): + def temperature_unit(self): """Return the unit of measurement.""" return (TEMP_CELSIUS if self.gateway.metric else TEMP_FAHRENHEIT) diff --git a/homeassistant/components/climate/nest.py b/homeassistant/components/climate/nest.py index 36be2ca25f5..fb7e4b7ec11 100644 --- a/homeassistant/components/climate/nest.py +++ b/homeassistant/components/climate/nest.py @@ -40,8 +40,19 @@ class NestThermostat(ClimateDevice): self.structure = structure self.device = device self._fan_list = [STATE_ON, STATE_AUTO] - self._operation_list = [STATE_HEAT, STATE_COOL, STATE_AUTO, - STATE_OFF] + + # Not all nest devices support cooling and heating remove unused + self._operation_list = [STATE_OFF] + + # Add supported nest thermostat features + if self.device.can_heat: + self._operation_list.append(STATE_HEAT) + + if self.device.can_cool: + self._operation_list.append(STATE_COOL) + + if self.device.can_heat and self.device.can_cool: + self._operation_list.append(STATE_AUTO) @property def name(self): @@ -57,18 +68,22 @@ class NestThermostat(ClimateDevice): return location.capitalize() + '(' + name + ')' @property - def unit_of_measurement(self): + def temperature_unit(self): """Return the unit of measurement.""" return TEMP_CELSIUS @property def device_state_attributes(self): """Return the device specific state attributes.""" - # Move these to Thermostat Device and make them global - return { - "humidity": self.device.humidity, - "target_humidity": self.device.target_humidity, - } + if self.device.has_humidifier or self.device.has_dehumidifier: + # Move these to Thermostat Device and make them global + return { + "humidity": self.device.humidity, + "target_humidity": self.device.target_humidity, + } + else: + # No way to control humidity not show setting + return {} @property def current_temperature(self): @@ -164,7 +179,12 @@ class NestThermostat(ClimateDevice): @property def current_fan_mode(self): """Return whether the fan is on.""" - return 
STATE_ON if self.device.fan else STATE_AUTO + if self.device.has_fan: + # Return whether the fan is on + return STATE_ON if self.device.fan else STATE_AUTO + else: + # No fan available, so disable the slider + return None + @property + def fan_list(self): diff --git a/homeassistant/components/climate/netatmo.py b/homeassistant/components/climate/netatmo.py new file mode 100755 index 00000000000..b0a5059ef44 --- /dev/null +++ b/homeassistant/components/climate/netatmo.py @@ -0,0 +1,178 @@ +""" +Support for Netatmo Smart Thermostat. + +For more details about this platform, please refer to the documentation at +https://home-assistant.io/components/climate.netatmo/ +""" +import logging +from datetime import timedelta +import voluptuous as vol + +from homeassistant.const import TEMP_CELSIUS, ATTR_TEMPERATURE +from homeassistant.components.climate import ( + STATE_HEAT, STATE_IDLE, ClimateDevice, PLATFORM_SCHEMA) +from homeassistant.util import Throttle +from homeassistant.loader import get_component +import homeassistant.helpers.config_validation as cv + +DEPENDENCIES = ['netatmo'] + +_LOGGER = logging.getLogger(__name__) + +CONF_RELAY = 'relay' +CONF_THERMOSTAT = 'thermostat' + +DEFAULT_AWAY_TEMPERATURE = 14 +# The default offset is 2 hours (when you use the thermostat itself) +DEFAULT_TIME_OFFSET = 7200 +# Return cached results if last scan was less than this time ago +# NetAtmo data is uploaded to the server every hour +MIN_TIME_BETWEEN_UPDATES = timedelta(seconds=300) + +PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({ + vol.Optional(CONF_RELAY): cv.string, + vol.Optional(CONF_THERMOSTAT, default=[]): + vol.All(cv.ensure_list, [cv.string]), +}) + + +def setup_platform(hass, config, add_callback_devices, discovery_info=None): + """Setup the NetAtmo Thermostat.""" + netatmo = get_component('netatmo') + device = config.get(CONF_RELAY) + + import lnetatmo + try: + data = ThermostatData(netatmo.NETATMO_AUTH, device) + for module_name in data.get_module_names(): + if CONF_THERMOSTAT in config: + if config[CONF_THERMOSTAT] != [] and \ + module_name not in config[CONF_THERMOSTAT]: + continue + add_callback_devices([NetatmoThermostat(data, module_name)]) + except lnetatmo.NoDevice: + return None + + +# pylint: disable=abstract-method +class NetatmoThermostat(ClimateDevice): + """Representation of a Netatmo thermostat.""" + + def __init__(self, data, module_name, away_temp=None): + """Initialize the thermostat.""" + self._data = data + self._state = None + self._name = module_name + self._target_temperature = None + self._away = None + self.update() + + @property + def name(self): + """Return the name of the thermostat.""" + return self._name + + @property + def state(self): + """Return the state of the device.""" + return self._target_temperature + + @property + def temperature_unit(self): + """Return the unit of measurement.""" + return TEMP_CELSIUS + + @property + def current_temperature(self): + """Return the current temperature.""" + return self._data.current_temperature + + @property + def target_temperature(self): + """Return the temperature we try to reach.""" + return self._target_temperature + + @property + def current_operation(self): + """Return the current state of the thermostat.""" + state = self._data.thermostatdata.relay_cmd + if state == 0: + return STATE_IDLE + elif state == 100: + return STATE_HEAT + + @property + def is_away_mode_on(self): + """Return true if away mode is on.""" + return self._away + + def turn_away_mode_on(self): + """Turn away on.""" + mode = "away" + temp = None + 
self._data.thermostatdata.setthermpoint(mode, temp, endTimeOffset=None) + self._away = True + self.update_ha_state() + + def turn_away_mode_off(self): + """Turn away off.""" + mode = "program" + temp = None + self._data.thermostatdata.setthermpoint(mode, temp, endTimeOffset=None) + self._away = False + self.update_ha_state() + + def set_temperature(self, endTimeOffset=DEFAULT_TIME_OFFSET, **kwargs): + """Set new target temperature for 2 hours.""" + temperature = kwargs.get(ATTR_TEMPERATURE) + if temperature is None: + return + mode = "manual" + self._data.thermostatdata.setthermpoint( + mode, temperature, endTimeOffset) + self._target_temperature = temperature + self._away = False + self.update_ha_state() + + @Throttle(MIN_TIME_BETWEEN_UPDATES) + def update(self): + """Get the latest data from NetAtmo API and updates the states.""" + self._data.update() + self._target_temperature = self._data.thermostatdata.setpoint_temp + self._away = self._data.setpoint_mode == 'away' + + +class ThermostatData(object): + """Get the latest data from Netatmo.""" + + def __init__(self, auth, device=None): + """Initialize the data object.""" + self.auth = auth + self.thermostatdata = None + self.module_names = [] + self.device = device + self.current_temperature = None + self.target_temperature = None + self.setpoint_mode = None + # self.operation = + + def get_module_names(self): + """Return all module available on the API as a list.""" + self.update() + if not self.device: + for device in self.thermostatdata.modules: + for module in self.thermostatdata.modules[device].values(): + self.module_names.append(module['module_name']) + else: + for module in self.thermostatdata.modules[self.device].values(): + self.module_names.append(module['module_name']) + return self.module_names + + @Throttle(MIN_TIME_BETWEEN_UPDATES) + def update(self): + """Call the NetAtmo API to update the data.""" + import lnetatmo + self.thermostatdata = lnetatmo.ThermostatData(self.auth) + self.target_temperature = self.thermostatdata.setpoint_temp + self.setpoint_mode = self.thermostatdata.setpoint_mode + self.current_temperature = self.thermostatdata.temp diff --git a/homeassistant/components/climate/proliphix.py b/homeassistant/components/climate/proliphix.py index da5f5918d7c..6aeee6e537c 100644 --- a/homeassistant/components/climate/proliphix.py +++ b/homeassistant/components/climate/proliphix.py @@ -12,7 +12,7 @@ from homeassistant.const import ( CONF_HOST, CONF_PASSWORD, CONF_USERNAME, TEMP_FAHRENHEIT, ATTR_TEMPERATURE) import homeassistant.helpers.config_validation as cv -REQUIREMENTS = ['proliphix==0.3.1'] +REQUIREMENTS = ['proliphix==0.4.0'] ATTR_FAN = 'fan' @@ -69,7 +69,7 @@ class ProliphixThermostat(ClimateDevice): } @property - def unit_of_measurement(self): + def temperature_unit(self): """Return the unit of measurement.""" return TEMP_FAHRENHEIT diff --git a/homeassistant/components/climate/radiotherm.py b/homeassistant/components/climate/radiotherm.py index 6af0e96045c..74778682540 100644 --- a/homeassistant/components/climate/radiotherm.py +++ b/homeassistant/components/climate/radiotherm.py @@ -81,7 +81,7 @@ class RadioThermostat(ClimateDevice): return self._name @property - def unit_of_measurement(self): + def temperature_unit(self): """Return the unit of measurement.""" return TEMP_FAHRENHEIT diff --git a/homeassistant/components/climate/vera.py b/homeassistant/components/climate/vera.py index 26d81e2b510..447d2e4f720 100644 --- a/homeassistant/components/climate/vera.py +++ 
b/homeassistant/components/climate/vera.py @@ -93,7 +93,7 @@ class VeraThermostat(VeraDevice, ClimateDevice): self._state = self.vera_device.get_hvac_mode() @property - def unit_of_measurement(self): + def temperature_unit(self): """Return the unit of measurement.""" return TEMP_FAHRENHEIT diff --git a/homeassistant/components/climate/zwave.py b/homeassistant/components/climate/zwave.py index 767bed6ff94..3e12d4c6006 100755 --- a/homeassistant/components/climate/zwave.py +++ b/homeassistant/components/climate/zwave.py @@ -209,7 +209,7 @@ class ZWaveClimate(ZWaveDeviceEntity, ClimateDevice): return self._swing_list @property - def unit_of_measurement(self): + def temperature_unit(self): """Return the unit of measurement.""" if self._unit == 'C': return TEMP_CELSIUS diff --git a/homeassistant/components/demo.py b/homeassistant/components/demo.py index a2eb40e21e8..9f3042320c9 100644 --- a/homeassistant/components/demo.py +++ b/homeassistant/components/demo.py @@ -67,31 +67,33 @@ def setup(hass, config): lights = sorted(hass.states.entity_ids('light')) switches = sorted(hass.states.entity_ids('switch')) media_players = sorted(hass.states.entity_ids('media_player')) - group.Group(hass, 'living room', [ + + group.Group.create_group(hass, 'living room', [ lights[1], switches[0], 'input_select.living_room_preset', 'rollershutter.living_room_window', media_players[1], 'scene.romantic_lights']) - group.Group(hass, 'bedroom', [ + group.Group.create_group(hass, 'bedroom', [ lights[0], switches[1], media_players[0], 'input_slider.noise_allowance']) - group.Group(hass, 'kitchen', [ + group.Group.create_group(hass, 'kitchen', [ lights[2], 'rollershutter.kitchen_window', 'lock.kitchen_door']) - group.Group(hass, 'doors', [ + group.Group.create_group(hass, 'doors', [ 'lock.front_door', 'lock.kitchen_door', 'garage_door.right_garage_door', 'garage_door.left_garage_door']) - group.Group(hass, 'automations', [ + group.Group.create_group(hass, 'automations', [ 'input_select.who_cooks', 'input_boolean.notify', ]) - group.Group(hass, 'people', [ + group.Group.create_group(hass, 'people', [ 'device_tracker.demo_anne_therese', 'device_tracker.demo_home_boy', 'device_tracker.demo_paulus']) - group.Group(hass, 'thermostats', [ + group.Group.create_group(hass, 'thermostats', [ 'thermostat.nest', 'thermostat.thermostat']) - group.Group(hass, 'downstairs', [ + group.Group.create_group(hass, 'downstairs', [ 'group.living_room', 'group.kitchen', 'scene.romantic_lights', 'rollershutter.kitchen_window', - 'rollershutter.living_room_window', 'group.doors', 'thermostat.nest', + 'rollershutter.living_room_window', 'group.doors', + 'thermostat.nest', ], view=True) - group.Group(hass, 'Upstairs', [ + group.Group.create_group(hass, 'Upstairs', [ 'thermostat.thermostat', 'group.bedroom', ], view=True) diff --git a/homeassistant/components/device_tracker/__init__.py b/homeassistant/components/device_tracker/__init__.py index 3fa8361a44d..87b628b050b 100644 --- a/homeassistant/components/device_tracker/__init__.py +++ b/homeassistant/components/device_tracker/__init__.py @@ -6,6 +6,7 @@ https://home-assistant.io/components/device_tracker/ """ # pylint: disable=too-many-instance-attributes, too-many-arguments # pylint: disable=too-many-locals +import asyncio from datetime import timedelta import logging import os @@ -13,6 +14,7 @@ import threading from typing import Any, Sequence, Callable import voluptuous as vol +import yaml from homeassistant.bootstrap import ( prepare_setup_platform, log_exception) @@ -25,6 +27,7 @@ from 
homeassistant.helpers.entity import Entity from homeassistant.helpers.typing import GPSType, ConfigType, HomeAssistantType import homeassistant.helpers.config_validation as cv import homeassistant.util as util +from homeassistant.util.async import run_coroutine_threadsafe import homeassistant.util.dt as dt_util from homeassistant.helpers.event import track_utc_time_change @@ -66,15 +69,11 @@ ATTR_ATTRIBUTES = 'attributes' PLATFORM_SCHEMA = cv.PLATFORM_SCHEMA.extend({ vol.Optional(CONF_SCAN_INTERVAL): cv.positive_int, # seconds -}, extra=vol.ALLOW_EXTRA) - -_CONFIG_SCHEMA = vol.Schema({DOMAIN: vol.All(cv.ensure_list, [ - vol.Schema({ - vol.Optional(CONF_TRACK_NEW, default=DEFAULT_TRACK_NEW): cv.boolean, - vol.Optional( - CONF_CONSIDER_HOME, default=timedelta(seconds=180)): vol.All( - cv.time_period, cv.positive_timedelta) - }, extra=vol.ALLOW_EXTRA)])}, extra=vol.ALLOW_EXTRA) + vol.Optional(CONF_TRACK_NEW, default=DEFAULT_TRACK_NEW): cv.boolean, + vol.Optional(CONF_CONSIDER_HOME, + default=timedelta(seconds=DEFAULT_CONSIDER_HOME)): vol.All( + cv.time_period, cv.positive_timedelta) +}) DISCOVERY_PLATFORMS = { SERVICE_NETGEAR: 'netgear', @@ -114,7 +113,7 @@ def setup(hass: HomeAssistantType, config: ConfigType): yaml_path = hass.config.path(YAML_DEVICES) try: - conf = _CONFIG_SCHEMA(config).get(DOMAIN, []) + conf = config.get(DOMAIN, []) except vol.Invalid as ex: log_exception(ex, DOMAIN, config) return False @@ -252,9 +251,18 @@ class DeviceTracker(object): def setup_group(self): """Initialize group for all tracked devices.""" + run_coroutine_threadsafe( + self.async_setup_group(), self.hass.loop).result() + + @asyncio.coroutine + def async_setup_group(self): + """Initialize group for all tracked devices. + + This method must be run in the event loop. + """ entity_ids = (dev.entity_id for dev in self.devices.values() if dev.track) - self.group = group.Group( + self.group = yield from group.Group.async_create_group( self.hass, GROUP_NAME_ALL_DEVICES, entity_ids, False) def update_stale(self, now: dt_util.dt.datetime): @@ -413,7 +421,12 @@ def load_config(path: str, hass: HomeAssistantType, consider_home: timedelta): }) try: result = [] - devices = load_yaml_config_file(path) + try: + devices = load_yaml_config_file(path) + except HomeAssistantError as err: + _LOGGER.error('Unable to load %s: %s', path, str(err)) + return [] + for dev_id, device in devices.items(): try: device = dev_schema(device) @@ -456,14 +469,15 @@ def update_config(path: str, dev_id: str, device: Device): """Add device to YAML configuration file.""" with open(path, 'a') as out: out.write('\n') - out.write('{}:\n'.format(device.dev_id)) - for key, value in (('name', device.name), ('mac', device.mac), - ('picture', device.config_picture), - ('track', 'yes' if device.track else 'no'), - (CONF_AWAY_HIDE, - 'yes' if device.away_hide else 'no')): - out.write(' {}: {}\n'.format(key, '' if value is None else value)) + device = {device.dev_id: { + 'name': device.name, + 'mac': device.mac, + 'picture': device.config_picture, + 'track': device.track, + CONF_AWAY_HIDE: device.away_hide + }} + yaml.dump(device, out, default_flow_style=False) def get_gravatar_for_email(email: str): diff --git a/homeassistant/components/device_tracker/bbox.py b/homeassistant/components/device_tracker/bbox.py new file mode 100644 index 00000000000..c851b622592 --- /dev/null +++ b/homeassistant/components/device_tracker/bbox.py @@ -0,0 +1,82 @@ +""" +Support for French FAI Bouygues Bbox routers. 
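The device_tracker update_config change above stops hand-formatting known_devices.yaml entries and hands a nested dict to yaml.dump. A quick illustration of the resulting block; the device values are made-up examples and CONF_AWAY_HIDE is assumed to map to the usual hide_if_away key:

import yaml

entry = {'my_phone': {
    'name': 'My Phone',
    'mac': 'AA:BB:CC:DD:EE:FF',
    'picture': None,
    'track': True,
    'hide_if_away': False,
}}
print(yaml.dump(entry, default_flow_style=False))
# my_phone:
#   hide_if_away: false
#   mac: AA:BB:CC:DD:EE:FF
#   name: My Phone
#   picture: null
#   track: true

Booleans and None are now written as native YAML values instead of the previous hand-built 'yes'/'no' and empty strings.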
+ +For more details about this platform, please refer to the documentation at +https://home-assistant.io/components/device_tracker.bbox/ +""" +from collections import namedtuple +import logging +from datetime import timedelta +import homeassistant.util.dt as dt_util +from homeassistant.components.device_tracker import DOMAIN +from homeassistant.util import Throttle + +# Return cached results if last scan was less then this time ago +MIN_TIME_BETWEEN_SCANS = timedelta(seconds=60) + +_LOGGER = logging.getLogger(__name__) +REQUIREMENTS = ['pybbox==0.0.5-alpha'] + + +def get_scanner(hass, config): + """Validate the configuration and return a Bbox scanner.""" + scanner = BboxDeviceScanner(config[DOMAIN]) + + return scanner if scanner.success_init else None + + +Device = namedtuple('Device', ['mac', 'name', 'ip', 'last_update']) + + +class BboxDeviceScanner(object): + """This class scans for devices connected to the bbox.""" + + def __init__(self, config): + """Initialize the scanner.""" + self.last_results = [] # type: List[Device] + + self.success_init = self._update_info() + _LOGGER.info('Bbox scanner initialized') + + def scan_devices(self): + """Scan for new devices and return a list with found device IDs.""" + self._update_info() + + return [device.mac for device in self.last_results] + + def get_device_name(self, mac): + """Return the name of the given device or None if we don't know.""" + filter_named = [device.name for device in self.last_results if + device.mac == mac] + + if filter_named: + return filter_named[0] + else: + return None + + @Throttle(MIN_TIME_BETWEEN_SCANS) + def _update_info(self): + """Check the bbox for devices. + + Returns boolean if scanning successful. + """ + _LOGGER.info('Scanning') + + import pybbox + + box = pybbox.Bbox() + result = box.get_all_connected_devices() + + now = dt_util.now() + last_results = [] + for device in result: + if device['active'] != 1: + continue + last_results.append( + Device(device['macaddress'], device['hostname'], + device['ipaddress'], now)) + + self.last_results = last_results + + _LOGGER.info('Bbox scan successful') + return True diff --git a/homeassistant/components/device_tracker/fritz.py b/homeassistant/components/device_tracker/fritz.py index 0e8ed512072..5832fa425be 100644 --- a/homeassistant/components/device_tracker/fritz.py +++ b/homeassistant/components/device_tracker/fritz.py @@ -79,7 +79,7 @@ class FritzBoxScanner(object): self._update_info() active_hosts = [] for known_host in self.last_results: - if known_host['status'] == '1': + if known_host['status'] == '1' and known_host.get('mac'): active_hosts.append(known_host['mac']) return active_hosts diff --git a/homeassistant/components/device_tracker/mqtt.py b/homeassistant/components/device_tracker/mqtt.py index 2318eb44dd1..f9a85da98b2 100644 --- a/homeassistant/components/device_tracker/mqtt.py +++ b/homeassistant/components/device_tracker/mqtt.py @@ -11,13 +11,14 @@ import voluptuous as vol import homeassistant.components.mqtt as mqtt from homeassistant.const import CONF_DEVICES from homeassistant.components.mqtt import CONF_QOS +from homeassistant.components.device_tracker import PLATFORM_SCHEMA import homeassistant.helpers.config_validation as cv DEPENDENCIES = ['mqtt'] _LOGGER = logging.getLogger(__name__) -PLATFORM_SCHEMA = mqtt.MQTT_BASE_PLATFORM_SCHEMA.extend({ +PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(mqtt.SCHEMA_BASE).extend({ vol.Required(CONF_DEVICES): {cv.string: mqtt.valid_subscribe_topic}, }) diff --git 
a/homeassistant/components/device_tracker/nmap_tracker.py b/homeassistant/components/device_tracker/nmap_tracker.py index fb217e66c48..68155910ffc 100644 --- a/homeassistant/components/device_tracker/nmap_tracker.py +++ b/homeassistant/components/device_tracker/nmap_tracker.py @@ -30,7 +30,7 @@ CONF_EXCLUDE = 'exclude' REQUIREMENTS = ['python-nmap==0.6.1'] PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({ - vol.Required(CONF_HOSTS): cv.string, + vol.Required(CONF_HOSTS): cv.ensure_list, vol.Required(CONF_HOME_INTERVAL, default=0): cv.positive_int, vol.Optional(CONF_EXCLUDE, default=[]): vol.All(cv.ensure_list, vol.Length(min=1)) @@ -120,7 +120,8 @@ class NmapDeviceScanner(object): options += ' --exclude {}'.format(','.join(exclude_hosts)) try: - result = scanner.scan(hosts=self.hosts, arguments=options) + result = scanner.scan(hosts=' '.join(self.hosts), + arguments=options) except PortScannerError: return False diff --git a/homeassistant/components/device_tracker/snmp.py b/homeassistant/components/device_tracker/snmp.py index 56f9eb4aae6..33c89110da0 100644 --- a/homeassistant/components/device_tracker/snmp.py +++ b/homeassistant/components/device_tracker/snmp.py @@ -23,11 +23,17 @@ _LOGGER = logging.getLogger(__name__) REQUIREMENTS = ['pysnmp==4.3.2'] CONF_COMMUNITY = "community" +CONF_AUTHKEY = "authkey" +CONF_PRIVKEY = "privkey" CONF_BASEOID = "baseoid" +DEFAULT_COMMUNITY = "public" + PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({ vol.Required(CONF_HOST): cv.string, - vol.Required(CONF_COMMUNITY): cv.string, + vol.Optional(CONF_COMMUNITY, default=DEFAULT_COMMUNITY): cv.string, + vol.Inclusive(CONF_AUTHKEY, "keys"): cv.string, + vol.Inclusive(CONF_PRIVKEY, "keys"): cv.string, vol.Required(CONF_BASEOID): cv.string }) @@ -43,13 +49,24 @@ def get_scanner(hass, config): class SnmpScanner(object): """Queries any SNMP capable Access Point for connected devices.""" + # pylint: disable=too-many-instance-attributes def __init__(self, config): """Initialize the scanner.""" from pysnmp.entity.rfc3413.oneliner import cmdgen + from pysnmp.entity import config as cfg self.snmp = cmdgen.CommandGenerator() self.host = cmdgen.UdpTransportTarget((config[CONF_HOST], 161)) - self.community = cmdgen.CommunityData(config[CONF_COMMUNITY]) + if CONF_AUTHKEY not in config or CONF_PRIVKEY not in config: + self.auth = cmdgen.CommunityData(config[CONF_COMMUNITY]) + else: + self.auth = cmdgen.UsmUserData( + config[CONF_COMMUNITY], + config[CONF_AUTHKEY], + config[CONF_PRIVKEY], + authProtocol=cfg.usmHMACSHAAuthProtocol, + privProtocol=cfg.usmAesCfb128Protocol + ) self.baseoid = cmdgen.MibVariable(config[CONF_BASEOID]) self.lock = threading.Lock() @@ -95,7 +112,7 @@ class SnmpScanner(object): devices = [] errindication, errstatus, errindex, restable = self.snmp.nextCmd( - self.community, self.host, self.baseoid) + self.auth, self.host, self.baseoid) if errindication: _LOGGER.error("SNMPLIB error: %s", errindication) diff --git a/homeassistant/components/digital_ocean.py b/homeassistant/components/digital_ocean.py index b91ec2672af..b507d9448e5 100644 --- a/homeassistant/components/digital_ocean.py +++ b/homeassistant/components/digital_ocean.py @@ -13,7 +13,7 @@ from homeassistant.const import CONF_ACCESS_TOKEN from homeassistant.util import Throttle import homeassistant.helpers.config_validation as cv -REQUIREMENTS = ['python-digitalocean==1.9.0'] +REQUIREMENTS = ['python-digitalocean==1.10.0'] _LOGGER = logging.getLogger(__name__) @@ -44,7 +44,7 @@ CONFIG_SCHEMA = vol.Schema({ # pylint: 
disable=unused-argument,too-few-public-methods def setup(hass, config): - """Setup the Digital Ocean component.""" + """Set up the Digital Ocean component.""" conf = config[DOMAIN] access_token = conf.get(CONF_ACCESS_TOKEN) diff --git a/homeassistant/components/discovery.py b/homeassistant/components/discovery.py index fa48be04e74..32e1bbd5f6a 100644 --- a/homeassistant/components/discovery.py +++ b/homeassistant/components/discovery.py @@ -14,15 +14,17 @@ import voluptuous as vol from homeassistant.const import EVENT_HOMEASSISTANT_START from homeassistant.helpers.discovery import load_platform, discover -REQUIREMENTS = ['netdisco==0.7.1'] +REQUIREMENTS = ['netdisco==0.7.2'] DOMAIN = 'discovery' SCAN_INTERVAL = 300 # seconds SERVICE_NETGEAR = 'netgear_router' SERVICE_WEMO = 'belkin_wemo' +SERVICE_HASS_IOS_APP = 'hass_ios' SERVICE_HANDLERS = { + SERVICE_HASS_IOS_APP: ('ios', None), SERVICE_NETGEAR: ('device_tracker', None), SERVICE_WEMO: ('wemo', None), 'philips_hue': ('light', 'hue'), diff --git a/homeassistant/components/emoncms_history.py b/homeassistant/components/emoncms_history.py new file mode 100644 index 00000000000..4e07447b027 --- /dev/null +++ b/homeassistant/components/emoncms_history.py @@ -0,0 +1,95 @@ +""" +A component which allows you to send data to Emoncms. + +For more details about this component, please refer to the documentation at +https://home-assistant.io/components/emoncms_history/ +""" +import logging +from datetime import timedelta + +import voluptuous as vol +import requests + +from homeassistant.const import ( + CONF_API_KEY, CONF_WHITELIST, + CONF_URL, STATE_UNKNOWN, + STATE_UNAVAILABLE, + CONF_SCAN_INTERVAL) +import homeassistant.helpers.config_validation as cv +from homeassistant.helpers import state as state_helper +from homeassistant.helpers.event import track_point_in_time +from homeassistant.util import dt as dt_util + +_LOGGER = logging.getLogger(__name__) + +DOMAIN = "emoncms_history" +CONF_INPUTNODE = "inputnode" + +CONFIG_SCHEMA = vol.Schema({ + DOMAIN: vol.Schema({ + vol.Required(CONF_API_KEY): cv.string, + vol.Required(CONF_URL): cv.string, + vol.Required(CONF_INPUTNODE): cv.positive_int, + vol.Required(CONF_WHITELIST): cv.entity_ids, + vol.Optional(CONF_SCAN_INTERVAL, default=30): cv.positive_int, + }), +}, extra=vol.ALLOW_EXTRA) + + +def setup(hass, config): + """Setup the emoncms_history component.""" + conf = config[DOMAIN] + whitelist = conf.get(CONF_WHITELIST) + + def send_data(url, apikey, node, payload): + """Send payload data to emoncms.""" + try: + fullurl = "{}/input/post.json".format(url) + req = requests.post(fullurl, + params={"node": node}, + data={"apikey": apikey, + "data": payload}, + allow_redirects=True, + timeout=5) + + except requests.exceptions.RequestException: + _LOGGER.error("Error saving data '%s' to '%s'", + payload, fullurl) + + else: + if req.status_code != 200: + _LOGGER.error("Error saving data '%s' to '%s'" + + "(http status code = %d)", payload, + fullurl, req.status_code) + + def update_emoncms(time): + """Send whitelisted entities states reguarly to emoncms.""" + payload_dict = {} + + for entity_id in whitelist: + state = hass.states.get(entity_id) + + if state is None or state.state in ( + STATE_UNKNOWN, "", STATE_UNAVAILABLE): + continue + + try: + payload_dict[entity_id] = state_helper.state_as_number( + state) + except ValueError: + continue + + if len(payload_dict) > 0: + payload = "{%s}" % ",".join("{}:{}".format(key, val) + for key, val in + payload_dict.items()) + + send_data(conf.get(CONF_URL), 
conf.get(CONF_API_KEY), + str(conf.get(CONF_INPUTNODE)), payload) + + track_point_in_time(hass, update_emoncms, time + + timedelta(seconds=conf.get( + CONF_SCAN_INTERVAL))) + + update_emoncms(dt_util.utcnow()) + return True diff --git a/homeassistant/components/emulated_hue.py b/homeassistant/components/emulated_hue.py index 88a5ff58fe5..a63117fc31b 100644 --- a/homeassistant/components/emulated_hue.py +++ b/homeassistant/components/emulated_hue.py @@ -77,7 +77,7 @@ def setup(hass, yaml_config): ssl_certificate=None, ssl_key=None, cors_origins=[], - approved_ips=[] + trusted_networks=[] ) server.register_view(DescriptionXmlView(hass, config)) diff --git a/homeassistant/components/frontend/__init__.py b/homeassistant/components/frontend/__init__.py index ab967fb114f..2d9abe8fe33 100644 --- a/homeassistant/components/frontend/__init__.py +++ b/homeassistant/components/frontend/__init__.py @@ -211,8 +211,14 @@ class IndexView(HomeAssistantView): panel_url = PANELS[panel]['url'] if panel != 'states' else '' - # auto login if no password was set - no_auth = 'false' if self.hass.config.api.api_password else 'true' + no_auth = 'true' + if self.hass.config.api.api_password: + # require password if set + no_auth = 'false' + if self.hass.wsgi.is_trusted_ip( + self.hass.wsgi.get_real_ip(request)): + # bypass for trusted networks + no_auth = 'true' icons_url = '/static/mdi-{}.html'.format(FINGERPRINTS['mdi.html']) template = self.templates.get_template('index.html') diff --git a/homeassistant/components/frontend/version.py b/homeassistant/components/frontend/version.py index 2c8b0cc8bed..1c437dedd5d 100644 --- a/homeassistant/components/frontend/version.py +++ b/homeassistant/components/frontend/version.py @@ -1,16 +1,17 @@ """DO NOT MODIFY. Auto-generated by script/fingerprint_frontend.""" FINGERPRINTS = { - "core.js": "9b3e5ab4eac7e3b074e0daf3f619a638", - "frontend.html": "5854807d361de26fe93ad474010f19d2", + "core.js": "5ed5e063d66eb252b5b288738c9c2d16", + "frontend.html": "0a4c2c6e86a0a78c2ff3e03842de609d", "mdi.html": "46a76f877ac9848899b8ed382427c16f", + "micromarkdown-js.html": "93b5ec4016f0bba585521cf4d18dec1a", "panels/ha-panel-dev-event.html": "550bf85345c454274a40d15b2795a002", "panels/ha-panel-dev-info.html": "ec613406ce7e20d93754233d55625c8a", - "panels/ha-panel-dev-service.html": "c7974458ebc33412d95497e99b785e12", - "panels/ha-panel-dev-state.html": "4be627b74e683af14ef779d8203ec674", + "panels/ha-panel-dev-service.html": "d33657c964041d3ebf114e90a922a15e", + "panels/ha-panel-dev-state.html": "65e5f791cc467561719bf591f1386054", "panels/ha-panel-dev-template.html": "d23943fa0370f168714da407c90091a2", "panels/ha-panel-history.html": "efe1bcdd7733b09e55f4f965d171c295", "panels/ha-panel-iframe.html": "d920f0aa3c903680f2f8795e2255daab", "panels/ha-panel-logbook.html": "66108d82763359a218c9695f0553de40", - "panels/ha-panel-map.html": "af7d04aff7dd5479c5a0016bc8d4dd7d" + "panels/ha-panel-map.html": "49ab2d6f180f8bdea7cffaa66b8a5d3e" } diff --git a/homeassistant/components/frontend/www_static/core.js b/homeassistant/components/frontend/www_static/core.js index 862449055fb..a07e5819489 100644 --- a/homeassistant/components/frontend/www_static/core.js +++ b/homeassistant/components/frontend/www_static/core.js @@ -1,4 +1,4 @@ -!(function(){"use strict";function t(t){return t&&t.__esModule?t.default:t}function e(t,e){return e={exports:{}},t(e,e.exports),e.exports}function n(t,e){var n=e.authToken,r=e.host;return Ne({authToken:n,host:r,isValidating:!0,isInvalid:!1,errorMessage:""})}function 
r(){return ke.getInitialState()}function i(t,e){var n=e.errorMessage;return t.withMutations((function(t){return t.set("isValidating",!1).set("isInvalid",!0).set("errorMessage",n)}))}function o(t,e){var n=e.authToken,r=e.host;return Pe({authToken:n,host:r})}function u(){return He.getInitialState()}function a(t,e){var n=e.rememberAuth;return n}function s(t){return t.withMutations((function(t){t.set("isStreaming",!0).set("useStreaming",!0).set("hasError",!1)}))}function c(t){return t.withMutations((function(t){t.set("isStreaming",!1).set("useStreaming",!1).set("hasError",!1)}))}function f(t){return t.withMutations((function(t){t.set("isStreaming",!1).set("hasError",!0)}))}function h(){return Ye.getInitialState()}function l(t,e){var n=e.model,r=e.result,i=e.params,o=n.entity;if(!r)return t;var u=i.replace?tn({}):t.get(o),a=Array.isArray(r)?r:[r],s=n.fromJSON||tn;return t.set(o,u.withMutations((function(t){for(var e=0;e199&&u.status<300?t(e):n(e)},u.onerror=function(){return n({})},r?(u.setRequestHeader("Content-Type","application/json;charset=UTF-8"),u.send(JSON.stringify(r))):u.send()})}function C(t,e){var n=e.message;return t.set(t.size,n)}function z(){return zn.getInitialState()}function R(t,e){t.dispatch(An.NOTIFICATION_CREATED,{message:e})}function M(t){t.registerStores({notifications:zn})}function L(t,e){if("lock"===t)return!0;if("garage_door"===t)return!0;var n=e.get(t);return!!n&&n.services.has("turn_on")}function j(t,e){return!!t&&("group"===t.domain?"on"===t.state||"off"===t.state:L(t.domain,e))}function N(t,e){return[rr(t),function(t){return!!t&&t.services.has(e)}]}function k(t){return[wn.byId(t),nr,j]}function U(t,e,n){function r(){var c=(new Date).getTime()-a;c0?i=setTimeout(r,e-c):(i=null,n||(s=t.apply(u,o),i||(u=o=null)))}var i,o,u,a,s;null==e&&(e=100);var c=function(){u=this,o=arguments,a=(new Date).getTime();var c=n&&!i;return i||(i=setTimeout(r,e)),c&&(s=t.apply(u,o),u=o=null),s};return c.clear=function(){i&&(clearTimeout(i),i=null)},c}function P(t,e){var n=e.component;return t.push(n)}function H(t,e){var n=e.components;return dr(n)}function x(){return vr.getInitialState()}function V(t,e){var n=e.latitude,r=e.longitude,i=e.location_name,o=e.unit_system,u=e.time_zone,a=e.config_dir,s=e.version;return Sr({latitude:n,longitude:r,location_name:i,unit_system:o,time_zone:u,config_dir:a,serverVersion:s})}function q(){return gr.getInitialState()}function F(t,e){t.dispatch(pr.SERVER_CONFIG_LOADED,e)}function G(t){ln(t,"GET","config").then((function(e){return F(t,e)}))}function K(t,e){t.dispatch(pr.COMPONENT_LOADED,{component:e})}function Y(t){return[["serverComponent"],function(e){return e.contains(t)}]}function B(t){t.registerStores({serverComponent:vr,serverConfig:gr})}function J(t,e){var n=e.pane;return n}function W(){return Rr.getInitialState()}function X(t,e){var n=e.panels;return Lr(n)}function Q(){return jr.getInitialState()}function Z(t,e){var n=e.show;return!!n}function $(){return kr.getInitialState()}function tt(t,e){t.dispatch(Cr.SHOW_SIDEBAR,{show:e})}function et(t,e){t.dispatch(Cr.NAVIGATE,{pane:e})}function nt(t,e){t.dispatch(Cr.PANELS_LOADED,{panels:e})}function rt(t,e){var n=e.entityId;return n}function it(){return Kr.getInitialState()}function ot(t,e){t.dispatch(Fr.SELECT_ENTITY,{entityId:e})}function ut(t){t.dispatch(Fr.SELECT_ENTITY,{entityId:null})}function at(t){return!t||(new Date).getTime()-t>6e4}function st(t,e){var n=e.date;return n.toISOString()}function ct(){return Wr.getInitialState()}function ft(t,e){var n=e.date,r=e.stateHistory;return 
0===r.length?t.set(n,Qr({})):t.withMutations((function(t){r.forEach((function(e){return t.setIn([n,e[0].entity_id],Qr(e.map(yn.fromJSON)))}))}))}function ht(){return Zr.getInitialState()}function lt(t,e){var n=e.stateHistory;return t.withMutations((function(t){n.forEach((function(e){return t.set(e[0].entity_id,ni(e.map(yn.fromJSON)))}))}))}function pt(){return ri.getInitialState()}function _t(t,e){var n=e.stateHistory,r=(new Date).getTime();return t.withMutations((function(t){n.forEach((function(e){return t.set(e[0].entity_id,r)})),history.length>1&&t.set(ui,r)}))}function dt(){return ai.getInitialState()}function vt(t,e){t.dispatch(Br.ENTITY_HISTORY_DATE_SELECTED,{date:e})}function yt(t,e){void 0===e&&(e=null),t.dispatch(Br.RECENT_ENTITY_HISTORY_FETCH_START,{});var n="history/period";return null!==e&&(n+="?filter_entity_id="+e),ln(t,"GET",n).then((function(e){return t.dispatch(Br.RECENT_ENTITY_HISTORY_FETCH_SUCCESS,{stateHistory:e})}),(function(){return t.dispatch(Br.RECENT_ENTITY_HISTORY_FETCH_ERROR,{})}))}function St(t,e){return t.dispatch(Br.ENTITY_HISTORY_FETCH_START,{date:e}),ln(t,"GET","history/period/"+e).then((function(n){return t.dispatch(Br.ENTITY_HISTORY_FETCH_SUCCESS,{date:e,stateHistory:n})}),(function(){return t.dispatch(Br.ENTITY_HISTORY_FETCH_ERROR,{})}))}function gt(t){var e=t.evaluate(fi);return St(t,e)}function mt(t){t.registerStores({currentEntityHistoryDate:Wr,entityHistory:Zr,isLoadingEntityHistory:ti,recentEntityHistory:ri,recentEntityHistoryUpdated:ai})}function Et(t){t.registerStores({moreInfoEntityId:Kr})}function It(t,e){var n=e.model,r=e.result,i=e.params;if(null===t||"entity"!==n.entity||!i.replace)return t;for(var o=0;oau}function se(t){t.registerStores({currentLogbookDate:Bo,isLoadingLogbookEntries:Wo,logbookEntries:eu,logbookEntriesUpdated:iu})}function ce(t){return t.set("active",!0)}function fe(t){return t.set("active",!1)}function he(){return gu.getInitialState()}function le(t){return navigator.serviceWorker.getRegistration().then((function(t){if(!t)throw new Error("No service worker registered.");return t.pushManager.subscribe({userVisibleOnly:!0})})).then((function(e){var n;return n=navigator.userAgent.toLowerCase().indexOf("firefox")>-1?"firefox":"chrome",ln(t,"POST","notify.html5",{subscription:e,browser:n}).then((function(){return t.dispatch(vu.PUSH_NOTIFICATIONS_SUBSCRIBE,{})})).then((function(){return!0}))})).catch((function(e){var n;return n=e.message&&e.message.indexOf("gcm_sender_id")!==-1?"Please setup the notify.html5 platform.":"Notification registration failed.",console.error(e),Nn.createNotification(t,n),!1}))}function pe(t){return navigator.serviceWorker.getRegistration().then((function(t){if(!t)throw new Error("No service worker registered");return t.pushManager.subscribe({userVisibleOnly:!0})})).then((function(e){return ln(t,"DELETE","notify.html5",{subscription:e}).then((function(){return e.unsubscribe()})).then((function(){return t.dispatch(vu.PUSH_NOTIFICATIONS_UNSUBSCRIBE,{})})).then((function(){return!0}))})).catch((function(e){var n="Failed unsubscribing for push notifications.";return console.error(e),Nn.createNotification(t,n),!1}))}function _e(t){t.registerStores({pushNotifications:gu})}function de(t,e){return ln(t,"POST","template",{template:e})}function ve(t){return t.set("isListening",!0)}function ye(t,e){var n=e.interimTranscript,r=e.finalTranscript;return t.withMutations((function(t){return t.set("isListening",!0).set("isTransmitting",!1).set("interimTranscript",n).set("finalTranscript",r)}))}function Se(t,e){var 
n=e.finalTranscript;return t.withMutations((function(t){return t.set("isListening",!1).set("isTransmitting",!0).set("interimTranscript","").set("finalTranscript",n)}))}function ge(){return Nu.getInitialState()}function me(){return Nu.getInitialState()}function Ee(){return Nu.getInitialState()}function Ie(t){return ku[t.hassId]}function be(t){var e=Ie(t);if(e){var n=e.finalTranscript||e.interimTranscript;t.dispatch(Mu.VOICE_TRANSMITTING,{finalTranscript:n}),ur.callService(t,"conversation","process",{text:n}).then((function(){t.dispatch(Mu.VOICE_DONE)}),(function(){t.dispatch(Mu.VOICE_ERROR)}))}}function Oe(t){var e=Ie(t);e&&(e.recognition.stop(),ku[t.hassId]=!1)}function we(t){be(t),Oe(t)}function Te(t){var e=we.bind(null,t);e();var n=new webkitSpeechRecognition;ku[t.hassId]={recognition:n,interimTranscript:"",finalTranscript:""},n.interimResults=!0,n.onstart=function(){return t.dispatch(Mu.VOICE_START)},n.onerror=function(){return t.dispatch(Mu.VOICE_ERROR)},n.onend=e,n.onresult=function(e){var n=Ie(t);if(n){for(var r="",i="",o=e.resultIndex;o=n)}function c(t,e){return h(t,e,0)}function f(t,e){return h(t,e,e)}function h(t,e,n){return void 0===t?n:t<0?Math.max(0,e+t):void 0===e?t:Math.min(e,t)}function l(t){return v(t)?t:C(t)}function p(t){return y(t)?t:z(t)}function _(t){return S(t)?t:R(t)}function d(t){return v(t)&&!g(t)?t:M(t)}function v(t){return!(!t||!t[dn])}function y(t){return!(!t||!t[vn])}function S(t){return!(!t||!t[yn])}function g(t){return y(t)||S(t)}function m(t){return!(!t||!t[Sn])}function E(t){this.next=t}function I(t,e,n,r){var i=0===t?e:1===t?n:[e,n];return r?r.value=i:r={value:i,done:!1},r}function b(){return{value:void 0,done:!0}}function O(t){return!!A(t)}function w(t){return t&&"function"==typeof t.next}function T(t){var e=A(t);return e&&e.call(t)}function A(t){var e=t&&(In&&t[In]||t[bn]);if("function"==typeof e)return e}function D(t){return t&&"number"==typeof t.length}function C(t){return null===t||void 0===t?P():v(t)?t.toSeq():V(t)}function z(t){return null===t||void 0===t?P().toKeyedSeq():v(t)?y(t)?t.toSeq():t.fromEntrySeq():H(t)}function R(t){return null===t||void 0===t?P():v(t)?y(t)?t.entrySeq():t.toIndexedSeq():x(t)}function M(t){return(null===t||void 0===t?P():v(t)?y(t)?t.entrySeq():t:x(t)).toSetSeq()}function L(t){this._array=t,this.size=t.length}function j(t){var e=Object.keys(t);this._object=t,this._keys=e,this.size=e.length}function N(t){this._iterable=t,this.size=t.length||t.size}function k(t){this._iterator=t,this._iteratorCache=[]}function U(t){return!(!t||!t[wn])}function P(){return Tn||(Tn=new L([]))}function H(t){var e=Array.isArray(t)?new L(t).fromEntrySeq():w(t)?new k(t).fromEntrySeq():O(t)?new N(t).fromEntrySeq():"object"==typeof t?new j(t):void 0;if(!e)throw new TypeError("Expected Array or iterable object of [k, v] entries, or keyed object: "+t);return e}function x(t){var e=q(t);if(!e)throw new TypeError("Expected Array or iterable object of values: "+t);return e}function V(t){var e=q(t)||"object"==typeof t&&new j(t);if(!e)throw new TypeError("Expected Array or iterable object of values, or keyed object: "+t);return e}function q(t){return D(t)?new L(t):w(t)?new k(t):O(t)?new N(t):void 0}function F(t,e,n,r){var i=t._cache;if(i){for(var o=i.length-1,u=0;u<=o;u++){var a=i[n?o-u:u];if(e(a[1],r?a[0]:u,t)===!1)return u+1}return u}return t.__iterateUncached(e,n)}function G(t,e,n,r){var i=t._cache;if(i){var o=i.length-1,u=0;return new E(function(){var t=i[n?o-u:u];return u++>o?b():I(e,r?t[0]:u-1,t[1])})}return t.__iteratorUncached(e,n)}function K(){throw 
TypeError("Abstract")}function Y(){}function B(){}function J(){}function W(t,e){if(t===e||t!==t&&e!==e)return!0;if(!t||!e)return!1;if("function"==typeof t.valueOf&&"function"==typeof e.valueOf){if(t=t.valueOf(),e=e.valueOf(),t===e||t!==t&&e!==e)return!0;if(!t||!e)return!1}return!("function"!=typeof t.equals||"function"!=typeof e.equals||!t.equals(e))}function X(t,e){return e?Q(e,t,"",{"":t}):Z(t)}function Q(t,e,n,r){return Array.isArray(e)?t.call(r,n,R(e).map((function(n,r){return Q(t,n,r,e)}))):$(e)?t.call(r,n,z(e).map((function(n,r){return Q(t,n,r,e)}))):e}function Z(t){return Array.isArray(t)?R(t).map(Z).toList():$(t)?z(t).map(Z).toMap():t}function $(t){return t&&(t.constructor===Object||void 0===t.constructor)}function tt(t){return t>>>1&1073741824|3221225471&t}function et(t){if(t===!1||null===t||void 0===t)return 0;if("function"==typeof t.valueOf&&(t=t.valueOf(),t===!1||null===t||void 0===t))return 0;if(t===!0)return 1;var e=typeof t;if("number"===e){var n=0|t;for(n!==t&&(n^=4294967295*t);t>4294967295;)t/=4294967295,n^=t;return tt(n)}return"string"===e?t.length>jn?nt(t):rt(t):"function"==typeof t.hashCode?t.hashCode():it(t)}function nt(t){var e=Un[t];return void 0===e&&(e=rt(t),kn===Nn&&(kn=0,Un={}),kn++,Un[t]=e),e}function rt(t){for(var e=0,n=0;n0)switch(t.nodeType){case 1:return t.uniqueID;case 9:return t.documentElement&&t.documentElement.uniqueID}}function ut(t,e){if(!t)throw new Error(e)}function at(t){ut(t!==1/0,"Cannot perform this action with an infinite size.")}function st(t,e){this._iter=t,this._useKeys=e,this.size=t.size}function ct(t){this._iter=t,this.size=t.size}function ft(t){this._iter=t,this.size=t.size}function ht(t){this._iter=t,this.size=t.size}function lt(t){var e=Lt(t);return e._iter=t,e.size=t.size,e.flip=function(){return t},e.reverse=function(){var e=t.reverse.apply(this);return e.flip=function(){return t.reverse()},e},e.has=function(e){return t.includes(e)},e.includes=function(e){return t.has(e)},e.cacheResult=jt,e.__iterateUncached=function(e,n){var r=this;return t.__iterate((function(t,n){return e(n,t,r)!==!1}),n)},e.__iteratorUncached=function(e,n){if(e===En){var r=t.__iterator(e,n);return new E(function(){var t=r.next();if(!t.done){var e=t.value[0];t.value[0]=t.value[1],t.value[1]=e}return t})}return t.__iterator(e===mn?gn:mn,n)},e}function pt(t,e,n){var r=Lt(t);return r.size=t.size,r.has=function(e){return t.has(e)},r.get=function(r,i){var o=t.get(r,ln);return o===ln?i:e.call(n,o,r,t)},r.__iterateUncached=function(r,i){var o=this;return t.__iterate((function(t,i,u){return r(e.call(n,t,i,u),i,o)!==!1}),i)},r.__iteratorUncached=function(r,i){var o=t.__iterator(En,i);return new E(function(){var i=o.next();if(i.done)return i;var u=i.value,a=u[0];return I(r,a,e.call(n,u[1],a,t),i)})},r}function _t(t,e){var n=Lt(t);return n._iter=t,n.size=t.size,n.reverse=function(){return t},t.flip&&(n.flip=function(){var e=lt(t);return e.reverse=function(){return t.flip()},e}),n.get=function(n,r){return t.get(e?n:-1-n,r)},n.has=function(n){return t.has(e?n:-1-n)},n.includes=function(e){return t.includes(e)},n.cacheResult=jt,n.__iterate=function(e,n){var r=this;return t.__iterate((function(t,n){return e(t,n,r)}),!n)},n.__iterator=function(e,n){return t.__iterator(e,!n)},n}function dt(t,e,n,r){var i=Lt(t);return r&&(i.has=function(r){var i=t.get(r,ln);return i!==ln&&!!e.call(n,i,r,t)},i.get=function(r,i){var o=t.get(r,ln);return o!==ln&&e.call(n,o,r,t)?o:i}),i.__iterateUncached=function(i,o){var u=this,a=0;return t.__iterate((function(t,o,s){if(e.call(n,t,o,s))return 
a++,i(t,r?o:a-1,u)}),o),a},i.__iteratorUncached=function(i,o){var u=t.__iterator(En,o),a=0;return new E(function(){for(;;){var o=u.next();if(o.done)return o;var s=o.value,c=s[0],f=s[1];if(e.call(n,f,c,t))return I(i,r?c:a++,f,o)}})},i}function vt(t,e,n){var r=Ut().asMutable();return t.__iterate((function(i,o){r.update(e.call(n,i,o,t),0,(function(t){return t+1}))})),r.asImmutable()}function yt(t,e,n){var r=y(t),i=(m(t)?be():Ut()).asMutable();t.__iterate((function(o,u){i.update(e.call(n,o,u,t),(function(t){return t=t||[],t.push(r?[u,o]:o),t}))}));var o=Mt(t);return i.map((function(e){return Ct(t,o(e))}))}function St(t,e,n,r){var i=t.size;if(void 0!==e&&(e=0|e),void 0!==n&&(n=0|n),s(e,n,i))return t;var o=c(e,i),a=f(n,i);if(o!==o||a!==a)return St(t.toSeq().cacheResult(),e,n,r);var h,l=a-o;l===l&&(h=l<0?0:l);var p=Lt(t);return p.size=0===h?h:t.size&&h||void 0,!r&&U(t)&&h>=0&&(p.get=function(e,n){return e=u(this,e),e>=0&&eh)return b();var t=i.next();return r||e===mn?t:e===gn?I(e,a-1,void 0,t):I(e,a-1,t.value[1],t)})},p}function gt(t,e,n){var r=Lt(t);return r.__iterateUncached=function(r,i){var o=this;if(i)return this.cacheResult().__iterate(r,i);var u=0;return t.__iterate((function(t,i,a){return e.call(n,t,i,a)&&++u&&r(t,i,o)})),u},r.__iteratorUncached=function(r,i){var o=this;if(i)return this.cacheResult().__iterator(r,i);var u=t.__iterator(En,i),a=!0;return new E(function(){if(!a)return b();var t=u.next();if(t.done)return t;var i=t.value,s=i[0],c=i[1];return e.call(n,c,s,o)?r===En?t:I(r,s,c,t):(a=!1,b())})},r}function mt(t,e,n,r){var i=Lt(t);return i.__iterateUncached=function(i,o){var u=this;if(o)return this.cacheResult().__iterate(i,o);var a=!0,s=0;return t.__iterate((function(t,o,c){if(!a||!(a=e.call(n,t,o,c)))return s++,i(t,r?o:s-1,u)})),s},i.__iteratorUncached=function(i,o){var u=this;if(o)return this.cacheResult().__iterator(i,o);var a=t.__iterator(En,o),s=!0,c=0;return new E(function(){var t,o,f;do{if(t=a.next(),t.done)return r||i===mn?t:i===gn?I(i,c++,void 0,t):I(i,c++,t.value[1],t);var h=t.value;o=h[0],f=h[1],s&&(s=e.call(n,f,o,u))}while(s);return i===En?t:I(i,o,f,t)})},i}function Et(t,e){var n=y(t),r=[t].concat(e).map((function(t){return v(t)?n&&(t=p(t)):t=n?H(t):x(Array.isArray(t)?t:[t]),t})).filter((function(t){return 0!==t.size}));if(0===r.length)return t;if(1===r.length){var i=r[0];if(i===t||n&&y(i)||S(t)&&S(i))return i}var o=new L(r);return n?o=o.toKeyedSeq():S(t)||(o=o.toSetSeq()),o=o.flatten(!0),o.size=r.reduce((function(t,e){if(void 0!==t){var n=e.size;if(void 0!==n)return t+n}}),0),o}function It(t,e,n){var r=Lt(t);return r.__iterateUncached=function(r,i){function o(t,s){var c=this;t.__iterate((function(t,i){return(!e||s0}function Dt(t,e,n){var r=Lt(t);return r.size=new L(n).map((function(t){return t.size})).min(),r.__iterate=function(t,e){for(var n,r=this,i=this.__iterator(mn,e),o=0;!(n=i.next()).done&&t(n.value,o++,r)!==!1;);return o},r.__iteratorUncached=function(t,r){var i=n.map((function(t){return t=l(t),T(r?t.reverse():t)})),o=0,u=!1;return new E(function(){var n;return u||(n=i.map((function(t){ -return t.next()})),u=n.some((function(t){return t.done}))),u?b():I(t,o++,e.apply(null,n.map((function(t){return t.value}))))})},r}function Ct(t,e){return U(t)?e:t.constructor(e)}function zt(t){if(t!==Object(t))throw new TypeError("Expected [K, V] tuple: "+t)}function Rt(t){return at(t.size),o(t)}function Mt(t){return y(t)?p:S(t)?_:d}function Lt(t){return Object.create((y(t)?z:S(t)?R:M).prototype)}function jt(){return 
this._iter.cacheResult?(this._iter.cacheResult(),this.size=this._iter.size,this):C.prototype.cacheResult.call(this)}function Nt(t,e){return t>e?1:t>>n)&hn,a=(0===n?r:r>>>n)&hn,s=u===a?[Zt(t,e,n+cn,r,i)]:(o=new Ft(e,r,i),u>>=1)u[a]=1&n?e[o++]:void 0;return u[r]=i,new Vt(t,o+1,u)}function ne(t,e,n){for(var r=[],i=0;i>1&1431655765,t=(858993459&t)+(t>>2&858993459),t=t+(t>>4)&252645135,t+=t>>8,t+=t>>16,127&t}function ae(t,e,n,r){var o=r?t:i(t);return o[e]=n,o}function se(t,e,n,r){var i=t.length+1;if(r&&e+1===i)return t[e]=n,t;for(var o=new Array(i),u=0,a=0;a0&&ro?0:o-n,c=u-n;return c>fn&&(c=fn),function(){if(i===c)return Bn;var t=e?--c:i++;return r&&r[t]}}function i(t,r,i){var a,s=t&&t.array,c=i>o?0:o-i>>r,f=(u-i>>r)+1;return f>fn&&(f=fn),function(){for(;;){if(a){var t=a();if(t!==Bn)return t;a=null}if(c===f)return Bn;var o=e?--f:c++;a=n(s&&s[o],r-cn,i+(o<=t.size||n<0)return t.withMutations((function(t){n<0?me(t,n).set(0,r):me(t,0,n+1).set(n,r)}));n+=t._origin;var i=t._tail,o=t._root,a=e(_n);return n>=Ie(t._capacity)?i=ye(i,t.__ownerID,0,n,r,a):o=ye(o,t.__ownerID,t._level,n,r,a),a.value?t.__ownerID?(t._root=o,t._tail=i,t.__hash=void 0,t.__altered=!0,t):_e(t._origin,t._capacity,t._level,o,i):t}function ye(t,e,r,i,o,u){var a=i>>>r&hn,s=t&&a0){var f=t&&t.array[a],h=ye(f,e,r-cn,i,o,u);return h===f?t:(c=Se(t,e),c.array[a]=h,c)}return s&&t.array[a]===o?t:(n(u),c=Se(t,e),void 0===o&&a===c.array.length-1?c.array.pop():c.array[a]=o,c)}function Se(t,e){return e&&t&&e===t.ownerID?t:new le(t?t.array.slice():[],e)}function ge(t,e){if(e>=Ie(t._capacity))return t._tail;if(e<1<0;)n=n.array[e>>>r&hn],r-=cn;return n}}function me(t,e,n){void 0!==e&&(e=0|e),void 0!==n&&(n=0|n);var i=t.__ownerID||new r,o=t._origin,u=t._capacity,a=o+e,s=void 0===n?u:n<0?u+n:o+n;if(a===o&&s===u)return t;if(a>=s)return t.clear();for(var c=t._level,f=t._root,h=0;a+h<0;)f=new le(f&&f.array.length?[void 0,f]:[],i),c+=cn,h+=1<=1<l?new le([],i):_;if(_&&p>l&&acn;y-=cn){var S=l>>>y&hn;v=v.array[S]=Se(v.array[S],i)}v.array[l>>>cn&hn]=_}if(s=p)a-=p,s-=p,c=cn,f=null,d=d&&d.removeBefore(i,0,a);else if(a>o||p>>c&hn;if(g!==p>>>c&hn)break;g&&(h+=(1<o&&(f=f.removeBefore(i,c,a-h)),f&&pi&&(i=a.size),v(u)||(a=a.map((function(t){return X(t)}))),r.push(a)}return i>t.size&&(t=t.setSize(i)),ie(t,e,r)}function Ie(t){return t>>cn<=fn&&u.size>=2*o.size?(i=u.filter((function(t,e){return void 0!==t&&a!==e})),r=i.toKeyedSeq().map((function(t){return t[0]})).flip().toMap(),t.__ownerID&&(r.__ownerID=i.__ownerID=t.__ownerID)):(r=o.remove(e),i=a===u.size-1?u.pop():u.set(a,void 0))}else if(s){if(n===u.get(a)[1])return t;r=o,i=u.set(a,[e,n])}else r=o.set(e,u.size),i=u.set(u.size,[e,n]);return t.__ownerID?(t.size=r.size,t._map=r,t._list=i,t.__hash=void 0,t):we(r,i)}function De(t){return null===t||void 0===t?Re():Ce(t)?t:Re().unshiftAll(t)}function Ce(t){return!(!t||!t[Wn])}function ze(t,e,n,r){var i=Object.create(Xn);return i.size=t,i._head=e,i.__ownerID=n,i.__hash=r,i.__altered=!1,i}function Re(){return Qn||(Qn=ze(0))}function Me(t){return null===t||void 0===t?ke():Le(t)&&!m(t)?t:ke().withMutations((function(e){var n=d(t);at(n.size),n.forEach((function(t){return e.add(t)}))}))}function Le(t){return!(!t||!t[Zn])}function je(t,e){return t.__ownerID?(t.size=e.size,t._map=e,t):e===t._map?t:0===e.size?t.__empty():t.__make(e)}function Ne(t,e){var n=Object.create($n);return n.size=t?t.size:0,n._map=t,n.__ownerID=e,n}function ke(){return tr||(tr=Ne(Jt()))}function Ue(t){return null===t||void 0===t?xe():Pe(t)?t:xe().withMutations((function(e){var 
n=d(t);at(n.size),n.forEach((function(t){return e.add(t)}))}))}function Pe(t){return Le(t)&&m(t)}function He(t,e){var n=Object.create(er);return n.size=t?t.size:0,n._map=t,n.__ownerID=e,n}function xe(){return nr||(nr=He(Te()))}function Ve(t,e){var n,r=function(o){if(o instanceof r)return o;if(!(this instanceof r))return new r(o);if(!n){n=!0;var u=Object.keys(t);Ge(i,u),i.size=u.length,i._name=e,i._keys=u,i._defaultValues=t}this._map=Ut(o)},i=r.prototype=Object.create(rr);return i.constructor=r,r}function qe(t,e,n){var r=Object.create(Object.getPrototypeOf(t));return r._map=e,r.__ownerID=n,r}function Fe(t){return t._name||t.constructor.name||"Record"}function Ge(t,e){try{e.forEach(Ke.bind(void 0,t))}catch(t){}}function Ke(t,e){Object.defineProperty(t,e,{get:function(){return this.get(e)},set:function(t){ut(this.__ownerID,"Cannot set on an immutable record."),this.set(e,t)}})}function Ye(t,e){if(t===e)return!0;if(!v(e)||void 0!==t.size&&void 0!==e.size&&t.size!==e.size||void 0!==t.__hash&&void 0!==e.__hash&&t.__hash!==e.__hash||y(t)!==y(e)||S(t)!==S(e)||m(t)!==m(e))return!1;if(0===t.size&&0===e.size)return!0;var n=!g(t);if(m(t)){var r=t.entries();return e.every((function(t,e){var i=r.next().value;return i&&W(i[1],t)&&(n||W(i[0],e))}))&&r.next().done}var i=!1;if(void 0===t.size)if(void 0===e.size)"function"==typeof t.cacheResult&&t.cacheResult();else{i=!0;var o=t;t=e,e=o}var u=!0,a=e.__iterate((function(e,r){if(n?!t.has(e):i?!W(e,t.get(r,ln)):!W(t.get(r,ln),e))return u=!1,!1}));return u&&t.size===a}function Be(t,e,n){if(!(this instanceof Be))return new Be(t,e,n);if(ut(0!==n,"Cannot step a Range by 0"),t=t||0,void 0===e&&(e=1/0),n=void 0===n?1:Math.abs(n),ee?-1:0}function rn(t){if(t.size===1/0)return 0;var e=m(t),n=y(t),r=e?1:0,i=t.__iterate(n?e?function(t,e){r=31*r+un(et(t),et(e))|0}:function(t,e){r=r+un(et(t),et(e))|0}:e?function(t){r=31*r+et(t)|0}:function(t){r=r+et(t)|0});return on(i,r)}function on(t,e){return e=Dn(e,3432918353),e=Dn(e<<15|e>>>-15,461845907),e=Dn(e<<13|e>>>-13,5),e=(e+3864292196|0)^t,e=Dn(e^e>>>16,2246822507),e=Dn(e^e>>>13,3266489909),e=tt(e^e>>>16)}function un(t,e){return t^e+2654435769+(t<<6)+(t>>2)|0}var an=Array.prototype.slice,sn="delete",cn=5,fn=1<r?b():I(t,i,n[e?r-i++:i++])})},t(j,z),j.prototype.get=function(t,e){return void 0===e||this.has(t)?this._object[t]:e},j.prototype.has=function(t){return this._object.hasOwnProperty(t)},j.prototype.__iterate=function(t,e){for(var n=this,r=this._object,i=this._keys,o=i.length-1,u=0;u<=o;u++){var a=i[e?o-u:u];if(t(r[a],a,n)===!1)return u+1}return u},j.prototype.__iterator=function(t,e){var n=this._object,r=this._keys,i=r.length-1,o=0;return new E(function(){var u=r[e?i-o:o];return o++>i?b():I(t,u,n[u])})},j.prototype[Sn]=!0,t(N,R),N.prototype.__iterateUncached=function(t,e){var n=this;if(e)return this.cacheResult().__iterate(t,e);var r=this._iterable,i=T(r),o=0;if(w(i))for(var u;!(u=i.next()).done&&t(u.value,o++,n)!==!1;);return o},N.prototype.__iteratorUncached=function(t,e){if(e)return this.cacheResult().__iterator(t,e);var n=this._iterable,r=T(n);if(!w(r))return new E(b);var i=0;return new E(function(){var e=r.next();return e.done?e:I(t,i++,e.value)})},t(k,R),k.prototype.__iterateUncached=function(t,e){var n=this;if(e)return this.cacheResult().__iterate(t,e);for(var r=this._iterator,i=this._iteratorCache,o=0;o=r.length){var e=n.next();if(e.done)return e;r[i]=e.value}return I(t,i,r[i++])})};var Tn;t(K,l),t(Y,K),t(B,K),t(J,K),K.Keyed=Y,K.Indexed=B,K.Set=J;var An,Dn="function"==typeof 
Math.imul&&Math.imul(4294967295,2)===-2?Math.imul:function(t,e){t=0|t,e=0|e;var n=65535&t,r=65535&e;return n*r+((t>>>16)*r+n*(e>>>16)<<16>>>0)|0},Cn=Object.isExtensible,zn=(function(){try{return Object.defineProperty({},"@",{}),!0}catch(t){return!1}})(),Rn="function"==typeof WeakMap;Rn&&(An=new WeakMap);var Mn=0,Ln="__immutablehash__";"function"==typeof Symbol&&(Ln=Symbol(Ln));var jn=16,Nn=255,kn=0,Un={};t(st,z),st.prototype.get=function(t,e){return this._iter.get(t,e)},st.prototype.has=function(t){return this._iter.has(t)},st.prototype.valueSeq=function(){return this._iter.valueSeq()},st.prototype.reverse=function(){var t=this,e=_t(this,!0);return this._useKeys||(e.valueSeq=function(){return t._iter.toSeq().reverse()}),e},st.prototype.map=function(t,e){var n=this,r=pt(this,t,e);return this._useKeys||(r.valueSeq=function(){return n._iter.toSeq().map(t,e)}),r},st.prototype.__iterate=function(t,e){var n,r=this;return this._iter.__iterate(this._useKeys?function(e,n){return t(e,n,r)}:(n=e?Rt(this):0,function(i){return t(i,e?--n:n++,r)}),e)},st.prototype.__iterator=function(t,e){if(this._useKeys)return this._iter.__iterator(t,e);var n=this._iter.__iterator(mn,e),r=e?Rt(this):0;return new E(function(){var i=n.next();return i.done?i:I(t,e?--r:r++,i.value,i)})},st.prototype[Sn]=!0,t(ct,R),ct.prototype.includes=function(t){return this._iter.includes(t)},ct.prototype.__iterate=function(t,e){var n=this,r=0;return this._iter.__iterate((function(e){return t(e,r++,n)}),e)},ct.prototype.__iterator=function(t,e){var n=this._iter.__iterator(mn,e),r=0;return new E(function(){var e=n.next();return e.done?e:I(t,r++,e.value,e)})},t(ft,M),ft.prototype.has=function(t){return this._iter.includes(t)},ft.prototype.__iterate=function(t,e){var n=this;return this._iter.__iterate((function(e){return t(e,e,n)}),e)},ft.prototype.__iterator=function(t,e){var n=this._iter.__iterator(mn,e);return new E(function(){var e=n.next();return e.done?e:I(t,e.value,e.value,e)})},t(ht,z),ht.prototype.entrySeq=function(){return this._iter.toSeq()},ht.prototype.__iterate=function(t,e){var n=this;return this._iter.__iterate((function(e){if(e){zt(e);var r=v(e);return t(r?e.get(1):e[1],r?e.get(0):e[0],n)}}),e)},ht.prototype.__iterator=function(t,e){var n=this._iter.__iterator(mn,e);return new E(function(){for(;;){var e=n.next();if(e.done)return e;var r=e.value;if(r){zt(r);var i=v(r);return I(t,i?r.get(0):r[0],i?r.get(1):r[1],e)}}})},ct.prototype.cacheResult=st.prototype.cacheResult=ft.prototype.cacheResult=ht.prototype.cacheResult=jt,t(Ut,Y),Ut.prototype.toString=function(){return this.__toString("Map {","}")},Ut.prototype.get=function(t,e){return this._root?this._root.get(0,void 0,t,e):e},Ut.prototype.set=function(t,e){return Wt(this,t,e)},Ut.prototype.setIn=function(t,e){return this.updateIn(t,ln,(function(){return e}))},Ut.prototype.remove=function(t){return Wt(this,t,ln)},Ut.prototype.deleteIn=function(t){return this.updateIn(t,(function(){return ln}))},Ut.prototype.update=function(t,e,n){return 1===arguments.length?t(this):this.updateIn([t],e,n)},Ut.prototype.updateIn=function(t,e,n){n||(n=e,e=void 0);var r=oe(this,kt(t),e,n);return r===ln?void 0:r},Ut.prototype.clear=function(){return 0===this.size?this:this.__ownerID?(this.size=0,this._root=null,this.__hash=void 0,this.__altered=!0,this):Jt()},Ut.prototype.merge=function(){return ne(this,void 0,arguments)},Ut.prototype.mergeWith=function(t){var e=an.call(arguments,1);return ne(this,t,e)},Ut.prototype.mergeIn=function(t){var e=an.call(arguments,1);return 
this.updateIn(t,Jt(),(function(t){return"function"==typeof t.merge?t.merge.apply(t,e):e[e.length-1]}))},Ut.prototype.mergeDeep=function(){return ne(this,re(void 0),arguments)},Ut.prototype.mergeDeepWith=function(t){var e=an.call(arguments,1);return ne(this,re(t),e)},Ut.prototype.mergeDeepIn=function(t){var e=an.call(arguments,1);return this.updateIn(t,Jt(),(function(t){return"function"==typeof t.mergeDeep?t.mergeDeep.apply(t,e):e[e.length-1]}))},Ut.prototype.sort=function(t){return be(wt(this,t))},Ut.prototype.sortBy=function(t,e){return be(wt(this,e,t))},Ut.prototype.withMutations=function(t){var e=this.asMutable();return t(e),e.wasAltered()?e.__ensureOwner(this.__ownerID):this},Ut.prototype.asMutable=function(){return this.__ownerID?this:this.__ensureOwner(new r)},Ut.prototype.asImmutable=function(){return this.__ensureOwner()},Ut.prototype.wasAltered=function(){return this.__altered},Ut.prototype.__iterator=function(t,e){return new Gt(this,t,e)},Ut.prototype.__iterate=function(t,e){var n=this,r=0;return this._root&&this._root.iterate((function(e){return r++,t(e[1],e[0],n)}),e),r},Ut.prototype.__ensureOwner=function(t){return t===this.__ownerID?this:t?Bt(this.size,this._root,t,this.__hash):(this.__ownerID=t,this.__altered=!1,this)},Ut.isMap=Pt;var Pn="@@__IMMUTABLE_MAP__@@",Hn=Ut.prototype;Hn[Pn]=!0,Hn[sn]=Hn.remove,Hn.removeIn=Hn.deleteIn,Ht.prototype.get=function(t,e,n,r){for(var i=this.entries,o=0,u=i.length;o=Vn)return $t(t,f,o,u);var _=t&&t===this.ownerID,d=_?f:i(f);return p?c?h===l-1?d.pop():d[h]=d.pop():d[h]=[o,u]:d.push([o,u]),_?(this.entries=d,this):new Ht(t,d)}},xt.prototype.get=function(t,e,n,r){void 0===e&&(e=et(n));var i=1<<((0===t?e:e>>>t)&hn),o=this.bitmap;return 0===(o&i)?r:this.nodes[ue(o&i-1)].get(t+cn,e,n,r)},xt.prototype.update=function(t,e,n,r,i,o,u){void 0===n&&(n=et(r));var a=(0===e?n:n>>>e)&hn,s=1<=qn)return ee(t,l,c,a,_);if(f&&!_&&2===l.length&&Qt(l[1^h]))return l[1^h];if(f&&_&&1===l.length&&Qt(_))return _;var d=t&&t===this.ownerID,v=f?_?c:c^s:c|s,y=f?_?ae(l,h,_,d):ce(l,h,d):se(l,h,_,d);return d?(this.bitmap=v,this.nodes=y,this):new xt(t,v,y)},Vt.prototype.get=function(t,e,n,r){void 0===e&&(e=et(n));var i=(0===t?e:e>>>t)&hn,o=this.nodes[i];return o?o.get(t+cn,e,n,r):r},Vt.prototype.update=function(t,e,n,r,i,o,u){void 0===n&&(n=et(r));var a=(0===e?n:n>>>e)&hn,s=i===ln,c=this.nodes,f=c[a];if(s&&!f)return this;var h=Xt(f,t,e+cn,n,r,i,o,u);if(h===f)return this;var l=this.count;if(f){if(!h&&(l--,l=0&&t>>e&hn;if(r>=this.array.length)return new le([],t);var i,o=0===r;if(e>0){var u=this.array[r];if(i=u&&u.removeBefore(t,e-cn,n),i===u&&o)return this}if(o&&!i)return this;var a=Se(this,t);if(!o)for(var s=0;s>>e&hn;if(r>=this.array.length)return this;var i;if(e>0){var o=this.array[r];if(i=o&&o.removeAfter(t,e-cn,n),i===o&&r===this.array.length-1)return this}var u=Se(this,t);return u.array.splice(r+1),i&&(u.array[r]=i),u};var Yn,Bn={};t(be,Ut),be.of=function(){return this(arguments)},be.prototype.toString=function(){return this.__toString("OrderedMap {","}")},be.prototype.get=function(t,e){var n=this._map.get(t);return void 0!==n?this._list.get(n)[1]:e},be.prototype.clear=function(){return 0===this.size?this:this.__ownerID?(this.size=0,this._map.clear(),this._list.clear(),this):Te()},be.prototype.set=function(t,e){return Ae(this,t,e)},be.prototype.remove=function(t){return Ae(this,t,ln)},be.prototype.wasAltered=function(){return this._map.wasAltered()||this._list.wasAltered()},be.prototype.__iterate=function(t,e){var n=this;return this._list.__iterate((function(e){return 
e&&t(e[1],e[0],n)}),e)},be.prototype.__iterator=function(t,e){return this._list.fromEntrySeq().__iterator(t,e)},be.prototype.__ensureOwner=function(t){if(t===this.__ownerID)return this;var e=this._map.__ensureOwner(t),n=this._list.__ensureOwner(t);return t?we(e,n,t,this.__hash):(this.__ownerID=t,this._map=e,this._list=n,this)},be.isOrderedMap=Oe,be.prototype[Sn]=!0,be.prototype[sn]=be.prototype.remove;var Jn;t(De,B),De.of=function(){return this(arguments)},De.prototype.toString=function(){return this.__toString("Stack [","]")},De.prototype.get=function(t,e){var n=this._head;for(t=u(this,t);n&&t--;)n=n.next;return n?n.value:e},De.prototype.peek=function(){return this._head&&this._head.value},De.prototype.push=function(){var t=arguments;if(0===arguments.length)return this;for(var e=this.size+arguments.length,n=this._head,r=arguments.length-1;r>=0;r--)n={value:t[r],next:n};return this.__ownerID?(this.size=e,this._head=n,this.__hash=void 0,this.__altered=!0,this):ze(e,n)},De.prototype.pushAll=function(t){if(t=_(t),0===t.size)return this;at(t.size);var e=this.size,n=this._head;return t.reverse().forEach((function(t){e++,n={value:t,next:n}})),this.__ownerID?(this.size=e,this._head=n,this.__hash=void 0,this.__altered=!0,this):ze(e,n)},De.prototype.pop=function(){return this.slice(1)},De.prototype.unshift=function(){return this.push.apply(this,arguments)},De.prototype.unshiftAll=function(t){return this.pushAll(t)},De.prototype.shift=function(){return this.pop.apply(this,arguments)},De.prototype.clear=function(){return 0===this.size?this:this.__ownerID?(this.size=0,this._head=void 0,this.__hash=void 0,this.__altered=!0,this):Re()},De.prototype.slice=function(t,e){if(s(t,e,this.size))return this;var n=c(t,this.size),r=f(e,this.size);if(r!==this.size)return B.prototype.slice.call(this,t,e);for(var i=this.size-n,o=this._head;n--;)o=o.next;return this.__ownerID?(this.size=i,this._head=o,this.__hash=void 0,this.__altered=!0,this):ze(i,o)},De.prototype.__ensureOwner=function(t){return t===this.__ownerID?this:t?ze(this.size,this._head,t,this.__hash):(this.__ownerID=t,this.__altered=!1,this)},De.prototype.__iterate=function(t,e){var n=this;if(e)return this.reverse().__iterate(t);for(var r=0,i=this._head;i&&t(i.value,r++,n)!==!1;)i=i.next;return r},De.prototype.__iterator=function(t,e){if(e)return this.reverse().__iterator(t);var n=0,r=this._head;return new E(function(){if(r){var e=r.value;return r=r.next,I(t,n++,e)}return b()})},De.isStack=Ce;var Wn="@@__IMMUTABLE_STACK__@@",Xn=De.prototype;Xn[Wn]=!0,Xn.withMutations=Hn.withMutations,Xn.asMutable=Hn.asMutable,Xn.asImmutable=Hn.asImmutable,Xn.wasAltered=Hn.wasAltered;var Qn;t(Me,J),Me.of=function(){return this(arguments)},Me.fromKeys=function(t){return this(p(t).keySeq())},Me.prototype.toString=function(){return this.__toString("Set {","}")},Me.prototype.has=function(t){return this._map.has(t)},Me.prototype.add=function(t){return je(this,this._map.set(t,!0))},Me.prototype.remove=function(t){return je(this,this._map.remove(t))},Me.prototype.clear=function(){return je(this,this._map.clear())},Me.prototype.union=function(){var t=an.call(arguments,0);return t=t.filter((function(t){return 0!==t.size})),0===t.length?this:0!==this.size||this.__ownerID||1!==t.length?this.withMutations((function(e){for(var n=0;n1?" 
by "+this._step:"")+" ]"},Be.prototype.get=function(t,e){return this.has(t)?this._start+u(this,t)*this._step:e},Be.prototype.includes=function(t){var e=(t-this._start)/this._step;return e>=0&&e=0&&nn?b():I(t,o++,u)})},Be.prototype.equals=function(t){return t instanceof Be?this._start===t._start&&this._end===t._end&&this._step===t._step:Ye(this,t)};var ir;t(Je,R),Je.prototype.toString=function(){return 0===this.size?"Repeat []":"Repeat [ "+this._value+" "+this.size+" times ]"},Je.prototype.get=function(t,e){return this.has(t)?this._value:e},Je.prototype.includes=function(t){return W(this._value,t)},Je.prototype.slice=function(t,e){var n=this.size;return s(t,e,n)?this:new Je(this._value,f(e,n)-c(t,n))},Je.prototype.reverse=function(){return this},Je.prototype.indexOf=function(t){return W(this._value,t)?0:-1},Je.prototype.lastIndexOf=function(t){return W(this._value,t)?this.size:-1},Je.prototype.__iterate=function(t,e){for(var n=this,r=0;rthis.size?e:this.find((function(e,n){return n===t}),void 0,e)},has:function(t){return t=u(this,t),t>=0&&(void 0!==this.size?this.size===1/0||t-1&&t%1===0&&t<=Number.MAX_VALUE}var i=Function.prototype.bind;e.isString=function(t){return"string"==typeof t||"[object String]"===n(t)},e.isArray=Array.isArray||function(t){return"[object Array]"===n(t)},"function"!=typeof/./&&"object"!=typeof Int8Array?e.isFunction=function(t){return"function"==typeof t||!1}:e.isFunction=function(t){return"[object Function]"===toString.call(t)},e.isObject=function(t){var e=typeof t;return"function"===e||"object"===e&&!!t},e.extend=function(t){var e=arguments,n=arguments.length;if(!t||n<2)return t||{};for(var r=1;r0)){var e=this.reactorState.get("dirtyStores");if(0!==e.size){var n=c.default.Set().withMutations((function(n){n.union(t.observerState.get("any")),e.forEach((function(e){var r=t.observerState.getIn(["stores",e]);r&&n.union(r)}))}));n.forEach((function(e){var n=t.observerState.getIn(["observersMap",e]);if(n){var r=n.get("getter"),i=n.get("handler"),o=p.evaluate(t.prevReactorState,r),u=p.evaluate(t.reactorState,r);t.prevReactorState=o.reactorState,t.reactorState=u.reactorState;var a=o.result,s=u.result;c.default.is(a,s)||i.call(null,s)}}));var r=p.resetDirtyStores(this.reactorState);this.prevReactorState=r,this.reactorState=r}}}},{key:"batchStart",value:function(){this.__batchDepth++}},{key:"batchEnd",value:function(){if(this.__batchDepth--,this.__batchDepth<=0){this.__isDispatching=!0;try{this.__notify()}catch(t){throw this.__isDispatching=!1,t}this.__isDispatching=!1}}}]),t})();e.default=(0,y.toFactory)(g),t.exports=e.default},function(t,e,n){function r(t,e,n){return e in t?Object.defineProperty(t,e,{value:n,enumerable:!0,configurable:!0,writable:!0}):t[e]=n,t}function i(t,e){var n={};return(0,o.each)(e,(function(e,r){n[r]=t.evaluate(e)})),n}Object.defineProperty(e,"__esModule",{value:!0});var o=n(4);e.default=function(t){return{getInitialState:function(){return i(t,this.getDataBindings())},componentDidMount:function(){var e=this;this.__unwatchFns=[],(0,o.each)(this.getDataBindings(),(function(n,i){var o=t.observe(n,(function(t){e.setState(r({},i,t))}));e.__unwatchFns.push(o)}))},componentWillUnmount:function(){for(var t=this;this.__unwatchFns.length;)t.__unwatchFns.shift()()}}},t.exports=e.default},function(t,e,n){function r(t){return t&&t.__esModule?t:{default:t}}function i(t,e){return new M({result:t,reactorState:e})}function o(t,e){return t.withMutations((function(t){(0,R.each)(e,(function(e,n){t.getIn(["stores",n])&&console.warn("Store already defined for id = "+n);var 
r=e.getInitialState();if(void 0===r&&f(t,"throwOnUndefinedStoreReturnValue"))throw new Error("Store getInitialState() must return a value, did you forget a return statement");if(f(t,"throwOnNonImmutableStore")&&!(0,D.isImmutableValue)(r))throw new Error("Store getInitialState() must return an immutable value, did you forget to call toImmutable");t.update("stores",(function(t){return t.set(n,e)})).update("state",(function(t){return t.set(n,r)})).update("dirtyStores",(function(t){return t.add(n)})).update("storeStates",(function(t){return b(t,[n])}))})),I(t)}))}function u(t,e){return t.withMutations((function(t){(0,R.each)(e,(function(e,n){t.update("stores",(function(t){return t.set(n,e)}))}))}))}function a(t,e,n){if(void 0===e&&f(t,"throwOnUndefinedActionType"))throw new Error("`dispatch` cannot be called with an `undefined` action type.");var r=t.get("state"),i=t.get("dirtyStores"),o=r.withMutations((function(r){A.default.dispatchStart(t,e,n),t.get("stores").forEach((function(o,u){var a=r.get(u),s=void 0;try{s=o.handle(a,e,n)}catch(e){throw A.default.dispatchError(t,e.message),e}if(void 0===s&&f(t,"throwOnUndefinedStoreReturnValue")){var c="Store handler must return a value, did you forget a return statement";throw A.default.dispatchError(t,c),new Error(c)}r.set(u,s),a!==s&&(i=i.add(u))})),A.default.dispatchEnd(t,r,i)})),u=t.set("state",o).set("dirtyStores",i).update("storeStates",(function(t){return b(t,i)}));return I(u)}function s(t,e){var n=[],r=(0,D.toImmutable)({}).withMutations((function(r){(0,R.each)(e,(function(e,i){var o=t.getIn(["stores",i]);if(o){var u=o.deserialize(e);void 0!==u&&(r.set(i,u),n.push(i))}}))})),i=w.default.Set(n);return t.update("state",(function(t){return t.merge(r)})).update("dirtyStores",(function(t){return t.union(i)})).update("storeStates",(function(t){return b(t,n)}))}function c(t,e,n){var r=e;(0,z.isKeyPath)(e)&&(e=(0,C.fromKeyPath)(e));var i=t.get("nextId"),o=(0,C.getStoreDeps)(e),u=w.default.Map({id:i,storeDeps:o,getterKey:r,getter:e,handler:n}),a=void 0;return a=0===o.size?t.update("any",(function(t){return t.add(i)})):t.withMutations((function(t){o.forEach((function(e){var n=["stores",e];t.hasIn(n)||t.setIn(n,w.default.Set()),t.updateIn(["stores",e],(function(t){return t.add(i)}))}))})),a=a.set("nextId",i+1).setIn(["observersMap",i],u),{observerState:a,entry:u}}function f(t,e){var n=t.getIn(["options",e]);if(void 0===n)throw new Error("Invalid option: "+e);return n}function h(t,e,n){var r=t.get("observersMap").filter((function(t){var r=t.get("getterKey"),i=!n||t.get("handler")===n;return!!i&&((0,z.isKeyPath)(e)&&(0,z.isKeyPath)(r)?(0,z.isEqual)(e,r):e===r)}));return t.withMutations((function(t){r.forEach((function(e){return l(t,e)}))}))}function l(t,e){return t.withMutations((function(t){var n=e.get("id"),r=e.get("storeDeps");0===r.size?t.update("any",(function(t){return t.remove(n)})):r.forEach((function(e){t.updateIn(["stores",e],(function(t){return t?t.remove(n):t}))})),t.removeIn(["observersMap",n])}))}function p(t){var e=t.get("state");return t.withMutations((function(t){var n=t.get("stores"),r=n.keySeq().toJS();n.forEach((function(n,r){var i=e.get(r),o=n.handleReset(i);if(void 0===o&&f(t,"throwOnUndefinedStoreReturnValue"))throw new Error("Store handleReset() must return a value, did you forget a return statement");if(f(t,"throwOnNonImmutableStore")&&!(0,D.isImmutableValue)(o))throw new Error("Store reset state must be an immutable value, did you forget to call toImmutable");t.setIn(["state",r],o)})),t.update("storeStates",(function(t){return 
b(t,r)})),v(t)}))}function _(t,e){var n=t.get("state");if((0,z.isKeyPath)(e))return i(n.getIn(e),t);if(!(0,C.isGetter)(e))throw new Error("evaluate must be passed a keyPath or Getter");if(g(t,e))return i(E(t,e),t);var r=(0,C.getDeps)(e).map((function(e){return _(t,e).result})),o=(0,C.getComputeFn)(e).apply(null,r);return i(o,m(t,e,o))}function d(t){var e={};return t.get("stores").forEach((function(n,r){var i=t.getIn(["state",r]),o=n.serialize(i);void 0!==o&&(e[r]=o)})),e}function v(t){return t.set("dirtyStores",w.default.Set())}function y(t){return t}function S(t,e){var n=y(e);return t.getIn(["cache",n])}function g(t,e){var n=S(t,e);if(!n)return!1;var r=n.get("storeStates");return 0!==r.size&&r.every((function(e,n){return t.getIn(["storeStates",n])===e}))}function m(t,e,n){var r=y(e),i=t.get("dispatchId"),o=(0,C.getStoreDeps)(e),u=(0,D.toImmutable)({}).withMutations((function(e){o.forEach((function(n){var r=t.getIn(["storeStates",n]);e.set(n,r)}))}));return t.setIn(["cache",r],w.default.Map({value:n,storeStates:u,dispatchId:i}))}function E(t,e){var n=y(e);return t.getIn(["cache",n,"value"])}function I(t){return t.update("dispatchId",(function(t){return t+1}))}function b(t,e){return t.withMutations((function(t){e.forEach((function(e){var n=t.has(e)?t.get(e)+1:1;t.set(e,n)}))}))}Object.defineProperty(e,"__esModule",{value:!0}),e.registerStores=o,e.replaceStores=u,e.dispatch=a,e.loadState=s,e.addObserver=c,e.getOption=f,e.removeObserver=h,e.removeObserverByEntry=l,e.reset=p,e.evaluate=_,e.serialize=d,e.resetDirtyStores=v;var O=n(3),w=r(O),T=n(9),A=r(T),D=n(5),C=n(10),z=n(11),R=n(4),M=w.default.Record({result:null,reactorState:null})},function(t,e,n){var r=n(8);e.dispatchStart=function(t,e,n){(0,r.getOption)(t,"logDispatches")&&console.group&&(console.groupCollapsed("Dispatch: %s",e),console.group("payload"),console.debug(n),console.groupEnd())},e.dispatchError=function(t,e){(0,r.getOption)(t,"logDispatches")&&console.group&&(console.debug("Dispatch error: "+e),console.groupEnd())},e.dispatchEnd=function(t,e,n){(0,r.getOption)(t,"logDispatches")&&console.group&&((0,r.getOption)(t,"logDirtyStores")&&console.log("Stores updated:",n.toList().toJS()),(0,r.getOption)(t,"logAppState")&&console.debug("Dispatch done, new state: ",e.toJS()),console.groupEnd())}},function(t,e,n){function r(t){return t&&t.__esModule?t:{default:t}}function i(t){return(0,l.isArray)(t)&&(0,l.isFunction)(t[t.length-1])}function o(t){return t[t.length-1]}function u(t){return t.slice(0,t.length-1)}function a(t,e){e||(e=h.default.Set());var n=h.default.Set().withMutations((function(e){if(!i(t))throw new Error("getFlattenedDeps must be passed a Getter");u(t).forEach((function(t){if((0,p.isKeyPath)(t))e.add((0,f.List)(t));else{if(!i(t))throw new Error("Invalid getter, each dependency must be a KeyPath or Getter");e.union(a(t))}}))}));return e.union(n)}function s(t){if(!(0,p.isKeyPath)(t))throw new Error("Cannot create Getter from KeyPath: "+t);return[t,_]}function c(t){if(t.hasOwnProperty("__storeDeps"))return t.__storeDeps;var e=a(t).map((function(t){return t.first()})).filter((function(t){return!!t}));return Object.defineProperty(t,"__storeDeps",{enumerable:!1,configurable:!1,writable:!1,value:e}),e}Object.defineProperty(e,"__esModule",{value:!0});var f=n(3),h=r(f),l=n(4),p=n(11),_=function(t){return t};e.default={isGetter:i,getComputeFn:o,getFlattenedDeps:a,getStoreDeps:c,getDeps:u,fromKeyPath:s},t.exports=e.default},function(t,e,n){function r(t){return t&&t.__esModule?t:{default:t}}function 
i(t){return(0,s.isArray)(t)&&!(0,s.isFunction)(t[t.length-1])}function o(t,e){var n=a.default.List(t),r=a.default.List(e);return a.default.is(n,r)}Object.defineProperty(e,"__esModule",{value:!0}),e.isKeyPath=i,e.isEqual=o;var u=n(3),a=r(u),s=n(4)},function(t,e,n){Object.defineProperty(e,"__esModule",{value:!0});var r=n(3),i=(0,r.Map)({logDispatches:!1,logAppState:!1,logDirtyStores:!1,throwOnUndefinedActionType:!1,throwOnUndefinedStoreReturnValue:!1,throwOnNonImmutableStore:!1,throwOnDispatchInDispatch:!1});e.PROD_OPTIONS=i;var o=(0,r.Map)({logDispatches:!0,logAppState:!0,logDirtyStores:!0,throwOnUndefinedActionType:!0,throwOnUndefinedStoreReturnValue:!0,throwOnNonImmutableStore:!0,throwOnDispatchInDispatch:!0});e.DEBUG_OPTIONS=o;var u=(0,r.Record)({dispatchId:0,state:(0,r.Map)(),stores:(0,r.Map)(),cache:(0,r.Map)(),storeStates:(0,r.Map)(),dirtyStores:(0,r.Set)(),debug:!1,options:i});e.ReactorState=u;var a=(0,r.Record)({any:(0,r.Set)(),stores:(0,r.Map)({}),observersMap:(0,r.Map)({}),nextId:1});e.ObserverState=a}])}))})),ze=t(Ce),Re=function(t){var e,n={};if(!(t instanceof Object)||Array.isArray(t))throw new Error("keyMirror(...): Argument must be an object.");for(e in t)t.hasOwnProperty(e)&&(n[e]=e);return n},Me=Re,Le=Me({VALIDATING_AUTH_TOKEN:null,VALID_AUTH_TOKEN:null,INVALID_AUTH_TOKEN:null,LOG_OUT:null}),je=ze.Store,Ne=ze.toImmutable,ke=new je({getInitialState:function(){return Ne({isValidating:!1,authToken:!1,host:null,isInvalid:!1,errorMessage:""})},initialize:function(){this.on(Le.VALIDATING_AUTH_TOKEN,n),this.on(Le.VALID_AUTH_TOKEN,r),this.on(Le.INVALID_AUTH_TOKEN,i)}}),Ue=ze.Store,Pe=ze.toImmutable,He=new Ue({getInitialState:function(){return Pe({authToken:null,host:""})},initialize:function(){this.on(Le.VALID_AUTH_TOKEN,o),this.on(Le.LOG_OUT,u)}}),xe=ze.Store,Ve=new xe({getInitialState:function(){return!0},initialize:function(){this.on(Le.VALID_AUTH_TOKEN,a)}}),qe=Me({STREAM_START:null,STREAM_STOP:null,STREAM_ERROR:null}),Fe="object"==typeof window&&"EventSource"in window,Ge=ze.Store,Ke=ze.toImmutable,Ye=new Ge({getInitialState:function(){return Ke({isSupported:Fe,isStreaming:!1,useStreaming:!0,hasError:!1})},initialize:function(){this.on(qe.STREAM_START,s),this.on(qe.STREAM_STOP,c),this.on(qe.STREAM_ERROR,f),this.on(qe.LOG_OUT,h)}}),Be=Me({API_FETCH_ALL_START:null,API_FETCH_ALL_SUCCESS:null,API_FETCH_ALL_FAIL:null,SYNC_SCHEDULED:null,SYNC_SCHEDULE_CANCELLED:null}),Je=ze.Store,We=new Je({getInitialState:function(){return!0},initialize:function(){this.on(Be.API_FETCH_ALL_START,(function(){return!0})),this.on(Be.API_FETCH_ALL_SUCCESS,(function(){return!1})),this.on(Be.API_FETCH_ALL_FAIL,(function(){return!1})),this.on(Be.LOG_OUT,(function(){return!1}))}}),Xe=ze.Store,Qe=new Xe({getInitialState:function(){return!1},initialize:function(){this.on(Be.SYNC_SCHEDULED,(function(){return!0})),this.on(Be.SYNC_SCHEDULE_CANCELLED,(function(){return!1})),this.on(Be.LOG_OUT,(function(){return!1}))}}),Ze=Me({API_FETCH_SUCCESS:null,API_FETCH_START:null,API_FETCH_FAIL:null,API_SAVE_SUCCESS:null,API_SAVE_START:null,API_SAVE_FAIL:null,API_DELETE_SUCCESS:null,API_DELETE_START:null,API_DELETE_FAIL:null,LOG_OUT:null}),$e=ze.Store,tn=ze.toImmutable,en=new $e({getInitialState:function(){return tn({})},initialize:function(){var t=this;this.on(Ze.API_FETCH_SUCCESS,l),this.on(Ze.API_SAVE_SUCCESS,l),this.on(Ze.API_DELETE_SUCCESS,p),this.on(Ze.LOG_OUT,(function(){return 
t.getInitialState()}))}}),nn=Object.prototype.hasOwnProperty,rn=Object.prototype.propertyIsEnumerable,on=d()?Object.assign:function(t,e){for(var n,r,i=arguments,o=_(t),u=1;u199&&u.status<300?t(e):n(e)},u.onerror=function(){return n({})},r?(u.setRequestHeader("Content-Type","application/json;charset=UTF-8"),u.send(JSON.stringify(r))):u.send()})}function D(t,e){var n=e.message;return t.set(t.size,n)}function z(){return zn.getInitialState()}function R(t,e){t.dispatch(An.NOTIFICATION_CREATED,{message:e})}function L(t){t.registerStores({notifications:zn})}function M(t,e){if("lock"===t)return!0;if("garage_door"===t)return!0;var n=e.get(t);return!!n&&n.services.has("turn_on")}function j(t,e){return!!t&&("group"===t.domain?"on"===t.state||"off"===t.state:M(t.domain,e))}function N(t,e){return[rr(t),function(t){return!!t&&t.services.has(e)}]}function k(t){return[wn.byId(t),nr,j]}function U(t,e,n){function r(){var c=(new Date).getTime()-a;c0?i=setTimeout(r,e-c):(i=null,n||(s=t.apply(u,o),i||(u=o=null)))}var i,o,u,a,s;null==e&&(e=100);var c=function(){u=this,o=arguments,a=(new Date).getTime();var c=n&&!i;return i||(i=setTimeout(r,e)),c&&(s=t.apply(u,o),u=o=null),s};return c.clear=function(){i&&(clearTimeout(i),i=null)},c}function P(t,e){var n=e.component;return t.push(n)}function H(t,e){var n=e.components;return dr(n)}function x(){return vr.getInitialState()}function V(t,e){var n=e.latitude,r=e.longitude,i=e.location_name,o=e.unit_system,u=e.time_zone,a=e.config_dir,s=e.version;return Sr({latitude:n,longitude:r,location_name:i,unit_system:o,time_zone:u,config_dir:a,serverVersion:s})}function F(){return gr.getInitialState()}function q(t,e){t.dispatch(pr.SERVER_CONFIG_LOADED,e)}function G(t){ln(t,"GET","config").then((function(e){return q(t,e)}))}function K(t,e){t.dispatch(pr.COMPONENT_LOADED,{component:e})}function B(t){return[["serverComponent"],function(e){return e.contains(t)}]}function Y(t){t.registerStores({serverComponent:vr,serverConfig:gr})}function J(t,e){var n=e.pane;return n}function W(){return Rr.getInitialState()}function X(t,e){var n=e.panels;return Mr(n)}function Q(){return jr.getInitialState()}function Z(t,e){var n=e.show;return!!n}function $(){return kr.getInitialState()}function tt(t,e){t.dispatch(Dr.SHOW_SIDEBAR,{show:e})}function et(t,e){t.dispatch(Dr.NAVIGATE,{pane:e})}function nt(t,e){t.dispatch(Dr.PANELS_LOADED,{panels:e})}function rt(t,e){var n=e.entityId;return n}function it(){return Kr.getInitialState()}function ot(t,e){t.dispatch(qr.SELECT_ENTITY,{entityId:e})}function ut(t){t.dispatch(qr.SELECT_ENTITY,{entityId:null})}function at(t){return!t||(new Date).getTime()-t>6e4}function st(t,e){var n=e.date;return n.toISOString()}function ct(){return Wr.getInitialState()}function ft(t,e){var n=e.date,r=e.stateHistory;return 0===r.length?t.set(n,Qr({})):t.withMutations((function(t){r.forEach((function(e){return t.setIn([n,e[0].entity_id],Qr(e.map(yn.fromJSON)))}))}))}function ht(){return Zr.getInitialState()}function lt(t,e){var n=e.stateHistory;return t.withMutations((function(t){n.forEach((function(e){return t.set(e[0].entity_id,ni(e.map(yn.fromJSON)))}))}))}function pt(){return ri.getInitialState()}function _t(t,e){var n=e.stateHistory,r=(new Date).getTime();return t.withMutations((function(t){n.forEach((function(e){return t.set(e[0].entity_id,r)})),history.length>1&&t.set(ui,r)}))}function dt(){return ai.getInitialState()}function vt(t,e){t.dispatch(Yr.ENTITY_HISTORY_DATE_SELECTED,{date:e})}function yt(t,e){void 
0===e&&(e=null),t.dispatch(Yr.RECENT_ENTITY_HISTORY_FETCH_START,{});var n="history/period";return null!==e&&(n+="?filter_entity_id="+e),ln(t,"GET",n).then((function(e){return t.dispatch(Yr.RECENT_ENTITY_HISTORY_FETCH_SUCCESS,{stateHistory:e})}),(function(){return t.dispatch(Yr.RECENT_ENTITY_HISTORY_FETCH_ERROR,{})}))}function St(t,e){return t.dispatch(Yr.ENTITY_HISTORY_FETCH_START,{date:e}),ln(t,"GET","history/period/"+e).then((function(n){return t.dispatch(Yr.ENTITY_HISTORY_FETCH_SUCCESS,{date:e,stateHistory:n})}),(function(){return t.dispatch(Yr.ENTITY_HISTORY_FETCH_ERROR,{})}))}function gt(t){var e=t.evaluate(fi);return St(t,e)}function mt(t){t.registerStores({currentEntityHistoryDate:Wr,entityHistory:Zr,isLoadingEntityHistory:ti,recentEntityHistory:ri,recentEntityHistoryUpdated:ai})}function Et(t){t.registerStores({moreInfoEntityId:Kr})}function It(t,e){var n=e.model,r=e.result,i=e.params;if(null===t||"entity"!==n.entity||!i.replace)return t;for(var o=0;oau}function se(t){t.registerStores({currentLogbookDate:Yo,isLoadingLogbookEntries:Wo,logbookEntries:eu,logbookEntriesUpdated:iu})}function ce(t){return t.set("active",!0)}function fe(t){return t.set("active",!1)}function he(){return gu.getInitialState()}function le(t){return navigator.serviceWorker.getRegistration().then((function(t){if(!t)throw new Error("No service worker registered.");return t.pushManager.subscribe({userVisibleOnly:!0})})).then((function(e){var n;return n=navigator.userAgent.toLowerCase().indexOf("firefox")>-1?"firefox":"chrome",ln(t,"POST","notify.html5",{subscription:e,browser:n}).then((function(){return t.dispatch(vu.PUSH_NOTIFICATIONS_SUBSCRIBE,{})})).then((function(){return!0}))})).catch((function(e){var n;return n=e.message&&e.message.indexOf("gcm_sender_id")!==-1?"Please setup the notify.html5 platform.":"Notification registration failed.",console.error(e),Nn.createNotification(t,n),!1}))}function pe(t){return navigator.serviceWorker.getRegistration().then((function(t){if(!t)throw new Error("No service worker registered");return t.pushManager.subscribe({userVisibleOnly:!0})})).then((function(e){return ln(t,"DELETE","notify.html5",{subscription:e}).then((function(){return e.unsubscribe()})).then((function(){return t.dispatch(vu.PUSH_NOTIFICATIONS_UNSUBSCRIBE,{})})).then((function(){return!0}))})).catch((function(e){var n="Failed unsubscribing for push notifications.";return console.error(e),Nn.createNotification(t,n),!1}))}function _e(t){t.registerStores({pushNotifications:gu})}function de(t,e){return ln(t,"POST","template",{template:e})}function ve(t){return t.set("isListening",!0)}function ye(t,e){var n=e.interimTranscript,r=e.finalTranscript;return t.withMutations((function(t){return t.set("isListening",!0).set("isTransmitting",!1).set("interimTranscript",n).set("finalTranscript",r)}))}function Se(t,e){var n=e.finalTranscript;return t.withMutations((function(t){return t.set("isListening",!1).set("isTransmitting",!0).set("interimTranscript","").set("finalTranscript",n)}))}function ge(){return Nu.getInitialState()}function me(){return Nu.getInitialState()}function Ee(){return Nu.getInitialState()}function Ie(t){return ku[t.hassId]}function be(t){var e=Ie(t);if(e){var n=e.finalTranscript||e.interimTranscript;t.dispatch(Lu.VOICE_TRANSMITTING,{finalTranscript:n}),ur.callService(t,"conversation","process",{text:n}).then((function(){t.dispatch(Lu.VOICE_DONE)}),(function(){t.dispatch(Lu.VOICE_ERROR)}))}}function Oe(t){var e=Ie(t);e&&(e.recognition.stop(),ku[t.hassId]=!1)}function we(t){be(t),Oe(t)}function 
Te(t){var e=we.bind(null,t);e();var n=new webkitSpeechRecognition;ku[t.hassId]={recognition:n,interimTranscript:"",finalTranscript:""},n.interimResults=!0,n.onstart=function(){return t.dispatch(Lu.VOICE_START)},n.onerror=function(){return t.dispatch(Lu.VOICE_ERROR)},n.onend=e,n.onresult=function(e){var n=Ie(t);if(n){for(var r="",i="",o=e.resultIndex;o>>0;if(""+n!==e||4294967295===n)return NaN;e=n}return e<0?_(t)+e:e}function v(){return!0}function y(t,e,n){return(0===t||void 0!==n&&t<=-n)&&(void 0===e||void 0!==n&&e>=n)}function S(t,e){return m(t,e,0)}function g(t,e){return m(t,e,e)}function m(t,e,n){return void 0===t?n:t<0?Math.max(0,e+t):void 0===e?t:Math.min(e,t)}function E(t){this.next=t}function I(t,e,n,r){var i=0===t?e:1===t?n:[e,n];return r?r.value=i:r={value:i,done:!1},r}function b(){return{value:void 0,done:!0}}function O(t){return!!A(t)}function w(t){return t&&"function"==typeof t.next}function T(t){var e=A(t);return e&&e.call(t)}function A(t){var e=t&&(bn&&t[bn]||t[On]);if("function"==typeof e)return e}function C(t){return t&&"number"==typeof t.length}function D(t){return null===t||void 0===t?P():o(t)?t.toSeq():V(t)}function z(t){return null===t||void 0===t?P().toKeyedSeq():o(t)?u(t)?t.toSeq():t.fromEntrySeq():H(t)}function R(t){return null===t||void 0===t?P():o(t)?u(t)?t.entrySeq():t.toIndexedSeq():x(t)}function L(t){return(null===t||void 0===t?P():o(t)?u(t)?t.entrySeq():t:x(t)).toSetSeq()}function M(t){this._array=t,this.size=t.length}function j(t){var e=Object.keys(t);this._object=t,this._keys=e,this.size=e.length}function N(t){this._iterable=t,this.size=t.length||t.size}function k(t){this._iterator=t,this._iteratorCache=[]}function U(t){return!(!t||!t[Tn])}function P(){return An||(An=new M([]))}function H(t){var e=Array.isArray(t)?new M(t).fromEntrySeq():w(t)?new k(t).fromEntrySeq():O(t)?new N(t).fromEntrySeq():"object"==typeof t?new j(t):void 0;if(!e)throw new TypeError("Expected Array or iterable object of [k, v] entries, or keyed object: "+t);return e}function x(t){var e=F(t);if(!e)throw new TypeError("Expected Array or iterable object of values: "+t);return e}function V(t){var e=F(t)||"object"==typeof t&&new j(t);if(!e)throw new TypeError("Expected Array or iterable object of values, or keyed object: "+t);return e}function F(t){return C(t)?new M(t):w(t)?new k(t):O(t)?new N(t):void 0}function q(t,e,n,r){var i=t._cache;if(i){for(var o=i.length-1,u=0;u<=o;u++){var a=i[n?o-u:u];if(e(a[1],r?a[0]:u,t)===!1)return u+1}return u}return t.__iterateUncached(e,n)}function G(t,e,n,r){var i=t._cache;if(i){var o=i.length-1,u=0;return new E(function(){var t=i[n?o-u:u];return u++>o?b():I(e,r?t[0]:u-1,t[1])})}return t.__iteratorUncached(e,n)}function K(t,e){return e?B(e,t,"",{"":t}):Y(t)}function B(t,e,n,r){return Array.isArray(e)?t.call(r,n,R(e).map((function(n,r){return B(t,n,r,e)}))):J(e)?t.call(r,n,z(e).map((function(n,r){return B(t,n,r,e)}))):e}function Y(t){return Array.isArray(t)?R(t).map(Y).toList():J(t)?z(t).map(Y).toMap():t}function J(t){return t&&(t.constructor===Object||void 0===t.constructor)}function W(t,e){if(t===e||t!==t&&e!==e)return!0;if(!t||!e)return!1;if("function"==typeof t.valueOf&&"function"==typeof e.valueOf){if(t=t.valueOf(),e=e.valueOf(),t===e||t!==t&&e!==e)return!0;if(!t||!e)return!1}return!("function"!=typeof t.equals||"function"!=typeof e.equals||!t.equals(e))}function X(t,e){if(t===e)return!0;if(!o(e)||void 0!==t.size&&void 0!==e.size&&t.size!==e.size||void 0!==t.__hash&&void 
0!==e.__hash&&t.__hash!==e.__hash||u(t)!==u(e)||a(t)!==a(e)||c(t)!==c(e))return!1;if(0===t.size&&0===e.size)return!0;var n=!s(t);if(c(t)){var r=t.entries();return e.every((function(t,e){var i=r.next().value;return i&&W(i[1],t)&&(n||W(i[0],e))}))&&r.next().done}var i=!1;if(void 0===t.size)if(void 0===e.size)"function"==typeof t.cacheResult&&t.cacheResult();else{i=!0;var f=t;t=e,e=f}var h=!0,l=e.__iterate((function(e,r){if(n?!t.has(e):i?!W(e,t.get(r,yn)):!W(t.get(r,yn),e))return h=!1,!1}));return h&&t.size===l}function Q(t,e){if(!(this instanceof Q))return new Q(t,e);if(this._value=t,this.size=void 0===e?1/0:Math.max(0,e),0===this.size){if(Cn)return Cn;Cn=this}}function Z(t,e){if(!t)throw new Error(e)}function $(t,e,n){if(!(this instanceof $))return new $(t,e,n);if(Z(0!==n,"Cannot step a Range by 0"),t=t||0,void 0===e&&(e=1/0),n=void 0===n?1:Math.abs(n),e>>1&1073741824|3221225471&t}function ot(t){if(t===!1||null===t||void 0===t)return 0;if("function"==typeof t.valueOf&&(t=t.valueOf(),t===!1||null===t||void 0===t))return 0;if(t===!0)return 1;var e=typeof t;if("number"===e){if(t!==t||t===1/0)return 0;var n=0|t;for(n!==t&&(n^=4294967295*t);t>4294967295;)t/=4294967295,n^=t;return it(n)}if("string"===e)return t.length>Un?ut(t):at(t);if("function"==typeof t.hashCode)return t.hashCode();if("object"===e)return st(t);if("function"==typeof t.toString)return at(t.toString());throw new Error("Value type "+e+" cannot be hashed.")}function ut(t){var e=xn[t];return void 0===e&&(e=at(t),Hn===Pn&&(Hn=0,xn={}),Hn++,xn[t]=e),e}function at(t){for(var e=0,n=0;n0)switch(t.nodeType){case 1:return t.uniqueID;case 9:return t.documentElement&&t.documentElement.uniqueID}}function ft(t){Z(t!==1/0,"Cannot perform this action with an infinite size.")}function ht(t){return null===t||void 0===t?It():lt(t)&&!c(t)?t:It().withMutations((function(e){var r=n(t);ft(r.size),r.forEach((function(t,n){return e.set(n,t)}))}))}function lt(t){return!(!t||!t[Vn])}function pt(t,e){this.ownerID=t,this.entries=e}function _t(t,e,n){this.ownerID=t,this.bitmap=e,this.nodes=n}function dt(t,e,n){this.ownerID=t,this.count=e,this.nodes=n}function vt(t,e,n){this.ownerID=t,this.keyHash=e,this.entries=n}function yt(t,e,n){this.ownerID=t,this.keyHash=e,this.entry=n}function St(t,e,n){this._type=e,this._reverse=n,this._stack=t._root&&mt(t._root)}function gt(t,e){return I(t,e[0],e[1])}function mt(t,e){return{node:t,index:0,__prev:e}}function Et(t,e,n,r){var i=Object.create(Fn);return i.size=t,i._root=e,i.__ownerID=n,i.__hash=r,i.__altered=!1,i}function It(){return qn||(qn=Et(0))}function bt(t,e,n){var r,i;if(t._root){var o=f(Sn),u=f(gn);if(r=Ot(t._root,t.__ownerID,0,void 0,e,n,o,u),!u.value)return t;i=t.size+(o.value?n===yn?-1:1:0)}else{if(n===yn)return t;i=1,r=new pt(t.__ownerID,[[e,n]])}return t.__ownerID?(t.size=i,t._root=r,t.__hash=void 0,t.__altered=!0,t):r?Et(i,r):It()}function Ot(t,e,n,r,i,o,u,a){return t?t.update(e,n,r,i,o,u,a):o===yn?t:(h(a),h(u),new yt(e,r,[i,o]))}function wt(t){return t.constructor===yt||t.constructor===vt}function Tt(t,e,n,r,i){if(t.keyHash===r)return new vt(e,r,[t.entry,i]);var o,u=(0===n?t.keyHash:t.keyHash>>>n)&vn,a=(0===n?r:r>>>n)&vn,s=u===a?[Tt(t,e,n+_n,r,i)]:(o=new yt(e,r,i),u>>=1)u[a]=1&n?e[o++]:void 0;return u[r]=i,new dt(t,o+1,u)}function zt(t,e,r){for(var i=[],u=0;u>1&1431655765,t=(858993459&t)+(t>>2&858993459),t=t+(t>>4)&252645135,t+=t>>8,t+=t>>16,127&t}function kt(t,e,n,r){var i=r?t:p(t);return i[e]=n,i}function Ut(t,e,n,r){var i=t.length+1;if(r&&e+1===i)return t[e]=n,t;for(var o=new 
Array(i),u=0,a=0;a0&&io?0:o-n,c=u-n;return c>dn&&(c=dn),function(){if(i===c)return Xn;var t=e?--c:i++;return r&&r[t]}}function i(t,r,i){var a,s=t&&t.array,c=i>o?0:o-i>>r,f=(u-i>>r)+1;return f>dn&&(f=dn),function(){for(;;){if(a){var t=a();if(t!==Xn)return t;a=null}if(c===f)return Xn;var o=e?--f:c++;a=n(s&&s[o],r-_n,i+(o<=t.size||e<0)return t.withMutations((function(t){e<0?Wt(t,e).set(0,n):Wt(t,0,e+1).set(e,n)}));e+=t._origin;var r=t._tail,i=t._root,o=f(gn);return e>=Qt(t._capacity)?r=Bt(r,t.__ownerID,0,e,n,o):i=Bt(i,t.__ownerID,t._level,e,n,o),o.value?t.__ownerID?(t._root=i,t._tail=r,t.__hash=void 0,t.__altered=!0,t):qt(t._origin,t._capacity,t._level,i,r):t}function Bt(t,e,n,r,i,o){var u=r>>>n&vn,a=t&&u0){var c=t&&t.array[u],f=Bt(c,e,n-_n,r,i,o);return f===c?t:(s=Yt(t,e),s.array[u]=f,s)}return a&&t.array[u]===i?t:(h(o),s=Yt(t,e),void 0===i&&u===s.array.length-1?s.array.pop():s.array[u]=i,s)}function Yt(t,e){return e&&t&&e===t.ownerID?t:new Vt(t?t.array.slice():[],e)}function Jt(t,e){if(e>=Qt(t._capacity))return t._tail;if(e<1<0;)n=n.array[e>>>r&vn],r-=_n;return n}}function Wt(t,e,n){void 0!==e&&(e=0|e),void 0!==n&&(n=0|n);var r=t.__ownerID||new l,i=t._origin,o=t._capacity,u=i+e,a=void 0===n?o:n<0?o+n:i+n;if(u===i&&a===o)return t;if(u>=a)return t.clear();for(var s=t._level,c=t._root,f=0;u+f<0;)c=new Vt(c&&c.array.length?[void 0,c]:[],r),s+=_n,f+=1<=1<h?new Vt([],r):_;if(_&&p>h&&u_n;y-=_n){var S=h>>>y&vn;v=v.array[S]=Yt(v.array[S],r)}v.array[h>>>_n&vn]=_}if(a=p)u-=p,a-=p,s=_n,c=null,d=d&&d.removeBefore(r,0,u);else if(u>i||p>>s&vn;if(g!==p>>>s&vn)break;g&&(f+=(1<i&&(c=c.removeBefore(r,s,u-f)),c&&pu&&(u=c.size),o(s)||(c=c.map((function(t){return K(t)}))),i.push(c)}return u>t.size&&(t=t.setSize(u)),Mt(t,e,i)}function Qt(t){return t>>_n<<_n}function Zt(t){return null===t||void 0===t?ee():$t(t)?t:ee().withMutations((function(e){var r=n(t);ft(r.size),r.forEach((function(t,n){return e.set(n,t)}))}))}function $t(t){return lt(t)&&c(t)}function te(t,e,n,r){var i=Object.create(Zt.prototype);return i.size=t?t.size:0,i._map=t,i._list=e,i.__ownerID=n,i.__hash=r,i}function ee(){return Qn||(Qn=te(It(),Gt()))}function ne(t,e,n){var r,i,o=t._map,u=t._list,a=o.get(e),s=void 0!==a;if(n===yn){if(!s)return t;u.size>=dn&&u.size>=2*o.size?(i=u.filter((function(t,e){return void 0!==t&&a!==e})),r=i.toKeyedSeq().map((function(t){return t[0]})).flip().toMap(),t.__ownerID&&(r.__ownerID=i.__ownerID=t.__ownerID)):(r=o.remove(e),i=a===u.size-1?u.pop():u.set(a,void 0))}else if(s){if(n===u.get(a)[1])return t;r=o,i=u.set(a,[e,n])}else r=o.set(e,u.size),i=u.set(u.size,[e,n]);return t.__ownerID?(t.size=r.size,t._map=r,t._list=i,t.__hash=void 0,t):te(r,i)}function re(t,e){this._iter=t,this._useKeys=e,this.size=t.size}function ie(t){this._iter=t,this.size=t.size}function oe(t){this._iter=t,this.size=t.size}function ue(t){this._iter=t,this.size=t.size}function ae(t){var e=Ce(t);return e._iter=t,e.size=t.size,e.flip=function(){return t},e.reverse=function(){var e=t.reverse.apply(this);return e.flip=function(){return t.reverse()},e},e.has=function(e){return t.includes(e)},e.includes=function(e){return t.has(e)},e.cacheResult=De,e.__iterateUncached=function(e,n){var r=this;return t.__iterate((function(t,n){return e(n,t,r)!==!1}),n)},e.__iteratorUncached=function(e,n){if(e===In){var r=t.__iterator(e,n);return new E(function(){var t=r.next();if(!t.done){var e=t.value[0];t.value[0]=t.value[1],t.value[1]=e}return t})}return t.__iterator(e===En?mn:En,n)},e}function se(t,e,n){var r=Ce(t);return r.size=t.size,r.has=function(e){return 
t.has(e)},r.get=function(r,i){var o=t.get(r,yn);return o===yn?i:e.call(n,o,r,t)},r.__iterateUncached=function(r,i){var o=this;return t.__iterate((function(t,i,u){return r(e.call(n,t,i,u),i,o)!==!1}),i)},r.__iteratorUncached=function(r,i){var o=t.__iterator(In,i);return new E(function(){var i=o.next();if(i.done)return i;var u=i.value,a=u[0];return I(r,a,e.call(n,u[1],a,t),i)})},r}function ce(t,e){var n=Ce(t);return n._iter=t,n.size=t.size,n.reverse=function(){return t},t.flip&&(n.flip=function(){var e=ae(t);return e.reverse=function(){return t.flip()},e}),n.get=function(n,r){return t.get(e?n:-1-n,r)},n.has=function(n){return t.has(e?n:-1-n)},n.includes=function(e){return t.includes(e)},n.cacheResult=De,n.__iterate=function(e,n){var r=this;return t.__iterate((function(t,n){return e(t,n,r)}),!n)},n.__iterator=function(e,n){return t.__iterator(e,!n)},n}function fe(t,e,n,r){var i=Ce(t);return r&&(i.has=function(r){var i=t.get(r,yn);return i!==yn&&!!e.call(n,i,r,t)},i.get=function(r,i){var o=t.get(r,yn);return o!==yn&&e.call(n,o,r,t)?o:i}),i.__iterateUncached=function(i,o){var u=this,a=0;return t.__iterate((function(t,o,s){if(e.call(n,t,o,s))return a++,i(t,r?o:a-1,u)}),o),a},i.__iteratorUncached=function(i,o){var u=t.__iterator(In,o),a=0;return new E(function(){for(;;){var o=u.next();if(o.done)return o;var s=o.value,c=s[0],f=s[1];if(e.call(n,f,c,t))return I(i,r?c:a++,f,o)}})},i}function he(t,e,n){var r=ht().asMutable();return t.__iterate((function(i,o){r.update(e.call(n,i,o,t),0,(function(t){return t+1}))})),r.asImmutable()}function le(t,e,n){var r=u(t),i=(c(t)?Zt():ht()).asMutable();t.__iterate((function(o,u){i.update(e.call(n,o,u,t),(function(t){return t=t||[],t.push(r?[u,o]:o),t}))}));var o=Ae(t);return i.map((function(e){return Oe(t,o(e))}))}function pe(t,e,n,r){var i=t.size;if(void 0!==e&&(e=0|e),void 0!==n&&(n=n===1/0?i:0|n),y(e,n,i))return t;var o=S(e,i),u=g(n,i);if(o!==o||u!==u)return pe(t.toSeq().cacheResult(),e,n,r);var a,s=u-o;s===s&&(a=s<0?0:s);var c=Ce(t);return c.size=0===a?a:t.size&&a||void 0,!r&&U(t)&&a>=0&&(c.get=function(e,n){return e=d(this,e),e>=0&&ea)return b();var t=i.next();return r||e===En?t:e===mn?I(e,s-1,void 0,t):I(e,s-1,t.value[1],t)})},c}function _e(t,e,n){var r=Ce(t);return r.__iterateUncached=function(r,i){var o=this;if(i)return this.cacheResult().__iterate(r,i);var u=0;return t.__iterate((function(t,i,a){return e.call(n,t,i,a)&&++u&&r(t,i,o)})),u},r.__iteratorUncached=function(r,i){var o=this;if(i)return this.cacheResult().__iterator(r,i);var u=t.__iterator(In,i),a=!0;return new E(function(){if(!a)return b();var t=u.next();if(t.done)return t;var i=t.value,s=i[0],c=i[1];return e.call(n,c,s,o)?r===In?t:I(r,s,c,t):(a=!1,b())})},r}function de(t,e,n,r){var i=Ce(t);return i.__iterateUncached=function(i,o){var u=this;if(o)return this.cacheResult().__iterate(i,o);var a=!0,s=0;return t.__iterate((function(t,o,c){if(!a||!(a=e.call(n,t,o,c)))return s++,i(t,r?o:s-1,u)})),s},i.__iteratorUncached=function(i,o){var u=this;if(o)return this.cacheResult().__iterator(i,o);var a=t.__iterator(In,o),s=!0,c=0;return new E(function(){var t,o,f;do{if(t=a.next(),t.done)return r||i===En?t:i===mn?I(i,c++,void 0,t):I(i,c++,t.value[1],t);var h=t.value;o=h[0],f=h[1],s&&(s=e.call(n,f,o,u))}while(s);return i===In?t:I(i,o,f,t)})},i}function ve(t,e){var r=u(t),i=[t].concat(e).map((function(t){return o(t)?r&&(t=n(t)):t=r?H(t):x(Array.isArray(t)?t:[t]),t})).filter((function(t){return 0!==t.size}));if(0===i.length)return t;if(1===i.length){var s=i[0];if(s===t||r&&u(s)||a(t)&&a(s))return s}var c=new 
M(i);return r?c=c.toKeyedSeq():a(t)||(c=c.toSetSeq()),c=c.flatten(!0),c.size=i.reduce((function(t,e){if(void 0!==t){var n=e.size;if(void 0!==n)return t+n}}),0),c}function ye(t,e,n){var r=Ce(t);return r.__iterateUncached=function(r,i){function u(t,c){var f=this;t.__iterate((function(t,i){return(!e||c0}function be(t,n,r){var i=Ce(t);return i.size=new M(r).map((function(t){return t.size})).min(),i.__iterate=function(t,e){for(var n,r=this,i=this.__iterator(En,e),o=0;!(n=i.next()).done&&t(n.value,o++,r)!==!1;);return o},i.__iteratorUncached=function(t,i){var o=r.map((function(t){return t=e(t),T(i?t.reverse():t)})),u=0,a=!1;return new E(function(){var e;return a||(e=o.map((function(t){return t.next()})),a=e.some((function(t){return t.done}))),a?b():I(t,u++,n.apply(null,e.map((function(t){return t.value}))))})},i}function Oe(t,e){return U(t)?e:t.constructor(e)}function we(t){if(t!==Object(t))throw new TypeError("Expected [K, V] tuple: "+t)}function Te(t){return ft(t.size),_(t)}function Ae(t){return u(t)?n:a(t)?r:i}function Ce(t){return Object.create((u(t)?z:a(t)?R:L).prototype)}function De(){return this._iter.cacheResult?(this._iter.cacheResult(),this.size=this._iter.size,this):D.prototype.cacheResult.call(this)}function ze(t,e){return t>e?1:te?-1:0}function on(t){if(t.size===1/0)return 0;var e=c(t),n=u(t),r=e?1:0,i=t.__iterate(n?e?function(t,e){r=31*r+an(ot(t),ot(e))|0}:function(t,e){r=r+an(ot(t),ot(e))|0}:e?function(t){r=31*r+ot(t)|0}:function(t){r=r+ot(t)|0});return un(i,r)}function un(t,e){return e=Rn(e,3432918353),e=Rn(e<<15|e>>>-15,461845907),e=Rn(e<<13|e>>>-13,5),e=(e+3864292196|0)^t,e=Rn(e^e>>>16,2246822507),e=Rn(e^e>>>13,3266489909),e=it(e^e>>>16)}function an(t,e){return t^e+2654435769+(t<<6)+(t>>2)|0}var sn=Array.prototype.slice;t(n,e),t(r,e),t(i,e),e.isIterable=o,e.isKeyed=u,e.isIndexed=a,e.isAssociative=s,e.isOrdered=c,e.Keyed=n,e.Indexed=r,e.Set=i;var cn="@@__IMMUTABLE_ITERABLE__@@",fn="@@__IMMUTABLE_KEYED__@@",hn="@@__IMMUTABLE_INDEXED__@@",ln="@@__IMMUTABLE_ORDERED__@@",pn="delete",_n=5,dn=1<<_n,vn=dn-1,yn={},Sn={value:!1},gn={value:!1},mn=0,En=1,In=2,bn="function"==typeof Symbol&&Symbol.iterator,On="@@iterator",wn=bn||On;E.prototype.toString=function(){return"[Iterator]"},E.KEYS=mn,E.VALUES=En,E.ENTRIES=In,E.prototype.inspect=E.prototype.toSource=function(){return this.toString()},E.prototype[wn]=function(){return this},t(D,e),D.of=function(){return D(arguments)},D.prototype.toSeq=function(){return this},D.prototype.toString=function(){return this.__toString("Seq {","}")},D.prototype.cacheResult=function(){return!this._cache&&this.__iterateUncached&&(this._cache=this.entrySeq().toArray(),this.size=this._cache.length),this},D.prototype.__iterate=function(t,e){return q(this,t,e,!0)},D.prototype.__iterator=function(t,e){return G(this,t,e,!0)},t(z,D),z.prototype.toKeyedSeq=function(){return this},t(R,D),R.of=function(){return R(arguments)},R.prototype.toIndexedSeq=function(){return this},R.prototype.toString=function(){return this.__toString("Seq [","]")},R.prototype.__iterate=function(t,e){return q(this,t,e,!1)},R.prototype.__iterator=function(t,e){return G(this,t,e,!1)},t(L,D),L.of=function(){return L(arguments)},L.prototype.toSetSeq=function(){return this},D.isSeq=U,D.Keyed=z,D.Set=L,D.Indexed=R;var Tn="@@__IMMUTABLE_SEQ__@@";D.prototype[Tn]=!0,t(M,R),M.prototype.get=function(t,e){return this.has(t)?this._array[d(this,t)]:e},M.prototype.__iterate=function(t,e){for(var n=this,r=this._array,i=r.length-1,o=0;o<=i;o++)if(t(r[e?i-o:o],o,n)===!1)return o+1;return 
o},M.prototype.__iterator=function(t,e){var n=this._array,r=n.length-1,i=0;return new E(function(){return i>r?b():I(t,i,n[e?r-i++:i++])})},t(j,z),j.prototype.get=function(t,e){return void 0===e||this.has(t)?this._object[t]:e},j.prototype.has=function(t){return this._object.hasOwnProperty(t)},j.prototype.__iterate=function(t,e){for(var n=this,r=this._object,i=this._keys,o=i.length-1,u=0;u<=o;u++){var a=i[e?o-u:u];if(t(r[a],a,n)===!1)return u+1}return u},j.prototype.__iterator=function(t,e){var n=this._object,r=this._keys,i=r.length-1,o=0;return new E(function(){var u=r[e?i-o:o];return o++>i?b():I(t,u,n[u])})},j.prototype[ln]=!0,t(N,R),N.prototype.__iterateUncached=function(t,e){var n=this;if(e)return this.cacheResult().__iterate(t,e);var r=this._iterable,i=T(r),o=0;if(w(i))for(var u;!(u=i.next()).done&&t(u.value,o++,n)!==!1;);return o},N.prototype.__iteratorUncached=function(t,e){if(e)return this.cacheResult().__iterator(t,e);var n=this._iterable,r=T(n);if(!w(r))return new E(b);var i=0;return new E(function(){var e=r.next();return e.done?e:I(t,i++,e.value)})},t(k,R),k.prototype.__iterateUncached=function(t,e){var n=this;if(e)return this.cacheResult().__iterate(t,e);for(var r=this._iterator,i=this._iteratorCache,o=0;o=r.length){var e=n.next();if(e.done)return e;r[i]=e.value}return I(t,i,r[i++])})};var An;t(Q,R),Q.prototype.toString=function(){return 0===this.size?"Repeat []":"Repeat [ "+this._value+" "+this.size+" times ]"},Q.prototype.get=function(t,e){return this.has(t)?this._value:e},Q.prototype.includes=function(t){return W(this._value,t)},Q.prototype.slice=function(t,e){var n=this.size;return y(t,e,n)?this:new Q(this._value,g(e,n)-S(t,n))},Q.prototype.reverse=function(){return this},Q.prototype.indexOf=function(t){return W(this._value,t)?0:-1},Q.prototype.lastIndexOf=function(t){return W(this._value,t)?this.size:-1},Q.prototype.__iterate=function(t,e){for(var n=this,r=0;r=0&&e=0&&nn?b():I(t,o++,u)})},$.prototype.equals=function(t){return t instanceof $?this._start===t._start&&this._end===t._end&&this._step===t._step:X(this,t)};var Dn;t(tt,e),t(et,tt),t(nt,tt),t(rt,tt),tt.Keyed=et,tt.Indexed=nt,tt.Set=rt;var zn,Rn="function"==typeof Math.imul&&Math.imul(4294967295,2)===-2?Math.imul:function(t,e){t=0|t,e=0|e;var n=65535&t,r=65535&e;return n*r+((t>>>16)*r+n*(e>>>16)<<16>>>0)|0},Ln=Object.isExtensible,Mn=(function(){try{return Object.defineProperty({},"@",{}),!0}catch(t){return!1}})(),jn="function"==typeof WeakMap;jn&&(zn=new WeakMap);var Nn=0,kn="__immutablehash__";"function"==typeof Symbol&&(kn=Symbol(kn));var Un=16,Pn=255,Hn=0,xn={};t(ht,et),ht.of=function(){var t=sn.call(arguments,0);return It().withMutations((function(e){for(var n=0;n=t.length)throw new Error("Missing value for key: "+t[n]);e.set(t[n],t[n+1])}}))},ht.prototype.toString=function(){return this.__toString("Map {","}")},ht.prototype.get=function(t,e){return this._root?this._root.get(0,void 0,t,e):e},ht.prototype.set=function(t,e){return bt(this,t,e)},ht.prototype.setIn=function(t,e){return this.updateIn(t,yn,(function(){return e}))},ht.prototype.remove=function(t){return bt(this,t,yn)},ht.prototype.deleteIn=function(t){return this.updateIn(t,(function(){return yn}))},ht.prototype.update=function(t,e,n){return 1===arguments.length?t(this):this.updateIn([t],e,n)},ht.prototype.updateIn=function(t,e,n){n||(n=e,e=void 0);var r=jt(this,Re(t),e,n);return r===yn?void 0:r},ht.prototype.clear=function(){return 0===this.size?this:this.__ownerID?(this.size=0,this._root=null,this.__hash=void 
0,this.__altered=!0,this):It()},ht.prototype.merge=function(){return zt(this,void 0,arguments)},ht.prototype.mergeWith=function(t){var e=sn.call(arguments,1);return zt(this,t,e)},ht.prototype.mergeIn=function(t){var e=sn.call(arguments,1);return this.updateIn(t,It(),(function(t){return"function"==typeof t.merge?t.merge.apply(t,e):e[e.length-1]}))},ht.prototype.mergeDeep=function(){return zt(this,Rt,arguments)},ht.prototype.mergeDeepWith=function(t){var e=sn.call(arguments,1);return zt(this,Lt(t),e)},ht.prototype.mergeDeepIn=function(t){var e=sn.call(arguments,1);return this.updateIn(t,It(),(function(t){return"function"==typeof t.mergeDeep?t.mergeDeep.apply(t,e):e[e.length-1]}))},ht.prototype.sort=function(t){return Zt(me(this,t))},ht.prototype.sortBy=function(t,e){return Zt(me(this,e,t))},ht.prototype.withMutations=function(t){var e=this.asMutable();return t(e),e.wasAltered()?e.__ensureOwner(this.__ownerID):this},ht.prototype.asMutable=function(){return this.__ownerID?this:this.__ensureOwner(new l)},ht.prototype.asImmutable=function(){return this.__ensureOwner()},ht.prototype.wasAltered=function(){return this.__altered},ht.prototype.__iterator=function(t,e){return new St(this,t,e)},ht.prototype.__iterate=function(t,e){var n=this,r=0;return this._root&&this._root.iterate((function(e){return r++,t(e[1],e[0],n)}),e),r},ht.prototype.__ensureOwner=function(t){return t===this.__ownerID?this:t?Et(this.size,this._root,t,this.__hash):(this.__ownerID=t,this.__altered=!1,this)},ht.isMap=lt;var Vn="@@__IMMUTABLE_MAP__@@",Fn=ht.prototype;Fn[Vn]=!0,Fn[pn]=Fn.remove,Fn.removeIn=Fn.deleteIn,pt.prototype.get=function(t,e,n,r){for(var i=this.entries,o=0,u=i.length;o=Gn)return At(t,s,r,i);var _=t&&t===this.ownerID,d=_?s:p(s);return l?a?c===f-1?d.pop():d[c]=d.pop():d[c]=[r,i]:d.push([r,i]),_?(this.entries=d,this):new pt(t,d)}},_t.prototype.get=function(t,e,n,r){void 0===e&&(e=ot(n));var i=1<<((0===t?e:e>>>t)&vn),o=this.bitmap;return 0===(o&i)?r:this.nodes[Nt(o&i-1)].get(t+_n,e,n,r)},_t.prototype.update=function(t,e,n,r,i,o,u){void 0===n&&(n=ot(r));var a=(0===e?n:n>>>e)&vn,s=1<=Kn)return Dt(t,l,c,a,_);if(f&&!_&&2===l.length&&wt(l[1^h]))return l[1^h];if(f&&_&&1===l.length&&wt(_))return _;var d=t&&t===this.ownerID,v=f?_?c:c^s:c|s,y=f?_?kt(l,h,_,d):Pt(l,h,d):Ut(l,h,_,d);return d?(this.bitmap=v,this.nodes=y,this):new _t(t,v,y)},dt.prototype.get=function(t,e,n,r){void 0===e&&(e=ot(n));var i=(0===t?e:e>>>t)&vn,o=this.nodes[i];return o?o.get(t+_n,e,n,r):r},dt.prototype.update=function(t,e,n,r,i,o,u){void 0===n&&(n=ot(r));var a=(0===e?n:n>>>e)&vn,s=i===yn,c=this.nodes,f=c[a];if(s&&!f)return this;var h=Ot(f,t,e+_n,n,r,i,o,u);if(h===f)return this;var l=this.count;if(f){if(!h&&(l--,l=0&&t>>e&vn;if(r>=this.array.length)return new Vt([],t);var i,o=0===r;if(e>0){var u=this.array[r];if(i=u&&u.removeBefore(t,e-_n,n),i===u&&o)return this}if(o&&!i)return this;var a=Yt(this,t);if(!o)for(var s=0;s>>e&vn;if(r>=this.array.length)return this;var i;if(e>0){var o=this.array[r];if(i=o&&o.removeAfter(t,e-_n,n),i===o&&r===this.array.length-1)return this}var u=Yt(this,t);return u.array.splice(r+1),i&&(u.array[r]=i),u};var Wn,Xn={};t(Zt,ht),Zt.of=function(){return this(arguments)},Zt.prototype.toString=function(){return this.__toString("OrderedMap {","}")},Zt.prototype.get=function(t,e){var n=this._map.get(t);return void 0!==n?this._list.get(n)[1]:e},Zt.prototype.clear=function(){return 0===this.size?this:this.__ownerID?(this.size=0,this._map.clear(),this._list.clear(),this):ee()},Zt.prototype.set=function(t,e){return 
ne(this,t,e)},Zt.prototype.remove=function(t){return ne(this,t,yn)},Zt.prototype.wasAltered=function(){return this._map.wasAltered()||this._list.wasAltered()},Zt.prototype.__iterate=function(t,e){var n=this;return this._list.__iterate((function(e){return e&&t(e[1],e[0],n)}),e)},Zt.prototype.__iterator=function(t,e){return this._list.fromEntrySeq().__iterator(t,e)},Zt.prototype.__ensureOwner=function(t){if(t===this.__ownerID)return this;var e=this._map.__ensureOwner(t),n=this._list.__ensureOwner(t);return t?te(e,n,t,this.__hash):(this.__ownerID=t,this._map=e,this._list=n,this)},Zt.isOrderedMap=$t,Zt.prototype[ln]=!0,Zt.prototype[pn]=Zt.prototype.remove;var Qn;t(re,z),re.prototype.get=function(t,e){return this._iter.get(t,e)},re.prototype.has=function(t){return this._iter.has(t)},re.prototype.valueSeq=function(){return this._iter.valueSeq()},re.prototype.reverse=function(){var t=this,e=ce(this,!0);return this._useKeys||(e.valueSeq=function(){return t._iter.toSeq().reverse()}),e},re.prototype.map=function(t,e){var n=this,r=se(this,t,e);return this._useKeys||(r.valueSeq=function(){return n._iter.toSeq().map(t,e)}),r},re.prototype.__iterate=function(t,e){var n,r=this;return this._iter.__iterate(this._useKeys?function(e,n){return t(e,n,r)}:(n=e?Te(this):0,function(i){return t(i,e?--n:n++,r)}),e)},re.prototype.__iterator=function(t,e){if(this._useKeys)return this._iter.__iterator(t,e);var n=this._iter.__iterator(En,e),r=e?Te(this):0;return new E(function(){var i=n.next();return i.done?i:I(t,e?--r:r++,i.value,i)})},re.prototype[ln]=!0,t(ie,R),ie.prototype.includes=function(t){return this._iter.includes(t)},ie.prototype.__iterate=function(t,e){var n=this,r=0;return this._iter.__iterate((function(e){return t(e,r++,n)}),e)},ie.prototype.__iterator=function(t,e){var n=this._iter.__iterator(En,e),r=0;return new E(function(){var e=n.next();return e.done?e:I(t,r++,e.value,e)})},t(oe,L),oe.prototype.has=function(t){return this._iter.includes(t)},oe.prototype.__iterate=function(t,e){var n=this;return this._iter.__iterate((function(e){return t(e,e,n)}),e)},oe.prototype.__iterator=function(t,e){var n=this._iter.__iterator(En,e);return new E(function(){var e=n.next();return e.done?e:I(t,e.value,e.value,e)})},t(ue,z),ue.prototype.entrySeq=function(){return this._iter.toSeq()},ue.prototype.__iterate=function(t,e){var n=this;return this._iter.__iterate((function(e){if(e){we(e);var r=o(e);return t(r?e.get(1):e[1],r?e.get(0):e[0],n)}}),e)},ue.prototype.__iterator=function(t,e){var n=this._iter.__iterator(En,e);return new E(function(){for(;;){var e=n.next();if(e.done)return e;var r=e.value;if(r){we(r);var i=o(r);return I(t,i?r.get(0):r[0],i?r.get(1):r[1],e)}}})},ie.prototype.cacheResult=re.prototype.cacheResult=oe.prototype.cacheResult=ue.prototype.cacheResult=De,t(Le,et),Le.prototype.toString=function(){return this.__toString(je(this)+" {","}")},Le.prototype.has=function(t){return this._defaultValues.hasOwnProperty(t)},Le.prototype.get=function(t,e){if(!this.has(t))return e;var n=this._defaultValues[t];return this._map?this._map.get(t,n):n},Le.prototype.clear=function(){if(this.__ownerID)return this._map&&this._map.clear(),this;var t=this.constructor;return t._empty||(t._empty=Me(this,It()))},Le.prototype.set=function(t,e){if(!this.has(t))throw new Error('Cannot set unknown key "'+t+'" on '+je(this));if(this._map&&!this._map.has(t)){var n=this._defaultValues[t];if(e===n)return this}var r=this._map&&this._map.set(t,e);return 
this.__ownerID||r===this._map?this:Me(this,r)},Le.prototype.remove=function(t){if(!this.has(t))return this;var e=this._map&&this._map.remove(t);return this.__ownerID||e===this._map?this:Me(this,e)},Le.prototype.wasAltered=function(){return this._map.wasAltered()},Le.prototype.__iterator=function(t,e){var r=this;return n(this._defaultValues).map((function(t,e){return r.get(e)})).__iterator(t,e)},Le.prototype.__iterate=function(t,e){var r=this;return n(this._defaultValues).map((function(t,e){return r.get(e)})).__iterate(t,e)},Le.prototype.__ensureOwner=function(t){if(t===this.__ownerID)return this;var e=this._map&&this._map.__ensureOwner(t);return t?Me(this,e,t):(this.__ownerID=t,this._map=e,this)};var Zn=Le.prototype;Zn[pn]=Zn.remove,Zn.deleteIn=Zn.removeIn=Fn.removeIn,Zn.merge=Fn.merge,Zn.mergeWith=Fn.mergeWith,Zn.mergeIn=Fn.mergeIn,Zn.mergeDeep=Fn.mergeDeep,Zn.mergeDeepWith=Fn.mergeDeepWith,Zn.mergeDeepIn=Fn.mergeDeepIn,Zn.setIn=Fn.setIn,Zn.update=Fn.update,Zn.updateIn=Fn.updateIn,Zn.withMutations=Fn.withMutations,Zn.asMutable=Fn.asMutable,Zn.asImmutable=Fn.asImmutable,t(Ue,rt),Ue.of=function(){return this(arguments)},Ue.fromKeys=function(t){return this(n(t).keySeq())},Ue.prototype.toString=function(){return this.__toString("Set {","}")},Ue.prototype.has=function(t){return this._map.has(t)},Ue.prototype.add=function(t){ +return He(this,this._map.set(t,!0))},Ue.prototype.remove=function(t){return He(this,this._map.remove(t))},Ue.prototype.clear=function(){return He(this,this._map.clear())},Ue.prototype.union=function(){var t=sn.call(arguments,0);return t=t.filter((function(t){return 0!==t.size})),0===t.length?this:0!==this.size||this.__ownerID||1!==t.length?this.withMutations((function(e){for(var n=0;n=0;r--)n={value:t[r],next:n};return this.__ownerID?(this.size=e,this._head=n,this.__hash=void 0,this.__altered=!0,this):Je(e,n)},Be.prototype.pushAll=function(t){if(t=r(t),0===t.size)return this;ft(t.size);var e=this.size,n=this._head;return t.reverse().forEach((function(t){e++,n={value:t,next:n}})),this.__ownerID?(this.size=e,this._head=n,this.__hash=void 0,this.__altered=!0,this):Je(e,n)},Be.prototype.pop=function(){return this.slice(1)},Be.prototype.unshift=function(){return this.push.apply(this,arguments)},Be.prototype.unshiftAll=function(t){return this.pushAll(t)},Be.prototype.shift=function(){return this.pop.apply(this,arguments)},Be.prototype.clear=function(){return 0===this.size?this:this.__ownerID?(this.size=0,this._head=void 0,this.__hash=void 0,this.__altered=!0,this):We()},Be.prototype.slice=function(t,e){if(y(t,e,this.size))return this;var n=S(t,this.size),r=g(e,this.size);if(r!==this.size)return nt.prototype.slice.call(this,t,e);for(var i=this.size-n,o=this._head;n--;)o=o.next;return this.__ownerID?(this.size=i,this._head=o,this.__hash=void 0,this.__altered=!0,this):Je(i,o)},Be.prototype.__ensureOwner=function(t){return t===this.__ownerID?this:t?Je(this.size,this._head,t,this.__hash):(this.__ownerID=t,this.__altered=!1,this)},Be.prototype.__iterate=function(t,e){var n=this;if(e)return this.reverse().__iterate(t);for(var r=0,i=this._head;i&&t(i.value,r++,n)!==!1;)i=i.next;return r},Be.prototype.__iterator=function(t,e){if(e)return this.reverse().__iterator(t);var n=0,r=this._head;return new E(function(){if(r){var e=r.value;return r=r.next,I(t,n++,e)}return b()})},Be.isStack=Ye;var ir="@@__IMMUTABLE_STACK__@@",or=Be.prototype;or[ir]=!0,or.withMutations=Fn.withMutations,or.asMutable=Fn.asMutable,or.asImmutable=Fn.asImmutable,or.wasAltered=Fn.wasAltered;var 
ur;e.Iterator=E,Xe(e,{toArray:function(){ft(this.size);var t=new Array(this.size||0);return this.valueSeq().__iterate((function(e,n){t[n]=e})),t},toIndexedSeq:function(){return new ie(this)},toJS:function(){return this.toSeq().map((function(t){return t&&"function"==typeof t.toJS?t.toJS():t})).__toJS()},toJSON:function(){return this.toSeq().map((function(t){return t&&"function"==typeof t.toJSON?t.toJSON():t})).__toJS()},toKeyedSeq:function(){return new re(this,!0)},toMap:function(){return ht(this.toKeyedSeq())},toObject:function(){ft(this.size);var t={};return this.__iterate((function(e,n){t[n]=e})),t},toOrderedMap:function(){return Zt(this.toKeyedSeq())},toOrderedSet:function(){return Fe(u(this)?this.valueSeq():this)},toSet:function(){return Ue(u(this)?this.valueSeq():this)},toSetSeq:function(){return new oe(this)},toSeq:function(){return a(this)?this.toIndexedSeq():u(this)?this.toKeyedSeq():this.toSetSeq()},toStack:function(){return Be(u(this)?this.valueSeq():this)},toList:function(){return Ht(u(this)?this.valueSeq():this)},toString:function(){return"[Iterable]"},__toString:function(t,e){return 0===this.size?t+e:t+" "+this.toSeq().map(this.__toStringMapper).join(", ")+" "+e},concat:function(){var t=sn.call(arguments,0);return Oe(this,ve(this,t))},includes:function(t){return this.some((function(e){return W(e,t)}))},entries:function(){return this.__iterator(In)},every:function(t,e){ft(this.size);var n=!0;return this.__iterate((function(r,i,o){if(!t.call(e,r,i,o))return n=!1,!1})),n},filter:function(t,e){return Oe(this,fe(this,t,e,!0))},find:function(t,e,n){var r=this.findEntry(t,e);return r?r[1]:n},forEach:function(t,e){return ft(this.size),this.__iterate(e?t.bind(e):t)},join:function(t){ft(this.size),t=void 0!==t?""+t:",";var e="",n=!0;return this.__iterate((function(r){n?n=!1:e+=t,e+=null!==r&&void 0!==r?r.toString():""})),e},keys:function(){return this.__iterator(mn)},map:function(t,e){return Oe(this,se(this,t,e))},reduce:function(t,e,n){ft(this.size);var r,i;return arguments.length<2?i=!0:r=e,this.__iterate((function(e,o,u){i?(i=!1,r=e):r=t.call(n,r,e,o,u)})),r},reduceRight:function(t,e,n){var r=this.toKeyedSeq().reverse();return r.reduce.apply(r,arguments)},reverse:function(){return Oe(this,ce(this,!0))},slice:function(t,e){return Oe(this,pe(this,t,e,!0))},some:function(t,e){return!this.every($e(t),e)},sort:function(t){return Oe(this,me(this,t))},values:function(){return this.__iterator(En)},butLast:function(){return this.slice(0,-1)},isEmpty:function(){return void 0!==this.size?0===this.size:!this.some((function(){return!0}))},count:function(t,e){return _(t?this.toSeq().filter(t,e):this)},countBy:function(t,e){return he(this,t,e)},equals:function(t){return X(this,t)},entrySeq:function(){var t=this;if(t._cache)return new M(t._cache);var e=t.toSeq().map(Ze).toIndexedSeq();return e.fromEntrySeq=function(){return t.toSeq()},e},filterNot:function(t,e){return this.filter($e(t),e)},findEntry:function(t,e,n){var r=n;return this.__iterate((function(n,i,o){if(t.call(e,n,i,o))return r=[i,n],!1})),r},findKey:function(t,e){var n=this.findEntry(t,e);return n&&n[0]},findLast:function(t,e,n){return this.toKeyedSeq().reverse().find(t,e,n)},findLastEntry:function(t,e,n){return this.toKeyedSeq().reverse().findEntry(t,e,n)},findLastKey:function(t,e){return this.toKeyedSeq().reverse().findKey(t,e)},first:function(){return this.find(v)},flatMap:function(t,e){return Oe(this,Se(this,t,e))},flatten:function(t){return Oe(this,ye(this,t,!0))},fromEntrySeq:function(){return new ue(this)},get:function(t,e){return 
this.find((function(e,n){return W(n,t)}),void 0,e)},getIn:function(t,e){for(var n,r=this,i=Re(t);!(n=i.next()).done;){var o=n.value;if(r=r&&r.get?r.get(o,yn):yn,r===yn)return e}return r},groupBy:function(t,e){return le(this,t,e)},has:function(t){return this.get(t,yn)!==yn},hasIn:function(t){return this.getIn(t,yn)!==yn},isSubset:function(t){return t="function"==typeof t.includes?t:e(t),this.every((function(e){return t.includes(e)}))},isSuperset:function(t){return t="function"==typeof t.isSubset?t:e(t),t.isSubset(this)},keyOf:function(t){return this.findKey((function(e){return W(e,t)}))},keySeq:function(){return this.toSeq().map(Qe).toIndexedSeq()},last:function(){return this.toSeq().reverse().first()},lastKeyOf:function(t){return this.toKeyedSeq().reverse().keyOf(t)},max:function(t){return Ee(this,t)},maxBy:function(t,e){return Ee(this,e,t)},min:function(t){return Ee(this,t?tn(t):rn)},minBy:function(t,e){return Ee(this,e?tn(e):rn,t)},rest:function(){return this.slice(1)},skip:function(t){return this.slice(Math.max(0,t))},skipLast:function(t){return Oe(this,this.toSeq().reverse().skip(t).reverse())},skipWhile:function(t,e){return Oe(this,de(this,t,e,!0))},skipUntil:function(t,e){return this.skipWhile($e(t),e)},sortBy:function(t,e){return Oe(this,me(this,e,t))},take:function(t){return this.slice(0,Math.max(0,t))},takeLast:function(t){return Oe(this,this.toSeq().reverse().take(t).reverse())},takeWhile:function(t,e){return Oe(this,_e(this,t,e))},takeUntil:function(t,e){return this.takeWhile($e(t),e)},valueSeq:function(){return this.toIndexedSeq()},hashCode:function(){return this.__hash||(this.__hash=on(this))}});var ar=e.prototype;ar[cn]=!0,ar[wn]=ar.values,ar.__toJS=ar.toArray,ar.__toStringMapper=en,ar.inspect=ar.toSource=function(){return this.toString()},ar.chain=ar.flatMap,ar.contains=ar.includes,Xe(n,{flip:function(){return Oe(this,ae(this))},mapEntries:function(t,e){var n=this,r=0;return Oe(this,this.toSeq().map((function(i,o){return t.call(e,[o,i],r++,n)})).fromEntrySeq())},mapKeys:function(t,e){var n=this;return Oe(this,this.toSeq().flip().map((function(r,i){return t.call(e,r,i,n)})).flip())}});var sr=n.prototype;sr[fn]=!0,sr[wn]=ar.entries,sr.__toJS=ar.toObject,sr.__toStringMapper=function(t,e){return JSON.stringify(e)+": "+en(t)},Xe(r,{toKeyedSeq:function(){return new re(this,!1)},filter:function(t,e){return Oe(this,fe(this,t,e,!1))},findIndex:function(t,e){var n=this.findEntry(t,e);return n?n[0]:-1},indexOf:function(t){var e=this.keyOf(t);return void 0===e?-1:e},lastIndexOf:function(t){var e=this.lastKeyOf(t);return void 0===e?-1:e},reverse:function(){return Oe(this,ce(this,!1))},slice:function(t,e){return Oe(this,pe(this,t,e,!1))},splice:function(t,e){var n=arguments.length;if(e=Math.max(0|e,0),0===n||2===n&&!e)return this;t=S(t,t<0?this.count():this.size);var r=this.slice(0,t);return Oe(this,1===n?r:r.concat(p(arguments,2),this.slice(t+e)))},findLastIndex:function(t,e){var n=this.findLastEntry(t,e);return n?n[0]:-1},first:function(){return this.get(0)},flatten:function(t){return Oe(this,ye(this,t,!1))},get:function(t,e){return t=d(this,t),t<0||this.size===1/0||void 0!==this.size&&t>this.size?e:this.find((function(e,n){return n===t}),void 0,e)},has:function(t){return t=d(this,t),t>=0&&(void 0!==this.size?this.size===1/0||t-1&&t%1===0&&t<=Number.MAX_VALUE}var i=Function.prototype.bind;e.isString=function(t){return"string"==typeof t||"[object String]"===n(t)},e.isArray=Array.isArray||function(t){return"[object Array]"===n(t)},"function"!=typeof/./&&"object"!=typeof 
Int8Array?e.isFunction=function(t){return"function"==typeof t||!1}:e.isFunction=function(t){return"[object Function]"===toString.call(t)},e.isObject=function(t){var e=typeof t;return"function"===e||"object"===e&&!!t},e.extend=function(t){var e=arguments,n=arguments.length;if(!t||n<2)return t||{};for(var r=1;r0)){var e=this.reactorState.get("dirtyStores");if(0!==e.size){var n=c.default.Set().withMutations((function(n){n.union(t.observerState.get("any")),e.forEach((function(e){var r=t.observerState.getIn(["stores",e]);r&&n.union(r)}))}));n.forEach((function(e){var n=t.observerState.getIn(["observersMap",e]);if(n){var r=n.get("getter"),i=n.get("handler"),o=p.evaluate(t.prevReactorState,r),u=p.evaluate(t.reactorState,r);t.prevReactorState=o.reactorState,t.reactorState=u.reactorState;var a=o.result,s=u.result;c.default.is(a,s)||i.call(null,s)}}));var r=p.resetDirtyStores(this.reactorState);this.prevReactorState=r,this.reactorState=r}}}},{key:"batchStart",value:function(){this.__batchDepth++}},{key:"batchEnd",value:function(){if(this.__batchDepth--,this.__batchDepth<=0){this.__isDispatching=!0;try{this.__notify()}catch(t){throw this.__isDispatching=!1,t}this.__isDispatching=!1}}}]),t})();e.default=(0,g.toFactory)(E),t.exports=e.default},function(t,e,n){function r(t,e,n){return e in t?Object.defineProperty(t,e,{value:n,enumerable:!0,configurable:!0,writable:!0}):t[e]=n,t}function i(t,e){var n={};return(0,o.each)(e,(function(e,r){n[r]=t.evaluate(e)})),n}Object.defineProperty(e,"__esModule",{value:!0});var o=n(4);e.default=function(t){return{getInitialState:function(){return i(t,this.getDataBindings())},componentDidMount:function(){var e=this;this.__unwatchFns=[],(0,o.each)(this.getDataBindings(),(function(n,i){var o=t.observe(n,(function(t){e.setState(r({},i,t))}));e.__unwatchFns.push(o)}))},componentWillUnmount:function(){for(var t=this;this.__unwatchFns.length;)t.__unwatchFns.shift()()}}},t.exports=e.default},function(t,e,n){function r(t){return t&&t.__esModule?t:{default:t}}function i(t,e){return new C({result:t,reactorState:e})}function o(t,e){return t.withMutations((function(t){(0,A.each)(e,(function(e,n){t.getIn(["stores",n])&&console.warn("Store already defined for id = "+n);var r=e.getInitialState();if(void 0===r&&f(t,"throwOnUndefinedStoreReturnValue"))throw new Error("Store getInitialState() must return a value, did you forget a return statement");if(f(t,"throwOnNonImmutableStore")&&!(0,O.isImmutableValue)(r))throw new Error("Store getInitialState() must return an immutable value, did you forget to call toImmutable");t.update("stores",(function(t){return t.set(n,e)})).update("state",(function(t){return t.set(n,r)})).update("dirtyStores",(function(t){return t.add(n)})).update("storeStates",(function(t){return m(t,[n])}))})),g(t)}))}function u(t,e){return t.withMutations((function(t){(0,A.each)(e,(function(e,n){t.update("stores",(function(t){return t.set(n,e)}))}))}))}function a(t,e,n){var r=t.get("logger");if(void 0===e&&f(t,"throwOnUndefinedActionType"))throw new Error("`dispatch` cannot be called with an `undefined` action type.");var i=t.get("state"),o=t.get("dirtyStores"),u=i.withMutations((function(u){r.dispatchStart(t,e,n),t.get("stores").forEach((function(i,a){var s=u.get(a),c=void 0;try{c=i.handle(s,e,n)}catch(e){throw r.dispatchError(t,e.message),e}if(void 0===c&&f(t,"throwOnUndefinedStoreReturnValue")){var h="Store handler must return a value, did you forget a return statement";throw r.dispatchError(t,h),new 
Error(h)}u.set(a,c),s!==c&&(o=o.add(a))})),r.dispatchEnd(t,u,o,i)})),a=t.set("state",u).set("dirtyStores",o).update("storeStates",(function(t){return m(t,o)}));return g(a)}function s(t,e){var n=[],r=(0,O.toImmutable)({}).withMutations((function(r){(0,A.each)(e,(function(e,i){var o=t.getIn(["stores",i]);if(o){var u=o.deserialize(e);void 0!==u&&(r.set(i,u),n.push(i))}}))})),i=I.default.Set(n);return t.update("state",(function(t){return t.merge(r)})).update("dirtyStores",(function(t){return t.union(i)})).update("storeStates",(function(t){return m(t,n)}))}function c(t,e,n){var r=e;(0,T.isKeyPath)(e)&&(e=(0,w.fromKeyPath)(e));var i=t.get("nextId"),o=(0,w.getStoreDeps)(e),u=I.default.Map({id:i,storeDeps:o,getterKey:r,getter:e,handler:n}),a=void 0;return a=0===o.size?t.update("any",(function(t){return t.add(i)})):t.withMutations((function(t){o.forEach((function(e){var n=["stores",e];t.hasIn(n)||t.setIn(n,I.default.Set()),t.updateIn(["stores",e],(function(t){return t.add(i)}))}))})),a=a.set("nextId",i+1).setIn(["observersMap",i],u),{observerState:a,entry:u}}function f(t,e){var n=t.getIn(["options",e]);if(void 0===n)throw new Error("Invalid option: "+e);return n}function h(t,e,n){var r=t.get("observersMap").filter((function(t){var r=t.get("getterKey"),i=!n||t.get("handler")===n;return!!i&&((0,T.isKeyPath)(e)&&(0,T.isKeyPath)(r)?(0,T.isEqual)(e,r):e===r)}));return t.withMutations((function(t){r.forEach((function(e){return l(t,e)}))}))}function l(t,e){return t.withMutations((function(t){var n=e.get("id"),r=e.get("storeDeps");0===r.size?t.update("any",(function(t){return t.remove(n)})):r.forEach((function(e){t.updateIn(["stores",e],(function(t){return t?t.remove(n):t}))})),t.removeIn(["observersMap",n])}))}function p(t){var e=t.get("state");return t.withMutations((function(t){var n=t.get("stores"),r=n.keySeq().toJS();n.forEach((function(n,r){var i=e.get(r),o=n.handleReset(i);if(void 0===o&&f(t,"throwOnUndefinedStoreReturnValue"))throw new Error("Store handleReset() must return a value, did you forget a return statement");if(f(t,"throwOnNonImmutableStore")&&!(0,O.isImmutableValue)(o))throw new Error("Store reset state must be an immutable value, did you forget to call toImmutable");t.setIn(["state",r],o)})),t.update("storeStates",(function(t){return m(t,r)})),v(t)}))}function _(t,e){var n=t.get("state");if((0,T.isKeyPath)(e))return i(n.getIn(e),t);if(!(0,w.isGetter)(e))throw new Error("evaluate must be passed a keyPath or Getter");var r=t.get("cache"),o=r.lookup(e),u=!o||y(t,o);return u&&(o=S(t,e)),i(o.get("value"),t.update("cache",(function(t){return u?t.miss(e,o):t.hit(e)})))}function d(t){var e={};return t.get("stores").forEach((function(n,r){var i=t.getIn(["state",r]),o=n.serialize(i);void 0!==o&&(e[r]=o)})),e}function v(t){return t.set("dirtyStores",I.default.Set())}function y(t,e){var n=e.get("storeStates");return!n.size||n.some((function(e,n){return t.getIn(["storeStates",n])!==e}))}function S(t,e){var n=(0,w.getDeps)(e).map((function(e){return _(t,e).result})),r=(0,w.getComputeFn)(e).apply(null,n),i=(0,w.getStoreDeps)(e),o=(0,O.toImmutable)({}).withMutations((function(e){i.forEach((function(n){var r=t.getIn(["storeStates",n]);e.set(n,r)}))}));return(0,b.CacheEntry)({value:r,storeStates:o,dispatchId:t.get("dispatchId")})}function g(t){return t.update("dispatchId",(function(t){return t+1}))}function m(t,e){return t.withMutations((function(t){e.forEach((function(e){var 
n=t.has(e)?t.get(e)+1:1;t.set(e,n)}))}))}Object.defineProperty(e,"__esModule",{value:!0}),e.registerStores=o,e.replaceStores=u,e.dispatch=a,e.loadState=s,e.addObserver=c,e.getOption=f,e.removeObserver=h,e.removeObserverByEntry=l,e.reset=p,e.evaluate=_,e.serialize=d,e.resetDirtyStores=v;var E=n(3),I=r(E),b=n(9),O=n(5),w=n(10),T=n(11),A=n(4),C=I.default.Record({result:null,reactorState:null})},function(t,e,n){function r(t,e){if(!(t instanceof e))throw new TypeError("Cannot call a class as a function")}function i(){return new s}Object.defineProperty(e,"__esModule",{value:!0});var o=(function(){function t(t,e){for(var n=0;nn.dispatchId)throw new Error("Refusing to cache older value");return n})))}},{key:"evict",value:function(e){return new t(this.cache.remove(e))}}]),t})();e.BasicCache=s;var c=1e3,f=1,h=(function(){function t(){var e=arguments.length<=0||void 0===arguments[0]?c:arguments[0],n=arguments.length<=1||void 0===arguments[1]?f:arguments[1],i=arguments.length<=2||void 0===arguments[2]?new s:arguments[2],o=arguments.length<=3||void 0===arguments[3]?(0,u.OrderedSet)():arguments[3];r(this,t),console.log("using LRU"),this.limit=e,this.evictCount=n,this.cache=i,this.lru=o}return o(t,[{key:"lookup",value:function(t,e){return this.cache.lookup(t,e)}},{key:"has",value:function(t){return this.cache.has(t)}},{key:"asMap",value:function(){return this.cache.asMap()}},{key:"hit",value:function(e){return this.cache.has(e)?new t(this.limit,this.evictCount,this.cache,this.lru.remove(e).add(e)):this}},{key:"miss",value:function(e,n){var r;if(this.lru.size>=this.limit){if(this.has(e))return new t(this.limit,this.evictCount,this.cache.miss(e,n),this.lru.remove(e).add(e));var i=this.lru.take(this.evictCount).reduce((function(t,e){return t.evict(e)}),this.cache).miss(e,n);r=new t(this.limit,this.evictCount,i,this.lru.skip(this.evictCount).add(e))}else r=new t(this.limit,this.evictCount,this.cache.miss(e,n),this.lru.add(e));return r}},{key:"evict",value:function(e){return this.cache.has(e)?new t(this.limit,this.evictCount,this.cache.evict(e),this.lru.remove(e)):this}}]),t})();e.LRUCache=h},function(t,e,n){function r(t){return t&&t.__esModule?t:{default:t}}function i(t){return(0,l.isArray)(t)&&(0,l.isFunction)(t[t.length-1])}function o(t){return t[t.length-1]}function u(t){return t.slice(0,t.length-1)}function a(t,e){e||(e=h.default.Set());var n=h.default.Set().withMutations((function(e){if(!i(t))throw new Error("getFlattenedDeps must be passed a Getter");u(t).forEach((function(t){if((0,p.isKeyPath)(t))e.add((0,f.List)(t));else{if(!i(t))throw new Error("Invalid getter, each dependency must be a KeyPath or Getter");e.union(a(t))}}))}));return e.union(n)}function s(t){if(!(0,p.isKeyPath)(t))throw new Error("Cannot create Getter from KeyPath: "+t);return[t,_]}function c(t){if(t.hasOwnProperty("__storeDeps"))return t.__storeDeps;var e=a(t).map((function(t){return t.first()})).filter((function(t){return!!t}));return Object.defineProperty(t,"__storeDeps",{enumerable:!1,configurable:!1,writable:!1,value:e}),e}Object.defineProperty(e,"__esModule",{value:!0});var f=n(3),h=r(f),l=n(4),p=n(11),_=function(t){return t};e.default={isGetter:i,getComputeFn:o,getFlattenedDeps:a,getStoreDeps:c,getDeps:u,fromKeyPath:s},t.exports=e.default},function(t,e,n){function r(t){return t&&t.__esModule?t:{default:t}}function i(t){return(0,s.isArray)(t)&&!(0,s.isFunction)(t[t.length-1])}function o(t,e){var n=a.default.List(t),r=a.default.List(e);return 
a.default.is(n,r)}Object.defineProperty(e,"__esModule",{value:!0}),e.isKeyPath=i,e.isEqual=o;var u=n(3),a=r(u),s=n(4)},function(t,e,n){Object.defineProperty(e,"__esModule",{value:!0});var r=n(8),i={dispatchStart:function(t,e,n){(0,r.getOption)(t,"logDispatches")&&console.group&&(console.groupCollapsed("Dispatch: %s",e),console.group("payload"),console.debug(n),console.groupEnd())},dispatchError:function(t,e){(0,r.getOption)(t,"logDispatches")&&console.group&&(console.debug("Dispatch error: "+e),console.groupEnd())},dispatchEnd:function(t,e,n,i){(0,r.getOption)(t,"logDispatches")&&console.group&&((0,r.getOption)(t,"logDirtyStores")&&console.log("Stores updated:",n.toList().toJS()),(0,r.getOption)(t,"logAppState")&&console.debug("Dispatch done, new state: ",e.toJS()),console.groupEnd())}};e.ConsoleGroupLogger=i;var o={dispatchStart:function(t,e,n){},dispatchError:function(t,e){},dispatchEnd:function(t,e,n){}};e.NoopLogger=o},function(t,e,n){Object.defineProperty(e,"__esModule",{value:!0});var r=n(3),i=n(9),o=n(12),u=(0,r.Map)({logDispatches:!1,logAppState:!1,logDirtyStores:!1,throwOnUndefinedActionType:!1,throwOnUndefinedStoreReturnValue:!1,throwOnNonImmutableStore:!1,throwOnDispatchInDispatch:!1});e.PROD_OPTIONS=u;var a=(0,r.Map)({logDispatches:!0,logAppState:!0,logDirtyStores:!0,throwOnUndefinedActionType:!0,throwOnUndefinedStoreReturnValue:!0,throwOnNonImmutableStore:!0,throwOnDispatchInDispatch:!0});e.DEBUG_OPTIONS=a;var s=(0,r.Record)({dispatchId:0,state:(0,r.Map)(),stores:(0,r.Map)(),cache:(0,i.DefaultCache)(),logger:o.NoopLogger,storeStates:(0,r.Map)(),dirtyStores:(0,r.Set)(),debug:!1,options:u});e.ReactorState=s;var c=(0,r.Record)({any:(0,r.Set)(),stores:(0,r.Map)({}),observersMap:(0,r.Map)({}),nextId:1});e.ObserverState=c}])}))})),ze=t(De),Re=function(t){var e,n={};if(!(t instanceof Object)||Array.isArray(t))throw new Error("keyMirror(...): Argument must be an object.");for(e in t)t.hasOwnProperty(e)&&(n[e]=e);return n},Le=Re,Me=Le({VALIDATING_AUTH_TOKEN:null,VALID_AUTH_TOKEN:null,INVALID_AUTH_TOKEN:null,LOG_OUT:null}),je=ze.Store,Ne=ze.toImmutable,ke=new je({getInitialState:function(){return Ne({isValidating:!1,authToken:!1,host:null,isInvalid:!1,errorMessage:""})},initialize:function(){this.on(Me.VALIDATING_AUTH_TOKEN,n),this.on(Me.VALID_AUTH_TOKEN,r),this.on(Me.INVALID_AUTH_TOKEN,i)}}),Ue=ze.Store,Pe=ze.toImmutable,He=new Ue({getInitialState:function(){return Pe({authToken:null,host:""})},initialize:function(){this.on(Me.VALID_AUTH_TOKEN,o),this.on(Me.LOG_OUT,u)}}),xe=ze.Store,Ve=new xe({getInitialState:function(){return!0},initialize:function(){this.on(Me.VALID_AUTH_TOKEN,a)}}),Fe=Le({STREAM_START:null,STREAM_STOP:null,STREAM_ERROR:null}),qe="object"==typeof window&&"EventSource"in window,Ge=ze.Store,Ke=ze.toImmutable,Be=new Ge({getInitialState:function(){return Ke({isSupported:qe,isStreaming:!1,useStreaming:!0,hasError:!1})},initialize:function(){this.on(Fe.STREAM_START,s),this.on(Fe.STREAM_STOP,c),this.on(Fe.STREAM_ERROR,f),this.on(Fe.LOG_OUT,h)}}),Ye=Le({API_FETCH_ALL_START:null,API_FETCH_ALL_SUCCESS:null,API_FETCH_ALL_FAIL:null,SYNC_SCHEDULED:null,SYNC_SCHEDULE_CANCELLED:null}),Je=ze.Store,We=new Je({getInitialState:function(){return!0},initialize:function(){this.on(Ye.API_FETCH_ALL_START,(function(){return!0})),this.on(Ye.API_FETCH_ALL_SUCCESS,(function(){return!1})),this.on(Ye.API_FETCH_ALL_FAIL,(function(){return!1})),this.on(Ye.LOG_OUT,(function(){return!1}))}}),Xe=ze.Store,Qe=new 
Xe({getInitialState:function(){return!1},initialize:function(){this.on(Ye.SYNC_SCHEDULED,(function(){return!0})),this.on(Ye.SYNC_SCHEDULE_CANCELLED,(function(){return!1})),this.on(Ye.LOG_OUT,(function(){return!1}))}}),Ze=Le({API_FETCH_SUCCESS:null,API_FETCH_START:null,API_FETCH_FAIL:null,API_SAVE_SUCCESS:null,API_SAVE_START:null,API_SAVE_FAIL:null, +API_DELETE_SUCCESS:null,API_DELETE_START:null,API_DELETE_FAIL:null,LOG_OUT:null}),$e=ze.Store,tn=ze.toImmutable,en=new $e({getInitialState:function(){return tn({})},initialize:function(){var t=this;this.on(Ze.API_FETCH_SUCCESS,l),this.on(Ze.API_SAVE_SUCCESS,l),this.on(Ze.API_DELETE_SUCCESS,p),this.on(Ze.LOG_OUT,(function(){return t.getInitialState()}))}}),nn=Object.prototype.hasOwnProperty,rn=Object.prototype.propertyIsEnumerable,on=d()?Object.assign:function(t,e){for(var n,r,i=arguments,o=_(t),u=1;u \ No newline at end of file +},customStyle:null,getComputedStyleValue:function(e){return!i&&this._styleProperties&&this._styleProperties[e]||getComputedStyle(this).getPropertyValue(e)},_setupStyleProperties:function(){this.customStyle={},this._styleCache=null,this._styleProperties=null,this._scopeSelector=null,this._ownStyleProperties=null,this._customStyle=null},_needsStyleProperties:function(){return Boolean(!i&&this._ownStylePropertyNames&&this._ownStylePropertyNames.length)},_validateApplyShim:function(){if(this.__applyShimInvalid){Polymer.ApplyShim.transform(this._styles,this.__proto__);var e=n.elementStyles(this);if(s){var t=this._template.content.querySelector("style");t&&(t.textContent=e)}else{var r=this._scopeStyle&&this._scopeStyle.nextSibling;r&&(r.textContent=e)}}},_beforeAttached:function(){this._scopeSelector&&!this.__stylePropertiesInvalid||!this._needsStyleProperties()||(this.__stylePropertiesInvalid=!1,this._updateStyleProperties())},_findStyleHost:function(){for(var e,t=this;e=Polymer.dom(t).getOwnerRoot();){if(Polymer.isInstance(e.host))return e.host;t=e.host}return r},_updateStyleProperties:function(){var e,n=this._findStyleHost();n._styleProperties||n._computeStyleProperties(),n._styleCache||(n._styleCache=new Polymer.StyleCache);var r=t.propertyDataFromStyles(n._styles,this),i=!this.__notStyleScopeCacheable;i&&(r.key.customStyle=this.customStyle,e=n._styleCache.retrieve(this.is,r.key,this._styles));var a=Boolean(e);a?this._styleProperties=e._styleProperties:this._computeStyleProperties(r.properties),this._computeOwnStyleProperties(),a||(e=o.retrieve(this.is,this._ownStyleProperties,this._styles));var l=Boolean(e)&&!a,c=this._applyStyleProperties(e);a||(c=c&&s?c.cloneNode(!0):c,e={style:c,_scopeSelector:this._scopeSelector,_styleProperties:this._styleProperties},i&&(r.key.customStyle={},this.mixin(r.key.customStyle,this.customStyle),n._styleCache.store(this.is,e,r.key,this._styles)),l||o.store(this.is,Object.create(e),this._ownStyleProperties,this._styles))},_computeStyleProperties:function(e){var n=this._findStyleHost();n._styleProperties||n._computeStyleProperties();var r=Object.create(n._styleProperties),s=t.hostAndRootPropertiesForScope(this);this.mixin(r,s.hostProps),e=e||t.propertyDataFromStyles(n._styles,this).properties,this.mixin(r,e),this.mixin(r,s.rootProps),t.mixinCustomStyle(r,this.customStyle),t.reify(r),this._styleProperties=r},_computeOwnStyleProperties:function(){for(var e,t={},n=0;n0&&l.push(t);return[{removed:a,added:l}]}},Polymer.Collection.get=function(e){return Polymer._collections.get(e)||new Polymer.Collection(e)},Polymer.Collection.applySplices=function(e,t){var n=Polymer._collections.get(e);return 
n?n._applySplices(t):null},Polymer({is:"dom-repeat",extends:"template",_template:null,properties:{items:{type:Array},as:{type:String,value:"item"},indexAs:{type:String,value:"index"},sort:{type:Function,observer:"_sortChanged"},filter:{type:Function,observer:"_filterChanged"},observe:{type:String,observer:"_observeChanged"},delay:Number,renderedItemCount:{type:Number,notify:!0,readOnly:!0},initialCount:{type:Number,observer:"_initializeChunking"},targetFramerate:{type:Number,value:20},_targetFrameTime:{type:Number,computed:"_computeFrameTime(targetFramerate)"}},behaviors:[Polymer.Templatizer],observers:["_itemsChanged(items.*)"],created:function(){this._instances=[],this._pool=[],this._limit=1/0;var e=this;this._boundRenderChunk=function(){e._renderChunk()}},detached:function(){this.__isDetached=!0;for(var e=0;e=0;t--){var n=this._instances[t];n.isPlaceholder&&t=this._limit&&(n=this._downgradeInstance(t,n.__key__)),e[n.__key__]=t,n.isPlaceholder||n.__setProperty(this.indexAs,t,!0)}this._pool.length=0,this._setRenderedItemCount(this._instances.length),this.fire("dom-change"),this._tryRenderChunk()},_applyFullRefresh:function(){var e,t=this.collection;if(this._sortFn)e=t?t.getKeys():[];else{e=[];var n=this.items;if(n)for(var r=0;r=r;a--)this._detachAndRemoveInstance(a)},_numericSort:function(e,t){return e-t},_applySplicesUserSort:function(e){for(var t,n,r=this.collection,s={},i=0;i=0;i--){var c=a[i];void 0!==c&&this._detachAndRemoveInstance(c)}var h=this;if(l.length){this._filterFn&&(l=l.filter(function(e){return h._filterFn(r.getItem(e))})),l.sort(function(e,t){return h._sortFn(r.getItem(e),r.getItem(t))});var u=0;for(i=0;i>1,a=this._instances[o].__key__,l=this._sortFn(n.getItem(a),r);if(l<0)e=o+1;else{if(!(l>0)){i=o;break}s=o-1}}return i<0&&(i=s+1),this._insertPlaceholder(i,t),i},_applySplicesArrayOrder:function(e){for(var t,n=0;n=0?(e=this.as+"."+e.substring(n+1),i._notifyPath(e,t,!0)):i.__setProperty(this.as,t,!0))}},itemForElement:function(e){var t=this.modelForElement(e);return t&&t[this.as]},keyForElement:function(e){var t=this.modelForElement(e);return t&&t.__key__},indexForElement:function(e){var t=this.modelForElement(e);return t&&t[this.indexAs]}}),Polymer({is:"array-selector",_template:null,properties:{items:{type:Array,observer:"clearSelection"},multi:{type:Boolean,value:!1,observer:"clearSelection"},selected:{type:Object,notify:!0},selectedItem:{type:Object,notify:!0},toggle:{type:Boolean,value:!1}},clearSelection:function(){if(Array.isArray(this.selected))for(var e=0;e \ No newline at end of file diff --git a/homeassistant/components/frontend/www_static/frontend.html.gz b/homeassistant/components/frontend/www_static/frontend.html.gz index ac7a962c713..7e2be4a3b4c 100644 Binary files a/homeassistant/components/frontend/www_static/frontend.html.gz and b/homeassistant/components/frontend/www_static/frontend.html.gz differ diff --git a/homeassistant/components/frontend/www_static/home-assistant-polymer b/homeassistant/components/frontend/www_static/home-assistant-polymer index db109f5dda0..f3081ed48fd 160000 --- a/homeassistant/components/frontend/www_static/home-assistant-polymer +++ b/homeassistant/components/frontend/www_static/home-assistant-polymer @@ -1 +1 @@ -Subproject commit db109f5dda043182a7e9647b161851e83be9b91e +Subproject commit f3081ed48fd11fa89586701dba3792d028473a15 diff --git a/homeassistant/components/frontend/www_static/micromarkdown-js.html b/homeassistant/components/frontend/www_static/micromarkdown-js.html new file mode 100644 index 00000000000..a80c564cb7b 
--- /dev/null
+++ b/homeassistant/components/frontend/www_static/micromarkdown-js.html
@@ -0,0 +1,10 @@
[the 10 added lines of this new file (minified markup) were stripped during text extraction and are not recoverable]
diff --git a/homeassistant/components/frontend/www_static/micromarkdown-js.html.gz b/homeassistant/components/frontend/www_static/micromarkdown-js.html.gz
new file index 00000000000..7b13f03175e
Binary files /dev/null and b/homeassistant/components/frontend/www_static/micromarkdown-js.html.gz differ
diff --git a/homeassistant/components/frontend/www_static/panels/ha-panel-dev-service.html b/homeassistant/components/frontend/www_static/panels/ha-panel-dev-service.html
index 4725aa4a729..458e136f7b1 100644
--- a/homeassistant/components/frontend/www_static/panels/ha-panel-dev-service.html
+++ b/homeassistant/components/frontend/www_static/panels/ha-panel-dev-service.html
@@ -1 +1 @@
[the removed and added single-line minified panel markup was stripped during extraction; both versions end with "\ No newline at end of file"]
diff --git a/homeassistant/components/frontend/www_static/panels/ha-panel-dev-service.html.gz b/homeassistant/components/frontend/www_static/panels/ha-panel-dev-service.html.gz
index cdfed733269..458b49d4f44 100644
Binary files a/homeassistant/components/frontend/www_static/panels/ha-panel-dev-service.html.gz and b/homeassistant/components/frontend/www_static/panels/ha-panel-dev-service.html.gz differ
diff --git a/homeassistant/components/frontend/www_static/panels/ha-panel-dev-state.html b/homeassistant/components/frontend/www_static/panels/ha-panel-dev-state.html
index d6f91e28853..53c28a1109f 100644
--- a/homeassistant/components/frontend/www_static/panels/ha-panel-dev-state.html
+++ b/homeassistant/components/frontend/www_static/panels/ha-panel-dev-state.html
@@ -1 +1 @@
[the removed and added single-line minified panel markup was stripped during extraction; both versions end with "\ No newline at end of file"]
diff --git a/homeassistant/components/frontend/www_static/panels/ha-panel-dev-state.html.gz b/homeassistant/components/frontend/www_static/panels/ha-panel-dev-state.html.gz
index d397aee1a1e..4c211bbe30d 100644
Binary files a/homeassistant/components/frontend/www_static/panels/ha-panel-dev-state.html.gz and b/homeassistant/components/frontend/www_static/panels/ha-panel-dev-state.html.gz differ
diff --git a/homeassistant/components/frontend/www_static/panels/ha-panel-map.html b/homeassistant/components/frontend/www_static/panels/ha-panel-map.html
index 296b14bd858..2f73f5a782a 100644
--- a/homeassistant/components/frontend/www_static/panels/ha-panel-map.html
+++ b/homeassistant/components/frontend/www_static/panels/ha-panel-map.html
@@ -1,4 +1,4 @@
[the removed lines were stripped during extraction; the surviving fragment of the rebuilt single-line minified Leaflet bundle follows]
+case"touchend":return this.addPointerListenerEnd(t,e,i,n);case"touchmove":return this.addPointerListenerMove(t,e,i,n);default:throw"Unknown touch event type"}},addPointerListenerStart:function(t,i,n,s){var a="_leaflet_",r=this._pointers,h=function(t){"mouse"!==t.pointerType&&t.pointerType!==t.MSPOINTER_TYPE_MOUSE&&o.DomEvent.preventDefault(t);for(var e=!1,i=0;i1))&&(this._moved||(o.DomUtil.addClass(e._mapPane,"leaflet-touching"),e.fire("movestart").fire("zoomstart"),this._moved=!0),o.Util.cancelAnimFrame(this._animRequest),this._animRequest=o.Util.requestAnimFrame(this._updateOnMove,this,!0,this._map._container),o.DomEvent.preventDefault(t))}},_updateOnMove:function(){var t=this._map,e=this._getScaleOrigin(),i=t.layerPointToLatLng(e),n=t.getScaleZoom(this._scale);t._animateZoom(i,n,this._startCenter,this._scale,this._delta,!1,!0)},_onTouchEnd:function(){if(!this._moved||!this._zooming)return void(this._zooming=!1);var
t=this._map;this._zooming=!1,o.DomUtil.removeClass(t._mapPane,"leaflet-touching"),o.Util.cancelAnimFrame(this._animRequest),o.DomEvent.off(e,"touchmove",this._onTouchMove).off(e,"touchend",this._onTouchEnd);var i=this._getScaleOrigin(),n=t.layerPointToLatLng(i),s=t.getZoom(),a=t.getScaleZoom(this._scale)-s,r=a>0?Math.ceil(a):Math.floor(a),h=t._limitZoom(s+r),l=t.getZoomScale(h)/this._scale;t._animateZoom(n,h,i,l)},_getScaleOrigin:function(){var t=this._centerOffset.subtract(this._delta).divideBy(this._scale);return this._startCenter.add(t)}}),o.Map.addInitHook("addHandler","touchZoom",o.Map.TouchZoom),o.Map.mergeOptions({tap:!0,tapTolerance:15}),o.Map.Tap=o.Handler.extend({addHooks:function(){o.DomEvent.on(this._map._container,"touchstart",this._onDown,this)},removeHooks:function(){o.DomEvent.off(this._map._container,"touchstart",this._onDown,this)},_onDown:function(t){if(t.touches){if(o.DomEvent.preventDefault(t),this._fireClick=!0,t.touches.length>1)return this._fireClick=!1,void clearTimeout(this._holdTimeout);var i=t.touches[0],n=i.target;this._startPos=this._newPos=new o.Point(i.clientX,i.clientY),n.tagName&&"a"===n.tagName.toLowerCase()&&o.DomUtil.addClass(n,"leaflet-active"),this._holdTimeout=setTimeout(o.bind(function(){this._isTapValid()&&(this._fireClick=!1,this._onUp(),this._simulateEvent("contextmenu",i))},this),1e3),o.DomEvent.on(e,"touchmove",this._onMove,this).on(e,"touchend",this._onUp,this)}},_onUp:function(t){if(clearTimeout(this._holdTimeout),o.DomEvent.off(e,"touchmove",this._onMove,this).off(e,"touchend",this._onUp,this),this._fireClick&&t&&t.changedTouches){var i=t.changedTouches[0],n=i.target;n&&n.tagName&&"a"===n.tagName.toLowerCase()&&o.DomUtil.removeClass(n,"leaflet-active"),this._isTapValid()&&this._simulateEvent("click",i)}},_isTapValid:function(){return this._newPos.distanceTo(this._startPos)<=this._map.options.tapTolerance},_onMove:function(t){var e=t.touches[0];this._newPos=new o.Point(e.clientX,e.clientY)},_simulateEvent:function(i,n){var o=e.createEvent("MouseEvents");o._simulated=!0,n.target._simulatedClick=!0,o.initMouseEvent(i,!0,!0,t,1,n.screenX,n.screenY,n.clientX,n.clientY,!1,!1,!1,!1,0,null),n.target.dispatchEvent(o)}}),o.Browser.touch&&!o.Browser.pointer&&o.Map.addInitHook("addHandler","tap",o.Map.Tap),o.Map.mergeOptions({boxZoom:!0}),o.Map.BoxZoom=o.Handler.extend({initialize:function(t){this._map=t,this._container=t._container,this._pane=t._panes.overlayPane,this._moved=!1},addHooks:function(){o.DomEvent.on(this._container,"mousedown",this._onMouseDown,this)},removeHooks:function(){o.DomEvent.off(this._container,"mousedown",this._onMouseDown),this._moved=!1},moved:function(){return this._moved},_onMouseDown:function(t){return this._moved=!1,!(!t.shiftKey||1!==t.which&&1!==t.button)&&(o.DomUtil.disableTextSelection(),o.DomUtil.disableImageDrag(),this._startLayerPoint=this._map.mouseEventToLayerPoint(t),void o.DomEvent.on(e,"mousemove",this._onMouseMove,this).on(e,"mouseup",this._onMouseUp,this).on(e,"keydown",this._onKeyDown,this))},_onMouseMove:function(t){this._moved||(this._box=o.DomUtil.create("div","leaflet-zoom-box",this._pane),o.DomUtil.setPosition(this._box,this._startLayerPoint),this._container.style.cursor="crosshair",this._map.fire("boxzoomstart"));var e=this._startLayerPoint,i=this._box,n=this._map.mouseEventToLayerPoint(t),s=n.subtract(e),a=new 
o.Point(Math.min(n.x,e.x),Math.min(n.y,e.y));o.DomUtil.setPosition(i,a),this._moved=!0,i.style.width=Math.max(0,Math.abs(s.x)-4)+"px",i.style.height=Math.max(0,Math.abs(s.y)-4)+"px"},_finish:function(){this._moved&&(this._pane.removeChild(this._box),this._container.style.cursor=""),o.DomUtil.enableTextSelection(),o.DomUtil.enableImageDrag(),o.DomEvent.off(e,"mousemove",this._onMouseMove).off(e,"mouseup",this._onMouseUp).off(e,"keydown",this._onKeyDown)},_onMouseUp:function(t){this._finish();var e=this._map,i=e.mouseEventToLayerPoint(t);if(!this._startLayerPoint.equals(i)){var n=new o.LatLngBounds(e.layerPointToLatLng(this._startLayerPoint),e.layerPointToLatLng(i));e.fitBounds(n),e.fire("boxzoomend",{boxZoomBounds:n})}},_onKeyDown:function(t){27===t.keyCode&&this._finish()}}),o.Map.addInitHook("addHandler","boxZoom",o.Map.BoxZoom),o.Map.mergeOptions({keyboard:!0,keyboardPanOffset:80,keyboardZoomOffset:1}),o.Map.Keyboard=o.Handler.extend({keyCodes:{left:[37],right:[39],down:[40],up:[38],zoomIn:[187,107,61,171],zoomOut:[189,109,173]},initialize:function(t){this._map=t,this._setPanOffset(t.options.keyboardPanOffset),this._setZoomOffset(t.options.keyboardZoomOffset)},addHooks:function(){var t=this._map._container;-1===t.tabIndex&&(t.tabIndex="0"),o.DomEvent.on(t,"focus",this._onFocus,this).on(t,"blur",this._onBlur,this).on(t,"mousedown",this._onMouseDown,this),this._map.on("focus",this._addHooks,this).on("blur",this._removeHooks,this)},removeHooks:function(){this._removeHooks();var t=this._map._container;o.DomEvent.off(t,"focus",this._onFocus,this).off(t,"blur",this._onBlur,this).off(t,"mousedown",this._onMouseDown,this),this._map.off("focus",this._addHooks,this).off("blur",this._removeHooks,this)},_onMouseDown:function(){if(!this._focused){var i=e.body,n=e.documentElement,o=i.scrollTop||n.scrollTop,s=i.scrollLeft||n.scrollLeft;this._map._container.focus(),t.scrollTo(s,o)}},_onFocus:function(){this._focused=!0,this._map.fire("focus")},_onBlur:function(){this._focused=!1,this._map.fire("blur")},_setPanOffset:function(t){var e,i,n=this._panKeys={},o=this.keyCodes;for(e=0,i=o.left.length;i>e;e++)n[o.left[e]]=[-1*t,0];for(e=0,i=o.right.length;i>e;e++)n[o.right[e]]=[t,0];for(e=0,i=o.down.length;i>e;e++)n[o.down[e]]=[0,t];for(e=0,i=o.up.length;i>e;e++)n[o.up[e]]=[0,-1*t]},_setZoomOffset:function(t){var e,i,n=this._zoomKeys={},o=this.keyCodes;for(e=0,i=o.zoomIn.length;i>e;e++)n[o.zoomIn[e]]=t;for(e=0,i=o.zoomOut.length;i>e;e++)n[o.zoomOut[e]]=-t},_addHooks:function(){o.DomEvent.on(e,"keydown",this._onKeyDown,this)},_removeHooks:function(){o.DomEvent.off(e,"keydown",this._onKeyDown,this)},_onKeyDown:function(t){var e=t.keyCode,i=this._map;if(e in this._panKeys){if(i._panAnim&&i._panAnim._inProgress)return;i.panBy(this._panKeys[e]),i.options.maxBounds&&i.panInsideBounds(i.options.maxBounds)}else{if(!(e in this._zoomKeys))return;i.setZoom(i.getZoom()+this._zoomKeys[e])}o.DomEvent.stop(t)}}),o.Map.addInitHook("addHandler","keyboard",o.Map.Keyboard),o.Handler.MarkerDrag=o.Handler.extend({initialize:function(t){this._marker=t},addHooks:function(){var t=this._marker._icon;this._draggable||(this._draggable=new 
o.Draggable(t,t)),this._draggable.on("dragstart",this._onDragStart,this).on("drag",this._onDrag,this).on("dragend",this._onDragEnd,this),this._draggable.enable(),o.DomUtil.addClass(this._marker._icon,"leaflet-marker-draggable")},removeHooks:function(){this._draggable.off("dragstart",this._onDragStart,this).off("drag",this._onDrag,this).off("dragend",this._onDragEnd,this),this._draggable.disable(),o.DomUtil.removeClass(this._marker._icon,"leaflet-marker-draggable")},moved:function(){return this._draggable&&this._draggable._moved},_onDragStart:function(){this._marker.closePopup().fire("movestart").fire("dragstart")},_onDrag:function(){var t=this._marker,e=t._shadow,i=o.DomUtil.getPosition(t._icon),n=t._map.layerPointToLatLng(i);e&&o.DomUtil.setPosition(e,i),t._latlng=n,t.fire("move",{latlng:n}).fire("drag")},_onDragEnd:function(t){this._marker.fire("moveend").fire("dragend",t)}}),o.Control=o.Class.extend({options:{position:"topright"},initialize:function(t){o.setOptions(this,t)},getPosition:function(){return this.options.position},setPosition:function(t){var e=this._map;return e&&e.removeControl(this),this.options.position=t,e&&e.addControl(this),this},getContainer:function(){return this._container},addTo:function(t){this._map=t;var e=this._container=this.onAdd(t),i=this.getPosition(),n=t._controlCorners[i];return o.DomUtil.addClass(e,"leaflet-control"),-1!==i.indexOf("bottom")?n.insertBefore(e,n.firstChild):n.appendChild(e),this},removeFrom:function(t){var e=this.getPosition(),i=t._controlCorners[e];return i.removeChild(this._container),this._map=null,this.onRemove&&this.onRemove(t),this},_refocusOnMap:function(){this._map&&this._map.getContainer().focus()}}),o.control=function(t){return new o.Control(t)},o.Map.include({addControl:function(t){return t.addTo(this),this},removeControl:function(t){return t.removeFrom(this),this},_initControlPos:function(){function t(t,s){var a=i+t+" "+i+s;e[t+s]=o.DomUtil.create("div",a,n)}var e=this._controlCorners={},i="leaflet-",n=this._controlContainer=o.DomUtil.create("div",i+"control-container",this._container);t("top","left"),t("top","right"),t("bottom","left"),t("bottom","right")},_clearControlPos:function(){this._container.removeChild(this._controlContainer)}}),o.Control.Zoom=o.Control.extend({options:{position:"topleft",zoomInText:"+",zoomInTitle:"Zoom in",zoomOutText:"-",zoomOutTitle:"Zoom out"},onAdd:function(t){var e="leaflet-control-zoom",i=o.DomUtil.create("div",e+" leaflet-bar");return this._map=t,this._zoomInButton=this._createButton(this.options.zoomInText,this.options.zoomInTitle,e+"-in",i,this._zoomIn,this),this._zoomOutButton=this._createButton(this.options.zoomOutText,this.options.zoomOutTitle,e+"-out",i,this._zoomOut,this),this._updateDisabled(),t.on("zoomend zoomlevelschange",this._updateDisabled,this),i},onRemove:function(t){t.off("zoomend zoomlevelschange",this._updateDisabled,this)},_zoomIn:function(t){this._map.zoomIn(t.shiftKey?3:1)},_zoomOut:function(t){this._map.zoomOut(t.shiftKey?3:1)},_createButton:function(t,e,i,n,s,a){var r=o.DomUtil.create("a",i,n);r.innerHTML=t,r.href="#",r.title=e;var h=o.DomEvent.stopPropagation;return o.DomEvent.on(r,"click",h).on(r,"mousedown",h).on(r,"dblclick",h).on(r,"click",o.DomEvent.preventDefault).on(r,"click",s,a).on(r,"click",this._refocusOnMap,a),r},_updateDisabled:function(){var 
t=this._map,e="leaflet-disabled";o.DomUtil.removeClass(this._zoomInButton,e),o.DomUtil.removeClass(this._zoomOutButton,e),t._zoom===t.getMinZoom()&&o.DomUtil.addClass(this._zoomOutButton,e),t._zoom===t.getMaxZoom()&&o.DomUtil.addClass(this._zoomInButton,e)}}),o.Map.mergeOptions({zoomControl:!0}),o.Map.addInitHook(function(){this.options.zoomControl&&(this.zoomControl=new o.Control.Zoom,this.addControl(this.zoomControl))}),o.control.zoom=function(t){return new o.Control.Zoom(t)},o.Control.Attribution=o.Control.extend({options:{position:"bottomright",prefix:'Leaflet'},initialize:function(t){o.setOptions(this,t),this._attributions={}},onAdd:function(t){this._container=o.DomUtil.create("div","leaflet-control-attribution"),o.DomEvent.disableClickPropagation(this._container);for(var e in t._layers)t._layers[e].getAttribution&&this.addAttribution(t._layers[e].getAttribution());return t.on("layeradd",this._onLayerAdd,this).on("layerremove",this._onLayerRemove,this),this._update(),this._container},onRemove:function(t){t.off("layeradd",this._onLayerAdd).off("layerremove",this._onLayerRemove)},setPrefix:function(t){return this.options.prefix=t,this._update(),this},addAttribution:function(t){return t?(this._attributions[t]||(this._attributions[t]=0),this._attributions[t]++,this._update(),this):void 0},removeAttribution:function(t){return t?(this._attributions[t]&&(this._attributions[t]--,this._update()),this):void 0},_update:function(){if(this._map){var t=[];for(var e in this._attributions)this._attributions[e]&&t.push(e);var i=[];this.options.prefix&&i.push(this.options.prefix),t.length&&i.push(t.join(", ")),this._container.innerHTML=i.join(" | ")}},_onLayerAdd:function(t){t.layer.getAttribution&&this.addAttribution(t.layer.getAttribution())},_onLayerRemove:function(t){t.layer.getAttribution&&this.removeAttribution(t.layer.getAttribution())}}),o.Map.mergeOptions({attributionControl:!0}),o.Map.addInitHook(function(){this.options.attributionControl&&(this.attributionControl=(new o.Control.Attribution).addTo(this))}),o.control.attribution=function(t){return new o.Control.Attribution(t)},o.Control.Scale=o.Control.extend({options:{position:"bottomleft",maxWidth:100,metric:!0,imperial:!0,updateWhenIdle:!1},onAdd:function(t){this._map=t;var e="leaflet-control-scale",i=o.DomUtil.create("div",e),n=this.options;return this._addScales(n,e,i),t.on(n.updateWhenIdle?"moveend":"move",this._update,this),t.whenReady(this._update,this),i},onRemove:function(t){t.off(this.options.updateWhenIdle?"moveend":"move",this._update,this)},_addScales:function(t,e,i){t.metric&&(this._mScale=o.DomUtil.create("div",e+"-line",i)),t.imperial&&(this._iScale=o.DomUtil.create("div",e+"-line",i))},_update:function(){var t=this._map.getBounds(),e=t.getCenter().lat,i=6378137*Math.PI*Math.cos(e*Math.PI/180),n=i*(t.getNorthEast().lng-t.getSouthWest().lng)/180,o=this._map.getSize(),s=this.options,a=0;o.x>0&&(a=n*(s.maxWidth/o.x)),this._updateScales(s,a)},_updateScales:function(t,e){t.metric&&e&&this._updateMetric(e),t.imperial&&e&&this._updateImperial(e)},_updateMetric:function(t){var e=this._getRoundNum(t);this._mScale.style.width=this._getScaleWidth(e/t)+"px",this._mScale.innerHTML=1e3>e?e+" m":e/1e3+" km"},_updateImperial:function(t){var e,i,n,o=3.2808399*t,s=this._iScale;o>5280?(e=o/5280,i=this._getRoundNum(e),s.style.width=this._getScaleWidth(i/e)+"px",s.innerHTML=i+" mi"):(n=this._getRoundNum(o),s.style.width=this._getScaleWidth(n/o)+"px",s.innerHTML=n+" ft")},_getScaleWidth:function(t){return 
Math.round(this.options.maxWidth*t)-10},_getRoundNum:function(t){var e=Math.pow(10,(Math.floor(t)+"").length-1),i=t/e;return i=i>=10?10:i>=5?5:i>=3?3:i>=2?2:1,e*i}}),o.control.scale=function(t){return new o.Control.Scale(t)},o.Control.Layers=o.Control.extend({options:{collapsed:!0,position:"topright",autoZIndex:!0},initialize:function(t,e,i){o.setOptions(this,i),this._layers={},this._lastZIndex=0,this._handlingClick=!1;for(var n in t)this._addLayer(t[n],n);for(n in e)this._addLayer(e[n],n,!0)},onAdd:function(t){return this._initLayout(),this._update(),t.on("layeradd",this._onLayerChange,this).on("layerremove",this._onLayerChange,this),this._container},onRemove:function(t){t.off("layeradd",this._onLayerChange,this).off("layerremove",this._onLayerChange,this)},addBaseLayer:function(t,e){return this._addLayer(t,e),this._update(),this},addOverlay:function(t,e){return this._addLayer(t,e,!0),this._update(),this},removeLayer:function(t){var e=o.stamp(t);return delete this._layers[e],this._update(),this},_initLayout:function(){var t="leaflet-control-layers",e=this._container=o.DomUtil.create("div",t);e.setAttribute("aria-haspopup",!0),o.Browser.touch?o.DomEvent.on(e,"click",o.DomEvent.stopPropagation):o.DomEvent.disableClickPropagation(e).disableScrollPropagation(e);var i=this._form=o.DomUtil.create("form",t+"-list");if(this.options.collapsed){o.Browser.android||o.DomEvent.on(e,"mouseover",this._expand,this).on(e,"mouseout",this._collapse,this);var n=this._layersLink=o.DomUtil.create("a",t+"-toggle",e);n.href="#",n.title="Layers",o.Browser.touch?o.DomEvent.on(n,"click",o.DomEvent.stop).on(n,"click",this._expand,this):o.DomEvent.on(n,"focus",this._expand,this),o.DomEvent.on(i,"click",function(){setTimeout(o.bind(this._onInputClick,this),0)},this),this._map.on("click",this._collapse,this)}else this._expand();this._baseLayersList=o.DomUtil.create("div",t+"-base",i),this._separator=o.DomUtil.create("div",t+"-separator",i),this._overlaysList=o.DomUtil.create("div",t+"-overlays",i),e.appendChild(i)},_addLayer:function(t,e,i){var n=o.stamp(t);this._layers[n]={layer:t,name:e,overlay:i},this.options.autoZIndex&&t.setZIndex&&(this._lastZIndex++,t.setZIndex(this._lastZIndex))},_update:function(){if(this._container){this._baseLayersList.innerHTML="",this._overlaysList.innerHTML="";var t,e,i=!1,n=!1;for(t in this._layers)e=this._layers[t],this._addItem(e),n=n||e.overlay,i=i||!e.overlay;this._separator.style.display=n&&i?"":"none"}},_onLayerChange:function(t){var e=this._layers[o.stamp(t.layer)];if(e){this._handlingClick||this._update();var i=e.overlay?"layeradd"===t.type?"overlayadd":"overlayremove":"layeradd"===t.type?"baselayerchange":null;i&&this._map.fire(i,e)}},_createRadioElement:function(t,i){var n='t;t++)e=n[t],i=this._layers[e.layerId],e.checked&&!this._map.hasLayer(i.layer)?this._map.addLayer(i.layer):!e.checked&&this._map.hasLayer(i.layer)&&this._map.removeLayer(i.layer);this._handlingClick=!1,this._refocusOnMap()},_expand:function(){o.DomUtil.addClass(this._container,"leaflet-control-layers-expanded")},_collapse:function(){this._container.className=this._container.className.replace(" leaflet-control-layers-expanded","")}}),o.control.layers=function(t,e,i){return new o.Control.Layers(t,e,i)},o.PosAnimation=o.Class.extend({includes:o.Mixin.Events,run:function(t,e,i,n){this.stop(),this._el=t,this._inProgress=!0,this._newPos=e,this.fire("start"),t.style[o.DomUtil.TRANSITION]="all "+(i||.25)+"s 
cubic-bezier(0,0,"+(n||.5)+",1)",o.DomEvent.on(t,o.DomUtil.TRANSITION_END,this._onTransitionEnd,this),o.DomUtil.setPosition(t,e),o.Util.falseFn(t.offsetWidth),this._stepTimer=setInterval(o.bind(this._onStep,this),50)},stop:function(){this._inProgress&&(o.DomUtil.setPosition(this._el,this._getPos()),this._onTransitionEnd(),o.Util.falseFn(this._el.offsetWidth))},_onStep:function(){var t=this._getPos();return t?(this._el._leaflet_pos=t,void this.fire("step")):void this._onTransitionEnd()},_transformRe:/([-+]?(?:\d*\.)?\d+)\D*, ([-+]?(?:\d*\.)?\d+)\D*\)/,_getPos:function(){var e,i,n,s=this._el,a=t.getComputedStyle(s);if(o.Browser.any3d){if(n=a[o.DomUtil.TRANSFORM].match(this._transformRe),!n)return;e=parseFloat(n[1]),i=parseFloat(n[2])}else e=parseFloat(a.left),i=parseFloat(a.top);return new o.Point(e,i,!0)},_onTransitionEnd:function(){o.DomEvent.off(this._el,o.DomUtil.TRANSITION_END,this._onTransitionEnd,this),this._inProgress&&(this._inProgress=!1,this._el.style[o.DomUtil.TRANSITION]="",this._el._leaflet_pos=this._newPos,clearInterval(this._stepTimer),this.fire("step").fire("end"))}}),o.Map.include({setView:function(t,e,n){if(e=e===i?this._zoom:this._limitZoom(e),t=this._limitCenter(o.latLng(t),e,this.options.maxBounds),n=n||{},this._panAnim&&this._panAnim.stop(),this._loaded&&!n.reset&&n!==!0){n.animate!==i&&(n.zoom=o.extend({animate:n.animate},n.zoom),n.pan=o.extend({animate:n.animate},n.pan));var s=this._zoom!==e?this._tryAnimatedZoom&&this._tryAnimatedZoom(t,e,n.zoom):this._tryAnimatedPan(t,n.pan);if(s)return clearTimeout(this._sizeTimer),this}return this._resetView(t,e),this},panBy:function(t,e){if(t=o.point(t).round(),e=e||{},!t.x&&!t.y)return this;if(this._panAnim||(this._panAnim=new o.PosAnimation,this._panAnim.on({step:this._onPanTransitionStep,end:this._onPanTransitionEnd},this)),e.noMoveStart||this.fire("movestart"),e.animate!==!1){o.DomUtil.addClass(this._mapPane,"leaflet-pan-anim");var i=this._getMapPanePos().subtract(t);this._panAnim.run(this._mapPane,i,e.duration||.25,e.easeLinearity)}else this._rawPanBy(t),this.fire("move").fire("moveend");return this},_onPanTransitionStep:function(){this.fire("move")},_onPanTransitionEnd:function(){o.DomUtil.removeClass(this._mapPane,"leaflet-pan-anim"),this.fire("moveend")},_tryAnimatedPan:function(t,e){var i=this._getCenterOffset(t)._floor();return!((e&&e.animate)!==!0&&!this.getSize().contains(i))&&(this.panBy(i,e),!0)}}),o.PosAnimation=o.DomUtil.TRANSITION?o.PosAnimation:o.PosAnimation.extend({run:function(t,e,i,n){this.stop(),this._el=t,this._inProgress=!0,this._duration=i||.25,this._easeOutPower=1/Math.max(n||.5,.2),this._startPos=o.DomUtil.getPosition(t),this._offset=e.subtract(this._startPos),this._startTime=+new Date,this.fire("start"),this._animate()},stop:function(){this._inProgress&&(this._step(),this._complete())},_animate:function(){this._animId=o.Util.requestAnimFrame(this._animate,this),this._step()},_step:function(){var t=+new Date-this._startTime,e=1e3*this._duration;e>t?this._runFrame(this._easeOut(t/e)):(this._runFrame(1),this._complete())},_runFrame:function(t){var e=this._startPos.add(this._offset.multiplyBy(t));o.DomUtil.setPosition(this._el,e),this.fire("step")},_complete:function(){o.Util.cancelAnimFrame(this._animId),this._inProgress=!1,this.fire("end")},_easeOut:function(t){return 
1-Math.pow(1-t,this._easeOutPower)}}),o.Map.mergeOptions({zoomAnimation:!0,zoomAnimationThreshold:4}),o.DomUtil.TRANSITION&&o.Map.addInitHook(function(){this._zoomAnimated=this.options.zoomAnimation&&o.DomUtil.TRANSITION&&o.Browser.any3d&&!o.Browser.android23&&!o.Browser.mobileOpera,this._zoomAnimated&&o.DomEvent.on(this._mapPane,o.DomUtil.TRANSITION_END,this._catchTransitionEnd,this)}),o.Map.include(o.DomUtil.TRANSITION?{_catchTransitionEnd:function(t){this._animatingZoom&&t.propertyName.indexOf("transform")>=0&&this._onZoomTransitionEnd()},_nothingToAnimate:function(){return!this._container.getElementsByClassName("leaflet-zoom-animated").length},_tryAnimatedZoom:function(t,e,i){if(this._animatingZoom)return!0;if(i=i||{},!this._zoomAnimated||i.animate===!1||this._nothingToAnimate()||Math.abs(e-this._zoom)>this.options.zoomAnimationThreshold)return!1;var n=this.getZoomScale(e),o=this._getCenterOffset(t)._divideBy(1-1/n),s=this._getCenterLayerPoint()._add(o);return!(i.animate!==!0&&!this.getSize().contains(o))&&(this.fire("movestart").fire("zoomstart"),this._animateZoom(t,e,s,n,null,!0),!0)},_animateZoom:function(t,e,i,n,s,a,r){r||(this._animatingZoom=!0),o.DomUtil.addClass(this._mapPane,"leaflet-zoom-anim"),this._animateToCenter=t,this._animateToZoom=e,o.Draggable&&(o.Draggable._disabled=!0),o.Util.requestAnimFrame(function(){this.fire("zoomanim",{center:t,zoom:e,origin:i,scale:n,delta:s,backwards:a}),setTimeout(o.bind(this._onZoomTransitionEnd,this),250)},this)},_onZoomTransitionEnd:function(){this._animatingZoom&&(this._animatingZoom=!1,o.DomUtil.removeClass(this._mapPane,"leaflet-zoom-anim"),o.Util.requestAnimFrame(function(){this._resetView(this._animateToCenter,this._animateToZoom,!0,!0),o.Draggable&&(o.Draggable._disabled=!1)},this))}}:{}),o.TileLayer.include({_animateZoom:function(t){this._animating||(this._animating=!0,this._prepareBgBuffer());var e=this._bgBuffer,i=o.DomUtil.TRANSFORM,n=t.delta?o.DomUtil.getTranslateString(t.delta):e.style[i],s=o.DomUtil.getScaleString(t.scale,t.origin);e.style[i]=t.backwards?s+" "+n:n+" "+s},_endZoomAnim:function(){var t=this._tileContainer,e=this._bgBuffer;t.style.visibility="",t.parentNode.appendChild(t),o.Util.falseFn(e.offsetWidth);var i=this._map.getZoom();(i>this.options.maxZoom||i.5&&.5>n?(t.style.visibility="hidden",void this._stopLoadingImages(t)):(e.style.visibility="hidden",e.style[o.DomUtil.TRANSFORM]="",this._tileContainer=e,e=this._bgBuffer=t,this._stopLoadingImages(e),void clearTimeout(this._clearBgBufferTimer))},_getLoadedTilesPercentage:function(t){var e,i,n=t.getElementsByTagName("img"),o=0;for(e=0,i=n.length;i>e;e++)n[e].complete&&o++;return o/i},_stopLoadingImages:function(t){var e,i,n,s=Array.prototype.slice.call(t.getElementsByTagName("img"));for(e=0,i=s.length;i>e;e++)n=s[e],n.complete||(n.onload=o.Util.falseFn,n.onerror=o.Util.falseFn,n.src=o.Util.emptyImageUrl,n.parentNode.removeChild(n))}}),o.Map.include({_defaultLocateOptions:{watch:!1,setView:!1,maxZoom:1/0,timeout:1e4,maximumAge:0,enableHighAccuracy:!1},locate:function(t){if(t=this._locateOptions=o.extend(this._defaultLocateOptions,t),!navigator.geolocation)return this._handleGeolocationError({code:0,message:"Geolocation not supported."}),this;var e=o.bind(this._handleGeolocationResponse,this),i=o.bind(this._handleGeolocationError,this);return t.watch?this._locationWatchId=navigator.geolocation.watchPosition(e,i,t):navigator.geolocation.getCurrentPosition(e,i,t),this},stopLocate:function(){return 
navigator.geolocation&&navigator.geolocation.clearWatch(this._locationWatchId),this._locateOptions&&(this._locateOptions.setView=!1),this},_handleGeolocationError:function(t){var e=t.code,i=t.message||(1===e?"permission denied":2===e?"position unavailable":"timeout");this._locateOptions.setView&&!this._loaded&&this.fitWorld(),this.fire("locationerror",{code:e,message:"Geolocation error: "+i+"."})},_handleGeolocationResponse:function(t){var e=t.coords.latitude,i=t.coords.longitude,n=new o.LatLng(e,i),s=180*t.coords.accuracy/40075017,a=s/Math.cos(o.LatLng.DEG_TO_RAD*e),r=o.latLngBounds([e-s,i-a],[e+s,i+a]),h=this._locateOptions;if(h.setView){var l=Math.min(this.getBoundsZoom(r),h.maxZoom);this.setView(n,l)}var u={latlng:n,bounds:r,timestamp:t.timestamp};for(var c in t.coords)"number"==typeof t.coords[c]&&(u[c]=t.coords[c]);this.fire("locationfound",u)}})}(window,document)- \ No newline at end of file diff --git a/homeassistant/components/frontend/www_static/panels/ha-panel-map.html.gz b/homeassistant/components/frontend/www_static/panels/ha-panel-map.html.gz index f62bdb183b1..faab2715587 100644 Binary files a/homeassistant/components/frontend/www_static/panels/ha-panel-map.html.gz and b/homeassistant/components/frontend/www_static/panels/ha-panel-map.html.gz differ diff --git a/homeassistant/components/frontend/www_static/service_worker.js b/homeassistant/components/frontend/www_static/service_worker.js index d92c374406b..6c01252b0d3 100644 --- a/homeassistant/components/frontend/www_static/service_worker.js +++ b/homeassistant/components/frontend/www_static/service_worker.js @@ -1 +1 @@ -"use strict";function setOfCachedUrls(e){return e.keys().then(function(e){return e.map(function(e){return e.url})}).then(function(e){return new Set(e)})}function notificationEventCallback(e,t){firePushCallback({action:t.action,data:t.notification.data,tag:t.notification.tag,type:e},t.notification.data.jwt)}function firePushCallback(e,t){delete e.data.jwt,0===Object.keys(e.data).length&&e.data.constructor===Object&&delete e.data,fetch("/api/notify.html5/callback",{method:"POST",headers:new Headers({"Content-Type":"application/json",Authorization:"Bearer "+t}),body:JSON.stringify(e)})}var 
precacheConfig=[["/","8d89b35f10257827112f606055f6b9d9"],["/frontend/panels/dev-event-550bf85345c454274a40d15b2795a002.html","6977c253b5b4da588d50b0aaa50b21f4"],["/frontend/panels/dev-info-ec613406ce7e20d93754233d55625c8a.html","8e28a4c617fd6963b45103d5e5c80617"],["/frontend/panels/dev-service-c7974458ebc33412d95497e99b785e12.html","3a551b1ea5fd8b64dee7b1a458d9ffde"],["/frontend/panels/dev-state-4be627b74e683af14ef779d8203ec674.html","b79c88170416821dee7b9339b6456a34"],["/frontend/panels/dev-template-d23943fa0370f168714da407c90091a2.html","2cf2426a6aa4ee9c1df74926dc475bc8"],["/frontend/panels/map-af7d04aff7dd5479c5a0016bc8d4dd7d.html","6031df1b4d23d5b321208449b2d293f8"],["/static/core-9b3e5ab4eac7e3b074e0daf3f619a638.js","a778d967944d0cc06b8b1e477e7afc1c"],["/static/frontend-5854807d361de26fe93ad474010f19d2.html","ee9c864599dcd4838c75bcd4a9d44622"],["/static/mdi-46a76f877ac9848899b8ed382427c16f.html","a846c4082dd5cffd88ac72cbe943e691"],["static/fonts/roboto/Roboto-Bold.ttf","d329cc8b34667f114a95422aaad1b063"],["static/fonts/roboto/Roboto-Light.ttf","7b5fb88f12bec8143f00e21bc3222124"],["static/fonts/roboto/Roboto-Medium.ttf","fe13e4170719c2fc586501e777bde143"],["static/fonts/roboto/Roboto-Regular.ttf","ac3f799d5bbaf5196fab15ab8de8431c"],["static/icons/favicon-192x192.png","419903b8422586a7e28021bbe9011175"],["static/icons/favicon.ico","04235bda7843ec2fceb1cbe2bc696cf4"],["static/images/card_media_player_bg.png","a34281d1c1835d338a642e90930e61aa"],["static/webcomponents-lite.min.js","b0f32ad3c7749c40d486603f31c9d8b1"]],cacheName="sw-precache-v2--"+(self.registration?self.registration.scope:""),ignoreUrlParametersMatching=[/^utm_/],addDirectoryIndex=function(e,t){var a=new URL(e);return"/"===a.pathname.slice(-1)&&(a.pathname+=t),a.toString()},createCacheKey=function(e,t,a,n){var c=new URL(e);return n&&c.toString().match(n)||(c.search+=(c.search?"&":"")+encodeURIComponent(t)+"="+encodeURIComponent(a)),c.toString()},isPathWhitelisted=function(e,t){if(0===e.length)return!0;var a=new URL(t).pathname;return e.some(function(e){return a.match(e)})},stripIgnoredUrlParameters=function(e,t){var a=new URL(e);return a.search=a.search.slice(1).split("&").map(function(e){return e.split("=")}).filter(function(e){return t.every(function(t){return!t.test(e[0])})}).map(function(e){return e.join("=")}).join("&"),a.toString()},hashParamName="_sw-precache",urlsToCacheKeys=new Map(precacheConfig.map(function(e){var t=e[0],a=e[1],n=new URL(t,self.location),c=createCacheKey(n,hashParamName,a,!1);return[n.toString(),c]}));self.addEventListener("install",function(e){e.waitUntil(caches.open(cacheName).then(function(e){return setOfCachedUrls(e).then(function(t){return Promise.all(Array.from(urlsToCacheKeys.values()).map(function(a){if(!t.has(a))return e.add(new Request(a,{credentials:"same-origin"}))}))})}).then(function(){return self.skipWaiting()}))}),self.addEventListener("activate",function(e){var t=new Set(urlsToCacheKeys.values());e.waitUntil(caches.open(cacheName).then(function(e){return e.keys().then(function(a){return Promise.all(a.map(function(a){if(!t.has(a.url))return e.delete(a)}))})}).then(function(){return self.clients.claim()}))}),self.addEventListener("fetch",function(e){if("GET"===e.request.method){var t,a=stripIgnoredUrlParameters(e.request.url,ignoreUrlParametersMatching);t=urlsToCacheKeys.has(a);var n="index.html";!t&&n&&(a=addDirectoryIndex(a,n),t=urlsToCacheKeys.has(a));var 
c="/";!t&&c&&"navigate"===e.request.mode&&isPathWhitelisted(["^((?!(static|api|local|service_worker.js|manifest.json)).)*$"],e.request.url)&&(a=new URL(c,self.location).toString(),t=urlsToCacheKeys.has(a)),t&&e.respondWith(caches.open(cacheName).then(function(e){return e.match(urlsToCacheKeys.get(a))}).catch(function(t){return console.warn('Couldn\'t serve response for "%s" from cache: %O',e.request.url,t),fetch(e.request)}))}}),self.addEventListener("push",function(e){var t;e.data&&(t=e.data.json(),e.waitUntil(self.registration.showNotification(t.title,t).then(function(e){firePushCallback({type:"received",tag:t.tag,data:t.data},t.data.jwt)})))}),self.addEventListener("notificationclick",function(e){var t;notificationEventCallback("clicked",e),e.notification.close(),e.notification.data&&e.notification.data.url&&(t=e.notification.data.url,t&&e.waitUntil(clients.matchAll({type:"window"}).then(function(e){var a,n;for(a=0;a start_time))) + (states.last_updated > start_time)) + if filters: + query = filters.apply(query, entity_ids) if end_time is not None: query = query.filter(states.last_updated < end_time) - if entity_id is not None: - query = query.filter_by(entity_id=entity_id.lower()) - states = ( state for state in recorder.execute( query.order_by(states.entity_id, states.last_updated)) - if _is_significant(state)) + if (_is_significant(state) and + not state.attributes.get(ATTR_HIDDEN, False))) - return states_to_json(states, start_time, entity_id) + return states_to_json(states, start_time, entity_id, filters) def state_changes_during_period(start_time, end_time=None, entity_id=None): @@ -80,7 +104,7 @@ def state_changes_during_period(start_time, end_time=None, entity_id=None): return states_to_json(states, start_time, entity_id) -def get_states(utc_point_in_time, entity_ids=None, run=None): +def get_states(utc_point_in_time, entity_ids=None, run=None, filters=None): """Return the states at a specific point in time.""" if run is None: run = recorder.run_information(utc_point_in_time) @@ -96,12 +120,11 @@ def get_states(utc_point_in_time, entity_ids=None, run=None): func.max(states.state_id).label('max_state_id') ).filter( (states.created >= run.start) & - (states.created < utc_point_in_time) - ) - - if entity_ids is not None: - most_recent_state_ids = most_recent_state_ids.filter( - states.entity_id.in_(entity_ids)) + (states.created < utc_point_in_time) & + (~states.domain.in_(IGNORE_DOMAINS))) + if filters: + most_recent_state_ids = filters.apply(most_recent_state_ids, + entity_ids) most_recent_state_ids = most_recent_state_ids.group_by( states.entity_id).subquery() @@ -109,10 +132,12 @@ def get_states(utc_point_in_time, entity_ids=None, run=None): query = recorder.query('States').join(most_recent_state_ids, and_( states.state_id == most_recent_state_ids.c.max_state_id)) - return recorder.execute(query) + for state in recorder.execute(query): + if not state.attributes.get(ATTR_HIDDEN, False): + yield state -def states_to_json(states, start_time, entity_id): +def states_to_json(states, start_time, entity_id, filters=None): """Convert SQL results into JSON friendly data structure. 
This takes our state list and turns it into a JSON friendly data @@ -127,7 +152,7 @@ def states_to_json(states, start_time, entity_id): entity_ids = [entity_id] if entity_id is not None else None # Get the states at the start time - for state in get_states(start_time, entity_ids): + for state in get_states(start_time, entity_ids, filters=filters): state.last_changed = start_time state.last_updated = start_time result[state.entity_id].append(state) @@ -140,16 +165,25 @@ def states_to_json(states, start_time, entity_id): def get_state(utc_point_in_time, entity_id, run=None): """Return a state at a specific point in time.""" - states = get_states(utc_point_in_time, (entity_id,), run) - + states = list(get_states(utc_point_in_time, (entity_id,), run)) return states[0] if states else None # pylint: disable=unused-argument def setup(hass, config): """Setup the history hooks.""" - hass.wsgi.register_view(Last5StatesView) - hass.wsgi.register_view(HistoryPeriodView) + filters = Filters() + exclude = config[DOMAIN].get(CONF_EXCLUDE) + if exclude: + filters.excluded_entities = exclude[CONF_ENTITIES] + filters.excluded_domains = exclude[CONF_DOMAINS] + include = config[DOMAIN].get(CONF_INCLUDE) + if include: + filters.included_entities = include[CONF_ENTITIES] + filters.included_domains = include[CONF_DOMAINS] + + hass.wsgi.register_view(Last5StatesView(hass)) + hass.wsgi.register_view(HistoryPeriodView(hass, filters)) register_built_in_panel(hass, 'history', 'History', 'mdi:poll-box') return True @@ -161,6 +195,10 @@ class Last5StatesView(HomeAssistantView): url = '/api/history/entity//recent_states' name = 'api:history:entity-recent-states' + def __init__(self, hass): + """Initilalize the history last 5 states view.""" + super().__init__(hass) + def get(self, request, entity_id): """Retrieve last 5 states of entity.""" return self.json(last_5_states(entity_id)) @@ -173,6 +211,11 @@ class HistoryPeriodView(HomeAssistantView): name = 'api:history:view-period' extra_urls = ['/api/history/period/'] + def __init__(self, hass, filters): + """Initilalize the history period view.""" + super().__init__(hass) + self.filters = filters + def get(self, request, datetime=None): """Return history over a period of time.""" one_day = timedelta(days=1) @@ -185,8 +228,68 @@ class HistoryPeriodView(HomeAssistantView): end_time = start_time + one_day entity_id = request.args.get('filter_entity_id') - return self.json( - get_significant_states(start_time, end_time, entity_id).values()) + return self.json(get_significant_states( + start_time, end_time, entity_id, self.filters).values()) + + +# pylint: disable=too-few-public-methods +class Filters(object): + """Container for the configured include and exclude filters.""" + + def __init__(self): + """Initialise the include and exclude filters.""" + self.excluded_entities = [] + self.excluded_domains = [] + self.included_entities = [] + self.included_domains = [] + + def apply(self, query, entity_ids=None): + """Apply the include/exclude filter on domains and entities on query. + + Following rules apply: + * only the include section is configured - just query the specified + entities or domains. + * only the exclude section is configured - filter the specified + entities and domains from all the entities in the system. + * if include and exclude is defined - select the entities specified in + the include and filter out the ones from the exclude list. 
+ """ + states = recorder.get_model('States') + # specific entities requested - do not in/exclude anything + if entity_ids is not None: + return query.filter(states.entity_id.in_(entity_ids)) + query = query.filter(~states.domain.in_(IGNORE_DOMAINS)) + + filter_query = None + # filter if only excluded domain is configured + if self.excluded_domains and not self.included_domains: + filter_query = ~states.domain.in_(self.excluded_domains) + if self.included_entities: + filter_query &= states.entity_id.in_(self.included_entities) + # filter if only included domain is configured + elif not self.excluded_domains and self.included_domains: + filter_query = states.domain.in_(self.included_domains) + if self.included_entities: + filter_query |= states.entity_id.in_(self.included_entities) + # filter if included and excluded domain is configured + elif self.excluded_domains and self.included_domains: + filter_query = ~states.domain.in_(self.excluded_domains) + if self.included_entities: + filter_query &= (states.domain.in_(self.included_domains) | + states.entity_id.in_(self.included_entities)) + else: + filter_query &= (states.domain.in_(self.included_domains) & ~ + states.domain.in_(self.excluded_domains)) + # no domain filter just included entities + elif not self.excluded_domains and not self.included_domains and \ + self.included_entities: + filter_query = states.entity_id.in_(self.included_entities) + if filter_query is not None: + query = query.filter(filter_query) + # finally apply excluded entities filter if configured + if self.excluded_entities: + query = query.filter(~states.entity_id.in_(self.excluded_entities)) + return query def _is_significant(state): diff --git a/homeassistant/components/http.py b/homeassistant/components/http.py index 73c8079023f..97009b69d1c 100644 --- a/homeassistant/components/http.py +++ b/homeassistant/components/http.py @@ -11,6 +11,7 @@ import mimetypes import threading import re import ssl +from ipaddress import ip_address, ip_network import voluptuous as vol @@ -27,16 +28,16 @@ import homeassistant.helpers.config_validation as cv from homeassistant.components import persistent_notification DOMAIN = 'http' -REQUIREMENTS = ('cherrypy==8.1.0', 'static3==0.7.0', 'Werkzeug==0.11.11') +REQUIREMENTS = ('cherrypy==8.1.2', 'static3==0.7.0', 'Werkzeug==0.11.11') CONF_API_PASSWORD = 'api_password' -CONF_APPROVED_IPS = 'approved_ips' CONF_SERVER_HOST = 'server_host' CONF_SERVER_PORT = 'server_port' CONF_DEVELOPMENT = 'development' CONF_SSL_CERTIFICATE = 'ssl_certificate' CONF_SSL_KEY = 'ssl_key' CONF_CORS_ORIGINS = 'cors_allowed_origins' +CONF_TRUSTED_NETWORKS = 'trusted_networks' DATA_API_PASSWORD = 'api_password' NOTIFICATION_ID_LOGIN = 'http-login' @@ -76,7 +77,8 @@ CONFIG_SCHEMA = vol.Schema({ vol.Optional(CONF_SSL_CERTIFICATE): cv.isfile, vol.Optional(CONF_SSL_KEY): cv.isfile, vol.Optional(CONF_CORS_ORIGINS): vol.All(cv.ensure_list, [cv.string]), - vol.Optional(CONF_APPROVED_IPS): vol.All(cv.ensure_list, [cv.string]) + vol.Optional(CONF_TRUSTED_NETWORKS): + vol.All(cv.ensure_list, [ip_network]) }), }, extra=vol.ALLOW_EXTRA) @@ -113,7 +115,9 @@ def setup(hass, config): ssl_certificate = conf.get(CONF_SSL_CERTIFICATE) ssl_key = conf.get(CONF_SSL_KEY) cors_origins = conf.get(CONF_CORS_ORIGINS, []) - approved_ips = conf.get(CONF_APPROVED_IPS, []) + trusted_networks = [ + ip_network(trusted_network) + for trusted_network in conf.get(CONF_TRUSTED_NETWORKS, [])] server = HomeAssistantWSGI( hass, @@ -124,7 +128,7 @@ def setup(hass, config): 
ssl_certificate=ssl_certificate, ssl_key=ssl_key, cors_origins=cors_origins, - approved_ips=approved_ips + trusted_networks=trusted_networks ) def start_wsgi_server(event): @@ -257,7 +261,7 @@ class HomeAssistantWSGI(object): def __init__(self, hass, development, api_password, ssl_certificate, ssl_key, server_host, server_port, cors_origins, - approved_ips): + trusted_networks): """Initilalize the WSGI Home Assistant server.""" from werkzeug.wrappers import Response @@ -276,7 +280,7 @@ class HomeAssistantWSGI(object): self.server_host = server_host self.server_port = server_port self.cors_origins = cors_origins - self.approved_ips = approved_ips + self.trusted_networks = trusted_networks self.event_forwarder = None self.server = None @@ -431,6 +435,19 @@ class HomeAssistantWSGI(object): environ['PATH_INFO'] = '{}.{}'.format(*fingerprinted.groups()) return app(environ, start_response) + @staticmethod + def get_real_ip(request): + """Return the clients correct ip address, even in proxied setups.""" + if request.access_route: + return request.access_route[-1] + else: + return request.remote_addr + + def is_trusted_ip(self, remote_addr): + """Match an ip address against trusted CIDR networks.""" + return any(ip_address(remote_addr) in trusted_network + for trusted_network in self.hass.wsgi.trusted_networks) + class HomeAssistantView(object): """Base view for all views.""" @@ -471,13 +488,15 @@ class HomeAssistantView(object): except AttributeError: raise MethodNotAllowed + remote_addr = HomeAssistantWSGI.get_real_ip(request) + # Auth code verbose on purpose authenticated = False if self.hass.wsgi.api_password is None: authenticated = True - elif request.remote_addr in self.hass.wsgi.approved_ips: + elif self.hass.wsgi.is_trusted_ip(remote_addr): authenticated = True elif hmac.compare_digest(request.headers.get(HTTP_HEADER_HA_AUTH, ''), @@ -491,17 +510,17 @@ class HomeAssistantView(object): if self.requires_auth and not authenticated: _LOGGER.warning('Login attempt or request with an invalid ' - 'password from %s', request.remote_addr) + 'password from %s', remote_addr) persistent_notification.create( self.hass, - 'Invalid password used from {}'.format(request.remote_addr), + 'Invalid password used from {}'.format(remote_addr), 'Login attempt failed', NOTIFICATION_ID_LOGIN) raise Unauthorized() request.authenticated = authenticated _LOGGER.info('Serving %s to %s (auth: %s)', - request.path, request.remote_addr, authenticated) + request.path, remote_addr, authenticated) result = handler(request, **values) diff --git a/homeassistant/components/influxdb.py b/homeassistant/components/influxdb.py index 6bac1fa7cfb..420781bcb74 100644 --- a/homeassistant/components/influxdb.py +++ b/homeassistant/components/influxdb.py @@ -33,8 +33,8 @@ TIMEOUT = 5 CONFIG_SCHEMA = vol.Schema({ DOMAIN: vol.Schema({ vol.Optional(CONF_HOST, default=DEFAULT_HOST): cv.string, - vol.Required(CONF_USERNAME): cv.string, - vol.Required(CONF_PASSWORD): cv.string, + vol.Inclusive(CONF_USERNAME, 'authentication'): cv.string, + vol.Inclusive(CONF_PASSWORD, 'authentication'): cv.string, vol.Optional(CONF_BLACKLIST, default=[]): vol.All(cv.ensure_list, [cv.entity_id]), vol.Optional(CONF_DB_NAME, default=DEFAULT_DATABASE): cv.string, diff --git a/homeassistant/components/input_select.py b/homeassistant/components/input_select.py index 33c0757f266..f94d8200d00 100644 --- a/homeassistant/components/input_select.py +++ b/homeassistant/components/input_select.py @@ -31,6 +31,18 @@ SERVICE_SELECT_OPTION_SCHEMA = vol.Schema({ 
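# --- Editor's illustrative example (not part of this patch) ----------------
# The http component now validates clients against trusted_networks (CIDR
# blocks) instead of the old approved_ips list, and resolves the client
# address from the proxy chain via get_real_ip() (the last entry of
# request.access_route, falling back to remote_addr). The check itself is
# just the standard library's ipaddress module; the networks and addresses
# below are made up for the example.
from ipaddress import ip_address, ip_network

trusted_networks = [ip_network('192.168.1.0/24'), ip_network('fd00::/8')]

def is_trusted(remote_addr):
    """Return True if remote_addr falls inside any trusted network."""
    return any(ip_address(remote_addr) in network
               for network in trusted_networks)

assert is_trusted('192.168.1.42')
assert is_trusted('fd00::1')
assert not is_trusted('203.0.113.7')
# ----------------------------------------------------------------------------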
vol.Required(ATTR_OPTION): cv.string, }) +SERVICE_SELECT_NEXT = 'select_next' + +SERVICE_SELECT_NEXT_SCHEMA = vol.Schema({ + vol.Optional(ATTR_ENTITY_ID): cv.entity_ids, +}) + +SERVICE_SELECT_PREVIOUS = 'select_previous' + +SERVICE_SELECT_PREVIOUS_SCHEMA = vol.Schema({ + vol.Optional(ATTR_ENTITY_ID): cv.entity_ids, +}) + def _cv_input_select(cfg): """Config validation helper for input select (Voluptuous).""" @@ -53,13 +65,27 @@ CONFIG_SCHEMA = vol.Schema({DOMAIN: { def select_option(hass, entity_id, option): - """Set input_select to False.""" + """Set value of input_select.""" hass.services.call(DOMAIN, SERVICE_SELECT_OPTION, { ATTR_ENTITY_ID: entity_id, ATTR_OPTION: option, }) +def select_next(hass, entity_id): + """Set next value of input_select.""" + hass.services.call(DOMAIN, SERVICE_SELECT_NEXT, { + ATTR_ENTITY_ID: entity_id, + }) + + +def select_previous(hass, entity_id): + """Set previous value of input_select.""" + hass.services.call(DOMAIN, SERVICE_SELECT_PREVIOUS, { + ATTR_ENTITY_ID: entity_id, + }) + + def setup(hass, config): """Setup input select.""" component = EntityComponent(_LOGGER, DOMAIN, hass) @@ -77,7 +103,7 @@ def setup(hass, config): return False def select_option_service(call): - """Handle a calls to the input select services.""" + """Handle a calls to the input select option service.""" target_inputs = component.extract_from_service(call) for input_select in target_inputs: @@ -87,6 +113,28 @@ def setup(hass, config): select_option_service, schema=SERVICE_SELECT_OPTION_SCHEMA) + def select_next_service(call): + """Handle a calls to the input select next service.""" + target_inputs = component.extract_from_service(call) + + for input_select in target_inputs: + input_select.offset_index(1) + + hass.services.register(DOMAIN, SERVICE_SELECT_NEXT, + select_next_service, + schema=SERVICE_SELECT_NEXT_SCHEMA) + + def select_previous_service(call): + """Handle a calls to the input select previous service.""" + target_inputs = component.extract_from_service(call) + + for input_select in target_inputs: + input_select.offset_index(-1) + + hass.services.register(DOMAIN, SERVICE_SELECT_PREVIOUS, + select_previous_service, + schema=SERVICE_SELECT_PREVIOUS_SCHEMA) + component.add_entities(entities) return True @@ -139,3 +187,10 @@ class InputSelect(Entity): return self._current_option = option self.update_ha_state() + + def offset_index(self, offset): + """Offset current index.""" + current_index = self._options.index(self._current_option) + new_index = (current_index + offset) % len(self._options) + self._current_option = self._options[new_index] + self.update_ha_state() diff --git a/homeassistant/components/ios.py b/homeassistant/components/ios.py new file mode 100644 index 00000000000..0793417fab3 --- /dev/null +++ b/homeassistant/components/ios.py @@ -0,0 +1,323 @@ +""" +Native Home Assistant iOS app component. 
+ +For more details about this platform, please refer to the documentation at +https://home-assistant.io/components/ios/ +""" +import os +import json +import logging + +import voluptuous as vol +from voluptuous.humanize import humanize_error + +from homeassistant.helpers import config_validation as cv + +import homeassistant.loader as loader + +from homeassistant.helpers import discovery + +from homeassistant.components.http import HomeAssistantView + +from homeassistant.const import (HTTP_INTERNAL_SERVER_ERROR, + HTTP_BAD_REQUEST) + +from homeassistant.components.notify import DOMAIN as NotifyDomain + +_LOGGER = logging.getLogger(__name__) + +DOMAIN = "ios" + +DEPENDENCIES = ["http"] + +CONF_PUSH = "push" +CONF_PUSH_CATEGORIES = "categories" +CONF_PUSH_CATEGORIES_NAME = "name" +CONF_PUSH_CATEGORIES_IDENTIFIER = "identifier" +CONF_PUSH_CATEGORIES_ACTIONS = "actions" + +CONF_PUSH_ACTIONS_IDENTIFIER = "identifier" +CONF_PUSH_ACTIONS_TITLE = "title" +CONF_PUSH_ACTIONS_ACTIVATION_MODE = "activationMode" +CONF_PUSH_ACTIONS_AUTHENTICATION_REQUIRED = "authenticationRequired" +CONF_PUSH_ACTIONS_DESTRUCTIVE = "destructive" +CONF_PUSH_ACTIONS_BEHAVIOR = "behavior" +CONF_PUSH_ACTIONS_CONTEXT = "context" +CONF_PUSH_ACTIONS_TEXT_INPUT_BUTTON_TITLE = "textInputButtonTitle" +CONF_PUSH_ACTIONS_TEXT_INPUT_PLACEHOLDER = "textInputPlaceholder" + +ATTR_FOREGROUND = "foreground" +ATTR_BACKGROUND = "background" + +ACTIVATION_MODES = [ATTR_FOREGROUND, ATTR_BACKGROUND] + +ATTR_DEFAULT_BEHAVIOR = "default" +ATTR_TEXT_INPUT_BEHAVIOR = "textInput" + +BEHAVIORS = [ATTR_DEFAULT_BEHAVIOR, ATTR_TEXT_INPUT_BEHAVIOR] + +ATTR_DEVICE = "device" +ATTR_PUSH_TOKEN = "pushToken" +ATTR_APP = "app" +ATTR_PERMISSIONS = "permissions" +ATTR_PUSH_ID = "pushId" +ATTR_DEVICE_ID = "deviceId" +ATTR_PUSH_SOUNDS = "pushSounds" +ATTR_BATTERY = "battery" + +ATTR_DEVICE_NAME = "name" +ATTR_DEVICE_LOCALIZED_MODEL = "localizedModel" +ATTR_DEVICE_MODEL = "model" +ATTR_DEVICE_PERMANENT_ID = "permanentID" +ATTR_DEVICE_SYSTEM_VERSION = "systemVersion" +ATTR_DEVICE_TYPE = "type" +ATTR_DEVICE_SYSTEM_NAME = "systemName" + +ATTR_APP_BUNDLE_IDENTIFER = "bundleIdentifer" +ATTR_APP_BUILD_NUMBER = "buildNumber" +ATTR_APP_VERSION_NUMBER = "versionNumber" + +ATTR_LOCATION_PERMISSION = "location" +ATTR_NOTIFICATIONS_PERMISSION = "notifications" + +PERMISSIONS = [ATTR_LOCATION_PERMISSION, ATTR_NOTIFICATIONS_PERMISSION] + +ATTR_BATTERY_STATE = "state" +ATTR_BATTERY_LEVEL = "level" + +ATTR_BATTERY_STATE_UNPLUGGED = "Unplugged" +ATTR_BATTERY_STATE_CHARGING = "Charging" +ATTR_BATTERY_STATE_FULL = "Full" +ATTR_BATTERY_STATE_UNKNOWN = "Unknown" + +BATTERY_STATES = [ATTR_BATTERY_STATE_UNPLUGGED, ATTR_BATTERY_STATE_CHARGING, + ATTR_BATTERY_STATE_FULL, ATTR_BATTERY_STATE_UNKNOWN] + +ATTR_DEVICES = "devices" + +ACTION_SCHEMA = vol.Schema({ + vol.Required(CONF_PUSH_ACTIONS_IDENTIFIER): vol.Upper, + vol.Required(CONF_PUSH_ACTIONS_TITLE): cv.string, + vol.Optional(CONF_PUSH_ACTIONS_ACTIVATION_MODE, + default=ATTR_BACKGROUND): vol.In(ACTIVATION_MODES), + vol.Optional(CONF_PUSH_ACTIONS_AUTHENTICATION_REQUIRED, + default=False): cv.boolean, + vol.Optional(CONF_PUSH_ACTIONS_DESTRUCTIVE, + default=False): cv.boolean, + vol.Optional(CONF_PUSH_ACTIONS_BEHAVIOR, + default=ATTR_DEFAULT_BEHAVIOR): vol.In(BEHAVIORS), + vol.Optional(CONF_PUSH_ACTIONS_TEXT_INPUT_BUTTON_TITLE): cv.string, + vol.Optional(CONF_PUSH_ACTIONS_TEXT_INPUT_PLACEHOLDER): cv.string, +}, extra=vol.ALLOW_EXTRA) + +ACTION_SCHEMA_LIST = vol.All(cv.ensure_list, [ACTION_SCHEMA]) + +CONFIG_SCHEMA = vol.Schema({ + 
DOMAIN: { + CONF_PUSH: { + CONF_PUSH_CATEGORIES: vol.All(cv.ensure_list, [{ + vol.Required(CONF_PUSH_CATEGORIES_NAME): cv.string, + vol.Required(CONF_PUSH_CATEGORIES_IDENTIFIER): vol.Upper, + vol.Required(CONF_PUSH_CATEGORIES_ACTIONS): ACTION_SCHEMA_LIST + }]) + } + } +}, extra=vol.ALLOW_EXTRA) + +IDENTIFY_DEVICE_SCHEMA = vol.Schema({ + vol.Required(ATTR_DEVICE_NAME): cv.string, + vol.Required(ATTR_DEVICE_LOCALIZED_MODEL): cv.string, + vol.Required(ATTR_DEVICE_MODEL): cv.string, + vol.Required(ATTR_DEVICE_PERMANENT_ID): cv.string, + vol.Required(ATTR_DEVICE_SYSTEM_VERSION): cv.string, + vol.Required(ATTR_DEVICE_TYPE): cv.string, + vol.Required(ATTR_DEVICE_SYSTEM_NAME): cv.string, +}, extra=vol.ALLOW_EXTRA) + +IDENTIFY_DEVICE_SCHEMA_CONTAINER = vol.All(dict, IDENTIFY_DEVICE_SCHEMA) + +IDENTIFY_APP_SCHEMA = vol.Schema({ + vol.Required(ATTR_APP_BUNDLE_IDENTIFER): cv.string, + vol.Required(ATTR_APP_BUILD_NUMBER): cv.positive_int, + vol.Required(ATTR_APP_VERSION_NUMBER): cv.positive_int +}, extra=vol.ALLOW_EXTRA) + +IDENTIFY_APP_SCHEMA_CONTAINER = vol.All(dict, IDENTIFY_APP_SCHEMA) + +IDENTIFY_BATTERY_SCHEMA = vol.Schema({ + vol.Required(ATTR_BATTERY_LEVEL): cv.positive_int, + vol.Required(ATTR_BATTERY_STATE): vol.In(BATTERY_STATES) +}, extra=vol.ALLOW_EXTRA) + +IDENTIFY_BATTERY_SCHEMA_CONTAINER = vol.All(dict, IDENTIFY_BATTERY_SCHEMA) + +IDENTIFY_SCHEMA = vol.Schema({ + vol.Required(ATTR_DEVICE): IDENTIFY_DEVICE_SCHEMA_CONTAINER, + vol.Required(ATTR_BATTERY): IDENTIFY_BATTERY_SCHEMA_CONTAINER, + vol.Required(ATTR_PUSH_TOKEN): cv.string, + vol.Required(ATTR_APP): IDENTIFY_APP_SCHEMA_CONTAINER, + vol.Required(ATTR_PERMISSIONS): vol.All(cv.ensure_list, + [vol.In(PERMISSIONS)]), + vol.Required(ATTR_PUSH_ID): cv.string, + vol.Required(ATTR_DEVICE_ID): cv.string, + vol.Optional(ATTR_PUSH_SOUNDS): list +}, extra=vol.ALLOW_EXTRA) + +CONFIGURATION_FILE = "ios.conf" + +CONFIG_FILE = {ATTR_DEVICES: {}} + +CONFIG_FILE_PATH = "" + + +def _load_config(filename): + """Load configuration.""" + if not os.path.isfile(filename): + return {} + + try: + with open(filename, "r") as fdesc: + inp = fdesc.read() + + # In case empty file + if not inp: + return {} + + return json.loads(inp) + except (IOError, ValueError) as error: + _LOGGER.error("Reading config file %s failed: %s", filename, error) + return None + + +def _save_config(filename, config): + """Save configuration.""" + try: + with open(filename, "w") as fdesc: + fdesc.write(json.dumps(config)) + except (IOError, TypeError) as error: + _LOGGER.error("Saving config file failed: %s", error) + return False + return True + + +def devices_with_push(): + """Return a dictionary of push enabled targets.""" + targets = {} + for device_name, device in CONFIG_FILE[ATTR_DEVICES].items(): + if device.get(ATTR_PUSH_ID) is not None: + targets[device_name] = device.get(ATTR_PUSH_ID) + return targets + + +def enabled_push_ids(): + """Return a list of push enabled target push IDs.""" + push_ids = list() + # pylint: disable=unused-variable + for device_name, device in CONFIG_FILE[ATTR_DEVICES].items(): + if device.get(ATTR_PUSH_ID) is not None: + push_ids.append(device.get(ATTR_PUSH_ID)) + return push_ids + + +def devices(): + """Return a dictionary of all identified devices.""" + return CONFIG_FILE[ATTR_DEVICES] + + +def device_name_for_push_id(push_id): + """Return the device name for the push ID.""" + for device_name, device in CONFIG_FILE[ATTR_DEVICES].items(): + if device.get(ATTR_PUSH_ID) is push_id: + return device_name + return None + + +def setup(hass, config): + 
"""Setup the iOS component.""" + # pylint: disable=global-statement, import-error + global CONFIG_FILE + global CONFIG_FILE_PATH + + CONFIG_FILE_PATH = hass.config.path(CONFIGURATION_FILE) + + CONFIG_FILE = _load_config(CONFIG_FILE_PATH) + + if CONFIG_FILE == {}: + CONFIG_FILE[ATTR_DEVICES] = {} + + device_tracker = loader.get_component("device_tracker") + if device_tracker.DOMAIN not in hass.config.components: + device_tracker.setup(hass, {}) + # Need this to enable requirements checking in the app. + hass.config.components.append(device_tracker.DOMAIN) + + if "notify.ios" not in hass.config.components: + notify = loader.get_component("notify.ios") + notify.get_service(hass, {}) + # Need this to enable requirements checking in the app. + if NotifyDomain not in hass.config.components: + hass.config.components.append(NotifyDomain) + + zeroconf = loader.get_component("zeroconf") + if zeroconf.DOMAIN not in hass.config.components: + zeroconf.setup(hass, config) + # Need this to enable requirements checking in the app. + hass.config.components.append(zeroconf.DOMAIN) + + discovery.load_platform(hass, "sensor", DOMAIN, {}, config) + + hass.wsgi.register_view(iOSIdentifyDeviceView(hass)) + + if config.get(DOMAIN) is not None: + app_config = config[DOMAIN] + if app_config.get(CONF_PUSH) is not None: + push_config = app_config[CONF_PUSH] + hass.wsgi.register_view(iOSPushConfigView(hass, push_config)) + + return True + + +# pylint: disable=invalid-name +class iOSPushConfigView(HomeAssistantView): + """A view that provides the push categories configuration.""" + + url = "/api/ios/push" + name = "api:ios:push" + + def __init__(self, hass, push_config): + """Init the view.""" + super().__init__(hass) + self.push_config = push_config + + def get(self, request): + """Handle the GET request for the push configuration.""" + return self.json(self.push_config) + + +class iOSIdentifyDeviceView(HomeAssistantView): + """A view that accepts device identification requests.""" + + url = "/api/ios/identify" + name = "api:ios:identify" + + def __init__(self, hass): + """Init the view.""" + super().__init__(hass) + + def post(self, request): + """Handle the POST request for device identification.""" + try: + data = IDENTIFY_SCHEMA(request.json) + except vol.Invalid as ex: + return self.json_message(humanize_error(request.json, ex), + HTTP_BAD_REQUEST) + + name = data.get(ATTR_DEVICE_ID) + + CONFIG_FILE[ATTR_DEVICES][name] = data + + if not _save_config(CONFIG_FILE_PATH, CONFIG_FILE): + return self.json_message("Error saving device.", + HTTP_INTERNAL_SERVER_ERROR) + + return self.json({"status": "registered"}) diff --git a/homeassistant/components/light/flux_led.py b/homeassistant/components/light/flux_led.py index 035307f5678..ce84072b5bb 100644 --- a/homeassistant/components/light/flux_led.py +++ b/homeassistant/components/light/flux_led.py @@ -18,7 +18,7 @@ from homeassistant.components.light import ( import homeassistant.helpers.config_validation as cv REQUIREMENTS = ['https://github.com/Danielhiversen/flux_led/archive/0.7.zip' - '#flux_led==0.7'] + '#flux_led==0.8'] _LOGGER = logging.getLogger(__name__) diff --git a/homeassistant/components/light/zwave.py b/homeassistant/components/light/zwave.py index 14d635153fb..346482d3ff6 100644 --- a/homeassistant/components/light/zwave.py +++ b/homeassistant/components/light/zwave.py @@ -24,6 +24,26 @@ AEOTEC = 0x86 AEOTEC_ZW098_LED_BULB = 0x62 AEOTEC_ZW098_LED_BULB_LIGHT = (AEOTEC, AEOTEC_ZW098_LED_BULB) +LINEAR = 0x14f +LINEAR_WD500Z_DIMMER = 0x3034 
+LINEAR_WD500Z_DIMMER_LIGHT = (LINEAR, LINEAR_WD500Z_DIMMER) + +GE = 0x63 +GE_12724_DIMMER = 0x3031 +GE_12724_DIMMER_LIGHT = (GE, GE_12724_DIMMER) + +DRAGONTECH = 0x184 +DRAGONTECH_PD100_DIMMER = 0x3032 +DRAGONTECH_PD100_DIMMER_LIGHT = (DRAGONTECH, DRAGONTECH_PD100_DIMMER) + +ACT = 0x01 +ACT_ZDP100_DIMMER = 0x3030 +ACT_ZDP100_DIMMER_LIGHT = (ACT, ACT_ZDP100_DIMMER) + +HOMESEER = 0x0c +HOMESEER_WD100_DIMMER = 0x3034 +HOMESEER_WD100_DIMMER_LIGHT = (HOMESEER, HOMESEER_WD100_DIMMER) + COLOR_CHANNEL_WARM_WHITE = 0x01 COLOR_CHANNEL_COLD_WHITE = 0x02 COLOR_CHANNEL_RED = 0x04 @@ -31,9 +51,15 @@ COLOR_CHANNEL_GREEN = 0x08 COLOR_CHANNEL_BLUE = 0x10 WORKAROUND_ZW098 = 'zw098' +WORKAROUND_DELAY = 'alt_delay' DEVICE_MAPPINGS = { - AEOTEC_ZW098_LED_BULB_LIGHT: WORKAROUND_ZW098 + AEOTEC_ZW098_LED_BULB_LIGHT: WORKAROUND_ZW098, + LINEAR_WD500Z_DIMMER_LIGHT: WORKAROUND_DELAY, + GE_12724_DIMMER_LIGHT: WORKAROUND_DELAY, + DRAGONTECH_PD100_DIMMER_LIGHT: WORKAROUND_DELAY, + ACT_ZDP100_DIMMER_LIGHT: WORKAROUND_DELAY, + HOMESEER_WD100_DIMMER_LIGHT: WORKAROUND_DELAY, } # Generate midpoint color temperatures for bulbs that have limited @@ -94,6 +120,24 @@ class ZwaveDimmer(zwave.ZWaveDeviceEntity, Light): zwave.ZWaveDeviceEntity.__init__(self, value, DOMAIN) self._brightness = None self._state = None + self._alt_delay = None + self._zw098 = None + + # Enable appropriate workaround flags for our device + # Make sure that we have values for the key before converting to int + if (value.node.manufacturer_id.strip() and + value.node.product_id.strip()): + specific_sensor_key = (int(value.node.manufacturer_id, 16), + int(value.node.product_id, 16)) + if specific_sensor_key in DEVICE_MAPPINGS: + if DEVICE_MAPPINGS[specific_sensor_key] == WORKAROUND_ZW098: + _LOGGER.debug("AEOTEC ZW098 workaround enabled") + self._zw098 = 1 + elif DEVICE_MAPPINGS[specific_sensor_key] == WORKAROUND_DELAY: + _LOGGER.debug("Dimmer delay workaround enabled for node:" + " %s", value.parent_id) + self._alt_delay = 1 + self.update_properties() # Used for value change event handling @@ -125,7 +169,10 @@ class ZwaveDimmer(zwave.ZWaveDeviceEntity, Light): if self._timer is not None and self._timer.isAlive(): self._timer.cancel() - self._timer = Timer(2, _refresh_value) + if self._alt_delay: + self._timer = Timer(5, _refresh_value) + else: + self._timer = Timer(2, _refresh_value) self._timer.start() self.update_ha_state() @@ -180,19 +227,13 @@ class ZwaveColorLight(ZwaveDimmer): self._color_channels = None self._rgb = None self._ct = None - self._zw098 = None - # Here we attempt to find a zwave color value with the same instance - # id as the dimmer value. Currently zwave nodes that change colors - # only include one dimmer and one color command, but this will - # hopefully provide some forward compatibility for new devices that - # have multiple color changing elements. + # Currently zwave nodes only exist with one color element per node. 
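# --- Editor's illustrative example (not part of this patch) ----------------
# The dimmer delay workaround above is keyed on the node's manufacturer and
# product ids, which the node object exposes as hex strings (that is what the
# int(..., 16) conversions in ZwaveDimmer.__init__ assume). The constants
# come from this patch (GE = 0x63, GE_12724_DIMMER = 0x3031, 'alt_delay');
# the literal id strings below are invented for the example.
DEVICE_MAPPINGS = {(0x63, 0x3031): 'alt_delay'}   # GE 12724 dimmer

manufacturer_id, product_id = '0x0063', '0x3031'
specific_sensor_key = (int(manufacturer_id, 16), int(product_id, 16))
assert DEVICE_MAPPINGS.get(specific_sensor_key) == 'alt_delay'
# ----------------------------------------------------------------------------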
for value_color in value.node.get_rgbbulbs().values(): - if value.instance == value_color.instance: - self._value_color = value_color + self._value_color = value_color if self._value_color is None: - raise ValueError("No matching color command found.") + raise ValueError("No color command found.") for value_color_channels in value.node.get_values( class_id=zwave.const.COMMAND_CLASS_SWITCH_COLOR, @@ -202,17 +243,6 @@ class ZwaveColorLight(ZwaveDimmer): if self._value_color_channels is None: raise ValueError("Color Channels not found.") - # Make sure that we have values for the key before converting to int - if (value.node.manufacturer_id.strip() and - value.node.product_id.strip()): - specific_sensor_key = (int(value.node.manufacturer_id, 16), - int(value.node.product_id, 16)) - - if specific_sensor_key in DEVICE_MAPPINGS: - if DEVICE_MAPPINGS[specific_sensor_key] == WORKAROUND_ZW098: - _LOGGER.debug("AEOTEC ZW098 workaround enabled") - self._zw098 = 1 - super().__init__(value) def update_properties(self): diff --git a/homeassistant/components/lirc.py b/homeassistant/components/lirc.py index 06a5f288c77..ac4807b26af 100644 --- a/homeassistant/components/lirc.py +++ b/homeassistant/components/lirc.py @@ -14,7 +14,7 @@ import voluptuous as vol from homeassistant.const import ( EVENT_HOMEASSISTANT_STOP, EVENT_HOMEASSISTANT_START) -REQUIREMENTS = ['python-lirc==1.2.1'] +REQUIREMENTS = ['python-lirc==1.2.3'] _LOGGER = logging.getLogger(__name__) diff --git a/homeassistant/components/logbook.py b/homeassistant/components/logbook.py index 9100c098413..266496fff78 100644 --- a/homeassistant/components/logbook.py +++ b/homeassistant/components/logbook.py @@ -29,15 +29,22 @@ DEPENDENCIES = ['recorder', 'frontend'] _LOGGER = logging.getLogger(__name__) CONF_EXCLUDE = 'exclude' +CONF_INCLUDE = 'include' CONF_ENTITIES = 'entities' CONF_DOMAINS = 'domains' CONFIG_SCHEMA = vol.Schema({ DOMAIN: vol.Schema({ CONF_EXCLUDE: vol.Schema({ - vol.Optional(CONF_ENTITIES, default=[]): cv.ensure_list, - vol.Optional(CONF_DOMAINS, default=[]): cv.ensure_list + vol.Optional(CONF_ENTITIES, default=[]): cv.entity_ids, + vol.Optional(CONF_DOMAINS, default=[]): vol.All(cv.ensure_list, + [cv.string]) }), + CONF_INCLUDE: vol.Schema({ + vol.Optional(CONF_ENTITIES, default=[]): cv.entity_ids, + vol.Optional(CONF_DOMAINS, default=[]): vol.All(cv.ensure_list, + [cv.string]) + }) }), }, extra=vol.ALLOW_EXTRA) @@ -250,7 +257,7 @@ def humanify(events): event.time_fired, "Home Assistant", action, domain=HA_DOMAIN) - elif event.event_type.lower() == EVENT_LOGBOOK_ENTRY: + elif event.event_type == EVENT_LOGBOOK_ENTRY: domain = event.data.get(ATTR_DOMAIN) entity_id = event.data.get(ATTR_ENTITY_ID) if domain is None and entity_id is not None: @@ -267,15 +274,24 @@ def humanify(events): def _exclude_events(events, config): """Get lists of excluded entities and platforms.""" + # pylint: disable=too-many-branches excluded_entities = [] excluded_domains = [] + included_entities = [] + included_domains = [] exclude = config[DOMAIN].get(CONF_EXCLUDE) if exclude: excluded_entities = exclude[CONF_ENTITIES] excluded_domains = exclude[CONF_DOMAINS] + include = config[DOMAIN].get(CONF_INCLUDE) + if include: + included_entities = include[CONF_ENTITIES] + included_domains = include[CONF_DOMAINS] filtered_events = [] for event in events: + domain, entity_id = None, None + if event.event_type == EVENT_STATE_CHANGED: to_state = State.from_dict(event.data.get('new_state')) # Do not report on new entities @@ -288,11 +304,38 @@ def 
_exclude_events(events, config): continue domain = to_state.domain - # check if logbook entry is excluded for this domain - if domain in excluded_domains: + entity_id = to_state.entity_id + + elif event.event_type == EVENT_LOGBOOK_ENTRY: + domain = event.data.get(ATTR_DOMAIN) + entity_id = event.data.get(ATTR_ENTITY_ID) + + if domain or entity_id: + # filter if only excluded is configured for this domain + if excluded_domains and domain in excluded_domains and \ + not included_domains: + if (included_entities and entity_id not in included_entities) \ + or not included_entities: + continue + # filter if only included is configured for this domain + elif not excluded_domains and included_domains and \ + domain not in included_domains: + if (included_entities and entity_id not in included_entities) \ + or not included_entities: + continue + # filter if included and excluded is configured for this domain + elif excluded_domains and included_domains and \ + (domain not in included_domains or + domain in excluded_domains): + if (included_entities and entity_id not in included_entities) \ + or not included_entities or domain in excluded_domains: + continue + # filter if only included is configured for this entity + elif not excluded_domains and not included_domains and \ + included_entities and entity_id not in included_entities: continue # check if logbook entry is excluded for this entity - if to_state.entity_id in excluded_entities: + if entity_id in excluded_entities: continue filtered_events.append(event) return filtered_events diff --git a/homeassistant/components/media_player/braviatv.py b/homeassistant/components/media_player/braviatv.py index b4bab417742..1550c487433 100644 --- a/homeassistant/components/media_player/braviatv.py +++ b/homeassistant/components/media_player/braviatv.py @@ -236,6 +236,7 @@ class BraviaTVDevice(MediaPlayerDevice): if power_status == 'active': self._state = STATE_ON playing_info = self._braviarc.get_playing_info() + self._reset_playing_info() if playing_info is None or len(playing_info) == 0: self._channel_name = 'App' else: @@ -255,6 +256,16 @@ class BraviaTVDevice(MediaPlayerDevice): _LOGGER.error(exception_instance) self._state = STATE_OFF + def _reset_playing_info(self): + self._program_name = None + self._channel_name = None + self._program_media_type = None + self._channel_number = None + self._source = None + self._content_uri = None + self._duration = None + self._start_date_time = None + def _refresh_volume(self): """Refresh volume information.""" volume_info = self._braviarc.get_volume_info() diff --git a/homeassistant/components/media_player/cast.py b/homeassistant/components/media_player/cast.py index 8468390c590..831f9857e4b 100644 --- a/homeassistant/components/media_player/cast.py +++ b/homeassistant/components/media_player/cast.py @@ -19,7 +19,7 @@ from homeassistant.const import ( STATE_UNKNOWN) import homeassistant.helpers.config_validation as cv -REQUIREMENTS = ['pychromecast==0.7.4'] +REQUIREMENTS = ['pychromecast==0.7.6'] _LOGGER = logging.getLogger(__name__) @@ -68,12 +68,19 @@ def setup_platform(hass, config, add_devices, discovery_info=None): casts = [] + # get_chromecasts() returns Chromecast objects + # with the correct friendly name for grouped devices + all_chromecasts = pychromecast.get_chromecasts() + for host in hosts: - try: - casts.append(CastDevice(*host)) - KNOWN_HOSTS.append(host) - except pychromecast.ChromecastConnectionError: - pass + found = [device for device in all_chromecasts + if (device.host, device.port) == host] 
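# --- Editor's sketch (not part of this patch) -------------------------------
# The matching step above pairs each configured (host, port) tuple with a
# Chromecast object that pychromecast.get_chromecasts() already discovered
# (so grouped devices keep their correct friendly name) instead of opening a
# fresh connection per host. FakeCast is a stand-in for that object and the
# address is made up for the example.
from collections import namedtuple

FakeCast = namedtuple('FakeCast', ['host', 'port', 'name'])
all_chromecasts = [FakeCast('192.168.1.20', 8009, 'Living Room')]

host = ('192.168.1.20', 8009)
found = [device for device in all_chromecasts
         if (device.host, device.port) == host]
assert found and found[0].name == 'Living Room'
# ----------------------------------------------------------------------------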
+ if found: + try: + casts.append(CastDevice(found[0])) + KNOWN_HOSTS.append(host) + except pychromecast.ChromecastConnectionError: + pass add_devices(casts) @@ -83,10 +90,9 @@ class CastDevice(MediaPlayerDevice): # pylint: disable=abstract-method # pylint: disable=too-many-public-methods - def __init__(self, host, port): + def __init__(self, chromecast): """Initialize the Cast device.""" - import pychromecast - self.cast = pychromecast.Chromecast(host, port) + self.cast = chromecast self.cast.socket_client.receiver_controller.register_status_listener( self) diff --git a/homeassistant/components/media_player/pioneer.py b/homeassistant/components/media_player/pioneer.py index 599edf08b37..8930057857d 100644 --- a/homeassistant/components/media_player/pioneer.py +++ b/homeassistant/components/media_player/pioneer.py @@ -13,12 +13,15 @@ from homeassistant.components.media_player import ( SUPPORT_PAUSE, SUPPORT_SELECT_SOURCE, MediaPlayerDevice, PLATFORM_SCHEMA, SUPPORT_TURN_OFF, SUPPORT_TURN_ON, SUPPORT_VOLUME_MUTE, SUPPORT_VOLUME_SET) from homeassistant.const import ( - CONF_HOST, STATE_OFF, STATE_ON, STATE_UNKNOWN, CONF_NAME) + CONF_HOST, STATE_OFF, STATE_ON, STATE_UNKNOWN, CONF_NAME, CONF_PORT, + CONF_TIMEOUT) import homeassistant.helpers.config_validation as cv _LOGGER = logging.getLogger(__name__) DEFAULT_NAME = 'Pioneer AVR' +DEFAULT_PORT = 23 # telnet default. Some Pioneer AVRs use 8102 +DEFAULT_TIMEOUT = None SUPPORT_PIONEER = SUPPORT_PAUSE | SUPPORT_VOLUME_SET | SUPPORT_VOLUME_MUTE | \ SUPPORT_TURN_ON | SUPPORT_TURN_OFF | SUPPORT_SELECT_SOURCE @@ -29,12 +32,17 @@ MAX_SOURCE_NUMBERS = 60 PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({ vol.Required(CONF_HOST): cv.string, vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string, + vol.Optional(CONF_PORT, default=DEFAULT_PORT): cv.port, + vol.Optional(CONF_TIMEOUT, default=DEFAULT_TIMEOUT): cv.socket_timeout, }) def setup_platform(hass, config, add_devices, discovery_info=None): """Setup the Pioneer platform.""" - pioneer = PioneerDevice(config.get(CONF_NAME), config.get(CONF_HOST)) + pioneer = PioneerDevice(config.get(CONF_NAME), + config.get(CONF_HOST), + config.get(CONF_PORT), + config.get(CONF_TIMEOUT)) if pioneer.update(): add_devices([pioneer]) @@ -48,10 +56,12 @@ class PioneerDevice(MediaPlayerDevice): # pylint: disable=too-many-public-methods, abstract-method # pylint: disable=too-many-instance-attributes - def __init__(self, name, host): + def __init__(self, name, host, port, timeout): """Initialize the Pioneer device.""" self._name = name self._host = host + self._port = port + self._timeout = timeout self._pwstate = 'PWR1' self._volume = 0 self._muted = False @@ -62,7 +72,11 @@ class PioneerDevice(MediaPlayerDevice): @classmethod def telnet_request(cls, telnet, command, expected_prefix): """Execute `command` and return the response.""" - telnet.write(command.encode("ASCII") + b"\r") + try: + telnet.write(command.encode("ASCII") + b"\r") + except telnetlib.socket.timeout: + _LOGGER.debug("Pioneer command %s timed out", command) + return None # The receiver will randomly send state change updates, make sure # we get the response we are looking for @@ -76,19 +90,32 @@ class PioneerDevice(MediaPlayerDevice): def telnet_command(self, command): """Establish a telnet connection and sends `command`.""" - telnet = telnetlib.Telnet(self._host) - telnet.write(command.encode("ASCII") + b"\r") - telnet.read_very_eager() # skip response - telnet.close() + try: + try: + telnet = telnetlib.Telnet(self._host, + self._port, + self._timeout) + except 
ConnectionRefusedError: + _LOGGER.debug("Pioneer %s refused connection", self._name) + return + telnet.write(command.encode("ASCII") + b"\r") + telnet.read_very_eager() # skip response + telnet.close() + except telnetlib.socket.timeout: + _LOGGER.debug( + "Pioneer %s command %s timed out", self._name, command) def update(self): """Get the latest details from the device.""" try: - telnet = telnetlib.Telnet(self._host) + telnet = telnetlib.Telnet(self._host, self._port, self._timeout) except ConnectionRefusedError: + _LOGGER.debug("Pioneer %s refused connection", self._name) return False - self._pwstate = self.telnet_request(telnet, "?P", "PWR") + pwstate = self.telnet_request(telnet, "?P", "PWR") + if pwstate: + self._pwstate = pwstate volume_str = self.telnet_request(telnet, "?V", "VOL") self._volume = int(volume_str[3:]) / MAX_VOLUME if volume_str else None diff --git a/homeassistant/components/media_player/russound_rnet.py b/homeassistant/components/media_player/russound_rnet.py index a0405f3f531..91aecb57a10 100644 --- a/homeassistant/components/media_player/russound_rnet.py +++ b/homeassistant/components/media_player/russound_rnet.py @@ -6,23 +6,42 @@ https://home-assistant.io/components/media_player.russound_rnet/ """ import logging +import voluptuous as vol + from homeassistant.components.media_player import ( SUPPORT_TURN_OFF, SUPPORT_VOLUME_MUTE, SUPPORT_VOLUME_SET, - SUPPORT_SELECT_SOURCE, MediaPlayerDevice) + SUPPORT_SELECT_SOURCE, MediaPlayerDevice, PLATFORM_SCHEMA) from homeassistant.const import ( - CONF_HOST, CONF_PORT, STATE_OFF, STATE_ON) + CONF_HOST, CONF_PORT, STATE_OFF, STATE_ON, CONF_NAME) +import homeassistant.helpers.config_validation as cv REQUIREMENTS = [ 'https://github.com/laf/russound/archive/0.1.6.zip' '#russound==0.1.6'] -ZONES = 'zones' -SOURCES = 'sources' +_LOGGER = logging.getLogger(__name__) + +CONF_ZONES = 'zones' +CONF_SOURCES = 'sources' SUPPORT_RUSSOUND = SUPPORT_VOLUME_MUTE | SUPPORT_VOLUME_SET | \ SUPPORT_TURN_OFF | SUPPORT_SELECT_SOURCE -_LOGGER = logging.getLogger(__name__) +ZONE_SCHEMA = vol.Schema({ + vol.Required(CONF_NAME): cv.string, +}) + +SOURCE_SCHEMA = vol.Schema({ + vol.Required(CONF_NAME): cv.string, +}) + +PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({ + vol.Required(CONF_HOST): cv.string, + vol.Required(CONF_NAME): cv.string, + vol.Required(CONF_PORT): cv.port, + vol.Required(CONF_ZONES): vol.Schema({cv.positive_int: ZONE_SCHEMA}), + vol.Required(CONF_SOURCES): vol.All(cv.ensure_list, [SOURCE_SCHEMA]), +}) def setup_platform(hass, config, add_devices, discovery_info=None): @@ -32,7 +51,7 @@ def setup_platform(hass, config, add_devices, discovery_info=None): keypad = config.get('keypad', '70') if host is None or port is None: - _LOGGER.error('Invalid config. Expected %s and %s', + _LOGGER.error("Invalid config. 
Expected %s and %s", CONF_HOST, CONF_PORT) return False @@ -42,13 +61,13 @@ def setup_platform(hass, config, add_devices, discovery_info=None): russ.connect(keypad) sources = [] - for source in config[SOURCES]: + for source in config[CONF_SOURCES]: sources.append(source['name']) if russ.is_connected(): - for zone_id, extra in config[ZONES].items(): - add_devices([RussoundRNETDevice(hass, russ, sources, zone_id, - extra)]) + for zone_id, extra in config[CONF_ZONES].items(): + add_devices([RussoundRNETDevice( + hass, russ, sources, zone_id, extra)]) else: _LOGGER.error('Not connected to %s:%s', host, port) diff --git a/homeassistant/components/media_player/sonos.py b/homeassistant/components/media_player/sonos.py index 5fc0166aefa..533b385f0fa 100644 --- a/homeassistant/components/media_player/sonos.py +++ b/homeassistant/components/media_player/sonos.py @@ -21,7 +21,7 @@ from homeassistant.const import ( from homeassistant.config import load_yaml_config_file import homeassistant.helpers.config_validation as cv -REQUIREMENTS = ['SoCo==0.11.1'] +REQUIREMENTS = ['SoCo==0.12'] _LOGGER = logging.getLogger(__name__) @@ -62,6 +62,11 @@ def setup_platform(hass, config, add_devices, discovery_info=None): if discovery_info: player = soco.SoCo(discovery_info) + + # if device allready exists by config + if player.uid in DEVICES: + return True + if player.is_visible: device = SonosDevice(hass, player) add_devices([device]) @@ -212,6 +217,11 @@ class SonosDevice(MediaPlayerDevice): """Update state, called by track_utc_time_change.""" self.update_ha_state(True) + @property + def unique_id(self): + """Return an unique ID.""" + return self._player.uid + @property def name(self): """Return the name of the device.""" diff --git a/homeassistant/components/media_player/squeezebox.py b/homeassistant/components/media_player/squeezebox.py index d54226b0566..4f994461a26 100644 --- a/homeassistant/components/media_player/squeezebox.py +++ b/homeassistant/components/media_player/squeezebox.py @@ -40,6 +40,8 @@ PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({ def setup_platform(hass, config, add_devices, discovery_info=None): """Setup the squeezebox platform.""" + import socket + username = config.get(CONF_USERNAME) password = config.get(CONF_PASSWORD) @@ -50,11 +52,23 @@ def setup_platform(hass, config, add_devices, discovery_info=None): host = config.get(CONF_HOST) port = config.get(CONF_PORT) - # Only add a media server once - if host in KNOWN_DEVICES: + # Get IP of host, to prevent duplication of same host (different DNS names) + try: + ipaddr = socket.gethostbyname(host) + except (OSError) as error: + _LOGGER.error("Could not communicate with %s:%d: %s", + host, port, error) return False - KNOWN_DEVICES.append(host) + # Combine it with port to allow multiple servers at the same host + key = "{}:{}".format(ipaddr, port) + + # Only add a media server once + if key in KNOWN_DEVICES: + return False + KNOWN_DEVICES.append(key) + + _LOGGER.debug("Creating LMS object for %s", key) lms = LogitechMediaServer(host, port, username, password) if not lms.init_success: @@ -97,56 +111,64 @@ class LogitechMediaServer(object): def query(self, *parameters): """Send request and await response from server.""" - try: - telnet = telnetlib.Telnet(self.host, self.port) - if self._username and self._password: - telnet.write('login {username} {password}\n'.format( - username=self._username, - password=self._password).encode('UTF-8')) - telnet.read_until(b'\n', timeout=3) - message = '{}\n'.format(' '.join(parameters)) - 
telnet.write(message.encode('UTF-8')) - response = telnet.read_until(b'\n', timeout=3)\ - .decode('UTF-8')\ - .split(' ')[-1]\ - .strip() - telnet.write(b'exit\n') - return urllib.parse.unquote(response) - except (OSError, ConnectionError) as error: - _LOGGER.error("Could not communicate with %s:%d: %s", - self.host, - self.port, - error) - return None + response = urllib.parse.unquote(self.get(' '.join(parameters))) + + return response.split(' ')[-1].strip() def get_player_status(self, player): - """Get ithe status of a player.""" + """Get the status of a player.""" # (title) : Song title # Requested Information # a (artist): Artist name 'artist' # d (duration): Song duration in seconds 'duration' # K (artwork_url): URL to remote artwork - tags = 'adK' + # l (album): Album, including the server's "(N of M)" + tags = 'adKl' new_status = {} + response = self.get('{player} status - 1 tags:{tags}\n' + .format(player=player, tags=tags)) + + if not response: + return {} + + response = response.split(' ') + + for item in response: + parts = urllib.parse.unquote(item).partition(':') + new_status[parts[0]] = parts[2] + return new_status + + def get(self, command): + """Abstract out the telnet connection.""" try: telnet = telnetlib.Telnet(self.host, self.port) - telnet.write('{player} status - 1 tags:{tags}\n'.format( - player=player, - tags=tags - ).encode('UTF-8')) + + if self._username and self._password: + _LOGGER.debug("Logging in") + + telnet.write('login {username} {password}\n'.format( + username=self._username, + password=self._password).encode('UTF-8')) + telnet.read_until(b'\n', timeout=3) + + _LOGGER.debug("About to send message: %s", command) + message = '{}\n'.format(command) + telnet.write(message.encode('UTF-8')) + response = telnet.read_until(b'\n', timeout=3)\ .decode('UTF-8')\ - .split(' ') + telnet.write(b'exit\n') - for item in response: - parts = urllib.parse.unquote(item).partition(':') - new_status[parts[0]] = parts[2] - except (OSError, ConnectionError) as error: + _LOGGER.debug("Response: %s", response) + + return response + + except (OSError, ConnectionError, EOFError) as error: _LOGGER.error("Could not communicate with %s:%d: %s", self.host, self.port, error) - return new_status + return None # pylint: disable=too-many-instance-attributes @@ -227,23 +249,44 @@ class SqueezeBoxDevice(MediaPlayerDevice): media_url = ('/music/current/cover.jpg?player={player}').format( player=self._id) - base_url = 'http://{server}:{port}/'.format( - server=self._lms.host, - port=self._lms.http_port) + # pylint: disable=protected-access + if self._lms._username: + base_url = 'http://{username}:{password}@{server}:{port}/'.format( + username=self._lms._username, + password=self._lms._password, + server=self._lms.host, + port=self._lms.http_port) + else: + base_url = 'http://{server}:{port}/'.format( + server=self._lms.host, + port=self._lms.http_port) - return urllib.parse.urljoin(base_url, media_url) + url = urllib.parse.urljoin(base_url, media_url) + + _LOGGER.debug("Media image url: %s", url) + return url @property def media_title(self): """Title of current playing media.""" - if 'artist' in self._status and 'title' in self._status: - return '{artist} - {title}'.format( - artist=self._status['artist'], - title=self._status['title'] - ) + if 'title' in self._status: + return self._status['title'] + if 'current_title' in self._status: return self._status['current_title'] + @property + def media_artist(self): + """Artist of current playing media.""" + if 'artist' in self._status: + return 
self._status['artist'] + + @property + def media_album_name(self): + """Album of current playing media.""" + if 'album' in self._status: + return self._status['album'].rstrip() + @property def supported_media_commands(self): """Flag of media commands that are supported.""" @@ -326,7 +369,7 @@ class SqueezeBoxDevice(MediaPlayerDevice): """ Replace the current play list with the uri. - Telnet Command Strucutre: + Telnet Command Structure: playlist play <fadeInSecs> The "playlist play" command puts the specified song URL, @@ -350,7 +393,7 @@ class SqueezeBoxDevice(MediaPlayerDevice): """ Add a items to the existing playlist. - Telnet Command Strucutre: + Telnet Command Structure: <playerid> playlist add <item> The "playlist add" command adds the specified song URL, playlist or diff --git a/homeassistant/components/media_player/yamaha.py b/homeassistant/components/media_player/yamaha.py index c40d932ab92..027fd607730 100644 --- a/homeassistant/components/media_player/yamaha.py +++ b/homeassistant/components/media_player/yamaha.py @@ -10,16 +10,19 @@ import voluptuous as vol from homeassistant.components.media_player import ( SUPPORT_TURN_OFF, SUPPORT_TURN_ON, SUPPORT_VOLUME_MUTE, SUPPORT_VOLUME_SET, - SUPPORT_SELECT_SOURCE, MediaPlayerDevice, PLATFORM_SCHEMA) + SUPPORT_SELECT_SOURCE, SUPPORT_PLAY_MEDIA, + MEDIA_TYPE_MUSIC, + MediaPlayerDevice, PLATFORM_SCHEMA) from homeassistant.const import (CONF_NAME, CONF_HOST, STATE_OFF, STATE_ON) import homeassistant.helpers.config_validation as cv -REQUIREMENTS = ['rxv==0.1.11'] +REQUIREMENTS = ['rxv==0.2.0'] _LOGGER = logging.getLogger(__name__) SUPPORT_YAMAHA = SUPPORT_VOLUME_SET | SUPPORT_VOLUME_MUTE | \ - SUPPORT_TURN_ON | SUPPORT_TURN_OFF | SUPPORT_SELECT_SOURCE + SUPPORT_TURN_ON | SUPPORT_TURN_OFF | SUPPORT_SELECT_SOURCE | \ + SUPPORT_PLAY_MEDIA CONF_SOURCE_NAMES = 'source_names' CONF_SOURCE_IGNORE = 'source_ignore' @@ -45,11 +48,12 @@ def setup_platform(hass, config, add_devices, discovery_info=None): source_names = config.get(CONF_SOURCE_NAMES) if host is None: - receivers = rxv.find() + receivers = [] + for recv in rxv.find(): + receivers.extend(recv.zone_controllers()) else: - receivers = \ - [rxv.RXV("http://{}:80/YamahaRemoteControl/ctrl".format(host), - name)] + ctrl_url = "http://{}:80/YamahaRemoteControl/ctrl".format(host) + receivers = rxv.RXV(ctrl_url, name).zone_controllers() add_devices( YamahaDevice(name, receiver, source_ignore, source_names) @@ -74,6 +78,7 @@ class YamahaDevice(MediaPlayerDevice): self._reverse_mapping = None self.update() self._name = name + self._zone = receiver.zone def update(self): """Get the latest details from the device.""" @@ -104,7 +109,11 @@ class YamahaDevice(MediaPlayerDevice): @property def name(self): """Return the name of the device.""" - return self._name + name = self._name + if self._zone != "Main_Zone": + # Zone will be one of Main_Zone, Zone_2, Zone_3 + name += " " + self._zone.replace('_', ' ') + return name @property def state(self): @@ -158,3 +167,35 @@ class YamahaDevice(MediaPlayerDevice): def select_source(self, source): """Select input source.""" self._receiver.input = self._reverse_mapping.get(source, source) + + def play_media(self, media_type, media_id, **kwargs): + """Play media from an ID. + + This exposes a pass through for various input sources in the + Yamaha to direct play certain kinds of media. media_type is + treated as the input type that we are setting, and media id is + specific to it. 
+ """ + if media_type == "NET RADIO": + self._receiver.net_radio(media_id) + + @property + def media_content_type(self): + """Return the media content type.""" + if self.source == "NET RADIO": + return MEDIA_TYPE_MUSIC + + @property + def media_title(self): + """Return the media title. + + This will vary by input source, as they provide different + information in metadata. + + """ + if self.source == "NET RADIO": + info = self._receiver.play_status() + if info.song: + return "%s: %s" % (info.station, info.song) + else: + return info.station diff --git a/homeassistant/components/mqtt/__init__.py b/homeassistant/components/mqtt/__init__.py index 3edd0ffc500..307b287ea0d 100644 --- a/homeassistant/components/mqtt/__init__.py +++ b/homeassistant/components/mqtt/__init__.py @@ -18,8 +18,7 @@ from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import template, config_validation as cv from homeassistant.helpers.event import threaded_listener_factory from homeassistant.const import ( - EVENT_HOMEASSISTANT_START, EVENT_HOMEASSISTANT_STOP, - CONF_PLATFORM, CONF_SCAN_INTERVAL, CONF_VALUE_TEMPLATE) + EVENT_HOMEASSISTANT_START, EVENT_HOMEASSISTANT_STOP, CONF_VALUE_TEMPLATE) _LOGGER = logging.getLogger(__name__) @@ -107,12 +106,11 @@ CONFIG_SCHEMA = vol.Schema({ }), }, extra=vol.ALLOW_EXTRA) -MQTT_BASE_PLATFORM_SCHEMA = vol.Schema({ - vol.Required(CONF_PLATFORM): DOMAIN, - vol.Optional(CONF_SCAN_INTERVAL): - vol.All(vol.Coerce(int), vol.Range(min=1)), +SCHEMA_BASE = { vol.Optional(CONF_QOS, default=DEFAULT_QOS): _VALID_QOS_SCHEMA, -}) +} + +MQTT_BASE_PLATFORM_SCHEMA = cv.PLATFORM_SCHEMA.extend(SCHEMA_BASE) # Sensor type platforms subscribe to MQTT events MQTT_RO_PLATFORM_SCHEMA = MQTT_BASE_PLATFORM_SCHEMA.extend({ @@ -401,13 +399,20 @@ class MQTT(object): def _mqtt_on_message(self, _mqttc, _userdata, msg): """Message received callback.""" - _LOGGER.debug("received message on %s: %s", - msg.topic, msg.payload.decode('utf-8')) - self.hass.bus.fire(EVENT_MQTT_MESSAGE_RECEIVED, { - ATTR_TOPIC: msg.topic, - ATTR_QOS: msg.qos, - ATTR_PAYLOAD: msg.payload.decode('utf-8'), - }) + try: + payload = msg.payload.decode('utf-8') + except AttributeError: + _LOGGER.error("Illegal utf-8 unicode payload from " + "MQTT topic: %s, Payload: %s", msg.topic, + msg.payload) + else: + _LOGGER.debug("received message on %s: %s", + msg.topic, payload) + self.hass.bus.fire(EVENT_MQTT_MESSAGE_RECEIVED, { + ATTR_TOPIC: msg.topic, + ATTR_QOS: msg.qos, + ATTR_PAYLOAD: payload, + }) def _mqtt_on_unsubscribe(self, _mqttc, _userdata, mid, granted_qos): """Unsubscribe successful callback.""" diff --git a/homeassistant/components/mysensors.py b/homeassistant/components/mysensors.py index 0c13347ebd1..be5a19bf7c0 100644 --- a/homeassistant/components/mysensors.py +++ b/homeassistant/components/mysensors.py @@ -42,7 +42,7 @@ GATEWAYS = None MQTT_COMPONENT = 'mqtt' REQUIREMENTS = [ 'https://github.com/theolind/pymysensors/archive/' - '8ce98b7fb56f7921a808eb66845ce8b2c455c81e.zip#pymysensors==0.7.1'] + '0b705119389be58332f17753c53167f551254b6c.zip#pymysensors==0.8'] CONFIG_SCHEMA = vol.Schema({ DOMAIN: vol.Schema({ diff --git a/homeassistant/components/nest.py b/homeassistant/components/nest.py index a3d86a725aa..b8aa1d1c70a 100644 --- a/homeassistant/components/nest.py +++ b/homeassistant/components/nest.py @@ -14,7 +14,7 @@ from homeassistant.const import (CONF_PASSWORD, CONF_USERNAME, CONF_STRUCTURE) _LOGGER = logging.getLogger(__name__) -REQUIREMENTS = ['python-nest==2.10.0'] +REQUIREMENTS = 
['python-nest==2.11.0'] DOMAIN = 'nest' diff --git a/homeassistant/components/netatmo.py b/homeassistant/components/netatmo.py index 07f90d0879e..77432411e1a 100644 --- a/homeassistant/components/netatmo.py +++ b/homeassistant/components/netatmo.py @@ -1,22 +1,24 @@ """ -Support for the Netatmo devices (Weather Station and Welcome camera). +Support for the Netatmo devices. For more details about this platform, please refer to the documentation at https://home-assistant.io/components/netatmo/ """ import logging +from datetime import timedelta from urllib.error import HTTPError import voluptuous as vol from homeassistant.const import ( - CONF_API_KEY, CONF_PASSWORD, CONF_USERNAME) + CONF_API_KEY, CONF_PASSWORD, CONF_USERNAME, CONF_DISCOVERY) from homeassistant.helpers import discovery import homeassistant.helpers.config_validation as cv +from homeassistant.util import Throttle REQUIREMENTS = [ 'https://github.com/jabesq/netatmo-api-python/archive/' - 'v0.5.0.zip#lnetatmo==0.5.0'] + 'v0.6.0.zip#lnetatmo==0.6.0'] _LOGGER = logging.getLogger(__name__) @@ -25,6 +27,9 @@ CONF_SECRET_KEY = 'secret_key' DOMAIN = 'netatmo' NETATMO_AUTH = None +DEFAULT_DISCOVERY = True + +MIN_TIME_BETWEEN_UPDATES = timedelta(minutes=10) CONFIG_SCHEMA = vol.Schema({ DOMAIN: vol.Schema({ @@ -32,6 +37,7 @@ CONFIG_SCHEMA = vol.Schema({ vol.Required(CONF_PASSWORD): cv.string, vol.Required(CONF_SECRET_KEY): cv.string, vol.Required(CONF_USERNAME): cv.string, + vol.Optional(CONF_DISCOVERY, default=DEFAULT_DISCOVERY): cv.boolean, }) }, extra=vol.ALLOW_EXTRA) @@ -45,12 +51,44 @@ def setup(hass, config): NETATMO_AUTH = lnetatmo.ClientAuth( config[DOMAIN][CONF_API_KEY], config[DOMAIN][CONF_SECRET_KEY], config[DOMAIN][CONF_USERNAME], config[DOMAIN][CONF_PASSWORD], - 'read_station read_camera access_camera') + 'read_station read_camera access_camera ' + 'read_thermostat write_thermostat') except HTTPError: _LOGGER.error("Unable to connect to Netatmo API") return False - for component in 'camera', 'sensor': - discovery.load_platform(hass, component, DOMAIN, {}, config) + if config[DOMAIN][CONF_DISCOVERY]: + for component in 'camera', 'sensor', 'binary_sensor', 'climate': + discovery.load_platform(hass, component, DOMAIN, {}, config) return True + + +class WelcomeData(object): + """Get the latest data from Netatmo.""" + + def __init__(self, auth, home=None): + """Initialize the data object.""" + self.auth = auth + self.welcomedata = None + self.camera_names = [] + self.home = home + + def get_camera_names(self): + """Return all modules available on the API as a list.""" + self.camera_names = [] + self.update() + if not self.home: + for home in self.welcomedata.cameras: + for camera in self.welcomedata.cameras[home].values(): + self.camera_names.append(camera['name']) + else: + for camera in self.welcomedata.cameras[self.home].values(): + self.camera_names.append(camera['name']) + return self.camera_names + + @Throttle(MIN_TIME_BETWEEN_UPDATES) + def update(self): + """Call the Netatmo API to update the data.""" + import lnetatmo + self.welcomedata = lnetatmo.WelcomeData(self.auth) diff --git a/homeassistant/components/notify/apns.py b/homeassistant/components/notify/apns.py new file mode 100644 index 00000000000..5e5a8088aa7 --- /dev/null +++ b/homeassistant/components/notify/apns.py @@ -0,0 +1,289 @@ +""" +APNS Notification platform. 
+ +For more details about this platform, please refer to the documentation at +https://home-assistant.io/components/notify.apns/ +""" +import logging +import os +import voluptuous as vol + +from homeassistant.helpers.event import track_state_change +from homeassistant.config import load_yaml_config_file +from homeassistant.components.notify import ( + ATTR_TARGET, ATTR_DATA, BaseNotificationService) +import homeassistant.helpers.config_validation as cv +from homeassistant.helpers import template as template_helper + +DOMAIN = "apns" +APNS_DEVICES = "apns.yaml" +DEVICE_TRACKER_DOMAIN = "device_tracker" +SERVICE_REGISTER = "apns_register" + +ATTR_PUSH_ID = "push_id" +ATTR_NAME = "name" + +REGISTER_SERVICE_SCHEMA = vol.Schema({ + vol.Required(ATTR_PUSH_ID): cv.string, + vol.Optional(ATTR_NAME, default=None): cv.string, +}) + +REQUIREMENTS = ["apns2==0.1.1"] + + +def get_service(hass, config): + """Return push service.""" + descriptions = load_yaml_config_file( + os.path.join(os.path.dirname(__file__), 'services.yaml')) + + name = config.get("name") + if name is None: + logging.error("Name must be specified.") + return None + + cert_file = config.get('cert_file') + if cert_file is None: + logging.error("Certificate must be specified.") + return None + + topic = config.get('topic') + if topic is None: + logging.error("Topic must be specified.") + return None + + sandbox = bool(config.get('sandbox', False)) + + service = ApnsNotificationService(hass, name, topic, sandbox, cert_file) + hass.services.register(DOMAIN, + name, + service.register, + descriptions.get(SERVICE_REGISTER), + schema=REGISTER_SERVICE_SCHEMA) + return service + + +class ApnsDevice(object): + """ + Apns Device class. + + Stores information about a device that is + registered for push notifications. + """ + + def __init__(self, push_id, name, tracking_device_id=None, disabled=False): + """Initialize Apns Device.""" + self.device_push_id = push_id + self.device_name = name + self.tracking_id = tracking_device_id + self.device_disabled = disabled + + @property + def push_id(self): + """The apns id for the device.""" + return self.device_push_id + + @property + def name(self): + """The friendly name for the device.""" + return self.device_name + + @property + def tracking_device_id(self): + """ + Device Id. + + The id of a device that is tracked by the device + tracking component. + """ + return self.tracking_id + + @property + def full_tracking_device_id(self): + """ + Fully qualified device id. + + The full id of a device that is tracked by the device + tracking component. + """ + return DEVICE_TRACKER_DOMAIN + '.' 
+ self.tracking_id + + @property + def disabled(self): + """Should receive notifications.""" + return self.device_disabled + + def disable(self): + """Disable the device from receiving notifications.""" + self.device_disabled = True + + def __eq__(self, other): + """Return the comparison.""" + if isinstance(other, self.__class__): + return self.push_id == other.push_id and self.name == other.name + return NotImplemented + + def __ne__(self, other): + """Return the comparison.""" + return not self.__eq__(other) + + +class ApnsNotificationService(BaseNotificationService): + """Implement the notification service for the APNS service.""" + + # pylint: disable=too-many-arguments + # pylint: disable=too-many-instance-attributes + def __init__(self, hass, app_name, topic, sandbox, cert_file): + """Initialize APNS application.""" + self.hass = hass + self.app_name = app_name + self.sandbox = sandbox + self.certificate = cert_file + self.yaml_path = hass.config.path(app_name + '_' + APNS_DEVICES) + self.devices = {} + self.device_states = {} + self.topic = topic + if os.path.isfile(self.yaml_path): + self.devices = { + str(key): ApnsDevice( + str(key), + value.get('name'), + value.get('tracking_device_id'), + value.get('disabled', False) + ) + for (key, value) in + load_yaml_config_file(self.yaml_path).items() + } + + tracking_ids = [ + device.full_tracking_device_id + for (key, device) in self.devices.items() + if device.tracking_device_id is not None + ] + track_state_change( + hass, + tracking_ids, + self.device_state_changed_listener) + + def device_state_changed_listener(self, entity_id, from_s, to_s): + """ + Listener for state change. + + Track device state change if a device + has a tracking id specified. + """ + self.device_states[entity_id] = str(to_s.state) + return + + @staticmethod + def write_device(out, device): + """Write a single device to file.""" + attributes = [] + if device.name is not None: + attributes.append( + 'name: {}'.format(device.name)) + if device.tracking_device_id is not None: + attributes.append( + 'tracking_device_id: {}'.format(device.tracking_device_id)) + if device.disabled: + attributes.append('disabled: True') + + out.write(device.push_id) + out.write(": {") + if len(attributes) > 0: + separator = ", " + out.write(separator.join(attributes)) + + out.write("}\n") + + def write_devices(self): + """Write all known devices to file.""" + with open(self.yaml_path, 'w+') as out: + for _, device in self.devices.items(): + ApnsNotificationService.write_device(out, device) + + def register(self, call): + """Register a device to receive push messages.""" + push_id = call.data.get(ATTR_PUSH_ID) + if push_id is None: + return False + + device_name = call.data.get(ATTR_NAME) + current_device = self.devices.get(push_id) + current_tracking_id = None if current_device is None \ + else current_device.tracking_device_id + + device = ApnsDevice( + push_id, + device_name, + current_tracking_id) + + if current_device is None: + self.devices[push_id] = device + with open(self.yaml_path, 'a') as out: + self.write_device(out, device) + return True + + if device != current_device: + self.devices[push_id] = device + self.write_devices() + + return True + + def send_message(self, message=None, **kwargs): + """Send push message to registered devices.""" + from apns2.client import APNsClient + from apns2.payload import Payload + from apns2.errors import Unregistered + + apns = APNsClient( + self.certificate, + use_sandbox=self.sandbox, + use_alternative_port=False) + + device_state = 
kwargs.get(ATTR_TARGET) + message_data = kwargs.get(ATTR_DATA) + + if message_data is None: + message_data = {} + + if isinstance(message, str): + rendered_message = message + elif isinstance(message, template_helper.Template): + rendered_message = message.render() + else: + rendered_message = "" + + payload = Payload( + alert=rendered_message, + badge=message_data.get("badge"), + sound=message_data.get("sound"), + category=message_data.get("category"), + custom=message_data.get("custom", {}), + content_available=message_data.get("content_available", False)) + + device_update = False + + for push_id, device in self.devices.items(): + if not device.disabled: + state = None + if device.tracking_device_id is not None: + state = self.device_states.get( + device.full_tracking_device_id) + + if device_state is None or state == str(device_state): + try: + apns.send_notification( + push_id, + payload, + topic=self.topic) + except Unregistered: + logging.error( + "Device %s has unregistered.", + push_id) + device_update = True + device.disable() + + if device_update: + self.write_devices() + + return True diff --git a/homeassistant/components/notify/ios.py b/homeassistant/components/notify/ios.py new file mode 100644 index 00000000000..cb85ab8f753 --- /dev/null +++ b/homeassistant/components/notify/ios.py @@ -0,0 +1,87 @@ +""" +iOS push notification platform for notify component. + +For more details about this platform, please refer to the documentation at +https://home-assistant.io/components/notify.ios/ +""" +import logging +from datetime import datetime, timezone +import requests + +from homeassistant.components import ios + +import homeassistant.util.dt as dt_util + +from homeassistant.components.notify import ( + ATTR_TARGET, ATTR_TITLE, ATTR_TITLE_DEFAULT, ATTR_MESSAGE, + ATTR_DATA, BaseNotificationService) + +_LOGGER = logging.getLogger(__name__) + +PUSH_URL = "https://ios-push.home-assistant.io/push" + +DEPENDENCIES = ["ios"] + + +def get_service(hass, config): + """Get the iOS notification service.""" + if "notify.ios" not in hass.config.components: + # Need this to enable requirements checking in the app. + hass.config.components.append("notify.ios") + + return iOSNotificationService() + + +# pylint: disable=too-few-public-methods, too-many-arguments, invalid-name +class iOSNotificationService(BaseNotificationService): + """Implement the notification service for iOS.""" + + def __init__(self): + """Initialize the service.""" + + @property + def targets(self): + """Return a dictionary of registered targets.""" + return ios.devices_with_push() + + def send_message(self, message="", **kwargs): + """Send a message to the Lambda APNS gateway.""" + data = {ATTR_MESSAGE: message} + + if kwargs.get(ATTR_TITLE) is not None: + # Remove default title from notifications. 
+ if kwargs.get(ATTR_TITLE) != ATTR_TITLE_DEFAULT: + data[ATTR_TITLE] = kwargs.get(ATTR_TITLE) + + targets = kwargs.get(ATTR_TARGET) + + if not targets: + targets = ios.enabled_push_ids() + + if kwargs.get(ATTR_DATA) is not None: + data[ATTR_DATA] = kwargs.get(ATTR_DATA) + + for target in targets: + data[ATTR_TARGET] = target + + req = requests.post(PUSH_URL, json=data, timeout=10) + + if req.status_code != 201: + message = req.json()["message"] + if req.status_code == 429: + _LOGGER.warning(message) + elif req.status_code in (400, 500): + _LOGGER.error(message) + + if req.status_code in (201, 429): + rate_limits = req.json()["rateLimits"] + resetsAt = dt_util.parse_datetime(rate_limits["resetsAt"]) + resetsAtTime = resetsAt - datetime.now(timezone.utc) + rate_limit_msg = ("iOS push notification rate limits for %s: " + "%d sent, %d allowed, %d errors, " + "resets in %s") + _LOGGER.info(rate_limit_msg, + ios.device_name_for_push_id(target), + rate_limits["successful"], + rate_limits["maximum"], rate_limits["errors"], + str(resetsAtTime).split(".")[0]) diff --git a/homeassistant/components/notify/matrix.py b/homeassistant/components/notify/matrix.py new file mode 100644 index 00000000000..566bd1a4652 --- /dev/null +++ b/homeassistant/components/notify/matrix.py @@ -0,0 +1,169 @@ +""" +Matrix notification service. + +For more details about this platform, please refer to the documentation at +https://home-assistant.io/components/notify.matrix/ +""" +import logging +import json +import os + +import voluptuous as vol + +import homeassistant.helpers.config_validation as cv +from homeassistant.components.notify import ( + ATTR_TARGET, PLATFORM_SCHEMA, BaseNotificationService) +from homeassistant.const import CONF_USERNAME, CONF_PASSWORD, CONF_VERIFY_SSL + +REQUIREMENTS = ['matrix-client==0.0.5'] + +SESSION_FILE = 'matrix.conf' +AUTH_TOKENS = dict() + +CONF_HOMESERVER = 'homeserver' +CONF_DEFAULT_ROOM = 'default_room' + +PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({ + vol.Required(CONF_HOMESERVER): cv.url, + vol.Optional(CONF_VERIFY_SSL, default=True): cv.boolean, + vol.Required(CONF_USERNAME): cv.string, + vol.Required(CONF_PASSWORD): cv.string, + vol.Required(CONF_DEFAULT_ROOM): cv.string, +}) + +_LOGGER = logging.getLogger(__name__) + + +def get_service(hass, config): + """Get the Matrix notification service.""" + if not AUTH_TOKENS: + load_token(hass.config.path(SESSION_FILE)) + + return MatrixNotificationService( + config.get(CONF_HOMESERVER), + config.get(CONF_DEFAULT_ROOM), + config.get(CONF_VERIFY_SSL), + config.get(CONF_USERNAME), + config.get(CONF_PASSWORD) + ) + + +# pylint: disable=too-few-public-methods
class MatrixNotificationService(BaseNotificationService): + """Wrapper for the MatrixNotificationClient.""" + + # pylint: disable=too-many-arguments + def __init__(self, homeserver, default_room, verify_ssl, + username, password): + """Buffer configuration data for send_message.""" + self.homeserver = homeserver + self.default_room = default_room + self.verify_tls = verify_ssl + self.username = username + self.password = password + + def send_message(self, message, **kwargs): + """Pass default parameters to the actual send_message function.""" + send_message( + message, + self.homeserver, + kwargs.get(ATTR_TARGET) or [self.default_room], + self.verify_tls, + self.username, + self.password + ) + + +def load_token(session_file): + """Load authentication tokens from persistent storage, if it exists.""" + if not os.path.exists(session_file): + return + + with open(session_file) as 
handle: + data = json.load(handle) + + for mx_id, token in data.items(): + AUTH_TOKENS[mx_id] = token + + +def store_token(mx_id, token): + """Store authentication token to session and persistent storage.""" + AUTH_TOKENS[mx_id] = token + + with open(SESSION_FILE, 'w') as handle: + handle.write(json.dumps(AUTH_TOKENS)) + + +# pylint: disable=too-many-locals, too-many-arguments +def send_message(message, homeserver, target_rooms, verify_tls, + username, password): + """Do everything that's necessary to send a message to a Matrix room.""" + from matrix_client.client import MatrixClient, MatrixRequestError + + def login_by_token(): + """Login using authentication token.""" + try: + return MatrixClient( + base_url=homeserver, + token=AUTH_TOKENS[mx_id], + user_id=username, + valid_cert_check=verify_tls + ) + except MatrixRequestError as ex: + _LOGGER.info( + 'login_by_token: (%d) %s', ex.code, ex.content + ) + + def login_by_password(): + """Login using password authentication.""" + try: + _client = MatrixClient( + base_url=homeserver, + valid_cert_check=verify_tls + ) + _client.login_with_password(username, password) + store_token(mx_id, _client.token) + return _client + except MatrixRequestError as ex: + _LOGGER.error( + 'login_by_password: (%d) %s', ex.code, ex.content + ) + + # This is as close as we can get to the mx_id. Since there is no + # homeserver discovery protocol, we have to fall back to the homeserver URL + # instead of the actual domain it serves. + mx_id = "{user}@{homeserver}".format( + user=username, + homeserver=homeserver + ) + + if mx_id in AUTH_TOKENS: + client = login_by_token() + if not client: + client = login_by_password() + if not client: + _LOGGER.error( + 'login failed, both token and username/password ' + 'invalid' + ) + return + else: + client = login_by_password() + if not client: + _LOGGER.error('login failed, username/password invalid') + return + + rooms = client.get_rooms() + for target_room in target_rooms: + try: + if target_room in rooms: + room = rooms[target_room] + else: + room = client.join_room(target_room) + + _LOGGER.debug(room.send_text(message)) + except MatrixRequestError as ex: + _LOGGER.error( + 'Unable to deliver message to room \'%s\': (%d): %s', + target_room, ex.code, ex.content + ) diff --git a/homeassistant/components/notify/pushbullet.py b/homeassistant/components/notify/pushbullet.py index 71b3f227e9b..3fe6492525b 100644 --- a/homeassistant/components/notify/pushbullet.py +++ b/homeassistant/components/notify/pushbullet.py @@ -9,14 +9,15 @@ import logging import voluptuous as vol from homeassistant.components.notify import ( - ATTR_TARGET, ATTR_TITLE, ATTR_TITLE_DEFAULT, PLATFORM_SCHEMA, - BaseNotificationService) + ATTR_DATA, ATTR_TARGET, ATTR_TITLE, ATTR_TITLE_DEFAULT, + PLATFORM_SCHEMA, BaseNotificationService) from homeassistant.const import CONF_API_KEY import homeassistant.helpers.config_validation as cv _LOGGER = logging.getLogger(__name__) REQUIREMENTS = ['pushbullet.py==0.10.0'] +ATTR_URL = 'url' PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({ vol.Required(CONF_API_KEY): cv.string, @@ -40,7 +41,7 @@ def get_service(hass, config): return PushBulletNotificationService(pushbullet) -# pylint: disable=too-few-public-methods +# pylint: disable=too-few-public-methods, too-many-branches class PushBulletNotificationService(BaseNotificationService): """Implement the notification service for Pushbullet.""" @@ -79,11 +80,18 @@ class PushBulletNotificationService(BaseNotificationService): """ targets = kwargs.get(ATTR_TARGET) title = 
kwargs.get(ATTR_TITLE, ATTR_TITLE_DEFAULT) + data = kwargs.get(ATTR_DATA) + url = None + if data: + url = data.get(ATTR_URL, None) refreshed = False if not targets: # Backward compatibility, notify all devices in own account - self.pushbullet.push_note(title, message) + if url: + self.pushbullet.push_link(title, url, body=message) + else: + self.pushbullet.push_note(title, message) _LOGGER.info('Sent notification to self') return @@ -98,7 +106,11 @@ class PushBulletNotificationService(BaseNotificationService): # Target is email, send directly, don't use a target object # This also seems to work to send to all devices in own account if ttype == 'email': - self.pushbullet.push_note(title, message, email=tname) + if url: + self.pushbullet.push_link(title, url, + body=message, email=tname) + else: + self.pushbullet.push_note(title, message, email=tname) _LOGGER.info('Sent notification to email %s', tname) continue @@ -117,7 +129,11 @@ class PushBulletNotificationService(BaseNotificationService): # Attempt push_note on a dict value. Keys are types & target # name. Dict pbtargets has all *actual* targets. try: - self.pbtargets[ttype][tname].push_note(title, message) + if url: + self.pbtargets[ttype][tname].push_link(title, url, + body=message) + else: + self.pbtargets[ttype][tname].push_note(title, message) _LOGGER.info('Sent notification to %s/%s', ttype, tname) except KeyError: _LOGGER.error('No such target: %s/%s', ttype, tname) diff --git a/homeassistant/components/notify/sendgrid.py b/homeassistant/components/notify/sendgrid.py index 42921e2be2c..c8afe601ae5 100644 --- a/homeassistant/components/notify/sendgrid.py +++ b/homeassistant/components/notify/sendgrid.py @@ -13,7 +13,7 @@ from homeassistant.components.notify import ( from homeassistant.const import (CONF_API_KEY, CONF_SENDER, CONF_RECIPIENT) import homeassistant.helpers.config_validation as cv -REQUIREMENTS = ['sendgrid==3.4.0'] +REQUIREMENTS = ['sendgrid==3.6.0'] _LOGGER = logging.getLogger(__name__) diff --git a/homeassistant/components/notify/services.yaml b/homeassistant/components/notify/services.yaml index a3980f658ea..4fe66844aa9 100644 --- a/homeassistant/components/notify/services.yaml +++ b/homeassistant/components/notify/services.yaml @@ -17,3 +17,15 @@ notify: data: description: Extended information for notification. Optional depending on the platform. example: platform specific + +apns_register: + description: Registers a device to receive push notifications. + + fields: + push_id: + description: The device token, a 64 character hex string (256 bits). The device token is provided to you by your client app, which receives the token after registering itself with the remote notification service. + example: '72f2a8633655c5ce574fdc9b2b34ff8abdfc3b739b6ceb7a9ff06c1cbbf99f62' + + name: + description: A friendly name for the device (optional). 
+ example: 'Sam''s iPhone' diff --git a/homeassistant/components/notify/slack.py b/homeassistant/components/notify/slack.py index 7c0a2b4d118..48f80138073 100644 --- a/homeassistant/components/notify/slack.py +++ b/homeassistant/components/notify/slack.py @@ -14,7 +14,7 @@ from homeassistant.const import ( CONF_API_KEY, CONF_USERNAME, CONF_ICON) import homeassistant.helpers.config_validation as cv -REQUIREMENTS = ['slacker==0.9.25'] +REQUIREMENTS = ['slacker==0.9.28'] _LOGGER = logging.getLogger(__name__) diff --git a/homeassistant/components/notify/telstra.py b/homeassistant/components/notify/telstra.py new file mode 100644 index 00000000000..2bd76989eaa --- /dev/null +++ b/homeassistant/components/notify/telstra.py @@ -0,0 +1,109 @@ +""" +Telstra API platform for notify component. + +For more details about this platform, please refer to the documentation at +https://home-assistant.io/components/notify.telstra/ +""" +import logging + +import requests +import voluptuous as vol + +from homeassistant.components.notify import (BaseNotificationService, + ATTR_TITLE, + PLATFORM_SCHEMA) +from homeassistant.const import CONTENT_TYPE_JSON +import homeassistant.helpers.config_validation as cv + +CONF_CONSUMER_KEY = 'consumer_key' +CONF_CONSUMER_SECRET = 'consumer_secret' +CONF_PHONE_NUMBER = 'phone_number' + +PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({ + vol.Required(CONF_CONSUMER_KEY): cv.string, + vol.Required(CONF_CONSUMER_SECRET): cv.string, + vol.Required(CONF_PHONE_NUMBER): cv.string, +}) + +_LOGGER = logging.getLogger(__name__) + + +def get_service(hass, config): + """Get the Telstra SMS API notification service.""" + consumer_key = config.get(CONF_CONSUMER_KEY) + consumer_secret = config.get(CONF_CONSUMER_SECRET) + phone_number = config.get(CONF_PHONE_NUMBER) + + # Attempt an initial authentication to confirm credentials + if _authenticate(consumer_key, consumer_secret) is False: + _LOGGER.exception('Error obtaining authorization from Telstra API') + return None + + return TelstraNotificationService(consumer_key, + consumer_secret, + phone_number) + + +# pylint: disable=too-few-public-methods, too-many-arguments +class TelstraNotificationService(BaseNotificationService): + """Implementation of a notification service for the Telstra SMS API.""" + + def __init__(self, consumer_key, consumer_secret, phone_number): + """Initialize the service.""" + self._consumer_key = consumer_key + self._consumer_secret = consumer_secret + self._phone_number = phone_number + + def send_message(self, message="", **kwargs): + """Send a message to a user.""" + title = kwargs.get(ATTR_TITLE) + + # Retrieve authorization first + token_response = _authenticate(self._consumer_key, + self._consumer_secret) + if token_response is False: + _LOGGER.exception('Error obtaining authorization from Telstra API') + return + + # Send the SMS + if title: + text = '{} {}'.format(title, message) + else: + text = message + + message_data = { + 'to': self._phone_number, + 'body': text + } + message_resource = 'https://api.telstra.com/v1/sms/messages' + message_headers = { + 'Content-Type': CONTENT_TYPE_JSON, + 'Authorization': 'Bearer ' + token_response['access_token'] + } + message_response = requests.post(message_resource, + headers=message_headers, + json=message_data, + timeout=10) + + if message_response.status_code != 202: + _LOGGER.exception("Failed to send SMS. 
Status code: %d", + message_response.status_code) + + +def _authenticate(consumer_key, consumer_secret): + """Authenticate with the Telstra API.""" + token_data = { + 'client_id': consumer_key, + 'client_secret': consumer_secret, + 'grant_type': 'client_credentials', + 'scope': 'SMS' + } + token_resource = 'https://api.telstra.com/v1/oauth/token' + token_response = requests.get(token_resource, + params=token_data, + timeout=10).json() + + if 'error' in token_response: + return False + + return token_response diff --git a/homeassistant/components/notify/xmpp.py b/homeassistant/components/notify/xmpp.py index f292ceccd26..cbe6da89d81 100644 --- a/homeassistant/components/notify/xmpp.py +++ b/homeassistant/components/notify/xmpp.py @@ -14,10 +14,11 @@ from homeassistant.components.notify import ( from homeassistant.const import CONF_PASSWORD, CONF_SENDER, CONF_RECIPIENT REQUIREMENTS = ['sleekxmpp==1.3.1', - 'dnspython3==1.14.0', + 'dnspython3==1.15.0', 'pyasn1==0.1.9', 'pyasn1-modules==0.0.8'] +_LOGGER = logging.getLogger(__name__) CONF_TLS = 'tls' @@ -29,9 +30,6 @@ PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({ }) -_LOGGER = logging.getLogger(__name__) - - def get_service(hass, config): """Get the Jabber (XMPP) notification service.""" return XmppNotificationService( diff --git a/homeassistant/components/nuimo_controller.py b/homeassistant/components/nuimo_controller.py index b383b4f45fc..e3d8f0238cf 100644 --- a/homeassistant/components/nuimo_controller.py +++ b/homeassistant/components/nuimo_controller.py @@ -79,8 +79,7 @@ class NuimoThread(threading.Thread): self._name = name self._hass_is_running = True self._nuimo = None - self._listener = hass.bus.listen_once(EVENT_HOMEASSISTANT_STOP, - self.stop) + hass.bus.listen_once(EVENT_HOMEASSISTANT_STOP, self.stop) def run(self): """Setup connection or be idle.""" @@ -99,8 +98,6 @@ class NuimoThread(threading.Thread): """Terminate Thread by unsetting flag.""" _LOGGER.debug('Stopping thread for Nuimo %s', self._mac) self._hass_is_running = False - self._hass.bus.remove_listener(EVENT_HOMEASSISTANT_STOP, - self._listener) def _attach(self): """Create a nuimo object from mac address or discovery.""" diff --git a/homeassistant/components/pilight.py b/homeassistant/components/pilight.py index 3475a6be65a..2cfbc0063a1 100644 --- a/homeassistant/components/pilight.py +++ b/homeassistant/components/pilight.py @@ -14,7 +14,7 @@ from homeassistant.const import ( EVENT_HOMEASSISTANT_START, EVENT_HOMEASSISTANT_STOP, CONF_HOST, CONF_PORT, CONF_WHITELIST) -REQUIREMENTS = ['pilight==0.0.2'] +REQUIREMENTS = ['pilight==0.1.1'] _LOGGER = logging.getLogger(__name__) @@ -102,7 +102,7 @@ def setup(hass, config): if not whitelist: hass.bus.fire(EVENT, data) # Check if data matches the defined whitelist - elif all(data[key] in whitelist[key] for key in whitelist): + elif all(str(data[key]) in whitelist[key] for key in whitelist): hass.bus.fire(EVENT, data) pilight_client.set_callback(handle_received_code) diff --git a/homeassistant/components/proximity.py b/homeassistant/components/proximity.py index ba0a192398f..fceec21dd5d 100644 --- a/homeassistant/components/proximity.py +++ b/homeassistant/components/proximity.py @@ -9,106 +9,92 @@ https://home-assistant.io/components/proximity/ """ import logging +import voluptuous as vol + +from homeassistant.const import ( + CONF_ZONE, CONF_DEVICES, CONF_UNIT_OF_MEASUREMENT) from homeassistant.helpers.entity import Entity from homeassistant.helpers.event import track_state_change -from homeassistant.util.location import distance 
from homeassistant.util.distance import convert -from homeassistant.const import ATTR_UNIT_OF_MEASUREMENT - -DEPENDENCIES = ['zone', 'device_tracker'] - -DOMAIN = 'proximity' - -NOT_SET = 'not set' - -# Default tolerance -DEFAULT_TOLERANCE = 1 - -# Default zone -DEFAULT_PROXIMITY_ZONE = 'home' - -# Default distance to zone -DEFAULT_DIST_TO_ZONE = NOT_SET - -# Default direction of travel -DEFAULT_DIR_OF_TRAVEL = NOT_SET - -# Default nearest device -DEFAULT_NEAREST = NOT_SET - -# Entity attributes -ATTR_DIST_FROM = 'dist_to_zone' -ATTR_DIR_OF_TRAVEL = 'dir_of_travel' -ATTR_NEAREST = 'nearest' +from homeassistant.util.location import distance +import homeassistant.helpers.config_validation as cv _LOGGER = logging.getLogger(__name__) +ATTR_DIR_OF_TRAVEL = 'dir_of_travel' +ATTR_DIST_FROM = 'dist_to_zone' +ATTR_NEAREST = 'nearest' -def setup_proximity_component(hass, config): +CONF_IGNORED_ZONES = 'ignored_zones' +CONF_TOLERANCE = 'tolerance' + +DEFAULT_DIR_OF_TRAVEL = 'not set' +DEFAULT_DIST_TO_ZONE = 'not set' +DEFAULT_NEAREST = 'not set' +DEFAULT_PROXIMITY_ZONE = 'home' +DEFAULT_TOLERANCE = 1 +DEPENDENCIES = ['zone', 'device_tracker'] +DOMAIN = 'proximity' + +UNITS = ['km', 'm', 'mi', 'ft'] + +ZONE_SCHEMA = vol.Schema({ + vol.Optional(CONF_ZONE, default=DEFAULT_PROXIMITY_ZONE): cv.string, + vol.Optional(CONF_DEVICES, default=[]): + vol.All(cv.ensure_list, [cv.entity_id]), + vol.Optional(CONF_IGNORED_ZONES, default=[]): + vol.All(cv.ensure_list, [cv.string]), + vol.Optional(CONF_TOLERANCE, default=DEFAULT_TOLERANCE): cv.positive_int, + vol.Optional(CONF_UNIT_OF_MEASUREMENT): vol.All(cv.string, vol.In(UNITS)), +}) + +CONFIG_SCHEMA = vol.Schema({ + DOMAIN: vol.Schema({ + cv.slug: ZONE_SCHEMA, + }), +}, extra=vol.ALLOW_EXTRA) + + +def setup_proximity_component(hass, name, config): """Set up individual proximity component.""" - # Get the devices from configuration.yaml. - if 'devices' not in config: - _LOGGER.error('devices not found in config') - return False - - ignored_zones = [] - if 'ignored_zones' in config: - for variable in config['ignored_zones']: - ignored_zones.append(variable) - - proximity_devices = [] - for variable in config['devices']: - proximity_devices.append(variable) - - # Get the direction of travel tolerance from configuration.yaml. - tolerance = config.get('tolerance', DEFAULT_TOLERANCE) - - # Get the zone to monitor proximity to from configuration.yaml. - proximity_zone = config.get('zone', DEFAULT_PROXIMITY_ZONE) - - # Get the unit of measurement from configuration.yaml. - unit_of_measure = config.get(ATTR_UNIT_OF_MEASUREMENT, - hass.config.units.length_unit) - + ignored_zones = config.get(CONF_IGNORED_ZONES) + proximity_devices = config.get(CONF_DEVICES) + tolerance = config.get(CONF_TOLERANCE) + proximity_zone = name + unit_of_measurement = config.get( + CONF_UNIT_OF_MEASUREMENT, hass.config.units.length_unit) zone_id = 'zone.{}'.format(proximity_zone) - state = hass.states.get(zone_id) - zone_friendly_name = (state.name).lower() - proximity = Proximity(hass, zone_friendly_name, DEFAULT_DIST_TO_ZONE, + proximity = Proximity(hass, proximity_zone, DEFAULT_DIST_TO_ZONE, DEFAULT_DIR_OF_TRAVEL, DEFAULT_NEAREST, ignored_zones, proximity_devices, tolerance, - zone_id, unit_of_measure) + zone_id, unit_of_measurement) proximity.entity_id = '{}.{}'.format(DOMAIN, proximity_zone) proximity.update_ha_state() - # Main command to monitor proximity of devices. 
- track_state_change(hass, proximity_devices, - proximity.check_proximity_state_change) + track_state_change( + hass, proximity_devices, proximity.check_proximity_state_change) return True def setup(hass, config): """Get the zones and offsets from configuration.yaml.""" - result = True - if isinstance(config[DOMAIN], list): - for proximity_config in config[DOMAIN]: - if not setup_proximity_component(hass, proximity_config): - result = False - elif not setup_proximity_component(hass, config[DOMAIN]): - result = False + for zone, proximity_config in config[DOMAIN].items(): + setup_proximity_component(hass, zone, proximity_config) - return result + return True -class Proximity(Entity): # pylint: disable=too-many-instance-attributes +# pylint: disable=too-many-instance-attributes +class Proximity(Entity): """Representation of a Proximity.""" # pylint: disable=too-many-arguments def __init__(self, hass, zone_friendly_name, dist_to, dir_of_travel, nearest, ignored_zones, proximity_devices, tolerance, - proximity_zone, unit_of_measure): + proximity_zone, unit_of_measurement): """Initialize the proximity.""" self.hass = hass self.friendly_name = zone_friendly_name @@ -119,7 +105,7 @@ class Proximity(Entity): # pylint: disable=too-many-instance-attributes self.proximity_devices = proximity_devices self.tolerance = tolerance self.proximity_zone = proximity_zone - self.unit_of_measure = unit_of_measure + self._unit_of_measurement = unit_of_measurement @property def name(self): @@ -134,7 +120,7 @@ class Proximity(Entity): # pylint: disable=too-many-instance-attributes @property def unit_of_measurement(self): """Return the unit of measurement of this entity.""" - return self.unit_of_measure + return self._unit_of_measurement @property def state_attributes(self): @@ -209,7 +195,7 @@ class Proximity(Entity): # pylint: disable=too-many-instance-attributes # Add the device and distance to a dictionary. distances_to_zone[device] = round( - convert(dist_to_zone, 'm', self.unit_of_measure), 1) + convert(dist_to_zone, 'm', self.unit_of_measurement), 1) # Loop through each of the distances collected and work out the # closest. diff --git a/homeassistant/components/recorder/__init__.py b/homeassistant/components/recorder/__init__.py index 7f836a1363d..6feee95be45 100644 --- a/homeassistant/components/recorder/__init__.py +++ b/homeassistant/components/recorder/__init__.py @@ -7,7 +7,6 @@ to query this database. 
For more details about this component, please refer to the documentation at https://home-assistant.io/components/recorder/ """ -import asyncio import logging import queue import threading @@ -17,7 +16,7 @@ from typing import Any, Union, Optional, List import voluptuous as vol -from homeassistant.core import HomeAssistant +from homeassistant.core import HomeAssistant, callback from homeassistant.const import (EVENT_HOMEASSISTANT_START, EVENT_HOMEASSISTANT_STOP, EVENT_STATE_CHANGED, EVENT_TIME_CHANGED, MATCH_ALL) @@ -26,15 +25,15 @@ from homeassistant.helpers.event import track_point_in_utc_time from homeassistant.helpers.typing import ConfigType, QueryType import homeassistant.util.dt as dt_util -DOMAIN = "recorder" +DOMAIN = 'recorder' -REQUIREMENTS = ['sqlalchemy==1.0.15'] +REQUIREMENTS = ['sqlalchemy==1.1.1'] -DEFAULT_URL = "sqlite:///{hass_config_path}" -DEFAULT_DB_FILE = "home-assistant_v2.db" +DEFAULT_URL = 'sqlite:///{hass_config_path}' +DEFAULT_DB_FILE = 'home-assistant_v2.db' -CONF_DB_URL = "db_url" -CONF_PURGE_DAYS = "purge_days" +CONF_DB_URL = 'db_url' +CONF_PURGE_DAYS = 'purge_days' RETRIES = 3 CONNECT_RETRY_WAIT = 10 @@ -56,8 +55,8 @@ _LOGGER = logging.getLogger(__name__) Session = None # pylint: disable=no-member -def execute(q: QueryType) \ - -> List[Any]: # pylint: disable=invalid-sequence-index +# pylint: disable=invalid-sequence-index +def execute(q: QueryType) -> List[Any]: """Query the database and convert the objects to HA native form. This method also retries a few times in the case of stale connections. @@ -101,7 +100,7 @@ def setup(hass: HomeAssistant, config: ConfigType) -> bool: global _INSTANCE # pylint: disable=global-statement if _INSTANCE is not None: - _LOGGER.error('Only a single instance allowed.') + _LOGGER.error("Only a single instance allowed") return False purge_days = config.get(DOMAIN, {}).get(CONF_PURGE_DAYS) @@ -155,8 +154,7 @@ class Recorder(threading.Thread): """A threaded recorder class.""" # pylint: disable=too-many-instance-attributes - def __init__(self, hass: HomeAssistant, purge_days: int, uri: str) \ - -> None: + def __init__(self, hass: HomeAssistant, purge_days: int, uri: str) -> None: """Initialize the recorder.""" threading.Thread.__init__(self) @@ -226,7 +224,7 @@ class Recorder(threading.Thread): self.queue.task_done() - @asyncio.coroutine + @callback def event_listener(self, event): """Listen for new events and put them in the process queue.""" self.queue.put(event) diff --git a/homeassistant/components/scene/hunterdouglas_powerview.py b/homeassistant/components/scene/hunterdouglas_powerview.py index 9fc36cf1cff..0ae44d878f8 100644 --- a/homeassistant/components/scene/hunterdouglas_powerview.py +++ b/homeassistant/components/scene/hunterdouglas_powerview.py @@ -11,8 +11,9 @@ from homeassistant.helpers.entity import generate_entity_id _LOGGER = logging.getLogger(__name__) REQUIREMENTS = [ - 'https://github.com/sander76/powerviewApi/' - 'archive/cc6f75dd39160d4aaf46cb2ed9220136b924bcb4.zip#powerviewApi==0.2'] + 'https://github.com/sander76/powerviewApi/archive' + '/246e782d60d5c0addcc98d7899a0186f9d5640b0.zip#powerviewApi==0.3.15' +] HUB_ADDRESS = 'address' @@ -20,7 +21,7 @@ HUB_ADDRESS = 'address' # pylint: disable=unused-argument def setup_platform(hass, config, add_devices, discovery_info=None): """Setup the powerview scenes stored in a Powerview hub.""" - import powerview + from powerview_api import powerview hub_address = config.get(HUB_ADDRESS) diff --git a/homeassistant/components/sensor/arduino.py 
b/homeassistant/components/sensor/arduino.py index 203848fbe6e..03307a49768 100644 --- a/homeassistant/components/sensor/arduino.py +++ b/homeassistant/components/sensor/arduino.py @@ -8,28 +8,44 @@ https://home-assistant.io/components/sensor.arduino/ """ import logging +import voluptuous as vol + +from homeassistant.components.sensor import PLATFORM_SCHEMA import homeassistant.components.arduino as arduino -from homeassistant.const import DEVICE_DEFAULT_NAME +from homeassistant.const import CONF_NAME from homeassistant.helpers.entity import Entity +import homeassistant.helpers.config_validation as cv + + +_LOGGER = logging.getLogger(__name__) + +CONF_PINS = 'pins' +CONF_TYPE = 'analog' DEPENDENCIES = ['arduino'] -_LOGGER = logging.getLogger(__name__) + +PIN_SCHEMA = vol.Schema({ + vol.Required(CONF_NAME): cv.string, +}) + +PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({ + vol.Required(CONF_PINS): + vol.Schema({cv.positive_int: PIN_SCHEMA}), +}) def setup_platform(hass, config, add_devices, discovery_info=None): - """Setup the Arduino platform.""" + """Set up the Arduino platform.""" # Verify that the Arduino board is present if arduino.BOARD is None: - _LOGGER.error('A connection has not been made to the Arduino board.') + _LOGGER.error("A connection has not been made to the Arduino board") return False + pins = config.get(CONF_PINS) + sensors = [] - pins = config.get('pins') for pinnum, pin in pins.items(): - if pin.get('name'): - sensors.append(ArduinoSensor(pin.get('name'), - pinnum, - 'analog')) + sensors.append(ArduinoSensor(pin.get(CONF_NAME), pinnum, CONF_TYPE)) add_devices(sensors) @@ -39,7 +55,7 @@ class ArduinoSensor(Entity): def __init__(self, name, pin, pin_type): """Initialize the sensor.""" self._pin = pin - self._name = name or DEVICE_DEFAULT_NAME + self._name = name self.pin_type = pin_type self.direction = 'in' self._value = None diff --git a/homeassistant/components/sensor/arwn.py b/homeassistant/components/sensor/arwn.py new file mode 100644 index 00000000000..5eb95ba16d1 --- /dev/null +++ b/homeassistant/components/sensor/arwn.py @@ -0,0 +1,126 @@ +"""Support for collecting data from the ARWN project. + +For more details about this platform, please refer to the +documentation at https://home-assistant.io/components/sensor.arwn/ + +""" +import json +import logging +from homeassistant.helpers.entity import Entity +import homeassistant.components.mqtt as mqtt +from homeassistant.const import (TEMP_FAHRENHEIT, TEMP_CELSIUS) +from homeassistant.util import slugify + +DEPENDENCIES = ['mqtt'] + +DOMAIN = "arwn" +TOPIC = 'arwn/#' +SENSORS = {} + +_LOGGER = logging.getLogger(__name__) + + +def discover_sensors(topic, payload): + """Given a topic, dynamically create the right sensor type.""" + parts = topic.split('/') + unit = payload.get('units', '') + domain = parts[1] + if domain == "temperature": + name = parts[2] + if unit == "F": + unit = TEMP_FAHRENHEIT + else: + unit = TEMP_CELSIUS + return (ArwnSensor(name, 'temp', unit),) + if domain == "barometer": + return (ArwnSensor("Barometer", 'pressure', unit),) + if domain == "wind": + return (ArwnSensor("Wind Speed", 'speed', unit), + ArwnSensor("Wind Gust", 'gust', unit), + ArwnSensor("Wind Direction", 'direction', '°')) + + +def _slug(name): + return "sensor.arwn_%s" % slugify(name) + + +def setup_platform(hass, config, add_devices, discovery_info=None): + """Set up the ARWN platform.""" + def sensor_event_received(topic, payload, qos): + """Process events as sensors. 
+ + When a new event on our topic (arwn/#) is received we map it + into a known kind of sensor based on topic name. If we've + never seen this before, we keep this sensor around in a global + cache. If we have seen it before, we update the values of the + existing sensor. Either way, we push an ha state update at the + end for the new event we've seen. + + This lets us dynamically incorporate sensors without any + configuration on our side. + """ + event = json.loads(payload) + sensors = discover_sensors(topic, event) + if not sensors: + return + + if 'timestamp' in event: + del event['timestamp'] + + for sensor in sensors: + if sensor.name not in SENSORS: + sensor.hass = hass + sensor.set_event(event) + SENSORS[sensor.name] = sensor + _LOGGER.debug("Registering new sensor %(name)s => %(event)s", + dict(name=sensor.name, event=event)) + add_devices((sensor,)) + else: + SENSORS[sensor.name].set_event(event) + SENSORS[sensor.name].update_ha_state() + + mqtt.subscribe(hass, TOPIC, sensor_event_received, 0) + return True + + +class ArwnSensor(Entity): + """Represents an ARWN sensor.""" + + def __init__(self, name, state_key, units): + """Initialize the sensor.""" + self.hass = None + self.entity_id = _slug(name) + self._name = name + self._state_key = state_key + self.event = {} + self._unit_of_measurement = units + + def set_event(self, event): + """Update the sensor with the most recent event.""" + self.event = {} + self.event.update(event) + + @property + def state(self): + """Return the state of the device.""" + return self.event.get(self._state_key, None) + + @property + def name(self): + """Get the name of the sensor.""" + return self._name + + @property + def state_attributes(self): + """Return all the state attributes.""" + return self.event + + @property + def unit_of_measurement(self): + """Unit this state is expressed in.""" + return self._unit_of_measurement + + @property + def should_poll(self): + """Should we poll.""" + return False diff --git a/homeassistant/components/sensor/bbox.py b/homeassistant/components/sensor/bbox.py new file mode 100644 index 00000000000..c79fa904c5d --- /dev/null +++ b/homeassistant/components/sensor/bbox.py @@ -0,0 +1,151 @@ +""" +Support for Bbox Bouygues Modem Router. 
+ +For more details about this platform, please refer to the documentation at +https://home-assistant.io/components/sensor.bbox/ +""" +import logging +from datetime import timedelta +import requests +import voluptuous as vol + +from homeassistant.components.sensor import PLATFORM_SCHEMA +from homeassistant.const import (CONF_NAME, CONF_MONITORED_VARIABLES, + ATTR_ATTRIBUTION) +from homeassistant.helpers.entity import Entity +from homeassistant.util import Throttle +import homeassistant.helpers.config_validation as cv + +# Return cached results if last scan was less than this time ago +MIN_TIME_BETWEEN_UPDATES = timedelta(seconds=60) + +REQUIREMENTS = ['pybbox==0.0.5-alpha'] + +_LOGGER = logging.getLogger(__name__) + +CONF_ATTRIBUTION = "Powered by Bouygues Telecom" +DEFAULT_NAME = 'Bbox' + +# Bandwidth units +BANDWIDTH_MEGABITS_SECONDS = 'Mb/s' # type: str + +# Sensor types are defined like so: +# Name, unit, icon +SENSOR_TYPES = { + 'down_max_bandwidth': ['Maximum Download Bandwidth', + BANDWIDTH_MEGABITS_SECONDS, 'mdi:download'], + 'up_max_bandwidth': ['Maximum Upload Bandwidth', + BANDWIDTH_MEGABITS_SECONDS, 'mdi:upload'], + 'current_down_bandwidth': ['Currently Used Download Bandwidth', + BANDWIDTH_MEGABITS_SECONDS, 'mdi:download'], + 'current_up_bandwidth': ['Currently Used Upload Bandwidth', + BANDWIDTH_MEGABITS_SECONDS, 'mdi:upload'], +} + +PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({ + vol.Required(CONF_MONITORED_VARIABLES): + vol.All(cv.ensure_list, [vol.In(SENSOR_TYPES)]), + vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string, +}) + + +# pylint: disable=too-many-arguments +def setup_platform(hass, config, add_devices, discovery_info=None): + """Setup the Bbox sensor.""" + # Create a data fetcher to support all of the configured sensors. Then make + # the first call to init the data. 
+ try: + bbox_data = BboxData() + bbox_data.update() + except requests.exceptions.HTTPError as error: + _LOGGER.error(error) + return False + + name = config.get(CONF_NAME) + + sensors = [] + for variable in config[CONF_MONITORED_VARIABLES]: + sensors.append(BboxSensor(bbox_data, variable, name)) + + add_devices(sensors) + + +class BboxSensor(Entity): + """Implementation of a Bbox sensor.""" + + def __init__(self, bbox_data, sensor_type, name): + """Initialize the sensor.""" + self.client_name = name + self.type = sensor_type + self._name = SENSOR_TYPES[sensor_type][0] + self._unit_of_measurement = SENSOR_TYPES[sensor_type][1] + self._icon = SENSOR_TYPES[sensor_type][2] + self.bbox_data = bbox_data + self._state = None + + self.update() + + @property + def name(self): + """Return the name of the sensor.""" + return '{} {}'.format(self.client_name, self._name) + + @property + def state(self): + """Return the state of the sensor.""" + return self._state + + @property + def unit_of_measurement(self): + """Return the unit of measurement of this entity, if any.""" + return self._unit_of_measurement + + @property + def icon(self): + """Icon to use in the frontend, if any.""" + return self._icon + + @property + def device_state_attributes(self): + """Return the state attributes.""" + return { + ATTR_ATTRIBUTION: CONF_ATTRIBUTION, + } + + def update(self): + """Get the latest data from Bbox and update the state.""" + self.bbox_data.update() + if self.type == 'down_max_bandwidth': + self._state = round( + self.bbox_data.data['rx']['maxBandwidth'] / 1000, 2) + elif self.type == 'up_max_bandwidth': + self._state = round( + self.bbox_data.data['tx']['maxBandwidth'] / 1000, 2) + elif self.type == 'current_down_bandwidth': + self._state = round(self.bbox_data.data['rx']['bandwidth'] / 1000, + 2) + elif self.type == 'current_up_bandwidth': + self._state = round(self.bbox_data.data['tx']['bandwidth'] / 1000, + 2) + + +# pylint: disable=too-few-public-methods +class BboxData(object): + """Get data from the Bbox.""" + + def __init__(self): + """Initialize the data object.""" + self.data = None + + @Throttle(MIN_TIME_BETWEEN_UPDATES) + def update(self): + """Get the latest data from the Bbox.""" + import pybbox + + try: + box = pybbox.Bbox() + self.data = box.get_ip_stats() + except requests.exceptions.HTTPError as error: + _LOGGER.error(error) + self.data = None + return False diff --git a/homeassistant/components/sensor/bom.py b/homeassistant/components/sensor/bom.py index eb1fddeb810..a49ac48ba6f 100644 --- a/homeassistant/components/sensor/bom.py +++ b/homeassistant/components/sensor/bom.py @@ -11,16 +11,17 @@ import requests import voluptuous as vol from homeassistant.components.sensor import PLATFORM_SCHEMA -from homeassistant.helpers.entity import Entity -import homeassistant.helpers.config_validation as cv -from homeassistant.util import Throttle from homeassistant.const import ( - CONF_MONITORED_CONDITIONS, TEMP_CELSIUS, - STATE_UNKNOWN, CONF_NAME) + CONF_MONITORED_CONDITIONS, TEMP_CELSIUS, STATE_UNKNOWN, CONF_NAME, + ATTR_ATTRIBUTION) +from homeassistant.helpers.entity import Entity +from homeassistant.util import Throttle +import homeassistant.helpers.config_validation as cv _RESOURCE = 'http://www.bom.gov.au/fwo/{}/{}.{}.json' _LOGGER = logging.getLogger(__name__) +CONF_ATTRIBUTION = "Data provided by the Australian Bureau of Meteorology" CONF_ZONE_ID = 'zone_id' CONF_WMO_ID = 'wmo_id' @@ -75,7 +76,7 @@ PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({ def setup_platform(hass, config, add_devices, 
discovery_info=None): - """Setup the BOM sensor.""" + """Set up the BOM sensor.""" rest = BOMCurrentData( hass, config.get(CONF_ZONE_ID), config.get(CONF_WMO_ID)) @@ -96,7 +97,7 @@ def setup_platform(hass, config, add_devices, discovery_info=None): class BOMCurrentSensor(Entity): - """Implementing the BOM current sensor.""" + """Implementation of a BOM current sensor.""" def __init__(self, rest, condition, stationname): """Initialize the sensor.""" @@ -131,6 +132,7 @@ class BOMCurrentSensor(Entity): attr['Station Name'] = self.rest.data['name'] attr['Last Update'] = datetime.datetime.strptime(str( self.rest.data['local_date_time_full']), '%Y%m%d%H%M%S') + attr[ATTR_ATTRIBUTION] = CONF_ATTRIBUTION return attr @property diff --git a/homeassistant/components/sensor/coinmarketcap.py b/homeassistant/components/sensor/coinmarketcap.py index 83adcac7fea..a166ec91d10 100644 --- a/homeassistant/components/sensor/coinmarketcap.py +++ b/homeassistant/components/sensor/coinmarketcap.py @@ -12,9 +12,10 @@ from urllib.error import HTTPError import voluptuous as vol from homeassistant.components.sensor import PLATFORM_SCHEMA -import homeassistant.helpers.config_validation as cv +from homeassistant.const import ATTR_ATTRIBUTION from homeassistant.helpers.entity import Entity from homeassistant.util import Throttle +import homeassistant.helpers.config_validation as cv REQUIREMENTS = ['coinmarketcap==2.0.1'] @@ -30,6 +31,7 @@ ATTR_PRICE = 'price_usd' ATTR_SYMBOL = 'symbol' ATTR_TOTAL_SUPPLY = 'total_supply' +CONF_ATTRIBUTION = "Data provided by CoinMarketCap" CONF_CURRENCY = 'currency' DEFAULT_CURRENCY = 'bitcoin' @@ -89,7 +91,7 @@ class CoinMarketCapSensor(Entity): return ICON @property - def state_attributes(self): + def device_state_attributes(self): """Return the state attributes of the sensor.""" return { ATTR_24H_VOLUME_USD: self._ticker.get('24h_volume_usd'), @@ -99,6 +101,7 @@ class CoinMarketCapSensor(Entity): ATTR_PERCENT_CHANGE_7D: self._ticker.get('percent_change_7d'), ATTR_SYMBOL: self._ticker.get('symbol'), ATTR_TOTAL_SUPPLY: self._ticker.get('total_supply'), + ATTR_ATTRIBUTION: CONF_ATTRIBUTION, } # pylint: disable=too-many-branches diff --git a/homeassistant/components/sensor/darksky.py b/homeassistant/components/sensor/darksky.py index 241ab5f4655..f092959ba1d 100644 --- a/homeassistant/components/sensor/darksky.py +++ b/homeassistant/components/sensor/darksky.py @@ -13,7 +13,7 @@ from requests.exceptions import ConnectionError as ConnectError, \ from homeassistant.components.sensor import PLATFORM_SCHEMA from homeassistant.const import ( - CONF_API_KEY, CONF_NAME, CONF_MONITORED_CONDITIONS) + CONF_API_KEY, CONF_NAME, CONF_MONITORED_CONDITIONS, ATTR_ATTRIBUTION) from homeassistant.helpers.entity import Entity from homeassistant.util import Throttle import homeassistant.helpers.config_validation as cv @@ -22,6 +22,7 @@ REQUIREMENTS = ['python-forecastio==1.3.5'] _LOGGER = logging.getLogger(__name__) +CONF_ATTRIBUTION = "Powered by Dark Sky" CONF_UNITS = 'units' CONF_UPDATE_INTERVAL = 'update_interval' @@ -178,6 +179,13 @@ class DarkSkySensor(Entity): """Icon to use in the frontend, if any.""" return SENSOR_TYPES[self.type][6] + @property + def device_state_attributes(self): + """Return the state attributes.""" + return { + ATTR_ATTRIBUTION: CONF_ATTRIBUTION, + } + # pylint: disable=too-many-branches,too-many-statements def update(self): """Get the latest data from Dark Sky and updates the states.""" diff --git a/homeassistant/components/sensor/fitbit.py 
b/homeassistant/components/sensor/fitbit.py index b99a4f320c9..11288bae63a 100644 --- a/homeassistant/components/sensor/fitbit.py +++ b/homeassistant/components/sensor/fitbit.py @@ -10,110 +10,125 @@ import logging import datetime import time -from homeassistant.util import Throttle +import voluptuous as vol + +from homeassistant.components.http import HomeAssistantView +from homeassistant.components.sensor import PLATFORM_SCHEMA from homeassistant.helpers.entity import Entity from homeassistant.loader import get_component -from homeassistant.components.http import HomeAssistantView +from homeassistant.util import Throttle +import homeassistant.helpers.config_validation as cv -_LOGGER = logging.getLogger(__name__) -REQUIREMENTS = ["fitbit==0.2.3"] -DEPENDENCIES = ["http"] - -ICON = "mdi:walk" +REQUIREMENTS = ['fitbit==0.2.3'] _CONFIGURING = {} +_LOGGER = logging.getLogger(__name__) + +ATTR_ACCESS_TOKEN = 'access_token' +ATTR_REFRESH_TOKEN = 'refresh_token' +ATTR_CLIENT_ID = 'client_id' +ATTR_CLIENT_SECRET = 'client_secret' +ATTR_LAST_SAVED_AT = 'last_saved_at' + +CONF_MONITORED_RESOURCES = 'monitored_resources' + +DEPENDENCIES = ['http'] + +FITBIT_AUTH_CALLBACK_PATH = '/auth/fitbit/callback' +FITBIT_AUTH_START = '/auth/fitbit' +FITBIT_CONFIG_FILE = 'fitbit.conf' +FITBIT_DEFAULT_RESOURCES = ['activities/steps'] + +ICON = 'mdi:walk' -# Return cached results if last scan was less then this time ago. MIN_TIME_BETWEEN_UPDATES = datetime.timedelta(minutes=30) -FITBIT_AUTH_START = "/auth/fitbit" -FITBIT_AUTH_CALLBACK_PATH = "/auth/fitbit/callback" - DEFAULT_CONFIG = { - "client_id": "CLIENT_ID_HERE", - "client_secret": "CLIENT_SECRET_HERE" + 'client_id': 'CLIENT_ID_HERE', + 'client_secret': 'CLIENT_SECRET_HERE' } -FITBIT_CONFIG_FILE = "fitbit.conf" - FITBIT_RESOURCES_LIST = { - "activities/activityCalories": "cal", - "activities/calories": "cal", - "activities/caloriesBMR": "cal", - "activities/distance": "", - "activities/elevation": "", - "activities/floors": "floors", - "activities/heart": "bpm", - "activities/minutesFairlyActive": "minutes", - "activities/minutesLightlyActive": "minutes", - "activities/minutesSedentary": "minutes", - "activities/minutesVeryActive": "minutes", - "activities/steps": "steps", - "activities/tracker/activityCalories": "cal", - "activities/tracker/calories": "cal", - "activities/tracker/distance": "", - "activities/tracker/elevation": "", - "activities/tracker/floors": "floors", - "activities/tracker/minutesFairlyActive": "minutes", - "activities/tracker/minutesLightlyActive": "minutes", - "activities/tracker/minutesSedentary": "minutes", - "activities/tracker/minutesVeryActive": "minutes", - "activities/tracker/steps": "steps", - "body/bmi": "BMI", - "body/fat": "%", - "sleep/awakeningsCount": "times awaken", - "sleep/efficiency": "%", - "sleep/minutesAfterWakeup": "minutes", - "sleep/minutesAsleep": "minutes", - "sleep/minutesAwake": "minutes", - "sleep/minutesToFallAsleep": "minutes", - "sleep/startTime": "start time", - "sleep/timeInBed": "time in bed", - "body/weight": "" + 'activities/activityCalories': 'cal', + 'activities/calories': 'cal', + 'activities/caloriesBMR': 'cal', + 'activities/distance': '', + 'activities/elevation': '', + 'activities/floors': 'floors', + 'activities/heart': 'bpm', + 'activities/minutesFairlyActive': 'minutes', + 'activities/minutesLightlyActive': 'minutes', + 'activities/minutesSedentary': 'minutes', + 'activities/minutesVeryActive': 'minutes', + 'activities/steps': 'steps', + 'activities/tracker/activityCalories': 'cal', + 
'activities/tracker/calories': 'cal', + 'activities/tracker/distance': '', + 'activities/tracker/elevation': '', + 'activities/tracker/floors': 'floors', + 'activities/tracker/minutesFairlyActive': 'minutes', + 'activities/tracker/minutesLightlyActive': 'minutes', + 'activities/tracker/minutesSedentary': 'minutes', + 'activities/tracker/minutesVeryActive': 'minutes', + 'activities/tracker/steps': 'steps', + 'body/bmi': 'BMI', + 'body/fat': '%', + 'sleep/awakeningsCount': 'times awaken', + 'sleep/efficiency': '%', + 'sleep/minutesAfterWakeup': 'minutes', + 'sleep/minutesAsleep': 'minutes', + 'sleep/minutesAwake': 'minutes', + 'sleep/minutesToFallAsleep': 'minutes', + 'sleep/startTime': 'start time', + 'sleep/timeInBed': 'time in bed', + 'body/weight': '' } -FITBIT_DEFAULT_RESOURCE_LIST = ["activities/steps"] - FITBIT_MEASUREMENTS = { - "en_US": { - "duration": "ms", - "distance": "mi", - "elevation": "ft", - "height": "in", - "weight": "lbs", - "body": "in", - "liquids": "fl. oz.", - "blood glucose": "mg/dL", + 'en_US': { + 'duration': 'ms', + 'distance': 'mi', + 'elevation': 'ft', + 'height': 'in', + 'weight': 'lbs', + 'body': 'in', + 'liquids': 'fl. oz.', + 'blood glucose': 'mg/dL', }, - "en_GB": { - "duration": "milliseconds", - "distance": "kilometers", - "elevation": "meters", - "height": "centimeters", - "weight": "stone", - "body": "centimeters", - "liquids": "milliliters", - "blood glucose": "mmol/L" + 'en_GB': { + 'duration': 'milliseconds', + 'distance': 'kilometers', + 'elevation': 'meters', + 'height': 'centimeters', + 'weight': 'stone', + 'body': 'centimeters', + 'liquids': 'milliliters', + 'blood glucose': 'mmol/L' }, - "metric": { - "duration": "milliseconds", - "distance": "kilometers", - "elevation": "meters", - "height": "centimeters", - "weight": "kilograms", - "body": "centimeters", - "liquids": "milliliters", - "blood glucose": "mmol/L" + 'metric': { + 'duration': 'milliseconds', + 'distance': 'kilometers', + 'elevation': 'meters', + 'height': 'centimeters', + 'weight': 'kilograms', + 'body': 'centimeters', + 'liquids': 'milliliters', + 'blood glucose': 'mmol/L' } } +PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({ + vol.Optional(CONF_MONITORED_RESOURCES, default=FITBIT_DEFAULT_RESOURCES): + vol.All(cv.ensure_list, [vol.In(FITBIT_RESOURCES_LIST)]), +}) + def config_from_file(filename, config=None): """Small configuration file management function.""" if config: # We"re writing configuration try: - with open(filename, "w") as fdesc: + with open(filename, 'w') as fdesc: fdesc.write(json.dumps(config)) except IOError as error: _LOGGER.error("Saving config file failed: %s", error) @@ -123,7 +138,7 @@ def config_from_file(filename, config=None): # We"re reading config if os.path.isfile(filename): try: - with open(filename, "r") as fdesc: + with open(filename, 'r') as fdesc: return json.loads(fdesc.read()) except IOError as error: _LOGGER.error("Reading config file failed: %s", error) @@ -136,7 +151,7 @@ def config_from_file(filename, config=None): def request_app_setup(hass, config, add_devices, config_path, discovery_info=None): """Assist user with configuring the Fitbit dev application.""" - configurator = get_component("configurator") + configurator = get_component('configurator') # pylint: disable=unused-argument def fitbit_configuration_callback(callback_data): @@ -147,7 +162,7 @@ def request_app_setup(hass, config, add_devices, config_path, if config_file == DEFAULT_CONFIG: error_msg = ("You didn't correctly modify fitbit.conf", " please try again") - 
configurator.notify_errors(_CONFIGURING["fitbit"], error_msg) + configurator.notify_errors(_CONFIGURING['fitbit'], error_msg) else: setup_platform(hass, config, add_devices, discovery_info) else: @@ -167,8 +182,8 @@ def request_app_setup(hass, config, add_devices, config_path, submit = "I have saved my Client ID and Client Secret into fitbit.conf." - _CONFIGURING["fitbit"] = configurator.request_config( - hass, "Fitbit", fitbit_configuration_callback, + _CONFIGURING['fitbit'] = configurator.request_config( + hass, 'Fitbit', fitbit_configuration_callback, description=description, submit_caption=submit, description_image="/static/images/config_fitbit_app.png" ) @@ -176,10 +191,10 @@ def request_app_setup(hass, config, add_devices, config_path, def request_oauth_completion(hass): """Request user complete Fitbit OAuth2 flow.""" - configurator = get_component("configurator") + configurator = get_component('configurator') if "fitbit" in _CONFIGURING: configurator.notify_errors( - _CONFIGURING["fitbit"], "Failed to register, please try again.") + _CONFIGURING['fitbit'], "Failed to register, please try again.") return @@ -187,12 +202,12 @@ def request_oauth_completion(hass): def fitbit_configuration_callback(callback_data): """The actions to do when our configuration callback is called.""" - start_url = "{}{}".format(hass.config.api.base_url, FITBIT_AUTH_START) + start_url = '{}{}'.format(hass.config.api.base_url, FITBIT_AUTH_START) description = "Please authorize Fitbit by visiting {}".format(start_url) - _CONFIGURING["fitbit"] = configurator.request_config( - hass, "Fitbit", fitbit_configuration_callback, + _CONFIGURING['fitbit'] = configurator.request_config( + hass, 'Fitbit', fitbit_configuration_callback, description=description, submit_caption="I have authorized Fitbit." 
) @@ -206,60 +221,61 @@ def setup_platform(hass, config, add_devices, discovery_info=None): if os.path.isfile(config_path): config_file = config_from_file(config_path) if config_file == DEFAULT_CONFIG: - request_app_setup(hass, config, add_devices, config_path, - discovery_info=None) + request_app_setup( + hass, config, add_devices, config_path, discovery_info=None) return False else: config_file = config_from_file(config_path, DEFAULT_CONFIG) - request_app_setup(hass, config, add_devices, config_path, - discovery_info=None) + request_app_setup( + hass, config, add_devices, config_path, discovery_info=None) return False if "fitbit" in _CONFIGURING: - get_component("configurator").request_done(_CONFIGURING.pop("fitbit")) + get_component('configurator').request_done(_CONFIGURING.pop("fitbit")) import fitbit - access_token = config_file.get("access_token") - refresh_token = config_file.get("refresh_token") + access_token = config_file.get(ATTR_ACCESS_TOKEN) + refresh_token = config_file.get(ATTR_REFRESH_TOKEN) if None not in (access_token, refresh_token): - authd_client = fitbit.Fitbit(config_file.get("client_id"), - config_file.get("client_secret"), + authd_client = fitbit.Fitbit(config_file.get(ATTR_CLIENT_ID), + config_file.get(ATTR_CLIENT_SECRET), access_token=access_token, refresh_token=refresh_token) - if int(time.time()) - config_file.get("last_saved_at", 0) > 3600: + if int(time.time()) - config_file.get(ATTR_LAST_SAVED_AT, 0) > 3600: authd_client.client.refresh_token() authd_client.system = authd_client.user_profile_get()["user"]["locale"] if authd_client.system != 'en_GB': if hass.config.units.is_metric: - authd_client.system = "metric" + authd_client.system = 'metric' else: - authd_client.system = "en_US" + authd_client.system = 'en_US' dev = [] - for resource in config.get("monitored_resources", - FITBIT_DEFAULT_RESOURCE_LIST): - dev.append(FitbitSensor(authd_client, config_path, resource, - hass.config.units.is_metric)) + for resource in config.get(CONF_MONITORED_RESOURCES): + dev.append(FitbitSensor( + authd_client, config_path, resource, + hass.config.units.is_metric)) add_devices(dev) else: - oauth = fitbit.api.FitbitOauth2Client(config_file.get("client_id"), - config_file.get("client_secret")) + oauth = fitbit.api.FitbitOauth2Client( + config_file.get(ATTR_CLIENT_ID), + config_file.get(ATTR_CLIENT_SECRET)) - redirect_uri = "{}{}".format(hass.config.api.base_url, + redirect_uri = '{}{}'.format(hass.config.api.base_url, FITBIT_AUTH_CALLBACK_PATH) fitbit_auth_start_url, _ = oauth.authorize_token_url( redirect_uri=redirect_uri, - scope=["activity", "heartrate", "nutrition", "profile", - "settings", "sleep", "weight"]) + scope=['activity', 'heartrate', 'nutrition', 'profile', + 'settings', 'sleep', 'weight']) hass.wsgi.register_redirect(FITBIT_AUTH_START, fitbit_auth_start_url) - hass.wsgi.register_view(FitbitAuthCallbackView(hass, config, - add_devices, oauth)) + hass.wsgi.register_view(FitbitAuthCallbackView( + hass, config, add_devices, oauth)) request_oauth_completion(hass) @@ -288,12 +304,12 @@ class FitbitAuthCallbackView(HomeAssistantView): response_message = """Fitbit has been successfully authorized! 
You can close this window now!""" - if data.get("code") is not None: - redirect_uri = "{}{}".format(self.hass.config.api.base_url, - FITBIT_AUTH_CALLBACK_PATH) + if data.get('code') is not None: + redirect_uri = '{}{}'.format( + self.hass.config.api.base_url, FITBIT_AUTH_CALLBACK_PATH) try: - self.oauth.fetch_access_token(data.get("code"), redirect_uri) + self.oauth.fetch_access_token(data.get('code'), redirect_uri) except MissingTokenError as error: _LOGGER.error("Missing token: %s", error) response_message = """Something went wrong when @@ -315,14 +331,14 @@ class FitbitAuthCallbackView(HomeAssistantView): <body><h1>{}</h1></body></html>""".format(response_message) config_contents = { - "access_token": self.oauth.token["access_token"], - "refresh_token": self.oauth.token["refresh_token"], - "client_id": self.oauth.client_id, - "client_secret": self.oauth.client_secret + ATTR_ACCESS_TOKEN: self.oauth.token['access_token'], + ATTR_REFRESH_TOKEN: self.oauth.token['refresh_token'], + ATTR_CLIENT_ID: self.oauth.client_id, + ATTR_CLIENT_SECRET: self.oauth.client_secret } if not config_from_file(self.hass.config.path(FITBIT_CONFIG_FILE), config_contents): - _LOGGER.error("failed to save config file") + _LOGGER.error("Failed to save config file") setup_platform(self.hass, self.config, self.add_devices) @@ -338,22 +354,24 @@ class FitbitSensor(Entity): self.client = client self.config_path = config_path self.resource_type = resource_type - pretty_resource = self.resource_type.replace("activities/", "") - pretty_resource = pretty_resource.replace("/", " ") + pretty_resource = self.resource_type.replace('activities/', '') + pretty_resource = pretty_resource.replace('/', ' ') pretty_resource = pretty_resource.title() - if pretty_resource == "Body Bmi": - pretty_resource = "BMI" + if pretty_resource == 'Body Bmi': + pretty_resource = 'BMI' + elif pretty_resource == 'Heart': + pretty_resource = 'Resting Heart Rate' self._name = pretty_resource unit_type = FITBIT_RESOURCES_LIST[self.resource_type] if unit_type == "": - split_resource = self.resource_type.split("/") + split_resource = self.resource_type.split('/') try: measurement_system = FITBIT_MEASUREMENTS[self.client.system] except KeyError: if is_metric: - measurement_system = FITBIT_MEASUREMENTS["metric"] + measurement_system = FITBIT_MEASUREMENTS['metric'] else: - measurement_system = FITBIT_MEASUREMENTS["en_US"] + measurement_system = FITBIT_MEASUREMENTS['en_US'] unit_type = measurement_system[split_resource[-1]] self._unit_of_measurement = unit_type self._state = 0 @@ -384,16 +402,17 @@ class FitbitSensor(Entity): def update(self): """Get the latest data from the Fitbit API and update the states.""" container = self.resource_type.replace("/", "-") - response = self.client.time_series(self.resource_type, period="7d") - self._state = response[container][-1].get("value") - if self.resource_type == "activities/heart": - self._state = response[container][-1].get("restingHeartRate") + response = self.client.time_series(self.resource_type, period='7d') + self._state = response[container][-1].get('value') + if self.resource_type == 'activities/heart': + self._state = response[container][-1]. 
\ + get('value').get('restingHeartRate') config_contents = { - "access_token": self.client.client.token["access_token"], - "refresh_token": self.client.client.token["refresh_token"], - "client_id": self.client.client.client_id, - "client_secret": self.client.client.client_secret, - "last_saved_at": int(time.time()) + ATTR_ACCESS_TOKEN: self.client.client.token['access_token'], + ATTR_REFRESH_TOKEN: self.client.client.token['refresh_token'], + ATTR_CLIENT_ID: self.client.client.client_id, + ATTR_CLIENT_SECRET: self.client.client.client_secret, + ATTR_LAST_SAVED_AT: int(time.time()) } if not config_from_file(self.config_path, config_contents): - _LOGGER.error("failed to save config file") + _LOGGER.error("Failed to save config file") diff --git a/homeassistant/components/sensor/fixer.py b/homeassistant/components/sensor/fixer.py index 8aa5002fbfa..c8fe3b06c4e 100644 --- a/homeassistant/components/sensor/fixer.py +++ b/homeassistant/components/sensor/fixer.py @@ -10,7 +10,7 @@ from datetime import timedelta import voluptuous as vol from homeassistant.components.sensor import PLATFORM_SCHEMA -from homeassistant.const import CONF_NAME +from homeassistant.const import CONF_NAME, ATTR_ATTRIBUTION from homeassistant.helpers.entity import Entity from homeassistant.util import Throttle import homeassistant.helpers.config_validation as cv @@ -19,6 +19,11 @@ REQUIREMENTS = ['fixerio==0.1.1'] _LOGGER = logging.getLogger(__name__) +ATTR_BASE = 'Base currency' +ATTR_EXCHANGE_RATE = 'Exchange rate' +ATTR_TARGET = 'Target currency' + +CONF_ATTRIBUTION = "Data provided by the European Central Bank (ECB)" CONF_BASE = 'base' CONF_TARGET = 'target' @@ -29,10 +34,6 @@ ICON = 'mdi:currency' MIN_TIME_BETWEEN_UPDATES = timedelta(days=1) -STATE_ATTR_BASE = 'Base currency' -STATE_ATTR_EXCHANGE_RATE = 'Exchange rate' -STATE_ATTR_TARGET = 'Target currency' - PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({ vol.Required(CONF_TARGET): cv.string, vol.Optional(CONF_BASE, default=DEFAULT_BASE): cv.string, @@ -90,9 +91,10 @@ class ExchangeRateSensor(Entity): """Return the state attributes.""" if self.data.rate is not None: return { - STATE_ATTR_BASE: self.data.rate['base'], - STATE_ATTR_TARGET: self._target, - STATE_ATTR_EXCHANGE_RATE: self.data.rate['rates'][self._target] + ATTR_BASE: self.data.rate['base'], + ATTR_TARGET: self._target, + ATTR_EXCHANGE_RATE: self.data.rate['rates'][self._target], + ATTR_ATTRIBUTION: CONF_ATTRIBUTION, } @property diff --git a/homeassistant/components/sensor/gtfs.py b/homeassistant/components/sensor/gtfs.py index 2a5f4aa0de6..5fcf46832db 100644 --- a/homeassistant/components/sensor/gtfs.py +++ b/homeassistant/components/sensor/gtfs.py @@ -185,7 +185,8 @@ class GTFSDepartureSensor(Entity): self._pygtfs = pygtfs self.origin = origin self.destination = destination - self._name = name + self._custom_name = name + self._name = '' self._unit_of_measurement = 'min' self._state = 0 self._attributes = {} @@ -233,9 +234,10 @@ class GTFSDepartureSensor(Entity): trip = self._departure['trip'] name = '{} {} to {} next departure' - self._name = name.format(agency.agency_name, - origin_station.stop_id, - destination_station.stop_id) + self._name = (self._custom_name or + name.format(agency.agency_name, + origin_station.stop_id, + destination_station.stop_id)) # Build attributes self._attributes = {} diff --git a/homeassistant/components/sensor/haveibeenpwned.py b/homeassistant/components/sensor/haveibeenpwned.py new file mode 100644 index 00000000000..36330f9bba9 --- /dev/null +++ 
b/homeassistant/components/sensor/haveibeenpwned.py @@ -0,0 +1,179 @@ +""" +Support for haveibeenpwned (email breaches) sensor. + +For more details about this platform, please refer to the documentation at +https://home-assistant.io/components/sensor.haveibeenpwned/ +""" +from datetime import timedelta +import logging + +import voluptuous as vol +import requests + +from homeassistant.components.sensor import PLATFORM_SCHEMA +from homeassistant.const import (STATE_UNKNOWN, CONF_EMAIL) +from homeassistant.helpers.entity import Entity +import homeassistant.helpers.config_validation as cv +from homeassistant.util import Throttle +import homeassistant.util.dt as dt_util +from homeassistant.helpers.event import track_point_in_time + +_LOGGER = logging.getLogger(__name__) + +DATE_STR_FORMAT = "%Y-%m-%d %H:%M:%S" +USER_AGENT = "Home Assistant HaveIBeenPwned Sensor Component" + +MIN_TIME_BETWEEN_UPDATES = timedelta(minutes=15) +MIN_TIME_BETWEEN_FORCED_UPDATES = timedelta(seconds=5) + +PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({ + vol.Required(CONF_EMAIL): vol.All(cv.ensure_list, [cv.string]), +}) + + +# pylint: disable=unused-argument +def setup_platform(hass, config, add_devices, discovery_info=None): + """Set up the HaveIBeenPwnedSensor sensor.""" + emails = config.get(CONF_EMAIL) + data = HaveIBeenPwnedData(emails) + + devices = [] + for email in emails: + devices.append(HaveIBeenPwnedSensor(data, hass, email)) + + add_devices(devices) + + # To make sure we get initial data for the sensors ignoring the normal + # throttle of 15 minutes but using an update throttle of 5 seconds + for sensor in devices: + sensor.update_nothrottle() + + +class HaveIBeenPwnedSensor(Entity): + """Implementation of a HaveIBeenPwnedSensor.""" + + def __init__(self, data, hass, email): + """Initialize the HaveIBeenPwnedSensor sensor.""" + self._state = STATE_UNKNOWN + self._data = data + self._hass = hass + self._email = email + self._unit_of_measurement = "Breaches" + + @property + def name(self): + """Return the name of the sensor.""" + return "Breaches {}".format(self._email) + + @property + def unit_of_measurement(self): + """Return the unit the value is expressed in.""" + return self._unit_of_measurement + + @property + def state(self): + """Return the state of the device.""" + return self._state + + @property + def device_state_attributes(self): + """Return the atrributes of the sensor.""" + val = {} + if self._email not in self._data.data: + return val + + for idx, value in enumerate(self._data.data[self._email]): + tmpname = "breach {}".format(idx+1) + tmpvalue = "{} {}".format( + value["Title"], + dt_util.as_local(dt_util.parse_datetime( + value["AddedDate"])).strftime(DATE_STR_FORMAT)) + val[tmpname] = tmpvalue + + return val + + def update_nothrottle(self, dummy=None): + """Update sensor without throttle.""" + self._data.update_no_throttle() + + # Schedule a forced update 5 seconds in the future if the update above + # returned no data for this sensors email. 
This is mainly to make sure + # that we don't get HTTP Error "too many requests" and to have initial + # data after hass startup once we have the data it will update as + # normal using update + if self._email not in self._data.data: + track_point_in_time(self._hass, + self.update_nothrottle, + dt_util.now() + + MIN_TIME_BETWEEN_FORCED_UPDATES) + return + + if self._email in self._data.data: + self._state = len(self._data.data[self._email]) + self.update_ha_state() + + def update(self): + """Update data and see if it contains data for our email.""" + self._data.update() + + if self._email in self._data.data: + self._state = len(self._data.data[self._email]) + + +class HaveIBeenPwnedData(object): + """Class for handling the data retrieval.""" + + def __init__(self, emails): + """Initialize the data object.""" + self._email_count = len(emails) + self._current_index = 0 + self.data = {} + self._email = emails[0] + self._emails = emails + + def set_next_email(self): + """Set the next email to be looked up.""" + self._current_index = (self._current_index + 1) % self._email_count + self._email = self._emails[self._current_index] + + def update_no_throttle(self): + """Get the data for a specific email.""" + self.update(no_throttle=True) + + @Throttle(MIN_TIME_BETWEEN_UPDATES, MIN_TIME_BETWEEN_FORCED_UPDATES) + def update(self, **kwargs): + """Get the latest data for current email from REST service.""" + try: + url = "https://haveibeenpwned.com/api/v2/breachedaccount/{}". \ + format(self._email) + + _LOGGER.info("Checking for breaches for email %s", self._email) + + req = requests.get(url, headers={"User-agent": USER_AGENT}, + allow_redirects=True, timeout=5) + + except requests.exceptions.RequestException: + _LOGGER.error("failed fetching HaveIBeenPwned Data for '%s'", + self._email) + return + + if req.status_code == 200: + self.data[self._email] = sorted(req.json(), + key=lambda k: k["AddedDate"], + reverse=True) + + # only goto next email if we had data so that + # the forced updates try this current email again + self.set_next_email() + + elif req.status_code == 404: + self.data[self._email] = [] + + # only goto next email if we had data so that + # the forced updates try this current email again + self.set_next_email() + + else: + _LOGGER.error("failed fetching HaveIBeenPwned Data for '%s'" + "(HTTP Status_code = %d)", self._email, + req.status_code) diff --git a/homeassistant/components/sensor/ios.py b/homeassistant/components/sensor/ios.py new file mode 100644 index 00000000000..c4c8f1eba69 --- /dev/null +++ b/homeassistant/components/sensor/ios.py @@ -0,0 +1,112 @@ +""" +Support for Home Assistant iOS app sensors. 
+ +For more details about this platform, please refer to the documentation at +https://home-assistant.io/components/sensor.ios/ +""" +from homeassistant.components import ios +from homeassistant.helpers.entity import Entity + +DEPENDENCIES = ["ios"] + +SENSOR_TYPES = { + "level": ["Battery Level", "%"], + "state": ["Battery State", None] +} + +DEFAULT_ICON = "mdi:battery" + + +def setup_platform(hass, config, add_devices, discovery_info=None): + """Setup the iOS sensor.""" + if discovery_info is None: + return + dev = list() + for device_name, device in ios.devices().items(): + for sensor_type in ("level", "state"): + dev.append(IOSSensor(sensor_type, device_name, device)) + + add_devices(dev) + + +class IOSSensor(Entity): + """Representation of an iOS sensor.""" + + def __init__(self, sensor_type, device_name, device): + """Initialize the sensor.""" + self._device_name = device_name + self._name = device_name + " " + SENSOR_TYPES[sensor_type][0] + self._device = device + self.type = sensor_type + self._state = None + self._unit_of_measurement = SENSOR_TYPES[sensor_type][1] + self.update() + + @property + def name(self): + """Return the name of the iOS sensor.""" + device_name = self._device[ios.ATTR_DEVICE][ios.ATTR_DEVICE_NAME] + return "{} {}".format(device_name, SENSOR_TYPES[self.type][0]) + + @property + def state(self): + """Return the state of the sensor.""" + return self._state + + @property + def unique_id(self): + """Return the unique ID of this sensor.""" + return "sensor_ios_battery_{}_{}".format(self.type, self._device_name) + + @property + def unit_of_measurement(self): + """Return the unit of measurement this sensor expresses itself in.""" + return self._unit_of_measurement + + @property + def device_state_attributes(self): + """Return the device state attributes.""" + device = self._device[ios.ATTR_DEVICE] + device_battery = self._device[ios.ATTR_BATTERY] + return { + "Battery State": device_battery[ios.ATTR_BATTERY_STATE], + "Battery Level": device_battery[ios.ATTR_BATTERY_LEVEL], + "Device Type": device[ios.ATTR_DEVICE_TYPE], + "Device Name": device[ios.ATTR_DEVICE_NAME], + "Device Version": device[ios.ATTR_DEVICE_SYSTEM_VERSION], + } + + @property + def icon(self): + """Return the icon to use in the frontend, if any.""" + device_battery = self._device[ios.ATTR_BATTERY] + battery_state = device_battery[ios.ATTR_BATTERY_STATE] + battery_level = device_battery[ios.ATTR_BATTERY_LEVEL] + rounded_level = round(battery_level, -1) + returning_icon = DEFAULT_ICON + if battery_state == ios.ATTR_BATTERY_STATE_FULL: + returning_icon = DEFAULT_ICON + elif battery_state == ios.ATTR_BATTERY_STATE_CHARGING: + # Why is MDI missing 10, 50, 70? 
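    # The battery level is rounded to the nearest ten and mapped onto the
    # Material Design Icons battery set. For example:
    #   level 76, charging   -> rounded 80 -> "mdi:battery-charging-80"
    #   level 47, charging   -> rounded 50 (no such icon) -> "mdi:battery-charging"
    #   level 4, unplugged   -> rounded 0  -> "mdi:battery-outline"
    #   level 76, unplugged  -> "mdi:battery-80"
    #   level 100, unplugged -> "mdi:battery"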
+ if rounded_level in (20, 30, 40, 60, 80, 90, 100): + returning_icon = "{}-charging-{}".format(DEFAULT_ICON, + str(rounded_level)) + else: + returning_icon = "{}-charging".format(DEFAULT_ICON) + elif battery_state == ios.ATTR_BATTERY_STATE_UNPLUGGED: + if rounded_level < 10: + returning_icon = "{}-outline".format(DEFAULT_ICON) + elif battery_level == 100: + returning_icon = DEFAULT_ICON + else: + returning_icon = "{}-{}".format(DEFAULT_ICON, + str(rounded_level)) + elif battery_state == ios.ATTR_BATTERY_STATE_UNKNOWN: + returning_icon = "{}-unknown".format(DEFAULT_ICON) + + return returning_icon + + def update(self): + """Get the latest state of the sensor.""" + self._device = ios.devices().get(self._device_name) + self._state = self._device[ios.ATTR_BATTERY][self.type] diff --git a/homeassistant/components/sensor/min_max.py b/homeassistant/components/sensor/min_max.py new file mode 100644 index 00000000000..e88c2dffe6a --- /dev/null +++ b/homeassistant/components/sensor/min_max.py @@ -0,0 +1,146 @@ +""" +Support for displaying the minimal and the maximal value. + +For more details about this platform, please refer to the documentation at +https://home-assistant.io/components/sensor.min_max/ +""" +import logging + +import voluptuous as vol + +from homeassistant.components.sensor import PLATFORM_SCHEMA +from homeassistant.const import ( + CONF_NAME, STATE_UNKNOWN, CONF_TYPE, ATTR_UNIT_OF_MEASUREMENT) +import homeassistant.helpers.config_validation as cv +from homeassistant.helpers.entity import Entity +from homeassistant.helpers.event import track_state_change + +_LOGGER = logging.getLogger(__name__) + +ATTR_MIN_VALUE = 'min_value' +ATTR_MAX_VALUE = 'max_value' +ATTR_COUNT_SENSORS = 'count_sensors' +ATTR_MEAN = 'mean' + +ATTR_TO_PROPERTY = [ + ATTR_COUNT_SENSORS, + ATTR_MAX_VALUE, + ATTR_MEAN, + ATTR_MIN_VALUE, +] + +CONF_ENTITY_IDS = 'entity_ids' + +DEFAULT_NAME = 'Min/Max Sensor' + +ICON = 'mdi:calculator' + +SENSOR_TYPES = { + ATTR_MIN_VALUE: 'min', + ATTR_MAX_VALUE: 'max', + ATTR_MEAN: 'mean', +} + +PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({ + vol.Optional(CONF_TYPE, default=SENSOR_TYPES[ATTR_MAX_VALUE]): + vol.All(cv.string, vol.In(SENSOR_TYPES.values())), + vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string, + vol.Required(CONF_ENTITY_IDS): cv.entity_ids, +}) + + +def setup_platform(hass, config, add_devices, discovery_info=None): + """Set up the min/max sensor.""" + entity_ids = config.get(CONF_ENTITY_IDS) + name = config.get(CONF_NAME) + sensor_type = config.get(CONF_TYPE) + + add_devices([MinMaxSensor(hass, entity_ids, name, sensor_type)]) + + +# pylint: disable=too-many-instance-attributes +class MinMaxSensor(Entity): + """Representation of a min/max sensor.""" + + def __init__(self, hass, entity_ids, name, sensor_type): + """Initialize the min/max sensor.""" + self._hass = hass + self._entity_ids = entity_ids + self._sensor_type = sensor_type + self._name = '{} {}'.format( + name, next(v for k, v in SENSOR_TYPES.items() + if self._sensor_type == v)) + self._unit_of_measurement = None + self.min_value = self.max_value = self.mean = STATE_UNKNOWN + self.count_sensors = len(self._entity_ids) + self.states = {} + self.update() + + def min_max_sensor_state_listener(entity, old_state, new_state): + """Called when the sensor changes state.""" + if new_state.state is None or new_state.state in STATE_UNKNOWN: + return + + if self._unit_of_measurement is None: + self._unit_of_measurement = new_state.attributes.get( + ATTR_UNIT_OF_MEASUREMENT) + + if self._unit_of_measurement != 
new_state.attributes.get( + ATTR_UNIT_OF_MEASUREMENT): + _LOGGER.warning("Units of measurement do not match") + return + try: + self.states[entity] = float(new_state.state) + except ValueError: + _LOGGER.warning("Unable to store state. " + "Only numerical states are supported") + + self.update_ha_state(True) + + track_state_change(hass, entity_ids, min_max_sensor_state_listener) + + @property + def name(self): + """Return the name of the sensor.""" + return self._name + + @property + def state(self): + """Return the state of the sensor.""" + return getattr(self, next( + k for k, v in SENSOR_TYPES.items() if self._sensor_type == v)) + + @property + def unit_of_measurement(self): + """Return the unit the value is expressed in.""" + return self._unit_of_measurement + + @property + def should_poll(self): + """No polling needed.""" + return False + + @property + def device_state_attributes(self): + """Return the state attributes of the sensor.""" + state_attr = { + attr: getattr(self, attr) for attr + in ATTR_TO_PROPERTY if getattr(self, attr) is not None + } + return state_attr + + @property + def icon(self): + """Return the icon to use in the frontend, if any.""" + return ICON + + def update(self): + """Get the latest data and updates the states.""" + sensor_values = [self.states[k] for k in self._entity_ids + if k in self.states] + if len(sensor_values) == self.count_sensors: + self.min_value = min(sensor_values) + self.max_value = max(sensor_values) + self.mean = round(sum(sensor_values) / self.count_sensors, 2) + else: + self.min_value = self.max_value = self.mean = STATE_UNKNOWN diff --git a/homeassistant/components/sensor/mqtt.py b/homeassistant/components/sensor/mqtt.py index fadf171d15b..c3cc9e3003f 100644 --- a/homeassistant/components/sensor/mqtt.py +++ b/homeassistant/components/sensor/mqtt.py @@ -60,7 +60,7 @@ class MqttSensor(Entity): """A new MQTT message has been received.""" if value_template is not None: payload = value_template.render_with_possible_json_value( - payload) + payload, self._state) self._state = payload self.update_ha_state() diff --git a/homeassistant/components/sensor/nest.py b/homeassistant/components/sensor/nest.py index 135fc22895d..98d018a7c0b 100644 --- a/homeassistant/components/sensor/nest.py +++ b/homeassistant/components/sensor/nest.py @@ -135,7 +135,11 @@ class NestTempSensor(NestSensor): if temp is None: return None - return round(temp, 1) + if isinstance(temp, tuple): + low, high = temp + return "%s-%s" % (int(low), int(high)) + else: + return round(temp, 1) class NestWeatherSensor(NestSensor): diff --git a/homeassistant/components/sensor/netatmo.py b/homeassistant/components/sensor/netatmo.py index be8f2e7d76d..2d321752483 100644 --- a/homeassistant/components/sensor/netatmo.py +++ b/homeassistant/components/sensor/netatmo.py @@ -55,7 +55,7 @@ MODULE_SCHEMA = vol.Schema({ PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({ vol.Optional(CONF_STATION): cv.string, - vol.Required(CONF_MODULES): MODULE_SCHEMA, + vol.Optional(CONF_MODULES): MODULE_SCHEMA, }) @@ -65,18 +65,26 @@ def setup_platform(hass, config, add_devices, discovery_info=None): data = NetAtmoData(netatmo.NETATMO_AUTH, config.get(CONF_STATION, None)) dev = [] + import lnetatmo try: - # Iterate each module - for module_name, monitored_conditions in config[CONF_MODULES].items(): - # Test if module exist """ - if module_name not in data.get_module_names(): - _LOGGER.error('Module name: "%s" not found', module_name) - continue - # Only create sensor for monitored """ - for variable in 
monitored_conditions: - dev.append(NetAtmoSensor(data, module_name, variable)) - except KeyError: - pass + if CONF_MODULES in config: + # Iterate each module + for module_name, monitored_conditions in\ + config[CONF_MODULES].items(): + # Test if module exist """ + if module_name not in data.get_module_names(): + _LOGGER.error('Module name: "%s" not found', module_name) + continue + # Only create sensor for monitored """ + for variable in monitored_conditions: + dev.append(NetAtmoSensor(data, module_name, variable)) + else: + for module_name in data.get_module_names(): + for variable in\ + data.station_data.monitoredConditions(module_name): + dev.append(NetAtmoSensor(data, module_name, variable)) + except lnetatmo.NoDevice: + return None add_devices(dev) @@ -94,6 +102,11 @@ class NetAtmoSensor(Entity): self.type = sensor_type self._state = None self._unit_of_measurement = SENSOR_TYPES[sensor_type][1] + module_id = self.netatmo_data.\ + station_data.moduleByName(module=module_name)['_id'] + self._unique_id = "Netatmo Sensor {0} - {1} ({2})".format(self._name, + module_id, + self.type) self.update() @property @@ -101,6 +114,11 @@ class NetAtmoSensor(Entity): """Return the name of the sensor.""" return self._name + @property + def unique_id(self): + """Return the unique ID for this sensor.""" + return self._unique_id + @property def icon(self): """Icon to use in the frontend, if any.""" @@ -222,6 +240,7 @@ class NetAtmoData(object): """Initialize the data object.""" self.auth = auth self.data = None + self.station_data = None self.station = station def get_module_names(self): @@ -233,9 +252,10 @@ class NetAtmoData(object): def update(self): """Call the Netatmo API to update the data.""" import lnetatmo - dev_list = lnetatmo.DeviceList(self.auth) + self.station_data = lnetatmo.DeviceList(self.auth) if self.station is not None: - self.data = dev_list.lastData(station=self.station, exclude=3600) + self.data = self.station_data.lastData(station=self.station, + exclude=3600) else: - self.data = dev_list.lastData(exclude=3600) + self.data = self.station_data.lastData(exclude=3600) diff --git a/homeassistant/components/sensor/openweathermap.py b/homeassistant/components/sensor/openweathermap.py index e7936cc0535..b59bfa7dab5 100644 --- a/homeassistant/components/sensor/openweathermap.py +++ b/homeassistant/components/sensor/openweathermap.py @@ -12,15 +12,16 @@ import voluptuous as vol from homeassistant.components.sensor import PLATFORM_SCHEMA from homeassistant.const import ( CONF_API_KEY, CONF_NAME, TEMP_CELSIUS, TEMP_FAHRENHEIT, - CONF_MONITORED_CONDITIONS) + CONF_MONITORED_CONDITIONS, ATTR_ATTRIBUTION) import homeassistant.helpers.config_validation as cv from homeassistant.helpers.entity import Entity from homeassistant.util import Throttle -REQUIREMENTS = ['pyowm==2.4.0'] +REQUIREMENTS = ['pyowm==2.5.0'] _LOGGER = logging.getLogger(__name__) +CONF_ATTRIBUTION = "Data provied by OpenWeatherMap" CONF_FORECAST = 'forecast' DEFAULT_NAME = 'OWM' @@ -113,6 +114,13 @@ class OpenWeatherMapSensor(Entity): """Return the unit of measurement of this entity, if any.""" return self._unit_of_measurement + @property + def device_state_attributes(self): + """Return the state attributes.""" + return { + ATTR_ATTRIBUTION: CONF_ATTRIBUTION, + } + # pylint: disable=too-many-branches def update(self): """Get the latest data from OWM and updates the states.""" @@ -174,12 +182,12 @@ class WeatherData(object): """Get the latest data from OpenWeatherMap.""" obs = self.owm.weather_at_coords(self.latitude, self.longitude) 
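        # When the lookup succeeds, obs is a pyowm Observation; get_weather()
        # below caches the current conditions that the OpenWeatherMap sensors
        # read. A rough usage sketch (method names as in pyowm 2.x, shown for
        # illustration only):
        #
        #     weather = obs.get_weather()
        #     weather.get_temperature('celsius')   # e.g. {'temp': 21.3, ...}
        #     weather.get_humidity()               # e.g. 56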
if obs is None: - _LOGGER.warning('Failed to fetch data from OWM') + _LOGGER.warning("Failed to fetch data from OpenWeatherMap") return self.data = obs.get_weather() if self.forecast == 1: - obs = self.owm.three_hours_forecast_at_coords(self.latitude, - self.longitude) + obs = self.owm.three_hours_forecast_at_coords( + self.latitude, self.longitude) self.fc_data = obs.get_forecast() diff --git a/homeassistant/components/sensor/pi_hole.py b/homeassistant/components/sensor/pi_hole.py index a578a6bb119..a9a4c11e67f 100644 --- a/homeassistant/components/sensor/pi_hole.py +++ b/homeassistant/components/sensor/pi_hole.py @@ -89,8 +89,8 @@ class PiHoleSensor(Entity): # pylint: disable=no-member @property - def state_attributes(self): - """Return the state attributes of the GPS.""" + def device_state_attributes(self): + """Return the state attributes of the Pi-Hole.""" return { ATTR_BLOCKED_DOMAINS: self._state.get('domains_being_blocked'), ATTR_PERCENTAGE_TODAY: self._state.get('ads_percentage_today'), @@ -98,7 +98,7 @@ class PiHoleSensor(Entity): } def update(self): - """Get the latest data from REST API and updates the state.""" + """Get the latest data from the Pi-Hole API and updates the state.""" try: self.rest.update() self._state = json.loads(self.rest.data) diff --git a/homeassistant/components/sensor/pilight.py b/homeassistant/components/sensor/pilight.py new file mode 100644 index 00000000000..99caebd708c --- /dev/null +++ b/homeassistant/components/sensor/pilight.py @@ -0,0 +1,96 @@ +""" +Support for pilight sensors. + +For more details about this platform, please refer to the documentation at +https://home-assistant.io/components/sensor.pilight/ +""" +import logging + +import voluptuous as vol + +from homeassistant.const import ( + CONF_NAME, STATE_UNKNOWN, CONF_UNIT_OF_MEASUREMENT, + CONF_PAYLOAD) +from homeassistant.components.sensor import PLATFORM_SCHEMA +from homeassistant.helpers.entity import Entity +import homeassistant.components.pilight as pilight +import homeassistant.helpers.config_validation as cv + +_LOGGER = logging.getLogger(__name__) + +DEFAULT_NAME = 'Pilight Sensor' +DEPENDENCIES = ['pilight'] + +PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({ + vol.Required("variable"): cv.string, + vol.Required(CONF_PAYLOAD): vol.Schema(dict), + vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string, + vol.Optional(CONF_UNIT_OF_MEASUREMENT, default=None): cv.string, +}) + + +# pylint: disable=unused-argument +def setup_platform(hass, config, add_devices, discovery_info=None): + """Setup pilight Sensor.""" + add_devices([PilightSensor( + hass=hass, + name=config.get(CONF_NAME), + variable=config.get("variable"), + payload=config.get(CONF_PAYLOAD), + unit_of_measurement=config.get(CONF_UNIT_OF_MEASUREMENT) + )]) + + +# pylint: disable=too-many-arguments, too-many-instance-attributes +class PilightSensor(Entity): + """Representation of a sensor that can be updated using pilight.""" + + def __init__(self, hass, name, variable, payload, unit_of_measurement): + """Initialize the sensor.""" + self._state = STATE_UNKNOWN + self._hass = hass + self._name = name + self._variable = variable + self._payload = payload + self._unit_of_measurement = unit_of_measurement + + hass.bus.listen(pilight.EVENT, self._handle_code) + + @property + def should_poll(self): + """No polling needed.""" + return False + + @property + def name(self): + """Return the name of the sensor.""" + return self._name + + @property + def unit_of_measurement(self): + """Return the unit this state is expressed in.""" + return 
self._unit_of_measurement + + @property + def state(self): + """Return the state of the entity.""" + return self._state + + def _handle_code(self, call): + """Handle received code by the pilight-daemon. + + If the code matches the defined playload + of this sensor the sensor state is changed accordingly. + """ + # Check if received code matches defined playoad + # True if payload is contained in received code dict, not + # all items have to match + if self._payload.items() <= call.data.items(): + try: + value = call.data[self._variable] + self._state = value + self.update_ha_state() + except KeyError: + _LOGGER.error( + 'No variable %s in received code data %s', + str(self._variable), str(call.data)) diff --git a/homeassistant/components/sensor/scrape.py b/homeassistant/components/sensor/scrape.py new file mode 100644 index 00000000000..4789703d051 --- /dev/null +++ b/homeassistant/components/sensor/scrape.py @@ -0,0 +1,109 @@ +""" +Support for getting data from websites with scraping. + +For more details about this platform, please refer to the documentation at +https://home-assistant.io/components/sensor.scrape/ +""" +import logging + +import voluptuous as vol + +from homeassistant.components.sensor import PLATFORM_SCHEMA +from homeassistant.components.sensor.rest import RestData +from homeassistant.const import ( + CONF_NAME, CONF_RESOURCE, CONF_UNIT_OF_MEASUREMENT, STATE_UNKNOWN, + CONF_VALUE_TEMPLATE, CONF_VERIFY_SSL) +from homeassistant.helpers.entity import Entity +import homeassistant.helpers.config_validation as cv + +REQUIREMENTS = ['beautifulsoup4==4.5.1'] + +_LOGGER = logging.getLogger(__name__) + +CONF_SELECT = 'select' + +DEFAULT_NAME = 'Web scrape' +DEFAULT_VERIFY_SSL = True + +PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({ + vol.Required(CONF_RESOURCE): cv.string, + vol.Required(CONF_SELECT): cv.string, + vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string, + vol.Optional(CONF_UNIT_OF_MEASUREMENT): cv.string, + vol.Optional(CONF_VALUE_TEMPLATE): cv.template, + vol.Optional(CONF_VERIFY_SSL, default=DEFAULT_VERIFY_SSL): cv.boolean, +}) + + +# pylint: disable=too-many-locals +def setup_platform(hass, config, add_devices, discovery_info=None): + """Set up the Web scrape sensor.""" + name = config.get(CONF_NAME) + resource = config.get(CONF_RESOURCE) + method = 'GET' + payload = auth = headers = None + verify_ssl = config.get(CONF_VERIFY_SSL) + select = config.get(CONF_SELECT) + unit = config.get(CONF_UNIT_OF_MEASUREMENT) + value_template = config.get(CONF_VALUE_TEMPLATE) + if value_template is not None: + value_template.hass = hass + + rest = RestData(method, resource, auth, headers, payload, verify_ssl) + rest.update() + + if rest.data is None: + _LOGGER.error("Unable to fetch data from %s", resource) + return False + + add_devices([ + ScrapeSensor(hass, rest, name, select, value_template, unit) + ]) + + +# pylint: disable=too-many-instance-attributes +class ScrapeSensor(Entity): + """Representation of a web scrape sensor.""" + + # pylint: disable=too-many-arguments + def __init__(self, hass, rest, name, select, value_template, unit): + """Initialize a web scrape sensor.""" + self.rest = rest + self._name = name + self._state = STATE_UNKNOWN + self._select = select + self._value_template = value_template + self._unit_of_measurement = unit + self.update() + + @property + def name(self): + """Return the name of the sensor.""" + return self._name + + @property + def unit_of_measurement(self): + """Return the unit the value is expressed in.""" + return self._unit_of_measurement + 
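    # update() below feeds the fetched page into BeautifulSoup and takes the
    # text of the first element matching the configured CSS selector. A small
    # illustration (values are made up):
    #
    #     from bs4 import BeautifulSoup
    #     raw = BeautifulSoup('<span class="release">2.0.1</span>', 'html.parser')
    #     raw.select('.release')[0].text   # -> '2.0.1'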
+ @property + def state(self): + """Return the state of the device.""" + return self._state + + def update(self): + """Get the latest data from the source and updates the state.""" + self.rest.update() + + from bs4 import BeautifulSoup + + raw_data = BeautifulSoup(self.rest.data, 'html.parser') + _LOGGER.debug(raw_data) + value = raw_data.select(self._select)[0].text + _LOGGER.debug(value) + + if self._value_template is not None: + self._state = self._value_template.render_with_possible_json_value( + value, STATE_UNKNOWN) + else: + self._state = value diff --git a/homeassistant/components/sensor/statistics.py b/homeassistant/components/sensor/statistics.py index 0c413ab9263..6e75c105ec3 100644 --- a/homeassistant/components/sensor/statistics.py +++ b/homeassistant/components/sensor/statistics.py @@ -74,7 +74,7 @@ class StatisticsSensor(Entity): self.min = self.max = self.total = self.count = 0 self.update() - def calculate_sensor_state_listener(entity, old_state, new_state): + def stats_sensor_state_listener(entity, old_state, new_state): """Called when the sensor changes state.""" self._unit_of_measurement = new_state.attributes.get( ATTR_UNIT_OF_MEASUREMENT) @@ -87,7 +87,7 @@ class StatisticsSensor(Entity): self.update_ha_state(True) - track_state_change(hass, entity_id, calculate_sensor_state_listener) + track_state_change(hass, entity_id, stats_sensor_state_listener) @property def name(self): diff --git a/homeassistant/components/sensor/swiss_hydrological_data.py b/homeassistant/components/sensor/swiss_hydrological_data.py index b2e95690727..c8e0be68062 100644 --- a/homeassistant/components/sensor/swiss_hydrological_data.py +++ b/homeassistant/components/sensor/swiss_hydrological_data.py @@ -11,7 +11,8 @@ import voluptuous as vol import requests from homeassistant.components.sensor import PLATFORM_SCHEMA -from homeassistant.const import (TEMP_CELSIUS, CONF_NAME, STATE_UNKNOWN) +from homeassistant.const import ( + TEMP_CELSIUS, CONF_NAME, STATE_UNKNOWN, ATTR_ATTRIBUTION) import homeassistant.helpers.config_validation as cv from homeassistant.helpers.entity import Entity from homeassistant.util import Throttle @@ -22,19 +23,23 @@ _LOGGER = logging.getLogger(__name__) _RESOURCE = 'http://www.hydrodata.ch/xml/SMS.xml' CONF_STATION = 'station' +CONF_ATTRIBUTION = "Data provided by the Swiss Federal Office for the " \ + "Environment FOEN" + DEFAULT_NAME = 'Water temperature' + ICON = 'mdi:cup-water' -ATTR_LOCATION = 'Location' -ATTR_UPDATE = 'Update' -ATTR_DISCHARGE = 'Discharge' -ATTR_WATERLEVEL = 'Level' -ATTR_DISCHARGE_MEAN = 'Discharge mean' -ATTR_WATERLEVEL_MEAN = 'Level mean' -ATTR_TEMPERATURE_MEAN = 'Temperature mean' -ATTR_DISCHARGE_MAX = 'Discharge max' -ATTR_WATERLEVEL_MAX = 'Level max' -ATTR_TEMPERATURE_MAX = 'Temperature max' +ATTR_LOCATION = 'location' +ATTR_UPDATE = 'update' +ATTR_DISCHARGE = 'discharge' +ATTR_WATERLEVEL = 'level' +ATTR_DISCHARGE_MEAN = 'discharge_mean' +ATTR_WATERLEVEL_MEAN = 'level_mean' +ATTR_TEMPERATURE_MEAN = 'temperature_mean' +ATTR_DISCHARGE_MAX = 'discharge_max' +ATTR_WATERLEVEL_MAX = 'level_max' +ATTR_TEMPERATURE_MAX = 'temperature_max' PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({ vol.Required(CONF_STATION): vol.Coerce(int), @@ -125,6 +130,7 @@ class SwissHydrologicalDataSensor(Entity): attributes[ATTR_LOCATION] = self.data.measurings['location'] attributes[ATTR_UPDATE] = self.data.measurings['update_time'] + attributes[ATTR_ATTRIBUTION] = CONF_ATTRIBUTION return attributes @property diff --git a/homeassistant/components/sensor/swiss_public_transport.py 
b/homeassistant/components/sensor/swiss_public_transport.py index d7d80ac2a3c..823a96cc01f 100644 --- a/homeassistant/components/sensor/swiss_public_transport.py +++ b/homeassistant/components/sensor/swiss_public_transport.py @@ -11,7 +11,7 @@ import requests import voluptuous as vol from homeassistant.components.sensor import PLATFORM_SCHEMA -from homeassistant.const import CONF_NAME +from homeassistant.const import CONF_NAME, ATTR_ATTRIBUTION import homeassistant.util.dt as dt_util from homeassistant.helpers.entity import Entity from homeassistant.util import Throttle @@ -20,12 +20,13 @@ import homeassistant.helpers.config_validation as cv _LOGGER = logging.getLogger(__name__) _RESOURCE = 'http://transport.opendata.ch/v1/' -ATTR_DEPARTURE_TIME1 = 'Next departure' -ATTR_DEPARTURE_TIME2 = 'Next on departure' -ATTR_REMAINING_TIME = 'Remaining time' -ATTR_START = 'Start' -ATTR_TARGET = 'Destination' +ATTR_DEPARTURE_TIME1 = 'next_departure' +ATTR_DEPARTURE_TIME2 = 'next_on_departure' +ATTR_REMAINING_TIME = 'remaining_time' +ATTR_START = 'start' +ATTR_TARGET = 'destination' +CONF_ATTRIBUTION = "Data provided by transport.opendata.ch" CONF_DESTINATION = 'to' CONF_START = 'from' @@ -96,7 +97,8 @@ class SwissPublicTransportSensor(Entity): ATTR_START: self._from, ATTR_TARGET: self._to, ATTR_REMAINING_TIME: '{}'.format( - ':'.join(str(self._times[2]).split(':')[:2])) + ':'.join(str(self._times[2]).split(':')[:2])), + ATTR_ATTRIBUTION: CONF_ATTRIBUTION, } @property diff --git a/homeassistant/components/sensor/tcp.py b/homeassistant/components/sensor/tcp.py index 7a3c4e9bfc3..ab27e1e580f 100644 --- a/homeassistant/components/sensor/tcp.py +++ b/homeassistant/components/sensor/tcp.py @@ -8,33 +8,46 @@ import logging import socket import select -from homeassistant.const import CONF_NAME, CONF_HOST +import voluptuous as vol + +from homeassistant.components.sensor import PLATFORM_SCHEMA +from homeassistant.const import ( + CONF_NAME, CONF_HOST, CONF_PORT, CONF_PAYLOAD, CONF_TIMEOUT, + CONF_UNIT_OF_MEASUREMENT, CONF_VALUE_TEMPLATE) from homeassistant.exceptions import TemplateError from homeassistant.helpers.entity import Entity from homeassistant.helpers.template import Template - -CONF_PORT = "port" -CONF_TIMEOUT = "timeout" -CONF_PAYLOAD = "payload" -CONF_UNIT = "unit" -CONF_VALUE_TEMPLATE = "value_template" -CONF_VALUE_ON = "value_on" -CONF_BUFFER_SIZE = "buffer_size" - -DEFAULT_TIMEOUT = 10 -DEFAULT_BUFFER_SIZE = 1024 +import homeassistant.helpers.config_validation as cv _LOGGER = logging.getLogger(__name__) +CONF_BUFFER_SIZE = 'buffer_size' +CONF_VALUE_ON = 'value_on' -def setup_platform(hass, config, add_entities, discovery_info=None): - """Setup the TCP Sensor.""" - if not Sensor.validate_config(config): - return False - add_entities((Sensor(hass, config),)) +DEFAULT_BUFFER_SIZE = 1024 +DEFAULT_NAME = 'TCP Sensor' +DEFAULT_TIMEOUT = 10 + +PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({ + vol.Required(CONF_HOST): cv.string, + vol.Required(CONF_PORT): cv.port, + vol.Required(CONF_PAYLOAD): cv.string, + vol.Optional(CONF_BUFFER_SIZE, default=DEFAULT_BUFFER_SIZE): + cv.positive_int, + vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string, + vol.Optional(CONF_TIMEOUT, default=DEFAULT_TIMEOUT): cv.positive_int, + vol.Optional(CONF_UNIT_OF_MEASUREMENT): cv.string, + vol.Optional(CONF_VALUE_ON): cv.string, + vol.Optional(CONF_VALUE_TEMPLATE): cv.template, +}) -class Sensor(Entity): +def setup_platform(hass, config, add_devices, discovery_info=None): + """Set up the TCP Sensor.""" + 
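    # By the time setup_platform runs, `config` should already have been
    # validated and default-filled by the voluptuous PLATFORM_SCHEMA above,
    # which replaces the old Sensor.validate_config() helper. A minimal
    # standalone sketch of that pattern (hypothetical keys, illustration only):
    #
    #     schema = vol.Schema({
    #         vol.Required('host'): str,
    #         vol.Optional('timeout', default=10): int,
    #     })
    #     schema({'host': '10.0.0.1'})   # -> {'host': '10.0.0.1', 'timeout': 10}
    #     schema({})                     # raises vol.MultipleInvalid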
add_devices([TcpSensor(hass, config)]) + + +class TcpSensor(Entity): """Implementation of a TCP socket based sensor.""" required = tuple() @@ -49,37 +62,25 @@ class Sensor(Entity): self._hass = hass self._config = { CONF_NAME: config.get(CONF_NAME), - CONF_HOST: config[CONF_HOST], - CONF_PORT: config[CONF_PORT], - CONF_TIMEOUT: config.get(CONF_TIMEOUT, DEFAULT_TIMEOUT), - CONF_PAYLOAD: config[CONF_PAYLOAD], - CONF_UNIT: config.get(CONF_UNIT), + CONF_HOST: config.get(CONF_HOST), + CONF_PORT: config.get(CONF_PORT), + CONF_TIMEOUT: config.get(CONF_TIMEOUT), + CONF_PAYLOAD: config.get(CONF_PAYLOAD), + CONF_UNIT_OF_MEASUREMENT: config.get(CONF_UNIT_OF_MEASUREMENT), CONF_VALUE_TEMPLATE: value_template, CONF_VALUE_ON: config.get(CONF_VALUE_ON), - CONF_BUFFER_SIZE: config.get( - CONF_BUFFER_SIZE, DEFAULT_BUFFER_SIZE), + CONF_BUFFER_SIZE: config.get(CONF_BUFFER_SIZE), } self._state = None self.update() - @classmethod - def validate_config(cls, config): - """Ensure the configuration has all of the necessary values.""" - always_required = (CONF_HOST, CONF_PORT, CONF_PAYLOAD) - for key in always_required + tuple(cls.required): - if key not in config: - _LOGGER.error( - "You must provide %r to create any TCP entity.", key) - return False - return True - @property def name(self): """Return the name of this sensor.""" name = self._config[CONF_NAME] if name is not None: return name - return super(Sensor, self).name + return super(TcpSensor, self).name @property def state(self): @@ -89,7 +90,7 @@ class Sensor(Entity): @property def unit_of_measurement(self): """Return the unit of measurement of this entity.""" - return self._config[CONF_UNIT] + return self._config[CONF_UNIT_OF_MEASUREMENT] def update(self): """Get the latest value for this sensor.""" diff --git a/homeassistant/components/sensor/template.py b/homeassistant/components/sensor/template.py index 1abd1d2fd94..600d188bdc0 100644 --- a/homeassistant/components/sensor/template.py +++ b/homeassistant/components/sensor/template.py @@ -15,8 +15,8 @@ from homeassistant.const import ( ATTR_FRIENDLY_NAME, ATTR_UNIT_OF_MEASUREMENT, CONF_VALUE_TEMPLATE, ATTR_ENTITY_ID, CONF_SENSORS) from homeassistant.exceptions import TemplateError -from homeassistant.helpers.entity import Entity, generate_entity_id -from homeassistant.helpers.event import track_state_change +from homeassistant.helpers.entity import Entity, async_generate_entity_id +from homeassistant.helpers.event import async_track_state_change import homeassistant.helpers.config_validation as cv _LOGGER = logging.getLogger(__name__) @@ -33,8 +33,9 @@ PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({ }) +@asyncio.coroutine # pylint: disable=unused-argument -def setup_platform(hass, config, add_devices, discovery_info=None): +def async_setup_platform(hass, config, async_add_devices, discovery_info=None): """Setup the template sensors.""" sensors = [] @@ -59,7 +60,8 @@ def setup_platform(hass, config, add_devices, discovery_info=None): if not sensors: _LOGGER.error("No sensors added") return False - add_devices(sensors) + + hass.loop.create_task(async_add_devices(sensors)) return True @@ -71,21 +73,23 @@ class SensorTemplate(Entity): state_template, entity_ids): """Initialize the sensor.""" self.hass = hass - self.entity_id = generate_entity_id(ENTITY_ID_FORMAT, device_id, - hass=hass) + self.entity_id = async_generate_entity_id(ENTITY_ID_FORMAT, device_id, + hass=hass) self._name = friendly_name self._unit_of_measurement = unit_of_measurement self._template = state_template self._state = None - self.update() + 
# update state + self._async_render() @callback def template_sensor_state_listener(entity, old_state, new_state): """Called when the target device changes state.""" hass.loop.create_task(self.async_update_ha_state(True)) - track_state_change(hass, entity_ids, template_sensor_state_listener) + async_track_state_change( + hass, entity_ids, template_sensor_state_listener) @property def name(self): @@ -109,7 +113,11 @@ class SensorTemplate(Entity): @asyncio.coroutine def async_update(self): - """Get the latest data and update the states.""" + """Update the state from the template.""" + self._async_render() + + def _async_render(self): + """Render the state from the template.""" try: self._state = self._template.async_render() except TemplateError as ex: diff --git a/homeassistant/components/sensor/uber.py b/homeassistant/components/sensor/uber.py index 7f250431984..5a3f931d76b 100644 --- a/homeassistant/components/sensor/uber.py +++ b/homeassistant/components/sensor/uber.py @@ -14,7 +14,7 @@ from homeassistant.helpers.entity import Entity from homeassistant.util import Throttle import homeassistant.helpers.config_validation as cv -REQUIREMENTS = ['uber_rides==0.2.5'] +REQUIREMENTS = ['uber_rides==0.2.7'] _LOGGER = logging.getLogger(__name__) @@ -49,10 +49,9 @@ def setup_platform(hass, config, add_devices, discovery_info=None): wanted_product_ids = config.get(CONF_PRODUCT_IDS) dev = [] - timeandpriceest = UberEstimate(session, config[CONF_START_LATITUDE], - config[CONF_START_LONGITUDE], - config.get(CONF_END_LATITUDE), - config.get(CONF_END_LONGITUDE)) + timeandpriceest = UberEstimate( + session, config[CONF_START_LATITUDE], config[CONF_START_LONGITUDE], + config.get(CONF_END_LATITUDE), config.get(CONF_END_LONGITUDE)) for product_id, product in timeandpriceest.products.items(): if (wanted_product_ids is not None) and \ (product_id not in wanted_product_ids): @@ -114,7 +113,7 @@ class UberSensor(Entity): @property def device_state_attributes(self): """Return the state attributes.""" - time_estimate = self._product.get("time_estimate_seconds") + time_estimate = self._product.get('time_estimate_seconds') params = { 'Product ID': self._product['product_id'], 'Product short description': self._product['short_description'], diff --git a/homeassistant/components/sensor/wunderground.py b/homeassistant/components/sensor/wunderground.py index 623016518ac..69b53b0c259 100644 --- a/homeassistant/components/sensor/wunderground.py +++ b/homeassistant/components/sensor/wunderground.py @@ -11,22 +11,25 @@ import requests import voluptuous as vol from homeassistant.components.sensor import PLATFORM_SCHEMA -from homeassistant.helpers.entity import Entity -import homeassistant.helpers.config_validation as cv -from homeassistant.util import Throttle from homeassistant.const import ( CONF_MONITORED_CONDITIONS, CONF_API_KEY, TEMP_FAHRENHEIT, TEMP_CELSIUS, - STATE_UNKNOWN) + STATE_UNKNOWN, ATTR_ATTRIBUTION) +from homeassistant.helpers.entity import Entity +from homeassistant.util import Throttle +import homeassistant.helpers.config_validation as cv _RESOURCE = 'http://api.wunderground.com/api/{}/conditions/q/' +_ALERTS = 'http://api.wunderground.com/api/{}/alerts/q/' _LOGGER = logging.getLogger(__name__) +CONF_ATTRIBUTION = "Data provided by the WUnderground weather service" CONF_PWS_ID = 'pws_id' MIN_TIME_BETWEEN_UPDATES = timedelta(seconds=300) # Sensor types are defined like: Name, units SENSOR_TYPES = { + 'alerts': ['Alerts', None], 'weather': ['Weather Summary', None], 'station_id': ['Station ID', None], 
'feelslike_c': ['Feels Like (°C)', TEMP_CELSIUS], @@ -56,6 +59,14 @@ SENSOR_TYPES = { 'solarradiation': ['Solar Radiation', None] } +# Alert Attributes +ALERTS_ATTRS = [ + 'date', + 'description', + 'expires', + 'message', +] + PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({ vol.Required(CONF_API_KEY): cv.string, vol.Optional(CONF_PWS_ID): cv.string, @@ -105,8 +116,31 @@ class WUndergroundSensor(Entity): return int(self.rest.data[self._condition][:-1]) else: return self.rest.data[self._condition] - else: - return STATE_UNKNOWN + + if self.rest.alerts and self._condition == 'alerts': + return len(self.rest.alerts) + return STATE_UNKNOWN + + @property + def device_state_attributes(self): + """Return the state attributes.""" + attrs = {} + + attrs[ATTR_ATTRIBUTION] = CONF_ATTRIBUTION + + if not self.rest.alerts or self._condition != 'alerts': + return attrs + + multiple_alerts = len(self.rest.alerts) > 1 + for data in self.rest.alerts: + for alert in ALERTS_ATTRS: + if data[alert]: + if multiple_alerts: + dkey = alert.capitalize() + '_' + data['type'] + else: + dkey = alert.capitalize() + attrs[dkey] = data[alert] + return attrs @property def entity_picture(self): @@ -121,11 +155,13 @@ class WUndergroundSensor(Entity): def update(self): """Update current conditions.""" - self.rest.update() + if self._condition == 'alerts': + self.rest.update_alerts() + else: + self.rest.update() + # pylint: disable=too-few-public-methods - - class WUndergroundData(object): """Get data from WUnderground.""" @@ -137,9 +173,10 @@ class WUndergroundData(object): self._latitude = hass.config.latitude self._longitude = hass.config.longitude self.data = None + self.alerts = None - def _build_url(self): - url = _RESOURCE.format(self._api_key) + def _build_url(self, baseurl=_RESOURCE): + url = baseurl.format(self._api_key) if self._pws_id: url = url + 'pws:{}'.format(self._pws_id) else: @@ -161,3 +198,18 @@ class WUndergroundData(object): _LOGGER.error("Check WUnderground API %s", err.args) self.data = None raise + + @Throttle(MIN_TIME_BETWEEN_UPDATES) + def update_alerts(self): + """Get the latest alerts data from WUnderground.""" + try: + result = requests.get(self._build_url(_ALERTS), timeout=10).json() + if "error" in result['response']: + raise ValueError(result['response']["error"] + ["description"]) + else: + self.alerts = result["alerts"] + except ValueError as err: + _LOGGER.error("Check WUnderground API %s", err.args) + self.alerts = None + raise diff --git a/homeassistant/components/sensor/yahoo_finance.py b/homeassistant/components/sensor/yahoo_finance.py index 822c50823fc..a389a13656d 100644 --- a/homeassistant/components/sensor/yahoo_finance.py +++ b/homeassistant/components/sensor/yahoo_finance.py @@ -10,7 +10,7 @@ from datetime import timedelta import voluptuous as vol from homeassistant.components.sensor import PLATFORM_SCHEMA -from homeassistant.const import CONF_NAME +from homeassistant.const import CONF_NAME, ATTR_ATTRIBUTION from homeassistant.helpers.entity import Entity from homeassistant.util import Throttle import homeassistant.helpers.config_validation as cv @@ -19,7 +19,9 @@ REQUIREMENTS = ['yahoo-finance==1.2.1'] _LOGGER = logging.getLogger(__name__) +CONF_ATTRIBUTION = "Stock market information provided by Yahoo! Inc." CONF_SYMBOL = 'symbol' + DEFAULT_SYMBOL = 'YHOO' DEFAULT_NAME = 'Yahoo Stock' @@ -28,8 +30,8 @@ ICON = 'mdi:currency-usd' MIN_TIME_BETWEEN_UPDATES = timedelta(minutes=1) ATTR_CHANGE = 'Change' -ATTR_OPEN = 'Open' -ATTR_PREV_CLOSE = 'Prev. 
Close' +ATTR_OPEN = 'open' +ATTR_PREV_CLOSE = 'prev_close' PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({ vol.Optional(CONF_SYMBOL, default=DEFAULT_SYMBOL): cv.string, @@ -82,10 +84,7 @@ class YahooFinanceSensor(Entity): ATTR_CHANGE: self.data.price_change, ATTR_OPEN: self.data.price_open, ATTR_PREV_CLOSE: self.data.prev_close, - 'About': "Stock market information delivered by Yahoo!" - " Inc. are provided free of charge for use" - " by individuals and non-profit organizations" - " for personal, non-commercial uses." + ATTR_ATTRIBUTION: CONF_ATTRIBUTION, } @property diff --git a/homeassistant/components/sensor/yr.py b/homeassistant/components/sensor/yr.py index d69bd65688a..d73a016003c 100644 --- a/homeassistant/components/sensor/yr.py +++ b/homeassistant/components/sensor/yr.py @@ -5,13 +5,15 @@ For more details about this platform, please refer to the documentation at https://home-assistant.io/components/sensor.yr/ """ import logging + import requests import voluptuous as vol import homeassistant.helpers.config_validation as cv from homeassistant.components.sensor import PLATFORM_SCHEMA from homeassistant.const import ( - CONF_LATITUDE, CONF_LONGITUDE, CONF_ELEVATION, CONF_MONITORED_CONDITIONS) + CONF_LATITUDE, CONF_LONGITUDE, CONF_ELEVATION, CONF_MONITORED_CONDITIONS, + ATTR_ATTRIBUTION) from homeassistant.helpers.entity import Entity from homeassistant.util import dt as dt_util @@ -19,10 +21,13 @@ REQUIREMENTS = ['xmltodict==0.10.2'] _LOGGER = logging.getLogger(__name__) +CONF_ATTRIBUTION = "Weather forecast from yr.no, delivered by the Norwegian " \ + "Meteorological Institute and the NRK." + # Sensor types are defined like so: SENSOR_TYPES = { 'symbol': ['Symbol', None], - 'precipitation': ['Condition', 'mm'], + 'precipitation': ['Precipitation', 'mm'], 'temperature': ['Temperature', '°C'], 'windSpeed': ['Wind speed', 'm/s'], 'windGust': ['Wind gust', 'm/s'], @@ -108,8 +113,7 @@ class YrSensor(Entity): def device_state_attributes(self): """Return the state attributes.""" return { - 'about': "Weather forecast from yr.no, delivered by the" - " Norwegian Meteorological Institute and the NRK" + ATTR_ATTRIBUTION: CONF_ATTRIBUTION, } @property diff --git a/homeassistant/components/sensor/yweather.py b/homeassistant/components/sensor/yweather.py index f482d8d2e2c..f59913facb8 100644 --- a/homeassistant/components/sensor/yweather.py +++ b/homeassistant/components/sensor/yweather.py @@ -11,7 +11,8 @@ import voluptuous as vol from homeassistant.components.sensor import PLATFORM_SCHEMA from homeassistant.const import ( - TEMP_CELSIUS, CONF_MONITORED_CONDITIONS, CONF_NAME, STATE_UNKNOWN) + TEMP_CELSIUS, CONF_MONITORED_CONDITIONS, CONF_NAME, STATE_UNKNOWN, + ATTR_ATTRIBUTION) import homeassistant.helpers.config_validation as cv from homeassistant.helpers.entity import Entity from homeassistant.util import Throttle @@ -20,6 +21,7 @@ REQUIREMENTS = ["yahooweather==0.8"] _LOGGER = logging.getLogger(__name__) +CONF_ATTRIBUTION = "Weather details provided by Yahoo! Inc." CONF_FORECAST = 'forecast' CONF_WOEID = 'woeid' @@ -140,9 +142,7 @@ class YahooWeatherSensor(Entity): def device_state_attributes(self): """Return the state attributes.""" return { - 'about': "Weather forecast delivered by Yahoo! Inc. are provided" - " free of charge for use by individuals and non-profit" - " organizations for personal, non-commercial uses." 
+ ATTR_ATTRIBUTION: CONF_ATTRIBUTION, } def update(self): diff --git a/homeassistant/components/sensor/zoneminder.py b/homeassistant/components/sensor/zoneminder.py new file mode 100644 index 00000000000..50446f735c3 --- /dev/null +++ b/homeassistant/components/sensor/zoneminder.py @@ -0,0 +1,93 @@ +""" +Support for Zoneminder Sensors. + +For more details about this platform, please refer to the documentation at +https://home-assistant.io/components/sensor.zoneminder/ +""" +import logging + +import homeassistant.components.zoneminder as zoneminder +from homeassistant.helpers.entity import Entity + +_LOGGER = logging.getLogger(__name__) + +DEPENDENCIES = ['zoneminder'] + + +def setup_platform(hass, config, add_devices, discovery_info=None): + """Setup Zoneminder platform.""" + sensors = [] + + monitors = zoneminder.get_state('api/monitors.json') + for i in monitors['monitors']: + sensors.append( + ZMSensorMonitors(int(i['Monitor']['Id']), i['Monitor']['Name']) + ) + sensors.append( + ZMSensorEvents(int(i['Monitor']['Id']), i['Monitor']['Name']) + ) + + add_devices(sensors) + + +class ZMSensorMonitors(Entity): + """Get the status of each monitor.""" + + def __init__(self, monitor_id, monitor_name): + """Initiate monitor sensor.""" + self._monitor_id = monitor_id + self._monitor_name = monitor_name + self._state = None + + @property + def name(self): + """Return the name of the sensor.""" + return "%s Status" % self._monitor_name + + @property + def state(self): + """Return the state of the sensor.""" + return self._state + + def update(self): + """Update the sensor.""" + monitor = zoneminder.get_state( + 'api/monitors/%i.json' % self._monitor_id + ) + if monitor['monitor']['Monitor']['Function'] is None: + self._state = "None" + else: + self._state = monitor['monitor']['Monitor']['Function'] + + +class ZMSensorEvents(Entity): + """Get the number of events for each monitor.""" + + def __init__(self, monitor_id, monitor_name): + """Initiate event sensor.""" + self._monitor_id = monitor_id + self._monitor_name = monitor_name + self._state = None + + @property + def name(self): + """Return the name of the sensor.""" + return "%s Events" % self._monitor_name + + @property + def unit_of_measurement(self): + """Return the unit of measurement of this entity, if any.""" + return 'Events' + + @property + def state(self): + """Return the state of the sensor.""" + return self._state + + def update(self): + """Update the sensor.""" + event = zoneminder.get_state( + 'api/events/index/MonitorId:%i.json' % self._monitor_id + ) + + self._state = event['pagination']['count'] diff --git a/homeassistant/components/sensor/zwave.py b/homeassistant/components/sensor/zwave.py index 4f474dbe73f..3a70e0d521f 100644 --- a/homeassistant/components/sensor/zwave.py +++ b/homeassistant/components/sensor/zwave.py @@ -12,20 +12,6 @@ from homeassistant.const import TEMP_CELSIUS, TEMP_FAHRENHEIT from homeassistant.helpers.entity import Entity -FIBARO = 0x010f -FIBARO_WALL_PLUG = 0x1000 -FIBARO_WALL_PLUG_SENSOR_METER = (FIBARO, FIBARO_WALL_PLUG, 8) - -WORKAROUND_IGNORE = 'ignore' - -DEVICE_MAPPINGS = { - # For some reason Fibaro Wall Plug reports 2 power consumptions. - # One value updates as the power consumption changes - # and the other does not change. - FIBARO_WALL_PLUG_SENSOR_METER: WORKAROUND_IGNORE, -} - - def setup_platform(hass, config, add_devices, discovery_info=None): """Setup Z-Wave sensors.""" # Return on empty `discovery_info`. 
Given you configure HA with: @@ -46,18 +32,6 @@ def setup_platform(hass, config, add_devices, discovery_info=None): # groups[1].associations): # node.groups[1].add_association(NETWORK.controller.node_id) - # Make sure that we have values for the key before converting to int - if (value.node.manufacturer_id.strip() and - value.node.product_id.strip()): - specific_sensor_key = (int(value.node.manufacturer_id, 16), - int(value.node.product_id, 16), - value.index) - - # Check workaround mappings for specific devices. - if specific_sensor_key in DEVICE_MAPPINGS: - if DEVICE_MAPPINGS[specific_sensor_key] == WORKAROUND_IGNORE: - return - # Generic Device mappings if node.has_command_class(zwave.const.COMMAND_CLASS_SENSOR_MULTILEVEL): add_devices([ZWaveMultilevelSensor(value)]) diff --git a/homeassistant/components/statsd.py b/homeassistant/components/statsd.py index 3a99a65fe6a..d85bc1e030c 100644 --- a/homeassistant/components/statsd.py +++ b/homeassistant/components/statsd.py @@ -61,18 +61,20 @@ def setup(hass, config): try: _state = state_helper.state_as_number(state) except ValueError: - return + # Set the state to none and continue for any numeric attributes. + _state = None states = dict(state.attributes) - _LOGGER.debug('Sending %s.%s', state.entity_id, _state) + _LOGGER.debug('Sending %s', state.entity_id) if show_attribute_flag is True: - statsd_client.gauge( - "%s.state" % state.entity_id, - _state, - sample_rate - ) + if isinstance(_state, (float, int)): + statsd_client.gauge( + "%s.state" % state.entity_id, + _state, + sample_rate + ) # Send attribute values for key, value in states.items(): @@ -81,7 +83,8 @@ def setup(hass, config): statsd_client.gauge(stat, value, sample_rate) else: - statsd_client.gauge(state.entity_id, _state, sample_rate) + if isinstance(_state, (float, int)): + statsd_client.gauge(state.entity_id, _state, sample_rate) # Increment the count statsd_client.incr(state.entity_id, rate=sample_rate) diff --git a/homeassistant/components/switch/arduino.py b/homeassistant/components/switch/arduino.py index 46e6baf8943..3aa61feffc8 100644 --- a/homeassistant/components/switch/arduino.py +++ b/homeassistant/components/switch/arduino.py @@ -8,27 +8,46 @@ https://home-assistant.io/components/switch.arduino/ """ import logging +import voluptuous as vol + import homeassistant.components.arduino as arduino -from homeassistant.components.switch import SwitchDevice -from homeassistant.const import DEVICE_DEFAULT_NAME +from homeassistant.components.switch import (SwitchDevice, PLATFORM_SCHEMA) +from homeassistant.const import CONF_NAME +import homeassistant.helpers.config_validation as cv DEPENDENCIES = ['arduino'] _LOGGER = logging.getLogger(__name__) +CONF_PINS = 'pins' +CONF_TYPE = 'digital' +CONF_NEGATE = 'negate' +CONF_INITIAL = 'initial' + +PIN_SCHEMA = vol.Schema({ + vol.Required(CONF_NAME): cv.string, + vol.Optional(CONF_INITIAL, default=False): cv.boolean, + vol.Optional(CONF_NEGATE, default=False): cv.boolean, +}) + +PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({ + vol.Required(CONF_PINS, default={}): + vol.Schema({cv.positive_int: PIN_SCHEMA}), +}) + def setup_platform(hass, config, add_devices, discovery_info=None): - """Setup the Arduino platform.""" + """Set up the Arduino platform.""" # Verify that Arduino board is present if arduino.BOARD is None: - _LOGGER.error('A connection has not been made to the Arduino board.') + _LOGGER.error("A connection has not been made to the Arduino board") return False + pins = config.get(CONF_PINS) + switches = [] - pins = 
config.get('pins') for pinnum, pin in pins.items(): - if pin.get('name'): - switches.append(ArduinoSwitch(pinnum, pin)) + switches.append(ArduinoSwitch(pinnum, pin)) add_devices(switches) @@ -38,13 +57,13 @@ class ArduinoSwitch(SwitchDevice): def __init__(self, pin, options): """Initialize the Pin.""" self._pin = pin - self._name = options.get('name') or DEVICE_DEFAULT_NAME - self.pin_type = options.get('type') + self._name = options.get(CONF_NAME) + self.pin_type = CONF_TYPE self.direction = 'out' - self._state = options.get('initial', False) + self._state = options.get(CONF_INITIAL) - if options.get('negate', False): + if options.get(CONF_NEGATE): self.turn_on_handler = arduino.BOARD.set_digital_out_low self.turn_off_handler = arduino.BOARD.set_digital_out_high else: diff --git a/homeassistant/components/switch/neato.py b/homeassistant/components/switch/neato.py new file mode 100644 index 00000000000..c5ff4bae861 --- /dev/null +++ b/homeassistant/components/switch/neato.py @@ -0,0 +1,148 @@ +""" +Support for Neato Connected Vacuums. + +For more details about this platform, please refer to the documentation at +https://home-assistant.io/components/switch.neato/ +""" +import time +import logging +from datetime import timedelta +from urllib.error import HTTPError +from requests.exceptions import HTTPError as req_HTTPError + +import voluptuous as vol + +from homeassistant.const import (CONF_PASSWORD, CONF_USERNAME, STATE_OFF, + STATE_ON, STATE_UNAVAILABLE) +from homeassistant.helpers.entity import ToggleEntity +import homeassistant.helpers.config_validation as cv + +_LOGGER = logging.getLogger(__name__) + +REQUIREMENTS = ['https://github.com/jabesq/pybotvac/archive/v0.0.1.zip' + '#pybotvac==0.0.1'] + +MIN_TIME_BETWEEN_SCANS = timedelta(seconds=10) +MIN_TIME_BETWEEN_FORCED_SCANS = timedelta(milliseconds=100) + +MIN_TIME_TO_WAIT = timedelta(seconds=10) +MIN_TIME_TO_LOCK_UPDATE = 10 + +SWITCH_TYPES = { + 'clean': ['Clean'] +} + +DOMAIN = 'neato' + +CONFIG_SCHEMA = vol.Schema({ + DOMAIN: vol.Schema({ + vol.Required(CONF_USERNAME): cv.string, + vol.Required(CONF_PASSWORD): cv.string, + }) +}, extra=vol.ALLOW_EXTRA) + + +def setup_platform(hass, config, add_devices, discovery_info=None): + """Setup the Neato platform.""" + from pybotvac import Account + + try: + auth = Account(config[CONF_USERNAME], config[CONF_PASSWORD]) + except HTTPError: + _LOGGER.error("Unable to connect to Neato API") + return False + + dev = [] + for robot in auth.robots: + for type_name in SWITCH_TYPES: + dev.append(NeatoConnectedSwitch(robot, type_name)) + add_devices(dev) + + +class NeatoConnectedSwitch(ToggleEntity): + """Neato Connected Switch (clean).""" + + def __init__(self, robot, switch_type): + """Initialize the Neato Connected switch.""" + self.type = switch_type + self.robot = robot + self.lock = False + self.last_lock_time = None + self.graceful_state = False + self._state = None + + def lock_update(self): + """Lock the update since Neato clean takes some time to start.""" + if self.is_update_locked(): + return + self.lock = True + self.last_lock_time = time.time() + + def reset_update_lock(self): + """Reset the update lock.""" + self.lock = False + self.last_lock_time = None + + def set_graceful_lock(self, state): + """Set the graceful state.""" + self.graceful_state = state + self.reset_update_lock() + self.lock_update() + + def is_update_locked(self): + """Check if the update method is locked.""" + if self.last_lock_time is None: + return False + + if time.time() - self.last_lock_time >=
MIN_TIME_TO_LOCK_UPDATE: + self.last_lock_time = None + return False + + return True + + @property + def state(self): + """Return the state.""" + if not self._state: + return STATE_UNAVAILABLE + if not self._state['availableCommands']['start'] and \ + not self._state['availableCommands']['stop'] and \ + not self._state['availableCommands']['pause'] and \ + not self._state['availableCommands']['resume'] and \ + not self._state['availableCommands']['goToBase']: + return STATE_UNAVAILABLE + return STATE_ON if self.is_on else STATE_OFF + + @property + def name(self): + """Return the name of the sensor.""" + return self.robot.name + ' ' + SWITCH_TYPES[self.type][0] + + @property + def is_on(self): + """Return true if device is on.""" + if self.is_update_locked(): + return self.graceful_state + if self._state['action'] == 1 and self._state['state'] == 2: + return True + return False + + def turn_on(self, **kwargs): + """Turn the device on.""" + self.set_graceful_lock(True) + self.robot.start_cleaning() + + def turn_off(self, **kwargs): + """Turn the device off (return the robot to its base).""" + self.robot.pause_cleaning() + time.sleep(1) + self.robot.send_to_base() + + def update(self): + """Refresh the robot state from the Neato API.""" + try: + self._state = self.robot.state + except req_HTTPError: + _LOGGER.error("Unable to retrieve the robot state") + self._state = None + return False diff --git a/homeassistant/components/switch/rpi_rf.py b/homeassistant/components/switch/rpi_rf.py index b96a1d70dc5..61a9fdb0333 100644 --- a/homeassistant/components/switch/rpi_rf.py +++ b/homeassistant/components/switch/rpi_rf.py @@ -4,51 +4,65 @@ Allows to configure a switch using a 433MHz module via GPIO on a Raspberry Pi. For more details about this platform, please refer to the documentation at https://home-assistant.io/components/switch.rpi_rf/ """ - import logging -from homeassistant.components.switch import SwitchDevice +import voluptuous as vol + +from homeassistant.components.switch import (SwitchDevice, PLATFORM_SCHEMA) +from homeassistant.const import (CONF_NAME, CONF_SWITCHES) +import homeassistant.helpers.config_validation as cv REQUIREMENTS = ['rpi-rf==0.9.5'] _LOGGER = logging.getLogger(__name__) +CONF_CODE_OFF = 'code_off' +CONF_CODE_ON = 'code_on' +CONF_GPIO = 'gpio' +CONF_PROTOCOL = 'protocol' +CONF_PULSELENGTH = 'pulselength' + +DEFAULT_PROTOCOL = 1 + +SWITCH_SCHEMA = vol.Schema({ + vol.Required(CONF_CODE_OFF): cv.positive_int, + vol.Required(CONF_CODE_ON): cv.positive_int, + vol.Optional(CONF_PULSELENGTH): cv.positive_int, + vol.Optional(CONF_PROTOCOL, default=DEFAULT_PROTOCOL): cv.positive_int, +}) + +PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({ + vol.Required(CONF_GPIO): cv.positive_int, + vol.Required(CONF_SWITCHES): vol.Schema({cv.string: SWITCH_SCHEMA}), +}) + # pylint: disable=unused-argument, import-error -def setup_platform(hass, config, add_devices_callback, discovery_info=None): +def setup_platform(hass, config, add_devices, discovery_info=None): """Find and return switches controlled by a generic RF device via GPIO.""" import rpi_rf - gpio = config.get('gpio') - if not gpio: - _LOGGER.error("No GPIO specified") - return False - + gpio = config.get(CONF_GPIO) rfdevice = rpi_rf.RFDevice(gpio) + switches = config.get(CONF_SWITCHES) - switches = config.get('switches', {}) devices = [] for dev_name, properties in switches.items(): - if not properties.get('code_on'): - _LOGGER.error("%s: code_on not specified", dev_name) - continue - if not properties.get('code_off'): - _LOGGER.error("%s: code_off
not specified", dev_name) - continue - devices.append( RPiRFSwitch( hass, - properties.get('name', dev_name), + properties.get(CONF_NAME, dev_name), rfdevice, - properties.get('protocol', None), - properties.get('pulselength', None), - properties.get('code_on'), - properties.get('code_off'))) + properties.get(CONF_PROTOCOL), + properties.get(CONF_PULSELENGTH), + properties.get(CONF_CODE_ON), + properties.get(CONF_CODE_OFF) + ) + ) if devices: rfdevice.enable_tx() - add_devices_callback(devices) + add_devices(devices) class RPiRFSwitch(SwitchDevice): @@ -84,10 +98,10 @@ class RPiRFSwitch(SwitchDevice): def _send_code(self, code, protocol, pulselength): """Send the code with a specified pulselength.""" - _LOGGER.info('Sending code: %s', code) + _LOGGER.info("Sending code: %s", code) res = self._rfdevice.tx_code(code, protocol, pulselength) if not res: - _LOGGER.error('Sending code %s failed', code) + _LOGGER.error("Sending code %s failed", code) return res def turn_on(self): diff --git a/homeassistant/components/switch/tellstick.py b/homeassistant/components/switch/tellstick.py index a0cc4294b23..e5134c07a34 100644 --- a/homeassistant/components/switch/tellstick.py +++ b/homeassistant/components/switch/tellstick.py @@ -61,3 +61,8 @@ class TellstickSwitchDevice(tellstick.TellstickDevice, ToggleEntity): """Turn the switch off.""" from tellcore.constants import TELLSTICK_TURNOFF self.call_tellstick(TELLSTICK_TURNOFF) + + @property + def force_update(self) -> bool: + """Will trigger anytime the state property is updated.""" + return True diff --git a/homeassistant/components/switch/template.py b/homeassistant/components/switch/template.py index b6ce400d0ac..2bac825b5b4 100644 --- a/homeassistant/components/switch/template.py +++ b/homeassistant/components/switch/template.py @@ -16,8 +16,8 @@ from homeassistant.const import ( ATTR_FRIENDLY_NAME, CONF_VALUE_TEMPLATE, STATE_OFF, STATE_ON, ATTR_ENTITY_ID, CONF_SWITCHES) from homeassistant.exceptions import TemplateError -from homeassistant.helpers.entity import generate_entity_id -from homeassistant.helpers.event import track_state_change +from homeassistant.helpers.entity import async_generate_entity_id +from homeassistant.helpers.event import async_track_state_change from homeassistant.helpers.script import Script import homeassistant.helpers.config_validation as cv @@ -40,8 +40,9 @@ PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({ }) +@asyncio.coroutine # pylint: disable=unused-argument -def setup_platform(hass, config, add_devices, discovery_info=None): +def async_setup_platform(hass, config, async_add_devices, discovery_info=None): """Setup the Template switch.""" switches = [] @@ -53,6 +54,8 @@ def setup_platform(hass, config, add_devices, discovery_info=None): entity_ids = (device_config.get(ATTR_ENTITY_ID) or state_template.extract_entities()) + state_template.hass = hass + switches.append( SwitchTemplate( hass, @@ -66,7 +69,8 @@ def setup_platform(hass, config, add_devices, discovery_info=None): if not switches: _LOGGER.error("No switches added") return False - add_devices(switches) + + hass.loop.create_task(async_add_devices(switches)) return True @@ -78,23 +82,23 @@ class SwitchTemplate(SwitchDevice): on_action, off_action, entity_ids): """Initialize the Template switch.""" self.hass = hass - self.entity_id = generate_entity_id(ENTITY_ID_FORMAT, device_id, - hass=hass) + self.entity_id = async_generate_entity_id(ENTITY_ID_FORMAT, device_id, + hass=hass) self._name = friendly_name self._template = state_template - state_template.hass = hass 
self._on_script = Script(hass, on_action) self._off_script = Script(hass, off_action) self._state = False - self.update() + self._async_render() @callback def template_switch_state_listener(entity, old_state, new_state): """Called when the target device changes state.""" hass.loop.create_task(self.async_update_ha_state(True)) - track_state_change(hass, entity_ids, template_switch_state_listener) + async_track_state_change( + hass, entity_ids, template_switch_state_listener) @property def name(self): @@ -127,6 +131,10 @@ class SwitchTemplate(SwitchDevice): @asyncio.coroutine def async_update(self): """Update the state from the template.""" + self._async_render() + + def _async_render(self): + """Render the state from the template.""" try: state = self._template.async_render().lower() diff --git a/homeassistant/components/switch/tplink.py b/homeassistant/components/switch/tplink.py index 7937a4c77ac..06c67dcf5ea 100644 --- a/homeassistant/components/switch/tplink.py +++ b/homeassistant/components/switch/tplink.py @@ -12,13 +12,18 @@ from homeassistant.components.switch import (SwitchDevice, PLATFORM_SCHEMA) from homeassistant.const import (CONF_HOST, CONF_NAME) import homeassistant.helpers.config_validation as cv -REQUIREMENTS = ['https://github.com/gadgetreactor/pyHS100/archive/' - 'ef85f939fd5b07064a0f34dfa673fa7d6140bd95.zip#pyHS100==0.1.2'] +REQUIREMENTS = ['https://github.com/GadgetReactor/pyHS100/archive/' + '1f771b7d8090a91c6a58931532e42730b021cbde.zip#pyHS100==0.2.0'] _LOGGER = logging.getLogger(__name__) DEFAULT_NAME = 'TPLink Switch HS100' +ATTR_CURRENT_CONSUMPTION = 'Current consumption' +ATTR_TOTAL_CONSUMPTION = 'Total consumption' +ATTR_VOLTAGE = 'Voltage' +ATTR_CURRENT = 'Current' + PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({ vol.Required(CONF_HOST): cv.string, vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string, @@ -42,6 +47,11 @@ class SmartPlugSwitch(SwitchDevice): """Initialize the switch.""" self.smartplug = smartplug self._name = name + self._state = None + self._emeter_present = (smartplug.model == 110) + _LOGGER.debug("Setting up TP-Link Smart Plug HS%i", smartplug.model) + # Set up emeter cache + self._emeter_params = {} @property def name(self): @@ -51,7 +61,7 @@ class SmartPlugSwitch(SwitchDevice): @property def is_on(self): """Return true if switch is on.""" - return self.smartplug.state == 'ON' + return self._state == 'ON' def turn_on(self, **kwargs): """Turn the switch on.""" @@ -60,3 +70,24 @@ class SmartPlugSwitch(SwitchDevice): def turn_off(self): """Turn the switch off.""" self.smartplug.state = 'OFF' + + @property + def device_state_attributes(self): + """Return the state attributes of the device.""" + return self._emeter_params + + def update(self): + """Update the TP-Link switch's state.""" + self._state = self.smartplug.state + + if self._emeter_present: + emeter_readings = self.smartplug.get_emeter_realtime() + + self._emeter_params[ATTR_CURRENT_CONSUMPTION] \ + = "%.1f W" % emeter_readings["power"] + self._emeter_params[ATTR_TOTAL_CONSUMPTION] \ + = "%.2f kW" % emeter_readings["total"] + self._emeter_params[ATTR_VOLTAGE] \ + = "%.2f V" % emeter_readings["voltage"] + self._emeter_params[ATTR_CURRENT] \ + = "%.1f A" % emeter_readings["current"] diff --git a/homeassistant/components/switch/zoneminder.py b/homeassistant/components/switch/zoneminder.py new file mode 100644 index 00000000000..ab9adbca97d --- /dev/null +++ b/homeassistant/components/switch/zoneminder.py @@ -0,0 +1,92 @@ +""" +Support for Zoneminder switches. 
+ +For more details about this platform, please refer to the documentation at +https://home-assistant.io/components/switch.zoneminder/ +""" +import logging + +import voluptuous as vol + +from homeassistant.components.switch import (SwitchDevice, PLATFORM_SCHEMA) +from homeassistant.const import (CONF_COMMAND_ON, CONF_COMMAND_OFF) +import homeassistant.helpers.config_validation as cv + +import homeassistant.components.zoneminder as zoneminder + + +PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({ + vol.Required(CONF_COMMAND_ON): cv.string, + vol.Required(CONF_COMMAND_OFF): cv.string, +}) + +_LOGGER = logging.getLogger(__name__) + +DEPENDENCIES = ['zoneminder'] + + +def setup_platform(hass, config, add_devices, discovery_info=None): + """Setup the Zoneminder switch.""" + on_state = config.get(CONF_COMMAND_ON) + off_state = config.get(CONF_COMMAND_OFF) + + switches = [] + + monitors = zoneminder.get_state('api/monitors.json') + for i in monitors['monitors']: + switches.append( + ZMSwitchMonitors( + int(i['Monitor']['Id']), + i['Monitor']['Name'], + on_state, + off_state + ) + ) + + add_devices(switches) + + +class ZMSwitchMonitors(SwitchDevice): + """Representation of a zoneminder switch.""" + + icon = 'mdi:record-rec' + + def __init__(self, monitor_id, monitor_name, on_state, off_state): + """Initialize the switch.""" + self._monitor_id = monitor_id + self._monitor_name = monitor_name + self._on_state = on_state + self._off_state = off_state + self._state = None + + @property + def name(self): + """Return the name of the switch.""" + return "%s State" % self._monitor_name + + def update(self): + """Update the switch value.""" + monitor = zoneminder.get_state( + 'api/monitors/%i.json' % self._monitor_id + ) + current_state = monitor['monitor']['Monitor']['Function'] + self._state = True if current_state == self._on_state else False + + @property + def is_on(self): + """Return True if entity is on.""" + return self._state + + def turn_on(self): + """Turn the entity on.""" + zoneminder.change_state( + 'api/monitors/%i.json' % self._monitor_id, + {'Monitor[Function]': self._on_state} + ) + + def turn_off(self): + """Turn the entity off.""" + zoneminder.change_state( + 'api/monitors/%i.json' % self._monitor_id, + {'Monitor[Function]': self._off_state} + ) diff --git a/homeassistant/components/thermostat/proliphix.py b/homeassistant/components/thermostat/proliphix.py index e54a5a4aa11..f92407b0d16 100644 --- a/homeassistant/components/thermostat/proliphix.py +++ b/homeassistant/components/thermostat/proliphix.py @@ -9,7 +9,7 @@ from homeassistant.components.thermostat import ( from homeassistant.const import ( CONF_HOST, CONF_PASSWORD, CONF_USERNAME, TEMP_FAHRENHEIT) -REQUIREMENTS = ['proliphix==0.3.1'] +REQUIREMENTS = ['proliphix==0.4.0'] def setup_platform(hass, config, add_devices, discovery_info=None): diff --git a/homeassistant/components/updater.py b/homeassistant/components/updater.py index ec91149a87d..40899af9803 100644 --- a/homeassistant/components/updater.py +++ b/homeassistant/components/updater.py @@ -4,62 +4,126 @@ Support to check for available updates.
For more details about this component, please refer to the documentation at https://home-assistant.io/components/updater/ """ +from datetime import datetime, timedelta import logging +import json +import platform +import uuid +import os +# pylint: disable=no-name-in-module,import-error +from distutils.version import StrictVersion import requests import voluptuous as vol from homeassistant.const import __version__ as CURRENT_VERSION from homeassistant.const import ATTR_FRIENDLY_NAME +import homeassistant.util.dt as dt_util from homeassistant.helpers import event +import homeassistant.helpers.config_validation as cv _LOGGER = logging.getLogger(__name__) - +UPDATER_URL = 'https://updater.home-assistant.io/' DOMAIN = 'updater' - ENTITY_ID = 'updater.updater' +ATTR_RELEASE_NOTES = 'release_notes' +UPDATER_UUID_FILE = '.uuid' +CONF_REPORTING = 'reporting' -PYPI_URL = 'https://pypi.python.org/pypi/homeassistant/json' +REQUIREMENTS = ['distro==1.0.0'] -CONFIG_SCHEMA = vol.Schema({ - DOMAIN: vol.Schema({}), -}, extra=vol.ALLOW_EXTRA) +CONFIG_SCHEMA = vol.Schema({DOMAIN: { + vol.Optional(CONF_REPORTING, default=True): cv.boolean +}}, extra=vol.ALLOW_EXTRA) + + +def _create_uuid(hass, filename=UPDATER_UUID_FILE): + """Create UUID and save it in a file.""" + with open(hass.config.path(filename), 'w') as fptr: + _uuid = uuid.uuid4().hex + fptr.write(json.dumps({"uuid": _uuid})) + return _uuid + + +def _load_uuid(hass, filename=UPDATER_UUID_FILE): + """Load UUID from a file, or return None.""" + try: + with open(hass.config.path(filename)) as fptr: + jsonf = json.loads(fptr.read()) + return uuid.UUID(jsonf['uuid'], version=4).hex + except (ValueError, AttributeError): + return None + except FileNotFoundError: + return _create_uuid(hass, filename) def setup(hass, config): """Setup the updater component.""" if 'dev' in CURRENT_VERSION: - _LOGGER.warning("Updater not supported in development version") + # This component only makes sense in release versions + _LOGGER.warning('Updater not supported in development version') return False - def check_newest_version(_=None): - """Check if a new version is available and report if one is.""" - newest = get_newest_version() - - if newest != CURRENT_VERSION and newest is not None: - hass.states.set( - ENTITY_ID, newest, {ATTR_FRIENDLY_NAME: 'Update available'}) + config = config.get(DOMAIN, {}) + huuid = _load_uuid(hass) if config.get(CONF_REPORTING) else None + # Update daily, start 1 hour after startup + _dt = datetime.now() + timedelta(hours=1) event.track_time_change( - hass, check_newest_version, hour=[0, 12], minute=0, second=0) - - check_newest_version() + hass, lambda _: check_newest_version(hass, huuid), + hour=_dt.hour, minute=_dt.minute, second=_dt.second) return True -def get_newest_version(): - """Get the newest Home Assistant version from PyPI.""" - try: - req = requests.get(PYPI_URL) +def check_newest_version(hass, huuid): + """Check if a new version is available and report if one is.""" + newest, releasenotes = get_newest_version(huuid) - return req.json()['info']['version'] + if newest is not None: + if StrictVersion(newest) > StrictVersion(CURRENT_VERSION): + hass.states.set( + ENTITY_ID, newest, {ATTR_FRIENDLY_NAME: 'Update Available', + ATTR_RELEASE_NOTES: releasenotes} + ) + + +def get_newest_version(huuid): + """Get the newest Home Assistant version.""" + info_object = {'uuid': huuid, 'version': CURRENT_VERSION, + 'timezone': dt_util.DEFAULT_TIME_ZONE.zone, + 'os_name': platform.system(), "arch": platform.machine(), + 'python_version': 
platform.python_version(), + 'virtualenv': (os.environ.get('VIRTUAL_ENV') is not None), + 'docker': False} + + if platform.system() == 'Windows': + info_object['os_version'] = platform.win32_ver()[0] + elif platform.system() == 'Darwin': + info_object['os_version'] = platform.mac_ver()[0] + elif platform.system() == 'Linux': + import distro + linux_dist = distro.linux_distribution(full_distribution_name=False) + info_object['distribution'] = linux_dist[0] + info_object['os_version'] = linux_dist[1] + info_object['docker'] = os.path.isfile('/.dockerenv') + + if not huuid: + info_object = {} + + try: + req = requests.post(UPDATER_URL, json=info_object) + res = req.json() + _LOGGER.info(('The latest version is %s. ' + 'Information submitted includes %s'), + res['version'], info_object) + return (res['version'], res['release-notes']) except requests.RequestException: - _LOGGER.exception("Could not contact PyPI to check for updates") + _LOGGER.exception('Could not contact HASS Update to check for updates') return None except ValueError: - _LOGGER.exception("Received invalid response from PyPI") + _LOGGER.exception('Received invalid response from HASS Update') return None except KeyError: - _LOGGER.exception("Response from PyPI did not include version") + _LOGGER.exception('Response from HASS Update did not include version') return None diff --git a/homeassistant/components/verisure.py b/homeassistant/components/verisure.py index e8eabddc440..0c760d899c8 100644 --- a/homeassistant/components/verisure.py +++ b/homeassistant/components/verisure.py @@ -16,7 +16,7 @@ from homeassistant.helpers import discovery from homeassistant.util import Throttle import homeassistant.helpers.config_validation as cv -REQUIREMENTS = ['vsure==0.10.3'] +REQUIREMENTS = ['vsure==0.11.1'] _LOGGER = logging.getLogger(__name__) @@ -27,7 +27,7 @@ CONF_LOCKS = 'locks' CONF_MOUSE = 'mouse' CONF_SMARTPLUGS = 'smartplugs' CONF_THERMOMETERS = 'thermometers' - +CONF_SMARTCAM = 'smartcam' DOMAIN = 'verisure' HUB = None @@ -43,6 +43,7 @@ CONFIG_SCHEMA = vol.Schema({ vol.Optional(CONF_MOUSE, default=True): cv.boolean, vol.Optional(CONF_SMARTPLUGS, default=True): cv.boolean, vol.Optional(CONF_THERMOMETERS, default=True): cv.boolean, + vol.Optional(CONF_SMARTCAM, default=True): cv.boolean, }), }, extra=vol.ALLOW_EXTRA) @@ -55,7 +56,8 @@ def setup(hass, config): if not HUB.login(): return False - for component in ('sensor', 'switch', 'alarm_control_panel', 'lock'): + for component in ('sensor', 'switch', 'alarm_control_panel', 'lock', + 'camera'): discovery.load_platform(hass, component, DOMAIN, {}, config) return True @@ -72,6 +74,8 @@ class VerisureHub(object): self.climate_status = {} self.mouse_status = {} self.smartplug_status = {} + self.smartcam_status = {} + self.smartcam_dict = {} self.config = domain_config self._verisure = verisure @@ -133,6 +137,20 @@ class VerisureHub(object): self.my_pages.smartplug.get, self.smartplug_status) + @Throttle(timedelta(seconds=30)) + def update_smartcam(self): + """Update the status of the smartcam.""" + self.update_component( + self.my_pages.smartcam.get, + self.smartcam_status) + + @Throttle(timedelta(seconds=30)) + def update_smartcam_imagelist(self): + """Update the imagelist for the camera.""" + _LOGGER.debug('Running update imagelist') + self.smartcam_dict = self.my_pages.smartcam.get_imagelist() + _LOGGER.debug('New dict: %s', self.smartcam_dict) + @property def available(self): """Return True if hub is available.""" diff --git a/homeassistant/components/wink.py 
b/homeassistant/components/wink.py index 2e5e7ebcfb4..22c6c992838 100644 --- a/homeassistant/components/wink.py +++ b/homeassistant/components/wink.py @@ -49,6 +49,10 @@ CONFIG_SCHEMA = vol.Schema({ }) }, extra=vol.ALLOW_EXTRA) +WINK_COMPONENTS = [ + 'binary_sensor', 'sensor', 'light', 'switch', 'lock', 'cover' +] + def setup(hass, config): """Setup the Wink component.""" @@ -78,19 +82,8 @@ def setup(hass, config): SUBSCRIPTION_HANDLER.set_heartbeat(120) # Load components for the devices in Wink that we support - for component_name, func_exists in ( - ('light', pywink.get_bulbs), - ('switch', lambda: pywink.get_switches or pywink.get_sirens or - pywink.get_powerstrip_outlets), - ('binary_sensor', pywink.get_sensors), - ('sensor', lambda: pywink.get_sensors or pywink.get_eggtrays), - ('lock', pywink.get_locks), - ('cover', pywink.get_shades), - ('cover', pywink.get_garage_doors)): - - if func_exists(): - discovery.load_platform(hass, component_name, DOMAIN, {}, config) - + for component in WINK_COMPONENTS: + discovery.load_platform(hass, component, DOMAIN, {}, config) return True diff --git a/homeassistant/components/zoneminder.py b/homeassistant/components/zoneminder.py new file mode 100644 index 00000000000..0ed985fb427 --- /dev/null +++ b/homeassistant/components/zoneminder.py @@ -0,0 +1,119 @@ +""" +Support for Zoneminder. + +For more details about this component, please refer to the documentation at +https://home-assistant.io/components/zoneminder/ +""" + +import logging +import json +from urllib.parse import urljoin + +import requests +import voluptuous as vol + +import homeassistant.helpers.config_validation as cv +from homeassistant.const import ( + CONF_PATH, CONF_HOST, CONF_PASSWORD, CONF_USERNAME) + + +_LOGGER = logging.getLogger(__name__) + +REQUIREMENTS = [] + +DOMAIN = 'zoneminder' + +CONFIG_SCHEMA = vol.Schema({ + DOMAIN: vol.Schema({ + vol.Required(CONF_HOST): cv.string, + vol.Optional(CONF_PATH, default="/zm/"): cv.string, + vol.Optional(CONF_USERNAME): cv.string, + vol.Optional(CONF_PASSWORD): cv.string + }) +}, extra=vol.ALLOW_EXTRA) + +LOGIN_RETRIES = 2 +ZM = {} + + +def setup(hass, config): + """Setup the zoneminder platform.""" + global ZM + ZM = {} + + conf = config[DOMAIN] + url = urljoin("http://" + conf[CONF_HOST], conf[CONF_PATH]) + username = conf.get(CONF_USERNAME, None) + password = conf.get(CONF_PASSWORD, None) + + ZM['url'] = url + ZM['username'] = username + ZM['password'] = password + + return login() + + +# pylint: disable=no-member +def login(): + """Log in to the zoneminder API.""" + _LOGGER.debug("Attempting to log in to zoneminder") + + login_post = {'view': 'console', 'action': 'login'} + if ZM['username']: + login_post['username'] = ZM['username'] + if ZM['password']: + login_post['password'] = ZM['password'] + + req = requests.post(ZM['url'] + '/index.php', data=login_post) + ZM['cookies'] = req.cookies + + # A login call returns a 200 response on both failure and success. + # The only way to tell if you logged in correctly is to issue an API call. + req = requests.get( + ZM['url'] + 'api/host/getVersion.json', + cookies=ZM['cookies'] + ) + + if req.status_code != requests.codes.ok: + _LOGGER.error("Connection error logging into ZoneMinder") + return False + + return True + + +# pylint: disable=no-member +def get_state(api_url): + """Get a state from the zoneminder API service.""" + # Since the API uses sessions that expire, sometimes we need + # to re-auth if the call fails.
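+ # Note: the retry loop below relies on Python's for/else semantics: the + # else branch runs only if no attempt returned an OK response (no break).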
+ for _ in range(LOGIN_RETRIES): + req = requests.get(urljoin(ZM['url'], api_url), cookies=ZM['cookies']) + + if req.status_code != requests.codes.ok: + login() + else: + break + else: + _LOGGER.exception("Unable to get API response") + + return json.loads(req.text) + + +# pylint: disable=no-member +def change_state(api_url, post_data): + """Update a state using the Zoneminder API.""" + for _ in range(LOGIN_RETRIES): + req = requests.post( + urljoin(ZM['url'], api_url), + data=post_data, + cookies=ZM['cookies']) + + if req.status_code != requests.codes.ok: + login() + else: + break + + else: + _LOGGER.exception("Unable to get API response") + + return json.loads(req.text) diff --git a/homeassistant/components/zwave/__init__.py b/homeassistant/components/zwave/__init__.py old mode 100644 new mode 100755 index 32d4f42c1a1..33dfa690632 --- a/homeassistant/components/zwave/__init__.py +++ b/homeassistant/components/zwave/__init__.py @@ -31,11 +31,13 @@ CONF_POLLING_INTENSITY = 'polling_intensity' CONF_POLLING_INTERVAL = 'polling_interval' CONF_USB_STICK_PATH = 'usb_path' CONF_CONFIG_PATH = 'config_path' +CONF_IGNORED = 'ignored' DEFAULT_CONF_AUTOHEAL = True DEFAULT_CONF_USB_STICK_PATH = '/zwaveusbstick' DEFAULT_POLLING_INTERVAL = 60000 DEFAULT_DEBUG = True +DEFAULT_CONF_IGNORED = False DOMAIN = 'zwave' NETWORK = None @@ -125,15 +127,23 @@ RENAME_NODE_SCHEMA = vol.Schema({ vol.Required(const.ATTR_NAME): cv.string, }) SET_CONFIG_PARAMETER_SCHEMA = vol.Schema({ - vol.Required(ATTR_ENTITY_ID): cv.entity_id, + vol.Required(const.ATTR_NODE_ID): vol.Coerce(int), vol.Required(const.ATTR_CONFIG_PARAMETER): vol.Coerce(int), vol.Required(const.ATTR_CONFIG_VALUE): vol.Coerce(int), vol.Optional(const.ATTR_CONFIG_SIZE): vol.Coerce(int) }) +CHANGE_ASSOCIATION_SCHEMA = vol.Schema({ + vol.Required(const.ATTR_ASSOCIATION): cv.string, + vol.Required(const.ATTR_NODE_ID): vol.Coerce(int), + vol.Required(const.ATTR_TARGET_NODE_ID): vol.Coerce(int), + vol.Required(const.ATTR_GROUP): vol.Coerce(int), + vol.Optional(const.ATTR_INSTANCE, default=0x00): vol.Coerce(int) +}) CUSTOMIZE_SCHEMA = vol.Schema({ vol.Optional(CONF_POLLING_INTENSITY): - vol.All(cv.positive_int, vol.In([0, 1, 2, 3, 4, 5])), + vol.All(cv.positive_int), + vol.Optional(CONF_IGNORED, default=DEFAULT_CONF_IGNORED): cv.boolean, }) CONFIG_SCHEMA = vol.Schema({ @@ -181,7 +191,8 @@ def _object_id(value): The object_id contains node_id and value instance id to not collide with other entity_ids. 
""" - object_id = "{}_{}".format(slugify(_value_name(value)), value.node.node_id) + object_id = "{}_{}_{}".format(slugify(_value_name(value)), + value.node.node_id, value.index) # Add the instance id if there is more than one instance for the value if value.instance > 1: @@ -236,6 +247,7 @@ def setup(hass, config): from pydispatch import dispatcher from openzwave.option import ZWaveOption from openzwave.network import ZWaveNetwork + from openzwave.group import ZWaveGroup default_zwave_config_path = os.path.join(os.path.dirname( libopenzwave.__file__), 'config') @@ -323,6 +335,11 @@ def setup(hass, config): name = "{}.{}".format(component, _object_id(value)) node_config = customize.get(name, {}) + + if node_config.get(CONF_IGNORED): + _LOGGER.info("Ignoring device %s", name) + return + polling_intensity = convert( node_config.get(CONF_POLLING_INTENSITY), int) if polling_intensity: @@ -413,7 +430,8 @@ def setup(hass, config): """Stop Z-Wave network.""" _LOGGER.info("Stopping ZWave network.") NETWORK.stop() - hass.bus.fire(const.EVENT_NETWORK_STOP) + if hass.state == 'RUNNING': + hass.bus.fire(const.EVENT_NETWORK_STOP) def rename_node(service): """Rename a node.""" @@ -427,8 +445,7 @@ def setup(hass, config): def set_config_parameter(service): """Set a config parameter to a node.""" - state = hass.states.get(service.data.get(ATTR_ENTITY_ID)) - node_id = state.attributes.get(const.ATTR_NODE_ID) + node_id = service.data.get(const.ATTR_NODE_ID) node = NETWORK.nodes[node_id] param = service.data.get(const.ATTR_CONFIG_PARAMETER) value = service.data.get(const.ATTR_CONFIG_VALUE) @@ -437,6 +454,26 @@ def setup(hass, config): _LOGGER.info("Setting config parameter %s on Node %s " "with value %s and size=%s", param, node_id, value, size) + def change_association(service): + """Change an association in the zwave network.""" + association_type = service.data.get(const.ATTR_ASSOCIATION) + node_id = service.data.get(const.ATTR_NODE_ID) + target_node_id = service.data.get(const.ATTR_TARGET_NODE_ID) + group = service.data.get(const.ATTR_GROUP) + instance = service.data.get(const.ATTR_INSTANCE) + + node = ZWaveGroup(group, NETWORK, node_id) + if association_type == 'add': + node.add_association(target_node_id, instance) + _LOGGER.info("Adding association for node:%s in group:%s " + "target node:%s, instance=%s", node_id, group, + target_node_id, instance) + if association_type == 'remove': + node.remove_association(target_node_id, instance) + _LOGGER.info("Removing association for node:%s in group:%s " + "target node:%s, instance=%s", node_id, group, + target_node_id, instance) + def start_zwave(_service_or_event): """Startup Z-Wave network.""" _LOGGER.info("Starting ZWave network.") @@ -502,6 +539,11 @@ def setup(hass, config): descriptions[ const.SERVICE_SET_CONFIG_PARAMETER], schema=SET_CONFIG_PARAMETER_SCHEMA) + hass.services.register(DOMAIN, const.SERVICE_CHANGE_ASSOCIATION, + change_association, + descriptions[ + const.SERVICE_CHANGE_ASSOCIATION], + schema=CHANGE_ASSOCIATION_SCHEMA) # Setup autoheal if autoheal: diff --git a/homeassistant/components/zwave/const.py b/homeassistant/components/zwave/const.py index 6b5c5fc55e5..698dad8e063 100644 --- a/homeassistant/components/zwave/const.py +++ b/homeassistant/components/zwave/const.py @@ -1,6 +1,10 @@ """Z-Wave Constants.""" ATTR_NODE_ID = "node_id" +ATTR_TARGET_NODE_ID = "target_node_id" +ATTR_ASSOCIATION = "association" +ATTR_INSTANCE = "instance" +ATTR_GROUP = "group" ATTR_VALUE_ID = "value_id" ATTR_OBJECT_ID = "object_id" ATTR_NAME = "name" @@ -11,6 
+15,7 @@ ATTR_CONFIG_SIZE = "size" ATTR_CONFIG_VALUE = "value" NETWORK_READY_WAIT_SECS = 30 +SERVICE_CHANGE_ASSOCIATION = "change_association" SERVICE_ADD_NODE = "add_node" SERVICE_ADD_NODE_SECURE = "add_node_secure" SERVICE_REMOVE_NODE = "remove_node" diff --git a/homeassistant/components/zwave/services.yaml b/homeassistant/components/zwave/services.yaml index 2542502badb..cfe2edab5c9 100644 --- a/homeassistant/components/zwave/services.yaml +++ b/homeassistant/components/zwave/services.yaml @@ -1,3 +1,17 @@ +change_association: + description: Change an association in the Z-Wave network. + fields: + association: + description: Specify add or remove association. + node_id: + description: Node id of the node to set association for. + target_node_id: + description: Node id of the node to associate to. + group: + description: Group number to set association for. + instance: + description: (Optional) Instance of association. Defaults to 0. + add_node: description: Add a new node to the Z-Wave network. Refer to OZW.log for details. @@ -16,8 +30,8 @@ remove_node: set_config_parameter: description: Set a config parameter to a node on the Z-Wave network. fields: - entity_id: - description: Name of entity to set config parameter to. + node_id: + description: Node id of the device to set config parameter to (integer). parameter: description: Parameter number to set (integer). value: diff --git a/homeassistant/const.py b/homeassistant/const.py index 31d1c50f82e..efb11cdffbf 100644 --- a/homeassistant/const.py +++ b/homeassistant/const.py @@ -1,8 +1,8 @@ # coding: utf-8 """Constants used by Home Assistant components.""" MAJOR_VERSION = 0 -MINOR_VERSION = 30 -PATCH_VERSION = '2' +MINOR_VERSION = 31 +PATCH_VERSION = '0' __short_version__ = '{}.{}'.format(MAJOR_VERSION, MINOR_VERSION) __version__ = '{}.{}'.format(__short_version__, PATCH_VERSION) REQUIRED_PYTHON_VER = (3, 4, 2) @@ -104,6 +104,7 @@ CONF_NAME = 'name' CONF_OFFSET = 'offset' CONF_OPTIMISTIC = 'optimistic' CONF_PASSWORD = 'password' +CONF_PATH = 'path' CONF_PAYLOAD = 'payload' CONF_PAYLOAD_OFF = 'payload_off' CONF_PAYLOAD_ON = 'payload_on' @@ -173,6 +174,9 @@ STATE_UNLOCKED = 'unlocked' STATE_UNAVAILABLE = 'unavailable' # #### STATE AND EVENT ATTRIBUTES #### +# Attribution +ATTR_ATTRIBUTION = 'attribution' + # Contains current time for a TIME_CHANGED event ATTR_NOW = 'now' diff --git a/homeassistant/core.py b/homeassistant/core.py index ebd24558a40..bd59db59f05 100644 --- a/homeassistant/core.py +++ b/homeassistant/core.py @@ -8,7 +8,6 @@ of entities and react to changes.
import asyncio from concurrent.futures import ThreadPoolExecutor import enum -import functools as ft import logging import os import re @@ -137,8 +136,8 @@ class HomeAssistant(object): self.executor = ThreadPoolExecutor(max_workers=5) self.loop.set_default_executor(self.executor) self.loop.set_exception_handler(self._async_exception_handler) - self.pool = pool = create_worker_pool() - self.bus = EventBus(pool, self.loop) + self.pool = create_worker_pool() + self.bus = EventBus(self) self.services = ServiceRegistry(self.bus, self.add_job, self.loop) self.states = StateMachine(self.bus, self.loop) self.config = Config() # type: Config @@ -218,8 +217,8 @@ class HomeAssistant(object): """ # pylint: disable=protected-access self.loop._thread_ident = threading.get_ident() - async_create_timer(self) - async_monitor_worker_pool(self) + _async_create_timer(self) + _async_monitor_worker_pool(self) self.bus.async_fire(EVENT_HOMEASSISTANT_START) yield from self.loop.run_in_executor(None, self.pool.block_till_done) self.state = CoreState.running @@ -235,9 +234,12 @@ class HomeAssistant(object): """ self.pool.add_job(priority, (target,) + args) + @callback def async_add_job(self, target: Callable[..., None], *args: Any): """Add a job from within the eventloop. + This method must be run in the event loop. + target: target to call. args: parameters for method to call. """ @@ -248,9 +250,12 @@ class HomeAssistant(object): else: self.add_job(target, *args) + @callback def async_run_job(self, target: Callable[..., None], *args: Any): """Run a job from within the event loop. + This method must be run in the event loop. + target: target to call. args: parameters for method to call. """ @@ -369,7 +374,10 @@ class Event(object): self.time_fired = time_fired or dt_util.utcnow() def as_dict(self): - """Create a dict representation of this Event.""" + """Create a dict representation of this Event. + + Async friendly. + """ return { 'event_type': self.event_type, 'data': dict(self.data), @@ -400,13 +408,12 @@ class Event(object): class EventBus(object): """Allows firing of and listening for events.""" - def __init__(self, pool: util.ThreadPool, - loop: asyncio.AbstractEventLoop) -> None: + def __init__(self, hass: HomeAssistant) -> None: """Initialize a new event bus.""" self._listeners = {} - self._pool = pool - self._loop = loop + self._hass = hass + @callback def async_listeners(self): """Dict with events and the number of listeners. @@ -419,23 +426,25 @@ class EventBus(object): def listeners(self): """Dict with events and the number of listeners.""" return run_callback_threadsafe( - self._loop, self.async_listeners + self._hass.loop, self.async_listeners ).result() def fire(self, event_type: str, event_data=None, origin=EventOrigin.local): """Fire an event.""" - if not self._pool.running: - raise HomeAssistantError('Home Assistant has shut down.') - - self._loop.call_soon_threadsafe(self.async_fire, event_type, - event_data, origin) + self._hass.loop.call_soon_threadsafe(self.async_fire, event_type, + event_data, origin) + @callback def async_fire(self, event_type: str, event_data=None, origin=EventOrigin.local, wait=False): """Fire an event. This method must be run in the event loop. """ + if event_type != EVENT_HOMEASSISTANT_STOP and \ + self._hass.state == CoreState.stopping: + raise HomeAssistantError('Home Assistant is shutting down.') + # Copy the list of the current listeners because some listeners # remove themselves as a listener while being executed which # causes the iterator to be confused. 
@@ -450,20 +459,8 @@ class EventBus(object): if not listeners: return - job_priority = JobPriority.from_event_type(event_type) - - sync_jobs = [] for func in listeners: - if asyncio.iscoroutinefunction(func): - self._loop.create_task(func(event)) - elif is_callback(func): - self._loop.call_soon(func, event) - else: - sync_jobs.append((job_priority, (func, event))) - - # Send all the sync jobs at once - if sync_jobs: - self._pool.add_many_jobs(sync_jobs) + self._hass.async_add_job(func, event) def listen(self, event_type, listener): """Listen for all events or events of a specific type. @@ -471,16 +468,17 @@ class EventBus(object): To listen to all events specify the constant ``MATCH_ALL`` as event_type. """ - future = run_callback_threadsafe( - self._loop, self.async_listen, event_type, listener) - future.result() + async_remove_listener = run_callback_threadsafe( + self._hass.loop, self.async_listen, event_type, listener).result() def remove_listener(): """Remove the listener.""" - self._remove_listener(event_type, listener) + run_callback_threadsafe( + self._hass.loop, async_remove_listener).result() return remove_listener + @callback def async_listen(self, event_type, listener): """Listen for all events or events of a specific type. @@ -496,7 +494,7 @@ class EventBus(object): def remove_listener(): """Remove the listener.""" - self.async_remove_listener(event_type, listener) + self._async_remove_listener(event_type, listener) return remove_listener @@ -508,26 +506,18 @@ class EventBus(object): Returns function to unsubscribe the listener. """ - @ft.wraps(listener) - def onetime_listener(event): - """Remove listener from eventbus and then fire listener.""" - if hasattr(onetime_listener, 'run'): - return - # Set variable so that we will never run twice. - # Because the event bus might have to wait till a thread comes - # available to execute this listener it might occur that the - # listener gets lined up twice to be executed. - # This will make sure the second time it does nothing. - setattr(onetime_listener, 'run', True) + async_remove_listener = run_callback_threadsafe( + self._hass.loop, self.async_listen_once, event_type, listener, + ).result() - remove_listener() - - listener(event) - - remove_listener = self.listen(event_type, onetime_listener) + def remove_listener(): + """Remove the listener.""" + run_callback_threadsafe( + self._hass.loop, async_remove_listener).result() return remove_listener + @callback def async_listen_once(self, event_type, listener): """Listen once for event of a specific type. @@ -538,8 +528,7 @@ class EventBus(object): This method must be run in the event loop. """ - @ft.wraps(listener) - @asyncio.coroutine + @callback def onetime_listener(event): """Remove listener from eventbus and then fire listener.""" if hasattr(onetime_listener, 'run'): @@ -550,34 +539,14 @@ class EventBus(object): # multiple times as well. # This will make sure the second time it does nothing. 
setattr(onetime_listener, 'run', True) + self._async_remove_listener(event_type, onetime_listener) - self.async_remove_listener(event_type, onetime_listener) + self._hass.async_run_job(listener, event) - if asyncio.iscoroutinefunction(listener): - yield from listener(event) - else: - job_priority = JobPriority.from_event_type(event.event_type) - self._pool.add_job(job_priority, (listener, event)) + return self.async_listen(event_type, onetime_listener) - self.async_listen(event_type, onetime_listener) - - return onetime_listener - - def remove_listener(self, event_type, listener): - """Remove a listener of a specific event_type. (DEPRECATED 0.28).""" - _LOGGER.warning('bus.remove_listener has been deprecated. Please use ' - 'the function returned from calling listen.') - self._remove_listener(event_type, listener) - - def _remove_listener(self, event_type, listener): - """Remove a listener of a specific event_type.""" - future = run_callback_threadsafe( - self._loop, - self.async_remove_listener, event_type, listener - ) - future.result() - - def async_remove_listener(self, event_type, listener): + @callback + def _async_remove_listener(self, event_type, listener): """Remove a listener of a specific event_type. This method must be run in the event loop. @@ -644,6 +613,8 @@ class State(object): def as_dict(self): """Return a dict representation of the State. + Async friendly. + To be used for JSON serialization. Ensures: state == State.from_dict(state.as_dict()) """ @@ -657,6 +628,8 @@ class State(object): def from_dict(cls, json_dict): """Initialize a state from a dict. + Async friendly. + Ensures: state == State.from_json_dict(state.to_json_dict()) """ if not (json_dict and 'entity_id' in json_dict and @@ -709,8 +682,12 @@ class StateMachine(object): ) return future.result() + @callback def async_entity_ids(self, domain_filter=None): - """List of entity ids that are being tracked.""" + """List of entity ids that are being tracked. + + This method must be run in the event loop. + """ if domain_filter is None: return list(self._states.keys()) @@ -723,6 +700,7 @@ class StateMachine(object): """Create a list of all states.""" return run_callback_threadsafe(self._loop, self.async_all).result() + @callback def async_all(self): """Create a list of all states. @@ -763,6 +741,7 @@ class StateMachine(object): return run_callback_threadsafe( self._loop, self.async_remove, entity_id).result() + @callback def async_remove(self, entity_id): """Remove the state of an entity. @@ -800,6 +779,7 @@ class StateMachine(object): self.async_set, entity_id, new_state, attributes, force_update, ).result() + @callback def async_set(self, entity_id, new_state, attributes=None, force_update=False): """Set the state of an entity, add entity if it does not exist. @@ -908,14 +888,21 @@ class ServiceRegistry(object): self._loop, self.async_services, ).result() + @callback def async_services(self): - """Dict with per domain a list of available services.""" + """Dict with per domain a list of available services. + + This method must be run in the event loop. + """ return {domain: {key: value.as_dict() for key, value in self._services[domain].items()} for domain in self._services} def has_service(self, domain, service): - """Test if specified service exists.""" + """Test if specified service exists. + + Async friendly. 
+ """ return service.lower() in self._services.get(domain.lower(), []) # pylint: disable=too-many-arguments @@ -935,6 +922,7 @@ class ServiceRegistry(object): schema ).result() + @callback def async_register(self, domain, service, service_func, description=None, schema=None): """ @@ -985,7 +973,7 @@ class ServiceRegistry(object): self._loop ).result() - @callback + @asyncio.coroutine def async_call(self, domain, service, service_data=None, blocking=False): """ Call a service. @@ -1121,18 +1109,27 @@ class Config(object): self.config_dir = None def distance(self: object, lat: float, lon: float) -> float: - """Calculate distance from Home Assistant.""" + """Calculate distance from Home Assistant. + + Async friendly. + """ return self.units.length( location.distance(self.latitude, self.longitude, lat, lon), 'm') def path(self, *path): - """Generate path to the file within the config dir.""" + """Generate path to the file within the config dir. + + Async friendly. + """ if self.config_dir is None: raise HomeAssistantError("config_dir is not set") return os.path.join(self.config_dir, *path) def as_dict(self): - """Create a dict representation of this dict.""" + """Create a dict representation of this dict. + + Async friendly. + """ time_zone = self.time_zone or dt_util.UTC return { @@ -1147,7 +1144,7 @@ class Config(object): } -def async_create_timer(hass, interval=TIMER_INTERVAL): +def _async_create_timer(hass, interval=TIMER_INTERVAL): """Create a timer that will start on HOMEASSISTANT_START.""" stop_event = asyncio.Event(loop=hass.loop) @@ -1230,7 +1227,7 @@ def create_worker_pool(worker_count=None): return util.ThreadPool(job_handler, worker_count) -def async_monitor_worker_pool(hass): +def _async_monitor_worker_pool(hass): """Create a monitor for the thread pool to check if pool is misbehaving.""" busy_threshold = hass.pool.worker_count * 3 diff --git a/homeassistant/helpers/config_validation.py b/homeassistant/helpers/config_validation.py index 1d368a37d3c..4c6efe11001 100644 --- a/homeassistant/helpers/config_validation.py +++ b/homeassistant/helpers/config_validation.py @@ -3,6 +3,7 @@ from collections import OrderedDict from datetime import timedelta import os from urllib.parse import urlparse +from socket import _GLOBAL_DEFAULT_TIMEOUT from typing import Any, Union, TypeVar, Callable, Sequence, Dict @@ -306,6 +307,24 @@ def time_zone(value): weekdays = vol.All(ensure_list, [vol.In(WEEKDAYS)]) +def socket_timeout(value): + """Validate timeout float > 0.0. + + None coerced to socket._GLOBAL_DEFAULT_TIMEOUT bare object. + """ + if value is None: + return _GLOBAL_DEFAULT_TIMEOUT + else: + try: + float_value = float(value) + if float_value > 0.0: + return float_value + raise vol.Invalid('Invalid socket timeout value.' 
+ ' float > 0.0 required.') + except Exception as _: + raise vol.Invalid('Invalid socket timeout: {err}'.format(err=_)) + + # pylint: disable=no-value-for-parameter def url(value: Any) -> str: """Validate an URL.""" @@ -358,7 +377,8 @@ def key_dependency(key, dependency): PLATFORM_SCHEMA = vol.Schema({ vol.Required(CONF_PLATFORM): string, - CONF_SCAN_INTERVAL: vol.All(vol.Coerce(int), vol.Range(min=1)), + vol.Optional(CONF_SCAN_INTERVAL): + vol.All(vol.Coerce(int), vol.Range(min=1)), }, extra=vol.ALLOW_EXTRA) EVENT_SCHEMA = vol.Schema({ diff --git a/homeassistant/helpers/discovery.py b/homeassistant/helpers/discovery.py index b0cf8af0747..eb36fc9e1d5 100644 --- a/homeassistant/helpers/discovery.py +++ b/homeassistant/helpers/discovery.py @@ -1,8 +1,9 @@ """Helper methods to help with platform discovery.""" -from homeassistant import bootstrap +from homeassistant import bootstrap, core from homeassistant.const import ( ATTR_DISCOVERED, ATTR_SERVICE, EVENT_PLATFORM_DISCOVERED) +from homeassistant.util.async import run_callback_threadsafe EVENT_LOAD_PLATFORM = 'load_platform.{}' ATTR_PLATFORM = 'platform' @@ -43,8 +44,19 @@ def discover(hass, service, discovered=None, component=None, hass_config=None): def listen_platform(hass, component, callback): """Register a platform loader listener.""" + run_callback_threadsafe( + hass.loop, async_listen_platform, hass, component, callback + ).result() + + +def async_listen_platform(hass, component, callback): + """Register a platform loader listener. + + This method must be run in the event loop. + """ service = EVENT_LOAD_PLATFORM.format(component) + @core.callback def discovery_platform_listener(event): """Listen for platform discovery events.""" if event.data.get(ATTR_SERVICE) != service: @@ -55,9 +67,12 @@ def listen_platform(hass, component, callback): if not platform: return - callback(platform, event.data.get(ATTR_DISCOVERED)) + hass.async_run_job( + callback, platform, event.data.get(ATTR_DISCOVERED) + ) - hass.bus.listen(EVENT_PLATFORM_DISCOVERED, discovery_platform_listener) + hass.bus.async_listen( + EVENT_PLATFORM_DISCOVERED, discovery_platform_listener) def load_platform(hass, component, platform, discovered=None, diff --git a/homeassistant/helpers/entity.py b/homeassistant/helpers/entity.py index 99384764b5b..08f93b3697b 100644 --- a/homeassistant/helpers/entity.py +++ b/homeassistant/helpers/entity.py @@ -12,7 +12,8 @@ from homeassistant.const import ( from homeassistant.core import HomeAssistant from homeassistant.exceptions import NoEntitySpecifiedError from homeassistant.util import ensure_unique_string, slugify -from homeassistant.util.async import run_coroutine_threadsafe +from homeassistant.util.async import ( + run_coroutine_threadsafe, run_callback_threadsafe) # Entity attributes that we will overwrite _OVERWRITE = {} # type: Dict[str, Any] @@ -27,15 +28,27 @@ def generate_entity_id(entity_id_format: str, name: Optional[str], if current_ids is None: if hass is None: raise ValueError("Missing required parameter currentids or hass") + else: + return run_callback_threadsafe( + hass.loop, async_generate_entity_id, entity_id_format, name, + current_ids, hass + ).result() - current_ids = hass.states.entity_ids() + name = (name or DEVICE_DEFAULT_NAME).lower() - return async_generate_entity_id(entity_id_format, name, current_ids) + return ensure_unique_string( + entity_id_format.format(slugify(name)), current_ids) def async_generate_entity_id(entity_id_format: str, name: Optional[str], - current_ids: Optional[List[str]]=None) -> str: + 
current_ids: Optional[List[str]]=None, + hass: Optional[HomeAssistant]=None) -> str: """Generate a unique entity ID based on given entity IDs or used IDs.""" + if current_ids is None: + if hass is None: + raise ValueError("Missing required parameter currentids or hass") + + current_ids = hass.states.async_entity_ids() name = (name or DEVICE_DEFAULT_NAME).lower() return ensure_unique_string( @@ -238,7 +251,17 @@ class Entity(object): def remove(self) -> None: """Remove entity from HASS.""" - self.hass.states.remove(self.entity_id) + run_coroutine_threadsafe( + self.async_remove(), self.hass.loop + ).result() + + @asyncio.coroutine + def async_remove(self) -> None: + """Remove entity from async HASS. + + This method must be run in the event loop. + """ + self.hass.states.async_remove(self.entity_id) def _attr_setter(self, name, typ, attr, attrs): """Helper method to populate attributes based on properties.""" diff --git a/homeassistant/helpers/entity_component.py b/homeassistant/helpers/entity_component.py index 3146d703d19..2576970065f 100644 --- a/homeassistant/helpers/entity_component.py +++ b/homeassistant/helpers/entity_component.py @@ -1,5 +1,5 @@ """Helpers for components that manage entities.""" -from threading import Lock +import asyncio from homeassistant import config as conf_util from homeassistant.bootstrap import (prepare_setup_platform, @@ -7,12 +7,15 @@ from homeassistant.const import ( ATTR_ENTITY_ID, CONF_SCAN_INTERVAL, CONF_ENTITY_NAMESPACE, DEVICE_DEFAULT_NAME) +from homeassistant.core import callback from homeassistant.exceptions import HomeAssistantError from homeassistant.loader import get_component from homeassistant.helpers import config_per_platform, discovery -from homeassistant.helpers.entity import generate_entity_id -from homeassistant.helpers.event import track_utc_time_change +from homeassistant.helpers.entity import async_generate_entity_id +from homeassistant.helpers.event import async_track_utc_time_change from homeassistant.helpers.service import extract_entity_ids +from homeassistant.util.async import ( + run_callback_threadsafe, run_coroutine_threadsafe) DEFAULT_SCAN_INTERVAL = 15 @@ -37,11 +40,11 @@ class EntityComponent(object): self.group = None self.config = None - self.lock = Lock() self._platforms = { 'core': EntityPlatform(self, self.scan_interval, None), } + self.async_add_entities = self._platforms['core'].async_add_entities self.add_entities = self._platforms['core'].add_entities def setup(self, config): @@ -50,20 +53,38 @@ class EntityComponent(object): Loads the platforms from the config and will listen for supported discovered platforms. """ + run_coroutine_threadsafe( + self.async_setup(config), self.hass.loop + ).result() + + @asyncio.coroutine + def async_setup(self, config): + """Set up a full entity component. + + Loads the platforms from the config and will listen for supported + discovered platforms. + + This method must be run in the event loop.
+ """ self.config = config # Look in config for Domain, Domain 2, Domain 3 etc and load them + tasks = [] for p_type, p_config in config_per_platform(config, self.domain): - self._setup_platform(p_type, p_config) + tasks.append(self._async_setup_platform(p_type, p_config)) + + yield from asyncio.gather(*tasks, loop=self.hass.loop) # Generic discovery listener for loading platform dynamically # Refer to: homeassistant.components.discovery.load_platform() + @callback def component_platform_discovered(platform, info): """Callback to load a platform.""" - self._setup_platform(platform, {}, info) + self.hass.loop.create_task( + self._async_setup_platform(platform, {}, info)) - discovery.listen_platform(self.hass, self.domain, - component_platform_discovered) + discovery.async_listen_platform( + self.hass, self.domain, component_platform_discovered) def extract_from_service(self, service): """Extract all known entities from a service call. @@ -71,19 +92,36 @@ class EntityComponent(object): Will return all entities if no entities specified in call. Will return an empty list if entities specified but unknown. """ - with self.lock: - if ATTR_ENTITY_ID not in service.data: - return list(self.entities.values()) + return run_callback_threadsafe( + self.hass.loop, self.async_extract_from_service, service + ).result() - return [self.entities[entity_id] for entity_id - in extract_entity_ids(self.hass, service) - if entity_id in self.entities] + def async_extract_from_service(self, service): + """Extract all known entities from a service call. - def _setup_platform(self, platform_type, platform_config, - discovery_info=None): - """Setup a platform for this component.""" - platform = prepare_setup_platform( - self.hass, self.config, self.domain, platform_type) + Will return all entities if no entities specified in call. + Will return an empty list if entities specified but unknown. + + This method must be run in the event loop. + """ + if ATTR_ENTITY_ID not in service.data: + return list(self.entities.values()) + + return [self.entities[entity_id] for entity_id + in extract_entity_ids(self.hass, service) + if entity_id in self.entities] + + @asyncio.coroutine + def _async_setup_platform(self, platform_type, platform_config, + discovery_info=None): + """Setup a platform for this component. + + This method must be run in the event loop. + """ + platform = yield from self.hass.loop.run_in_executor( + None, prepare_setup_platform, self.hass, self.config, self.domain, + platform_type + ) if platform is None: return @@ -102,9 +140,16 @@ class EntityComponent(object): entity_platform = self._platforms[key] try: - platform.setup_platform(self.hass, platform_config, - entity_platform.add_entities, - discovery_info) + if getattr(platform, 'async_setup_platform', None): + yield from platform.async_setup_platform( + self.hass, platform_config, + entity_platform.async_add_entities, discovery_info + ) + else: + yield from self.hass.loop.run_in_executor( + None, platform.setup_platform, self.hass, platform_config, + entity_platform.add_entities, discovery_info + ) self.hass.config.components.append( '{}.{}'.format(self.domain, platform_type)) @@ -114,6 +159,16 @@ class EntityComponent(object): def add_entity(self, entity, platform=None): """Add entity to component.""" + return run_coroutine_threadsafe( + self.async_add_entity(entity, platform), self.hass.loop + ).result() + + @asyncio.coroutine + def async_add_entity(self, entity, platform=None): + """Add entity to component. 
+ + This method must be run in the event loop. + """ if entity is None or entity in self.entities.values(): return False @@ -126,40 +181,60 @@ class EntityComponent(object): object_id = '{} {}'.format(platform.entity_namespace, object_id) - entity.entity_id = generate_entity_id( + entity.entity_id = async_generate_entity_id( self.entity_id_format, object_id, self.entities.keys()) self.entities[entity.entity_id] = entity - entity.update_ha_state() + yield from entity.async_update_ha_state() return True def update_group(self): """Set up and/or update component group.""" + run_callback_threadsafe( + self.hass.loop, self.async_update_group).result() + + @asyncio.coroutine + def async_update_group(self): + """Set up and/or update component group. + + This method must be run in the event loop. + """ if self.group is None and self.group_name is not None: group = get_component('group') - self.group = group.Group(self.hass, self.group_name, - user_defined=False) - - if self.group is not None: - self.group.update_tracked_entity_ids(self.entities.keys()) + self.group = yield from group.Group.async_create_group( + self.hass, self.group_name, self.entities.keys(), + user_defined=False + ) + elif self.group is not None: + yield from self.group.async_update_tracked_entity_ids( + self.entities.keys()) def reset(self): """Remove entities and reset the entity component to initial values.""" - with self.lock: - for platform in self._platforms.values(): - platform.reset() + run_coroutine_threadsafe(self.async_reset(), self.hass.loop).result() - self._platforms = { - 'core': self._platforms['core'] - } - self.entities = {} - self.config = None + @asyncio.coroutine + def async_reset(self): + """Remove entities and reset the entity component to initial values. - if self.group is not None: - self.group.stop() - self.group = None + This method must be run in the event loop. + """ + tasks = [platform.async_reset() for platform + in self._platforms.values()] + + yield from asyncio.gather(*tasks, loop=self.hass.loop) + + self._platforms = { + 'core': self._platforms['core'] + } + self.entities = {} + self.config = None + + if self.group is not None: + yield from self.group.async_stop() + self.group = None def prepare_reload(self): """Prepare reloading this entity component.""" @@ -178,9 +253,20 @@ class EntityComponent(object): self.reset() return conf + @asyncio.coroutine + def async_prepare_reload(self): + """Prepare reloading this entity component. + + This method must be run in the event loop. 
+ """ + conf = yield from self.hass.loop.run_in_executor( + None, self.prepare_reload + ) + return conf + class EntityPlatform(object): - """Keep track of entities for a single platform.""" + """Keep track of entities for a single platform and stay in loop.""" # pylint: disable=too-few-public-methods def __init__(self, component, scan_interval, entity_namespace): @@ -189,41 +275,63 @@ class EntityPlatform(object): self.scan_interval = scan_interval self.entity_namespace = entity_namespace self.platform_entities = [] - self._unsub_polling = None + self._async_unsub_polling = None def add_entities(self, new_entities): """Add entities for a single platform.""" - with self.component.lock: - for entity in new_entities: - if self.component.add_entity(entity, self): - self.platform_entities.append(entity) + run_coroutine_threadsafe( + self.async_add_entities(new_entities), self.component.hass.loop + ).result() - self.component.update_group() + @asyncio.coroutine + def async_add_entities(self, new_entities): + """Add entities for a single platform async. - if self._unsub_polling is not None or \ - not any(entity.should_poll for entity - in self.platform_entities): - return + This method must be run in the event loop. + """ + tasks = [self._async_process_entity(entity) for entity in new_entities] - self._unsub_polling = track_utc_time_change( - self.component.hass, self._update_entity_states, - second=range(0, 60, self.scan_interval)) + yield from asyncio.gather(*tasks, loop=self.component.hass.loop) + yield from self.component.async_update_group() - def reset(self): - """Remove all entities and reset data.""" - for entity in self.platform_entities: - entity.remove() - if self._unsub_polling is not None: - self._unsub_polling() - self._unsub_polling = None + if self._async_unsub_polling is not None or \ + not any(entity.should_poll for entity + in self.platform_entities): + return + self._async_unsub_polling = async_track_utc_time_change( + self.component.hass, self._update_entity_states, + second=range(0, 60, self.scan_interval)) + + @asyncio.coroutine + def _async_process_entity(self, new_entity): + """Add entities to StateMachine.""" + ret = yield from self.component.async_add_entity(new_entity, self) + if ret: + self.platform_entities.append(new_entity) + + @asyncio.coroutine + def async_reset(self): + """Remove all entities and reset data. + + This method must be run in the event loop. + """ + tasks = [entity.async_remove() for entity in self.platform_entities] + + yield from asyncio.gather(*tasks, loop=self.component.hass.loop) + + if self._async_unsub_polling is not None: + self._async_unsub_polling() + self._async_unsub_polling = None + + @callback def _update_entity_states(self, now): - """Update the states of all the polling entities.""" - with self.component.lock: - # We copy the entities because new entities might be detected - # during state update causing deadlocks. - entities = list(entity for entity in self.platform_entities - if entity.should_poll) + """Update the states of all the polling entities. - for entity in entities: - entity.update_ha_state(True) + This method must be run in the event loop. 
+ """ + for entity in self.platform_entities: + if entity.should_poll: + self.component.hass.loop.create_task( + entity.async_update_ha_state(True) + ) diff --git a/homeassistant/helpers/script.py b/homeassistant/helpers/script.py index cb4a1fbbe04..f6a2f482fc1 100644 --- a/homeassistant/helpers/script.py +++ b/homeassistant/helpers/script.py @@ -51,6 +51,7 @@ class Script(): in self.sequence) self._async_unsub_delay_listener = None self._template_cache = {} + self._config_cache = {} @property def is_running(self) -> bool: @@ -93,7 +94,7 @@ class Script(): delay = vol.All( cv.time_period, cv.positive_timedelta)( - delay.async_render()) + delay.async_render(variables)) self._async_unsub_delay_listener = \ async_track_point_in_utc_time( @@ -153,9 +154,14 @@ class Script(): def _async_check_condition(self, action, variables): """Test if condition is matching.""" + config_cache_key = frozenset((k, str(v)) for k, v in action.items()) + config = self._config_cache.get(config_cache_key) + if not config: + config = condition.async_from_config(action, False) + self._config_cache[config_cache_key] = config + self.last_action = action.get(CONF_ALIAS, action[CONF_CONDITION]) - check = condition.async_from_config(action, False)( - self.hass, variables) + check = config(self.hass, variables) self._log("Test condition {}: {}".format(self.last_action, check)) return check diff --git a/homeassistant/helpers/service.py b/homeassistant/helpers/service.py index 06df2eb992d..ccfeb707fea 100644 --- a/homeassistant/helpers/service.py +++ b/homeassistant/helpers/service.py @@ -98,6 +98,8 @@ def extract_entity_ids(hass, service_call): """Helper method to extract a list of entity ids from a service call. Will convert group entity ids to the entity ids it represents. + + Async friendly. 
""" if not (service_call.data and ATTR_ENTITY_ID in service_call.data): return [] diff --git a/homeassistant/helpers/template.py b/homeassistant/helpers/template.py index 03029c369e6..2a72fc1a088 100644 --- a/homeassistant/helpers/template.py +++ b/homeassistant/helpers/template.py @@ -387,6 +387,13 @@ def timestamp_utc(value): return value +def fail_when_undefined(value): + """Filter to force a failure when the value is undefined.""" + if isinstance(value, jinja2.Undefined): + value() + return value + + def forgiving_float(value): """Try to convert value to a float.""" try: @@ -408,6 +415,7 @@ ENV.filters['multiply'] = multiply ENV.filters['timestamp_custom'] = timestamp_custom ENV.filters['timestamp_local'] = timestamp_local ENV.filters['timestamp_utc'] = timestamp_utc +ENV.filters['is_defined'] = fail_when_undefined ENV.globals['float'] = forgiving_float ENV.globals['now'] = dt_util.now ENV.globals['utcnow'] = dt_util.utcnow diff --git a/homeassistant/remote.py b/homeassistant/remote.py index 8725990f146..15a84e08ffe 100644 --- a/homeassistant/remote.py +++ b/homeassistant/remote.py @@ -124,9 +124,9 @@ class HomeAssistant(ha.HomeAssistant): self.remote_api = remote_api self.loop = loop or asyncio.get_event_loop() - self.pool = pool = ha.create_worker_pool() + self.pool = ha.create_worker_pool() - self.bus = EventBus(remote_api, pool, self.loop) + self.bus = EventBus(remote_api, self) self.services = ha.ServiceRegistry(self.bus, self.add_job, self.loop) self.states = StateMachine(self.bus, self.loop, self.remote_api) self.config = ha.Config() @@ -143,7 +143,7 @@ class HomeAssistant(ha.HomeAssistant): 'Unable to setup local API to receive events') self.state = ha.CoreState.starting - ha.async_create_timer(self) + ha._async_create_timer(self) # pylint: disable=protected-access self.bus.fire(ha.EVENT_HOMEASSISTANT_START, origin=ha.EventOrigin.remote) @@ -180,9 +180,9 @@ class EventBus(ha.EventBus): """EventBus implementation that forwards fire_event to remote API.""" # pylint: disable=too-few-public-methods - def __init__(self, api, pool, loop): + def __init__(self, api, hass): """Initalize the eventbus.""" - super().__init__(pool, loop) + super().__init__(hass) self._api = api def fire(self, event_type, event_data=None, origin=ha.EventOrigin.local): diff --git a/homeassistant/scripts/check_config.py b/homeassistant/scripts/check_config.py index b3df02e8b34..f8b6fc6e69b 100644 --- a/homeassistant/scripts/check_config.py +++ b/homeassistant/scripts/check_config.py @@ -199,9 +199,10 @@ def check(config_path): res['secrets'][node.value] = val return val - def mock_except(ex, domain, config): # pylint: disable=unused-variable + def mock_except(ex, domain, config, # pylint: disable=unused-variable + hass=None): """Mock bootstrap.log_exception.""" - MOCKS['except'][1](ex, domain, config) + MOCKS['except'][1](ex, domain, config, hass) res['except'][domain] = config.get(domain, config) # Patches to skip functions diff --git a/homeassistant/util/yaml.py b/homeassistant/util/yaml.py index 035a96b657e..cf773bb999f 100644 --- a/homeassistant/util/yaml.py +++ b/homeassistant/util/yaml.py @@ -1,8 +1,8 @@ """YAML utility functions.""" -import glob import logging import os import sys +import fnmatch from collections import OrderedDict from typing import Union, List, Dict @@ -61,23 +61,32 @@ def _include_yaml(loader: SafeLineLoader, return load_yaml(fname) +def _find_files(directory, pattern): + """Recursively load files in a directory.""" + for root, _dirs, files in os.walk(directory): + for basename in 
files: + if fnmatch.fnmatch(basename, pattern): + filename = os.path.join(root, basename) + yield filename + + def _include_dir_named_yaml(loader: SafeLineLoader, - node: yaml.nodes.Node): + node: yaml.nodes.Node) -> OrderedDict: """Load multiple files from directory as a dictionary.""" mapping = OrderedDict() # type: OrderedDict - files = os.path.join(os.path.dirname(loader.name), node.value, '*.yaml') - for fname in glob.glob(files): + loc = os.path.join(os.path.dirname(loader.name), node.value) + for fname in _find_files(loc, '*.yaml'): filename = os.path.splitext(os.path.basename(fname))[0] mapping[filename] = load_yaml(fname) return mapping def _include_dir_merge_named_yaml(loader: SafeLineLoader, - node: yaml.nodes.Node): + node: yaml.nodes.Node) -> OrderedDict: """Load multiple files from directory as a merged dictionary.""" mapping = OrderedDict() # type: OrderedDict - files = os.path.join(os.path.dirname(loader.name), node.value, '*.yaml') - for fname in glob.glob(files): + loc = os.path.join(os.path.dirname(loader.name), node.value) + for fname in _find_files(loc, '*.yaml'): if os.path.basename(fname) == _SECRET_YAML: continue loaded_yaml = load_yaml(fname) @@ -89,18 +98,18 @@ def _include_dir_merge_named_yaml(loader: SafeLineLoader, def _include_dir_list_yaml(loader: SafeLineLoader, node: yaml.nodes.Node): """Load multiple files from directory as a list.""" - files = os.path.join(os.path.dirname(loader.name), node.value, '*.yaml') - return [load_yaml(f) for f in glob.glob(files) + loc = os.path.join(os.path.dirname(loader.name), node.value) + return [load_yaml(f) for f in _find_files(loc, '*.yaml') if os.path.basename(f) != _SECRET_YAML] def _include_dir_merge_list_yaml(loader: SafeLineLoader, node: yaml.nodes.Node): """Load multiple files from directory as a merged list.""" - files = os.path.join(os.path.dirname(loader.name), - node.value, '*.yaml') # type: str + loc = os.path.join(os.path.dirname(loader.name), + node.value) # type: str merged_list = [] # type: List - for fname in glob.glob(files): + for fname in _find_files(loc, '*.yaml'): if os.path.basename(fname) == _SECRET_YAML: continue loaded_yaml = load_yaml(fname) diff --git a/requirements_all.txt b/requirements_all.txt index 8a65b76f5cc..01a0acdeebd 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -23,7 +23,7 @@ PyMata==2.13 # RPi.GPIO==0.6.1 # homeassistant.components.media_player.sonos -SoCo==0.11.1 +SoCo==0.12 # homeassistant.components.notify.twitter TwitterAPI==2.4.2 @@ -35,12 +35,18 @@ Werkzeug==0.11.11 # homeassistant.components.apcupsd apcaccess==0.0.4 +# homeassistant.components.notify.apns +apns2==0.1.1 + # homeassistant.components.sun astral==1.2 # homeassistant.components.sensor.linux_battery batinfo==0.3 +# homeassistant.components.sensor.scrape +beautifulsoup4==4.5.1 + # homeassistant.components.light.blinksticklight blinkstick==1.1.8 @@ -58,7 +64,7 @@ boto3==1.3.1 # homeassistant.components.emulated_hue # homeassistant.components.http -cherrypy==8.1.0 +cherrypy==8.1.2 # homeassistant.components.sensor.coinmarketcap coinmarketcap==2.0.1 @@ -66,11 +72,18 @@ coinmarketcap==2.0.1 # homeassistant.scripts.check_config colorlog>2.1,<3 +# homeassistant.components.alarm_control_panel.concord232 +# homeassistant.components.binary_sensor.concord232 +concord232==0.14 + # homeassistant.components.media_player.directv directpy==0.1 +# homeassistant.components.updater +distro==1.0.0 + # homeassistant.components.notify.xmpp -dnspython3==1.14.0 +dnspython3==1.15.0 # homeassistant.components.dweet # 
homeassistant.components.sensor.dweet @@ -139,7 +152,10 @@ hikvision==0.4 # http://github.com/adafruit/Adafruit_Python_DHT/archive/310c59b0293354d07d94375f1365f7b9b9110c7d.zip#Adafruit_DHT==1.3.0 # homeassistant.components.light.flux_led -https://github.com/Danielhiversen/flux_led/archive/0.7.zip#flux_led==0.7 +https://github.com/Danielhiversen/flux_led/archive/0.7.zip#flux_led==0.8 + +# homeassistant.components.switch.tplink +https://github.com/GadgetReactor/pyHS100/archive/1f771b7d8090a91c6a58931532e42730b021cbde.zip#pyHS100==0.2.0 # homeassistant.components.switch.dlink https://github.com/LinuxChristian/pyW215/archive/v0.3.5.zip#pyW215==0.3.5 @@ -170,11 +186,11 @@ https://github.com/danieljkemp/onkyo-eiscp/archive/python3.zip#onkyo-eiscp==0.9. # homeassistant.components.device_tracker.fritz # https://github.com/deisi/fritzconnection/archive/b5c14515e1c8e2652b06b6316a7f3913df942841.zip#fritzconnection==0.4.6 -# homeassistant.components.switch.tplink -https://github.com/gadgetreactor/pyHS100/archive/ef85f939fd5b07064a0f34dfa673fa7d6140bd95.zip#pyHS100==0.1.2 - # homeassistant.components.netatmo -https://github.com/jabesq/netatmo-api-python/archive/v0.5.0.zip#lnetatmo==0.5.0 +https://github.com/jabesq/netatmo-api-python/archive/v0.6.0.zip#lnetatmo==0.6.0 + +# homeassistant.components.switch.neato +https://github.com/jabesq/pybotvac/archive/v0.0.1.zip#pybotvac==0.0.1 # homeassistant.components.sensor.sabnzbd https://github.com/jamespcole/home-assistant-nzb-clients/archive/616cad59154092599278661af17e2a9f2cf5e2a9.zip#python-sabnzbd==0.1 @@ -205,10 +221,10 @@ https://github.com/rkabadi/pyedimax/archive/365301ce3ff26129a7910c501ead09ea625f https://github.com/robbiet480/pygtfs/archive/00546724e4bbcb3053110d844ca44e2246267dd8.zip#pygtfs==0.1.3 # homeassistant.components.scene.hunterdouglas_powerview -https://github.com/sander76/powerviewApi/archive/cc6f75dd39160d4aaf46cb2ed9220136b924bcb4.zip#powerviewApi==0.2 +https://github.com/sander76/powerviewApi/archive/246e782d60d5c0addcc98d7899a0186f9d5640b0.zip#powerviewApi==0.3.15 # homeassistant.components.mysensors -https://github.com/theolind/pymysensors/archive/8ce98b7fb56f7921a808eb66845ce8b2c455c81e.zip#pymysensors==0.7.1 +https://github.com/theolind/pymysensors/archive/0b705119389be58332f17753c53167f551254b6c.zip#pymysensors==0.8 # homeassistant.components.alarm_control_panel.simplisafe https://github.com/w1ll1am23/simplisafe-python/archive/586fede0e85fd69e56e516aaa8e97eb644ca8866.zip#simplisafe-python==0.0.1 @@ -247,6 +263,9 @@ lightify==1.0.3 # homeassistant.components.light.limitlessled limitlessled==1.0.2 +# homeassistant.components.notify.matrix +matrix-client==0.0.5 + # homeassistant.components.notify.message_bird messagebird==1.2.0 @@ -258,7 +277,7 @@ mficlient==0.3.0 miflora==0.1.9 # homeassistant.components.discovery -netdisco==0.7.1 +netdisco==0.7.2 # homeassistant.components.sensor.neurio_energy neurio==0.2.10 @@ -281,7 +300,7 @@ pexpect==4.0.1 phue==0.8 # homeassistant.components.pilight -pilight==0.0.2 +pilight==0.1.1 # homeassistant.components.media_player.plex # homeassistant.components.sensor.plex @@ -293,7 +312,7 @@ pmsensor==0.3 # homeassistant.components.climate.proliphix # homeassistant.components.thermostat.proliphix -proliphix==0.3.1 +proliphix==0.4.0 # homeassistant.components.sensor.systemmonitor psutil==4.3.1 @@ -319,11 +338,15 @@ pyasn1-modules==0.0.8 # homeassistant.components.notify.xmpp pyasn1==0.1.9 +# homeassistant.components.device_tracker.bbox +# homeassistant.components.sensor.bbox +pybbox==0.0.5-alpha + # 
homeassistant.components.device_tracker.bluetooth_tracker # pybluez==0.22 # homeassistant.components.media_player.cast -pychromecast==0.7.4 +pychromecast==0.7.6 # homeassistant.components.media_player.cmus pycmus==0.1.0 @@ -361,7 +384,7 @@ pynetio==0.1.6 pynx584==0.2 # homeassistant.components.sensor.openweathermap -pyowm==2.4.0 +pyowm==2.5.0 # homeassistant.components.switch.acer_projector pyserial==3.1.1 @@ -371,7 +394,7 @@ pyserial==3.1.1 pysnmp==4.3.2 # homeassistant.components.digital_ocean -python-digitalocean==1.9.0 +python-digitalocean==1.10.0 # homeassistant.components.sensor.darksky python-forecastio==1.3.5 @@ -380,7 +403,7 @@ python-forecastio==1.3.5 python-hpilo==3.8 # homeassistant.components.lirc -# python-lirc==1.2.1 +# python-lirc==1.2.3 # homeassistant.components.media_player.mpd python-mpd2==0.5.5 @@ -389,7 +412,7 @@ python-mpd2==0.5.5 python-mystrom==0.3.6 # homeassistant.components.nest -python-nest==2.10.0 +python-nest==2.11.0 # homeassistant.components.device_tracker.nmap_tracker python-nmap==0.6.1 @@ -423,7 +446,7 @@ radiotherm==1.2 # rpi-rf==0.9.5 # homeassistant.components.media_player.yamaha -rxv==0.1.11 +rxv==0.2.0 # homeassistant.components.media_player.samsungtv samsungctl==0.5.1 @@ -435,10 +458,10 @@ schiene==0.17 scsgate==0.1.0 # homeassistant.components.notify.sendgrid -sendgrid==3.4.0 +sendgrid==3.6.0 # homeassistant.components.notify.slack -slacker==0.9.25 +slacker==0.9.28 # homeassistant.components.notify.xmpp sleekxmpp==1.3.1 @@ -458,7 +481,7 @@ speedtest-cli==0.3.4 # homeassistant.components.recorder # homeassistant.scripts.db_migrator -sqlalchemy==1.0.15 +sqlalchemy==1.1.1 # homeassistant.components.emulated_hue # homeassistant.components.http @@ -488,7 +511,7 @@ transmissionrpc==0.11 twilio==5.4.0 # homeassistant.components.sensor.uber -uber_rides==0.2.5 +uber_rides==0.2.7 # homeassistant.components.device_tracker.unifi unifi==1.2.5 @@ -500,7 +523,7 @@ urllib3 uvcclient==0.9.0 # homeassistant.components.verisure -vsure==0.10.3 +vsure==0.11.1 # homeassistant.components.sensor.vasttrafik vtjp==0.1.11 diff --git a/tests/common.py b/tests/common.py index 891bd3534a3..b185a47e66c 100644 --- a/tests/common.py +++ b/tests/common.py @@ -7,9 +7,10 @@ from unittest.mock import patch from io import StringIO import logging import threading +from contextlib import contextmanager from homeassistant import core as ha, loader -from homeassistant.bootstrap import setup_component +from homeassistant.bootstrap import setup_component, prepare_setup_component from homeassistant.helpers.entity import ToggleEntity from homeassistant.util.unit_system import METRIC_SYSTEM import homeassistant.util.dt as date_util @@ -58,6 +59,8 @@ def get_test_home_assistant(num_threads=None): stop_event = threading.Event() def run_loop(): + """Run event loop.""" + # pylint: disable=protected-access loop._thread_ident = threading.get_ident() loop.run_forever() loop.close() @@ -70,10 +73,11 @@ def get_test_home_assistant(num_threads=None): @asyncio.coroutine def fake_stop(): + """Fake stop.""" yield None - @patch.object(ha, 'async_create_timer') - @patch.object(ha, 'async_monitor_worker_pool') + @patch.object(ha, '_async_create_timer') + @patch.object(ha, '_async_monitor_worker_pool') @patch.object(hass.loop, 'add_signal_handler') @patch.object(hass.loop, 'run_forever') @patch.object(hass.loop, 'close') @@ -84,6 +88,7 @@ def get_test_home_assistant(num_threads=None): hass.block_till_done() def stop_hass(): + """Stop hass.""" orig_stop() stop_event.wait() @@ -112,6 +117,7 @@ def 
mock_service(hass, domain, service): """ calls = [] + # pylint: disable=unnecessary-lambda hass.services.register(domain, service, lambda call: calls.append(call)) return calls @@ -315,3 +321,41 @@ def patch_yaml_files(files_dict, endswith=True): raise FileNotFoundError('File not found: {}'.format(fname)) return patch.object(yaml, 'open', mock_open_f, create=True) + + +@contextmanager +def assert_setup_component(count, domain=None): + """Collect valid configuration from setup_component. + + - count: The number of valid platforms that should be set up + - domain: The domain to count (optional). It can be automatically + determined most of the time + + Use as a context manager around bootstrap.setup_component + with assert_setup_component(0) as result_config: + setup_component(hass, start_config, domain) + # using result_config is optional + """ + config = {} + + def mock_psc(hass, config_input, domain): + """Mock the prepare_setup_component to capture config.""" + res = prepare_setup_component(hass, config_input, domain) + config[domain] = None if res is None else res.get(domain) + _LOGGER.debug('Configuration for %s, Validated: %s, Original %s', + domain, config[domain], config_input.get(domain)) + return res + + assert isinstance(config, dict) + with patch('homeassistant.bootstrap.prepare_setup_component', mock_psc): + yield config + + if domain is None: + assert len(config) == 1, ('assert_setup_component requires DOMAIN: {}' + .format(list(config.keys()))) + domain = list(config.keys())[0] + + res = config.get(domain) + res_len = 0 if res is None else len(res) + assert res_len == count, 'setup_component failed, expected {} got {}: {}' \ + .format(count, res_len, res) diff --git a/tests/components/alarm_control_panel/test_mqtt.py b/tests/components/alarm_control_panel/test_mqtt.py index e4e120cec19..871bc6afd76 100644 --- a/tests/components/alarm_control_panel/test_mqtt.py +++ b/tests/components/alarm_control_panel/test_mqtt.py @@ -1,14 +1,15 @@ """The tests the MQTT alarm control panel component.""" import unittest -from homeassistant.bootstrap import _setup_component +from homeassistant.bootstrap import setup_component from homeassistant.const import ( STATE_ALARM_DISARMED, STATE_ALARM_ARMED_HOME, STATE_ALARM_ARMED_AWAY, STATE_ALARM_PENDING, STATE_ALARM_TRIGGERED, STATE_UNKNOWN) from homeassistant.components import alarm_control_panel from tests.common import ( - mock_mqtt_component, fire_mqtt_message, get_test_home_assistant) + mock_mqtt_component, fire_mqtt_message, get_test_home_assistant, + assert_setup_component) CODE = 'HELLO_CODE' @@ -16,7 +17,9 @@ CODE = 'HELLO_CODE' class TestAlarmControlPanelMQTT(unittest.TestCase): """Test the manual alarm module.""" - def setUp(self): # pylint: disable=invalid-name + # pylint: disable=invalid-name + + def setUp(self): """Setup things to be run when tests are started.""" self.hass = get_test_home_assistant() self.mock_publish = mock_mqtt_component(self.hass) @@ -28,27 +31,30 @@ class TestAlarmControlPanelMQTT(unittest.TestCase): def test_fail_setup_without_state_topic(self): """Test for failing with no state topic.""" self.hass.config.components = ['mqtt'] - assert not _setup_component(self.hass, alarm_control_panel.DOMAIN, { - alarm_control_panel.DOMAIN: { - 'platform': 'mqtt', - 'command_topic': 'alarm/command' - } - }) + with assert_setup_component(0) as config: + assert setup_component(self.hass, alarm_control_panel.DOMAIN, { + alarm_control_panel.DOMAIN: { + 'platform': 'mqtt', + 'command_topic': 'alarm/command' + } + }) + assert not 
config[alarm_control_panel.DOMAIN] def test_fail_setup_without_command_topic(self): """Test failing with no command topic.""" self.hass.config.components = ['mqtt'] - assert not _setup_component(self.hass, alarm_control_panel.DOMAIN, { - alarm_control_panel.DOMAIN: { - 'platform': 'mqtt', - 'state_topic': 'alarm/state' - } - }) + with assert_setup_component(0): + assert setup_component(self.hass, alarm_control_panel.DOMAIN, { + alarm_control_panel.DOMAIN: { + 'platform': 'mqtt', + 'state_topic': 'alarm/state' + } + }) def test_update_state_via_state_topic(self): """Test updating with via state topic.""" self.hass.config.components = ['mqtt'] - assert _setup_component(self.hass, alarm_control_panel.DOMAIN, { + assert setup_component(self.hass, alarm_control_panel.DOMAIN, { alarm_control_panel.DOMAIN: { 'platform': 'mqtt', 'name': 'test', @@ -72,7 +78,7 @@ class TestAlarmControlPanelMQTT(unittest.TestCase): def test_ignore_update_state_if_unknown_via_state_topic(self): """Test ignoring updates via state topic.""" self.hass.config.components = ['mqtt'] - assert _setup_component(self.hass, alarm_control_panel.DOMAIN, { + assert setup_component(self.hass, alarm_control_panel.DOMAIN, { alarm_control_panel.DOMAIN: { 'platform': 'mqtt', 'name': 'test', @@ -93,7 +99,7 @@ class TestAlarmControlPanelMQTT(unittest.TestCase): def test_arm_home_publishes_mqtt(self): """Test publishing of MQTT messages while armed.""" self.hass.config.components = ['mqtt'] - assert _setup_component(self.hass, alarm_control_panel.DOMAIN, { + assert setup_component(self.hass, alarm_control_panel.DOMAIN, { alarm_control_panel.DOMAIN: { 'platform': 'mqtt', 'name': 'test', @@ -110,7 +116,7 @@ class TestAlarmControlPanelMQTT(unittest.TestCase): def test_arm_home_not_publishes_mqtt_with_invalid_code(self): """Test not publishing of MQTT messages with invalid code.""" self.hass.config.components = ['mqtt'] - assert _setup_component(self.hass, alarm_control_panel.DOMAIN, { + assert setup_component(self.hass, alarm_control_panel.DOMAIN, { alarm_control_panel.DOMAIN: { 'platform': 'mqtt', 'name': 'test', @@ -128,7 +134,7 @@ class TestAlarmControlPanelMQTT(unittest.TestCase): def test_arm_away_publishes_mqtt(self): """Test publishing of MQTT messages while armed.""" self.hass.config.components = ['mqtt'] - assert _setup_component(self.hass, alarm_control_panel.DOMAIN, { + assert setup_component(self.hass, alarm_control_panel.DOMAIN, { alarm_control_panel.DOMAIN: { 'platform': 'mqtt', 'name': 'test', @@ -145,7 +151,7 @@ class TestAlarmControlPanelMQTT(unittest.TestCase): def test_arm_away_not_publishes_mqtt_with_invalid_code(self): """Test not publishing of MQTT messages with invalid code.""" self.hass.config.components = ['mqtt'] - assert _setup_component(self.hass, alarm_control_panel.DOMAIN, { + assert setup_component(self.hass, alarm_control_panel.DOMAIN, { alarm_control_panel.DOMAIN: { 'platform': 'mqtt', 'name': 'test', @@ -163,7 +169,7 @@ class TestAlarmControlPanelMQTT(unittest.TestCase): def test_disarm_publishes_mqtt(self): """Test publishing of MQTT messages while disarmed.""" self.hass.config.components = ['mqtt'] - assert _setup_component(self.hass, alarm_control_panel.DOMAIN, { + assert setup_component(self.hass, alarm_control_panel.DOMAIN, { alarm_control_panel.DOMAIN: { 'platform': 'mqtt', 'name': 'test', @@ -180,7 +186,7 @@ class TestAlarmControlPanelMQTT(unittest.TestCase): def test_disarm_not_publishes_mqtt_with_invalid_code(self): """Test not publishing of MQTT messages with invalid code.""" 
self.hass.config.components = ['mqtt'] - assert _setup_component(self.hass, alarm_control_panel.DOMAIN, { + assert setup_component(self.hass, alarm_control_panel.DOMAIN, { alarm_control_panel.DOMAIN: { 'platform': 'mqtt', 'name': 'test', diff --git a/tests/components/automation/test_init.py b/tests/components/automation/test_init.py index 0a601452393..ec128f77756 100644 --- a/tests/components/automation/test_init.py +++ b/tests/components/automation/test_init.py @@ -8,19 +8,22 @@ from homeassistant.const import ATTR_ENTITY_ID from homeassistant.exceptions import HomeAssistantError import homeassistant.util.dt as dt_util -from tests.common import get_test_home_assistant +from tests.common import get_test_home_assistant, assert_setup_component class TestAutomation(unittest.TestCase): """Test the event automation.""" - def setUp(self): # pylint: disable=invalid-name + # pylint: disable=invalid-name + + def setUp(self): """Setup things to be run when tests are started.""" self.hass = get_test_home_assistant() self.hass.config.components.append('group') self.calls = [] def record_call(service): + """Record call.""" self.calls.append(service) self.hass.services.register('test', 'automation', record_call) @@ -31,18 +34,19 @@ class TestAutomation(unittest.TestCase): def test_service_data_not_a_dict(self): """Test service data not dict.""" - assert not setup_component(self.hass, automation.DOMAIN, { - automation.DOMAIN: { - 'trigger': { - 'platform': 'event', - 'event_type': 'test_event', - }, - 'action': { - 'service': 'test.automation', - 'data': 100, + with assert_setup_component(0): + assert not setup_component(self.hass, automation.DOMAIN, { + automation.DOMAIN: { + 'trigger': { + 'platform': 'event', + 'event_type': 'test_event', + }, + 'action': { + 'service': 'test.automation', + 'data': 100, + } } - } - }) + }) def test_service_specify_data(self): """Test service data.""" @@ -70,7 +74,7 @@ class TestAutomation(unittest.TestCase): self.hass.bus.fire('test_event') self.hass.block_till_done() assert len(self.calls) == 1 - assert 'event - test_event' == self.calls[0].data['some'] + assert self.calls[0].data['some'] == 'event - test_event' state = self.hass.states.get('automation.hello') assert state is not None assert state.attributes.get('last_triggered') == time @@ -444,21 +448,22 @@ class TestAutomation(unittest.TestCase): }) def test_reload_config_when_invalid_config(self, mock_load_yaml): """Test the reload config service handling invalid config.""" - assert setup_component(self.hass, automation.DOMAIN, { - automation.DOMAIN: { - 'alias': 'hello', - 'trigger': { - 'platform': 'event', - 'event_type': 'test_event', - }, - 'action': { - 'service': 'test.automation', - 'data_template': { - 'event': '{{ trigger.event.event_type }}' + with assert_setup_component(1): + assert setup_component(self.hass, automation.DOMAIN, { + automation.DOMAIN: { + 'alias': 'hello', + 'trigger': { + 'platform': 'event', + 'event_type': 'test_event', + }, + 'action': { + 'service': 'test.automation', + 'data_template': { + 'event': '{{ trigger.event.event_type }}' + } } } - } - }) + }) assert self.hass.states.get('automation.hello') is not None self.hass.bus.fire('test_event') @@ -470,11 +475,11 @@ class TestAutomation(unittest.TestCase): automation.reload(self.hass) self.hass.block_till_done() - assert self.hass.states.get('automation.hello') is not None + assert self.hass.states.get('automation.hello') is None self.hass.bus.fire('test_event') self.hass.block_till_done() - assert len(self.calls) == 2 + assert 
len(self.calls) == 1 def test_reload_config_handles_load_fails(self): """Test the reload config service.""" diff --git a/tests/components/binary_sensor/test_nx584.py b/tests/components/binary_sensor/test_nx584.py index ea4d997c2c3..71efd1ff1b2 100644 --- a/tests/components/binary_sensor/test_nx584.py +++ b/tests/components/binary_sensor/test_nx584.py @@ -37,12 +37,6 @@ class TestNX584SensorSetup(unittest.TestCase): """Stop everything that was started.""" self._mock_client.stop() - def test_setup_no_config(self): - """Test the setup with no configuration.""" - hass = mock.MagicMock() - hass.pool.worker_count = 2 - assert setup_component(hass, 'binary_sensor', {'nx584': {}}) - @mock.patch('homeassistant.components.binary_sensor.nx584.NX584Watcher') @mock.patch('homeassistant.components.binary_sensor.nx584.NX584ZoneSensor') def test_setup_defaults(self, mock_nx, mock_watcher): @@ -59,7 +53,10 @@ class TestNX584SensorSetup(unittest.TestCase): mock_nx.assert_has_calls( [mock.call(zone, 'opening') for zone in self.fake_zones]) self.assertTrue(add_devices.called) - nx584_client.Client.assert_called_once_with('http://localhost:5007') + self.assertEqual(nx584_client.Client.call_count, 1) + self.assertEqual( + nx584_client.Client.call_args, mock.call('http://localhost:5007') + ) @mock.patch('homeassistant.components.binary_sensor.nx584.NX584Watcher') @mock.patch('homeassistant.components.binary_sensor.nx584.NX584ZoneSensor') @@ -79,7 +76,10 @@ class TestNX584SensorSetup(unittest.TestCase): mock.call(self.fake_zones[2], 'motion'), ]) self.assertTrue(add_devices.called) - nx584_client.Client.assert_called_once_with('http://foo:123') + self.assertEqual(nx584_client.Client.call_count, 1) + self.assertEqual( + nx584_client.Client.call_args, mock.call('http://foo:123') + ) self.assertTrue(mock_watcher.called) def _test_assert_graceful_fail(self, config): @@ -180,7 +180,8 @@ class TestNX584Watcher(unittest.TestCase): def run(fake_process): fake_process.side_effect = StopMe self.assertRaises(StopMe, watcher._run) - fake_process.assert_called_once_with(fake_events[0]) + self.assertEqual(fake_process.call_count, 1) + self.assertEqual(fake_process.call_args, mock.call(fake_events[0])) run() self.assertEqual(3, client.get_events.call_count) diff --git a/tests/components/binary_sensor/test_sleepiq.py b/tests/components/binary_sensor/test_sleepiq.py index d220578ca9d..94a51832d56 100644 --- a/tests/components/binary_sensor/test_sleepiq.py +++ b/tests/components/binary_sensor/test_sleepiq.py @@ -4,10 +4,11 @@ from unittest.mock import MagicMock import requests_mock -from homeassistant import core as ha +from homeassistant.bootstrap import setup_component from homeassistant.components.binary_sensor import sleepiq from tests.components.test_sleepiq import mock_responses +from tests.common import get_test_home_assistant class TestSleepIQBinarySensorSetup(unittest.TestCase): @@ -22,7 +23,7 @@ class TestSleepIQBinarySensorSetup(unittest.TestCase): def setUp(self): """Initialize values for this testcase class.""" - self.hass = ha.HomeAssistant() + self.hass = get_test_home_assistant() self.username = 'foo' self.password = 'bar' self.config = { @@ -35,6 +36,9 @@ class TestSleepIQBinarySensorSetup(unittest.TestCase): """Test for successfully setting up the SleepIQ platform.""" mock_responses(mock) + setup_component(self.hass, 'sleepiq', { + 'sleepiq': self.config}) + sleepiq.setup_platform(self.hass, self.config, self.add_devices, diff --git a/tests/components/binary_sensor/test_tcp.py 
b/tests/components/binary_sensor/test_tcp.py index ea06d69bebc..156ebe2c355 100644 --- a/tests/components/binary_sensor/test_tcp.py +++ b/tests/components/binary_sensor/test_tcp.py @@ -1,59 +1,65 @@ """The tests for the TCP binary sensor platform.""" -from copy import copy +import unittest from unittest.mock import patch, Mock -from homeassistant.components.sensor import tcp +from homeassistant.bootstrap import setup_component from homeassistant.components.binary_sensor import tcp as bin_tcp -from tests.common import get_test_home_assistant +from homeassistant.components.sensor import tcp +from tests.common import (get_test_home_assistant, assert_setup_component) from tests.components.sensor import test_tcp -@patch('homeassistant.components.sensor.tcp.Sensor.update') -def test_setup_platform_valid_config(mock_update): - """Should check the supplied config and call add_entities with Sensor.""" - add_entities = Mock() - ret = bin_tcp.setup_platform(None, test_tcp.TEST_CONFIG, add_entities) - assert ret is None, "setup_platform() should return None if successful." - assert add_entities.called - assert isinstance(add_entities.call_args[0][0][0], bin_tcp.BinarySensor) - - -def test_setup_platform_invalid_config(): - """Should check the supplied config and return False if it is invalid.""" - config = copy(test_tcp.TEST_CONFIG) - del config[tcp.CONF_HOST] - assert bin_tcp.setup_platform(None, config, None) is False - - -class TestTCPBinarySensor(): +class TestTCPBinarySensor(unittest.TestCase): """Test the TCP Binary Sensor.""" - def setup_class(cls): + def setup_method(self, method): """Setup things to be run when tests are started.""" - cls.hass = get_test_home_assistant() + self.hass = get_test_home_assistant() - def teardown_class(cls): + def teardown_method(self, method): """Stop everything that was started.""" - cls.hass.stop() + self.hass.stop() - def test_requires_additional_values(self): - """Should require the additional config values specified.""" - config = copy(test_tcp.TEST_CONFIG) - for key in bin_tcp.BinarySensor.required: - del config[key] - assert len(config) != len(test_tcp.TEST_CONFIG) - assert not bin_tcp.BinarySensor.validate_config(config) + def test_setup_platform_valid_config(self): + """Check a valid configuration.""" + with assert_setup_component(0, 'binary_sensor'): + assert setup_component( + self.hass, 'binary_sensor', test_tcp.TEST_CONFIG) - @patch('homeassistant.components.sensor.tcp.Sensor.update') + def test_setup_platform_invalid_config(self): + """Check an invalid configuration.""" + with assert_setup_component(0): + assert setup_component(self.hass, 'binary_sensor', { + 'binary_sensor': { + 'platform': 'tcp', + 'porrt': 1234, + } + }) + + @patch('homeassistant.components.sensor.tcp.TcpSensor.update') + def test_setup_platform_devices(self, mock_update): + """Check the supplied config and call add_devices with sensor.""" + add_devices = Mock() + ret = bin_tcp.setup_platform(None, test_tcp.TEST_CONFIG, add_devices) + assert ret is None + assert add_devices.called + assert isinstance( + add_devices.call_args[0][0][0], bin_tcp.TcpBinarySensor) + + @patch('homeassistant.components.sensor.tcp.TcpSensor.update') def test_is_on_true(self, mock_update): - """Should return True if _state is the same as value_on.""" - sensor = bin_tcp.BinarySensor(self.hass, test_tcp.TEST_CONFIG) - sensor._state = test_tcp.TEST_CONFIG[tcp.CONF_VALUE_ON] + """Check that is_on is True when _state equals value_on.""" + sensor = bin_tcp.TcpBinarySensor( + self.hass, test_tcp.TEST_CONFIG['sensor'])
+ sensor._state = test_tcp.TEST_CONFIG['sensor'][tcp.CONF_VALUE_ON] + print(sensor._state) assert sensor.is_on - @patch('homeassistant.components.sensor.tcp.Sensor.update') + @patch('homeassistant.components.sensor.tcp.TcpSensor.update') def test_is_on_false(self, mock_update): - """Should return False if _state is not the same as value_on.""" - sensor = bin_tcp.BinarySensor(self.hass, test_tcp.TEST_CONFIG) - sensor._state = "%s abc" % test_tcp.TEST_CONFIG[tcp.CONF_VALUE_ON] + """Check that is_on is False when _state does not equal value_on.""" + sensor = bin_tcp.TcpBinarySensor( + self.hass, test_tcp.TEST_CONFIG['sensor']) + sensor._state = '{} abc'.format( + test_tcp.TEST_CONFIG['sensor'][tcp.CONF_VALUE_ON]) assert not sensor.is_on diff --git a/tests/components/binary_sensor/test_template.py b/tests/components/binary_sensor/test_template.py index c9e4bf6138b..98462083e6f 100644 --- a/tests/components/binary_sensor/test_template.py +++ b/tests/components/binary_sensor/test_template.py @@ -4,17 +4,20 @@ from unittest import mock from homeassistant.const import EVENT_STATE_CHANGED, MATCH_ALL import homeassistant.bootstrap as bootstrap -from homeassistant.components.binary_sensor import PLATFORM_SCHEMA from homeassistant.components.binary_sensor import template from homeassistant.exceptions import TemplateError from homeassistant.helpers import template as template_hlpr +from homeassistant.util.async import run_callback_threadsafe -from tests.common import get_test_home_assistant +from tests.common import get_test_home_assistant, assert_setup_component class TestBinarySensorTemplate(unittest.TestCase): """Test for Binary sensor template platform.""" + hass = None + # pylint: disable=invalid-name + def setup_method(self, method): """Setup things to be run when tests are started.""" self.hass = get_test_home_assistant() @@ -26,80 +29,79 @@ class TestBinarySensorTemplate(unittest.TestCase): @mock.patch.object(template, 'BinarySensorTemplate') def test_setup(self, mock_template): """"Test the setup.""" - tpl = template_hlpr.Template('{{ foo }}', self.hass) - config = PLATFORM_SCHEMA({ - 'platform': 'template', - 'sensors': { - 'test': { - 'friendly_name': 'virtual thingy', - 'value_template': tpl, - 'sensor_class': 'motion', - 'entity_id': 'test' + config = { + 'binary_sensor': { + 'platform': 'template', + 'sensors': { + 'test': { + 'friendly_name': 'virtual thingy', + 'value_template': '{{ foo }}', + 'sensor_class': 'motion', + }, }, - } - }) - add_devices = mock.MagicMock() - result = template.setup_platform(self.hass, config, add_devices) - self.assertTrue(result) - mock_template.assert_called_once_with( - self.hass, 'test', 'virtual thingy', 'motion', tpl, 'test') - add_devices.assert_called_once_with([mock_template.return_value]) + }, + } + with assert_setup_component(1): + assert bootstrap.setup_component( + self.hass, 'binary_sensor', config) def test_setup_no_sensors(self): """"Test setup with no sensors.""" - result = bootstrap.setup_component(self.hass, 'sensor', { - 'sensor': { - 'platform': 'template' - } - }) - self.assertFalse(result) + with assert_setup_component(0): + assert bootstrap.setup_component(self.hass, 'binary_sensor', { + 'binary_sensor': { + 'platform': 'template' + } + }) def test_setup_invalid_device(self): """"Test the setup with invalid devices.""" - result = bootstrap.setup_component(self.hass, 'sensor', { - 'sensor': { - 'platform': 'template', - 'sensors': { - 'foo bar': {}, - }, - } - }) - self.assertFalse(result) + with assert_setup_component(0): + assert 
bootstrap.setup_component(self.hass, 'binary_sensor', { + 'binary_sensor': { + 'platform': 'template', + 'sensors': { + 'foo bar': {}, + }, + } + }) def test_setup_invalid_sensor_class(self): """"Test setup with invalid sensor class.""" - result = bootstrap.setup_component(self.hass, 'sensor', { - 'sensor': { - 'platform': 'template', - 'sensors': { - 'test': { - 'value_template': '{{ foo }}', - 'sensor_class': 'foobarnotreal', + with assert_setup_component(0): + assert bootstrap.setup_component(self.hass, 'binary_sensor', { + 'binary_sensor': { + 'platform': 'template', + 'sensors': { + 'test': { + 'value_template': '{{ foo }}', + 'sensor_class': 'foobarnotreal', + }, }, - }, - } - }) - self.assertFalse(result) + } + }) def test_setup_invalid_missing_template(self): """"Test setup with invalid and missing template.""" - result = bootstrap.setup_component(self.hass, 'sensor', { - 'sensor': { - 'platform': 'template', - 'sensors': { - 'test': { - 'sensor_class': 'motion', - }, + with assert_setup_component(0): + assert bootstrap.setup_component(self.hass, 'binary_sensor', { + 'binary_sensor': { + 'platform': 'template', + 'sensors': { + 'test': { + 'sensor_class': 'motion', + }, + } } - } - }) - self.assertFalse(result) + }) def test_attributes(self): """"Test the attributes.""" - vs = template.BinarySensorTemplate( + vs = run_callback_threadsafe( + self.hass.loop, template.BinarySensorTemplate, self.hass, 'parent', 'Parent', 'motion', - template_hlpr.Template('{{ 1 > 1 }}', self.hass), MATCH_ALL) + template_hlpr.Template('{{ 1 > 1 }}', self.hass), MATCH_ALL + ).result() self.assertFalse(vs.should_poll) self.assertEqual('motion', vs.sensor_class) self.assertEqual('Parent', vs.name) @@ -107,15 +109,19 @@ class TestBinarySensorTemplate(unittest.TestCase): vs.update() self.assertFalse(vs.is_on) + # pylint: disable=protected-access vs._template = template_hlpr.Template("{{ 2 > 1 }}", self.hass) + vs.update() self.assertTrue(vs.is_on) def test_event(self): """"Test the event.""" - vs = template.BinarySensorTemplate( + vs = run_callback_threadsafe( + self.hass.loop, template.BinarySensorTemplate, self.hass, 'parent', 'Parent', 'motion', - template_hlpr.Template('{{ 1 > 1 }}', self.hass), MATCH_ALL) + template_hlpr.Template('{{ 1 > 1 }}', self.hass), MATCH_ALL + ).result() vs.update_ha_state() self.hass.block_till_done() @@ -127,9 +133,11 @@ class TestBinarySensorTemplate(unittest.TestCase): @mock.patch('homeassistant.helpers.template.Template.render') def test_update_template_error(self, mock_render): """"Test the template update error.""" - vs = template.BinarySensorTemplate( + vs = run_callback_threadsafe( + self.hass.loop, template.BinarySensorTemplate, self.hass, 'parent', 'Parent', 'motion', - template_hlpr.Template('{{ 1 > 1 }}', self.hass), MATCH_ALL) + template_hlpr.Template('{{ 1 > 1 }}', self.hass), MATCH_ALL + ).result() mock_render.side_effect = TemplateError('foo') vs.update() mock_render.side_effect = TemplateError( diff --git a/tests/components/binary_sensor/test_trend.py b/tests/components/binary_sensor/test_trend.py index 475e445175b..8b522db4a58 100644 --- a/tests/components/binary_sensor/test_trend.py +++ b/tests/components/binary_sensor/test_trend.py @@ -1,12 +1,14 @@ """The test for the Trend sensor platform.""" import homeassistant.bootstrap as bootstrap -from tests.common import get_test_home_assistant +from tests.common import get_test_home_assistant, assert_setup_component class TestTrendBinarySensor: """Test the Trend sensor.""" + hass = None + def setup_method(self, 
method): """Setup things to be run when tests are started.""" self.hass = get_test_home_assistant() @@ -189,41 +191,46 @@ class TestTrendBinarySensor: state = self.hass.states.get('binary_sensor.test_trend_sensor') assert state.state == 'off' - def test_invalid_name_does_not_create(self): + def test_invalid_name_does_not_create(self): \ + # pylint: disable=invalid-name """Test invalid name.""" - assert not bootstrap.setup_component(self.hass, 'binary_sensor', { - 'binary_sensor': { - 'platform': 'template', - 'sensors': { - 'test INVALID sensor': { - 'entity_id': - "sensor.test_state" + with assert_setup_component(0): + assert bootstrap.setup_component(self.hass, 'binary_sensor', { + 'binary_sensor': { + 'platform': 'template', + 'sensors': { + 'test INVALID sensor': { + 'entity_id': + "sensor.test_state" + } } } - } - }) + }) assert self.hass.states.all() == [] - def test_invalid_sensor_does_not_create(self): + def test_invalid_sensor_does_not_create(self): \ + # pylint: disable=invalid-name """Test invalid sensor.""" - assert not bootstrap.setup_component(self.hass, 'binary_sensor', { - 'binary_sensor': { - 'platform': 'template', - 'sensors': { - 'test_trend_sensor': { - 'not_entity_id': - "sensor.test_state" + with assert_setup_component(0): + assert bootstrap.setup_component(self.hass, 'binary_sensor', { + 'binary_sensor': { + 'platform': 'template', + 'sensors': { + 'test_trend_sensor': { + 'not_entity_id': + "sensor.test_state" + } } } - } - }) + }) assert self.hass.states.all() == [] def test_no_sensors_does_not_create(self): """Test no sensors.""" - assert not bootstrap.setup_component(self.hass, 'binary_sensor', { - 'binary_sensor': { - 'platform': 'trend' - } - }) + with assert_setup_component(0): + assert bootstrap.setup_component(self.hass, 'binary_sensor', { + 'binary_sensor': { + 'platform': 'trend' + } + }) assert self.hass.states.all() == [] diff --git a/tests/components/camera/test_local_file.py b/tests/components/camera/test_local_file.py index 546152b0d8a..0c131b441b5 100644 --- a/tests/components/camera/test_local_file.py +++ b/tests/components/camera/test_local_file.py @@ -1,5 +1,4 @@ """The tests for local file camera component.""" -from tempfile import NamedTemporaryFile import unittest from unittest import mock @@ -8,7 +7,7 @@ from werkzeug.test import EnvironBuilder from homeassistant.bootstrap import setup_component from homeassistant.components.http import request_class -from tests.common import get_test_home_assistant +from tests.common import get_test_home_assistant, assert_setup_component class TestLocalCamera(unittest.TestCase): @@ -26,44 +25,46 @@ class TestLocalCamera(unittest.TestCase): def test_loading_file(self): """Test that it loads image from disk.""" + test_string = 'hello' self.hass.wsgi = mock.MagicMock() - with NamedTemporaryFile() as fp: - fp.write('hello'.encode('utf-8')) - fp.flush() - + with mock.patch('os.path.isfile', mock.Mock(return_value=True)), \ + mock.patch('os.access', mock.Mock(return_value=True)): assert setup_component(self.hass, 'camera', { 'camera': { 'name': 'config_test', 'platform': 'local_file', - 'file_path': fp.name, + 'file_path': 'mock.file', }}) - image_view = self.hass.wsgi.mock_calls[0][1][0] + image_view = self.hass.wsgi.mock_calls[0][1][0] + m_open = mock.mock_open(read_data=test_string) + with mock.patch( + 'homeassistant.components.camera.local_file.open', + m_open, create=True + ): builder = EnvironBuilder(method='GET') - Request = request_class() + Request = request_class() # pylint: disable=invalid-name request 
= Request(builder.get_environ()) request.authenticated = True resp = image_view.get(request, 'camera.config_test') - assert resp.status_code == 200, resp.response - assert resp.response[0].decode('utf-8') == 'hello' + assert resp.status_code == 200, resp.response + assert resp.response[0].decode('utf-8') == test_string def test_file_not_readable(self): """Test local file will not setup when file is not readable.""" self.hass.wsgi = mock.MagicMock() - with NamedTemporaryFile() as fp: - fp.write('hello'.encode('utf-8')) - fp.flush() + with mock.patch('os.path.isfile', mock.Mock(return_value=True)), \ + mock.patch('os.access', return_value=False), \ + assert_setup_component(0): + assert setup_component(self.hass, 'camera', { + 'camera': { + 'name': 'config_test', + 'platform': 'local_file', + 'file_path': 'mock.file', + }}) - with mock.patch('os.access', return_value=False): - assert not setup_component(self.hass, 'camera', { - 'camera': { - 'name': 'config_test', - 'platform': 'local_file', - 'file_path': fp.name, - }}) - - assert [] == self.hass.states.all() + assert [] == self.hass.states.all() diff --git a/tests/components/camera/test_uvc.py b/tests/components/camera/test_uvc.py index 01ce1cec518..5addb3266c3 100644 --- a/tests/components/camera/test_uvc.py +++ b/tests/components/camera/test_uvc.py @@ -9,11 +9,22 @@ from uvcclient import nvr from homeassistant.bootstrap import setup_component from homeassistant.components.camera import uvc +from tests.common import get_test_home_assistant class TestUVCSetup(unittest.TestCase): """Test the UVC camera platform.""" + def setUp(self): + """Setup things to be run when tests are started.""" + self.hass = get_test_home_assistant() + self.hass.wsgi = mock.MagicMock() + self.hass.config.components = ['http'] + + def tearDown(self): + """Stop everything that was started.""" + self.hass.stop() + @mock.patch('uvcclient.nvr.UVCRemote') @mock.patch.object(uvc, 'UnifiVideoCamera') def test_setup_full_config(self, mock_uvc, mock_remote): @@ -37,16 +48,16 @@ class TestUVCSetup(unittest.TestCase): else: return {'model': 'UVC'} - hass = mock.MagicMock() - hass.pool.worker_count = 2 - hass.config.components = ['http'] mock_remote.return_value.index.return_value = fake_cameras mock_remote.return_value.get_camera.side_effect = fake_get_camera mock_remote.return_value.server_version = (3, 2, 0) - assert setup_component(hass, 'camera', {'camera': config}) + assert setup_component(self.hass, 'camera', {'camera': config}) - mock_remote.assert_called_once_with('foo', 123, 'secret') + self.assertEqual(mock_remote.call_count, 1) + self.assertEqual( + mock_remote.call_args, mock.call('foo', 123, 'secret') + ) mock_uvc.assert_has_calls([ mock.call(mock_remote.return_value, 'id1', 'Front'), mock.call(mock_remote.return_value, 'id2', 'Back'), @@ -65,16 +76,16 @@ class TestUVCSetup(unittest.TestCase): {'uuid': 'one', 'name': 'Front', 'id': 'id1'}, {'uuid': 'two', 'name': 'Back', 'id': 'id2'}, ] - hass = mock.MagicMock() - hass.pool.worker_count = 2 - hass.config.components = ['http'] mock_remote.return_value.index.return_value = fake_cameras mock_remote.return_value.get_camera.return_value = {'model': 'UVC'} mock_remote.return_value.server_version = (3, 2, 0) - assert setup_component(hass, 'camera', {'camera': config}) + assert setup_component(self.hass, 'camera', {'camera': config}) - mock_remote.assert_called_once_with('foo', 7080, 'secret') + self.assertEqual(mock_remote.call_count, 1) + self.assertEqual( + mock_remote.call_args, mock.call('foo', 7080, 'secret') + ) 
mock_uvc.assert_has_calls([ mock.call(mock_remote.return_value, 'id1', 'Front'), mock.call(mock_remote.return_value, 'id2', 'Back'), @@ -93,16 +104,16 @@ class TestUVCSetup(unittest.TestCase): {'uuid': 'one', 'name': 'Front', 'id': 'id1'}, {'uuid': 'two', 'name': 'Back', 'id': 'id2'}, ] - hass = mock.MagicMock() - hass.pool.worker_count = 2 - hass.config.components = ['http'] mock_remote.return_value.index.return_value = fake_cameras mock_remote.return_value.get_camera.return_value = {'model': 'UVC'} mock_remote.return_value.server_version = (3, 1, 3) - assert setup_component(hass, 'camera', {'camera': config}) + assert setup_component(self.hass, 'camera', {'camera': config}) - mock_remote.assert_called_once_with('foo', 7080, 'secret') + self.assertEqual(mock_remote.call_count, 1) + self.assertEqual( + mock_remote.call_args, mock.call('foo', 7080, 'secret') + ) mock_uvc.assert_has_calls([ mock.call(mock_remote.return_value, 'one', 'Front'), mock.call(mock_remote.return_value, 'two', 'Back'), @@ -111,18 +122,14 @@ class TestUVCSetup(unittest.TestCase): @mock.patch.object(uvc, 'UnifiVideoCamera') def test_setup_incomplete_config(self, mock_uvc): """"Test the setup with incomplete configuration.""" - hass = mock.MagicMock() - hass.pool.worker_count = 2 - hass.config.components = ['http'] - assert setup_component( - hass, 'camera', {'platform': 'uvc', 'nvr': 'foo'}) + self.hass, 'camera', {'platform': 'uvc', 'nvr': 'foo'}) assert not mock_uvc.called assert setup_component( - hass, 'camera', {'platform': 'uvc', 'key': 'secret'}) + self.hass, 'camera', {'platform': 'uvc', 'key': 'secret'}) assert not mock_uvc.called assert setup_component( - hass, 'camera', {'platform': 'uvc', 'port': 'invalid'}) + self.hass, 'camera', {'platform': 'uvc', 'port': 'invalid'}) assert not mock_uvc.called @mock.patch.object(uvc, 'UnifiVideoCamera') @@ -136,13 +143,9 @@ class TestUVCSetup(unittest.TestCase): 'nvr': 'foo', 'key': 'secret', } - hass = mock.MagicMock() - hass.pool.worker_count = 2 - hass.config.components = ['http'] - for error in errors: mock_remote.return_value.index.side_effect = error - assert setup_component(hass, 'camera', config) + assert setup_component(self.hass, 'camera', config) assert not mock_uvc.called @@ -179,8 +182,12 @@ class TestUVC(unittest.TestCase): """"Test the login.""" mock_store.return_value.get_camera_password.return_value = 'seekret' self.uvc._login() - mock_camera.assert_called_once_with('host-a', 'admin', 'seekret') - mock_camera.return_value.login.assert_called_once_with() + self.assertEqual(mock_camera.call_count, 1) + self.assertEqual( + mock_camera.call_args, mock.call('host-a', 'admin', 'seekret') + ) + self.assertEqual(mock_camera.return_value.login.call_count, 1) + self.assertEqual(mock_camera.return_value.login.call_args, mock.call()) @mock.patch('uvcclient.store.get_info_store') @mock.patch('uvcclient.camera.UVCCameraClient') @@ -189,8 +196,12 @@ class TestUVC(unittest.TestCase): mock_store.return_value.get_camera_password.return_value = 'seekret' self.nvr.server_version = (3, 1, 3) self.uvc._login() - mock_camera.assert_called_once_with('host-a', 'admin', 'seekret') - mock_camera.return_value.login.assert_called_once_with() + self.assertEqual(mock_camera.call_count, 1) + self.assertEqual( + mock_camera.call_args, mock.call('host-a', 'admin', 'seekret') + ) + self.assertEqual(mock_camera.return_value.login.call_count, 1) + self.assertEqual(mock_camera.return_value.login.call_args, mock.call()) @mock.patch('uvcclient.store.get_info_store') 
@mock.patch('uvcclient.camera.UVCCameraClientV320') @@ -198,8 +209,12 @@ class TestUVC(unittest.TestCase): """"Test the login with no password.""" mock_store.return_value.get_camera_password.return_value = None self.uvc._login() - mock_camera.assert_called_once_with('host-a', 'admin', 'ubnt') - mock_camera.return_value.login.assert_called_once_with() + self.assertEqual(mock_camera.call_count, 1) + self.assertEqual( + mock_camera.call_args, mock.call('host-a', 'admin', 'ubnt') + ) + self.assertEqual(mock_camera.return_value.login.call_count, 1) + self.assertEqual(mock_camera.return_value.login.call_args, mock.call()) @mock.patch('uvcclient.store.get_info_store') @mock.patch('uvcclient.camera.UVCCameraClientV320') @@ -222,8 +237,12 @@ class TestUVC(unittest.TestCase): mock_camera.reset_mock() self.uvc._login() - mock_camera.assert_called_once_with('host-b', 'admin', 'ubnt') - mock_camera.return_value.login.assert_called_once_with() + self.assertEqual(mock_camera.call_count, 1) + self.assertEqual( + mock_camera.call_args, mock.call('host-b', 'admin', 'ubnt') + ) + self.assertEqual(mock_camera.return_value.login.call_count, 1) + self.assertEqual(mock_camera.return_value.login.call_args, mock.call()) @mock.patch('uvcclient.store.get_info_store') @mock.patch('uvcclient.camera.UVCCameraClientV320') @@ -238,7 +257,8 @@ class TestUVC(unittest.TestCase): with mock.patch.object(self.uvc, '_login') as mock_login: mock_login.return_value = False self.assertEqual(None, self.uvc.camera_image()) - mock_login.assert_called_once_with() + self.assertEqual(mock_login.call_count, 1) + self.assertEqual(mock_login.call_args, mock.call()) def test_camera_image_logged_in(self): """"Test the login state.""" @@ -268,7 +288,8 @@ class TestUVC(unittest.TestCase): self.uvc._camera.get_snapshot.side_effect = fake_snapshot with mock.patch.object(self.uvc, '_login') as mock_login: self.assertEqual('image', self.uvc.camera_image()) - mock_login.assert_called_once_with() + self.assertEqual(mock_login.call_count, 1) + self.assertEqual(mock_login.call_args, mock.call()) self.assertEqual([], responses) def test_camera_image_reauths_only_once(self): @@ -277,4 +298,5 @@ class TestUVC(unittest.TestCase): self.uvc._camera.get_snapshot.side_effect = camera.CameraAuthError with mock.patch.object(self.uvc, '_login') as mock_login: self.assertRaises(camera.CameraAuthError, self.uvc.camera_image) - mock_login.assert_called_once_with() + self.assertEqual(mock_login.call_count, 1) + self.assertEqual(mock_login.call_args, mock.call()) diff --git a/tests/components/climate/test_generic_thermostat.py b/tests/components/climate/test_generic_thermostat.py index 313bfd6035f..070ca31f8df 100644 --- a/tests/components/climate/test_generic_thermostat.py +++ b/tests/components/climate/test_generic_thermostat.py @@ -16,7 +16,7 @@ from homeassistant.const import ( from homeassistant.util.unit_system import METRIC_SYSTEM from homeassistant.components import climate -from tests.common import get_test_home_assistant +from tests.common import assert_setup_component, get_test_home_assistant ENTITY = 'climate.test' @@ -44,8 +44,9 @@ class TestSetupClimateGenericThermostat(unittest.TestCase): 'name': 'test', 'target_sensor': ENT_SENSOR } - self.assertFalse(setup_component(self.hass, 'climate', { - 'climate': config})) + with assert_setup_component(0): + setup_component(self.hass, 'climate', { + 'climate': config}) def test_valid_conf(self): """Test set up genreic_thermostat with valid config values.""" diff --git 
a/tests/components/climate/test_honeywell.py b/tests/components/climate/test_honeywell.py index 470e280faa7..13d7eb65257 100644 --- a/tests/components/climate/test_honeywell.py +++ b/tests/components/climate/test_honeywell.py @@ -62,7 +62,8 @@ class TestHoneywell(unittest.TestCase): result = honeywell.setup_platform(hass, config, add_devices) self.assertTrue(result) - mock_sc.assert_called_once_with('user', 'pass') + self.assertEqual(mock_sc.call_count, 1) + self.assertEqual(mock_sc.call_args, mock.call('user', 'pass')) mock_ht.assert_has_calls([ mock.call(mock_sc.return_value, devices_1[0]), mock.call(mock_sc.return_value, devices_2[0]), @@ -174,9 +175,13 @@ class TestHoneywell(unittest.TestCase): hass = mock.MagicMock() add_devices = mock.MagicMock() self.assertTrue(honeywell.setup_platform(hass, config, add_devices)) - mock_evo.assert_called_once_with('user', 'pass') - mock_evo.return_value.temperatures.assert_called_once_with( - force_refresh=True) + self.assertEqual(mock_evo.call_count, 1) + self.assertEqual(mock_evo.call_args, mock.call('user', 'pass')) + self.assertEqual(mock_evo.return_value.temperatures.call_count, 1) + self.assertEqual( + mock_evo.return_value.temperatures.call_args, + mock.call(force_refresh=True) + ) mock_round.assert_has_calls([ mock.call(mock_evo.return_value, 'foo', True, 20.0), mock.call(mock_evo.return_value, 'bar', False, 20.0), @@ -264,13 +269,13 @@ class TestHoneywellRound(unittest.TestCase): def test_attributes(self): """Test the attributes.""" self.assertEqual('House', self.round1.name) - self.assertEqual(TEMP_CELSIUS, self.round1.unit_of_measurement) + self.assertEqual(TEMP_CELSIUS, self.round1.temperature_unit) self.assertEqual(20, self.round1.current_temperature) self.assertEqual(21, self.round1.target_temperature) self.assertFalse(self.round1.is_away_mode_on) self.assertEqual('Hot Water', self.round2.name) - self.assertEqual(TEMP_CELSIUS, self.round2.unit_of_measurement) + self.assertEqual(TEMP_CELSIUS, self.round2.temperature_unit) self.assertEqual(21, self.round2.current_temperature) self.assertEqual(None, self.round2.target_temperature) self.assertFalse(self.round2.is_away_mode_on) @@ -280,17 +285,26 @@ class TestHoneywellRound(unittest.TestCase): self.assertFalse(self.round1.is_away_mode_on) self.round1.turn_away_mode_on() self.assertTrue(self.round1.is_away_mode_on) - self.device.set_temperature.assert_called_once_with('House', 16) + self.assertEqual(self.device.set_temperature.call_count, 1) + self.assertEqual( + self.device.set_temperature.call_args, mock.call('House', 16) + ) self.device.set_temperature.reset_mock() self.round1.turn_away_mode_off() self.assertFalse(self.round1.is_away_mode_on) - self.device.cancel_temp_override.assert_called_once_with('House') + self.assertEqual(self.device.cancel_temp_override.call_count, 1) + self.assertEqual( + self.device.cancel_temp_override.call_args, mock.call('House') + ) def test_set_temperature(self): """Test setting the temperature.""" self.round1.set_temperature(temperature=25) - self.device.set_temperature.assert_called_once_with('House', 25) + self.assertEqual(self.device.set_temperature.call_count, 1) + self.assertEqual( + self.device.set_temperature.call_args, mock.call('House', 25) + ) def test_set_operation_mode(self: unittest.TestCase) -> None: """Test setting the system operation.""" @@ -330,9 +344,9 @@ class TestHoneywellUS(unittest.TestCase): def test_unit_of_measurement(self): """Test the unit of measurement.""" - self.assertEqual(TEMP_FAHRENHEIT, self.honeywell.unit_of_measurement) 
+ self.assertEqual(TEMP_FAHRENHEIT, self.honeywell.temperature_unit) self.device.temperature_unit = 'C' - self.assertEqual(TEMP_CELSIUS, self.honeywell.unit_of_measurement) + self.assertEqual(TEMP_CELSIUS, self.honeywell.temperature_unit) def test_target_temp(self): """Test the target temperature.""" diff --git a/tests/components/cover/test_command_line.py b/tests/components/cover/test_command_line.py index f687094a038..9d1552b2e73 100644 --- a/tests/components/cover/test_command_line.py +++ b/tests/components/cover/test_command_line.py @@ -40,7 +40,10 @@ class TestCommandCover(unittest.TestCase): mock_run.return_value = b' foo bar ' result = self.rs._query_state_value('runme') self.assertEqual('foo bar', result) - mock_run.assert_called_once_with('runme', shell=True) + self.assertEqual(mock_run.call_count, 1) + self.assertEqual( + mock_run.call_args, mock.call('runme', shell=True) + ) def test_state_value(self): """Test with state value.""" diff --git a/tests/components/device_tracker/test_asuswrt.py b/tests/components/device_tracker/test_asuswrt.py index a4d5ee64b32..ad42fd9d9a6 100644 --- a/tests/components/device_tracker/test_asuswrt.py +++ b/tests/components/device_tracker/test_asuswrt.py @@ -1,18 +1,23 @@ """The tests for the ASUSWRT device tracker platform.""" import os +from datetime import timedelta import unittest from unittest import mock import voluptuous as vol -from homeassistant.bootstrap import _setup_component +from homeassistant.bootstrap import setup_component from homeassistant.components import device_tracker +from homeassistant.components.device_tracker import ( + CONF_CONSIDER_HOME, CONF_TRACK_NEW) from homeassistant.components.device_tracker.asuswrt import ( - CONF_PROTOCOL, CONF_MODE, CONF_PUB_KEY, PLATFORM_SCHEMA, DOMAIN) + CONF_PROTOCOL, CONF_MODE, CONF_PUB_KEY, DOMAIN, + PLATFORM_SCHEMA) from homeassistant.const import (CONF_PLATFORM, CONF_PASSWORD, CONF_USERNAME, CONF_HOST) -from tests.common import get_test_home_assistant, get_test_config_dir +from tests.common import ( + get_test_home_assistant, get_test_config_dir, assert_setup_component) FAKEFILE = None @@ -32,6 +37,7 @@ def teardown_module(): class TestComponentsDeviceTrackerASUSWRT(unittest.TestCase): """Tests for the ASUSWRT device tracker platform.""" + hass = None def setup_method(self, _): @@ -49,12 +55,13 @@ class TestComponentsDeviceTrackerASUSWRT(unittest.TestCase): def test_password_or_pub_key_required(self): \ # pylint: disable=invalid-name """Test creating an AsusWRT scanner without a pass or pubkey.""" - self.assertFalse(_setup_component( - self.hass, DOMAIN, {DOMAIN: { - CONF_PLATFORM: 'asuswrt', - CONF_HOST: 'fake_host', - CONF_USERNAME: 'fake_user' - }})) + with assert_setup_component(0): + assert setup_component( + self.hass, DOMAIN, {DOMAIN: { + CONF_PLATFORM: 'asuswrt', + CONF_HOST: 'fake_host', + CONF_USERNAME: 'fake_user' + }}) @mock.patch( 'homeassistant.components.device_tracker.asuswrt.AsusWrtDeviceScanner', @@ -67,13 +74,19 @@ class TestComponentsDeviceTrackerASUSWRT(unittest.TestCase): CONF_PLATFORM: 'asuswrt', CONF_HOST: 'fake_host', CONF_USERNAME: 'fake_user', - CONF_PASSWORD: 'fake_pass' + CONF_PASSWORD: 'fake_pass', + CONF_TRACK_NEW: True, + CONF_CONSIDER_HOME: timedelta(seconds=180) } } - self.assertIsNotNone(_setup_component(self.hass, DOMAIN, conf_dict)) + + with assert_setup_component(1): + assert setup_component(self.hass, DOMAIN, conf_dict) + conf_dict[DOMAIN][CONF_MODE] = 'router' conf_dict[DOMAIN][CONF_PROTOCOL] = 'ssh' - 
asuswrt_mock.assert_called_once_with(conf_dict[DOMAIN]) + self.assertEqual(asuswrt_mock.call_count, 1) + self.assertEqual(asuswrt_mock.call_args, mock.call(conf_dict[DOMAIN])) @mock.patch( 'homeassistant.components.device_tracker.asuswrt.AsusWrtDeviceScanner', @@ -86,15 +99,19 @@ class TestComponentsDeviceTrackerASUSWRT(unittest.TestCase): CONF_PLATFORM: 'asuswrt', CONF_HOST: 'fake_host', CONF_USERNAME: 'fake_user', - CONF_PUB_KEY: FAKEFILE + CONF_PUB_KEY: FAKEFILE, + CONF_TRACK_NEW: True, + CONF_CONSIDER_HOME: timedelta(seconds=180) } } - self.assertIsNotNone(_setup_component(self.hass, DOMAIN, conf_dict)) + with assert_setup_component(1): + assert setup_component(self.hass, DOMAIN, conf_dict) conf_dict[DOMAIN][CONF_MODE] = 'router' conf_dict[DOMAIN][CONF_PROTOCOL] = 'ssh' - asuswrt_mock.assert_called_once_with(conf_dict[DOMAIN]) + self.assertEqual(asuswrt_mock.call_count, 1) + self.assertEqual(asuswrt_mock.call_args, mock.call(conf_dict[DOMAIN])) def test_ssh_login_with_pub_key(self): """Test that login is done with pub_key when configured to.""" @@ -115,8 +132,11 @@ class TestComponentsDeviceTrackerASUSWRT(unittest.TestCase): self.addCleanup(update_mock.stop) asuswrt = device_tracker.asuswrt.AsusWrtDeviceScanner(conf_dict) asuswrt.ssh_connection() - ssh.login.assert_called_once_with('fake_host', 'fake_user', - ssh_key=FAKEFILE) + self.assertEqual(ssh.login.call_count, 1) + self.assertEqual( + ssh.login.call_args, + mock.call('fake_host', 'fake_user', ssh_key=FAKEFILE) + ) def test_ssh_login_with_password(self): """Test that login is done with password when configured to.""" @@ -137,8 +157,11 @@ class TestComponentsDeviceTrackerASUSWRT(unittest.TestCase): self.addCleanup(update_mock.stop) asuswrt = device_tracker.asuswrt.AsusWrtDeviceScanner(conf_dict) asuswrt.ssh_connection() - ssh.login.assert_called_once_with('fake_host', 'fake_user', - password='fake_pass') + self.assertEqual(ssh.login.call_count, 1) + self.assertEqual( + ssh.login.call_args, + mock.call('fake_host', 'fake_user', password='fake_pass') + ) def test_ssh_login_without_password_or_pubkey(self): \ # pylint: disable=invalid-name @@ -163,6 +186,7 @@ class TestComponentsDeviceTrackerASUSWRT(unittest.TestCase): update_mock.start() self.addCleanup(update_mock.stop) - self.assertFalse(_setup_component(self.hass, DOMAIN, - {DOMAIN: conf_dict})) + with assert_setup_component(0): + assert setup_component(self.hass, DOMAIN, + {DOMAIN: conf_dict}) ssh.login.assert_not_called() diff --git a/tests/components/device_tracker/test_init.py b/tests/components/device_tracker/test_init.py index 3e5b4dc96b2..34f89d450eb 100644 --- a/tests/components/device_tracker/test_init.py +++ b/tests/components/device_tracker/test_init.py @@ -2,7 +2,7 @@ # pylint: disable=protected-access,too-many-public-methods import logging import unittest -from unittest.mock import patch +from unittest.mock import call, patch from datetime import datetime, timedelta import os @@ -17,7 +17,7 @@ from homeassistant.exceptions import HomeAssistantError from tests.common import ( get_test_home_assistant, fire_time_changed, fire_service_discovered, - patch_yaml_files) + patch_yaml_files, assert_setup_component) TEST_PLATFORM = {device_tracker.DOMAIN: {CONF_PLATFORM: 'test'}} @@ -288,7 +288,8 @@ class TestComponentsDeviceTracker(unittest.TestCase): device_tracker.see(self.hass, **params) self.hass.block_till_done() assert mock_see.call_count == 1 - mock_see.assert_called_once_with(**params) + self.assertEqual(mock_see.call_count, 1) + self.assertEqual(mock_see.call_args, 
call(**params)) mock_see.reset_mock() params['dev_id'] += chr(233) # e' acute accent from icloud @@ -296,7 +297,8 @@ class TestComponentsDeviceTracker(unittest.TestCase): device_tracker.see(self.hass, **params) self.hass.block_till_done() assert mock_see.call_count == 1 - mock_see.assert_called_once_with(**params) + self.assertEqual(mock_see.call_count, 1) + self.assertEqual(mock_see.call_args, call(**params)) def test_not_write_duplicate_yaml_keys(self): \ # pylint: disable=invalid-name @@ -351,6 +353,7 @@ class TestComponentsDeviceTracker(unittest.TestCase): @patch('homeassistant.components.device_tracker.log_exception') def test_config_failure(self, mock_ex): """Test that the device tracker see failures.""" - assert not setup_component(self.hass, device_tracker.DOMAIN, - {device_tracker.DOMAIN: { - device_tracker.CONF_CONSIDER_HOME: -1}}) + with assert_setup_component(0, device_tracker.DOMAIN): + setup_component(self.hass, device_tracker.DOMAIN, + {device_tracker.DOMAIN: { + device_tracker.CONF_CONSIDER_HOME: -1}}) diff --git a/tests/components/device_tracker/test_owntracks.py b/tests/components/device_tracker/test_owntracks.py index 9ee9c80dc43..2a269a65212 100644 --- a/tests/components/device_tracker/test_owntracks.py +++ b/tests/components/device_tracker/test_owntracks.py @@ -2,17 +2,16 @@ import json import os import unittest +from collections import defaultdict from unittest.mock import patch -from collections import defaultdict +from tests.common import (assert_setup_component, fire_mqtt_message, + get_test_home_assistant, mock_mqtt_component) +import homeassistant.components.device_tracker.owntracks as owntracks from homeassistant.bootstrap import setup_component from homeassistant.components import device_tracker -from homeassistant.const import (STATE_NOT_HOME, CONF_PLATFORM) -import homeassistant.components.device_tracker.owntracks as owntracks - -from tests.common import ( - get_test_home_assistant, mock_mqtt_component, fire_mqtt_message) +from homeassistant.const import CONF_PLATFORM, STATE_NOT_HOME USER = 'greg' DEVICE = 'phone' @@ -207,20 +206,60 @@ MOCK_ENCRYPTED_LOCATION_MESSAGE = { } -class TestDeviceTrackerOwnTracks(unittest.TestCase): +class BaseMQTT(unittest.TestCase): + """Base MQTT assert functions.""" + + hass = None + + def send_message(self, topic, message, corrupt=False): + """Test the sending of a message.""" + str_message = json.dumps(message) + if corrupt: + mod_message = BAD_JSON_PREFIX + str_message + BAD_JSON_SUFFIX + else: + mod_message = str_message + fire_mqtt_message(self.hass, topic, mod_message) + self.hass.block_till_done() + + def assert_location_state(self, location): + """Test the assertion of a location state.""" + state = self.hass.states.get(DEVICE_TRACKER_STATE) + self.assertEqual(state.state, location) + + def assert_location_latitude(self, latitude): + """Test the assertion of a location latitude.""" + state = self.hass.states.get(DEVICE_TRACKER_STATE) + self.assertEqual(state.attributes.get('latitude'), latitude) + + def assert_location_longitude(self, longitude): + """Test the assertion of a location longitude.""" + state = self.hass.states.get(DEVICE_TRACKER_STATE) + self.assertEqual(state.attributes.get('longitude'), longitude) + + def assert_location_accuracy(self, accuracy): + """Test the assertion of a location accuracy.""" + state = self.hass.states.get(DEVICE_TRACKER_STATE) + self.assertEqual(state.attributes.get('gps_accuracy'), accuracy) + + +# pylint: disable=too-many-public-methods +class TestDeviceTrackerOwnTracks(BaseMQTT): 
"""Test the OwnTrack sensor.""" - def setup_method(self, method): + # pylint: disable=invalid-name + + def setup_method(self, _): """Setup things to be run when tests are started.""" self.hass = get_test_home_assistant() mock_mqtt_component(self.hass) - self.assertTrue(setup_component(self.hass, device_tracker.DOMAIN, { - device_tracker.DOMAIN: { - CONF_PLATFORM: 'owntracks', - CONF_MAX_GPS_ACCURACY: 200, - CONF_WAYPOINT_IMPORT: True, - CONF_WAYPOINT_WHITELIST: ['jon', 'greg'] - }})) + with assert_setup_component(1, device_tracker.DOMAIN): + assert setup_component(self.hass, device_tracker.DOMAIN, { + device_tracker.DOMAIN: { + CONF_PLATFORM: 'owntracks', + CONF_MAX_GPS_ACCURACY: 200, + CONF_WAYPOINT_IMPORT: True, + CONF_WAYPOINT_WHITELIST: ['jon', 'greg'] + }}) self.hass.states.set( 'zone.inner', 'zoning', @@ -254,7 +293,7 @@ class TestDeviceTrackerOwnTracks(unittest.TestCase): owntracks.REGIONS_ENTERED = defaultdict(list) owntracks.MOBILE_BEACONS_ACTIVE = defaultdict(list) - def teardown_method(self, method): + def teardown_method(self, _): """Stop everything that was started.""" self.hass.stop() @@ -263,40 +302,6 @@ class TestDeviceTrackerOwnTracks(unittest.TestCase): except FileNotFoundError: pass - def mock_see(**kwargs): - """Fake see method for owntracks.""" - return - - def send_message(self, topic, message, corrupt=False): - """Test the sending of a message.""" - str_message = json.dumps(message) - if corrupt: - mod_message = BAD_JSON_PREFIX + str_message + BAD_JSON_SUFFIX - else: - mod_message = str_message - fire_mqtt_message(self.hass, topic, mod_message) - self.hass.block_till_done() - - def assert_location_state(self, location): - """Test the assertion of a location state.""" - state = self.hass.states.get(DEVICE_TRACKER_STATE) - self.assertEqual(state.state, location) - - def assert_location_latitude(self, latitude): - """Test the assertion of a location latitude.""" - state = self.hass.states.get(DEVICE_TRACKER_STATE) - self.assertEqual(state.attributes.get('latitude'), latitude) - - def assert_location_longitude(self, longitude): - """Test the assertion of a location longitude.""" - state = self.hass.states.get(DEVICE_TRACKER_STATE) - self.assertEqual(state.attributes.get('longitude'), longitude) - - def assert_location_accuracy(self, accuracy): - """Test the assertion of a location accuracy.""" - state = self.hass.states.get(DEVICE_TRACKER_STATE) - self.assertEqual(state.attributes.get('gps_accuracy'), accuracy) - def assert_tracker_state(self, location): """Test the assertion of a tracker state.""" state = self.hass.states.get(REGION_TRACKER_STATE) @@ -312,7 +317,7 @@ class TestDeviceTrackerOwnTracks(unittest.TestCase): state = self.hass.states.get(REGION_TRACKER_STATE) self.assertEqual(state.attributes.get('gps_accuracy'), accuracy) - def test_location_invalid_devid(self): + def test_location_invalid_devid(self): # pylint: disable=invalid-name """Test the update of a location.""" self.send_message('owntracks/paulus/nexus-5x', LOCATION_MESSAGE) @@ -588,7 +593,7 @@ class TestDeviceTrackerOwnTracks(unittest.TestCase): exit_message = REGION_LEAVE_MESSAGE.copy() exit_message['desc'] = IBEACON_DEVICE - for i in range(0, 20): + for _ in range(0, 20): fire_mqtt_message( self.hass, EVENT_TOPIC, json.dumps(enter_message)) fire_mqtt_message( @@ -637,12 +642,16 @@ class TestDeviceTrackerOwnTracks(unittest.TestCase): def test_waypoint_import_no_whitelist(self): """Test import of list of waypoints with no whitelist set.""" + def mock_see(**kwargs): + """Fake see method for 
owntracks.""" + return + test_config = { CONF_PLATFORM: 'owntracks', CONF_MAX_GPS_ACCURACY: 200, CONF_WAYPOINT_IMPORT: True } - owntracks.setup_scanner(self.hass, test_config, self.mock_see) + owntracks.setup_scanner(self.hass, test_config, mock_see) waypoints_message = WAYPOINTS_EXPORTED_MESSAGE.copy() self.send_message(WAYPOINT_TOPIC_BLOCKED, waypoints_message) # Check if it made it into states @@ -673,24 +682,18 @@ class TestDeviceTrackerOwnTracks(unittest.TestCase): new_wayp = self.hass.states.get(WAYPOINT_ENTITY_NAMES[0]) self.assertTrue(wayp == new_wayp) - try: - import libnacl - except (ImportError, OSError): - libnacl = None - @unittest.skipUnless(libnacl, "libnacl/libsodium is not installed") - def test_encrypted_payload_libsodium(self): - """Test sending encrypted message payload.""" - self.assertTrue(device_tracker.setup(self.hass, { - device_tracker.DOMAIN: { - CONF_PLATFORM: 'owntracks', - CONF_SECRET: SECRET_KEY, - }})) +class TestDeviceTrackerOwnTrackConfigs(BaseMQTT): + """Test the OwnTrack sensor.""" - self.send_message(LOCATION_TOPIC, ENCRYPTED_LOCATION_MESSAGE) - self.assert_location_latitude(2.0) + # pylint: disable=invalid-name - def mock_cipher(): + def setup_method(self, method): + """Setup things to be run when tests are started.""" + self.hass = get_test_home_assistant() + mock_mqtt_component(self.hass) + + def mock_cipher(): # pylint: disable=no-method-argument """Return a dummy pickle-based cipher.""" def mock_decrypt(ciphertext, key): """Decrypt/unpickle.""" @@ -705,11 +708,12 @@ class TestDeviceTrackerOwnTracks(unittest.TestCase): mock_cipher) def test_encrypted_payload(self): """Test encrypted payload.""" - self.assertTrue(device_tracker.setup(self.hass, { - device_tracker.DOMAIN: { - CONF_PLATFORM: 'owntracks', - CONF_SECRET: SECRET_KEY, - }})) + with assert_setup_component(1, device_tracker.DOMAIN): + assert setup_component(self.hass, device_tracker.DOMAIN, { + device_tracker.DOMAIN: { + CONF_PLATFORM: 'owntracks', + CONF_SECRET: SECRET_KEY, + }}) self.send_message(LOCATION_TOPIC, MOCK_ENCRYPTED_LOCATION_MESSAGE) self.assert_location_latitude(2.0) @@ -717,24 +721,26 @@ class TestDeviceTrackerOwnTracks(unittest.TestCase): mock_cipher) def test_encrypted_payload_topic_key(self): """Test encrypted payload with a topic key.""" - self.assertTrue(device_tracker.setup(self.hass, { - device_tracker.DOMAIN: { - CONF_PLATFORM: 'owntracks', - CONF_SECRET: { - LOCATION_TOPIC: SECRET_KEY, - }}})) + with assert_setup_component(1, device_tracker.DOMAIN): + assert setup_component(self.hass, device_tracker.DOMAIN, { + device_tracker.DOMAIN: { + CONF_PLATFORM: 'owntracks', + CONF_SECRET: { + LOCATION_TOPIC: SECRET_KEY, + }}}) self.send_message(LOCATION_TOPIC, MOCK_ENCRYPTED_LOCATION_MESSAGE) self.assert_location_latitude(2.0) @patch('homeassistant.components.device_tracker.owntracks.get_cipher', mock_cipher) def test_encrypted_payload_no_key(self): - """Test encrypted payload with no key.""" - self.assertTrue(device_tracker.setup(self.hass, { - device_tracker.DOMAIN: { - CONF_PLATFORM: 'owntracks', - # key missing - }})) + """Test encrypted payload with no key, .""" + with assert_setup_component(1, device_tracker.DOMAIN): + assert setup_component(self.hass, device_tracker.DOMAIN, { + device_tracker.DOMAIN: { + CONF_PLATFORM: 'owntracks', + # key missing + }}) self.send_message(LOCATION_TOPIC, MOCK_ENCRYPTED_LOCATION_MESSAGE) self.assert_location_latitude(None) @@ -742,11 +748,12 @@ class TestDeviceTrackerOwnTracks(unittest.TestCase): mock_cipher) def 
test_encrypted_payload_wrong_key(self): """Test encrypted payload with wrong key.""" - self.assertTrue(device_tracker.setup(self.hass, { - device_tracker.DOMAIN: { - CONF_PLATFORM: 'owntracks', - CONF_SECRET: 'wrong key', - }})) + with assert_setup_component(1, device_tracker.DOMAIN): + assert setup_component(self.hass, device_tracker.DOMAIN, { + device_tracker.DOMAIN: { + CONF_PLATFORM: 'owntracks', + CONF_SECRET: 'wrong key', + }}) self.send_message(LOCATION_TOPIC, MOCK_ENCRYPTED_LOCATION_MESSAGE) self.assert_location_latitude(None) @@ -754,12 +761,13 @@ class TestDeviceTrackerOwnTracks(unittest.TestCase): mock_cipher) def test_encrypted_payload_wrong_topic_key(self): """Test encrypted payload with wrong topic key.""" - self.assertTrue(device_tracker.setup(self.hass, { - device_tracker.DOMAIN: { - CONF_PLATFORM: 'owntracks', - CONF_SECRET: { - LOCATION_TOPIC: 'wrong key' - }}})) + with assert_setup_component(1, device_tracker.DOMAIN): + assert setup_component(self.hass, device_tracker.DOMAIN, { + device_tracker.DOMAIN: { + CONF_PLATFORM: 'owntracks', + CONF_SECRET: { + LOCATION_TOPIC: 'wrong key' + }}}) self.send_message(LOCATION_TOPIC, MOCK_ENCRYPTED_LOCATION_MESSAGE) self.assert_location_latitude(None) @@ -767,11 +775,30 @@ class TestDeviceTrackerOwnTracks(unittest.TestCase): mock_cipher) def test_encrypted_payload_no_topic_key(self): """Test encrypted payload with no topic key.""" - self.assertTrue(device_tracker.setup(self.hass, { - device_tracker.DOMAIN: { - CONF_PLATFORM: 'owntracks', - CONF_SECRET: { - 'owntracks/{}/{}'.format(USER, 'otherdevice'): 'foobar' - }}})) + with assert_setup_component(1, device_tracker.DOMAIN): + assert setup_component(self.hass, device_tracker.DOMAIN, { + device_tracker.DOMAIN: { + CONF_PLATFORM: 'owntracks', + CONF_SECRET: { + 'owntracks/{}/{}'.format(USER, 'otherdevice'): 'foobar' + }}}) self.send_message(LOCATION_TOPIC, MOCK_ENCRYPTED_LOCATION_MESSAGE) self.assert_location_latitude(None) + + try: + import libnacl + except (ImportError, OSError): + libnacl = None + + @unittest.skipUnless(libnacl, "libnacl/libsodium is not installed") + def test_encrypted_payload_libsodium(self): + """Test sending encrypted message payload.""" + with assert_setup_component(1, device_tracker.DOMAIN): + assert setup_component(self.hass, device_tracker.DOMAIN, { + device_tracker.DOMAIN: { + CONF_PLATFORM: 'owntracks', + CONF_SECRET: SECRET_KEY, + }}) + + self.send_message(LOCATION_TOPIC, ENCRYPTED_LOCATION_MESSAGE) + self.assert_location_latitude(2.0) diff --git a/tests/components/device_tracker/test_unifi.py b/tests/components/device_tracker/test_unifi.py index 8e43eb7485e..32ef8976196 100644 --- a/tests/components/device_tracker/test_unifi.py +++ b/tests/components/device_tracker/test_unifi.py @@ -27,9 +27,16 @@ class TestUnifiScanner(unittest.TestCase): } result = unifi.get_scanner(None, config) self.assertEqual(mock_scanner.return_value, result) - mock_ctrl.assert_called_once_with('localhost', 'foo', 'password', - 8443, 'v4', 'default') - mock_scanner.assert_called_once_with(mock_ctrl.return_value) + self.assertEqual(mock_ctrl.call_count, 1) + self.assertEqual( + mock_ctrl.call_args, + mock.call('localhost', 'foo', 'password', 8443, 'v4', 'default') + ) + self.assertEqual(mock_scanner.call_count, 1) + self.assertEqual( + mock_scanner.call_args, + mock.call(mock_ctrl.return_value) + ) @mock.patch('homeassistant.components.device_tracker.unifi.UnifiScanner') @mock.patch.object(controller, 'Controller') @@ -47,9 +54,16 @@ class TestUnifiScanner(unittest.TestCase): } 
result = unifi.get_scanner(None, config) self.assertEqual(mock_scanner.return_value, result) - mock_ctrl.assert_called_once_with('myhost', 'foo', 'password', - 123, 'v4', 'abcdef01') - mock_scanner.assert_called_once_with(mock_ctrl.return_value) + self.assertEqual(mock_ctrl.call_count, 1) + self.assertEqual( + mock_ctrl.call_args, + mock.call('myhost', 'foo', 'password', 123, 'v4', 'abcdef01') + ) + self.assertEqual(mock_scanner.call_count, 1) + self.assertEqual( + mock_scanner.call_args, + mock.call(mock_ctrl.return_value) + ) def test_config_error(self): """Test for configuration errors.""" @@ -94,7 +108,8 @@ class TestUnifiScanner(unittest.TestCase): ] ctrl.get_clients.return_value = fake_clients unifi.UnifiScanner(ctrl) - ctrl.get_clients.assert_called_once_with() + self.assertEqual(ctrl.get_clients.call_count, 1) + self.assertEqual(ctrl.get_clients.call_args, mock.call()) def test_scanner_update_error(self): # pylint: disable=no-self-use """Test the scanner update for error.""" diff --git a/tests/components/garage_door/test_mqtt.py b/tests/components/garage_door/test_mqtt.py index f2f5e61d1fb..c46befe6f1b 100644 --- a/tests/components/garage_door/test_mqtt.py +++ b/tests/components/garage_door/test_mqtt.py @@ -1,18 +1,21 @@ """The tests for the MQTT Garge door platform.""" import unittest -from homeassistant.bootstrap import _setup_component +from homeassistant.bootstrap import setup_component from homeassistant.const import STATE_OPEN, STATE_CLOSED, ATTR_ASSUMED_STATE import homeassistant.components.garage_door as garage_door from tests.common import ( - mock_mqtt_component, fire_mqtt_message, get_test_home_assistant) + mock_mqtt_component, fire_mqtt_message, get_test_home_assistant, + assert_setup_component) class TestGarageDoorMQTT(unittest.TestCase): """Test the MQTT Garage door.""" - def setUp(self): # pylint: disable=invalid-name + # pylint: disable=invalid-name + + def setUp(self): """Setup things to be run when tests are started.""" self.hass = get_test_home_assistant() self.mock_publish = mock_mqtt_component(self.hass) @@ -24,29 +27,31 @@ class TestGarageDoorMQTT(unittest.TestCase): def test_fail_setup_if_no_command_topic(self): """Test if command fails with command topic.""" self.hass.config.components = ['mqtt'] - assert not _setup_component(self.hass, garage_door.DOMAIN, { - garage_door.DOMAIN: { - 'platform': 'mqtt', - 'name': 'test', - 'state_topic': '/home/garage_door/door' - } - }) + with assert_setup_component(0): + assert setup_component(self.hass, garage_door.DOMAIN, { + garage_door.DOMAIN: { + 'platform': 'mqtt', + 'name': 'test', + 'state_topic': '/home/garage_door/door' + } + }) self.assertIsNone(self.hass.states.get('garage_door.test')) def test_controlling_state_via_topic(self): """Test the controlling state via topic.""" - assert _setup_component(self.hass, garage_door.DOMAIN, { - garage_door.DOMAIN: { - 'platform': 'mqtt', - 'name': 'test', - 'state_topic': 'state-topic', - 'command_topic': 'command-topic', - 'state_open': 1, - 'state_closed': 0, - 'service_open': 1, - 'service_close': 0 - } - }) + with assert_setup_component(1): + assert setup_component(self.hass, garage_door.DOMAIN, { + garage_door.DOMAIN: { + 'platform': 'mqtt', + 'name': 'test', + 'state_topic': 'state-topic', + 'command_topic': 'command-topic', + 'state_open': 1, + 'state_closed': 0, + 'service_open': 1, + 'service_close': 0 + } + }) state = self.hass.states.get('garage_door.test') self.assertEqual(STATE_CLOSED, state.state) @@ -66,18 +71,19 @@ class 
TestGarageDoorMQTT(unittest.TestCase): def test_sending_mqtt_commands_and_optimistic(self): """Test the sending MQTT commands in optimistic mode.""" - assert _setup_component(self.hass, garage_door.DOMAIN, { - garage_door.DOMAIN: { - 'platform': 'mqtt', - 'name': 'test', - 'command_topic': 'command-topic', - 'state_open': 'beer state open', - 'state_closed': 'beer state closed', - 'service_open': 'beer open', - 'service_close': 'beer close', - 'qos': '2' - } - }) + with assert_setup_component(1): + assert setup_component(self.hass, garage_door.DOMAIN, { + garage_door.DOMAIN: { + 'platform': 'mqtt', + 'name': 'test', + 'command_topic': 'command-topic', + 'state_open': 'beer state open', + 'state_closed': 'beer state closed', + 'service_open': 'beer open', + 'service_close': 'beer close', + 'qos': '2' + } + }) state = self.hass.states.get('garage_door.test') self.assertEqual(STATE_CLOSED, state.state) @@ -101,19 +107,20 @@ class TestGarageDoorMQTT(unittest.TestCase): def test_controlling_state_via_topic_and_json_message(self): """Test the controlling state via topic and JSON message.""" - assert _setup_component(self.hass, garage_door.DOMAIN, { - garage_door.DOMAIN: { - 'platform': 'mqtt', - 'name': 'test', - 'state_topic': 'state-topic', - 'command_topic': 'command-topic', - 'state_open': 'beer open', - 'state_closed': 'beer closed', - 'service_open': 'beer service open', - 'service_close': 'beer service close', - 'value_template': '{{ value_json.val }}' - } - }) + with assert_setup_component(1): + assert setup_component(self.hass, garage_door.DOMAIN, { + garage_door.DOMAIN: { + 'platform': 'mqtt', + 'name': 'test', + 'state_topic': 'state-topic', + 'command_topic': 'command-topic', + 'state_open': 'beer open', + 'state_closed': 'beer closed', + 'service_open': 'beer service open', + 'service_close': 'beer service close', + 'value_template': '{{ value_json.val }}' + } + }) state = self.hass.states.get('garage_door.test') self.assertEqual(STATE_CLOSED, state.state) diff --git a/tests/components/light/test_mqtt.py b/tests/components/light/test_mqtt.py index 375a4a45905..667f2342603 100644 --- a/tests/components/light/test_mqtt.py +++ b/tests/components/light/test_mqtt.py @@ -75,17 +75,20 @@ light: """ import unittest -from homeassistant.bootstrap import _setup_component +from homeassistant.bootstrap import setup_component from homeassistant.const import STATE_ON, STATE_OFF, ATTR_ASSUMED_STATE import homeassistant.components.light as light from tests.common import ( - get_test_home_assistant, mock_mqtt_component, fire_mqtt_message) + assert_setup_component, get_test_home_assistant, mock_mqtt_component, + fire_mqtt_message) class TestLightMQTT(unittest.TestCase): """Test the MQTT light.""" - def setUp(self): # pylint: disable=invalid-name + # pylint: disable=invalid-name + + def setUp(self): """Setup things to be run when tests are started.""" self.hass = get_test_home_assistant() self.mock_publish = mock_mqtt_component(self.hass) @@ -97,25 +100,28 @@ class TestLightMQTT(unittest.TestCase): def test_fail_setup_if_no_command_topic(self): """Test if command fails with command topic.""" self.hass.config.components = ['mqtt'] - assert not _setup_component(self.hass, light.DOMAIN, { - light.DOMAIN: { - 'platform': 'mqtt', - 'name': 'test', - } - }) + with assert_setup_component(0): + assert setup_component(self.hass, light.DOMAIN, { + light.DOMAIN: { + 'platform': 'mqtt', + 'name': 'test', + } + }) self.assertIsNone(self.hass.states.get('light.test')) - def 
test_no_color_or_brightness_or_color_temp_if_no_topics(self): + def test_no_color_or_brightness_or_color_temp_if_no_topics(self): \ + # pylint: disable=invalid-name """Test if there is no color and brightness if no topic.""" self.hass.config.components = ['mqtt'] - assert _setup_component(self.hass, light.DOMAIN, { - light.DOMAIN: { - 'platform': 'mqtt', - 'name': 'test', - 'state_topic': 'test_light_rgb/status', - 'command_topic': 'test_light_rgb/set', - } - }) + with assert_setup_component(1): + assert setup_component(self.hass, light.DOMAIN, { + light.DOMAIN: { + 'platform': 'mqtt', + 'name': 'test', + 'state_topic': 'test_light_rgb/status', + 'command_topic': 'test_light_rgb/set', + } + }) state = self.hass.states.get('light.test') self.assertEqual(STATE_OFF, state.state) @@ -132,26 +138,28 @@ class TestLightMQTT(unittest.TestCase): self.assertIsNone(state.attributes.get('brightness')) self.assertIsNone(state.attributes.get('color_temp')) - def test_controlling_state_via_topic(self): + def test_controlling_state_via_topic(self): \ + # pylint: disable=invalid-name """Test the controlling of the state via topic.""" + config = {light.DOMAIN: { + 'platform': 'mqtt', + 'name': 'test', + 'state_topic': 'test_light_rgb/status', + 'command_topic': 'test_light_rgb/set', + 'brightness_state_topic': 'test_light_rgb/brightness/status', + 'brightness_command_topic': 'test_light_rgb/brightness/set', + 'rgb_state_topic': 'test_light_rgb/rgb/status', + 'rgb_command_topic': 'test_light_rgb/rgb/set', + 'color_temp_state_topic': 'test_light_rgb/color_temp/status', + 'color_temp_command_topic': 'test_light_rgb/color_temp/set', + 'qos': '0', + 'payload_on': 1, + 'payload_off': 0 + }} + self.hass.config.components = ['mqtt'] - assert _setup_component(self.hass, light.DOMAIN, { - light.DOMAIN: { - 'platform': 'mqtt', - 'name': 'test', - 'state_topic': 'test_light_rgb/status', - 'command_topic': 'test_light_rgb/set', - 'brightness_state_topic': 'test_light_rgb/brightness/status', - 'brightness_command_topic': 'test_light_rgb/brightness/set', - 'rgb_state_topic': 'test_light_rgb/rgb/status', - 'rgb_command_topic': 'test_light_rgb/rgb/set', - 'color_temp_state_topic': 'test_light_rgb/color_temp/status', - 'color_temp_command_topic': 'test_light_rgb/color_temp/set', - 'qos': '0', - 'payload_on': 1, - 'payload_off': 0 - } - }) + with assert_setup_component(1): + assert setup_component(self.hass, light.DOMAIN, config) state = self.hass.states.get('light.test') self.assertEqual(STATE_OFF, state.state) @@ -206,20 +214,21 @@ class TestLightMQTT(unittest.TestCase): def test_controlling_scale(self): """Test the controlling scale.""" self.hass.config.components = ['mqtt'] - assert _setup_component(self.hass, light.DOMAIN, { - light.DOMAIN: { - 'platform': 'mqtt', - 'name': 'test', - 'state_topic': 'test_scale/status', - 'command_topic': 'test_scale/set', - 'brightness_state_topic': 'test_scale/brightness/status', - 'brightness_command_topic': 'test_scale/brightness/set', - 'brightness_scale': '99', - 'qos': 0, - 'payload_on': 'on', - 'payload_off': 'off' - } - }) + with assert_setup_component(1): + assert setup_component(self.hass, light.DOMAIN, { + light.DOMAIN: { + 'platform': 'mqtt', + 'name': 'test', + 'state_topic': 'test_scale/status', + 'command_topic': 'test_scale/set', + 'brightness_state_topic': 'test_scale/brightness/status', + 'brightness_command_topic': 'test_scale/brightness/set', + 'brightness_scale': '99', + 'qos': 0, + 'payload_on': 'on', + 'payload_off': 'off' + } + }) state = 
self.hass.states.get('light.test') self.assertEqual(STATE_OFF, state.state) @@ -250,24 +259,26 @@ class TestLightMQTT(unittest.TestCase): self.assertEqual(255, light_state.attributes['brightness']) - def test_controlling_state_via_topic_with_templates(self): + def test_controlling_state_via_topic_with_templates(self): \ + # pylint: disable=invalid-name """Test the setting og the state with a template.""" + config = {light.DOMAIN: { + 'platform': 'mqtt', + 'name': 'test', + 'state_topic': 'test_light_rgb/status', + 'command_topic': 'test_light_rgb/set', + 'brightness_state_topic': 'test_light_rgb/brightness/status', + 'color_temp_state_topic': 'test_light_rgb/color_temp/status', + 'rgb_state_topic': 'test_light_rgb/rgb/status', + 'state_value_template': '{{ value_json.hello }}', + 'brightness_value_template': '{{ value_json.hello }}', + 'color_temp_value_template': '{{ value_json.hello }}', + 'rgb_value_template': '{{ value_json.hello | join(",") }}', + }} + self.hass.config.components = ['mqtt'] - assert _setup_component(self.hass, light.DOMAIN, { - light.DOMAIN: { - 'platform': 'mqtt', - 'name': 'test', - 'state_topic': 'test_light_rgb/status', - 'command_topic': 'test_light_rgb/set', - 'brightness_state_topic': 'test_light_rgb/brightness/status', - 'color_temp_state_topic': 'test_light_rgb/color_temp/status', - 'rgb_state_topic': 'test_light_rgb/rgb/status', - 'state_value_template': '{{ value_json.hello }}', - 'brightness_value_template': '{{ value_json.hello }}', - 'color_temp_value_template': '{{ value_json.hello }}', - 'rgb_value_template': '{{ value_json.hello | join(",") }}', - } - }) + with assert_setup_component(1): + assert setup_component(self.hass, light.DOMAIN, config) state = self.hass.states.get('light.test') self.assertEqual(STATE_OFF, state.state) @@ -290,22 +301,24 @@ class TestLightMQTT(unittest.TestCase): self.assertEqual([1, 2, 3], state.attributes.get('rgb_color')) self.assertEqual(300, state.attributes.get('color_temp')) - def test_sending_mqtt_commands_and_optimistic(self): + def test_sending_mqtt_commands_and_optimistic(self): \ + # pylint: disable=invalid-name """Test the sending of command in optimistic mode.""" + config = {light.DOMAIN: { + 'platform': 'mqtt', + 'name': 'test', + 'command_topic': 'test_light_rgb/set', + 'brightness_command_topic': 'test_light_rgb/brightness/set', + 'rgb_command_topic': 'test_light_rgb/rgb/set', + 'color_temp_command_topic': 'test_light_rgb/color_temp/set', + 'qos': 2, + 'payload_on': 'on', + 'payload_off': 'off' + }} + self.hass.config.components = ['mqtt'] - assert _setup_component(self.hass, light.DOMAIN, { - light.DOMAIN: { - 'platform': 'mqtt', - 'name': 'test', - 'command_topic': 'test_light_rgb/set', - 'brightness_command_topic': 'test_light_rgb/brightness/set', - 'rgb_command_topic': 'test_light_rgb/rgb/set', - 'color_temp_command_topic': 'test_light_rgb/color_temp/set', - 'qos': 2, - 'payload_on': 'on', - 'payload_off': 'off' - } - }) + with assert_setup_component(1): + assert setup_component(self.hass, light.DOMAIN, config) state = self.hass.states.get('light.test') self.assertEqual(STATE_OFF, state.state) @@ -352,16 +365,17 @@ class TestLightMQTT(unittest.TestCase): def test_show_brightness_if_only_command_topic(self): """Test the brightness if only a command topic is present.""" + config = {light.DOMAIN: { + 'platform': 'mqtt', + 'name': 'test', + 'brightness_command_topic': 'test_light_rgb/brightness/set', + 'command_topic': 'test_light_rgb/set', + 'state_topic': 'test_light_rgb/status', + }} + 
self.hass.config.components = ['mqtt'] - assert _setup_component(self.hass, light.DOMAIN, { - light.DOMAIN: { - 'platform': 'mqtt', - 'name': 'test', - 'brightness_command_topic': 'test_light_rgb/brightness/set', - 'command_topic': 'test_light_rgb/set', - 'state_topic': 'test_light_rgb/status', - } - }) + with assert_setup_component(1): + assert setup_component(self.hass, light.DOMAIN, config) state = self.hass.states.get('light.test') self.assertEqual(STATE_OFF, state.state) @@ -376,16 +390,17 @@ class TestLightMQTT(unittest.TestCase): def test_show_color_temp_only_if_command_topic(self): """Test the color temp only if a command topic is present.""" + config = {light.DOMAIN: { + 'platform': 'mqtt', + 'name': 'test', + 'color_temp_command_topic': 'test_light_rgb/brightness/set', + 'command_topic': 'test_light_rgb/set', + 'state_topic': 'test_light_rgb/status' + }} + self.hass.config.components = ['mqtt'] - assert _setup_component(self.hass, light.DOMAIN, { - light.DOMAIN: { - 'platform': 'mqtt', - 'name': 'test', - 'color_temp_command_topic': 'test_light_rgb/brightness/set', - 'command_topic': 'test_light_rgb/set', - 'state_topic': 'test_light_rgb/status' - } - }) + with assert_setup_component(1): + assert setup_component(self.hass, light.DOMAIN, config) state = self.hass.states.get('light.test') self.assertEqual(STATE_OFF, state.state) diff --git a/tests/components/light/test_mqtt_json.py b/tests/components/light/test_mqtt_json.py index 6ea01dccccd..6fc4a00097d 100755 --- a/tests/components/light/test_mqtt_json.py +++ b/tests/components/light/test_mqtt_json.py @@ -30,11 +30,12 @@ light: import json import unittest -from homeassistant.bootstrap import _setup_component +from homeassistant.bootstrap import _setup_component, setup_component from homeassistant.const import STATE_ON, STATE_OFF, ATTR_ASSUMED_STATE import homeassistant.components.light as light from tests.common import ( - get_test_home_assistant, mock_mqtt_component, fire_mqtt_message) + get_test_home_assistant, mock_mqtt_component, fire_mqtt_message, + assert_setup_component) class TestLightMQTTJSON(unittest.TestCase): @@ -49,18 +50,21 @@ class TestLightMQTTJSON(unittest.TestCase): """Stop everything that was started.""" self.hass.stop() - def test_fail_setup_if_no_command_topic(self): + def test_fail_setup_if_no_command_topic(self): \ + # pylint: disable=invalid-name """Test if setup fails with no command topic.""" self.hass.config.components = ['mqtt'] - assert not _setup_component(self.hass, light.DOMAIN, { - light.DOMAIN: { - 'platform': 'mqtt_json', - 'name': 'test', - } - }) + with assert_setup_component(0): + assert setup_component(self.hass, light.DOMAIN, { + light.DOMAIN: { + 'platform': 'mqtt_json', + 'name': 'test', + } + }) self.assertIsNone(self.hass.states.get('light.test')) - def test_no_color_or_brightness_if_no_config(self): + def test_no_color_or_brightness_if_no_config(self): \ + # pylint: disable=invalid-name """Test if there is no color and brightness if they aren't defined.""" self.hass.config.components = ['mqtt'] assert _setup_component(self.hass, light.DOMAIN, { @@ -85,7 +89,8 @@ class TestLightMQTTJSON(unittest.TestCase): self.assertIsNone(state.attributes.get('rgb_color')) self.assertIsNone(state.attributes.get('brightness')) - def test_controlling_state_via_topic(self): + def test_controlling_state_via_topic(self): \ + # pylint: disable=invalid-name """Test the controlling of the state via topic.""" self.hass.config.components = ['mqtt'] assert _setup_component(self.hass, light.DOMAIN, { @@ -108,10 
+113,9 @@ class TestLightMQTTJSON(unittest.TestCase): # Turn on the light, full white fire_mqtt_message(self.hass, 'test_light_rgb', - '{"state":"ON",' + - '"color":{"r":255,"g":255,"b":255},' + - '"brightness":255}' - ) + '{"state":"ON",' + '"color":{"r":255,"g":255,"b":255},' + '"brightness":255}') self.hass.block_till_done() state = self.hass.states.get('light.test') @@ -127,9 +131,8 @@ class TestLightMQTTJSON(unittest.TestCase): self.assertEqual(STATE_OFF, state.state) fire_mqtt_message(self.hass, 'test_light_rgb', - '{"state":"ON",' + - '"brightness":100}' - ) + '{"state":"ON",' + '"brightness":100}') self.hass.block_till_done() light_state = self.hass.states.get('light.test') @@ -138,16 +141,16 @@ class TestLightMQTTJSON(unittest.TestCase): light_state.attributes['brightness']) fire_mqtt_message(self.hass, 'test_light_rgb', - '{"state":"ON",' + - '"color":{"r":125,"g":125,"b":125}}' - ) + '{"state":"ON",' + '"color":{"r":125,"g":125,"b":125}}') self.hass.block_till_done() light_state = self.hass.states.get('light.test') self.assertEqual([125, 125, 125], light_state.attributes.get('rgb_color')) - def test_sending_mqtt_commands_and_optimistic(self): + def test_sending_mqtt_commands_and_optimistic(self): \ + # pylint: disable=invalid-name """Test the sending of command in optimistic mode.""" self.hass.config.components = ['mqtt'] assert _setup_component(self.hass, light.DOMAIN, { @@ -202,7 +205,8 @@ class TestLightMQTTJSON(unittest.TestCase): self.assertEqual((75, 75, 75), state.attributes['rgb_color']) self.assertEqual(50, state.attributes['brightness']) - def test_flash_short_and_long(self): + def test_flash_short_and_long(self): \ + # pylint: disable=invalid-name """Test for flash length being sent when included.""" self.hass.config.components = ['mqtt'] assert _setup_component(self.hass, light.DOMAIN, { @@ -285,7 +289,8 @@ class TestLightMQTTJSON(unittest.TestCase): self.assertEqual(10, message_json["transition"]) self.assertEqual("OFF", message_json["state"]) - def test_invalid_color_and_brightness_values(self): + def test_invalid_color_and_brightness_values(self): \ + # pylint: disable=invalid-name """Test that invalid color/brightness values are ignored.""" self.hass.config.components = ['mqtt'] assert _setup_component(self.hass, light.DOMAIN, { @@ -308,10 +313,9 @@ class TestLightMQTTJSON(unittest.TestCase): # Turn on the light fire_mqtt_message(self.hass, 'test_light_rgb', - '{"state":"ON",' + - '"color":{"r":255,"g":255,"b":255},' + - '"brightness": 255}' - ) + '{"state":"ON",' + '"color":{"r":255,"g":255,"b":255},' + '"brightness": 255}') self.hass.block_till_done() state = self.hass.states.get('light.test') @@ -321,9 +325,8 @@ class TestLightMQTTJSON(unittest.TestCase): # Bad color values fire_mqtt_message(self.hass, 'test_light_rgb', - '{"state":"ON",' + - '"color":{"r":"bad","g":"val","b":"test"}}' - ) + '{"state":"ON",' + '"color":{"r":"bad","g":"val","b":"test"}}') self.hass.block_till_done() # Color should not have changed @@ -333,9 +336,8 @@ class TestLightMQTTJSON(unittest.TestCase): # Bad brightness values fire_mqtt_message(self.hass, 'test_light_rgb', - '{"state":"ON",' + - '"brightness": "badValue"}' - ) + '{"state":"ON",' + '"brightness": "badValue"}') self.hass.block_till_done() # Brightness should not have changed diff --git a/tests/components/media_player/test_cast.py b/tests/components/media_player/test_cast.py index 9930ae678f3..3fd4ab9929d 100644 --- a/tests/components/media_player/test_cast.py +++ b/tests/components/media_player/test_cast.py @@ -6,12 +6,27 
@@ from unittest.mock import patch from homeassistant.components.media_player import cast +class FakeChromeCast(object): + """A fake Chrome Cast.""" + + def __init__(self, host, port): + """Initialize the fake Chrome Cast.""" + self.host = host + self.port = port + + class TestCastMediaPlayer(unittest.TestCase): """Test the media_player module.""" @patch('homeassistant.components.media_player.cast.CastDevice') - def test_filter_duplicates(self, mock_device): + @patch('pychromecast.get_chromecasts') + def test_filter_duplicates(self, mock_get_chromecasts, mock_device): """Test filtering of duplicates.""" + mock_get_chromecasts.return_value = [ + FakeChromeCast('some_host', cast.DEFAULT_PORT) + ] + + # Test chromecasts as if they were hardcoded in configuration.yaml cast.setup_platform(None, { 'host': 'some_host' }, lambda _: _) @@ -21,6 +36,7 @@ class TestCastMediaPlayer(unittest.TestCase): mock_device.reset_mock() assert not mock_device.called + # Test chromecasts as if they were automatically discovered cast.setup_platform(None, {}, lambda _: _, ('some_host', cast.DEFAULT_PORT)) assert not mock_device.called diff --git a/tests/components/media_player/test_sonos.py b/tests/components/media_player/test_sonos.py index 8647926445d..add1f0c3ce5 100644 --- a/tests/components/media_player/test_sonos.py +++ b/tests/components/media_player/test_sonos.py @@ -67,6 +67,10 @@ class SoCoMock(): """Cause the speaker to separate itself from other speakers.""" return + def uid(self): + """Return a player uid.""" + return "RINCON_XXXXXXXXXXXXXXXXX" + class TestSonosMediaPlayer(unittest.TestCase): """Test the media_player module.""" @@ -125,7 +129,8 @@ class TestSonosMediaPlayer(unittest.TestCase): device = sonos.DEVICES[-1] partymodeMock.return_value = True device.group_players() - partymodeMock.assert_called_once_with() + self.assertEqual(partymodeMock.call_count, 1) + self.assertEqual(partymodeMock.call_args, mock.call()) @mock.patch('soco.SoCo', new=SoCoMock) @mock.patch.object(SoCoMock, 'unjoin') @@ -135,7 +140,8 @@ class TestSonosMediaPlayer(unittest.TestCase): device = sonos.DEVICES[-1] unjoinMock.return_value = True device.unjoin() - unjoinMock.assert_called_once_with() + self.assertEqual(unjoinMock.call_count, 1) + self.assertEqual(unjoinMock.call_args, mock.call()) @mock.patch('soco.SoCo', new=SoCoMock) @mock.patch.object(soco.snapshot.Snapshot, 'snapshot') @@ -145,7 +151,8 @@ class TestSonosMediaPlayer(unittest.TestCase): device = sonos.DEVICES[-1] snapshotMock.return_value = True device.snapshot() - snapshotMock.assert_called_once_with() + self.assertEqual(snapshotMock.call_count, 1) + self.assertEqual(snapshotMock.call_args, mock.call()) @mock.patch('soco.SoCo', new=SoCoMock) @mock.patch.object(soco.snapshot.Snapshot, 'restore') @@ -155,4 +162,5 @@ class TestSonosMediaPlayer(unittest.TestCase): device = sonos.DEVICES[-1] restoreMock.return_value = True device.restore() - restoreMock.assert_called_once_with(True) + self.assertEqual(restoreMock.call_count, 1) + self.assertEqual(restoreMock.call_args, mock.call(True)) diff --git a/tests/components/mqtt/test_init.py b/tests/components/mqtt/test_init.py index bb7b09c5112..cfa0766c8ed 100644 --- a/tests/components/mqtt/test_init.py +++ b/tests/components/mqtt/test_init.py @@ -316,3 +316,27 @@ class TestMQTTCallbacks(unittest.TestCase): def test_invalid_mqtt_topics(self): self.assertRaises(vol.Invalid, mqtt.valid_publish_topic, 'bad+topic') self.assertRaises(vol.Invalid, mqtt.valid_subscribe_topic, 'bad\0one') + + def 
test_receiving_non_utf8_message_gets_logged(self): + """Test receiving a non utf8 encoded message.""" + calls = [] + + def record(event): + """Helper to record calls.""" + calls.append(event) + + payload = 0x9a + topic = 'test_topic' + self.hass.bus.listen_once(mqtt.EVENT_MQTT_MESSAGE_RECEIVED, record) + MQTTMessage = namedtuple('MQTTMessage', ['topic', 'qos', 'payload']) + message = MQTTMessage(topic, 1, payload) + with self.assertLogs(level='ERROR') as test_handle: + mqtt.MQTT_CLIENT._mqtt_on_message( + None, + {'hass': self.hass}, + message) + self.hass.block_till_done() + self.assertIn( + "ERROR:homeassistant.components.mqtt:Illegal utf-8 unicode " + "payload from MQTT topic: %s, Payload: " % topic, + test_handle.output[0]) diff --git a/tests/components/mqtt/test_server.py b/tests/components/mqtt/test_server.py index cf869082f4b..eb7dabb28b3 100644 --- a/tests/components/mqtt/test_server.py +++ b/tests/components/mqtt/test_server.py @@ -1,5 +1,5 @@ """The tests for the MQTT component embedded server.""" -from unittest.mock import MagicMock, patch +from unittest.mock import Mock, MagicMock, patch from homeassistant.bootstrap import _setup_component import homeassistant.components.mqtt as mqtt @@ -18,14 +18,12 @@ class TestMQTT: """Stop everything that was started.""" self.hass.stop() - @patch('passlib.apps.custom_app_context', return_value='') - @patch('tempfile.NamedTemporaryFile', return_value=MagicMock()) + @patch('passlib.apps.custom_app_context', Mock(return_value='')) + @patch('tempfile.NamedTemporaryFile', Mock(return_value=MagicMock())) + @patch('asyncio.new_event_loop', Mock()) @patch('homeassistant.components.mqtt.MQTT') @patch('asyncio.gather') - @patch('asyncio.new_event_loop') - def test_creating_config_with_http_pass(self, mock_new_loop, mock_gather, - mock_mqtt, mock_temp, - mock_context): + def test_creating_config_with_http_pass(self, mock_gather, mock_mqtt): """Test if the MQTT server gets started and subscribe/publish msg.""" self.hass.config.components.append('http') password = 'super_secret' @@ -45,10 +43,10 @@ class TestMQTT: assert mock_mqtt.mock_calls[0][1][5] is None assert mock_mqtt.mock_calls[0][1][6] is None - @patch('tempfile.NamedTemporaryFile', return_value=MagicMock()) + @patch('tempfile.NamedTemporaryFile', Mock(return_value=MagicMock())) + @patch('asyncio.new_event_loop', Mock()) @patch('asyncio.gather') - @patch('asyncio.new_event_loop') - def test_broker_config_fails(self, mock_new_loop, mock_gather, mock_temp): + def test_broker_config_fails(self, mock_gather): """Test if the MQTT component fails if server fails.""" self.hass.config.components.append('http') from hbmqtt.broker import BrokerException diff --git a/tests/components/notify/test_apns.py b/tests/components/notify/test_apns.py new file mode 100644 index 00000000000..7103b6cdc8b --- /dev/null +++ b/tests/components/notify/test_apns.py @@ -0,0 +1,358 @@ +"""The tests for the APNS component.""" +import unittest +import os + +import homeassistant.components.notify as notify +from homeassistant.core import State +from homeassistant.components.notify.apns import ApnsNotificationService +from tests.common import get_test_home_assistant +from homeassistant.config import load_yaml_config_file +from unittest.mock import patch +from apns2.errors import Unregistered + + +class TestApns(unittest.TestCase): + """Test the APNS component.""" + + def test_apns_setup_full(self): + """Test setup with all data.""" + config = { + 'notify': { + 'platform': 'apns', + 'name': 'test_app', + 'sandbox': 'True', + 
'topic': 'testapp.appname', + 'cert_file': 'test_app.pem' + } + } + hass = get_test_home_assistant() + + self.assertTrue(notify.setup(hass, config)) + + def test_apns_setup_missing_name(self): + """Test setup with missing name.""" + config = { + 'notify': { + 'platform': 'apns', + 'sandbox': 'True', + 'topic': 'testapp.appname', + 'cert_file': 'test_app.pem' + } + } + hass = get_test_home_assistant() + self.assertFalse(notify.setup(hass, config)) + + def test_apns_setup_missing_certificate(self): + """Test setup with missing name.""" + config = { + 'notify': { + 'platform': 'apns', + 'topic': 'testapp.appname', + 'name': 'test_app' + } + } + hass = get_test_home_assistant() + self.assertFalse(notify.setup(hass, config)) + + def test_apns_setup_missing_topic(self): + """Test setup with missing topic.""" + config = { + 'notify': { + 'platform': 'apns', + 'cert_file': 'test_app.pem', + 'name': 'test_app' + } + } + hass = get_test_home_assistant() + self.assertFalse(notify.setup(hass, config)) + + def test_register_new_device(self): + """Test registering a new device with a name.""" + config = { + 'notify': { + 'platform': 'apns', + 'name': 'test_app', + 'topic': 'testapp.appname', + 'cert_file': 'test_app.pem' + } + } + hass = get_test_home_assistant() + + devices_path = hass.config.path('test_app_apns.yaml') + with open(devices_path, 'w+') as out: + out.write('5678: {name: test device 2}\n') + + notify.setup(hass, config) + self.assertTrue(hass.services.call('apns', + 'test_app', + {'push_id': '1234', + 'name': 'test device'}, + blocking=True)) + + devices = {str(key): value for (key, value) in + load_yaml_config_file(devices_path).items()} + + test_device_1 = devices.get('1234') + test_device_2 = devices.get('5678') + + self.assertIsNotNone(test_device_1) + self.assertIsNotNone(test_device_2) + + self.assertEqual('test device', test_device_1.get('name')) + + os.remove(devices_path) + + def test_register_device_without_name(self): + """Test registering a without a name.""" + config = { + 'notify': { + 'platform': 'apns', + 'name': 'test_app', + 'topic': 'testapp.appname', + 'cert_file': 'test_app.pem' + } + } + hass = get_test_home_assistant() + + devices_path = hass.config.path('test_app_apns.yaml') + with open(devices_path, 'w+') as out: + out.write('5678: {name: test device 2}\n') + + notify.setup(hass, config) + self.assertTrue(hass.services.call('apns', 'test_app', + {'push_id': '1234'}, + blocking=True)) + + devices = {str(key): value for (key, value) in + load_yaml_config_file(devices_path).items()} + + test_device = devices.get('1234') + + self.assertIsNotNone(test_device) + self.assertIsNone(test_device.get('name')) + + os.remove(devices_path) + + def test_update_existing_device(self): + """Test updating an existing device.""" + config = { + 'notify': { + 'platform': 'apns', + 'name': 'test_app', + 'topic': 'testapp.appname', + 'cert_file': 'test_app.pem' + } + } + hass = get_test_home_assistant() + + devices_path = hass.config.path('test_app_apns.yaml') + with open(devices_path, 'w+') as out: + out.write('1234: {name: test device 1}\n') + out.write('5678: {name: test device 2}\n') + + notify.setup(hass, config) + self.assertTrue(hass.services.call('apns', + 'test_app', + {'push_id': '1234', + 'name': 'updated device 1'}, + blocking=True)) + + devices = {str(key): value for (key, value) in + load_yaml_config_file(devices_path).items()} + + test_device_1 = devices.get('1234') + test_device_2 = devices.get('5678') + + self.assertIsNotNone(test_device_1) + 
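Reviewer note: the APNS tests in this new file all follow the same round trip: seed a small YAML device registry under the config directory, call the registration service, then re-read the file and assert on its entries. A stripped-down sketch of that round trip, assuming PyYAML and a temporary file in place of `load_yaml_config_file` and the real service call (`update_registry` is a hypothetical stand-in, not the component's code):

```python
# Sketch of the write/reload/assert cycle used by the APNS registry tests.
import os
import tempfile

import yaml  # PyYAML, assumed available


def update_registry(path, push_id, name):
    """Merge one device entry into the YAML registry at `path` (illustrative)."""
    with open(path) as inp:
        devices = yaml.safe_load(inp) or {}
    devices[push_id] = {'name': name}
    with open(path, 'w') as out:
        yaml.safe_dump(devices, out)


with tempfile.NamedTemporaryFile('w', suffix='.yaml', delete=False) as seed:
    seed.write('5678: {name: test device 2}\n')

update_registry(seed.name, '1234', 'test device')

with open(seed.name) as inp:
    devices = {str(key): value for key, value in yaml.safe_load(inp).items()}

assert devices['1234']['name'] == 'test device'
assert devices['5678']['name'] == 'test device 2'
os.remove(seed.name)
```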
self.assertIsNotNone(test_device_2) + + self.assertEqual('updated device 1', test_device_1.get('name')) + + os.remove(devices_path) + + def test_update_existing_device_with_tracking_id(self): + """Test updating an existing device that has a tracking id.""" + config = { + 'notify': { + 'platform': 'apns', + 'name': 'test_app', + 'topic': 'testapp.appname', + 'cert_file': 'test_app.pem' + } + } + hass = get_test_home_assistant() + + devices_path = hass.config.path('test_app_apns.yaml') + with open(devices_path, 'w+') as out: + out.write('1234: {name: test device 1, tracking_device_id: tracking123}\n') # nopep8 + out.write('5678: {name: test device 2, tracking_device_id: tracking456}\n') # nopep8 + + notify.setup(hass, config) + self.assertTrue(hass.services.call('apns', + 'test_app', + {'push_id': '1234', + 'name': 'updated device 1'}, + blocking=True)) + + devices = {str(key): value for (key, value) in + load_yaml_config_file(devices_path).items()} + + test_device_1 = devices.get('1234') + test_device_2 = devices.get('5678') + + self.assertIsNotNone(test_device_1) + self.assertIsNotNone(test_device_2) + + self.assertEqual('tracking123', + test_device_1.get('tracking_device_id')) + self.assertEqual('tracking456', + test_device_2.get('tracking_device_id')) + + os.remove(devices_path) + + @patch('apns2.client.APNsClient') + def test_send(self, mock_client): + """Test updating an existing device.""" + send = mock_client.return_value.send_notification + config = { + 'notify': { + 'platform': 'apns', + 'name': 'test_app', + 'topic': 'testapp.appname', + 'cert_file': 'test_app.pem' + } + } + hass = get_test_home_assistant() + + devices_path = hass.config.path('test_app_apns.yaml') + with open(devices_path, 'w+') as out: + out.write('1234: {name: test device 1}\n') + + notify.setup(hass, config) + + self.assertTrue(hass.services.call('notify', 'test_app', + {'message': 'Hello', + 'data': { + 'badge': 1, + 'sound': 'test.mp3', + 'category': 'testing' + } + }, + blocking=True)) + + self.assertTrue(send.called) + self.assertEqual(1, len(send.mock_calls)) + + target = send.mock_calls[0][1][0] + payload = send.mock_calls[0][1][1] + + self.assertEqual('1234', target) + self.assertEqual('Hello', payload.alert) + self.assertEqual(1, payload.badge) + self.assertEqual('test.mp3', payload.sound) + self.assertEqual('testing', payload.category) + + @patch('apns2.client.APNsClient') + def test_send_when_disabled(self, mock_client): + """Test updating an existing device.""" + send = mock_client.return_value.send_notification + config = { + 'notify': { + 'platform': 'apns', + 'name': 'test_app', + 'topic': 'testapp.appname', + 'cert_file': 'test_app.pem' + } + } + hass = get_test_home_assistant() + + devices_path = hass.config.path('test_app_apns.yaml') + with open(devices_path, 'w+') as out: + out.write('1234: {name: test device 1, disabled: True}\n') + + notify.setup(hass, config) + + self.assertTrue(hass.services.call('notify', 'test_app', + {'message': 'Hello', + 'data': { + 'badge': 1, + 'sound': 'test.mp3', + 'category': 'testing' + } + }, + blocking=True)) + + self.assertFalse(send.called) + + @patch('apns2.client.APNsClient') + def test_send_with_state(self, mock_client): + """Test updating an existing device.""" + send = mock_client.return_value.send_notification + + hass = get_test_home_assistant() + + devices_path = hass.config.path('test_app_apns.yaml') + with open(devices_path, 'w+') as out: + out.write('1234: {name: test device 1, tracking_device_id: tracking123}\n') # nopep8 + out.write('5678: 
{name: test device 2, tracking_device_id: tracking456}\n') # nopep8 + + notify_service = ApnsNotificationService( + hass, + 'test_app', + 'testapp.appname', + False, + 'test_app.pem' + ) + + notify_service.device_state_changed_listener( + 'device_tracker.tracking456', + State('device_tracker.tracking456', None), + State('device_tracker.tracking456', 'home')) + + hass.block_till_done() + + notify_service.send_message(message='Hello', target='home') + + self.assertTrue(send.called) + self.assertEqual(1, len(send.mock_calls)) + + target = send.mock_calls[0][1][0] + payload = send.mock_calls[0][1][1] + + self.assertEqual('5678', target) + self.assertEqual('Hello', payload.alert) + + @patch('apns2.client.APNsClient') + def test_disable_when_unregistered(self, mock_client): + """Test disabling a device when it is unregistered.""" + send = mock_client.return_value.send_notification + send.side_effect = Unregistered() + + config = { + 'notify': { + 'platform': 'apns', + 'name': 'test_app', + 'topic': 'testapp.appname', + 'cert_file': 'test_app.pem' + } + } + hass = get_test_home_assistant() + + devices_path = hass.config.path('test_app_apns.yaml') + with open(devices_path, 'w+') as out: + out.write('1234: {name: test device 1}\n') + + notify.setup(hass, config) + + self.assertTrue(hass.services.call('notify', 'test_app', + {'message': 'Hello'}, + blocking=True)) + + devices = {str(key): value for (key, value) in + load_yaml_config_file(devices_path).items()} + + test_device_1 = devices.get('1234') + self.assertIsNotNone(test_device_1) + self.assertEqual(True, test_device_1.get('disabled')) + + os.remove(devices_path) diff --git a/tests/components/notify/test_demo.py b/tests/components/notify/test_demo.py index a0d9f28fe1a..3ec00a84bda 100644 --- a/tests/components/notify/test_demo.py +++ b/tests/components/notify/test_demo.py @@ -1,12 +1,10 @@ """The tests for the notify demo platform.""" -import tempfile import unittest from homeassistant.bootstrap import setup_component import homeassistant.components.notify as notify from homeassistant.components.notify import demo from homeassistant.helpers import script -from homeassistant.util import yaml from tests.common import get_test_home_assistant @@ -70,21 +68,18 @@ class TestNotifyDemo(unittest.TestCase): def test_calling_notify_from_script_loaded_from_yaml_without_title(self): """Test if we can call a notify from a script.""" - yaml_conf = """ -service: notify.notify -data: - data: - push: - sound: US-EN-Morgan-Freeman-Roommate-Is-Arriving.wav -data_template: - message: > - Test 123 {{ 2 + 2 }} -""" - - with tempfile.NamedTemporaryFile() as fp: - fp.write(yaml_conf.encode('utf-8')) - fp.flush() - conf = yaml.load_yaml(fp.name) + conf = { + 'service': 'notify.notify', + 'data': { + 'data': { + 'push': { + 'sound': + 'US-EN-Morgan-Freeman-Roommate-Is-Arriving.wav' + } + } + }, + 'data_template': {'message': 'Test 123 {{ 2 + 2 }}\n'}, + } script.call_from_config(self.hass, conf) self.hass.block_till_done() @@ -99,22 +94,21 @@ data_template: def test_calling_notify_from_script_loaded_from_yaml_with_title(self): """Test if we can call a notify from a script.""" - yaml_conf = """ -service: notify.notify -data: - data: - push: - sound: US-EN-Morgan-Freeman-Roommate-Is-Arriving.wav -data_template: - title: Test - message: > - Test 123 {{ 2 + 2 }} -""" - - with tempfile.NamedTemporaryFile() as fp: - fp.write(yaml_conf.encode('utf-8')) - fp.flush() - conf = yaml.load_yaml(fp.name) + conf = { + 'service': 'notify.notify', + 'data': { + 'data': { + 'push': { + 
'sound': + 'US-EN-Morgan-Freeman-Roommate-Is-Arriving.wav' + } + } + }, + 'data_template': { + 'message': 'Test 123 {{ 2 + 2 }}\n', + 'title': 'Test' + } + } script.call_from_config(self.hass, conf) self.hass.pool.block_till_done() diff --git a/tests/components/notify/test_file.py b/tests/components/notify/test_file.py index eaca5c3d962..f63d16a5711 100644 --- a/tests/components/notify/test_file.py +++ b/tests/components/notify/test_file.py @@ -1,8 +1,7 @@ """The tests for the notify file platform.""" import os import unittest -import tempfile -from unittest.mock import patch +from unittest.mock import call, mock_open, patch from homeassistant.bootstrap import setup_component import homeassistant.components.notify as notify @@ -34,13 +33,19 @@ class TestNotifyFile(unittest.TestCase): }, })) + @patch('homeassistant.components.notify.file.os.stat') @patch('homeassistant.util.dt.utcnow') - def test_notify_file(self, mock_utcnow): + def test_notify_file(self, mock_utcnow, mock_stat): """Test the notify file output.""" mock_utcnow.return_value = dt_util.as_utc(dt_util.now()) + mock_stat.return_value.st_size = 0 - with tempfile.TemporaryDirectory() as tempdirname: - filename = os.path.join(tempdirname, 'notify.txt') + m_open = mock_open() + with patch( + 'homeassistant.components.notify.file.open', + m_open, create=True + ): + filename = 'mock_file' message = 'one, two, testing, testing' self.assertTrue(setup_component(self.hass, notify.DOMAIN, { 'notify': { @@ -58,5 +63,12 @@ class TestNotifyFile(unittest.TestCase): self.hass.services.call('notify', 'test', {'message': message}, blocking=True) - result = open(filename).read() - self.assertEqual(result, "{}{}\n".format(title, message)) + full_filename = os.path.join(self.hass.config.path(), filename) + self.assertEqual(m_open.call_count, 1) + self.assertEqual(m_open.call_args, call(full_filename, 'a')) + + self.assertEqual(m_open.return_value.write.call_count, 2) + self.assertEqual( + m_open.return_value.write.call_args_list, + [call(title), call(message + '\n')] + ) diff --git a/tests/components/notify/test_group.py b/tests/components/notify/test_group.py index e1c6d9f5bd4..4a318a2d3b8 100644 --- a/tests/components/notify/test_group.py +++ b/tests/components/notify/test_group.py @@ -5,7 +5,7 @@ from homeassistant.bootstrap import setup_component import homeassistant.components.notify as notify from homeassistant.components.notify import group -from tests.common import get_test_home_assistant +from tests.common import assert_setup_component, get_test_home_assistant class TestNotifyGroup(unittest.TestCase): @@ -15,15 +15,16 @@ class TestNotifyGroup(unittest.TestCase): """Setup things to be run when tests are started.""" self.hass = get_test_home_assistant() self.events = [] - self.assertTrue(setup_component(self.hass, notify.DOMAIN, { - 'notify': [{ - 'name': 'demo1', - 'platform': 'demo' - }, { - 'name': 'demo2', - 'platform': 'demo' - }] - })) + with assert_setup_component(2): + setup_component(self.hass, notify.DOMAIN, { + 'notify': [{ + 'name': 'demo1', + 'platform': 'demo' + }, { + 'name': 'demo2', + 'platform': 'demo' + }] + }) self.service = group.get_service(self.hass, {'services': [ {'service': 'demo1'}, diff --git a/tests/components/rollershutter/test_command_line.py b/tests/components/rollershutter/test_command_line.py index 5bec5f4e984..d8b5110578c 100644 --- a/tests/components/rollershutter/test_command_line.py +++ b/tests/components/rollershutter/test_command_line.py @@ -41,7 +41,10 @@ class 
TestCommandRollerShutter(unittest.TestCase): mock_run.return_value = b' foo bar ' result = self.rs._query_state_value('runme') self.assertEqual('foo bar', result) - mock_run.assert_called_once_with('runme', shell=True) + self.assertEqual(mock_run.call_count, 1) + self.assertEqual( + mock_run.call_args, mock.call('runme', shell=True) + ) def test_state_value(self): """Test with state value.""" diff --git a/tests/components/sensor/test_darksky.py b/tests/components/sensor/test_darksky.py index f44f7385e5b..09ced049b58 100644 --- a/tests/components/sensor/test_darksky.py +++ b/tests/components/sensor/test_darksky.py @@ -57,12 +57,12 @@ class TestDarkSkySetup(unittest.TestCase): @requests_mock.Mocker() @patch('forecastio.api.get_forecast', wraps=forecastio.api.get_forecast) - def test_setup(self, m, mock_get_forecast): + def test_setup(self, mock_req, mock_get_forecast): """Test for successfully setting up the forecast.io platform.""" - uri = ('https://api.darksky.net\/forecast\/(\w+)\/' - '(-?\d+\.?\d*),(-?\d+\.?\d*)') - m.get(re.compile(uri), - text=load_fixture('darksky.json')) + uri = (r'https://api.(darksky.net|forecast.io)\/forecast\/(\w+)\/' + r'(-?\d+\.?\d*),(-?\d+\.?\d*)') + mock_req.get(re.compile(uri), + text=load_fixture('darksky.json')) darksky.setup_platform(self.hass, self.config, MagicMock()) self.assertTrue(mock_get_forecast.called) self.assertEqual(mock_get_forecast.call_count, 1) diff --git a/tests/components/sensor/test_mfi.py b/tests/components/sensor/test_mfi.py index 5374f34fb12..82577a5b2a0 100644 --- a/tests/components/sensor/test_mfi.py +++ b/tests/components/sensor/test_mfi.py @@ -71,8 +71,13 @@ class TestMfiSensorSetup(unittest.TestCase): config = dict(self.GOOD_CONFIG) del config[self.THING]['port'] assert setup_component(self.hass, self.COMPONENT.DOMAIN, config) - mock_client.assert_called_once_with( - 'foo', 'user', 'pass', port=6443, use_tls=True, verify=True) + self.assertEqual(mock_client.call_count, 1) + self.assertEqual( + mock_client.call_args, + mock.call( + 'foo', 'user', 'pass', port=6443, use_tls=True, verify=True + ) + ) @mock.patch('mficlient.client.MFiClient') def test_setup_with_port(self, mock_client): @@ -80,8 +85,13 @@ class TestMfiSensorSetup(unittest.TestCase): config = dict(self.GOOD_CONFIG) config[self.THING]['port'] = 6123 assert setup_component(self.hass, self.COMPONENT.DOMAIN, config) - mock_client.assert_called_once_with( - 'foo', 'user', 'pass', port=6123, use_tls=True, verify=True) + self.assertEqual(mock_client.call_count, 1) + self.assertEqual( + mock_client.call_args, + mock.call( + 'foo', 'user', 'pass', port=6123, use_tls=True, verify=True + ) + ) @mock.patch('mficlient.client.MFiClient') def test_setup_with_tls_disabled(self, mock_client): @@ -91,8 +101,13 @@ class TestMfiSensorSetup(unittest.TestCase): config[self.THING]['ssl'] = False config[self.THING]['verify_ssl'] = False assert setup_component(self.hass, self.COMPONENT.DOMAIN, config) - mock_client.assert_called_once_with( - 'foo', 'user', 'pass', port=6080, use_tls=False, verify=False) + self.assertEqual(mock_client.call_count, 1) + self.assertEqual( + mock_client.call_args, + mock.call( + 'foo', 'user', 'pass', port=6080, use_tls=False, verify=False + ) + ) @mock.patch('mficlient.client.MFiClient') @mock.patch('homeassistant.components.sensor.mfi.MfiSensor') @@ -180,4 +195,5 @@ class TestMfiSensor(unittest.TestCase): def test_update(self): """Test the update.""" self.sensor.update() - self.port.refresh.assert_called_once_with() + 
self.assertEqual(self.port.refresh.call_count, 1) + self.assertEqual(self.port.refresh.call_args, mock.call()) diff --git a/tests/components/sensor/test_min_max.py b/tests/components/sensor/test_min_max.py new file mode 100644 index 00000000000..bf49d4113c4 --- /dev/null +++ b/tests/components/sensor/test_min_max.py @@ -0,0 +1,161 @@ +"""The test for the min/max sensor platform.""" +import unittest + +from homeassistant.bootstrap import setup_component +from homeassistant.const import ( + STATE_UNKNOWN, ATTR_UNIT_OF_MEASUREMENT, TEMP_CELSIUS, TEMP_FAHRENHEIT) +from tests.common import get_test_home_assistant + + +class TestMinMaxSensor(unittest.TestCase): + """Test the min/max sensor.""" + + def setup_method(self, method): + """Set up things to be run when tests are started.""" + self.hass = get_test_home_assistant() + self.values = [17, 20, 15.2] + self.count = len(self.values) + self.min = min(self.values) + self.max = max(self.values) + self.mean = round(sum(self.values) / self.count, 2) + + def teardown_method(self, method): + """Stop everything that was started.""" + self.hass.stop() + + def test_min_sensor(self): + """Test the min sensor.""" + config = { + 'sensor': { + 'platform': 'min_max', + 'name': 'test', + 'type': 'min', + 'entity_ids': [ + 'sensor.test_1', + 'sensor.test_2', + 'sensor.test_3', + ] + } + } + + assert setup_component(self.hass, 'sensor', config) + + entity_ids = config['sensor']['entity_ids'] + + for entity_id, value in dict(zip(entity_ids, self.values)).items(): + self.hass.states.set(entity_id, value) + self.hass.block_till_done() + + state = self.hass.states.get('sensor.test_min') + + self.assertEqual(str(float(self.min)), state.state) + self.assertEqual(self.max, state.attributes.get('max_value')) + self.assertEqual(self.mean, state.attributes.get('mean')) + + def test_max_sensor(self): + """Test the max sensor.""" + config = { + 'sensor': { + 'platform': 'min_max', + 'name': 'test', + 'type': 'max', + 'entity_ids': [ + 'sensor.test_1', + 'sensor.test_2', + 'sensor.test_3', + ] + } + } + + assert setup_component(self.hass, 'sensor', config) + + entity_ids = config['sensor']['entity_ids'] + + for entity_id, value in dict(zip(entity_ids, self.values)).items(): + self.hass.states.set(entity_id, value) + self.hass.block_till_done() + + state = self.hass.states.get('sensor.test_max') + + self.assertEqual(str(float(self.max)), state.state) + self.assertEqual(self.min, state.attributes.get('min_value')) + self.assertEqual(self.mean, state.attributes.get('mean')) + + def test_not_enough_sensor_value(self): + """Test that there is nothing done if not enough values available.""" + config = { + 'sensor': { + 'platform': 'min_max', + 'name': 'test', + 'type': 'max', + 'entity_ids': [ + 'sensor.test_1', + 'sensor.test_2', + 'sensor.test_3', + ] + } + } + + assert setup_component(self.hass, 'sensor', config) + + entity_ids = config['sensor']['entity_ids'] + + self.hass.states.set(entity_ids[0], self.values[0]) + self.hass.block_till_done() + + state = self.hass.states.get('sensor.test_max') + self.assertEqual(STATE_UNKNOWN, state.state) + + self.hass.states.set(entity_ids[1], self.values[1]) + self.hass.block_till_done() + + state = self.hass.states.get('sensor.test_max') + self.assertEqual(STATE_UNKNOWN, state.state) + + self.hass.states.set(entity_ids[2], self.values[2]) + self.hass.block_till_done() + + state = self.hass.states.get('sensor.test_max') + self.assertNotEqual(STATE_UNKNOWN, state.state) + + def test_different_unit_of_measurement(self): + """Test for 
different unit of measurement.""" + config = { + 'sensor': { + 'platform': 'min_max', + 'name': 'test', + 'type': 'mean', + 'entity_ids': [ + 'sensor.test_1', + 'sensor.test_2', + 'sensor.test_3', + ] + } + } + + assert setup_component(self.hass, 'sensor', config) + + entity_ids = config['sensor']['entity_ids'] + + self.hass.states.set(entity_ids[0], self.values[0], + {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS}) + self.hass.block_till_done() + + state = self.hass.states.get('sensor.test_mean') + + self.assertEqual(STATE_UNKNOWN, state.state) + self.assertEqual('°C', state.attributes.get('unit_of_measurement')) + + self.hass.states.set(entity_ids[1], self.values[1], + {ATTR_UNIT_OF_MEASUREMENT: TEMP_FAHRENHEIT}) + self.hass.block_till_done() + + self.assertEqual(STATE_UNKNOWN, state.state) + self.assertEqual('°C', state.attributes.get('unit_of_measurement')) + + self.hass.states.set(entity_ids[2], self.values[2], + {ATTR_UNIT_OF_MEASUREMENT: '%'}) + self.hass.block_till_done() + + self.assertEqual(STATE_UNKNOWN, state.state) + self.assertEqual('°C', state.attributes.get('unit_of_measurement')) diff --git a/tests/components/sensor/test_pilight.py b/tests/components/sensor/test_pilight.py new file mode 100644 index 00000000000..c78c91545ab --- /dev/null +++ b/tests/components/sensor/test_pilight.py @@ -0,0 +1,120 @@ +"""The tests for the Pilight sensor platform.""" +import logging + +from homeassistant.bootstrap import setup_component +import homeassistant.components.sensor as sensor +from homeassistant.components import pilight + +from tests.common import get_test_home_assistant, assert_setup_component + +HASS = None + + +def fire_pilight_message(protocol, data): + """Fire the fake pilight message.""" + message = {pilight.ATTR_PROTOCOL: protocol} + message.update(data) + HASS.bus.fire(pilight.EVENT, message) + + +def setup_function(): # pylint: disable=invalid-name + """Initialize a Home Assistant server.""" + global HASS + + HASS = get_test_home_assistant() + HASS.config.components = ['pilight'] + + +def teardown_function(): # pylint: disable=invalid-name + """Stop the Home Assistant server.""" + HASS.stop() + + +def test_sensor_value_from_code(): + """Test the setting of value via pilight.""" + with assert_setup_component(1): + setup_component(HASS, sensor.DOMAIN, { + sensor.DOMAIN: { + 'platform': 'pilight', + 'name': 'test', + 'variable': 'test', + 'payload': {'protocol': 'test-protocol'}, + 'unit_of_measurement': 'fav unit' + } + }) + + state = HASS.states.get('sensor.test') + assert state.state == 'unknown' + + unit_of_measurement = state.attributes.get('unit_of_measurement') + assert unit_of_measurement == 'fav unit' + + # Set value from data with correct payload + fire_pilight_message(protocol='test-protocol', + data={'test': 42}) + HASS.block_till_done() + state = HASS.states.get('sensor.test') + assert state.state == '42' + + +def test_disregard_wrong_payload(): + """Test omitting setting of value with wrong payload.""" + with assert_setup_component(1): + setup_component(HASS, sensor.DOMAIN, { + sensor.DOMAIN: { + 'platform': 'pilight', + 'name': 'test_2', + 'variable': 'test', + 'payload': {'uuid': '1-2-3-4', + 'protocol': 'test-protocol_2'} + } + }) + + # Try set value from data with incorrect payload + fire_pilight_message(protocol='test-protocol_2', + data={'test': 'data', + 'uuid': '0-0-0-0'}) + HASS.block_till_done() + state = HASS.states.get('sensor.test_2') + assert state.state == 'unknown' + + # Try set value from data with partially matched payload + 
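Reviewer note: the pilight sensor assertions that continue directly below hinge on payload matching: the sensor only takes a value when every configured payload key/value pair (protocol included) is present in the received data. A stdlib-only stand-in for that check (`payload_matches` is hypothetical, not the component's implementation):

```python
# Hypothetical helper mirroring the matching rule these pilight tests probe.
def payload_matches(required, received):
    """Return True when every required key/value pair appears in received."""
    return all(received.get(key) == value for key, value in required.items())


required = {'protocol': 'test-protocol_2', 'uuid': '1-2-3-4'}

# Wrong uuid: ignored.
assert not payload_matches(required, {'protocol': 'test-protocol_2',
                                      'uuid': '0-0-0-0'})
# Wrong protocol: ignored.
assert not payload_matches(required, {'protocol': 'wrong-protocol',
                                      'uuid': '1-2-3-4'})
# Full match plus extra data: accepted.
assert payload_matches(required, {'protocol': 'test-protocol_2',
                                  'uuid': '1-2-3-4',
                                  'other_payload': 3.141})
```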
fire_pilight_message(protocol='wrong-protocol', + data={'test': 'data', + 'uuid': '1-2-3-4'}) + HASS.block_till_done() + state = HASS.states.get('sensor.test_2') + assert state.state == 'unknown' + + # Try set value from data with fully matched payload + fire_pilight_message(protocol='test-protocol_2', + data={'test': 'data', + 'uuid': '1-2-3-4', + 'other_payload': 3.141}) + HASS.block_till_done() + state = HASS.states.get('sensor.test_2') + assert state.state == 'data' + + +def test_variable_missing(caplog): + """Check if error message when variable missing.""" + caplog.set_level(logging.ERROR) + with assert_setup_component(1): + setup_component(HASS, sensor.DOMAIN, { + sensor.DOMAIN: { + 'platform': 'pilight', + 'name': 'test_3', + 'variable': 'test', + 'payload': {'protocol': 'test-protocol'} + } + }) + + # Create code without sensor variable + fire_pilight_message(protocol='test-protocol', + data={'uuid': '1-2-3-4', + 'other_variable': 3.141}) + HASS.block_till_done() + + logs = caplog.text + + assert 'No variable test in received code' in logs diff --git a/tests/components/sensor/test_sleepiq.py b/tests/components/sensor/test_sleepiq.py index 2ec250f50c2..b0c937c4025 100644 --- a/tests/components/sensor/test_sleepiq.py +++ b/tests/components/sensor/test_sleepiq.py @@ -4,10 +4,10 @@ from unittest.mock import MagicMock import requests_mock -from homeassistant import core as ha from homeassistant.components.sensor import sleepiq from tests.components.test_sleepiq import mock_responses +from tests.common import get_test_home_assistant class TestSleepIQSensorSetup(unittest.TestCase): @@ -22,7 +22,7 @@ class TestSleepIQSensorSetup(unittest.TestCase): def setUp(self): """Initialize values for this testcase class.""" - self.hass = ha.HomeAssistant() + self.hass = get_test_home_assistant() self.username = 'foo' self.password = 'bar' self.config = { diff --git a/tests/components/sensor/test_tcp.py b/tests/components/sensor/test_tcp.py index a20c01eee52..d12eccccc63 100644 --- a/tests/components/sensor/test_tcp.py +++ b/tests/components/sensor/test_tcp.py @@ -1,247 +1,270 @@ """The tests for the TCP sensor platform.""" import socket +import unittest from copy import copy from uuid import uuid4 from unittest.mock import patch, Mock +from tests.common import (get_test_home_assistant, assert_setup_component) +from homeassistant.bootstrap import setup_component from homeassistant.components.sensor import tcp from homeassistant.helpers.entity import Entity -from tests.common import get_test_home_assistant - TEST_CONFIG = { - tcp.CONF_NAME: "test_name", - tcp.CONF_HOST: "test_host", - tcp.CONF_PORT: 12345, - tcp.CONF_TIMEOUT: tcp.DEFAULT_TIMEOUT + 1, - tcp.CONF_PAYLOAD: "test_payload", - tcp.CONF_UNIT: "test_unit", - tcp.CONF_VALUE_TEMPLATE: "test_template", - tcp.CONF_VALUE_ON: "test_on", - tcp.CONF_BUFFER_SIZE: tcp.DEFAULT_BUFFER_SIZE + 1 + 'sensor': { + 'platform': 'tcp', + tcp.CONF_NAME: 'test_name', + tcp.CONF_HOST: 'test_host', + tcp.CONF_PORT: 12345, + tcp.CONF_TIMEOUT: tcp.DEFAULT_TIMEOUT + 1, + tcp.CONF_PAYLOAD: 'test_payload', + tcp.CONF_UNIT_OF_MEASUREMENT: 'test_unit', + tcp.CONF_VALUE_TEMPLATE: 'test_template', + tcp.CONF_VALUE_ON: 'test_on', + tcp.CONF_BUFFER_SIZE: tcp.DEFAULT_BUFFER_SIZE + 1 + }, } + KEYS_AND_DEFAULTS = { - tcp.CONF_NAME: None, tcp.CONF_TIMEOUT: tcp.DEFAULT_TIMEOUT, - tcp.CONF_UNIT: None, + tcp.CONF_UNIT_OF_MEASUREMENT: None, tcp.CONF_VALUE_TEMPLATE: None, tcp.CONF_VALUE_ON: None, tcp.CONF_BUFFER_SIZE: tcp.DEFAULT_BUFFER_SIZE } 
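Reviewer note: with `TEST_CONFIG` now nested under the `sensor` domain key, the hand-rolled `validate_config`/`KEYS_AND_DEFAULTS` handling is exercised through `setup_component`, i.e. through platform schema validation. A generic voluptuous sketch of the mechanism the rewritten tests rely on (illustrative only, not the tcp sensor's actual `PLATFORM_SCHEMA`):

```python
# Generic voluptuous schema showing how defaults get filled in and how a
# misspelled or missing key is rejected, which is what the rewritten TCP
# tests check via setup_component()/assert_setup_component().
import voluptuous as vol

SKETCH_SCHEMA = vol.Schema({
    vol.Required('host'): str,
    vol.Required('port'): int,
    vol.Required('payload'): str,
    vol.Optional('timeout', default=10): int,
    vol.Optional('buffer_size', default=1024): int,
})

validated = SKETCH_SCHEMA({'host': 'test_host', 'port': 12345, 'payload': 'x'})
assert validated['timeout'] == 10        # default applied
assert validated['buffer_size'] == 1024  # default applied

try:
    SKETCH_SCHEMA({'porrt': 1234})       # typo'd key, required keys missing
except vol.MultipleInvalid:
    pass                                 # rejected, as the invalid-config test expects
```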
-@patch('homeassistant.components.sensor.tcp.Sensor.update') -def test_setup_platform_valid_config(mock_update): - """Should check the supplied config and call add_entities with Sensor.""" - add_entities = Mock() - ret = tcp.setup_platform(None, TEST_CONFIG, add_entities) - assert ret is None, "setup_platform() should return None if successful." - assert add_entities.called - assert isinstance(add_entities.call_args[0][0][0], tcp.Sensor) - - -def test_setup_platform_invalid_config(): - """Should check the supplied config and return False if it is invalid.""" - config = copy(TEST_CONFIG) - del config[tcp.CONF_HOST] - assert tcp.setup_platform(None, config, None) is False - - -class TestTCPSensor(): +class TestTCPSensor(unittest.TestCase): """Test the TCP Sensor.""" - def setup_class(cls): + def setup_method(self, method): """Setup things to be run when tests are started.""" - cls.hass = get_test_home_assistant() + self.hass = get_test_home_assistant() - def teardown_class(cls): + def teardown_method(self, method): """Stop everything that was started.""" - cls.hass.stop() + self.hass.stop() - @patch('homeassistant.components.sensor.tcp.Sensor.update') + @patch('homeassistant.components.sensor.tcp.TcpSensor.update') + def test_setup_platform_valid_config(self, mock_update): + """Check a valid configuration and call add_devices with sensor.""" + with assert_setup_component(0, 'sensor'): + assert setup_component(self.hass, 'sensor', TEST_CONFIG) + + add_devices = Mock() + tcp.setup_platform(None, TEST_CONFIG['sensor'], add_devices) + assert add_devices.called + assert isinstance(add_devices.call_args[0][0][0], tcp.TcpSensor) + + def test_setup_platform_invalid_config(self): + """Check an invalid configuration.""" + with assert_setup_component(0): + assert setup_component(self.hass, 'sensor', { + 'sensor': { + 'platform': 'tcp', + 'porrt': 1234, + } + }) + + @patch('homeassistant.components.sensor.tcp.TcpSensor.update') def test_name(self, mock_update): - """Should return the name if set in the config.""" - sensor = tcp.Sensor(self.hass, TEST_CONFIG) - assert sensor.name == TEST_CONFIG[tcp.CONF_NAME] + """Return the name if set in the configuration.""" + sensor = tcp.TcpSensor(self.hass, TEST_CONFIG['sensor']) + assert sensor.name == TEST_CONFIG['sensor'][tcp.CONF_NAME] - @patch('homeassistant.components.sensor.tcp.Sensor.update') + @patch('homeassistant.components.sensor.tcp.TcpSensor.update') def test_name_not_set(self, mock_update): - """Should return the superclass name property if not set in config.""" - config = copy(TEST_CONFIG) + """Return the superclass name property if not set in configuration.""" + config = copy(TEST_CONFIG['sensor']) del config[tcp.CONF_NAME] entity = Entity() - sensor = tcp.Sensor(self.hass, config) + sensor = tcp.TcpSensor(self.hass, config) assert sensor.name == entity.name - @patch('homeassistant.components.sensor.tcp.Sensor.update') + @patch('homeassistant.components.sensor.tcp.TcpSensor.update') def test_state(self, mock_update): - """Should return the contents of _state.""" - sensor = tcp.Sensor(self.hass, TEST_CONFIG) + """Return the contents of _state.""" + sensor = tcp.TcpSensor(self.hass, TEST_CONFIG['sensor']) uuid = str(uuid4()) sensor._state = uuid assert sensor.state == uuid - @patch('homeassistant.components.sensor.tcp.Sensor.update') + @patch('homeassistant.components.sensor.tcp.TcpSensor.update') def test_unit_of_measurement(self, mock_update): - """Should return the configured unit of measurement.""" - sensor = tcp.Sensor(self.hass, TEST_CONFIG) - 
assert sensor.unit_of_measurement == TEST_CONFIG[tcp.CONF_UNIT] + """Return the configured unit of measurement.""" + sensor = tcp.TcpSensor(self.hass, TEST_CONFIG['sensor']) + assert sensor.unit_of_measurement == \ + TEST_CONFIG['sensor'][tcp.CONF_UNIT_OF_MEASUREMENT] - @patch("homeassistant.components.sensor.tcp.Sensor.update") + @patch('homeassistant.components.sensor.tcp.TcpSensor.update') def test_config_valid_keys(self, *args): - """Should store valid keys in _config.""" - sensor = tcp.Sensor(self.hass, TEST_CONFIG) - for key in TEST_CONFIG: + """Store valid keys in _config.""" + sensor = tcp.TcpSensor(self.hass, TEST_CONFIG['sensor']) + del TEST_CONFIG['sensor']['platform'] + + for key in TEST_CONFIG['sensor']: assert key in sensor._config def test_validate_config_valid_keys(self): - """Should return True when provided with the correct keys.""" - assert tcp.Sensor.validate_config(TEST_CONFIG) + """Return True when provided with the correct keys.""" + with assert_setup_component(0, 'sensor'): + assert setup_component(self.hass, 'sensor', TEST_CONFIG) - @patch("homeassistant.components.sensor.tcp.Sensor.update") + @patch('homeassistant.components.sensor.tcp.TcpSensor.update') def test_config_invalid_keys(self, mock_update): """Shouldn't store invalid keys in _config.""" - config = copy(TEST_CONFIG) + config = copy(TEST_CONFIG['sensor']) config.update({ - "a": "test_a", - "b": "test_b", - "c": "test_c" + 'a': 'test_a', + 'b': 'test_b', + 'c': 'test_c' }) - sensor = tcp.Sensor(self.hass, config) - for invalid_key in "abc": + sensor = tcp.TcpSensor(self.hass, config) + for invalid_key in 'abc': assert invalid_key not in sensor._config def test_validate_config_invalid_keys(self): """Test with invalid keys plus some extra.""" - config = copy(TEST_CONFIG) + config = copy(TEST_CONFIG['sensor']) config.update({ - "a": "test_a", - "b": "test_b", - "c": "test_c" + 'a': 'test_a', + 'b': 'test_b', + 'c': 'test_c' }) - assert tcp.Sensor.validate_config(config) + with assert_setup_component(0, 'sensor'): + assert setup_component(self.hass, 'sensor', {'tcp': config}) - @patch("homeassistant.components.sensor.tcp.Sensor.update") + @patch('homeassistant.components.sensor.tcp.TcpSensor.update') def test_config_uses_defaults(self, mock_update): - """Should use defaults where appropriate.""" - config = copy(TEST_CONFIG) - for key in KEYS_AND_DEFAULTS.keys(): + """Check if defaults were set.""" + config = copy(TEST_CONFIG['sensor']) + + for key in KEYS_AND_DEFAULTS: del config[key] - sensor = tcp.Sensor(self.hass, config) + + with assert_setup_component(1) as result_config: + assert setup_component(self.hass, 'sensor', { + 'sensor': config, + }) + + sensor = tcp.TcpSensor(self.hass, result_config['sensor'][0]) + for key, default in KEYS_AND_DEFAULTS.items(): assert sensor._config[key] == default def test_validate_config_missing_defaults(self): - """Should return True when defaulted keys are not provided.""" - config = copy(TEST_CONFIG) - for key in KEYS_AND_DEFAULTS.keys(): + """Return True when defaulted keys are not provided.""" + config = copy(TEST_CONFIG['sensor']) + + for key in KEYS_AND_DEFAULTS: del config[key] - assert tcp.Sensor.validate_config(config) + + with assert_setup_component(0, 'sensor'): + assert setup_component(self.hass, 'sensor', {'tcp': config}) def test_validate_config_missing_required(self): - """Should return False when required config items are missing.""" - for key in TEST_CONFIG: + """Return False when required config items are missing.""" + for key in TEST_CONFIG['sensor']: if 
key in KEYS_AND_DEFAULTS: continue - config = copy(TEST_CONFIG) + config = copy(TEST_CONFIG['sensor']) del config[key] - assert not tcp.Sensor.validate_config(config), ( - "validate_config() should have returned False since %r was not" - "provided." % key) + with assert_setup_component(0, 'sensor'): + assert setup_component(self.hass, 'sensor', {'tcp': config}) - @patch("homeassistant.components.sensor.tcp.Sensor.update") + @patch('homeassistant.components.sensor.tcp.TcpSensor.update') def test_init_calls_update(self, mock_update): - """Should call update() method during __init__().""" - tcp.Sensor(self.hass, TEST_CONFIG) + """Call update() method during __init__().""" + tcp.TcpSensor(self.hass, TEST_CONFIG) assert mock_update.called - @patch("socket.socket") - @patch("select.select", return_value=(True, False, False)) + @patch('socket.socket') + @patch('select.select', return_value=(True, False, False)) def test_update_connects_to_host_and_port(self, mock_select, mock_socket): - """Should connect to the configured host and port.""" - tcp.Sensor(self.hass, TEST_CONFIG) + """Connect to the configured host and port.""" + tcp.TcpSensor(self.hass, TEST_CONFIG['sensor']) mock_socket = mock_socket().__enter__() assert mock_socket.connect.mock_calls[0][1] == (( - TEST_CONFIG[tcp.CONF_HOST], - TEST_CONFIG[tcp.CONF_PORT]),) + TEST_CONFIG['sensor'][tcp.CONF_HOST], + TEST_CONFIG['sensor'][tcp.CONF_PORT]),) - @patch("socket.socket.connect", side_effect=socket.error()) + @patch('socket.socket.connect', side_effect=socket.error()) def test_update_returns_if_connecting_fails(self, *args): - """Should return if connecting to host fails.""" - with patch("homeassistant.components.sensor.tcp.Sensor.update"): - sensor = tcp.Sensor(self.hass, TEST_CONFIG) + """Return if connecting to host fails.""" + with patch('homeassistant.components.sensor.tcp.TcpSensor.update'): + sensor = tcp.TcpSensor(self.hass, TEST_CONFIG['sensor']) assert sensor.update() is None - @patch("socket.socket.connect") - @patch("socket.socket.send", side_effect=socket.error()) + @patch('socket.socket.connect') + @patch('socket.socket.send', side_effect=socket.error()) def test_update_returns_if_sending_fails(self, *args): - """Should return if sending fails.""" - with patch("homeassistant.components.sensor.tcp.Sensor.update"): - sensor = tcp.Sensor(self.hass, TEST_CONFIG) + """Return if sending fails.""" + with patch('homeassistant.components.sensor.tcp.TcpSensor.update'): + sensor = tcp.TcpSensor(self.hass, TEST_CONFIG['sensor']) assert sensor.update() is None - @patch("socket.socket.connect") - @patch("socket.socket.send") - @patch("select.select", return_value=(False, False, False)) + @patch('socket.socket.connect') + @patch('socket.socket.send') + @patch('select.select', return_value=(False, False, False)) def test_update_returns_if_select_fails(self, *args): - """Should return if select fails to return a socket.""" - with patch("homeassistant.components.sensor.tcp.Sensor.update"): - sensor = tcp.Sensor(self.hass, TEST_CONFIG) + """Return if select fails to return a socket.""" + with patch('homeassistant.components.sensor.tcp.TcpSensor.update'): + sensor = tcp.TcpSensor(self.hass, TEST_CONFIG['sensor']) assert sensor.update() is None - @patch("socket.socket") - @patch("select.select", return_value=(True, False, False)) + @patch('socket.socket') + @patch('select.select', return_value=(True, False, False)) def test_update_sends_payload(self, mock_select, mock_socket): - """Should send the configured payload as bytes.""" - 
tcp.Sensor(self.hass, TEST_CONFIG) + """Send the configured payload as bytes.""" + tcp.TcpSensor(self.hass, TEST_CONFIG['sensor']) mock_socket = mock_socket().__enter__() mock_socket.send.assert_called_with( - TEST_CONFIG[tcp.CONF_PAYLOAD].encode() + TEST_CONFIG['sensor'][tcp.CONF_PAYLOAD].encode() ) - @patch("socket.socket") - @patch("select.select", return_value=(True, False, False)) + @patch('socket.socket') + @patch('select.select', return_value=(True, False, False)) def test_update_calls_select_with_timeout(self, mock_select, mock_socket): - """Should provide the timeout argument to select.""" - tcp.Sensor(self.hass, TEST_CONFIG) + """Provide the timeout argument to select.""" + tcp.TcpSensor(self.hass, TEST_CONFIG['sensor']) mock_socket = mock_socket().__enter__() mock_select.assert_called_with( - [mock_socket], [], [], TEST_CONFIG[tcp.CONF_TIMEOUT]) + [mock_socket], [], [], TEST_CONFIG['sensor'][tcp.CONF_TIMEOUT]) - @patch("socket.socket") - @patch("select.select", return_value=(True, False, False)) + @patch('socket.socket') + @patch('select.select', return_value=(True, False, False)) def test_update_receives_packet_and_sets_as_state( self, mock_select, mock_socket): """Test the response from the socket and set it as the state.""" - test_value = "test_value" + test_value = 'test_value' mock_socket = mock_socket().__enter__() mock_socket.recv.return_value = test_value.encode() - config = copy(TEST_CONFIG) + config = copy(TEST_CONFIG['sensor']) del config[tcp.CONF_VALUE_TEMPLATE] - sensor = tcp.Sensor(self.hass, config) + sensor = tcp.TcpSensor(self.hass, config) assert sensor._state == test_value - @patch("socket.socket") - @patch("select.select", return_value=(True, False, False)) + @patch('socket.socket') + @patch('select.select', return_value=(True, False, False)) def test_update_renders_value_in_template(self, mock_select, mock_socket): - """Should render the value in the provided template.""" - test_value = "test_value" + """Render the value in the provided template.""" + test_value = 'test_value' mock_socket = mock_socket().__enter__() mock_socket.recv.return_value = test_value.encode() - config = copy(TEST_CONFIG) - config[tcp.CONF_VALUE_TEMPLATE] = "{{ value }} {{ 1+1 }}" - sensor = tcp.Sensor(self.hass, config) - assert sensor._state == "%s 2" % test_value + config = copy(TEST_CONFIG['sensor']) + config[tcp.CONF_VALUE_TEMPLATE] = '{{ value }} {{ 1+1 }}' + sensor = tcp.TcpSensor(self.hass, config) + assert sensor._state == '%s 2' % test_value - @patch("socket.socket") - @patch("select.select", return_value=(True, False, False)) + @patch('socket.socket') + @patch('select.select', return_value=(True, False, False)) def test_update_returns_if_template_render_fails( self, mock_select, mock_socket): - """Should return None if rendering the template fails.""" - test_value = "test_value" + """Return None if rendering the template fails.""" + test_value = 'test_value' mock_socket = mock_socket().__enter__() mock_socket.recv.return_value = test_value.encode() - config = copy(TEST_CONFIG) + config = copy(TEST_CONFIG['sensor']) config[tcp.CONF_VALUE_TEMPLATE] = "{{ this won't work" - sensor = tcp.Sensor(self.hass, config) + sensor = tcp.TcpSensor(self.hass, config) assert sensor.update() is None diff --git a/tests/components/sensor/test_template.py b/tests/components/sensor/test_template.py index b80f8032bf1..58f0fb84ac7 100644 --- a/tests/components/sensor/test_template.py +++ b/tests/components/sensor/test_template.py @@ -1,12 +1,15 @@ """The test for the Template sensor platform.""" 
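Reviewer note: the test_template.py conversion that follows leans on platform validation rejecting templates that do not even parse, which is why `assert_setup_component(0)` suffices for the syntax-error case, while a template that merely references a missing state still sets up and renders. Home Assistant templates are Jinja2 underneath, so the distinction can be sketched with plain Jinja2 (illustrative only, not the component's template helpers; `FakeStates` is a made-up stand-in for the `states` object Home Assistant injects):

```python
# Jinja2 sketch of the two cases the template sensor tests separate:
# a template that fails to parse vs. one that parses but renders "empty".
import jinja2


class FakeStates:
    """Tiny stand-in for the `states` helper Home Assistant injects."""

    def __getattr__(self, name):
        if name.startswith('__'):
            raise AttributeError(name)
        return FakeStates()

    def __str__(self):
        return ''


env = jinja2.Environment()

# Parses fine; a missing entity simply renders as an empty value, giving
# the "It ." expectation seen in test_template below.
ok = env.from_string("It {{ states.sensor.test_state.state }}.")
assert ok.render(states=FakeStates()) == 'It .'

# Does not parse at all -> TemplateSyntaxError at setup time, the analogue
# of the assert_setup_component(0) case for "{% if rubbish %}".
try:
    env.from_string("{% if rubbish %}")
except jinja2.TemplateSyntaxError:
    pass
```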
-import homeassistant.bootstrap as bootstrap +from homeassistant.bootstrap import setup_component -from tests.common import get_test_home_assistant +from tests.common import get_test_home_assistant, assert_setup_component class TestTemplateSensor: """Test the Template sensor.""" + hass = None + # pylint: disable=invalid-name + def setup_method(self, method): """Setup things to be run when tests are started.""" self.hass = get_test_home_assistant() @@ -17,17 +20,18 @@ class TestTemplateSensor: def test_template(self): """Test template.""" - assert bootstrap.setup_component(self.hass, 'sensor', { - 'sensor': { - 'platform': 'template', - 'sensors': { - 'test_template_sensor': { - 'value_template': - "It {{ states.sensor.test_state.state }}." + with assert_setup_component(1): + assert setup_component(self.hass, 'sensor', { + 'sensor': { + 'platform': 'template', + 'sensors': { + 'test_template_sensor': { + 'value_template': + "It {{ states.sensor.test_state.state }}." + } } } - } - }) + }) state = self.hass.states.get('sensor.test_template_sensor') assert state.state == 'It .' @@ -39,83 +43,89 @@ class TestTemplateSensor: def test_template_syntax_error(self): """Test templating syntax error.""" - assert not bootstrap.setup_component(self.hass, 'sensor', { - 'sensor': { - 'platform': 'template', - 'sensors': { - 'test_template_sensor': { - 'value_template': - "{% if rubbish %}" + with assert_setup_component(0): + assert setup_component(self.hass, 'sensor', { + 'sensor': { + 'platform': 'template', + 'sensors': { + 'test_template_sensor': { + 'value_template': + "{% if rubbish %}" + } } } - } - }) + }) assert self.hass.states.all() == [] def test_template_attribute_missing(self): """Test missing attribute template.""" - assert bootstrap.setup_component(self.hass, 'sensor', { - 'sensor': { - 'platform': 'template', - 'sensors': { - 'test_template_sensor': { - 'value_template': - "It {{ states.sensor.test_state.attributes.missing }}." + with assert_setup_component(1): + assert setup_component(self.hass, 'sensor', { + 'sensor': { + 'platform': 'template', + 'sensors': { + 'test_template_sensor': { + 'value_template': 'It {{ states.sensor.test_state' + '.attributes.missing }}.' 
+ } } } - } - }) + }) state = self.hass.states.get('sensor.test_template_sensor') assert state.state == 'unknown' def test_invalid_name_does_not_create(self): """Test invalid name.""" - assert not bootstrap.setup_component(self.hass, 'sensor', { - 'sensor': { - 'platform': 'template', - 'sensors': { - 'test INVALID sensor': { - 'value_template': - "{{ states.sensor.test_state.state }}" + with assert_setup_component(0): + assert setup_component(self.hass, 'sensor', { + 'sensor': { + 'platform': 'template', + 'sensors': { + 'test INVALID sensor': { + 'value_template': + "{{ states.sensor.test_state.state }}" + } } } - } - }) + }) assert self.hass.states.all() == [] def test_invalid_sensor_does_not_create(self): """Test invalid sensor.""" - assert not bootstrap.setup_component(self.hass, 'sensor', { - 'sensor': { - 'platform': 'template', - 'sensors': { - 'test_template_sensor': 'invalid' + with assert_setup_component(0): + assert setup_component(self.hass, 'sensor', { + 'sensor': { + 'platform': 'template', + 'sensors': { + 'test_template_sensor': 'invalid' + } } - } - }) + }) assert self.hass.states.all() == [] def test_no_sensors_does_not_create(self): """Test no sensors.""" - assert not bootstrap.setup_component(self.hass, 'sensor', { - 'sensor': { - 'platform': 'template' - } - }) + with assert_setup_component(0): + assert setup_component(self.hass, 'sensor', { + 'sensor': { + 'platform': 'template' + } + }) assert self.hass.states.all() == [] def test_missing_template_does_not_create(self): """Test missing template.""" - assert not bootstrap.setup_component(self.hass, 'sensor', { - 'sensor': { - 'platform': 'template', - 'sensors': { - 'test_template_sensor': { - 'not_value_template': - "{{ states.sensor.test_state.state }}" + with assert_setup_component(0): + assert setup_component(self.hass, 'sensor', { + 'sensor': { + 'platform': 'template', + 'sensors': { + 'test_template_sensor': { + 'not_value_template': + "{{ states.sensor.test_state.state }}" + } } } - } - }) + }) assert self.hass.states.all() == [] diff --git a/tests/components/sensor/test_wunderground.py b/tests/components/sensor/test_wunderground.py index ffb070f9ab9..f7f2e958ef7 100644 --- a/tests/components/sensor/test_wunderground.py +++ b/tests/components/sensor/test_wunderground.py @@ -11,7 +11,7 @@ VALID_CONFIG_PWS = { 'api_key': 'foo', 'pws_id': 'bar', 'monitored_conditions': [ - 'weather', 'feelslike_c' + 'weather', 'feelslike_c', 'alerts' ] } @@ -19,17 +19,19 @@ VALID_CONFIG = { 'platform': 'wunderground', 'api_key': 'foo', 'monitored_conditions': [ - 'weather', 'feelslike_c' + 'weather', 'feelslike_c', 'alerts' ] } FEELS_LIKE = '40' WEATHER = 'Clear' ICON_URL = 'http://icons.wxug.com/i/c/k/clear.gif' +ALERT_MESSAGE = 'This is a test alert message' def mocked_requests_get(*args, **kwargs): """Mock requests.get invocations.""" + # pylint: disable=too-few-public-methods class MockResponse: """Class to represent a mocked response.""" @@ -61,26 +63,36 @@ def mocked_requests_get(*args, **kwargs): "feelslike_c": FEELS_LIKE, "weather": WEATHER, "icon_url": ICON_URL - } + }, "alerts": [ + { + "type": 'FLO', + "description": "Areal Flood Warning", + "date": "9:36 PM CDT on September 22, 2016", + "expires": "10:00 AM CDT on September 23, 2016", + "message": ALERT_MESSAGE, + }, + + ], }, 200) else: return MockResponse({ - "response": { - "version": "0.1", - "termsofService": - "http://www.wunderground.com/weather/api/d/terms.html", - "features": {}, - "error": { - "type": "keynotfound", - "description": "this key does not 
exist" - } + "response": { + "version": "0.1", + "termsofService": + "http://www.wunderground.com/weather/api/d/terms.html", + "features": {}, + "error": { + "type": "keynotfound", + "description": "this key does not exist" } - }, 200) + } + }, 200) class TestWundergroundSetup(unittest.TestCase): """Test the WUnderground platform.""" + # pylint: disable=invalid-name DEVICES = [] def add_devices(self, devices): @@ -107,14 +119,13 @@ class TestWundergroundSetup(unittest.TestCase): self.add_devices, None)) self.assertTrue( wunderground.setup_platform(self.hass, VALID_CONFIG, - self.add_devices, - None)) + self.add_devices, None)) invalid_config = { 'platform': 'wunderground', 'api_key': 'BOB', 'pws_id': 'bar', 'monitored_conditions': [ - 'weather', 'feelslike_c' + 'weather', 'feelslike_c', 'alerts' ] } @@ -128,11 +139,17 @@ class TestWundergroundSetup(unittest.TestCase): wunderground.setup_platform(self.hass, VALID_CONFIG, self.add_devices, None) for device in self.DEVICES: + device.update() self.assertTrue(str(device.name).startswith('PWS_')) if device.name == 'PWS_weather': self.assertEqual(ICON_URL, device.entity_picture) self.assertEqual(WEATHER, device.state) self.assertIsNone(device.unit_of_measurement) + elif device.name == 'PWS_alerts': + self.assertEqual(1, device.state) + self.assertEqual(ALERT_MESSAGE, + device.device_state_attributes['Message']) + self.assertIsNone(device.entity_picture) else: self.assertIsNone(device.entity_picture) self.assertEqual(FEELS_LIKE, device.state) diff --git a/tests/components/sensor/test_yahoo_finance.py b/tests/components/sensor/test_yahoo_finance.py new file mode 100644 index 00000000000..5cbbf50dcab --- /dev/null +++ b/tests/components/sensor/test_yahoo_finance.py @@ -0,0 +1,41 @@ +"""The tests for the Yahoo Finance platform.""" +import json + +import unittest +from unittest.mock import patch + +import homeassistant.components.sensor as sensor +from homeassistant.bootstrap import setup_component +from tests.common import ( + get_test_home_assistant, load_fixture, assert_setup_component) + +VALID_CONFIG = { + 'platform': 'yahoo_finance', + 'symbol': 'YHOO' +} + + +class TestYahooFinanceSetup(unittest.TestCase): + """Test the Yahoo Finance platform.""" + + def setUp(self): + """Initialize values for this testcase class.""" + self.hass = get_test_home_assistant() + self.config = VALID_CONFIG + + def tearDown(self): + """Stop everything that was started.""" + self.hass.stop() + + @patch('yahoo_finance.Base._request', + return_value=json.loads(load_fixture('yahoo_finance.json'))) + def test_default_setup(self, m): # pylint: disable=invalid-name + """Test the default setup.""" + with assert_setup_component(1, sensor.DOMAIN): + assert setup_component(self.hass, sensor.DOMAIN, { + 'sensor': VALID_CONFIG}) + + state = self.hass.states.get('sensor.yahoo_stock') + self.assertEqual("41.69", state.attributes.get('open')) + self.assertEqual("41.79", state.attributes.get('prev_close')) + self.assertEqual("YHOO", state.attributes.get('unit_of_measurement')) diff --git a/tests/components/switch/test_flux.py b/tests/components/switch/test_flux.py index 01b5d797222..1ee865ef3ac 100644 --- a/tests/components/switch/test_flux.py +++ b/tests/components/switch/test_flux.py @@ -1,6 +1,6 @@ """The tests for the Flux switch platform.""" -import unittest from datetime import timedelta +import unittest from unittest.mock import patch from homeassistant.bootstrap import setup_component @@ -8,8 +8,10 @@ from homeassistant.components import switch, light from homeassistant.const 
import CONF_PLATFORM, STATE_ON, SERVICE_TURN_ON import homeassistant.loader as loader import homeassistant.util.dt as dt_util -from tests.common import get_test_home_assistant -from tests.common import fire_time_changed, mock_service + +from tests.common import ( + assert_setup_component, get_test_home_assistant, fire_time_changed, + mock_service) class TestSwitchFlux(unittest.TestCase): @@ -50,21 +52,23 @@ class TestSwitchFlux(unittest.TestCase): def test_valid_config_no_name(self): """Test configuration.""" - assert setup_component(self.hass, 'switch', { - 'switch': { - 'platform': 'flux', - 'lights': ['light.desk', 'light.lamp'] - } - }) + with assert_setup_component(1, 'switch'): + assert setup_component(self.hass, 'switch', { + 'switch': { + 'platform': 'flux', + 'lights': ['light.desk', 'light.lamp'] + } + }) def test_invalid_config_no_lights(self): """Test configuration.""" - assert not setup_component(self.hass, 'switch', { - 'switch': { - 'platform': 'flux', - 'name': 'flux' - } - }) + with assert_setup_component(0, 'switch'): + assert setup_component(self.hass, 'switch', { + 'switch': { + 'platform': 'flux', + 'name': 'flux' + } + }) def test_flux_when_switch_is_off(self): """Test the flux switch when it is off.""" diff --git a/tests/components/switch/test_mfi.py b/tests/components/switch/test_mfi.py index 53e032f3284..a73b35af2f8 100644 --- a/tests/components/switch/test_mfi.py +++ b/tests/components/switch/test_mfi.py @@ -65,7 +65,8 @@ class TestMfiSwitch(unittest.TestCase): def test_update(self): """Test update.""" self.switch.update() - self.port.refresh.assert_called_once_with() + self.assertEqual(self.port.refresh.call_count, 1) + self.assertEqual(self.port.refresh.call_args, mock.call()) def test_update_with_target_state(self): """Test update with target state.""" @@ -82,13 +83,15 @@ class TestMfiSwitch(unittest.TestCase): def test_turn_on(self): """Test turn_on.""" self.switch.turn_on() - self.port.control.assert_called_once_with(True) + self.assertEqual(self.port.control.call_count, 1) + self.assertEqual(self.port.control.call_args, mock.call(True)) self.assertTrue(self.switch._target_state) def test_turn_off(self): """Test turn_off.""" self.switch.turn_off() - self.port.control.assert_called_once_with(False) + self.assertEqual(self.port.control.call_count, 1) + self.assertEqual(self.port.control.call_args, mock.call(False)) self.assertFalse(self.switch._target_state) def test_current_power_mwh(self): diff --git a/tests/components/switch/test_template.py b/tests/components/switch/test_template.py index e13b0f7392b..af91c9a565b 100644 --- a/tests/components/switch/test_template.py +++ b/tests/components/switch/test_template.py @@ -6,12 +6,16 @@ from homeassistant.const import ( STATE_ON, STATE_OFF) -from tests.common import get_test_home_assistant +from tests.common import get_test_home_assistant, assert_setup_component class TestTemplateSwitch: """Test the Template switch.""" + hass = None + calls = None + # pylint: disable=invalid-name + def setup_method(self, method): """Setup things to be run when tests are started.""" self.hass = get_test_home_assistant() @@ -29,25 +33,26 @@ class TestTemplateSwitch: def test_template_state_text(self): """"Test the state text of a template.""" - assert bootstrap.setup_component(self.hass, 'switch', { - 'switch': { - 'platform': 'template', - 'switches': { - 'test_template_switch': { - 'value_template': - "{{ states.switch.test_state.state }}", - 'turn_on': { - 'service': 'switch.turn_on', - 'entity_id': 'switch.test_state' - }, - 
'turn_off': { - 'service': 'switch.turn_off', - 'entity_id': 'switch.test_state' - }, + with assert_setup_component(1): + assert bootstrap.setup_component(self.hass, 'switch', { + 'switch': { + 'platform': 'template', + 'switches': { + 'test_template_switch': { + 'value_template': + "{{ states.switch.test_state.state }}", + 'turn_on': { + 'service': 'switch.turn_on', + 'entity_id': 'switch.test_state' + }, + 'turn_off': { + 'service': 'switch.turn_off', + 'entity_id': 'switch.test_state' + }, + } } } - } - }) + }) state = self.hass.states.set('switch.test_state', STATE_ON) self.hass.block_till_done() @@ -63,188 +68,197 @@ class TestTemplateSwitch: def test_template_state_boolean_on(self): """Test the setting of the state with boolean on.""" - assert bootstrap.setup_component(self.hass, 'switch', { - 'switch': { - 'platform': 'template', - 'switches': { - 'test_template_switch': { - 'value_template': - "{{ 1 == 1 }}", - 'turn_on': { - 'service': 'switch.turn_on', - 'entity_id': 'switch.test_state' - }, - 'turn_off': { - 'service': 'switch.turn_off', - 'entity_id': 'switch.test_state' - }, + with assert_setup_component(1): + assert bootstrap.setup_component(self.hass, 'switch', { + 'switch': { + 'platform': 'template', + 'switches': { + 'test_template_switch': { + 'value_template': + "{{ 1 == 1 }}", + 'turn_on': { + 'service': 'switch.turn_on', + 'entity_id': 'switch.test_state' + }, + 'turn_off': { + 'service': 'switch.turn_off', + 'entity_id': 'switch.test_state' + }, + } } } - } - }) + }) state = self.hass.states.get('switch.test_template_switch') assert state.state == STATE_ON def test_template_state_boolean_off(self): """Test the setting of the state with off.""" - assert bootstrap.setup_component(self.hass, 'switch', { - 'switch': { - 'platform': 'template', - 'switches': { - 'test_template_switch': { - 'value_template': - "{{ 1 == 2 }}", - 'turn_on': { - 'service': 'switch.turn_on', - 'entity_id': 'switch.test_state' - }, - 'turn_off': { - 'service': 'switch.turn_off', - 'entity_id': 'switch.test_state' - }, + with assert_setup_component(1): + assert bootstrap.setup_component(self.hass, 'switch', { + 'switch': { + 'platform': 'template', + 'switches': { + 'test_template_switch': { + 'value_template': + "{{ 1 == 2 }}", + 'turn_on': { + 'service': 'switch.turn_on', + 'entity_id': 'switch.test_state' + }, + 'turn_off': { + 'service': 'switch.turn_off', + 'entity_id': 'switch.test_state' + }, + } } } - } - }) + }) state = self.hass.states.get('switch.test_template_switch') assert state.state == STATE_OFF def test_template_syntax_error(self): """Test templating syntax error.""" - assert not bootstrap.setup_component(self.hass, 'switch', { - 'switch': { - 'platform': 'template', - 'switches': { - 'test_template_switch': { - 'value_template': - "{% if rubbish %}", - 'turn_on': { - 'service': 'switch.turn_on', - 'entity_id': 'switch.test_state' - }, - 'turn_off': { - 'service': 'switch.turn_off', - 'entity_id': 'switch.test_state' - }, + with assert_setup_component(0): + assert bootstrap.setup_component(self.hass, 'switch', { + 'switch': { + 'platform': 'template', + 'switches': { + 'test_template_switch': { + 'value_template': + "{% if rubbish %}", + 'turn_on': { + 'service': 'switch.turn_on', + 'entity_id': 'switch.test_state' + }, + 'turn_off': { + 'service': 'switch.turn_off', + 'entity_id': 'switch.test_state' + }, + } } } - } - }) + }) assert self.hass.states.all() == [] def test_invalid_name_does_not_create(self): """Test invalid name.""" - assert not 
bootstrap.setup_component(self.hass, 'switch', { - 'switch': { - 'platform': 'template', - 'switches': { - 'test INVALID switch': { - 'value_template': - "{{ rubbish }", - 'turn_on': { - 'service': 'switch.turn_on', - 'entity_id': 'switch.test_state' - }, - 'turn_off': { - 'service': 'switch.turn_off', - 'entity_id': 'switch.test_state' - }, + with assert_setup_component(0): + assert bootstrap.setup_component(self.hass, 'switch', { + 'switch': { + 'platform': 'template', + 'switches': { + 'test INVALID switch': { + 'value_template': + "{{ rubbish }", + 'turn_on': { + 'service': 'switch.turn_on', + 'entity_id': 'switch.test_state' + }, + 'turn_off': { + 'service': 'switch.turn_off', + 'entity_id': 'switch.test_state' + }, + } } } - } - }) + }) assert self.hass.states.all() == [] def test_invalid_switch_does_not_create(self): """Test invalid switch.""" - assert not bootstrap.setup_component(self.hass, 'switch', { - 'switch': { - 'platform': 'template', - 'switches': { - 'test_template_switch': 'Invalid' + with assert_setup_component(0): + assert bootstrap.setup_component(self.hass, 'switch', { + 'switch': { + 'platform': 'template', + 'switches': { + 'test_template_switch': 'Invalid' + } } - } - }) + }) assert self.hass.states.all() == [] def test_no_switches_does_not_create(self): """Test if there are no switches no creation.""" - assert not bootstrap.setup_component(self.hass, 'switch', { - 'switch': { - 'platform': 'template' - } - }) + with assert_setup_component(0): + assert bootstrap.setup_component(self.hass, 'switch', { + 'switch': { + 'platform': 'template' + } + }) assert self.hass.states.all() == [] def test_missing_template_does_not_create(self): """Test missing template.""" - assert not bootstrap.setup_component(self.hass, 'switch', { - 'switch': { - 'platform': 'template', - 'switches': { - 'test_template_switch': { - 'not_value_template': - "{{ states.switch.test_state.state }}", - 'turn_on': { - 'service': 'switch.turn_on', - 'entity_id': 'switch.test_state' - }, - 'turn_off': { - 'service': 'switch.turn_off', - 'entity_id': 'switch.test_state' - }, + with assert_setup_component(0): + assert bootstrap.setup_component(self.hass, 'switch', { + 'switch': { + 'platform': 'template', + 'switches': { + 'test_template_switch': { + 'not_value_template': + "{{ states.switch.test_state.state }}", + 'turn_on': { + 'service': 'switch.turn_on', + 'entity_id': 'switch.test_state' + }, + 'turn_off': { + 'service': 'switch.turn_off', + 'entity_id': 'switch.test_state' + }, + } } } - } - }) + }) assert self.hass.states.all() == [] def test_missing_on_does_not_create(self): """Test missing on.""" - assert not bootstrap.setup_component(self.hass, 'switch', { - 'switch': { - 'platform': 'template', - 'switches': { - 'test_template_switch': { - 'value_template': - "{{ states.switch.test_state.state }}", - 'not_on': { - 'service': 'switch.turn_on', - 'entity_id': 'switch.test_state' - }, - 'turn_off': { - 'service': 'switch.turn_off', - 'entity_id': 'switch.test_state' - }, + with assert_setup_component(0): + assert bootstrap.setup_component(self.hass, 'switch', { + 'switch': { + 'platform': 'template', + 'switches': { + 'test_template_switch': { + 'value_template': + "{{ states.switch.test_state.state }}", + 'not_on': { + 'service': 'switch.turn_on', + 'entity_id': 'switch.test_state' + }, + 'turn_off': { + 'service': 'switch.turn_off', + 'entity_id': 'switch.test_state' + }, + } } } - } - }) + }) assert self.hass.states.all() == [] def test_missing_off_does_not_create(self): """Test missing 
off.""" - assert not bootstrap.setup_component(self.hass, 'switch', { - 'switch': { - 'platform': 'template', - 'switches': { - 'test_template_switch': { - 'value_template': - "{{ states.switch.test_state.state }}", - 'turn_on': { - 'service': 'switch.turn_on', - 'entity_id': 'switch.test_state' - }, - 'not_off': { - 'service': 'switch.turn_off', - 'entity_id': 'switch.test_state' - }, + with assert_setup_component(0): + assert bootstrap.setup_component(self.hass, 'switch', { + 'switch': { + 'platform': 'template', + 'switches': { + 'test_template_switch': { + 'value_template': + "{{ states.switch.test_state.state }}", + 'turn_on': { + 'service': 'switch.turn_on', + 'entity_id': 'switch.test_state' + }, + 'not_off': { + 'service': 'switch.turn_off', + 'entity_id': 'switch.test_state' + }, + } } } - } - }) + }) assert self.hass.states.all() == [] def test_on_action(self): diff --git a/tests/components/test_alexa.py b/tests/components/test_alexa.py index a40b401c777..31d5d6eec5c 100644 --- a/tests/components/test_alexa.py +++ b/tests/components/test_alexa.py @@ -2,6 +2,7 @@ # pylint: disable=protected-access,too-many-public-methods import json import time +import datetime import unittest import requests @@ -13,19 +14,27 @@ from tests.common import get_test_instance_port, get_test_home_assistant API_PASSWORD = "test1234" SERVER_PORT = get_test_instance_port() -API_URL = "http://127.0.0.1:{}{}".format(SERVER_PORT, alexa.API_ENDPOINT) +BASE_API_URL = "http://127.0.0.1:{}".format(SERVER_PORT) +INTENTS_API_URL = "{}{}".format(BASE_API_URL, alexa.INTENTS_API_ENDPOINT) + HA_HEADERS = { const.HTTP_HEADER_HA_AUTH: API_PASSWORD, const.HTTP_HEADER_CONTENT_TYPE: const.CONTENT_TYPE_JSON, } -SESSION_ID = 'amzn1.echo-api.session.0000000-0000-0000-0000-00000000000' -APPLICATION_ID = 'amzn1.echo-sdk-ams.app.000000-d0ed-0000-ad00-000000d00ebe' -REQUEST_ID = 'amzn1.echo-api.request.0000000-0000-0000-0000-00000000000' +SESSION_ID = "amzn1.echo-api.session.0000000-0000-0000-0000-00000000000" +APPLICATION_ID = "amzn1.echo-sdk-ams.app.000000-d0ed-0000-ad00-000000d00ebe" +REQUEST_ID = "amzn1.echo-api.request.0000000-0000-0000-0000-00000000000" +# pylint: disable=invalid-name hass = None calls = [] +NPR_NEWS_MP3_URL = "https://pd.npr.org/anon.npr-mp3/npr/news/newscast.mp3" + +# 2016-10-10T19:51:42+00:00 +STATIC_TIME = datetime.datetime.utcfromtimestamp(1476129102) + def setUpModule(): # pylint: disable=invalid-name """Initialize a Home Assistant server for testing this module.""" @@ -36,23 +45,40 @@ def setUpModule(): # pylint: disable=invalid-name bootstrap.setup_component( hass, http.DOMAIN, {http.DOMAIN: {http.CONF_API_PASSWORD: API_PASSWORD, - http.CONF_SERVER_PORT: SERVER_PORT}}) + http.CONF_SERVER_PORT: SERVER_PORT}}) - hass.services.register('test', 'alexa', lambda call: calls.append(call)) + hass.services.register("test", "alexa", lambda call: calls.append(call)) bootstrap.setup_component(hass, alexa.DOMAIN, { # Key is here to verify we allow other keys in config too - 'homeassistant': {}, - 'alexa': { - 'intents': { - 'WhereAreWeIntent': { - 'speech': { - 'type': 'plaintext', - 'text': + "homeassistant": {}, + "alexa": { + "flash_briefings": { + "weather": [ + {"title": "Weekly forecast", + "text": "This week it will be sunny.", + "date": "2016-10-09T19:51:42.0Z"}, + {"title": "Current conditions", + "text": "Currently it is 80 degrees fahrenheit.", + "date": STATIC_TIME} + ], + "news_audio": { + "title": "NPR", + "audio": NPR_NEWS_MP3_URL, + "display_url": "https://npr.org", + "date": STATIC_TIME, + "uid": 
"uuid" + } + }, + "intents": { + "WhereAreWeIntent": { + "speech": { + "type": "plaintext", + "text": """ - {%- if is_state('device_tracker.paulus', 'home') - and is_state('device_tracker.anne_therese', - 'home') -%} + {%- if is_state("device_tracker.paulus", "home") + and is_state("device_tracker.anne_therese", + "home") -%} You are both home, you silly {%- else -%} Anne Therese is at {{ @@ -64,23 +90,23 @@ def setUpModule(): # pylint: disable=invalid-name """, } }, - 'GetZodiacHoroscopeIntent': { - 'speech': { - 'type': 'plaintext', - 'text': 'You told us your sign is {{ ZodiacSign }}.', + "GetZodiacHoroscopeIntent": { + "speech": { + "type": "plaintext", + "text": "You told us your sign is {{ ZodiacSign }}.", } }, - 'CallServiceIntent': { - 'speech': { - 'type': 'plaintext', - 'text': 'Service called', + "CallServiceIntent": { + "speech": { + "type": "plaintext", + "text": "Service called", }, - 'action': { - 'service': 'test.alexa', - 'data_template': { - 'hello': '{{ ZodiacSign }}' + "action": { + "service": "test.alexa", + "data_template": { + "hello": "{{ ZodiacSign }}" }, - 'entity_id': 'switch.test', + "entity_id": "switch.test", } } } @@ -96,11 +122,19 @@ def tearDownModule(): # pylint: disable=invalid-name hass.stop() -def _req(data={}): - return requests.post(API_URL, data=json.dumps(data), timeout=5, +def _intent_req(data={}): + return requests.post(INTENTS_API_URL, data=json.dumps(data), timeout=5, headers=HA_HEADERS) +def _flash_briefing_req(briefing_id=None): + url_format = "{}/api/alexa/flash_briefings/{}" + FLASH_BRIEFING_API_URL = url_format.format(BASE_API_URL, + briefing_id) + return requests.get(FLASH_BRIEFING_API_URL, timeout=5, + headers=HA_HEADERS) + + class TestAlexa(unittest.TestCase): """Test Alexa.""" @@ -108,231 +142,267 @@ class TestAlexa(unittest.TestCase): """Stop everything that was started.""" hass.block_till_done() - def test_launch_request(self): + def test_intent_launch_request(self): """Test the launch of a request.""" data = { - 'version': '1.0', - 'session': { - 'new': True, - 'sessionId': SESSION_ID, - 'application': { - 'applicationId': APPLICATION_ID + "version": "1.0", + "session": { + "new": True, + "sessionId": SESSION_ID, + "application": { + "applicationId": APPLICATION_ID }, - 'attributes': {}, - 'user': { - 'userId': 'amzn1.account.AM3B00000000000000000000000' + "attributes": {}, + "user": { + "userId": "amzn1.account.AM3B00000000000000000000000" } }, - 'request': { - 'type': 'LaunchRequest', - 'requestId': REQUEST_ID, - 'timestamp': '2015-05-13T12:34:56Z' + "request": { + "type": "LaunchRequest", + "requestId": REQUEST_ID, + "timestamp": "2015-05-13T12:34:56Z" } } - req = _req(data) + req = _intent_req(data) self.assertEqual(200, req.status_code) resp = req.json() - self.assertIn('outputSpeech', resp['response']) + self.assertIn("outputSpeech", resp["response"]) def test_intent_request_with_slots(self): """Test a request with slots.""" data = { - 'version': '1.0', - 'session': { - 'new': False, - 'sessionId': SESSION_ID, - 'application': { - 'applicationId': APPLICATION_ID + "version": "1.0", + "session": { + "new": False, + "sessionId": SESSION_ID, + "application": { + "applicationId": APPLICATION_ID }, - 'attributes': { - 'supportedHoroscopePeriods': { - 'daily': True, - 'weekly': False, - 'monthly': False + "attributes": { + "supportedHoroscopePeriods": { + "daily": True, + "weekly": False, + "monthly": False } }, - 'user': { - 'userId': 'amzn1.account.AM3B00000000000000000000000' + "user": { + "userId": 
"amzn1.account.AM3B00000000000000000000000" } }, - 'request': { - 'type': 'IntentRequest', - 'requestId': REQUEST_ID, - 'timestamp': '2015-05-13T12:34:56Z', - 'intent': { - 'name': 'GetZodiacHoroscopeIntent', - 'slots': { - 'ZodiacSign': { - 'name': 'ZodiacSign', - 'value': 'virgo' + "request": { + "type": "IntentRequest", + "requestId": REQUEST_ID, + "timestamp": "2015-05-13T12:34:56Z", + "intent": { + "name": "GetZodiacHoroscopeIntent", + "slots": { + "ZodiacSign": { + "name": "ZodiacSign", + "value": "virgo" } } } } } - req = _req(data) + req = _intent_req(data) self.assertEqual(200, req.status_code) - text = req.json().get('response', {}).get('outputSpeech', - {}).get('text') - self.assertEqual('You told us your sign is virgo.', text) + text = req.json().get("response", {}).get("outputSpeech", + {}).get("text") + self.assertEqual("You told us your sign is virgo.", text) def test_intent_request_with_slots_but_no_value(self): """Test a request with slots but no value.""" data = { - 'version': '1.0', - 'session': { - 'new': False, - 'sessionId': SESSION_ID, - 'application': { - 'applicationId': APPLICATION_ID + "version": "1.0", + "session": { + "new": False, + "sessionId": SESSION_ID, + "application": { + "applicationId": APPLICATION_ID }, - 'attributes': { - 'supportedHoroscopePeriods': { - 'daily': True, - 'weekly': False, - 'monthly': False + "attributes": { + "supportedHoroscopePeriods": { + "daily": True, + "weekly": False, + "monthly": False } }, - 'user': { - 'userId': 'amzn1.account.AM3B00000000000000000000000' + "user": { + "userId": "amzn1.account.AM3B00000000000000000000000" } }, - 'request': { - 'type': 'IntentRequest', - 'requestId': REQUEST_ID, - 'timestamp': '2015-05-13T12:34:56Z', - 'intent': { - 'name': 'GetZodiacHoroscopeIntent', - 'slots': { - 'ZodiacSign': { - 'name': 'ZodiacSign', + "request": { + "type": "IntentRequest", + "requestId": REQUEST_ID, + "timestamp": "2015-05-13T12:34:56Z", + "intent": { + "name": "GetZodiacHoroscopeIntent", + "slots": { + "ZodiacSign": { + "name": "ZodiacSign", } } } } } - req = _req(data) + req = _intent_req(data) self.assertEqual(200, req.status_code) - text = req.json().get('response', {}).get('outputSpeech', - {}).get('text') - self.assertEqual('You told us your sign is .', text) + text = req.json().get("response", {}).get("outputSpeech", + {}).get("text") + self.assertEqual("You told us your sign is .", text) def test_intent_request_without_slots(self): """Test a request without slots.""" data = { - 'version': '1.0', - 'session': { - 'new': False, - 'sessionId': SESSION_ID, - 'application': { - 'applicationId': APPLICATION_ID + "version": "1.0", + "session": { + "new": False, + "sessionId": SESSION_ID, + "application": { + "applicationId": APPLICATION_ID }, - 'attributes': { - 'supportedHoroscopePeriods': { - 'daily': True, - 'weekly': False, - 'monthly': False + "attributes": { + "supportedHoroscopePeriods": { + "daily": True, + "weekly": False, + "monthly": False } }, - 'user': { - 'userId': 'amzn1.account.AM3B00000000000000000000000' + "user": { + "userId": "amzn1.account.AM3B00000000000000000000000" } }, - 'request': { - 'type': 'IntentRequest', - 'requestId': REQUEST_ID, - 'timestamp': '2015-05-13T12:34:56Z', - 'intent': { - 'name': 'WhereAreWeIntent', + "request": { + "type": "IntentRequest", + "requestId": REQUEST_ID, + "timestamp": "2015-05-13T12:34:56Z", + "intent": { + "name": "WhereAreWeIntent", } } } - req = _req(data) + req = _intent_req(data) self.assertEqual(200, req.status_code) - text = req.json().get('response', 
{}).get('outputSpeech', - {}).get('text') + text = req.json().get("response", {}).get("outputSpeech", + {}).get("text") - self.assertEqual('Anne Therese is at unknown and Paulus is at unknown', + self.assertEqual("Anne Therese is at unknown and Paulus is at unknown", text) - hass.states.set('device_tracker.paulus', 'home') - hass.states.set('device_tracker.anne_therese', 'home') + hass.states.set("device_tracker.paulus", "home") + hass.states.set("device_tracker.anne_therese", "home") - req = _req(data) + req = _intent_req(data) self.assertEqual(200, req.status_code) - text = req.json().get('response', {}).get('outputSpeech', - {}).get('text') - self.assertEqual('You are both home, you silly', text) + text = req.json().get("response", {}).get("outputSpeech", + {}).get("text") + self.assertEqual("You are both home, you silly", text) def test_intent_request_calling_service(self): """Test a request for calling a service.""" data = { - 'version': '1.0', - 'session': { - 'new': False, - 'sessionId': SESSION_ID, - 'application': { - 'applicationId': APPLICATION_ID + "version": "1.0", + "session": { + "new": False, + "sessionId": SESSION_ID, + "application": { + "applicationId": APPLICATION_ID }, - 'attributes': {}, - 'user': { - 'userId': 'amzn1.account.AM3B00000000000000000000000' + "attributes": {}, + "user": { + "userId": "amzn1.account.AM3B00000000000000000000000" } }, - 'request': { - 'type': 'IntentRequest', - 'requestId': REQUEST_ID, - 'timestamp': '2015-05-13T12:34:56Z', - 'intent': { - 'name': 'CallServiceIntent', - 'slots': { - 'ZodiacSign': { - 'name': 'ZodiacSign', - 'value': 'virgo', + "request": { + "type": "IntentRequest", + "requestId": REQUEST_ID, + "timestamp": "2015-05-13T12:34:56Z", + "intent": { + "name": "CallServiceIntent", + "slots": { + "ZodiacSign": { + "name": "ZodiacSign", + "value": "virgo", } } } } } call_count = len(calls) - req = _req(data) + req = _intent_req(data) self.assertEqual(200, req.status_code) self.assertEqual(call_count + 1, len(calls)) call = calls[-1] - self.assertEqual('test', call.domain) - self.assertEqual('alexa', call.service) - self.assertEqual(['switch.test'], call.data.get('entity_id')) - self.assertEqual('virgo', call.data.get('hello')) + self.assertEqual("test", call.domain) + self.assertEqual("alexa", call.service) + self.assertEqual(["switch.test"], call.data.get("entity_id")) + self.assertEqual("virgo", call.data.get("hello")) - def test_session_ended_request(self): + def test_intent_session_ended_request(self): """Test the request for ending the session.""" data = { - 'version': '1.0', - 'session': { - 'new': False, - 'sessionId': SESSION_ID, - 'application': { - 'applicationId': APPLICATION_ID + "version": "1.0", + "session": { + "new": False, + "sessionId": SESSION_ID, + "application": { + "applicationId": APPLICATION_ID }, - 'attributes': { - 'supportedHoroscopePeriods': { - 'daily': True, - 'weekly': False, - 'monthly': False + "attributes": { + "supportedHoroscopePeriods": { + "daily": True, + "weekly": False, + "monthly": False } }, - 'user': { - 'userId': 'amzn1.account.AM3B00000000000000000000000' + "user": { + "userId": "amzn1.account.AM3B00000000000000000000000" } }, - 'request': { - 'type': 'SessionEndedRequest', - 'requestId': REQUEST_ID, - 'timestamp': '2015-05-13T12:34:56Z', - 'reason': 'USER_INITIATED' + "request": { + "type": "SessionEndedRequest", + "requestId": REQUEST_ID, + "timestamp": "2015-05-13T12:34:56Z", + "reason": "USER_INITIATED" } } - req = _req(data) + req = _intent_req(data) self.assertEqual(200, 
req.status_code) - self.assertEqual('', req.text) + self.assertEqual("", req.text) + + def test_flash_briefing_invalid_id(self): + """Test an invalid Flash Briefing ID.""" + req = _flash_briefing_req() + self.assertEqual(404, req.status_code) + self.assertEqual("", req.text) + + def test_flash_briefing_date_from_str(self): + """Test the response has a valid date parsed from string.""" + req = _flash_briefing_req("weather") + self.assertEqual(200, req.status_code) + self.assertEqual(req.json()[0].get(alexa.ATTR_UPDATE_DATE), + "2016-10-09T19:51:42.0Z") + + def test_flash_briefing_date_from_datetime(self): + """Test the response has a valid date from a datetime object.""" + req = _flash_briefing_req("weather") + self.assertEqual(200, req.status_code) + self.assertEqual(req.json()[1].get(alexa.ATTR_UPDATE_DATE), + '2016-10-10T19:51:42.0Z') + + def test_flash_briefing_valid(self): + """Test the response is valid.""" + data = [{ + "titleText": "NPR", + "redirectionURL": "https://npr.org", + "streamUrl": NPR_NEWS_MP3_URL, + "mainText": "", + "uid": "uuid", + "updateDate": '2016-10-10T19:51:42.0Z' + }] + + req = _flash_briefing_req("news_audio") + self.assertEqual(200, req.status_code) + response = req.json() + self.assertEqual(response, data) diff --git a/tests/components/test_api.py b/tests/components/test_api.py index dee4320824b..78affc70648 100644 --- a/tests/components/test_api.py +++ b/tests/components/test_api.py @@ -2,10 +2,9 @@ # pylint: disable=protected-access,too-many-public-methods from contextlib import closing import json -import tempfile import time import unittest -from unittest.mock import patch +from unittest.mock import Mock, patch import requests @@ -145,14 +144,14 @@ class TestAPI(unittest.TestCase): requests.post(_url(const.URL_API_STATES_ENTITY.format("test.test")), data=json.dumps({"state": "not_to_be_set"}), headers=HA_HEADERS) - hass.bus._pool.block_till_done() + hass.block_till_done() self.assertEqual(0, len(events)) requests.post(_url(const.URL_API_STATES_ENTITY.format("test.test")), data=json.dumps({"state": "not_to_be_set", "force_update": True}), headers=HA_HEADERS) - hass.bus._pool.block_till_done() + hass.block_till_done() self.assertEqual(1, len(events)) # pylint: disable=invalid-name @@ -244,15 +243,20 @@ class TestAPI(unittest.TestCase): def test_api_get_error_log(self): """Test the return of the error log.""" - test_content = 'Test String°' - with tempfile.NamedTemporaryFile() as log: - log.write(test_content.encode('utf-8')) - log.flush() + test_string = 'Test String°'.encode('UTF-8') - with patch.object(hass.config, 'path', return_value=log.name): - req = requests.get(_url(const.URL_API_ERROR_LOG), - headers=HA_HEADERS) - self.assertEqual(test_content, req.text) + # Can't use read_data with wsgiserver in Python 3.4.2. Due to a + # bug in read_data, it can't handle byte types ('Type str doesn't + # support the buffer API'), but wsgiserver requires byte types + # ('WSGI Applications must yield bytes'). So just mock our own + # read method. 
+ m_open = Mock(return_value=Mock( + read=Mock(side_effect=[test_string])) + ) + with patch('homeassistant.components.http.open', m_open, create=True): + req = requests.get(_url(const.URL_API_ERROR_LOG), + headers=HA_HEADERS) + self.assertEqual(test_string, req.text.encode('UTF-8')) self.assertIsNone(req.headers.get('expires')) def test_api_get_event_listeners(self): diff --git a/tests/components/test_graphite.py b/tests/components/test_graphite.py index e9235c26542..fcbdbd85b19 100644 --- a/tests/components/test_graphite.py +++ b/tests/components/test_graphite.py @@ -29,7 +29,11 @@ class TestGraphite(unittest.TestCase): def test_setup(self, mock_socket): """Test setup.""" assert setup_component(self.hass, graphite.DOMAIN, {'graphite': {}}) - mock_socket.assert_called_once_with(socket.AF_INET, socket.SOCK_STREAM) + self.assertEqual(mock_socket.call_count, 1) + self.assertEqual( + mock_socket.call_args, + mock.call(socket.AF_INET, socket.SOCK_STREAM) + ) @patch('socket.socket') @patch('homeassistant.components.graphite.GraphiteFeeder') @@ -44,8 +48,15 @@ class TestGraphite(unittest.TestCase): } self.assertTrue(setup_component(self.hass, graphite.DOMAIN, config)) - mock_gf.assert_called_once_with(self.hass, 'foo', 123, 'me') - mock_socket.assert_called_once_with(socket.AF_INET, socket.SOCK_STREAM) + self.assertEqual(mock_gf.call_count, 1) + self.assertEqual( + mock_gf.call_args, mock.call(self.hass, 'foo', 123, 'me') + ) + self.assertEqual(mock_socket.call_count, 1) + self.assertEqual( + mock_socket.call_args, + mock.call(socket.AF_INET, socket.SOCK_STREAM) + ) @patch('socket.socket') @patch('homeassistant.components.graphite.GraphiteFeeder') @@ -60,7 +71,11 @@ class TestGraphite(unittest.TestCase): self.assertTrue(setup_component(self.hass, graphite.DOMAIN, config)) self.assertTrue(mock_gf.called) - mock_socket.assert_called_once_with(socket.AF_INET, socket.SOCK_STREAM) + self.assertEqual(mock_socket.call_count, 1) + self.assertEqual( + mock_socket.call_args, + mock.call(socket.AF_INET, socket.SOCK_STREAM) + ) def test_subscribe(self): """Test the subscription.""" @@ -70,26 +85,34 @@ class TestGraphite(unittest.TestCase): mock.call(EVENT_HOMEASSISTANT_START, gf.start_listen), mock.call(EVENT_HOMEASSISTANT_STOP, gf.shutdown), ]) - fake_hass.bus.listen.assert_called_once_with( - EVENT_STATE_CHANGED, gf.event_listener) + self.assertEqual(fake_hass.bus.listen.call_count, 1) + self.assertEqual( + fake_hass.bus.listen.call_args, + mock.call(EVENT_STATE_CHANGED, gf.event_listener) + ) def test_start(self): """Test the start.""" with mock.patch.object(self.gf, 'start') as mock_start: self.gf.start_listen('event') - mock_start.assert_called_once_with() + self.assertEqual(mock_start.call_count, 1) + self.assertEqual(mock_start.call_args, mock.call()) def test_shutdown(self): """Test the shutdown.""" with mock.patch.object(self.gf, '_queue') as mock_queue: self.gf.shutdown('event') - mock_queue.put.assert_called_once_with(self.gf._quit_object) + self.assertEqual(mock_queue.put.call_count, 1) + self.assertEqual( + mock_queue.put.call_args, mock.call(self.gf._quit_object) + ) def test_event_listener(self): """Test the event listener.""" with mock.patch.object(self.gf, '_queue') as mock_queue: self.gf.event_listener('foo') - mock_queue.put.assert_called_once_with('foo') + self.assertEqual(mock_queue.put.call_count, 1) + self.assertEqual(mock_queue.put.call_args, mock.call('foo')) @patch('time.time') def test_report_attributes(self, mock_time): @@ -164,21 +187,32 @@ class TestGraphite(unittest.TestCase): 
def test_send_to_graphite(self, mock_socket): """Test the sending of data.""" self.gf._send_to_graphite('foo') - mock_socket.assert_called_once_with(socket.AF_INET, - socket.SOCK_STREAM) + self.assertEqual(mock_socket.call_count, 1) + self.assertEqual( + mock_socket.call_args, + mock.call(socket.AF_INET, socket.SOCK_STREAM) + ) sock = mock_socket.return_value - sock.connect.assert_called_once_with(('foo', 123)) - sock.sendall.assert_called_once_with('foo'.encode('ascii')) - sock.send.assert_called_once_with('\n'.encode('ascii')) - sock.close.assert_called_once_with() + self.assertEqual(sock.connect.call_count, 1) + self.assertEqual(sock.connect.call_args, mock.call(('foo', 123))) + self.assertEqual(sock.sendall.call_count, 1) + self.assertEqual( + sock.sendall.call_args, mock.call('foo'.encode('ascii')) + ) + self.assertEqual(sock.send.call_count, 1) + self.assertEqual(sock.send.call_args, mock.call('\n'.encode('ascii'))) + self.assertEqual(sock.close.call_count, 1) + self.assertEqual(sock.close.call_args, mock.call()) def test_run_stops(self): """Test the stops.""" with mock.patch.object(self.gf, '_queue') as mock_queue: mock_queue.get.return_value = self.gf._quit_object self.assertEqual(None, self.gf.run()) - mock_queue.get.assert_called_once_with() - mock_queue.task_done.assert_called_once_with() + self.assertEqual(mock_queue.get.call_count, 1) + self.assertEqual(mock_queue.get.call_args, mock.call()) + self.assertEqual(mock_queue.task_done.call_count, 1) + self.assertEqual(mock_queue.task_done.call_args, mock.call()) def test_run(self): """Test the running.""" @@ -204,6 +238,8 @@ class TestGraphite(unittest.TestCase): self.gf.run() # Twice for two events, once for the stop self.assertEqual(3, mock_queue.task_done.call_count) - mock_r.assert_called_once_with( - 'entity', - event.data['new_state']) + self.assertEqual(mock_r.call_count, 1) + self.assertEqual( + mock_r.call_args, + mock.call('entity', event.data['new_state']) + ) diff --git a/tests/components/test_group.py b/tests/components/test_group.py index 9a2de824e90..5fe14c6377e 100644 --- a/tests/components/test_group.py +++ b/tests/components/test_group.py @@ -4,7 +4,7 @@ from collections import OrderedDict import unittest from unittest.mock import patch -from homeassistant.bootstrap import _setup_component +from homeassistant.bootstrap import setup_component from homeassistant.const import ( STATE_ON, STATE_OFF, STATE_HOME, STATE_UNKNOWN, ATTR_ICON, ATTR_HIDDEN, ATTR_ASSUMED_STATE, STATE_NOT_HOME, ) @@ -28,7 +28,7 @@ class TestComponentsGroup(unittest.TestCase): """Try to setup a group with mixed groupable states.""" self.hass.states.set('light.Bowl', STATE_ON) self.hass.states.set('device_tracker.Paulus', STATE_HOME) - group.Group( + group.Group.create_group( self.hass, 'person_and_light', ['light.Bowl', 'device_tracker.Paulus']) @@ -41,7 +41,7 @@ class TestComponentsGroup(unittest.TestCase): """Try to setup a group with a non existing state.""" self.hass.states.set('light.Bowl', STATE_ON) - grp = group.Group( + grp = group.Group.create_group( self.hass, 'light_and_nothing', ['light.Bowl', 'non.existing']) @@ -52,7 +52,7 @@ class TestComponentsGroup(unittest.TestCase): self.hass.states.set('cast.living_room', "Plex") self.hass.states.set('cast.bedroom', "Netflix") - grp = group.Group( + grp = group.Group.create_group( self.hass, 'chromecasts', ['cast.living_room', 'cast.bedroom']) @@ -60,7 +60,7 @@ class TestComponentsGroup(unittest.TestCase): def test_setup_empty_group(self): """Try to setup an empty group.""" - grp = 
group.Group(self.hass, 'nothing', []) + grp = group.Group.create_group(self.hass, 'nothing', []) self.assertEqual(STATE_UNKNOWN, grp.state) @@ -68,7 +68,7 @@ class TestComponentsGroup(unittest.TestCase): """Test if the group keeps track of states.""" self.hass.states.set('light.Bowl', STATE_ON) self.hass.states.set('light.Ceiling', STATE_OFF) - test_group = group.Group( + test_group = group.Group.create_group( self.hass, 'init_group', ['light.Bowl', 'light.Ceiling'], False) # Test if group setup in our init mode is ok @@ -82,7 +82,7 @@ class TestComponentsGroup(unittest.TestCase): """Test if turn off if the last device that was on turns off.""" self.hass.states.set('light.Bowl', STATE_OFF) self.hass.states.set('light.Ceiling', STATE_OFF) - test_group = group.Group( + test_group = group.Group.create_group( self.hass, 'init_group', ['light.Bowl', 'light.Ceiling'], False) self.hass.block_till_done() @@ -94,7 +94,7 @@ class TestComponentsGroup(unittest.TestCase): """Test if turn on if all devices were turned off and one turns on.""" self.hass.states.set('light.Bowl', STATE_OFF) self.hass.states.set('light.Ceiling', STATE_OFF) - test_group = group.Group( + test_group = group.Group.create_group( self.hass, 'init_group', ['light.Bowl', 'light.Ceiling'], False) # Turn one on @@ -108,7 +108,7 @@ class TestComponentsGroup(unittest.TestCase): """Test is_on method.""" self.hass.states.set('light.Bowl', STATE_ON) self.hass.states.set('light.Ceiling', STATE_OFF) - test_group = group.Group( + test_group = group.Group.create_group( self.hass, 'init_group', ['light.Bowl', 'light.Ceiling'], False) self.assertTrue(group.is_on(self.hass, test_group.entity_id)) @@ -123,7 +123,7 @@ class TestComponentsGroup(unittest.TestCase): """Test expand_entity_ids method.""" self.hass.states.set('light.Bowl', STATE_ON) self.hass.states.set('light.Ceiling', STATE_OFF) - test_group = group.Group( + test_group = group.Group.create_group( self.hass, 'init_group', ['light.Bowl', 'light.Ceiling'], False) self.assertEqual(sorted(['light.ceiling', 'light.bowl']), @@ -134,7 +134,7 @@ class TestComponentsGroup(unittest.TestCase): """Test that expand_entity_ids does not return duplicates.""" self.hass.states.set('light.Bowl', STATE_ON) self.hass.states.set('light.Ceiling', STATE_OFF) - test_group = group.Group( + test_group = group.Group.create_group( self.hass, 'init_group', ['light.Bowl', 'light.Ceiling'], False) self.assertEqual( @@ -155,7 +155,7 @@ class TestComponentsGroup(unittest.TestCase): """Test get_entity_ids method.""" self.hass.states.set('light.Bowl', STATE_ON) self.hass.states.set('light.Ceiling', STATE_OFF) - test_group = group.Group( + test_group = group.Group.create_group( self.hass, 'init_group', ['light.Bowl', 'light.Ceiling'], False) self.assertEqual( @@ -166,7 +166,7 @@ class TestComponentsGroup(unittest.TestCase): """Test if get_entity_ids works with a domain_filter.""" self.hass.states.set('switch.AC', STATE_OFF) - mixed_group = group.Group( + mixed_group = group.Group.create_group( self.hass, 'mixed_group', ['light.Bowl', 'switch.AC'], False) self.assertEqual( @@ -188,7 +188,7 @@ class TestComponentsGroup(unittest.TestCase): If no states existed and now a state it is tracking is being added as ON. """ - test_group = group.Group( + test_group = group.Group.create_group( self.hass, 'test group', ['light.not_there_1']) self.hass.states.set('light.not_there_1', STATE_ON) @@ -204,7 +204,7 @@ class TestComponentsGroup(unittest.TestCase): If no states existed and now a state it is tracking is being added as OFF. 
""" - test_group = group.Group( + test_group = group.Group.create_group( self.hass, 'test group', ['light.not_there_1']) self.hass.states.set('light.not_there_1', STATE_OFF) @@ -218,7 +218,7 @@ class TestComponentsGroup(unittest.TestCase): """Test setup method.""" self.hass.states.set('light.Bowl', STATE_ON) self.hass.states.set('light.Ceiling', STATE_OFF) - test_group = group.Group( + test_group = group.Group.create_group( self.hass, 'init_group', ['light.Bowl', 'light.Ceiling'], False) group_conf = OrderedDict() @@ -230,7 +230,7 @@ class TestComponentsGroup(unittest.TestCase): group_conf['test_group'] = 'hello.world,sensor.happy' group_conf['empty_group'] = {'name': 'Empty Group', 'entities': None} - _setup_component(self.hass, 'group', {'group': group_conf}) + setup_component(self.hass, 'group', {'group': group_conf}) group_state = self.hass.states.get( group.ENTITY_ID_FORMAT.format('second_group')) @@ -257,17 +257,19 @@ class TestComponentsGroup(unittest.TestCase): def test_groups_get_unique_names(self): """Two groups with same name should both have a unique entity id.""" - grp1 = group.Group(self.hass, 'Je suis Charlie') - grp2 = group.Group(self.hass, 'Je suis Charlie') + grp1 = group.Group.create_group(self.hass, 'Je suis Charlie') + grp2 = group.Group.create_group(self.hass, 'Je suis Charlie') self.assertNotEqual(grp1.entity_id, grp2.entity_id) def test_expand_entity_ids_expands_nested_groups(self): """Test if entity ids epands to nested groups.""" - group.Group(self.hass, 'light', ['light.test_1', 'light.test_2']) - group.Group(self.hass, 'switch', ['switch.test_1', 'switch.test_2']) - group.Group(self.hass, 'group_of_groups', ['group.light', - 'group.switch']) + group.Group.create_group( + self.hass, 'light', ['light.test_1', 'light.test_2']) + group.Group.create_group( + self.hass, 'switch', ['switch.test_1', 'switch.test_2']) + group.Group.create_group(self.hass, 'group_of_groups', ['group.light', + 'group.switch']) self.assertEqual( ['light.test_1', 'light.test_2', 'switch.test_1', 'switch.test_2'], @@ -278,7 +280,7 @@ class TestComponentsGroup(unittest.TestCase): """Test assumed state.""" self.hass.states.set('light.Bowl', STATE_ON) self.hass.states.set('light.Ceiling', STATE_OFF) - test_group = group.Group( + test_group = group.Group.create_group( self.hass, 'init_group', ['light.Bowl', 'light.Ceiling', 'sensor.no_exist']) @@ -304,7 +306,7 @@ class TestComponentsGroup(unittest.TestCase): self.hass.states.set('device_tracker.Adam', STATE_HOME) self.hass.states.set('device_tracker.Eve', STATE_NOT_HOME) self.hass.block_till_done() - group.Group( + group.Group.create_group( self.hass, 'peeps', ['device_tracker.Adam', 'device_tracker.Eve']) self.hass.states.set('device_tracker.Adam', 'cool_state_not_home') @@ -315,7 +317,7 @@ class TestComponentsGroup(unittest.TestCase): def test_reloading_groups(self): """Test reloading the group config.""" - _setup_component(self.hass, 'group', {'group': { + assert setup_component(self.hass, 'group', {'group': { 'second_group': { 'entities': 'light.Bowl', 'icon': 'mdi:work', @@ -342,3 +344,11 @@ class TestComponentsGroup(unittest.TestCase): assert self.hass.states.entity_ids() == ['group.hello'] assert self.hass.bus.listeners['state_changed'] == 1 + + def test_stopping_a_group(self): + """Test that a group correctly removes itself.""" + grp = group.Group.create_group( + self.hass, 'light', ['light.test_1', 'light.test_2']) + assert self.hass.states.entity_ids() == ['group.light'] + grp.stop() + assert self.hass.states.entity_ids() == [] diff 
--git a/tests/components/test_history.py b/tests/components/test_history.py index 80d0b1e9f9d..520afed81d9 100644 --- a/tests/components/test_history.py +++ b/tests/components/test_history.py @@ -43,7 +43,15 @@ class TestComponentHistory(unittest.TestCase): def test_setup(self): """Test setup method of history.""" mock_http_component(self.hass) - self.assertTrue(setup_component(self.hass, history.DOMAIN, {})) + config = history.CONFIG_SCHEMA({ + ha.DOMAIN: {}, + history.DOMAIN: {history.CONF_INCLUDE: { + history.CONF_DOMAINS: ['media_player'], + history.CONF_ENTITIES: ['thermostat.test']}, + history.CONF_EXCLUDE: { + history.CONF_DOMAINS: ['thermostat'], + history.CONF_ENTITIES: ['media_player.test']}}}) + self.assertTrue(setup_component(self.hass, history.DOMAIN, config)) def test_last_5_states(self): """Test retrieving the last 5 states.""" @@ -145,14 +153,236 @@ class TestComponentHistory(unittest.TestCase): def test_get_significant_states(self): """Test that only significant states are returned. - We inject a bunch of state updates from media player, zone and - thermostat. We should get back every thermostat change that + We should get back every thermostat change that includes an attribute change, but only the state updates for media player (attribute changes are not significant and not returned). """ + zero, four, states = self.record_states() + hist = history.get_significant_states( + zero, four, filters=history.Filters()) + assert states == hist + + def test_get_significant_states_entity_id(self): + """Test that only significant states are returned for one entity.""" + zero, four, states = self.record_states() + del states['media_player.test2'] + del states['thermostat.test'] + del states['thermostat.test2'] + del states['script.can_cancel_this_one'] + + hist = history.get_significant_states( + zero, four, 'media_player.test', + filters=history.Filters()) + assert states == hist + + def test_get_significant_states_exclude_domain(self): + """Test if significant states are returned when excluding domains. + + We should get back every thermostat change that includes an attribute + change, but no media player changes. + """ + zero, four, states = self.record_states() + del states['media_player.test'] + del states['media_player.test2'] + + config = history.CONFIG_SCHEMA({ + ha.DOMAIN: {}, + history.DOMAIN: {history.CONF_EXCLUDE: { + history.CONF_DOMAINS: ['media_player', ]}}}) + self.check_significant_states(zero, four, states, config) + + def test_get_significant_states_exclude_entity(self): + """Test if significant states are returned when excluding entities. + + We should get back all thermostat and script changes, but not the + media_player.test changes. + """ + zero, four, states = self.record_states() + del states['media_player.test'] + + config = history.CONFIG_SCHEMA({ + ha.DOMAIN: {}, + history.DOMAIN: {history.CONF_EXCLUDE: { + history.CONF_ENTITIES: ['media_player.test', ]}}}) + self.check_significant_states(zero, four, states, config) + + def test_get_significant_states_exclude(self): + """Test significant states when excluding entities and domains. + + We should not get back the thermostat or media_player.test changes.
+ """ + zero, four, states = self.record_states() + del states['media_player.test'] + del states['thermostat.test'] + del states['thermostat.test2'] + + config = history.CONFIG_SCHEMA({ + ha.DOMAIN: {}, + history.DOMAIN: {history.CONF_EXCLUDE: { + history.CONF_DOMAINS: ['thermostat', ], + history.CONF_ENTITIES: ['media_player.test', ]}}}) + self.check_significant_states(zero, four, states, config) + + def test_get_significant_states_exclude_include_entity(self): + """Test significant states when excluding domains and include entities. + + We should not get back every thermostat and media player test changes. + """ + zero, four, states = self.record_states() + del states['media_player.test2'] + del states['thermostat.test'] + del states['thermostat.test2'] + del states['script.can_cancel_this_one'] + + config = history.CONFIG_SCHEMA({ + ha.DOMAIN: {}, + history.DOMAIN: { + history.CONF_INCLUDE: { + history.CONF_ENTITIES: ['media_player.test', + 'thermostat.test']}, + history.CONF_EXCLUDE: { + history.CONF_DOMAINS: ['thermostat']}}}) + self.check_significant_states(zero, four, states, config) + + def test_get_significant_states_include_domain(self): + """Test if significant states are returned when including domains. + + We should get back every thermostat and script changes, but no media + player changes. + """ + zero, four, states = self.record_states() + del states['media_player.test'] + del states['media_player.test2'] + + config = history.CONFIG_SCHEMA({ + ha.DOMAIN: {}, + history.DOMAIN: {history.CONF_INCLUDE: { + history.CONF_DOMAINS: ['thermostat', 'script']}}}) + self.check_significant_states(zero, four, states, config) + + def test_get_significant_states_include_entity(self): + """Test if significant states are returned when including entities. + + We should only get back changes of the media_player.test entity. + """ + zero, four, states = self.record_states() + del states['media_player.test2'] + del states['thermostat.test'] + del states['thermostat.test2'] + del states['script.can_cancel_this_one'] + + config = history.CONFIG_SCHEMA({ + ha.DOMAIN: {}, + history.DOMAIN: {history.CONF_INCLUDE: { + history.CONF_ENTITIES: ['media_player.test']}}}) + self.check_significant_states(zero, four, states, config) + + def test_get_significant_states_include(self): + """Test significant states when including domains and entities. + + We should only get back changes of the media_player.test entity and the + thermostat domain. + """ + zero, four, states = self.record_states() + del states['media_player.test2'] + del states['script.can_cancel_this_one'] + + config = history.CONFIG_SCHEMA({ + ha.DOMAIN: {}, + history.DOMAIN: {history.CONF_INCLUDE: { + history.CONF_DOMAINS: ['thermostat'], + history.CONF_ENTITIES: ['media_player.test']}}}) + self.check_significant_states(zero, four, states, config) + + def test_get_significant_states_include_exclude_domain(self): + """Test if significant states when excluding and including domains. + + We should not get back any changes since we include only the + media_player domain but also exclude it. 
+ """ + zero, four, states = self.record_states() + del states['media_player.test'] + del states['media_player.test2'] + del states['thermostat.test'] + del states['thermostat.test2'] + del states['script.can_cancel_this_one'] + + config = history.CONFIG_SCHEMA({ + ha.DOMAIN: {}, + history.DOMAIN: {history.CONF_INCLUDE: { + history.CONF_DOMAINS: ['media_player']}, + history.CONF_EXCLUDE: { + history.CONF_DOMAINS: ['media_player']}}}) + self.check_significant_states(zero, four, states, config) + + def test_get_significant_states_include_exclude_entity(self): + """Test if significant states when excluding and including domains. + + We should not get back any changes since we include only + media_player.test but also exclude it. + """ + zero, four, states = self.record_states() + del states['media_player.test'] + del states['media_player.test2'] + del states['thermostat.test'] + del states['thermostat.test2'] + del states['script.can_cancel_this_one'] + + config = history.CONFIG_SCHEMA({ + ha.DOMAIN: {}, + history.DOMAIN: {history.CONF_INCLUDE: { + history.CONF_ENTITIES: ['media_player.test']}, + history.CONF_EXCLUDE: { + history.CONF_ENTITIES: ['media_player.test']}}}) + self.check_significant_states(zero, four, states, config) + + def test_get_significant_states_include_exclude(self): + """Test if significant states when in/excluding domains and entities. + + We should only get back changes of the media_player.test2 entity. + """ + zero, four, states = self.record_states() + del states['media_player.test'] + del states['thermostat.test'] + del states['thermostat.test2'] + del states['script.can_cancel_this_one'] + + config = history.CONFIG_SCHEMA({ + ha.DOMAIN: {}, + history.DOMAIN: {history.CONF_INCLUDE: { + history.CONF_DOMAINS: ['media_player'], + history.CONF_ENTITIES: ['thermostat.test']}, + history.CONF_EXCLUDE: { + history.CONF_DOMAINS: ['thermostat'], + history.CONF_ENTITIES: ['media_player.test']}}}) + self.check_significant_states(zero, four, states, config) + + def check_significant_states(self, zero, four, states, config): + """Check if significant states are retrieved.""" + filters = history.Filters() + exclude = config[history.DOMAIN].get(history.CONF_EXCLUDE) + if exclude: + filters.excluded_entities = exclude[history.CONF_ENTITIES] + filters.excluded_domains = exclude[history.CONF_DOMAINS] + include = config[history.DOMAIN].get(history.CONF_INCLUDE) + if include: + filters.included_entities = include[history.CONF_ENTITIES] + filters.included_domains = include[history.CONF_DOMAINS] + + hist = history.get_significant_states(zero, four, filters=filters) + assert states == hist + + def record_states(self): + """Record some test states. + + We inject a bunch of state updates from media player, zone and + thermostat. 
+ """ self.init_recorder() mp = 'media_player.test' + mp2 = 'media_player.test2' therm = 'thermostat.test' + therm2 = 'thermostat.test2' zone = 'zone.home' script_nc = 'script.cannot_cancel_this_one' script_c = 'script.can_cancel_this_one' @@ -168,7 +398,7 @@ class TestComponentHistory(unittest.TestCase): three = two + timedelta(seconds=1) four = three + timedelta(seconds=1) - states = {therm: [], mp: [], script_c: []} + states = {therm: [], therm2: [], mp: [], mp2: [], script_c: []} with patch('homeassistant.components.recorder.dt_util.utcnow', return_value=one): states[mp].append( @@ -177,6 +407,9 @@ class TestComponentHistory(unittest.TestCase): states[mp].append( set_state(mp, 'YouTube', attributes={'media_title': str(sentinel.mt2)})) + states[mp2].append( + set_state(mp2, 'YouTube', + attributes={'media_title': str(sentinel.mt2)})) states[therm].append( set_state(therm, 20, attributes={'current_temperature': 19.5})) @@ -192,6 +425,8 @@ class TestComponentHistory(unittest.TestCase): set_state(script_c, 'off', attributes={'can_cancel': True})) states[therm].append( set_state(therm, 21, attributes={'current_temperature': 19.8})) + states[therm2].append( + set_state(therm2, 20, attributes={'current_temperature': 19})) with patch('homeassistant.components.recorder.dt_util.utcnow', return_value=three): @@ -201,6 +436,7 @@ class TestComponentHistory(unittest.TestCase): # Attributes changed even though state is the same states[therm].append( set_state(therm, 21, attributes={'current_temperature': 20})) - - hist = history.get_significant_states(zero, four) - assert states == hist + # state will be skipped since entity is hidden + set_state(therm, 22, attributes={'current_temperature': 21, + 'hidden': True}) + return zero, four, states diff --git a/tests/components/test_http.py b/tests/components/test_http.py index e4e0fafd7c7..57f21fd76d2 100644 --- a/tests/components/test_http.py +++ b/tests/components/test_http.py @@ -2,6 +2,8 @@ # pylint: disable=protected-access,too-many-public-methods import logging import time +from ipaddress import ip_network +from unittest.mock import patch import requests @@ -10,26 +12,30 @@ import homeassistant.components.http as http from tests.common import get_test_instance_port, get_test_home_assistant -API_PASSWORD = "test1234" +API_PASSWORD = 'test1234' SERVER_PORT = get_test_instance_port() -HTTP_BASE = "127.0.0.1:{}".format(SERVER_PORT) -HTTP_BASE_URL = "http://{}".format(HTTP_BASE) +HTTP_BASE = '127.0.0.1:{}'.format(SERVER_PORT) +HTTP_BASE_URL = 'http://{}'.format(HTTP_BASE) HA_HEADERS = { const.HTTP_HEADER_HA_AUTH: API_PASSWORD, const.HTTP_HEADER_CONTENT_TYPE: const.CONTENT_TYPE_JSON, } +# Don't add 127.0.0.1/::1 as trusted, as it may interfere with other test cases +TRUSTED_NETWORKS = ['192.0.2.0/24', '2001:DB8:ABCD::/48', '100.64.0.1', + 'FD01:DB8::1'] CORS_ORIGINS = [HTTP_BASE_URL, HTTP_BASE] hass = None -def _url(path=""): +def _url(path=''): """Helper method to generate URLs.""" return HTTP_BASE_URL + path -def setUpModule(): # pylint: disable=invalid-name +# pylint: disable=invalid-name +def setUpModule(): """Initialize a Home Assistant server.""" global hass @@ -39,13 +45,21 @@ def setUpModule(): # pylint: disable=invalid-name hass.states.set('test.test', 'a_state') bootstrap.setup_component( - hass, http.DOMAIN, - {http.DOMAIN: {http.CONF_API_PASSWORD: API_PASSWORD, - http.CONF_SERVER_PORT: SERVER_PORT, - http.CONF_CORS_ORIGINS: CORS_ORIGINS}}) + hass, http.DOMAIN, { + http.DOMAIN: { + http.CONF_API_PASSWORD: API_PASSWORD, + http.CONF_SERVER_PORT: 
SERVER_PORT, + http.CONF_CORS_ORIGINS: CORS_ORIGINS, + } + } + ) bootstrap.setup_component(hass, 'api') + hass.wsgi.trusted_networks = [ + ip_network(trusted_network) + for trusted_network in TRUSTED_NETWORKS] + hass.start() time.sleep(0.05) @@ -72,20 +86,24 @@ class TestHttp: assert req.status_code == 401 - def test_access_denied_with_ip_no_in_approved_ips(self, caplog): - """Test access deniend with ip not in approved ip.""" - hass.wsgi.approved_ips = ['134.4.56.1'] + def test_access_denied_with_untrusted_ip(self, caplog): + """Test access with an untrusted ip address.""" + for remote_addr in ['198.51.100.1', '2001:DB8:FA1::1', '127.0.0.1', + '::1']: + with patch('homeassistant.components.http.' + 'HomeAssistantWSGI.get_real_ip', + return_value=remote_addr): + req = requests.get( + _url(const.URL_API), params={'api_password': ''}) - req = requests.get(_url(const.URL_API), - params={'api_password': ''}) - - assert req.status_code == 401 + assert req.status_code == 401, \ + "{} shouldn't be trusted".format(remote_addr) def test_access_with_password_in_header(self, caplog): """Test access with password in URL.""" # Hide logging from requests package that we use to test logging - caplog.set_level(logging.WARNING, - logger='requests.packages.urllib3.connectionpool') + caplog.set_level( + logging.WARNING, logger='requests.packages.urllib3.connectionpool') req = requests.get( _url(const.URL_API), @@ -100,19 +118,19 @@ class TestHttp: def test_access_denied_with_wrong_password_in_url(self): """Test access with wrong password.""" - req = requests.get(_url(const.URL_API), - params={'api_password': 'wrongpassword'}) + req = requests.get( + _url(const.URL_API), params={'api_password': 'wrongpassword'}) assert req.status_code == 401 def test_access_with_password_in_url(self, caplog): """Test access with password in URL.""" # Hide logging from requests package that we use to test logging - caplog.set_level(logging.WARNING, - logger='requests.packages.urllib3.connectionpool') + caplog.set_level( + logging.WARNING, logger='requests.packages.urllib3.connectionpool') - req = requests.get(_url(const.URL_API), - params={'api_password': API_PASSWORD}) + req = requests.get( + _url(const.URL_API), params={'api_password': API_PASSWORD}) assert req.status_code == 200 @@ -121,14 +139,18 @@ class TestHttp: # assert const.URL_API in logs assert API_PASSWORD not in logs - def test_access_with_ip_in_approved_ips(self, caplog): - """Test access with approved ip.""" - hass.wsgi.approved_ips = ['127.0.0.1', '134.4.56.1'] + def test_access_with_trusted_ip(self, caplog): + """Test access with trusted addresses.""" + for remote_addr in ['100.64.0.1', '192.0.2.100', 'FD01:DB8::1', + '2001:DB8:ABCD::1']: + with patch('homeassistant.components.http.' 
+ 'HomeAssistantWSGI.get_real_ip', + return_value=remote_addr): + req = requests.get( + _url(const.URL_API), params={'api_password': ''}) - req = requests.get(_url(const.URL_API), - params={'api_password': ''}) - - assert req.status_code == 200 + assert req.status_code == 200, \ + '{} should be trusted'.format(remote_addr) def test_cors_allowed_with_password_in_url(self): """Test cross origin resource sharing with password in url.""" @@ -138,7 +160,7 @@ class TestHttp: allow_origin = const.HTTP_HEADER_ACCESS_CONTROL_ALLOW_ORIGIN allow_headers = const.HTTP_HEADER_ACCESS_CONTROL_ALLOW_HEADERS - all_allow_headers = ", ".join(const.ALLOWED_CORS_HEADERS) + all_allow_headers = ', '.join(const.ALLOWED_CORS_HEADERS) assert req.status_code == 200 assert req.headers.get(allow_origin) == HTTP_BASE_URL @@ -150,12 +172,11 @@ class TestHttp: const.HTTP_HEADER_HA_AUTH: API_PASSWORD, const.HTTP_HEADER_ORIGIN: HTTP_BASE_URL } - req = requests.get(_url(const.URL_API), - headers=headers) + req = requests.get(_url(const.URL_API), headers=headers) allow_origin = const.HTTP_HEADER_ACCESS_CONTROL_ALLOW_ORIGIN allow_headers = const.HTTP_HEADER_ACCESS_CONTROL_ALLOW_HEADERS - all_allow_headers = ", ".join(const.ALLOWED_CORS_HEADERS) + all_allow_headers = ', '.join(const.ALLOWED_CORS_HEADERS) assert req.status_code == 200 assert req.headers.get(allow_origin) == HTTP_BASE_URL @@ -166,8 +187,7 @@ class TestHttp: headers = { const.HTTP_HEADER_HA_AUTH: API_PASSWORD } - req = requests.get(_url(const.URL_API), - headers=headers) + req = requests.get(_url(const.URL_API), headers=headers) allow_origin = const.HTTP_HEADER_ACCESS_CONTROL_ALLOW_ORIGIN allow_headers = const.HTTP_HEADER_ACCESS_CONTROL_ALLOW_HEADERS @@ -183,12 +203,11 @@ class TestHttp: 'Access-Control-Request-Method': 'GET', 'Access-Control-Request-Headers': 'x-ha-access' } - req = requests.options(_url(const.URL_API), - headers=headers) + req = requests.options(_url(const.URL_API), headers=headers) allow_origin = const.HTTP_HEADER_ACCESS_CONTROL_ALLOW_ORIGIN allow_headers = const.HTTP_HEADER_ACCESS_CONTROL_ALLOW_HEADERS - all_allow_headers = ", ".join(const.ALLOWED_CORS_HEADERS) + all_allow_headers = ', '.join(const.ALLOWED_CORS_HEADERS) assert req.status_code == 200 assert req.headers.get(allow_origin) == HTTP_BASE_URL diff --git a/tests/components/test_influxdb.py b/tests/components/test_influxdb.py index 3210bf0db9f..1f934e64a19 100644 --- a/tests/components/test_influxdb.py +++ b/tests/components/test_influxdb.py @@ -1,5 +1,4 @@ """The tests for the InfluxDB component.""" -import copy import unittest from unittest import mock @@ -35,8 +34,8 @@ class TestInfluxDB(unittest.TestCase): } assert setup_component(self.hass, influxdb.DOMAIN, config) self.assertTrue(self.hass.bus.listen.called) - self.assertEqual(EVENT_STATE_CHANGED, - self.hass.bus.listen.call_args_list[0][0][0]) + self.assertEqual( + EVENT_STATE_CHANGED, self.hass.bus.listen.call_args_list[0][0][0]) self.assertTrue(mock_client.return_value.query.called) def test_setup_config_defaults(self, mock_client): @@ -50,21 +49,26 @@ class TestInfluxDB(unittest.TestCase): } assert setup_component(self.hass, influxdb.DOMAIN, config) self.assertTrue(self.hass.bus.listen.called) - self.assertEqual(EVENT_STATE_CHANGED, - self.hass.bus.listen.call_args_list[0][0][0]) + self.assertEqual( + EVENT_STATE_CHANGED, self.hass.bus.listen.call_args_list[0][0][0]) - def test_setup_missing_keys(self, mock_client): - """Test the setup with missing keys.""" + def test_setup_minimal_config(self, mock_client): + """Test the 
setup with minimal configuration.""" + config = { + 'influxdb': {} + } + + assert setup_component(self.hass, influxdb.DOMAIN, config) + + def test_setup_missing_password(self, mock_client): + """Test the setup with existing username and missing password.""" config = { 'influxdb': { - 'username': 'user', - 'password': 'pass', + 'username': 'user' } } - for missing in config['influxdb'].keys(): - config_copy = copy.deepcopy(config) - del config_copy['influxdb'][missing] - assert not setup_component(self.hass, influxdb.DOMAIN, config_copy) + + assert not setup_component(self.hass, influxdb.DOMAIN, config) def test_setup_query_fail(self, mock_client): """Test the setup for query failures.""" @@ -96,23 +100,22 @@ class TestInfluxDB(unittest.TestCase): """Test the event listener.""" self._setup() - valid = {'1': 1, - '1.0': 1.0, - STATE_ON: 1, - STATE_OFF: 0, - 'foo': 'foo'} + valid = { + '1': 1, + '1.0': 1.0, + STATE_ON: 1, + STATE_OFF: 0, + 'foo': 'foo' + } for in_, out in valid.items(): attrs = { - 'unit_of_measurement': 'foobars', - 'longitude': '1.1', - 'latitude': '2.2' - } - state = mock.MagicMock(state=in_, - domain='fake', - object_id='entity', - attributes=attrs) - event = mock.MagicMock(data={'new_state': state}, - time_fired=12345) + 'unit_of_measurement': 'foobars', + 'longitude': '1.1', + 'latitude': '2.2' + } + state = mock.MagicMock( + state=in_, domain='fake', object_id='entity', attributes=attrs) + event = mock.MagicMock(data={'new_state': state}, time_fired=12345) body = [{ 'measurement': 'foobars', 'tags': { @@ -127,7 +130,13 @@ class TestInfluxDB(unittest.TestCase): }, }] self.handler_method(event) - mock_client.return_value.write_points.assert_called_once_with(body) + self.assertEqual( + mock_client.return_value.write_points.call_count, 1 + ) + self.assertEqual( + mock_client.return_value.write_points.call_args, + mock.call(body) + ) mock_client.return_value.write_points.reset_mock() def test_event_listener_no_units(self, mock_client): @@ -139,13 +148,10 @@ class TestInfluxDB(unittest.TestCase): attrs = {'unit_of_measurement': unit} else: attrs = {} - state = mock.MagicMock(state=1, - domain='fake', - entity_id='entity-id', - object_id='entity', - attributes=attrs) - event = mock.MagicMock(data={'new_state': state}, - time_fired=12345) + state = mock.MagicMock( + state=1, domain='fake', entity_id='entity-id', + object_id='entity', attributes=attrs) + event = mock.MagicMock(data={'new_state': state}, time_fired=12345) body = [{ 'measurement': 'entity-id', 'tags': { @@ -158,20 +164,23 @@ class TestInfluxDB(unittest.TestCase): }, }] self.handler_method(event) - mock_client.return_value.write_points.assert_called_once_with(body) + self.assertEqual( + mock_client.return_value.write_points.call_count, 1 + ) + self.assertEqual( + mock_client.return_value.write_points.call_args, + mock.call(body) + ) mock_client.return_value.write_points.reset_mock() def test_event_listener_fail_write(self, mock_client): """Test the event listener for write failures.""" self._setup() - state = mock.MagicMock(state=1, - domain='fake', - entity_id='entity-id', - object_id='entity', - attributes={}) - event = mock.MagicMock(data={'new_state': state}, - time_fired=12345) + state = mock.MagicMock( + state=1, domain='fake', entity_id='entity-id', object_id='entity', + attributes={}) + event = mock.MagicMock(data={'new_state': state}, time_fired=12345) mock_client.return_value.write_points.side_effect = \ influx_client.exceptions.InfluxDBClientError('foo') self.handler_method(event) @@ -181,13 +190,10 @@ 
class TestInfluxDB(unittest.TestCase): self._setup() for state_state in (1, 'unknown', '', 'unavailable'): - state = mock.MagicMock(state=state_state, - domain='fake', - entity_id='entity-id', - object_id='entity', - attributes={}) - event = mock.MagicMock(data={'new_state': state}, - time_fired=12345) + state = mock.MagicMock( + state=state_state, domain='fake', entity_id='entity-id', + object_id='entity', attributes={}) + event = mock.MagicMock(data={'new_state': state}, time_fired=12345) body = [{ 'measurement': 'entity-id', 'tags': { @@ -201,8 +207,13 @@ class TestInfluxDB(unittest.TestCase): }] self.handler_method(event) if state_state == 1: - mock_client.return_value.write_points.assert_called_once_with( - body) + self.assertEqual( + mock_client.return_value.write_points.call_count, 1 + ) + self.assertEqual( + mock_client.return_value.write_points.call_args, + mock.call(body) + ) else: self.assertFalse(mock_client.return_value.write_points.called) mock_client.return_value.write_points.reset_mock() @@ -212,13 +223,10 @@ class TestInfluxDB(unittest.TestCase): self._setup() for entity_id in ('ok', 'blacklisted'): - state = mock.MagicMock(state=1, - domain='fake', - entity_id='fake.{}'.format(entity_id), - object_id=entity_id, - attributes={}) - event = mock.MagicMock(data={'new_state': state}, - time_fired=12345) + state = mock.MagicMock( + state=1, domain='fake', entity_id='fake.{}'.format(entity_id), + object_id=entity_id, attributes={}) + event = mock.MagicMock(data={'new_state': state}, time_fired=12345) body = [{ 'measurement': 'fake.{}'.format(entity_id), 'tags': { @@ -232,8 +240,13 @@ class TestInfluxDB(unittest.TestCase): }] self.handler_method(event) if entity_id == 'ok': - mock_client.return_value.write_points.assert_called_once_with( - body) + self.assertEqual( + mock_client.return_value.write_points.call_count, 1 + ) + self.assertEqual( + mock_client.return_value.write_points.call_args, + mock.call(body) + ) else: self.assertFalse(mock_client.return_value.write_points.called) mock_client.return_value.write_points.reset_mock() diff --git a/tests/components/test_init.py b/tests/components/test_init.py index 76878432ecd..44a60ee986f 100644 --- a/tests/components/test_init.py +++ b/tests/components/test_init.py @@ -1,8 +1,7 @@ """The testd for Core components.""" # pylint: disable=protected-access,too-many-public-methods import unittest -from unittest.mock import patch -from tempfile import TemporaryDirectory +from unittest.mock import patch, Mock import yaml @@ -13,7 +12,8 @@ from homeassistant.const import ( import homeassistant.components as comps from homeassistant.helpers import entity -from tests.common import get_test_home_assistant, mock_service +from tests.common import ( + get_test_home_assistant, mock_service, patch_yaml_files) class TestComponentsCore(unittest.TestCase): @@ -89,6 +89,7 @@ class TestComponentsCore(unittest.TestCase): ('sensor', 'turn_on', {'entity_id': ['sensor.bla']}, False), mock_call.call_args_list[1][0]) + @patch('homeassistant.config.os.path.isfile', Mock(return_value=True)) def test_reload_core_conf(self): """Test reload core conf service.""" ent = entity.Entity() @@ -101,23 +102,20 @@ class TestComponentsCore(unittest.TestCase): assert state.state == 'unknown' assert state.attributes == {} - with TemporaryDirectory() as conf_dir: - self.hass.config.config_dir = conf_dir - conf_yaml = self.hass.config.path(config.YAML_CONFIG_FILE) - - with open(conf_yaml, 'a') as fp: - fp.write(yaml.dump({ - ha.DOMAIN: { - 'latitude': 10, - 'longitude': 20, - 
'customize': { - 'test.Entity': { - 'hello': 'world' - } + files = { + config.YAML_CONFIG_FILE: yaml.dump({ + ha.DOMAIN: { + 'latitude': 10, + 'longitude': 20, + 'customize': { + 'test.Entity': { + 'hello': 'world' } } - })) - + } + }) + } + with patch_yaml_files(files, True): comps.reload_core_config(self.hass) self.hass.block_till_done() @@ -131,17 +129,15 @@ class TestComponentsCore(unittest.TestCase): assert state.state == 'unknown' assert state.attributes.get('hello') == 'world' + @patch('homeassistant.config.os.path.isfile', Mock(return_value=True)) @patch('homeassistant.components._LOGGER.error') @patch('homeassistant.config.process_ha_core_config') def test_reload_core_with_wrong_conf(self, mock_process, mock_error): """Test reload core conf service.""" - with TemporaryDirectory() as conf_dir: - self.hass.config.config_dir = conf_dir - conf_yaml = self.hass.config.path(config.YAML_CONFIG_FILE) - - with open(conf_yaml, 'a') as fp: - fp.write(yaml.dump(['invalid', 'config'])) - + files = { + config.YAML_CONFIG_FILE: yaml.dump(['invalid', 'config']) + } + with patch_yaml_files(files, True): comps.reload_core_config(self.hass) self.hass.block_till_done() diff --git a/tests/components/test_input_select.py b/tests/components/test_input_select.py index a3f121576fb..8231390410e 100644 --- a/tests/components/test_input_select.py +++ b/tests/components/test_input_select.py @@ -6,7 +6,7 @@ from tests.common import get_test_home_assistant from homeassistant.bootstrap import setup_component from homeassistant.components.input_select import ( - ATTR_OPTIONS, DOMAIN, select_option) + ATTR_OPTIONS, DOMAIN, select_option, select_next, select_previous) from homeassistant.const import ( ATTR_ICON, ATTR_FRIENDLY_NAME) @@ -67,6 +67,66 @@ class TestInputSelect(unittest.TestCase): state = self.hass.states.get(entity_id) self.assertEqual('another option', state.state) + def test_select_next(self): + """Test select_next methods.""" + self.assertTrue( + setup_component(self.hass, DOMAIN, {DOMAIN: { + 'test_1': { + 'options': [ + 'first option', + 'middle option', + 'last option', + ], + 'initial': 'middle option', + }, + }})) + entity_id = 'input_select.test_1' + + state = self.hass.states.get(entity_id) + self.assertEqual('middle option', state.state) + + select_next(self.hass, entity_id) + self.hass.block_till_done() + + state = self.hass.states.get(entity_id) + self.assertEqual('last option', state.state) + + select_next(self.hass, entity_id) + self.hass.block_till_done() + + state = self.hass.states.get(entity_id) + self.assertEqual('first option', state.state) + + def test_select_previous(self): + """Test select_previous methods.""" + self.assertTrue( + setup_component(self.hass, DOMAIN, {DOMAIN: { + 'test_1': { + 'options': [ + 'first option', + 'middle option', + 'last option', + ], + 'initial': 'middle option', + }, + }})) + entity_id = 'input_select.test_1' + + state = self.hass.states.get(entity_id) + self.assertEqual('middle option', state.state) + + select_previous(self.hass, entity_id) + self.hass.block_till_done() + + state = self.hass.states.get(entity_id) + self.assertEqual('first option', state.state) + + select_previous(self.hass, entity_id) + self.hass.block_till_done() + + state = self.hass.states.get(entity_id) + self.assertEqual('last option', state.state) + def test_config_options(self): """Test configuration options.""" count_start = len(self.hass.states.entity_ids()) diff --git a/tests/components/test_logbook.py b/tests/components/test_logbook.py index 9e8ab09a5a6..2dcc47549df 
100644 --- a/tests/components/test_logbook.py +++ b/tests/components/test_logbook.py @@ -186,6 +186,128 @@ class TestComponentLogbook(unittest.TestCase): self.assert_entry(entries[1], pointB, 'blu', domain='sensor', entity_id=entity_id2) + def test_exclude_automation_events(self): + """Test if automation entries can be excluded by entity_id.""" + name = 'My Automation Rule' + message = 'has been triggered' + domain = 'automation' + entity_id = 'automation.my_automation_rule' + entity_id2 = 'automation.my_automation_rule_2' + entity_id2 = 'sensor.blu' + + eventA = ha.Event(logbook.EVENT_LOGBOOK_ENTRY, { + logbook.ATTR_NAME: name, + logbook.ATTR_MESSAGE: message, + logbook.ATTR_DOMAIN: domain, + logbook.ATTR_ENTITY_ID: entity_id, + }) + eventB = ha.Event(logbook.EVENT_LOGBOOK_ENTRY, { + logbook.ATTR_NAME: name, + logbook.ATTR_MESSAGE: message, + logbook.ATTR_DOMAIN: domain, + logbook.ATTR_ENTITY_ID: entity_id2, + }) + + config = logbook.CONFIG_SCHEMA({ + ha.DOMAIN: {}, + logbook.DOMAIN: {logbook.CONF_EXCLUDE: { + logbook.CONF_ENTITIES: [entity_id, ]}}}) + events = logbook._exclude_events((ha.Event(EVENT_HOMEASSISTANT_STOP), + eventA, eventB), config) + entries = list(logbook.humanify(events)) + + self.assertEqual(2, len(entries)) + self.assert_entry( + entries[0], name='Home Assistant', message='stopped', + domain=ha.DOMAIN) + self.assert_entry( + entries[1], name=name, domain=domain, entity_id=entity_id2) + + def test_include_events_entity(self): + """Test if events are filtered if entity is included in config.""" + entity_id = 'sensor.bla' + entity_id2 = 'sensor.blu' + pointA = dt_util.utcnow() + pointB = pointA + timedelta(minutes=logbook.GROUP_BY_MINUTES) + + eventA = self.create_state_changed_event(pointA, entity_id, 10) + eventB = self.create_state_changed_event(pointB, entity_id2, 20) + + config = logbook.CONFIG_SCHEMA({ + ha.DOMAIN: {}, + logbook.DOMAIN: {logbook.CONF_INCLUDE: { + logbook.CONF_ENTITIES: [entity_id2, ]}}}) + events = logbook._exclude_events((ha.Event(EVENT_HOMEASSISTANT_STOP), + eventA, eventB), config) + entries = list(logbook.humanify(events)) + + self.assertEqual(2, len(entries)) + self.assert_entry( + entries[0], name='Home Assistant', message='stopped', + domain=ha.DOMAIN) + self.assert_entry( + entries[1], pointB, 'blu', domain='sensor', entity_id=entity_id2) + + def test_include_events_domain(self): + """Test if events are filtered if domain is included in config.""" + entity_id = 'switch.bla' + entity_id2 = 'sensor.blu' + pointA = dt_util.utcnow() + pointB = pointA + timedelta(minutes=logbook.GROUP_BY_MINUTES) + + eventA = self.create_state_changed_event(pointA, entity_id, 10) + eventB = self.create_state_changed_event(pointB, entity_id2, 20) + + config = logbook.CONFIG_SCHEMA({ + ha.DOMAIN: {}, + logbook.DOMAIN: {logbook.CONF_INCLUDE: { + logbook.CONF_DOMAINS: ['sensor', ]}}}) + events = logbook._exclude_events((ha.Event(EVENT_HOMEASSISTANT_START), + eventA, eventB), config) + entries = list(logbook.humanify(events)) + + self.assertEqual(2, len(entries)) + self.assert_entry(entries[0], name='Home Assistant', message='started', + domain=ha.DOMAIN) + self.assert_entry(entries[1], pointB, 'blu', domain='sensor', + entity_id=entity_id2) + + def test_include_exclude_events(self): + """Test if events are filtered if include and exclude is configured.""" + entity_id = 'switch.bla' + entity_id2 = 'sensor.blu' + entity_id3 = 'sensor.bli' + pointA = dt_util.utcnow() + pointB = pointA + timedelta(minutes=logbook.GROUP_BY_MINUTES) + + eventA1 = 
self.create_state_changed_event(pointA, entity_id, 10) + eventA2 = self.create_state_changed_event(pointA, entity_id2, 10) + eventA3 = self.create_state_changed_event(pointA, entity_id3, 10) + eventB1 = self.create_state_changed_event(pointB, entity_id, 20) + eventB2 = self.create_state_changed_event(pointB, entity_id2, 20) + + config = logbook.CONFIG_SCHEMA({ + ha.DOMAIN: {}, + logbook.DOMAIN: { + logbook.CONF_INCLUDE: { + logbook.CONF_DOMAINS: ['sensor', ], + logbook.CONF_ENTITIES: ['switch.bla', ]}, + logbook.CONF_EXCLUDE: { + logbook.CONF_DOMAINS: ['switch', ], + logbook.CONF_ENTITIES: ['sensor.bli', ]}}}) + events = logbook._exclude_events((ha.Event(EVENT_HOMEASSISTANT_START), + eventA1, eventA2, eventA3, + eventB1, eventB2), config) + entries = list(logbook.humanify(events)) + + self.assertEqual(3, len(entries)) + self.assert_entry(entries[0], name='Home Assistant', message='started', + domain=ha.DOMAIN) + self.assert_entry(entries[1], pointA, 'blu', domain='sensor', + entity_id=entity_id2) + self.assert_entry(entries[2], pointB, 'blu', domain='sensor', + entity_id=entity_id2) + def test_exclude_auto_groups(self): """Test if events of automatically generated groups are filtered.""" entity_id = 'switch.bla' diff --git a/tests/components/test_logentries.py b/tests/components/test_logentries.py index 94097fba32c..4bcef23ee7e 100644 --- a/tests/components/test_logentries.py +++ b/tests/components/test_logentries.py @@ -84,6 +84,9 @@ class TestLogentries(unittest.TestCase): 'logs/token', 'event': body} self.handler_method(event) - self.mock_post.assert_called_once_with( - payload['host'], data=payload, timeout=10) + self.assertEqual(self.mock_post.call_count, 1) + self.assertEqual( + self.mock_post.call_args, + mock.call(payload['host'], data=payload, timeout=10) + ) self.mock_post.reset_mock() diff --git a/tests/components/test_panel_custom.py b/tests/components/test_panel_custom.py index 6a41706db98..1ef12161bcb 100644 --- a/tests/components/test_panel_custom.py +++ b/tests/components/test_panel_custom.py @@ -1,9 +1,8 @@ """The tests for the panel_custom component.""" import os import shutil -from tempfile import NamedTemporaryFile import unittest -from unittest.mock import patch +from unittest.mock import Mock, patch from homeassistant import bootstrap from homeassistant.components import panel_custom @@ -47,31 +46,40 @@ class TestPanelCustom(unittest.TestCase): @patch('homeassistant.components.panel_custom.register_panel') def test_webcomponent_custom_path(self, mock_register, _mock_setup): """Test if a web component is found in config panels dir.""" - with NamedTemporaryFile() as fp: - config = { - 'panel_custom': { - 'name': 'todomvc', - 'webcomponent_path': fp.name, - 'sidebar_title': 'Sidebar Title', - 'sidebar_icon': 'mdi:iconicon', - 'url_path': 'nice_url', - 'config': 5, - } - } + filename = 'mock.file' - with patch('os.path.isfile', return_value=False): - assert not bootstrap.setup_component(self.hass, 'panel_custom', - config) - assert not mock_register.called - - assert bootstrap.setup_component(self.hass, 'panel_custom', config) - assert mock_register.called - args = mock_register.mock_calls[0][1] - kwargs = mock_register.mock_calls[0][2] - assert args == (self.hass, 'todomvc', fp.name) - assert kwargs == { - 'config': 5, - 'url_path': 'nice_url', + config = { + 'panel_custom': { + 'name': 'todomvc', + 'webcomponent_path': filename, + 'sidebar_title': 'Sidebar Title', 'sidebar_icon': 'mdi:iconicon', - 'sidebar_title': 'Sidebar Title' + 'url_path': 'nice_url', + 'config': 5, 
} + } + + with patch('os.path.isfile', Mock(return_value=False)): + assert not bootstrap.setup_component( + self.hass, 'panel_custom', config + ) + assert not mock_register.called + + with patch('os.path.isfile', Mock(return_value=True)): + with patch('os.access', Mock(return_value=True)): + assert bootstrap.setup_component( + self.hass, 'panel_custom', config + ) + + assert mock_register.called + + args = mock_register.mock_calls[0][1] + assert args == (self.hass, 'todomvc', filename) + + kwargs = mock_register.mock_calls[0][2] + assert kwargs == { + 'config': 5, + 'url_path': 'nice_url', + 'sidebar_icon': 'mdi:iconicon', + 'sidebar_title': 'Sidebar Title' + } diff --git a/tests/components/test_pilight.py b/tests/components/test_pilight.py new file mode 100644 index 00000000000..ca491ee838d --- /dev/null +++ b/tests/components/test_pilight.py @@ -0,0 +1,298 @@ +"""The tests for the pilight component.""" +import logging +import unittest +from unittest.mock import patch +import socket + +from homeassistant.bootstrap import setup_component +from homeassistant.components import pilight + +from tests.common import get_test_home_assistant, assert_setup_component + +_LOGGER = logging.getLogger(__name__) + + +class PilightDaemonSim: + """Class to fake the interface of the pilight python package. + + It is used in an asyncio loop, so the mock cannot be accessed to + determine if methods were called. + This is solved here in a hackish way by printing errors + that can be checked using logging.error mocks. + """ + + callback = None + called = None + + test_message = {"protocol": "kaku_switch", + "uuid": "1-2-3-4", + "message": { + "id": 0, + "unit": 0, + "off": 1}} + + def __init__(self, host, port): + """Init pilight client, ignore parameters.""" + pass + + def send_code(self, call): # pylint: disable=no-self-use + """Called when the pilight.send service is called.""" + _LOGGER.error('PilightDaemonSim payload: ' + str(call)) + + def start(self): + """Called when homeassistant.start is called.
+ + Also sends one test message after start-up. + """ + _LOGGER.error('PilightDaemonSim start') + # Fake one code receive after daemon started + if not self.called: + self.callback(self.test_message) + self.called = True + + def stop(self): # pylint: disable=no-self-use + """Called when homeassistant.stop is called.""" + _LOGGER.error('PilightDaemonSim stop') + + def set_callback(self, function): + """Callback called on event pilight.pilight_received.""" + self.callback = function + _LOGGER.error('PilightDaemonSim callback: ' + str(function)) + + +class TestPilight(unittest.TestCase): + """Test the Pilight component.""" + + def setUp(self): # pylint: disable=invalid-name + """Setup things to be run when tests are started.""" + self.hass = get_test_home_assistant() + + @patch('homeassistant.components.pilight._LOGGER.error') + def test_connection_failed_error(self, mock_error): + """Try to connect to 127.0.0.1:5000 with a socket error.""" + with assert_setup_component(3): + with patch('pilight.pilight.Client', + side_effect=socket.error) as mock_client: + self.assertFalse(setup_component( + self.hass, pilight.DOMAIN, {pilight.DOMAIN: {}})) + mock_client.assert_called_once_with(host=pilight.DEFAULT_HOST, + port=pilight.DEFAULT_PORT) + self.assertEqual(1, mock_error.call_count) + + @patch('homeassistant.components.pilight._LOGGER.error') + def test_connection_timeout_error(self, mock_error): + """Try to connect to 127.0.0.1:5000 with a socket timeout.""" + with assert_setup_component(3): + with patch('pilight.pilight.Client', + side_effect=socket.timeout) as mock_client: + self.assertFalse(setup_component( + self.hass, pilight.DOMAIN, {pilight.DOMAIN: {}})) + mock_client.assert_called_once_with(host=pilight.DEFAULT_HOST, + port=pilight.DEFAULT_PORT) + self.assertEqual(1, mock_error.call_count) + + @patch('pilight.pilight.Client', PilightDaemonSim) + @patch('homeassistant.core._LOGGER.error') + @patch('tests.components.test_pilight._LOGGER.error') + def test_send_code_no_protocol(self, mock_pilight_error, mock_error): + """Try to send data without protocol information, should give error.""" + with assert_setup_component(3): + self.assertTrue(setup_component( + self.hass, pilight.DOMAIN, {pilight.DOMAIN: {}})) + + # Call without protocol info, should be ignored with error + self.hass.services.call(pilight.DOMAIN, pilight.SERVICE_NAME, + service_data={'noprotocol': 'test', + 'value': 42}, + blocking=True) + self.hass.block_till_done() + error_log_call = mock_error.call_args_list[-1] + self.assertTrue( + 'required key not provided @ data[\'protocol\']' in + str(error_log_call)) + + @patch('pilight.pilight.Client', PilightDaemonSim) + @patch('tests.components.test_pilight._LOGGER.error') + def test_send_code(self, mock_pilight_error): + """Try to send proper data.""" + with assert_setup_component(3): + self.assertTrue(setup_component( + self.hass, pilight.DOMAIN, {pilight.DOMAIN: {}})) + + # Call with protocol info, should not give error + service_data = {'protocol': 'test', + 'value': 42} + self.hass.services.call(pilight.DOMAIN, pilight.SERVICE_NAME, + service_data=service_data, + blocking=True) + self.hass.block_till_done() + error_log_call = mock_pilight_error.call_args_list[-1] + service_data['protocol'] = [service_data['protocol']] + self.assertTrue(str(service_data) in str(error_log_call)) + + @patch('pilight.pilight.Client', PilightDaemonSim) + @patch('homeassistant.components.pilight._LOGGER.error') + def test_send_code_fail(self, mock_pilight_error): + """Check IOError exception error message."""
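An aside on the pattern these new pilight tests rely on: because PilightDaemonSim is driven inside the event loop, the tests observe it indirectly by patching a module-level logger and inspecting its recorded calls. Below is a minimal, self-contained sketch of that technique; the FakeDaemon class and names are invented for illustration and are not Home Assistant code.

```python
# Standalone illustration (not Home Assistant code): a fake daemon whose only
# observable side effect is logging, and a test that patches the module-level
# logger and inspects the most recent call -- the same trick the pilight tests
# use to observe PilightDaemonSim from outside the event loop.
import logging
import unittest
from unittest.mock import patch

_LOGGER = logging.getLogger(__name__)


class FakeDaemon:
    """Stand-in object that reports activity only through the logger."""

    def send_code(self, payload):
        _LOGGER.error('FakeDaemon payload: %s', payload)


class TestFakeDaemon(unittest.TestCase):
    @patch(__name__ + '._LOGGER.error')
    def test_send_code_logs_payload(self, mock_error):
        FakeDaemon().send_code({'protocol': 'test', 'value': 42})
        # The last recorded logger call carries the payload we sent.
        last_call = mock_error.call_args_list[-1]
        self.assertIn('protocol', str(last_call))


if __name__ == '__main__':
    unittest.main()
```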
+ with assert_setup_component(3): + with patch('pilight.pilight.Client.send_code', + side_effect=IOError): + self.assertTrue(setup_component( + self.hass, pilight.DOMAIN, {pilight.DOMAIN: {}})) + + # Call with protocol info, should not give error + service_data = {'protocol': 'test', + 'value': 42} + self.hass.services.call(pilight.DOMAIN, pilight.SERVICE_NAME, + service_data=service_data, + blocking=True) + self.hass.block_till_done() + error_log_call = mock_pilight_error.call_args_list[-1] + self.assertTrue('Pilight send failed' in str(error_log_call)) + + @patch('pilight.pilight.Client', PilightDaemonSim) + @patch('tests.components.test_pilight._LOGGER.error') + def test_start_stop(self, mock_pilight_error): + """Check correct startup and stop of pilight daemon.""" + with assert_setup_component(3): + self.assertTrue(setup_component( + self.hass, pilight.DOMAIN, {pilight.DOMAIN: {}})) + + # Test startup + self.hass.start() + self.hass.block_till_done() + error_log_call = mock_pilight_error.call_args_list[-2] + self.assertTrue( + 'PilightDaemonSim callback' in str(error_log_call)) + error_log_call = mock_pilight_error.call_args_list[-1] + self.assertTrue( + 'PilightDaemonSim start' in str(error_log_call)) + + # Test stop + self.hass.stop() + error_log_call = mock_pilight_error.call_args_list[-1] + self.assertTrue( + 'PilightDaemonSim stop' in str(error_log_call)) + + @patch('pilight.pilight.Client', PilightDaemonSim) + @patch('homeassistant.core._LOGGER.info') + def test_receive_code(self, mock_info): + """Check if code receiving via pilight daemon works.""" + with assert_setup_component(3): + self.assertTrue(setup_component( + self.hass, pilight.DOMAIN, {pilight.DOMAIN: {}})) + + # Test startup + self.hass.start() + self.hass.block_till_done() + + expected_message = dict( + {'protocol': PilightDaemonSim.test_message['protocol'], + 'uuid': PilightDaemonSim.test_message['uuid']}, + **PilightDaemonSim.test_message['message']) + error_log_call = mock_info.call_args_list[-1] + + # Check if all message parts are put on event bus + for key, value in expected_message.items(): + self.assertTrue(str(key) in str(error_log_call)) + self.assertTrue(str(value) in str(error_log_call)) + + @patch('pilight.pilight.Client', PilightDaemonSim) + @patch('homeassistant.core._LOGGER.info') + def test_whitelist_exact_match(self, mock_info): + """Check whitelist filter with matched data.""" + with assert_setup_component(3): + whitelist = { + 'protocol': [PilightDaemonSim.test_message['protocol']], + 'uuid': [PilightDaemonSim.test_message['uuid']], + 'id': [PilightDaemonSim.test_message['message']['id']], + 'unit': [PilightDaemonSim.test_message['message']['unit']]} + self.assertTrue(setup_component( + self.hass, pilight.DOMAIN, + {pilight.DOMAIN: {"whitelist": whitelist}})) + + self.hass.start() + self.hass.block_till_done() + + expected_message = dict( + {'protocol': PilightDaemonSim.test_message['protocol'], + 'uuid': PilightDaemonSim.test_message['uuid']}, + **PilightDaemonSim.test_message['message']) + info_log_call = mock_info.call_args_list[-1] + + # Check if all message parts are put on event bus + for key, value in expected_message.items(): + self.assertTrue(str(key) in str(info_log_call)) + self.assertTrue(str(value) in str(info_log_call)) + + @patch('pilight.pilight.Client', PilightDaemonSim) + @patch('homeassistant.core._LOGGER.info') + def test_whitelist_partial_match(self, mock_info): + """Check whitelist filter with partially matched data, should work.""" + with assert_setup_component(3): + 
whitelist = { + 'protocol': [PilightDaemonSim.test_message['protocol']], + 'id': [PilightDaemonSim.test_message['message']['id']]} + self.assertTrue(setup_component( + self.hass, pilight.DOMAIN, + {pilight.DOMAIN: {"whitelist": whitelist}})) + + self.hass.start() + self.hass.block_till_done() + + expected_message = dict( + {'protocol': PilightDaemonSim.test_message['protocol'], + 'uuid': PilightDaemonSim.test_message['uuid']}, + **PilightDaemonSim.test_message['message']) + info_log_call = mock_info.call_args_list[-1] + + # Check if all message parts are put on event bus + for key, value in expected_message.items(): + self.assertTrue(str(key) in str(info_log_call)) + self.assertTrue(str(value) in str(info_log_call)) + + @patch('pilight.pilight.Client', PilightDaemonSim) + @patch('homeassistant.core._LOGGER.info') + def test_whitelist_or_match(self, mock_info): + """Check whitelist filter with several subsection, should work.""" + with assert_setup_component(3): + whitelist = { + 'protocol': [PilightDaemonSim.test_message['protocol'], + 'other_protocoll'], + 'id': [PilightDaemonSim.test_message['message']['id']]} + self.assertTrue(setup_component( + self.hass, pilight.DOMAIN, + {pilight.DOMAIN: {"whitelist": whitelist}})) + + self.hass.start() + self.hass.block_till_done() + + expected_message = dict( + {'protocol': PilightDaemonSim.test_message['protocol'], + 'uuid': PilightDaemonSim.test_message['uuid']}, + **PilightDaemonSim.test_message['message']) + info_log_call = mock_info.call_args_list[-1] + + # Check if all message parts are put on event bus + for key, value in expected_message.items(): + self.assertTrue(str(key) in str(info_log_call)) + self.assertTrue(str(value) in str(info_log_call)) + + @patch('pilight.pilight.Client', PilightDaemonSim) + @patch('homeassistant.core._LOGGER.info') + def test_whitelist_no_match(self, mock_info): + """Check whitelist filter with unmatched data, should not work.""" + with assert_setup_component(3): + whitelist = { + 'protocol': ['wrong_protocoll'], + 'id': [PilightDaemonSim.test_message['message']['id']]} + self.assertTrue(setup_component( + self.hass, pilight.DOMAIN, + {pilight.DOMAIN: {"whitelist": whitelist}})) + + self.hass.start() + self.hass.block_till_done() + + info_log_call = mock_info.call_args_list[-1] + + self.assertFalse('Event pilight_received' in info_log_call) diff --git a/tests/components/test_proximity.py b/tests/components/test_proximity.py index cbd36a1fc1f..1a1033ab31d 100644 --- a/tests/components/test_proximity.py +++ b/tests/components/test_proximity.py @@ -1,13 +1,17 @@ """The tests for the Proximity component.""" -from homeassistant.components import proximity +import unittest +from homeassistant.components import proximity +from homeassistant.components.proximity import DOMAIN + +from homeassistant.bootstrap import setup_component from tests.common import get_test_home_assistant -class TestProximity: +class TestProximity(unittest.TestCase): """Test the Proximity component.""" - def setup_method(self, method): + def setUp(self): """Setup things to be run when tests are started.""" self.hass = get_test_home_assistant() self.hass.states.set( @@ -27,31 +31,34 @@ class TestProximity: 'radius': 10 }) - def teardown_method(self, method): + def tearDown(self): """Stop everything that was started.""" self.hass.stop() def test_proximities(self): """Test a list of proximities.""" - assert proximity.setup(self.hass, { - 'proximity': [{ - 'zone': 'home', - 'ignored_zones': { - 'work' + config = { + 'proximity': { + 'home': { + 
'ignored_zones': [ + 'work' + ], + 'devices': [ + 'device_tracker.test1', + 'device_tracker.test2' + ], + 'tolerance': '1' }, - 'devices': { - 'device_tracker.test1', - 'device_tracker.test2' - }, - 'tolerance': '1' - }, { - 'zone': 'work', - 'devices': { - 'device_tracker.test1' - }, - 'tolerance': '1' - }] - }) + 'work': { + 'devices': [ + 'device_tracker.test1' + ], + 'tolerance': '1' + } + } + } + + self.assertTrue(setup_component(self.hass, DOMAIN, config)) proximities = ['home', 'work'] @@ -66,40 +73,46 @@ class TestProximity: state = self.hass.states.get('proximity.' + prox) assert state.state == '0' - def test_proximities_missing_devices(self): - """Test a list of proximities with one missing devices.""" - assert not proximity.setup(self.hass, { - 'proximity': [{ - 'zone': 'home', - 'ignored_zones': { - 'work' + def test_proximities_setup(self): + """Test a list of proximities with missing devices.""" + config = { + 'proximity': { + 'home': { + 'ignored_zones': [ + 'work' + ], + 'devices': [ + 'device_tracker.test1', + 'device_tracker.test2' + ], + 'tolerance': '1' }, - 'devices': { - 'device_tracker.test1', - 'device_tracker.test2' - }, - 'tolerance': '1' - }, { - 'zone': 'work', - 'tolerance': '1' - }] - }) + 'work': { + 'tolerance': '1' + } + } + } + + self.assertTrue(setup_component(self.hass, DOMAIN, config)) def test_proximity(self): """Test the proximity.""" - assert proximity.setup(self.hass, { + config = { 'proximity': { - 'zone': 'home', - 'ignored_zones': { - 'work' - }, - 'devices': { - 'device_tracker.test1', - 'device_tracker.test2' - }, - 'tolerance': '1' + 'home': { + 'ignored_zones': [ + 'work' + ], + 'devices': [ + 'device_tracker.test1', + 'device_tracker.test2' + ], + 'tolerance': '1' + } } - }) + } + + self.assertTrue(setup_component(self.hass, DOMAIN, config)) state = self.hass.states.get('proximity.home') assert state.state == 'not set' @@ -111,75 +124,23 @@ class TestProximity: state = self.hass.states.get('proximity.home') assert state.state == '0' - def test_no_devices_in_config(self): - """Test for missing devices in configuration.""" - assert not proximity.setup(self.hass, { - 'proximity': { - 'zone': 'home', - 'ignored_zones': { - 'work' - }, - 'tolerance': '1' - } - }) - - def test_no_tolerance_in_config(self): - """Test for missing tolerance in configuration .""" - assert proximity.setup(self.hass, { - 'proximity': { - 'zone': 'home', - 'ignored_zones': { - 'work' - }, - 'devices': { - 'device_tracker.test1', - 'device_tracker.test2' - } - } - }) - - def test_no_ignored_zones_in_config(self): - """Test for ignored zones in configuration.""" - assert proximity.setup(self.hass, { - 'proximity': { - 'zone': 'home', - 'devices': { - 'device_tracker.test1', - 'device_tracker.test2' - }, - 'tolerance': '1' - } - }) - - def test_no_zone_in_config(self): - """Test for missing zone in configuration.""" - assert proximity.setup(self.hass, { - 'proximity': { - 'ignored_zones': { - 'work' - }, - 'devices': { - 'device_tracker.test1', - 'device_tracker.test2' - }, - 'tolerance': '1' - } - }) - def test_device_tracker_test1_in_zone(self): """Test for tracker in zone.""" - assert proximity.setup(self.hass, { + config = { 'proximity': { - 'zone': 'home', - 'ignored_zones': { - 'work' - }, - 'devices': { - 'device_tracker.test1' - }, - 'tolerance': '1' + 'home': { + 'ignored_zones': [ + 'work' + ], + 'devices': [ + 'device_tracker.test1' + ], + 'tolerance': '1' + } } - }) + } + + self.assertTrue(setup_component(self.hass, DOMAIN, config)) self.hass.states.set( 
'device_tracker.test1', 'home', @@ -196,19 +157,22 @@ class TestProximity: def test_device_trackers_in_zone(self): """Test for trackers in zone.""" - assert proximity.setup(self.hass, { + config = { 'proximity': { - 'zone': 'home', - 'ignored_zones': { - 'work' - }, - 'devices': { - 'device_tracker.test1', - 'device_tracker.test2' - }, - 'tolerance': '1' + 'home': { + 'ignored_zones': [ + 'work' + ], + 'devices': [ + 'device_tracker.test1', + 'device_tracker.test2' + ], + 'tolerance': '1' + } } - }) + } + + self.assertTrue(setup_component(self.hass, DOMAIN, config)) self.hass.states.set( 'device_tracker.test1', 'home', @@ -234,18 +198,21 @@ class TestProximity: def test_device_tracker_test1_away(self): """Test for tracker state away.""" - assert proximity.setup(self.hass, { + config = { 'proximity': { - 'zone': 'home', - 'ignored_zones': { - 'work' - }, - 'devices': { - 'device_tracker.test1' - }, - 'tolerance': '1' + 'home': { + 'ignored_zones': [ + 'work' + ], + 'devices': [ + 'device_tracker.test1', + ], + 'tolerance': '1' + } } - }) + } + + self.assertTrue(setup_component(self.hass, DOMAIN, config)) self.hass.states.set( 'device_tracker.test1', 'not_home', @@ -254,6 +221,7 @@ class TestProximity: 'latitude': 20.1, 'longitude': 10.1 }) + self.hass.block_till_done() state = self.hass.states.get('proximity.home') assert state.attributes.get('nearest') == 'test1' @@ -261,17 +229,21 @@ class TestProximity: def test_device_tracker_test1_awayfurther(self): """Test for tracker state away further.""" - assert proximity.setup(self.hass, { + config = { 'proximity': { - 'zone': 'home', - 'ignored_zones': { - 'work' - }, - 'devices': { - 'device_tracker.test1' + 'home': { + 'ignored_zones': [ + 'work' + ], + 'devices': [ + 'device_tracker.test1', + ], + 'tolerance': '1' } } - }) + } + + self.assertTrue(setup_component(self.hass, DOMAIN, config)) self.hass.states.set( 'device_tracker.test1', 'not_home', @@ -284,31 +256,6 @@ class TestProximity: state = self.hass.states.get('proximity.home') assert state.attributes.get('nearest') == 'test1' assert state.attributes.get('dir_of_travel') == 'unknown' - self.hass.states.set( - 'device_tracker.test1', 'not_home', - { - 'friendly_name': 'test1', - 'latitude': 40.1, - 'longitude': 20.1 - }) - self.hass.block_till_done() - state = self.hass.states.get('proximity.home') - assert state.attributes.get('nearest') == 'test1' - assert state.attributes.get('dir_of_travel') == 'away_from' - - def test_device_tracker_test1_awaycloser(self): - """Test for tracker state away closer.""" - assert proximity.setup(self.hass, { - 'proximity': { - 'zone': 'home', - 'ignored_zones': { - 'work' - }, - 'devices': { - 'device_tracker.test1' - } - } - }) self.hass.states.set( 'device_tracker.test1', 'not_home', @@ -320,32 +267,67 @@ class TestProximity: self.hass.block_till_done() state = self.hass.states.get('proximity.home') assert state.attributes.get('nearest') == 'test1' - assert state.attributes.get('dir_of_travel') == 'unknown' - self.hass.states.set( - 'device_tracker.test1', 'not_home', - { - 'friendly_name': 'test1', - 'latitude': 20.1, - 'longitude': 10.1 - }) - self.hass.block_till_done() - state = self.hass.states.get('proximity.home') - assert state.attributes.get('nearest') == 'test1' assert state.attributes.get('dir_of_travel') == 'towards' - def test_all_device_trackers_in_ignored_zone(self): - """Test for tracker in ignored zone.""" - assert proximity.setup(self.hass, { + def test_device_tracker_test1_awaycloser(self): + """Test for tracker state away closer.""" 
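The proximity assertions in these tests revolve around the dir_of_travel attribute flipping between 'unknown', 'towards' and 'away_from' as successive coordinates arrive. The sketch below is not the component's actual implementation, only a compact restatement of the relationship those assertions encode; the dir_of_travel() helper, the tolerance handling and the 'stationary' branch are assumptions made for illustration.

```python
# Illustrative only -- not the proximity component's real code. A hypothetical
# helper restating what the test assertions encode: the first reading gives
# 'unknown', later readings are classified by comparing distances against a
# tolerance ('stationary' and the tolerance handling are assumed here).
def dir_of_travel(old_dist, new_dist, tolerance=1):
    """Classify movement relative to a zone from two distance samples."""
    if old_dist is None:
        return 'unknown'
    delta = new_dist - old_dist
    if abs(delta) <= tolerance:
        return 'stationary'
    return 'away_from' if delta > 0 else 'towards'


assert dir_of_travel(None, 3500) == 'unknown'    # first reading
assert dir_of_travel(3500, 1200) == 'towards'    # moved closer
assert dir_of_travel(1200, 3500) == 'away_from'  # moved further away
```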
+ config = { 'proximity': { - 'zone': 'home', - 'ignored_zones': { - 'work' - }, - 'devices': { - 'device_tracker.test1' + 'home': { + 'ignored_zones': [ + 'work' + ], + 'devices': [ + 'device_tracker.test1', + ], + 'tolerance': '1' } } - }) + } + + self.assertTrue(setup_component(self.hass, DOMAIN, config)) + + self.hass.states.set( + 'device_tracker.test1', 'not_home', + { + 'friendly_name': 'test1', + 'latitude': 40.1, + 'longitude': 20.1 + }) + self.hass.block_till_done() + state = self.hass.states.get('proximity.home') + assert state.attributes.get('nearest') == 'test1' + assert state.attributes.get('dir_of_travel') == 'unknown' + + self.hass.states.set( + 'device_tracker.test1', 'not_home', + { + 'friendly_name': 'test1', + 'latitude': 20.1, + 'longitude': 10.1 + }) + self.hass.block_till_done() + state = self.hass.states.get('proximity.home') + assert state.attributes.get('nearest') == 'test1' + assert state.attributes.get('dir_of_travel') == 'away_from' + + def test_all_device_trackers_in_ignored_zone(self): + """Test for tracker in ignored zone.""" + config = { + 'proximity': { + 'home': { + 'ignored_zones': [ + 'work' + ], + 'devices': [ + 'device_tracker.test1', + ], + 'tolerance': '1' + } + } + } + + self.assertTrue(setup_component(self.hass, DOMAIN, config)) self.hass.states.set( 'device_tracker.test1', 'work', @@ -360,18 +342,21 @@ class TestProximity: def test_device_tracker_test1_no_coordinates(self): """Test for tracker with no coordinates.""" - assert proximity.setup(self.hass, { + config = { 'proximity': { - 'zone': 'home', - 'ignored_zones': { - 'work' - }, - 'devices': { - 'device_tracker.test1' - }, - 'tolerance': '1' + 'home': { + 'ignored_zones': [ + 'work' + ], + 'devices': [ + 'device_tracker.test1', + ], + 'tolerance': '1' + } } - }) + } + + self.assertTrue(setup_component(self.hass, DOMAIN, config)) self.hass.states.set( 'device_tracker.test1', 'not_home', @@ -397,15 +382,18 @@ class TestProximity: 'friendly_name': 'test2' }) self.hass.block_till_done() + assert proximity.setup(self.hass, { 'proximity': { - 'zone': 'home', - 'ignored_zones': { - 'work' - }, - 'devices': { - 'device_tracker.test1', - 'device_tracker.test2' + 'home': { + 'ignored_zones': [ + 'work' + ], + 'devices': [ + 'device_tracker.test1', + 'device_tracker.test2' + ], + 'tolerance': '1', } } }) @@ -421,6 +409,7 @@ class TestProximity: state = self.hass.states.get('proximity.home') assert state.attributes.get('nearest') == 'test1' assert state.attributes.get('dir_of_travel') == 'unknown' + self.hass.states.set( 'device_tracker.test2', 'not_home', { @@ -449,13 +438,14 @@ class TestProximity: self.hass.block_till_done() assert proximity.setup(self.hass, { 'proximity': { - 'zone': 'home', - 'ignored_zones': { - 'work' - }, - 'devices': { - 'device_tracker.test1', - 'device_tracker.test2' + 'home': { + 'ignored_zones': [ + 'work' + ], + 'devices': [ + 'device_tracker.test1', + 'device_tracker.test2' + ] } } }) @@ -471,6 +461,7 @@ class TestProximity: state = self.hass.states.get('proximity.home') assert state.attributes.get('nearest') == 'test2' assert state.attributes.get('dir_of_travel') == 'unknown' + self.hass.states.set( 'device_tracker.test1', 'not_home', { @@ -499,13 +490,14 @@ class TestProximity: self.hass.block_till_done() assert proximity.setup(self.hass, { 'proximity': { - 'zone': 'home', - 'ignored_zones': { - 'work' - }, - 'devices': { - 'device_tracker.test1', - 'device_tracker.test2' + 'home': { + 'ignored_zones': [ + 'work' + ], + 'devices': [ + 'device_tracker.test1', + 
'device_tracker.test2' + ] } } }) @@ -536,15 +528,17 @@ class TestProximity: 'friendly_name': 'test2' }) self.hass.block_till_done() + assert proximity.setup(self.hass, { 'proximity': { - 'zone': 'home', - 'ignored_zones': { - 'work' - }, - 'devices': { - 'device_tracker.test1', - 'device_tracker.test2' + 'home': { + 'ignored_zones': [ + 'work' + ], + 'devices': [ + 'device_tracker.test1', + 'device_tracker.test2' + ] } } }) @@ -566,6 +560,7 @@ class TestProximity: 'longitude': 10.1 }) self.hass.block_till_done() + self.hass.states.set( 'device_tracker.test1', 'not_home', { @@ -574,6 +569,7 @@ class TestProximity: 'longitude': 20.1 }) self.hass.block_till_done() + self.hass.states.set( 'device_tracker.test1', 'not_home', { @@ -582,12 +578,14 @@ class TestProximity: 'longitude': 15.1 }) self.hass.block_till_done() + self.hass.states.set( 'device_tracker.test1', 'work', { 'friendly_name': 'test1' }) self.hass.block_till_done() + state = self.hass.states.get('proximity.home') assert state.attributes.get('nearest') == 'test2' assert state.attributes.get('dir_of_travel') == 'unknown' @@ -596,14 +594,15 @@ class TestProximity: """Test for tracker states.""" assert proximity.setup(self.hass, { 'proximity': { - 'zone': 'home', - 'ignored_zones': { - 'work' - }, - 'devices': { - 'device_tracker.test1' - }, - 'tolerance': 1000 + 'home': { + 'ignored_zones': [ + 'work' + ], + 'devices': [ + 'device_tracker.test1' + ], + 'tolerance': 1000 + } } }) @@ -618,6 +617,7 @@ class TestProximity: state = self.hass.states.get('proximity.home') assert state.attributes.get('nearest') == 'test1' assert state.attributes.get('dir_of_travel') == 'unknown' + self.hass.states.set( 'device_tracker.test1', 'not_home', { @@ -644,15 +644,17 @@ class TestProximity: 'friendly_name': 'test2' }) self.hass.block_till_done() + assert proximity.setup(self.hass, { 'proximity': { - 'zone': 'home', - 'ignored_zones': { - 'work' - }, - 'devices': { - 'device_tracker.test1', - 'device_tracker.test2' + 'home': { + 'ignored_zones': [ + 'work' + ], + 'devices': [ + 'device_tracker.test1', + 'device_tracker.test2' + ] } } }) diff --git a/tests/components/test_splunk.py b/tests/components/test_splunk.py index 84dc4dfaac5..d893a699602 100644 --- a/tests/components/test_splunk.py +++ b/tests/components/test_splunk.py @@ -94,7 +94,12 @@ class TestSplunk(unittest.TestCase): payload = {'host': 'http://host:8088/services/collector/event', 'event': body} self.handler_method(event) - self.mock_post.assert_called_once_with( - payload['host'], data=payload, - headers={'Authorization': 'Splunk secret'}) + self.assertEqual(self.mock_post.call_count, 1) + self.assertEqual( + self.mock_post.call_args, + mock.call( + payload['host'], data=payload, + headers={'Authorization': 'Splunk secret'} + ) + ) self.mock_post.reset_mock() diff --git a/tests/components/test_statsd.py b/tests/components/test_statsd.py index 6cbb26b7416..ccc494fbc24 100644 --- a/tests/components/test_statsd.py +++ b/tests/components/test_statsd.py @@ -40,10 +40,11 @@ class TestStatsd(unittest.TestCase): hass = mock.MagicMock() hass.pool.worker_count = 2 self.assertTrue(setup_component(hass, statsd.DOMAIN, config)) - mock_connection.assert_called_once_with( - host='host', - port=123, - prefix='foo') + self.assertEqual(mock_connection.call_count, 1) + self.assertEqual( + mock_connection.call_args, + mock.call(host='host', port=123, prefix='foo') + ) self.assertTrue(hass.bus.listen.called) self.assertEqual(EVENT_STATE_CHANGED, @@ -64,10 +65,11 @@ class TestStatsd(unittest.TestCase): hass 
= mock.MagicMock() hass.pool.worker_count = 2 self.assertTrue(setup_component(hass, statsd.DOMAIN, config)) - mock_connection.assert_called_once_with( - host='host', - port=8125, - prefix='hass') + self.assertEqual(mock_connection.call_count, 1) + self.assertEqual( + mock_connection.call_args, + mock.call(host='host', port=8125, prefix='hass') + ) self.assertTrue(hass.bus.listen.called) @mock.patch('statsd.StatsClient') @@ -101,15 +103,18 @@ class TestStatsd(unittest.TestCase): mock_client.return_value.gauge.reset_mock() - mock_client.return_value.incr.assert_called_once_with( - state.entity_id, rate=statsd.DEFAULT_RATE) + self.assertEqual(mock_client.return_value.incr.call_count, 1) + self.assertEqual( + mock_client.return_value.incr.call_args, + mock.call(state.entity_id, rate=statsd.DEFAULT_RATE) + ) mock_client.return_value.incr.reset_mock() for invalid in ('foo', '', object): handler_method(mock.MagicMock(data={ 'new_state': ha.State('domain.test', invalid, {})})) self.assertFalse(mock_client.return_value.gauge.called) - self.assertFalse(mock_client.return_value.incr.called) + self.assertTrue(mock_client.return_value.incr.called) @mock.patch('statsd.StatsClient') def test_event_listener_attr_details(self, mock_client): @@ -146,12 +151,15 @@ class TestStatsd(unittest.TestCase): mock_client.return_value.gauge.reset_mock() - mock_client.return_value.incr.assert_called_once_with( - state.entity_id, rate=statsd.DEFAULT_RATE) + self.assertEqual(mock_client.return_value.incr.call_count, 1) + self.assertEqual( + mock_client.return_value.incr.call_args, + mock.call(state.entity_id, rate=statsd.DEFAULT_RATE) + ) mock_client.return_value.incr.reset_mock() for invalid in ('foo', '', object): handler_method(mock.MagicMock(data={ 'new_state': ha.State('domain.test', invalid, {})})) self.assertFalse(mock_client.return_value.gauge.called) - self.assertFalse(mock_client.return_value.incr.called) + self.assertTrue(mock_client.return_value.incr.called) diff --git a/tests/components/test_updater.py b/tests/components/test_updater.py index ec958a0d264..7cc2ba8d962 100644 --- a/tests/components/test_updater.py +++ b/tests/components/test_updater.py @@ -1,13 +1,17 @@ """The tests for the Updater component.""" +from datetime import datetime, timedelta import unittest from unittest.mock import patch +import os import requests +import requests_mock from homeassistant.bootstrap import setup_component from homeassistant.components import updater -import homeassistant.util.dt as dt_util -from tests.common import fire_time_changed, get_test_home_assistant + +from tests.common import ( + assert_setup_component, fire_time_changed, get_test_home_assistant) NEW_VERSION = '10000.0' @@ -18,65 +22,109 @@ MOCK_CURRENT_VERSION = '10.0' class TestUpdater(unittest.TestCase): """Test the Updater component.""" - def setUp(self): # pylint: disable=invalid-name + hass = None + + def setup_method(self, _): """Setup things to be run when tests are started.""" self.hass = get_test_home_assistant() - def tearDown(self): # pylint: disable=invalid-name + def teardown_method(self, _): """Stop everything that was started.""" self.hass.stop() @patch('homeassistant.components.updater.get_newest_version') - def test_new_version_shows_entity_on_start(self, mock_get_newest_version): + def test_new_version_shows_entity_on_start( # pylint: disable=invalid-name + self, mock_get_newest_version): """Test if new entity is created if new version is available.""" - mock_get_newest_version.return_value = NEW_VERSION + 
mock_get_newest_version.return_value = (NEW_VERSION, '') updater.CURRENT_VERSION = MOCK_CURRENT_VERSION - self.assertTrue(setup_component(self.hass, updater.DOMAIN, { - 'updater': {} - })) + with assert_setup_component(1) as config: + setup_component(self.hass, updater.DOMAIN, {updater.DOMAIN: {}}) + _dt = datetime.now() + timedelta(hours=1) + assert config['updater'] == {'reporting': True} + + for secs in [-1, 0, 1]: + fire_time_changed(self.hass, _dt + timedelta(seconds=secs)) + self.hass.block_till_done() self.assertTrue(self.hass.states.is_state( updater.ENTITY_ID, NEW_VERSION)) @patch('homeassistant.components.updater.get_newest_version') - def test_no_entity_on_same_version(self, mock_get_newest_version): + def test_no_entity_on_same_version( # pylint: disable=invalid-name + self, mock_get_newest_version): """Test if no entity is created if same version.""" - mock_get_newest_version.return_value = MOCK_CURRENT_VERSION + mock_get_newest_version.return_value = (MOCK_CURRENT_VERSION, '') updater.CURRENT_VERSION = MOCK_CURRENT_VERSION - self.assertTrue(setup_component(self.hass, updater.DOMAIN, { - 'updater': {} - })) + with assert_setup_component(1) as config: + assert setup_component( + self.hass, updater.DOMAIN, {updater.DOMAIN: {}}) + _dt = datetime.now() + timedelta(hours=1) + assert config['updater'] == {'reporting': True} self.assertIsNone(self.hass.states.get(updater.ENTITY_ID)) - mock_get_newest_version.return_value = NEW_VERSION + mock_get_newest_version.return_value = (NEW_VERSION, '') - fire_time_changed( - self.hass, dt_util.utcnow().replace(hour=0, minute=0, second=0)) - - self.hass.block_till_done() + for secs in [-1, 0, 1]: + fire_time_changed(self.hass, _dt + timedelta(seconds=secs)) + self.hass.block_till_done() self.assertTrue(self.hass.states.is_state( updater.ENTITY_ID, NEW_VERSION)) - @patch('homeassistant.components.updater.requests.get') - def test_errors_while_fetching_new_version(self, mock_get): + @patch('homeassistant.components.updater.requests.post') + def test_errors_while_fetching_new_version( # pylint: disable=invalid-name + self, mock_get): """Test for errors while fetching the new version.""" mock_get.side_effect = requests.RequestException - self.assertIsNone(updater.get_newest_version()) + uuid = '0000' + self.assertIsNone(updater.get_newest_version(uuid)) mock_get.side_effect = ValueError - self.assertIsNone(updater.get_newest_version()) + self.assertIsNone(updater.get_newest_version(uuid)) mock_get.side_effect = KeyError - self.assertIsNone(updater.get_newest_version()) + self.assertIsNone(updater.get_newest_version(uuid)) def test_updater_disabled_on_dev(self): """Test if the updater component is disabled on dev.""" updater.CURRENT_VERSION = MOCK_CURRENT_VERSION + 'dev' - self.assertFalse(setup_component(self.hass, updater.DOMAIN, { - 'updater': {} - })) + with assert_setup_component(1) as config: + assert not setup_component( + self.hass, updater.DOMAIN, {updater.DOMAIN: {}}) + assert config['updater'] == {'reporting': True} + + def test_uuid_function(self): + """Test if the uuid function works.""" + path = self.hass.config.path(updater.UPDATER_UUID_FILE) + try: + # pylint: disable=protected-access + uuid = updater._load_uuid(self.hass) + assert os.path.isfile(path) + uuid2 = updater._load_uuid(self.hass) + assert uuid == uuid2 + os.remove(path) + uuid2 = updater._load_uuid(self.hass) + assert uuid != uuid2 + finally: + os.remove(path) + + @requests_mock.Mocker() + def test_reporting_false_works(self, m): + """Test we do not send any data.""" + 
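`test_uuid_function` above exercises an `updater._load_uuid(hass)` helper that creates a UUID file on first use and reuses it afterwards. The snippet below is only a rough standalone sketch of that load-or-create behaviour; the function name `load_uuid`, the plain `path` argument, and the JSON file layout are assumptions, since the real helper takes a `hass` object and resolves the path via `hass.config.path(updater.UPDATER_UUID_FILE)`.

```python
import json
import os
import uuid


def load_uuid(path):
    """Return a stable installation UUID, creating the file on first use.

    Illustrative sketch only; not the actual updater helper.
    """
    if os.path.isfile(path):
        with open(path) as inp:
            return json.load(inp)['uuid']

    new_uuid = uuid.uuid4().hex
    with open(path, 'w') as outp:
        json.dump({'uuid': new_uuid}, outp)
    return new_uuid
```

This matches what the test pins down: two calls return the same value, and removing the file yields a fresh UUID on the next call.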
m.post(updater.UPDATER_URL, + json={'version': '0.15', + 'release-notes': 'https://home-assistant.io'}) + + response = updater.get_newest_version(None) + + assert response == ('0.15', 'https://home-assistant.io') + + history = m.request_history + + assert len(history) == 1 + assert history[0].json() == {} diff --git a/tests/components/thermostat/test_heat_control.py b/tests/components/thermostat/test_heat_control.py index 475e9c70046..300bfd6cc4a 100644 --- a/tests/components/thermostat/test_heat_control.py +++ b/tests/components/thermostat/test_heat_control.py @@ -4,7 +4,7 @@ import unittest from unittest import mock -from homeassistant.bootstrap import _setup_component +from homeassistant.bootstrap import setup_component from homeassistant.const import ( ATTR_UNIT_OF_MEASUREMENT, SERVICE_TURN_OFF, @@ -16,7 +16,7 @@ from homeassistant.const import ( from homeassistant.util.unit_system import METRIC_SYSTEM from homeassistant.components import thermostat -from tests.common import get_test_home_assistant +from tests.common import assert_setup_component, get_test_home_assistant ENTITY = 'thermostat.test' @@ -44,12 +44,13 @@ class TestSetupThermostatHeatControl(unittest.TestCase): 'name': 'test', 'target_sensor': ENT_SENSOR } - self.assertFalse(_setup_component(self.hass, 'thermostat', { - 'thermostat': config})) + with assert_setup_component(0): + setup_component(self.hass, 'thermostat', { + 'thermostat': config}) def test_valid_conf(self): """Test set up heat_control with valid config values.""" - self.assertTrue(_setup_component(self.hass, 'thermostat', + self.assertTrue(setup_component(self.hass, 'thermostat', {'thermostat': { 'platform': 'heat_control', 'name': 'test', diff --git a/tests/components/thermostat/test_honeywell.py b/tests/components/thermostat/test_honeywell.py index e4f75e508e5..b95cede77b3 100644 --- a/tests/components/thermostat/test_honeywell.py +++ b/tests/components/thermostat/test_honeywell.py @@ -52,7 +52,8 @@ class TestHoneywell(unittest.TestCase): self.assertFalse(result) result = honeywell.setup_platform(hass, config, add_devices) self.assertTrue(result) - mock_sc.assert_called_once_with('user', 'pass') + self.assertEqual(mock_sc.call_count, 1) + self.assertEqual(mock_sc.call_args, mock.call('user', 'pass')) mock_ht.assert_has_calls([ mock.call(mock_sc.return_value, devices_1[0]), mock.call(mock_sc.return_value, devices_2[0]), @@ -164,9 +165,13 @@ class TestHoneywell(unittest.TestCase): hass = mock.MagicMock() add_devices = mock.MagicMock() self.assertTrue(honeywell.setup_platform(hass, config, add_devices)) - mock_evo.assert_called_once_with('user', 'pass') - mock_evo.return_value.temperatures.assert_called_once_with( - force_refresh=True) + self.assertEqual(mock_evo.call_count, 1) + self.assertEqual(mock_evo.call_args, mock.call('user', 'pass')) + self.assertEqual(mock_evo.return_value.temperatures.call_count, 1) + self.assertEqual( + mock_evo.return_value.temperatures.call_args, + mock.call(force_refresh=True) + ) mock_round.assert_has_calls([ mock.call(mock_evo.return_value, 'foo', True, 20), mock.call(mock_evo.return_value, 'bar', False, 20), @@ -265,17 +270,26 @@ class TestHoneywellRound(unittest.TestCase): self.assertFalse(self.round1.is_away_mode_on) self.round1.turn_away_mode_on() self.assertTrue(self.round1.is_away_mode_on) - self.device.set_temperature.assert_called_once_with('House', 16) + self.assertEqual(self.device.set_temperature.call_count, 1) + self.assertEqual( + self.device.set_temperature.call_args, mock.call('House', 16) + ) 
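The `test_reporting_false_works` case in the updater hunks above introduces `requests_mock` to fake the HTTP endpoint and then inspect what was actually sent. A self-contained sketch of the same pattern is below; `UPDATE_URL` and this `get_newest_version` stand-in are placeholders, not the real updater code.

```python
import requests
import requests_mock

UPDATE_URL = 'https://example.invalid/version'  # placeholder URL


def get_newest_version():
    """Fetch the latest version info (illustrative stand-in)."""
    res = requests.post(UPDATE_URL, json={})
    res.raise_for_status()
    data = res.json()
    return data['version'], data['release-notes']


def test_get_newest_version():
    """Check both the parsed response and the request that was sent."""
    with requests_mock.Mocker() as m:
        m.post(UPDATE_URL, json={'version': '0.15',
                                 'release-notes': 'https://example.invalid'})

        assert get_newest_version() == ('0.15', 'https://example.invalid')

        history = m.request_history
        assert len(history) == 1
        assert history[0].json() == {}
```

`request_history` is what lets the test assert that an empty JSON body was posted, i.e. that no analytics data left the machine.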
self.device.set_temperature.reset_mock() self.round1.turn_away_mode_off() self.assertFalse(self.round1.is_away_mode_on) - self.device.cancel_temp_override.assert_called_once_with('House') + self.assertEqual(self.device.cancel_temp_override.call_count, 1) + self.assertEqual( + self.device.cancel_temp_override.call_args, mock.call('House') + ) def test_set_temperature(self): """Test setting the temperature.""" self.round1.set_temperature(25) - self.device.set_temperature.assert_called_once_with('House', 25) + self.assertEqual(self.device.set_temperature.call_count, 1) + self.assertEqual( + self.device.set_temperature.call_args, mock.call('House', 25) + ) def test_set_hvac_mode(self: unittest.TestCase) -> None: """Test setting the system operation.""" diff --git a/tests/fixtures/yahoo_finance.json b/tests/fixtures/yahoo_finance.json new file mode 100644 index 00000000000..8d59e80db96 --- /dev/null +++ b/tests/fixtures/yahoo_finance.json @@ -0,0 +1,85 @@ + { + "symbol": "YHOO", + "Ask": "42.42", + "AverageDailyVolume": "11397600", + "Bid": "42.41", + "AskRealtime": null, + "BidRealtime": null, + "BookValue": "29.83", + "Change_PercentChange": "+0.62 - +1.48%", + "Change": "+0.62", + "Commission": null, + "Currency": "USD", + "ChangeRealtime": null, + "AfterHoursChangeRealtime": null, + "DividendShare": null, + "LastTradeDate": "10/18/2016", + "TradeDate": null, + "EarningsShare": "-5.18", + "ErrorIndicationreturnedforsymbolchangedinvalid": null, + "EPSEstimateCurrentYear": "0.49", + "EPSEstimateNextYear": "0.57", + "EPSEstimateNextQuarter": "0.17", + "DaysLow": "41.86", + "DaysHigh": "42.42", + "YearLow": "26.15", + "YearHigh": "44.92", + "HoldingsGainPercent": null, + "AnnualizedGain": null, + "HoldingsGain": null, + "HoldingsGainPercentRealtime": null, + "HoldingsGainRealtime": null, + "MoreInfo": null, + "OrderBookRealtime": null, + "MarketCapitalization": "40.37B", + "MarketCapRealtime": null, + "EBITDA": "151.08M", + "ChangeFromYearLow": "16.26", + "PercentChangeFromYearLow": "+62.18%", + "LastTradeRealtimeWithTime": null, + "ChangePercentRealtime": null, + "ChangeFromYearHigh": "-2.51", + "PercebtChangeFromYearHigh": "-5.59%", + "LastTradeWithTime": "9:41am - <b>42.41</b>", + "LastTradePriceOnly": "42.41", + "HighLimit": null, + "LowLimit": null, + "DaysRange": "41.86 - 42.42", + "DaysRangeRealtime": null, + "FiftydayMovingAverage": "43.16", + "TwoHundreddayMovingAverage": "39.26", + "ChangeFromTwoHundreddayMovingAverage": "3.15", + "PercentChangeFromTwoHundreddayMovingAverage": "+8.03%", + "ChangeFromFiftydayMovingAverage": "-0.75", + "PercentChangeFromFiftydayMovingAverage": "-1.74%", + "Name": "Yahoo! 
Inc.", + "Notes": null, + "Open": "41.69", + "PreviousClose": "41.79", + "PricePaid": null, + "ChangeinPercent": "+1.48%", + "PriceSales": "8.13", + "PriceBook": "1.40", + "ExDividendDate": null, + "PERatio": null, + "DividendPayDate": null, + "PERatioRealtime": null, + "PEGRatio": "-24.57", + "PriceEPSEstimateCurrentYear": "86.55", + "PriceEPSEstimateNextYear": "74.40", + "Symbol": "YHOO", + "SharesOwned": null, + "ShortRatio": "5.05", + "LastTradeTime": "9:41am", + "TickerTrend": null, + "OneyrTargetPrice": "43.64", + "Volume": "946198", + "HoldingsValue": null, + "HoldingsValueRealtime": null, + "YearRange": "26.15 - 44.92", + "DaysValueChange": null, + "DaysValueChangeRealtime": null, + "StockExchange": "NMS", + "DividendYield": null, + "PercentChange": "+1.48%" +} diff --git a/tests/helpers/test_config_validation.py b/tests/helpers/test_config_validation.py index 9f929244888..3ff9755bba2 100644 --- a/tests/helpers/test_config_validation.py +++ b/tests/helpers/test_config_validation.py @@ -3,10 +3,11 @@ from collections import OrderedDict from datetime import timedelta import enum import os -import tempfile +from socket import _GLOBAL_DEFAULT_TIMEOUT import pytest import voluptuous as vol +from unittest.mock import Mock, patch import homeassistant.helpers.config_validation as cv @@ -68,18 +69,18 @@ def test_isfile(): """Validate that the value is an existing file.""" schema = vol.Schema(cv.isfile) - with tempfile.NamedTemporaryFile() as fp: - pass + fake_file = 'this-file-does-not.exist' + assert not os.path.isfile(fake_file) - for value in ('invalid', None, -1, 0, 80000, fp.name): + for value in ('invalid', None, -1, 0, 80000, fake_file): with pytest.raises(vol.Invalid): schema(value) - with tempfile.TemporaryDirectory() as tmp_path: - tmp_file = os.path.join(tmp_path, "test.txt") - with open(tmp_file, "w") as tmp_handl: - tmp_handl.write("test file") - schema(tmp_file) + # patching methods that allow us to fake a file existing + # with write access + with patch('os.path.isfile', Mock(return_value=True)), \ + patch('os.access', Mock(return_value=True)): + schema('test.txt') def test_url(): @@ -436,3 +437,22 @@ def test_enum(): schema('value3') TestEnum['value1'] + + +def test_socket_timeout(): + """Test socket timeout validator.""" + TEST_CONF_TIMEOUT = 'timeout' + + schema = vol.Schema( + {vol.Required(TEST_CONF_TIMEOUT, default=None): cv.socket_timeout}) + + with pytest.raises(vol.Invalid): + schema({TEST_CONF_TIMEOUT: 0.0}) + + with pytest.raises(vol.Invalid): + schema({TEST_CONF_TIMEOUT: -1}) + + assert _GLOBAL_DEFAULT_TIMEOUT == schema({TEST_CONF_TIMEOUT: + None})[TEST_CONF_TIMEOUT] + + assert 1.0 == schema({TEST_CONF_TIMEOUT: 1})[TEST_CONF_TIMEOUT] diff --git a/tests/helpers/test_entity_component.py b/tests/helpers/test_entity_component.py index 0ab87c57452..6f658a70518 100644 --- a/tests/helpers/test_entity_component.py +++ b/tests/helpers/test_entity_component.py @@ -68,46 +68,46 @@ class TestHelpersEntityComponent(unittest.TestCase): group_name='everyone') # No group after setup - assert 0 == len(self.hass.states.entity_ids()) + assert len(self.hass.states.entity_ids()) == 0 component.add_entities([EntityTest(name='hello')]) # group exists - assert 2 == len(self.hass.states.entity_ids()) - assert ['group.everyone'] == self.hass.states.entity_ids('group') + assert len(self.hass.states.entity_ids()) == 2 + assert self.hass.states.entity_ids('group') == ['group.everyone'] group = self.hass.states.get('group.everyone') - assert ('test_domain.hello',) == 
group.attributes.get('entity_id') + assert group.attributes.get('entity_id') == ('test_domain.hello',) # group extended component.add_entities([EntityTest(name='hello2')]) - assert 3 == len(self.hass.states.entity_ids()) + assert len(self.hass.states.entity_ids()) == 3 group = self.hass.states.get('group.everyone') - assert ['test_domain.hello', 'test_domain.hello2'] == \ - sorted(group.attributes.get('entity_id')) + assert sorted(group.attributes.get('entity_id')) == \ + ['test_domain.hello', 'test_domain.hello2'] def test_polling_only_updates_entities_it_should_poll(self): """Test the polling of only updated entities.""" component = EntityComponent(_LOGGER, DOMAIN, self.hass, 20) no_poll_ent = EntityTest(should_poll=False) - no_poll_ent.update_ha_state = Mock() + no_poll_ent.async_update = Mock() poll_ent = EntityTest(should_poll=True) - poll_ent.update_ha_state = Mock() + poll_ent.async_update = Mock() component.add_entities([no_poll_ent, poll_ent]) - no_poll_ent.update_ha_state.reset_mock() - poll_ent.update_ha_state.reset_mock() + no_poll_ent.async_update.reset_mock() + poll_ent.async_update.reset_mock() fire_time_changed(self.hass, dt_util.utcnow().replace(second=0)) self.hass.block_till_done() - assert not no_poll_ent.update_ha_state.called - assert poll_ent.update_ha_state.called + assert not no_poll_ent.async_update.called + assert poll_ent.async_update.called def test_update_state_adds_entities(self): """Test if updating poll entities cause an entity to be added works.""" @@ -118,7 +118,7 @@ class TestHelpersEntityComponent(unittest.TestCase): component.add_entities([ent2]) assert 1 == len(self.hass.states.entity_ids()) - ent2.update_ha_state = lambda *_: component.add_entities([ent1]) + ent2.update = lambda *_: component.add_entities([ent1]) fire_time_changed(self.hass, dt_util.utcnow().replace(second=0)) self.hass.block_till_done() @@ -225,7 +225,7 @@ class TestHelpersEntityComponent(unittest.TestCase): assert platform2_setup.called @patch('homeassistant.helpers.entity_component.EntityComponent' - '._setup_platform') + '._async_setup_platform') @patch('homeassistant.bootstrap.setup_component', return_value=True) def test_setup_does_discovery(self, mock_setup_component, mock_setup): """Test setup for discovery.""" @@ -242,7 +242,8 @@ class TestHelpersEntityComponent(unittest.TestCase): assert ('platform_test', {}, {'msg': 'discovery_info'}) == \ mock_setup.call_args[0] - @patch('homeassistant.helpers.entity_component.track_utc_time_change') + @patch('homeassistant.helpers.entity_component.' + 'async_track_utc_time_change') def test_set_scan_interval_via_config(self, mock_track): """Test the setting of the scan interval via configuration.""" def platform_setup(hass, config, add_devices, discovery_info=None): @@ -264,7 +265,8 @@ class TestHelpersEntityComponent(unittest.TestCase): assert mock_track.called assert [0, 30] == list(mock_track.call_args[1]['second']) - @patch('homeassistant.helpers.entity_component.track_utc_time_change') + @patch('homeassistant.helpers.entity_component.' 
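The patch targets above are wrapped across two string literals purely to satisfy the line-length limit; Python concatenates adjacent literals at compile time, so `@patch('homeassistant.helpers.entity_component.' 'async_track_utc_time_change')` patches a single dotted name. A tiny demonstration:

```python
# Adjacent string literals are concatenated at compile time, so the patch
# target is one dotted path, merely wrapped for line length.
TARGET = ('homeassistant.helpers.entity_component.'
          'async_track_utc_time_change')

assert TARGET.endswith('.async_track_utc_time_change')
assert ' ' not in TARGET
```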
+ 'async_track_utc_time_change') def test_set_scan_interval_via_platform(self, mock_track): """Test the setting of the scan interval via platform.""" def platform_setup(hass, config, add_devices, discovery_info=None): diff --git a/tests/helpers/test_script.py b/tests/helpers/test_script.py index 9a868bd8d8a..93bf0268337 100644 --- a/tests/helpers/test_script.py +++ b/tests/helpers/test_script.py @@ -1,6 +1,7 @@ """The tests for the Script component.""" # pylint: disable=too-many-public-methods,protected-access from datetime import timedelta +from unittest import mock import unittest # Otherwise can't test just this file (import order issue) @@ -221,7 +222,7 @@ class TestScriptHelper(unittest.TestCase): 'hello': '{{ greeting }}', }, }, - {'delay': {'seconds': 5}}, + {'delay': '{{ delay_period }}'}, { 'service': 'test.script', 'data_template': { @@ -232,6 +233,7 @@ class TestScriptHelper(unittest.TestCase): script_obj.run({ 'greeting': 'world', 'greeting2': 'universe', + 'delay_period': '00:00:05' }) self.hass.block_till_done() @@ -279,3 +281,62 @@ class TestScriptHelper(unittest.TestCase): script_obj.run() self.hass.block_till_done() assert len(events) == 3 + + @mock.patch('homeassistant.helpers.script.condition.async_from_config') + def test_condition_created_once(self, async_from_config): + """Test that the conditions do not get created multiple times.""" + event = 'test_event' + events = [] + + def record_event(event): + """Add recorded event to set.""" + events.append(event) + + self.hass.bus.listen(event, record_event) + + self.hass.states.set('test.entity', 'hello') + + script_obj = script.Script(self.hass, cv.SCRIPT_SCHEMA([ + {'event': event}, + { + 'condition': 'template', + 'value_template': '{{ states.test.entity.state == "hello" }}', + }, + {'event': event}, + ])) + + script_obj.run() + script_obj.run() + self.hass.block_till_done() + assert async_from_config.call_count == 1 + assert len(script_obj._config_cache) == 1 + + def test_all_conditions_cached(self): + """Test that multiple conditions get cached.""" + event = 'test_event' + events = [] + + def record_event(event): + """Add recorded event to set.""" + events.append(event) + + self.hass.bus.listen(event, record_event) + + self.hass.states.set('test.entity', 'hello') + + script_obj = script.Script(self.hass, cv.SCRIPT_SCHEMA([ + {'event': event}, + { + 'condition': 'template', + 'value_template': '{{ states.test.entity.state == "hello" }}', + }, + { + 'condition': 'template', + 'value_template': '{{ states.test.entity.state != "hello" }}', + }, + {'event': event}, + ])) + + script_obj.run() + self.hass.block_till_done() + assert len(script_obj._config_cache) == 2 diff --git a/tests/helpers/test_service.py b/tests/helpers/test_service.py index 38af2178340..efe21f95d9b 100644 --- a/tests/helpers/test_service.py +++ b/tests/helpers/test_service.py @@ -139,7 +139,7 @@ class TestServiceHelpers(unittest.TestCase): self.hass.states.set('light.Ceiling', STATE_OFF) self.hass.states.set('light.Kitchen', STATE_OFF) - loader.get_component('group').Group( + loader.get_component('group').Group.create_group( self.hass, 'test', ['light.Ceiling', 'light.Kitchen']) call = ha.ServiceCall('light', 'turn_on', diff --git a/tests/helpers/test_template.py b/tests/helpers/test_template.py index 527d99df39e..e1e08b02b16 100644 --- a/tests/helpers/test_template.py +++ b/tests/helpers/test_template.py @@ -206,6 +206,34 @@ class TestHelpersTemplate(unittest.TestCase): '-', tpl.render_with_possible_json_value('hello', '-')) + def 
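The two new script tests above (`test_condition_created_once` and `test_all_conditions_cached`) pin down a caching behaviour: each distinct condition config is turned into a checker exactly once and reused across runs. The toy class below illustrates why `async_from_config.call_count` stays at 1 and why `_config_cache` grows per distinct condition; it is not the real `Script` class, which manages its cache internally.

```python
from unittest import mock


class CachedConditions:
    """Create each condition checker once and reuse it on later runs."""

    def __init__(self, factory):
        self._factory = factory
        self._config_cache = {}

    def check(self, key, config, *args):
        # Build the checker lazily, then reuse it for every later run.
        if key not in self._config_cache:
            self._config_cache[key] = self._factory(config)
        return self._config_cache[key](*args)


factory = mock.Mock(return_value=lambda state: state == 'hello')
conditions = CachedConditions(factory)

# Two "runs" over the same script step hit the factory only once.
assert conditions.check(1, {'condition': 'template'}, 'hello')
assert conditions.check(1, {'condition': 'template'}, 'hello')
assert factory.call_count == 1
assert len(conditions._config_cache) == 1
```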
test_render_with_possible_json_value_with_missing_json_value(self): + """Render with possible JSON value with unknown JSON object.""" + tpl = template.Template('{{ value_json.goodbye }}', self.hass) + self.assertEqual( + '', + tpl.render_with_possible_json_value('{"hello": "world"}')) + + def test_render_with_possible_json_value_valid_with_is_defined(self): + """Render with possible JSON value with known JSON object.""" + tpl = template.Template('{{ value_json.hello|is_defined }}', self.hass) + self.assertEqual( + 'world', + tpl.render_with_possible_json_value('{"hello": "world"}')) + + def test_render_with_possible_json_value_undefined_json(self): + """Render with possible JSON value with unknown JSON object.""" + tpl = template.Template('{{ value_json.bye|is_defined }}', self.hass) + self.assertEqual( + '{"hello": "world"}', + tpl.render_with_possible_json_value('{"hello": "world"}')) + + def test_render_with_possible_json_value_undefined_json_error_value(self): + """Render with possible JSON value with unknown JSON object.""" + tpl = template.Template('{{ value_json.bye|is_defined }}', self.hass) + self.assertEqual( + '', + tpl.render_with_possible_json_value('{"hello": "world"}', '')) + def test_raise_exception_on_error(self): """Test raising an exception on error.""" with self.assertRaises(TemplateError): @@ -402,7 +430,8 @@ class TestHelpersTemplate(unittest.TestCase): 'longitude': self.hass.config.longitude, }) - group.Group(self.hass, 'location group', ['test_domain.object']) + group.Group.create_group( + self.hass, 'location group', ['test_domain.object']) self.assertEqual( 'test_domain.object', @@ -422,7 +451,8 @@ class TestHelpersTemplate(unittest.TestCase): 'longitude': self.hass.config.longitude, }) - group.Group(self.hass, 'location group', ['test_domain.object']) + group.Group.create_group( + self.hass, 'location group', ['test_domain.object']) self.assertEqual( 'test_domain.object', diff --git a/tests/scripts/test_check_config.py b/tests/scripts/test_check_config.py index e31c46b40a8..efe99f86ebd 100644 --- a/tests/scripts/test_check_config.py +++ b/tests/scripts/test_check_config.py @@ -74,13 +74,15 @@ class TestCheckConfig(unittest.TestCase): with patch_yaml_files(files): res = check_config.check(get_test_config_dir('component.yaml')) change_yaml_files(res) - self.assertDictEqual({ - 'components': {}, - 'except': {'http': {'password': 'err123'}}, - 'secret_cache': {}, - 'secrets': {}, - 'yaml_files': ['.../component.yaml'] - }, res) + + self.assertDictEqual({}, res['components']) + self.assertDictEqual( + {'http': {'password': 'err123'}}, + res['except'] + ) + self.assertDictEqual({}, res['secret_cache']) + self.assertDictEqual({}, res['secrets']) + self.assertListEqual(['.../component.yaml'], res['yaml_files']) files = { 'platform.yaml': (BASE_CONFIG + 'mqtt:\n\n' @@ -89,14 +91,18 @@ class TestCheckConfig(unittest.TestCase): with patch_yaml_files(files): res = check_config.check(get_test_config_dir('platform.yaml')) change_yaml_files(res) - self.assertDictEqual({ - 'components': {'mqtt': {'keepalive': 60, 'port': 1883, - 'protocol': '3.1.1'}}, - 'except': {'light.mqtt_json': {'platform': 'mqtt_json'}}, - 'secret_cache': {}, - 'secrets': {}, - 'yaml_files': ['.../platform.yaml'] - }, res) + self.assertDictEqual( + {'mqtt': {'keepalive': 60, 'port': 1883, 'protocol': '3.1.1'}, + 'light': []}, + res['components'] + ) + self.assertDictEqual( + {'light.mqtt_json': {'platform': 'mqtt_json'}}, + res['except'] + ) + self.assertDictEqual({}, res['secret_cache']) + 
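The new template tests above exercise an `is_defined` filter that turns a silently-empty undefined value into an error, which the JSON rendering helper then converts into the fallback value. The sketch below shows how such a filter could be wired into plain Jinja2; it is an assumption-laden stand-in, not Home Assistant's template module, and `render_with_possible_json_value` here is a simplified local helper.

```python
import json

import jinja2


def is_defined(value):
    """Fail rendering instead of silently printing nothing."""
    if isinstance(value, jinja2.Undefined):
        raise jinja2.UndefinedError('value is undefined')
    return value


env = jinja2.Environment()
env.filters['is_defined'] = is_defined


def render_with_possible_json_value(template, value, error_value=None):
    """Render, exposing value as value_json when it parses as JSON."""
    try:
        variables = {'value': value, 'value_json': json.loads(value)}
    except ValueError:
        variables = {'value': value}
    try:
        return env.from_string(template).render(**variables)
    except jinja2.UndefinedError:
        return value if error_value is None else error_value


assert render_with_possible_json_value(
    '{{ value_json.hello|is_defined }}', '{"hello": "world"}') == 'world'
assert render_with_possible_json_value(
    '{{ value_json.bye|is_defined }}',
    '{"hello": "world"}') == '{"hello": "world"}'
assert render_with_possible_json_value(
    '{{ value_json.bye|is_defined }}', '{"hello": "world"}', '') == ''
```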
self.assertDictEqual({}, res['secrets']) + self.assertListEqual(['.../platform.yaml'], res['yaml_files']) def test_component_platform_not_found(self, mock_get_loop): """Test errors if component or platform not found.""" @@ -107,25 +113,23 @@ class TestCheckConfig(unittest.TestCase): with patch_yaml_files(files): res = check_config.check(get_test_config_dir('badcomponent.yaml')) change_yaml_files(res) - self.assertDictEqual({ - 'components': {}, - 'except': {check_config.ERROR_STR: - ['Component not found: beer']}, - 'secret_cache': {}, - 'secrets': {}, - 'yaml_files': ['.../badcomponent.yaml'] - }, res) + self.assertDictEqual({}, res['components']) + self.assertDictEqual({check_config.ERROR_STR: + ['Component not found: beer']}, + res['except']) + self.assertDictEqual({}, res['secret_cache']) + self.assertDictEqual({}, res['secrets']) + self.assertListEqual(['.../badcomponent.yaml'], res['yaml_files']) res = check_config.check(get_test_config_dir('badplatform.yaml')) change_yaml_files(res) - self.assertDictEqual({ - 'components': {}, - 'except': {check_config.ERROR_STR: - ['Platform not found: light.beer']}, - 'secret_cache': {}, - 'secrets': {}, - 'yaml_files': ['.../badplatform.yaml'] - }, res) + self.assertDictEqual({'light': []}, res['components']) + self.assertDictEqual({check_config.ERROR_STR: + ['Platform not found: light.beer']}, + res['except']) + self.assertDictEqual({}, res['secret_cache']) + self.assertDictEqual({}, res['secrets']) + self.assertListEqual(['.../badplatform.yaml'], res['yaml_files']) def test_secrets(self, mock_get_loop): """Test secrets config checking method.""" diff --git a/tests/test_bootstrap.py b/tests/test_bootstrap.py index b74a1de0d35..c84c95f396c 100644 --- a/tests/test_bootstrap.py +++ b/tests/test_bootstrap.py @@ -1,8 +1,8 @@ """Test the bootstrapping.""" # pylint: disable=too-many-public-methods,protected-access -import tempfile from unittest import mock import threading +import logging import voluptuous as vol @@ -10,14 +10,23 @@ from homeassistant import bootstrap, loader import homeassistant.util.dt as dt_util from homeassistant.helpers.config_validation import PLATFORM_SCHEMA -from tests.common import get_test_home_assistant, MockModule, MockPlatform +from tests.common import \ + get_test_home_assistant, MockModule, MockPlatform, \ + assert_setup_component, patch_yaml_files ORIG_TIMEZONE = dt_util.DEFAULT_TIME_ZONE +_LOGGER = logging.getLogger(__name__) + class TestBootstrap: """Test the bootstrap utils.""" + hass = None + backup_cache = None + + # pylint: disable=invalid-name, no-self-use + def setup_method(self, method): """Setup the test.""" self.backup_cache = loader._COMPONENT_CACHE @@ -36,17 +45,27 @@ class TestBootstrap: self.hass.stop() loader._COMPONENT_CACHE = self.backup_cache + @mock.patch( + # prevent .HA_VERISON file from being written + 'homeassistant.bootstrap.conf_util.process_ha_config_upgrade', + mock.Mock() + ) @mock.patch('homeassistant.util.location.detect_location_info', return_value=None) def test_from_config_file(self, mock_detect): """Test with configuration file.""" components = ['browser', 'conversation', 'script'] - with tempfile.NamedTemporaryFile() as fp: - for comp in components: - fp.write('{}:\n'.format(comp).encode('utf-8')) - fp.flush() + files = { + 'config.yaml': ''.join( + '{}:\n'.format(comp) + for comp in components + ) + } - self.hass = bootstrap.from_config_file(fp.name) + with mock.patch('os.path.isfile', mock.Mock(return_value=True)), \ + mock.patch('os.access', mock.Mock(return_value=True)), \ + 
patch_yaml_files(files, True): + self.hass = bootstrap.from_config_file('config.yaml') components.append('group') assert sorted(components) == sorted(self.hass.config.components) @@ -110,59 +129,72 @@ class TestBootstrap: loader.set_component( 'platform_conf.whatever', MockPlatform('whatever')) - assert not bootstrap._setup_component(self.hass, 'platform_conf', { - 'platform_conf': { - 'hello': 'world', - 'invalid': 'extra', - } - }) - - assert not bootstrap._setup_component(self.hass, 'platform_conf', { - 'platform_conf': { - 'platform': 'whatever', - 'hello': 'world', - }, - - 'platform_conf 2': { - 'invalid': True - } - }) - - assert not bootstrap._setup_component(self.hass, 'platform_conf', { - 'platform_conf': { - 'platform': 'not_existing', - 'hello': 'world', - } - }) - - assert bootstrap._setup_component(self.hass, 'platform_conf', { - 'platform_conf': { - 'platform': 'whatever', - 'hello': 'world', - } - }) + with assert_setup_component(0): + assert bootstrap._setup_component(self.hass, 'platform_conf', { + 'platform_conf': { + 'hello': 'world', + 'invalid': 'extra', + } + }) self.hass.config.components.remove('platform_conf') - assert bootstrap._setup_component(self.hass, 'platform_conf', { - 'platform_conf': [{ - 'platform': 'whatever', - 'hello': 'world', - }] - }) + with assert_setup_component(1): + assert bootstrap._setup_component(self.hass, 'platform_conf', { + 'platform_conf': { + 'platform': 'whatever', + 'hello': 'world', + }, + 'platform_conf 2': { + 'invalid': True + } + }) self.hass.config.components.remove('platform_conf') - # Any falsey paltform config will be ignored (None, {}, etc) - assert bootstrap._setup_component(self.hass, 'platform_conf', { - 'platform_conf': None - }) + with assert_setup_component(0): + assert bootstrap._setup_component(self.hass, 'platform_conf', { + 'platform_conf': { + 'platform': 'not_existing', + 'hello': 'world', + } + }) self.hass.config.components.remove('platform_conf') - assert bootstrap._setup_component(self.hass, 'platform_conf', { - 'platform_conf': {} - }) + with assert_setup_component(1): + assert bootstrap._setup_component(self.hass, 'platform_conf', { + 'platform_conf': { + 'platform': 'whatever', + 'hello': 'world', + } + }) + + self.hass.config.components.remove('platform_conf') + + with assert_setup_component(1): + assert bootstrap._setup_component(self.hass, 'platform_conf', { + 'platform_conf': [{ + 'platform': 'whatever', + 'hello': 'world', + }] + }) + + self.hass.config.components.remove('platform_conf') + + # Any falsey platform config will be ignored (None, {}, etc) + with assert_setup_component(0) as config: + assert bootstrap._setup_component(self.hass, 'platform_conf', { + 'platform_conf': None + }) + assert 'platform_conf' in self.hass.config.components + assert not config['platform_conf'] # empty + + assert bootstrap._setup_component(self.hass, 'platform_conf', { + 'platform_conf': {} + }) + assert 'platform_conf' in self.hass.config.components + assert not config['platform_conf'] # empty def test_component_not_found(self): """setup_component should not crash if component doesn't exist.""" @@ -170,7 +202,6 @@ class TestBootstrap: def test_component_not_double_initialized(self): """Test we do not setup a component twice.""" - mock_setup = mock.MagicMock(return_value=True) loader.set_component('comp', MockModule('comp', setup=mock_setup)) @@ -195,15 +226,13 @@ class TestBootstrap: assert 'comp' not in self.hass.config.components def test_component_not_setup_twice_if_loaded_during_other_setup(self): - """ - Test 
component that gets setup while waiting for lock is not setup - twice. - """ + """Test component setup while waiting for lock is not setup twice.""" loader.set_component('comp', MockModule('comp')) result = [] def setup_component(): + """Setup the component.""" result.append(bootstrap.setup_component(self.hass, 'comp')) with bootstrap._SETUP_LOCK: @@ -250,15 +279,15 @@ class TestBootstrap: def test_home_assistant_core_config_validation(self): """Test if we pass in wrong information for HA conf.""" # Extensive HA conf validation testing is done in test_config.py + hass = get_test_home_assistant() assert None is bootstrap.from_config_dict({ 'homeassistant': { 'latitude': 'some string' } - }) + }, hass=hass) def test_component_setup_with_validation_and_dependency(self): """Test all config is passed to dependencies.""" - def config_check_setup(hass, config): """Setup method that tests config is passed in.""" if config.get('comp_a', {}).get('valid', False): @@ -283,36 +312,48 @@ class TestBootstrap: def test_platform_specific_config_validation(self): """Test platform that specifies config.""" - platform_schema = PLATFORM_SCHEMA.extend({ 'valid': True, }, extra=vol.PREVENT_EXTRA) + mock_setup = mock.MagicMock() + loader.set_component( 'switch.platform_a', - MockPlatform(platform_schema=platform_schema)) + MockPlatform(platform_schema=platform_schema, + setup_platform=mock_setup)) - assert not bootstrap.setup_component(self.hass, 'switch', { - 'switch': { - 'platform': 'platform_a', - 'invalid': True - } - }) + with assert_setup_component(0): + assert bootstrap.setup_component(self.hass, 'switch', { + 'switch': { + 'platform': 'platform_a', + 'invalid': True + } + }) + assert mock_setup.call_count == 0 - assert not bootstrap.setup_component(self.hass, 'switch', { - 'switch': { - 'platform': 'platform_a', - 'valid': True, - 'invalid_extra': True, - } - }) + self.hass.config.components.remove('switch') - assert bootstrap.setup_component(self.hass, 'switch', { - 'switch': { - 'platform': 'platform_a', - 'valid': True - } - }) + with assert_setup_component(0): + assert bootstrap.setup_component(self.hass, 'switch', { + 'switch': { + 'platform': 'platform_a', + 'valid': True, + 'invalid_extra': True, + } + }) + assert mock_setup.call_count == 0 + + self.hass.config.components.remove('switch') + + with assert_setup_component(1): + assert bootstrap.setup_component(self.hass, 'switch', { + 'switch': { + 'platform': 'platform_a', + 'valid': True + } + }) + assert mock_setup.call_count == 1 def test_disable_component_if_invalid_return(self): """Test disabling component if invalid return.""" diff --git a/tests/test_config.py b/tests/test_config.py index 1512a7688ea..4787da5bcde 100644 --- a/tests/test_config.py +++ b/tests/test_config.py @@ -1,7 +1,6 @@ """Test config utils.""" # pylint: disable=too-many-public-methods,protected-access import os -import tempfile import unittest import unittest.mock as mock @@ -212,49 +211,56 @@ class TestConfig(unittest.TestCase): assert state.attributes['hidden'] - def test_remove_lib_on_upgrade(self): + @mock.patch('homeassistant.config.shutil') + @mock.patch('homeassistant.config.os') + def test_remove_lib_on_upgrade(self, mock_os, mock_shutil): """Test removal of library on upgrade.""" - with tempfile.TemporaryDirectory() as config_dir: - version_path = os.path.join(config_dir, '.HA_VERSION') - lib_dir = os.path.join(config_dir, 'deps') - check_file = os.path.join(lib_dir, 'check') + ha_version = '0.7.0' - with open(version_path, 'wt') as outp: - outp.write('0.7.0') + 
mock_os.path.isdir = mock.Mock(return_value=True) - os.mkdir(lib_dir) - - with open(check_file, 'w'): - pass + mock_open = mock.mock_open() + with mock.patch('homeassistant.config.open', mock_open, create=True): + opened_file = mock_open.return_value + opened_file.readline.return_value = ha_version self.hass = get_test_home_assistant() - self.hass.config.config_dir = config_dir + self.hass.config.path = mock.Mock() - assert os.path.isfile(check_file) config_util.process_ha_config_upgrade(self.hass) - assert not os.path.isfile(check_file) - def test_not_remove_lib_if_not_upgrade(self): + hass_path = self.hass.config.path.return_value + + self.assertEqual(mock_os.path.isdir.call_count, 1) + self.assertEqual( + mock_os.path.isdir.call_args, mock.call(hass_path) + ) + + self.assertEqual(mock_shutil.rmtree.call_count, 1) + self.assertEqual( + mock_shutil.rmtree.call_args, mock.call(hass_path) + ) + + @mock.patch('homeassistant.config.shutil') + @mock.patch('homeassistant.config.os') + def test_not_remove_lib_if_not_upgrade(self, mock_os, mock_shutil): """Test removal of library with no upgrade.""" - with tempfile.TemporaryDirectory() as config_dir: - version_path = os.path.join(config_dir, '.HA_VERSION') - lib_dir = os.path.join(config_dir, 'deps') - check_file = os.path.join(lib_dir, 'check') + ha_version = __version__ - with open(version_path, 'wt') as outp: - outp.write(__version__) + mock_os.path.isdir = mock.Mock(return_value=True) - os.mkdir(lib_dir) - - with open(check_file, 'w'): - pass + mock_open = mock.mock_open() + with mock.patch('homeassistant.config.open', mock_open, create=True): + opened_file = mock_open.return_value + opened_file.readline.return_value = ha_version self.hass = get_test_home_assistant() - self.hass.config.config_dir = config_dir + self.hass.config.path = mock.Mock() config_util.process_ha_config_upgrade(self.hass) - assert os.path.isfile(check_file) + assert mock_os.path.isdir.call_count == 0 + assert mock_shutil.rmtree.call_count == 0 def test_loading_configuration(self): """Test loading core config onto hass object.""" diff --git a/tests/test_core.py b/tests/test_core.py index 39301b5614a..b3ab2ba4dbd 100644 --- a/tests/test_core.py +++ b/tests/test_core.py @@ -179,19 +179,16 @@ class TestEventBus(unittest.TestCase): def listener(_): pass - self.bus.listen('test', listener) + unsub = self.bus.listen('test', listener) self.assertEqual(old_count + 1, len(self.bus.listeners)) - # Try deleting a non registered listener, nothing should happen - self.bus._remove_listener('test', lambda x: len) - # Remove listener - self.bus._remove_listener('test', listener) + unsub() self.assertEqual(old_count, len(self.bus.listeners)) - # Try deleting listener while category doesn't exist either - self.bus._remove_listener('test', listener) + # Should do nothing now + unsub() def test_unsubscribe_listener(self): """Test unsubscribe listener from returned function.""" @@ -215,11 +212,48 @@ class TestEventBus(unittest.TestCase): assert len(calls) == 1 - def test_listen_once_event(self): + def test_listen_once_event_with_callback(self): """Test listen_once_event method.""" runs = [] - self.bus.listen_once('test_event', lambda x: runs.append(1)) + @ha.callback + def event_handler(event): + runs.append(event) + + self.bus.listen_once('test_event', event_handler) + + self.bus.fire('test_event') + # Second time it should not increase runs + self.bus.fire('test_event') + + self.hass.block_till_done() + self.assertEqual(1, len(runs)) + + def test_listen_once_event_with_coroutine(self): + 
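The test_config.py hunks above drop the temporary config directory and instead fake the `.HA_VERSION` read with `mock.mock_open()`, setting `readline.return_value` on the handle and patching `homeassistant.config.open` with `create=True`. The standalone sketch below uses the same mock API but simply patches `builtins.open`; `read_recorded_version` is an illustrative helper, not the real config code.

```python
from unittest import mock


def read_recorded_version(path):
    """Return the version string stored in a .HA_VERSION-style file."""
    with open(path) as inp:
        return inp.readline().strip()


def test_read_recorded_version():
    """Fake the file instead of writing to a real config directory."""
    mock_open = mock.mock_open()
    mock_open.return_value.readline.return_value = '0.7.0\n'

    with mock.patch('builtins.open', mock_open):
        assert read_recorded_version('/config/.HA_VERSION') == '0.7.0'

    assert mock_open.call_args == mock.call('/config/.HA_VERSION')
```

`mock_open()` configures the context-manager protocol for you, so `with open(...) as inp:` hands back the same handle whose `readline` was stubbed.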
"""Test listen_once_event method.""" + runs = [] + + @asyncio.coroutine + def event_handler(event): + runs.append(event) + + self.bus.listen_once('test_event', event_handler) + + self.bus.fire('test_event') + # Second time it should not increase runs + self.bus.fire('test_event') + + self.hass.block_till_done() + self.assertEqual(1, len(runs)) + + def test_listen_once_event_with_thread(self): + """Test listen_once_event method.""" + runs = [] + + def event_handler(event): + runs.append(event) + + self.bus.listen_once('test_event', event_handler) self.bus.fire('test_event') # Second time it should not increase runs @@ -604,7 +638,7 @@ class TestWorkerPoolMonitor(object): schedule_handle = MagicMock() hass.loop.call_later.return_value = schedule_handle - ha.async_monitor_worker_pool(hass) + ha._async_monitor_worker_pool(hass) assert hass.loop.call_later.called assert hass.bus.async_listen_once.called assert not schedule_handle.called @@ -650,7 +684,7 @@ class TestAsyncCreateTimer(object): now.second = 1 mock_utcnow.reset_mock() - ha.async_create_timer(hass) + ha._async_create_timer(hass) assert len(hass.bus.async_listen_once.mock_calls) == 2 start_timer = hass.bus.async_listen_once.mock_calls[1][1][1] diff --git a/tests/test_remote.py b/tests/test_remote.py index 653971f8bc1..316f13c5fc2 100644 --- a/tests/test_remote.py +++ b/tests/test_remote.py @@ -69,7 +69,7 @@ def setUpModule(): # pylint: disable=invalid-name {http.DOMAIN: {http.CONF_API_PASSWORD: API_PASSWORD, http.CONF_SERVER_PORT: SLAVE_PORT}}) - with patch.object(ha, 'async_create_timer', return_value=None): + with patch.object(ha, '_async_create_timer', return_value=None): slave.start() diff --git a/tests/util/test_package.py b/tests/util/test_package.py index 3aa742516e4..20fb8ca9a2f 100644 --- a/tests/util/test_package.py +++ b/tests/util/test_package.py @@ -1,57 +1,130 @@ """Test Home Assistant package util methods.""" import os -import tempfile +import pkg_resources +import subprocess import unittest -from homeassistant.bootstrap import mount_local_lib_path +from distutils.sysconfig import get_python_lib +from unittest.mock import call, patch + import homeassistant.util.package as package RESOURCE_DIR = os.path.abspath( os.path.join(os.path.dirname(__file__), '..', 'resources')) -TEST_EXIST_REQ = "pip>=7.0.0" -TEST_NEW_REQ = "pyhelloworld3==1.0.0" +TEST_EXIST_REQ = 'pip>=7.0.0' +TEST_NEW_REQ = 'pyhelloworld3==1.0.0' TEST_ZIP_REQ = 'file://{}#{}' \ .format(os.path.join(RESOURCE_DIR, 'pyhelloworld3.zip'), TEST_NEW_REQ) -class TestPackageUtil(unittest.TestCase): +@patch('homeassistant.util.package.subprocess.call') +@patch('homeassistant.util.package.check_package_exists') +class TestPackageUtilInstallPackage(unittest.TestCase): """Test for homeassistant.util.package module.""" - def setUp(self): - """Create local library for testing.""" - self.tmp_dir = tempfile.TemporaryDirectory() - self.lib_dir = mount_local_lib_path(self.tmp_dir.name) - - def tearDown(self): - """Stop everything that was started.""" - self.tmp_dir.cleanup() - - def test_install_existing_package(self): + def test_install_existing_package(self, mock_exists, mock_subprocess): """Test an install attempt on an existing package.""" - self.assertTrue(package.check_package_exists( - TEST_EXIST_REQ, self.lib_dir)) + mock_exists.return_value = True self.assertTrue(package.install_package(TEST_EXIST_REQ)) - def test_install_package_zip(self): - """Test an install attempt from a zip path.""" - self.assertFalse(package.check_package_exists( - TEST_ZIP_REQ, self.lib_dir)) 
- self.assertFalse(package.check_package_exists( - TEST_NEW_REQ, self.lib_dir)) + self.assertEqual(mock_exists.call_count, 1) + self.assertEqual(mock_exists.call_args, call(TEST_EXIST_REQ, None)) - self.assertTrue(package.install_package( - TEST_ZIP_REQ, True, self.lib_dir)) + self.assertEqual(mock_subprocess.call_count, 0) - self.assertTrue(package.check_package_exists( - TEST_ZIP_REQ, self.lib_dir)) - self.assertTrue(package.check_package_exists( - TEST_NEW_REQ, self.lib_dir)) + @patch('homeassistant.util.package.sys') + def test_install(self, mock_sys, mock_exists, mock_subprocess): + """Test an install attempt on a package that doesn't exist.""" + mock_exists.return_value = False + mock_subprocess.return_value = 0 - try: - import pyhelloworld3 - except ImportError: - self.fail('Unable to import pyhelloworld3 after installing it.') + self.assertTrue(package.install_package(TEST_NEW_REQ, False)) - self.assertEqual(pyhelloworld3.__version__, '1.0.0') + self.assertEqual(mock_exists.call_count, 1) + + self.assertEqual(mock_subprocess.call_count, 1) + self.assertEqual( + mock_subprocess.call_args, + call([ + mock_sys.executable, '-m', 'pip', 'install', '--quiet', + TEST_NEW_REQ + ]) + ) + + @patch('homeassistant.util.package.sys') + def test_install_upgrade(self, mock_sys, mock_exists, mock_subprocess): + """Test an upgrade attempt on a package.""" + mock_exists.return_value = False + mock_subprocess.return_value = 0 + + self.assertTrue(package.install_package(TEST_NEW_REQ)) + + self.assertEqual(mock_exists.call_count, 1) + + self.assertEqual(mock_subprocess.call_count, 1) + self.assertEqual( + mock_subprocess.call_args, + call([ + mock_sys.executable, '-m', 'pip', 'install', '--quiet', + TEST_NEW_REQ, '--upgrade' + ]) + ) + + @patch('homeassistant.util.package.sys') + def test_install_target(self, mock_sys, mock_exists, mock_subprocess): + """Test an install with a target.""" + target = 'target_folder' + mock_exists.return_value = False + mock_subprocess.return_value = 0 + + self.assertTrue( + package.install_package(TEST_NEW_REQ, False, target=target) + ) + + self.assertEqual(mock_exists.call_count, 1) + + self.assertEqual(mock_subprocess.call_count, 1) + self.assertEqual( + mock_subprocess.call_args, + call([ + mock_sys.executable, '-m', 'pip', 'install', '--quiet', + TEST_NEW_REQ, '--target', os.path.abspath(target) + ]) + ) + + @patch('homeassistant.util.package._LOGGER') + @patch('homeassistant.util.package.sys') + def test_install_error(self, mock_sys, mock_logger, mock_exists, + mock_subprocess): + """Test an install with a target.""" + mock_exists.return_value = False + mock_subprocess.side_effect = [subprocess.SubprocessError] + + self.assertFalse(package.install_package(TEST_NEW_REQ)) + + self.assertEqual(mock_logger.exception.call_count, 1) + + +class TestPackageUtilCheckPackageExists(unittest.TestCase): + """Test for homeassistant.util.package module.""" + + def test_check_package_global(self): + """Test for a globally-installed package.""" + installed_package = list(pkg_resources.working_set)[0].project_name + + self.assertTrue(package.check_package_exists(installed_package, None)) + + def test_check_package_local(self): + """Test for a locally-installed package.""" + lib_dir = get_python_lib() + installed_package = list(pkg_resources.working_set)[0].project_name + + self.assertTrue( + package.check_package_exists(installed_package, lib_dir) + ) + + def test_check_package_zip(self): + """Test for an installed zip package.""" + 
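The install tests above assert the exact pip command that gets spawned. The sketch below mirrors that asserted command line; it is a simplified stand-in for `homeassistant.util.package.install_package`, which additionally checks `check_package_exists` first and logs the exception instead of just returning False.

```python
import os
import subprocess
import sys


def install_package(package, upgrade=True, target=None):
    """Install a package with pip; return True on success.

    Illustrative sketch matching the command the tests above expect.
    """
    args = [sys.executable, '-m', 'pip', 'install', '--quiet', package]
    if upgrade:
        args.append('--upgrade')
    if target is not None:
        args += ['--target', os.path.abspath(target)]
    try:
        # Zero exit status from pip means the install succeeded.
        return subprocess.call(args) == 0
    except subprocess.SubprocessError:
        return False
```

Running pip via `sys.executable -m pip` keeps the install tied to the interpreter Home Assistant is running under, which is exactly what the mocked `sys.executable` in the tests verifies.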
self.assertFalse(package.check_package_exists(TEST_ZIP_REQ, None)) diff --git a/tests/util/test_yaml.py b/tests/util/test_yaml.py index 6b35e4f844c..7c7bb0b9255 100644 --- a/tests/util/test_yaml.py +++ b/tests/util/test_yaml.py @@ -2,7 +2,6 @@ import io import unittest import os -import tempfile from unittest.mock import patch from homeassistant.exceptions import HomeAssistantError @@ -68,77 +67,116 @@ class TestYaml(unittest.TestCase): def test_include_yaml(self): """Test include yaml.""" - with tempfile.NamedTemporaryFile() as include_file: - include_file.write(b"value") - include_file.seek(0) - conf = "key: !include {}".format(include_file.name) + with patch_yaml_files({'test.yaml': 'value'}): + conf = 'key: !include test.yaml' with io.StringIO(conf) as file: doc = yaml.yaml.safe_load(file) assert doc["key"] == "value" - def test_include_dir_list(self): + @patch('homeassistant.util.yaml.os.walk') + def test_include_dir_list(self, mock_walk): """Test include dir list yaml.""" - with tempfile.TemporaryDirectory() as include_dir: - file_1 = tempfile.NamedTemporaryFile(dir=include_dir, - suffix=".yaml", delete=False) - file_1.write(b"one") - file_1.close() - file_2 = tempfile.NamedTemporaryFile(dir=include_dir, - suffix=".yaml", delete=False) - file_2.write(b"two") - file_2.close() - conf = "key: !include_dir_list {}".format(include_dir) + mock_walk.return_value = [['/tmp', [], ['one.yaml', 'two.yaml']]] + + with patch_yaml_files({ + '/tmp/one.yaml': 'one', '/tmp/two.yaml': 'two' + }): + conf = "key: !include_dir_list /tmp" with io.StringIO(conf) as file: doc = yaml.yaml.safe_load(file) assert sorted(doc["key"]) == sorted(["one", "two"]) - def test_include_dir_named(self): + @patch('homeassistant.util.yaml.os.walk') + def test_include_dir_list_recursive(self, mock_walk): + """Test include dir recursive list yaml.""" + mock_walk.return_value = [ + ['/tmp', ['tmp2'], ['zero.yaml']], + ['/tmp/tmp2', [], ['one.yaml', 'two.yaml']], + ] + + with patch_yaml_files({ + '/tmp/zero.yaml': 'zero', '/tmp/tmp2/one.yaml': 'one', + '/tmp/tmp2/two.yaml': 'two' + }): + conf = "key: !include_dir_list /tmp" + with io.StringIO(conf) as file: + doc = yaml.yaml.safe_load(file) + assert sorted(doc["key"]) == sorted(["zero", "one", "two"]) + + @patch('homeassistant.util.yaml.os.walk') + def test_include_dir_named(self, mock_walk): """Test include dir named yaml.""" - with tempfile.TemporaryDirectory() as include_dir: - file_1 = tempfile.NamedTemporaryFile(dir=include_dir, - suffix=".yaml", delete=False) - file_1.write(b"one") - file_1.close() - file_2 = tempfile.NamedTemporaryFile(dir=include_dir, - suffix=".yaml", delete=False) - file_2.write(b"two") - file_2.close() - conf = "key: !include_dir_named {}".format(include_dir) - correct = {} - correct[os.path.splitext(os.path.basename(file_1.name))[0]] = "one" - correct[os.path.splitext(os.path.basename(file_2.name))[0]] = "two" + mock_walk.return_value = [['/tmp', [], ['first.yaml', 'second.yaml']]] + + with patch_yaml_files({ + '/tmp/first.yaml': 'one', '/tmp/second.yaml': 'two' + }): + conf = "key: !include_dir_named /tmp" + correct = {'first': 'one', 'second': 'two'} with io.StringIO(conf) as file: doc = yaml.yaml.safe_load(file) assert doc["key"] == correct - def test_include_dir_merge_list(self): + @patch('homeassistant.util.yaml.os.walk') + def test_include_dir_named_recursive(self, mock_walk): + """Test include dir named yaml.""" + mock_walk.return_value = [ + ['/tmp', ['tmp2'], ['first.yaml']], + ['/tmp/tmp2', [], ['second.yaml', 'third.yaml']], + ] + + with 
patch_yaml_files({ + '/tmp/first.yaml': 'one', '/tmp/tmp2/second.yaml': 'two', + '/tmp/tmp2/third.yaml': 'three' + }): + conf = "key: !include_dir_named /tmp" + correct = {'first': 'one', 'second': 'two', 'third': 'three'} + with io.StringIO(conf) as file: + doc = yaml.yaml.safe_load(file) + assert doc["key"] == correct + + @patch('homeassistant.util.yaml.os.walk') + def test_include_dir_merge_list(self, mock_walk): """Test include dir merge list yaml.""" - with tempfile.TemporaryDirectory() as include_dir: - file_1 = tempfile.NamedTemporaryFile(dir=include_dir, - suffix=".yaml", delete=False) - file_1.write(b"- one") - file_1.close() - file_2 = tempfile.NamedTemporaryFile(dir=include_dir, - suffix=".yaml", delete=False) - file_2.write(b"- two\n- three") - file_2.close() - conf = "key: !include_dir_merge_list {}".format(include_dir) + mock_walk.return_value = [['/tmp', [], ['first.yaml', 'second.yaml']]] + + with patch_yaml_files({ + '/tmp/first.yaml': '- one', + '/tmp/second.yaml': '- two\n- three' + }): + conf = "key: !include_dir_merge_list /tmp" with io.StringIO(conf) as file: doc = yaml.yaml.safe_load(file) assert sorted(doc["key"]) == sorted(["one", "two", "three"]) - def test_include_dir_merge_named(self): + @patch('homeassistant.util.yaml.os.walk') + def test_include_dir_merge_list_recursive(self, mock_walk): + """Test include dir merge list yaml.""" + mock_walk.return_value = [ + ['/tmp', ['tmp2'], ['first.yaml']], + ['/tmp/tmp2', [], ['second.yaml', 'third.yaml']], + ] + + with patch_yaml_files({ + '/tmp/first.yaml': '- one', '/tmp/tmp2/second.yaml': '- two', + '/tmp/tmp2/third.yaml': '- three\n- four' + }): + conf = "key: !include_dir_merge_list /tmp" + with io.StringIO(conf) as file: + doc = yaml.yaml.safe_load(file) + assert sorted(doc["key"]) == sorted(["one", "two", + "three", "four"]) + + @patch('homeassistant.util.yaml.os.walk') + def test_include_dir_merge_named(self, mock_walk): """Test include dir merge named yaml.""" - with tempfile.TemporaryDirectory() as include_dir: - file_1 = tempfile.NamedTemporaryFile(dir=include_dir, - suffix=".yaml", delete=False) - file_1.write(b"key1: one") - file_1.close() - file_2 = tempfile.NamedTemporaryFile(dir=include_dir, - suffix=".yaml", delete=False) - file_2.write(b"key2: two\nkey3: three") - file_2.close() - conf = "key: !include_dir_merge_named {}".format(include_dir) + mock_walk.return_value = [['/tmp', [], ['first.yaml', 'second.yaml']]] + + with patch_yaml_files({ + '/tmp/first.yaml': 'key1: one', + '/tmp/second.yaml': 'key2: two\nkey3: three' + }): + conf = "key: !include_dir_merge_named /tmp" with io.StringIO(conf) as file: doc = yaml.yaml.safe_load(file) assert doc["key"] == { @@ -147,6 +185,29 @@ class TestYaml(unittest.TestCase): "key3": "three" } + @patch('homeassistant.util.yaml.os.walk') + def test_include_dir_merge_named_recursive(self, mock_walk): + """Test include dir merge named yaml.""" + mock_walk.return_value = [ + ['/tmp', ['tmp2'], ['first.yaml']], + ['/tmp/tmp2', [], ['second.yaml', 'third.yaml']], + ] + + with patch_yaml_files({ + '/tmp/first.yaml': 'key1: one', + '/tmp/tmp2/second.yaml': 'key2: two', + '/tmp/tmp2/third.yaml': 'key3: three\nkey4: four' + }): + conf = "key: !include_dir_merge_named /tmp" + with io.StringIO(conf) as file: + doc = yaml.yaml.safe_load(file) + assert doc["key"] == { + "key1": "one", + "key2": "two", + "key3": "three", + "key4": "four" + } + FILES = {}
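The rewritten test_yaml.py cases above patch `os.walk` and the file contents, then let the real `!include_dir_list` style tags do the directory traversal. For readers unfamiliar with how such tags work, here is a hedged sketch of a custom PyYAML constructor for a list-style include; it is illustrative only, since Home Assistant's loader registers several tags (`!include`, `!include_dir_named`, the merge variants) with more validation and ordering guarantees.

```python
import os

import yaml


def _include_dir_list(loader, node):
    """Load every *.yaml file under a directory into a flat list."""
    path = loader.construct_scalar(node)
    documents = []
    # os.walk yields (root, dirs, files); patching it in tests controls
    # exactly which fake files are "found", as the hunks above do.
    for root, _dirs, files in os.walk(path):
        for fname in sorted(files):
            if not fname.endswith('.yaml'):
                continue
            with open(os.path.join(root, fname)) as inp:
                documents.append(yaml.safe_load(inp))
    return documents


yaml.SafeLoader.add_constructor('!include_dir_list', _include_dir_list)

# With os.walk and open patched as in the tests above, this resolves to
# whatever the fake files contain:
#   yaml.safe_load('key: !include_dir_list /tmp')
```

Because the constructor goes through `os.walk` and `open`, the tests can stay entirely in memory: mocking those two entry points is enough to cover flat and recursive directory layouts.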