Mirror of https://github.com/home-assistant/core.git (synced 2025-07-28 07:37:34 +00:00)
Commit 70412fc0ba
@@ -215,6 +215,9 @@ omit =
     homeassistant/components/opencv.py
     homeassistant/components/*/opencv.py
 
+    homeassistant/components/openuv.py
+    homeassistant/components/*/openuv.py
+
     homeassistant/components/pilight.py
     homeassistant/components/*/pilight.py
 
@@ -440,6 +443,7 @@ omit =
     homeassistant/components/device_tracker/netgear.py
     homeassistant/components/device_tracker/nmap_tracker.py
     homeassistant/components/device_tracker/ping.py
+    homeassistant/components/device_tracker/ritassist.py
     homeassistant/components/device_tracker/sky_hub.py
     homeassistant/components/device_tracker/snmp.py
     homeassistant/components/device_tracker/swisscom.py
@@ -509,6 +513,7 @@ omit =
     homeassistant/components/media_player/denon.py
     homeassistant/components/media_player/denonavr.py
     homeassistant/components/media_player/directv.py
+    homeassistant/components/media_player/dlna_dmr.py
     homeassistant/components/media_player/dunehd.py
     homeassistant/components/media_player/emby.py
     homeassistant/components/media_player/epson.py
@@ -532,6 +537,7 @@ omit =
     homeassistant/components/media_player/pandora.py
     homeassistant/components/media_player/philips_js.py
     homeassistant/components/media_player/pioneer.py
+    homeassistant/components/media_player/pjlink.py
     homeassistant/components/media_player/plex.py
     homeassistant/components/media_player/roku.py
     homeassistant/components/media_player/russound_rio.py
@@ -632,6 +638,7 @@ omit =
     homeassistant/components/sensor/eddystone_temperature.py
     homeassistant/components/sensor/eliqonline.py
     homeassistant/components/sensor/emoncms.py
+    homeassistant/components/sensor/enphase_envoy.py
     homeassistant/components/sensor/envirophat.py
     homeassistant/components/sensor/etherscan.py
     homeassistant/components/sensor/fastdotcom.py
@@ -13,7 +13,8 @@ matrix:
     - python: "3.5.3"
       env: TOXENV=typing
     - python: "3.5.3"
-      env: TOXENV=py35
+      env: TOXENV=cov
+      after_success: coveralls
     - python: "3.6"
      env: TOXENV=py36
     - python: "3.7"
@@ -45,4 +46,3 @@ deploy:
   on:
     branch: dev
     condition: $TOXENV = lint
-after_success: coveralls
@@ -98,6 +98,8 @@ homeassistant/components/konnected.py @heythisisnate
 homeassistant/components/*/konnected.py @heythisisnate
 homeassistant/components/matrix.py @tinloaf
 homeassistant/components/*/matrix.py @tinloaf
+homeassistant/components/openuv.py @bachya
+homeassistant/components/*/openuv.py @bachya
 homeassistant/components/qwikswitch.py @kellerza
 homeassistant/components/*/qwikswitch.py @kellerza
 homeassistant/components/rainmachine/* @bachya
@@ -10,7 +10,6 @@ LABEL maintainer="Paulus Schoutsen <Paulus@PaulusSchoutsen.nl>"
 #ENV INSTALL_OPENALPR no
 #ENV INSTALL_FFMPEG no
 #ENV INSTALL_LIBCEC no
-#ENV INSTALL_PHANTOMJS no
 #ENV INSTALL_SSOCR no
 #ENV INSTALL_IPERF3 no
 
@@ -1,5 +1,5 @@
-Home Assistant |Build Status| |Coverage Status| |Chat Status|
-=============================================================
+Home Assistant |Build Status| |Coverage Status| |Chat Status| |Reviewed by Hound|
+=================================================================================
 
 Home Assistant is a home automation platform running on Python 3. It is able to track and control all devices at home and offer a platform for automating control.
 
@@ -33,6 +33,8 @@ of a component, check the `Home Assistant help section <https://home-assistant.i
    :target: https://coveralls.io/r/home-assistant/home-assistant?branch=master
 .. |Chat Status| image:: https://img.shields.io/discord/330944238910963714.svg
    :target: https://discord.gg/c5DvZ4e
+.. |Reviewed by Hound| image:: https://img.shields.io/badge/Reviewed_by-Hound-8E64B0.svg
+   :target: https://houndci.com
 .. |screenshot-states| image:: https://raw.github.com/home-assistant/home-assistant/master/docs/screenshots.png
    :target: https://home-assistant.io/demo/
 .. |screenshot-components| image:: https://raw.github.com/home-assistant/home-assistant/dev/docs/screenshot-components.png
@@ -2,18 +2,23 @@
 import asyncio
 import logging
 from collections import OrderedDict
+from typing import List, Awaitable
 
+import jwt
+
 from homeassistant import data_entry_flow
-from homeassistant.core import callback
+from homeassistant.core import callback, HomeAssistant
+from homeassistant.util import dt as dt_util
 
-from . import models
 from . import auth_store
 from .providers import auth_provider_from_config
 
 _LOGGER = logging.getLogger(__name__)
 
 
-async def auth_manager_from_config(hass, provider_configs):
+async def auth_manager_from_config(
+        hass: HomeAssistant,
+        provider_configs: List[dict]) -> Awaitable['AuthManager']:
     """Initialize an auth manager from config."""
     store = auth_store.AuthStore(hass)
     if provider_configs:
@@ -51,7 +56,6 @@ class AuthManager:
         self.login_flow = data_entry_flow.FlowManager(
             hass, self._async_create_login_flow,
             self._async_finish_login_flow)
-        self._access_tokens = OrderedDict()
 
     @property
     def active(self):
@@ -178,43 +182,64 @@ class AuthManager:
 
         return await self._store.async_create_refresh_token(user, client_id)
 
-    async def async_get_refresh_token(self, token):
+    async def async_get_refresh_token(self, token_id):
+        """Get refresh token by id."""
+        return await self._store.async_get_refresh_token(token_id)
+
+    async def async_get_refresh_token_by_token(self, token):
         """Get refresh token by token."""
-        return await self._store.async_get_refresh_token(token)
+        return await self._store.async_get_refresh_token_by_token(token)
 
     @callback
     def async_create_access_token(self, refresh_token):
         """Create a new access token."""
-        access_token = models.AccessToken(refresh_token=refresh_token)
-        self._access_tokens[access_token.token] = access_token
-        return access_token
+        # pylint: disable=no-self-use
+        return jwt.encode({
+            'iss': refresh_token.id,
+            'iat': dt_util.utcnow(),
+            'exp': dt_util.utcnow() + refresh_token.access_token_expiration,
+        }, refresh_token.jwt_key, algorithm='HS256').decode()
 
-    @callback
-    def async_get_access_token(self, token):
-        """Get an access token."""
-        tkn = self._access_tokens.get(token)
-
-        if tkn is None:
-            _LOGGER.debug('Attempt to get non-existing access token')
+    async def async_validate_access_token(self, token):
+        """Return if an access token is valid."""
+        try:
+            unverif_claims = jwt.decode(token, verify=False)
+        except jwt.InvalidTokenError:
             return None
 
-        if tkn.expired or not tkn.refresh_token.user.is_active:
-            if tkn.expired:
-                _LOGGER.debug('Attempt to get expired access token')
-            else:
-                _LOGGER.debug('Attempt to get access token for inactive user')
-            self._access_tokens.pop(token)
+        refresh_token = await self.async_get_refresh_token(
+            unverif_claims.get('iss'))
+
+        if refresh_token is None:
+            jwt_key = ''
+            issuer = ''
+        else:
+            jwt_key = refresh_token.jwt_key
+            issuer = refresh_token.id
+
+        try:
+            jwt.decode(
+                token,
+                jwt_key,
+                leeway=10,
+                issuer=issuer,
+                algorithms=['HS256']
+            )
+        except jwt.InvalidTokenError:
             return None
 
-        return tkn
+        if not refresh_token.user.is_active:
+            return None
+
+        return refresh_token
 
-    async def _async_create_login_flow(self, handler, *, source, data):
+    async def _async_create_login_flow(self, handler, *, context, data):
         """Create a login flow."""
         auth_provider = self._providers[handler]
 
-        return await auth_provider.async_credential_flow()
+        return await auth_provider.async_credential_flow(context)
 
-    async def _async_finish_login_flow(self, result):
+    async def _async_finish_login_flow(self, context, result):
         """Result of a credential login flow."""
         if result['type'] != data_entry_flow.RESULT_TYPE_CREATE_ENTRY:
             return None
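Note (not part of the commit): the hunk above replaces stored AccessToken objects with stateless JWT access tokens signed per refresh token. Below is a minimal standalone sketch of that scheme, assuming PyJWT 1.x (where jwt.encode() returns bytes and jwt.decode(..., verify=False) skips signature verification); the secret and issuer values are invented for illustration.

# Standalone sketch of JWT-based access tokens (PyJWT 1.x assumed).
from datetime import datetime, timedelta
import jwt

SECRET = 'per-refresh-token-secret'          # stands in for refresh_token.jwt_key
REFRESH_TOKEN_ID = 'some-refresh-token-id'   # stands in for refresh_token.id

def create_access_token():
    """Encode a short-lived access token tied to one refresh token."""
    now = datetime.utcnow()
    return jwt.encode({
        'iss': REFRESH_TOKEN_ID,
        'iat': now,
        'exp': now + timedelta(minutes=30),
    }, SECRET, algorithm='HS256').decode()

def validate_access_token(token):
    """Return the issuer if the token verifies, else None."""
    try:
        # Read unverified claims first to learn which key to verify with,
        # mirroring the two-step decode in async_validate_access_token.
        unverified = jwt.decode(token, verify=False)
        jwt.decode(token, SECRET, leeway=10,
                   issuer=unverified.get('iss'), algorithms=['HS256'])
    except jwt.InvalidTokenError:
        return None
    return unverified.get('iss')

print(validate_access_token(create_access_token()))  # some-refresh-token-id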
@@ -1,6 +1,7 @@
 """Storage for auth models."""
 from collections import OrderedDict
 from datetime import timedelta
+import hmac
 
 from homeassistant.util import dt as dt_util
 
@@ -110,22 +111,36 @@ class AuthStore:
     async def async_create_refresh_token(self, user, client_id=None):
         """Create a new token for a user."""
         refresh_token = models.RefreshToken(user=user, client_id=client_id)
-        user.refresh_tokens[refresh_token.token] = refresh_token
+        user.refresh_tokens[refresh_token.id] = refresh_token
         await self.async_save()
         return refresh_token
 
-    async def async_get_refresh_token(self, token):
-        """Get refresh token by token."""
+    async def async_get_refresh_token(self, token_id):
+        """Get refresh token by id."""
         if self._users is None:
             await self.async_load()
 
         for user in self._users.values():
-            refresh_token = user.refresh_tokens.get(token)
+            refresh_token = user.refresh_tokens.get(token_id)
             if refresh_token is not None:
                 return refresh_token
 
         return None
 
+    async def async_get_refresh_token_by_token(self, token):
+        """Get refresh token by token."""
+        if self._users is None:
+            await self.async_load()
+
+        found = None
+
+        for user in self._users.values():
+            for refresh_token in user.refresh_tokens.values():
+                if hmac.compare_digest(refresh_token.token, token):
+                    found = refresh_token
+
+        return found
+
     async def async_load(self):
         """Load the users."""
         data = await self._store.async_load()
@@ -153,9 +168,11 @@ class AuthStore:
                 data=cred_dict['data'],
             ))
 
-        refresh_tokens = OrderedDict()
-
         for rt_dict in data['refresh_tokens']:
+            # Filter out the old keys that don't have jwt_key (pre-0.76)
+            if 'jwt_key' not in rt_dict:
+                continue
+
             token = models.RefreshToken(
                 id=rt_dict['id'],
                 user=users[rt_dict['user_id']],
@@ -164,18 +181,9 @@ class AuthStore:
                 access_token_expiration=timedelta(
                     seconds=rt_dict['access_token_expiration']),
                 token=rt_dict['token'],
+                jwt_key=rt_dict['jwt_key']
             )
-            refresh_tokens[token.id] = token
-            users[rt_dict['user_id']].refresh_tokens[token.token] = token
-
-        for ac_dict in data['access_tokens']:
-            refresh_token = refresh_tokens[ac_dict['refresh_token_id']]
-            token = models.AccessToken(
-                refresh_token=refresh_token,
-                created_at=dt_util.parse_datetime(ac_dict['created_at']),
-                token=ac_dict['token'],
-            )
-            refresh_token.access_tokens.append(token)
+            users[rt_dict['user_id']].refresh_tokens[token.id] = token
 
         self._users = users
 
@@ -213,27 +221,15 @@ class AuthStore:
                 'access_token_expiration':
                     refresh_token.access_token_expiration.total_seconds(),
                 'token': refresh_token.token,
+                'jwt_key': refresh_token.jwt_key,
             }
            for user in self._users.values()
             for refresh_token in user.refresh_tokens.values()
         ]
 
-        access_tokens = [
-            {
-                'id': user.id,
-                'refresh_token_id': refresh_token.id,
-                'created_at': access_token.created_at.isoformat(),
-                'token': access_token.token,
-            }
-            for user in self._users.values()
-            for refresh_token in user.refresh_tokens.values()
-            for access_token in refresh_token.access_tokens
-        ]
-
         data = {
             'users': users,
             'credentials': credentials,
-            'access_tokens': access_tokens,
             'refresh_tokens': refresh_tokens,
         }
 
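Note (not part of the commit): async_get_refresh_token_by_token above compares every stored token with hmac.compare_digest instead of == and keeps scanning rather than returning early, so the lookup time does not reveal where a presented token first differs from a stored one. A tiny standalone sketch; the token values are invented.

# Standalone sketch of constant-time token matching.
import hmac

stored_tokens = {'abc123': 'user-1', 'def456': 'user-2'}  # made-up values

def find_user(presented):
    """Compare against every stored token; do not return early on a match."""
    found = None
    for token, user in stored_tokens.items():
        if hmac.compare_digest(token, presented):
            found = user
    return found

print(find_user('def456'))  # user-2
print(find_user('nope'))    # None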
@@ -39,26 +39,8 @@ class RefreshToken:
                                         default=ACCESS_TOKEN_EXPIRATION)
     token = attr.ib(type=str,
                     default=attr.Factory(lambda: generate_secret(64)))
-    access_tokens = attr.ib(type=list, default=attr.Factory(list), cmp=False)
-
-
-@attr.s(slots=True)
-class AccessToken:
-    """Access token to access the API.
-
-    These will only ever be stored in memory and not be persisted.
-    """
-
-    refresh_token = attr.ib(type=RefreshToken)
-    created_at = attr.ib(type=datetime, default=attr.Factory(dt_util.utcnow))
-    token = attr.ib(type=str,
-                    default=attr.Factory(generate_secret))
-
-    @property
-    def expired(self):
-        """Return if this token has expired."""
-        expires = self.created_at + self.refresh_token.access_token_expiration
-        return dt_util.utcnow() > expires
+    jwt_key = attr.ib(type=str,
+                      default=attr.Factory(lambda: generate_secret(64)))
 
 
 @attr.s(slots=True)
@@ -123,7 +123,7 @@ class AuthProvider:
 
     # Implement by extending class
 
-    async def async_credential_flow(self):
+    async def async_credential_flow(self, context):
         """Return the data flow for logging in with auth provider."""
         raise NotImplementedError
 
@@ -158,7 +158,7 @@ class HassAuthProvider(AuthProvider):
             self.data = Data(self.hass)
             await self.data.async_load()
 
-    async def async_credential_flow(self):
+    async def async_credential_flow(self, context):
         """Return a flow to login."""
         return LoginFlow(self)
 
@@ -31,7 +31,7 @@ class InvalidAuthError(HomeAssistantError):
 class ExampleAuthProvider(AuthProvider):
     """Example auth provider based on hardcoded usernames and passwords."""
 
-    async def async_credential_flow(self):
+    async def async_credential_flow(self, context):
         """Return a flow to login."""
         return LoginFlow(self)
 
@@ -36,7 +36,7 @@ class LegacyApiPasswordAuthProvider(AuthProvider):
 
     DEFAULT_TITLE = 'Legacy API Password'
 
-    async def async_credential_flow(self):
+    async def async_credential_flow(self, context):
         """Return a flow to login."""
         return LoginFlow(self)
 
@@ -10,6 +10,7 @@ Component design guidelines:
 import asyncio
 import itertools as it
 import logging
+from typing import Awaitable
 
 import homeassistant.core as ha
 import homeassistant.config as conf_util
@@ -109,7 +110,7 @@ def async_reload_core_config(hass):
 
 
 @asyncio.coroutine
-def async_setup(hass, config):
+def async_setup(hass: ha.HomeAssistant, config: dict) -> Awaitable[bool]:
     """Set up general services related to Home Assistant."""
     @asyncio.coroutine
     def async_handle_turn_service(service):
@@ -21,7 +21,7 @@ _LOGGER = logging.getLogger(__name__)
 
 _CONFIGURING = {}
 
-REQUIREMENTS = ['py-august==0.4.0']
+REQUIREMENTS = ['py-august==0.6.0']
 
 DEFAULT_TIMEOUT = 10
 ACTIVITY_FETCH_LIMIT = 10
@@ -155,7 +155,7 @@ class GrantTokenView(HomeAssistantView):
         access_token = hass.auth.async_create_access_token(refresh_token)
 
         return self.json({
-            'access_token': access_token.token,
+            'access_token': access_token,
             'token_type': 'Bearer',
             'refresh_token': refresh_token.token,
             'expires_in':
@@ -178,7 +178,7 @@ class GrantTokenView(HomeAssistantView):
                 'error': 'invalid_request',
             }, status_code=400)
 
-        refresh_token = await hass.auth.async_get_refresh_token(token)
+        refresh_token = await hass.auth.async_get_refresh_token_by_token(token)
 
         if refresh_token is None:
             return self.json({
@@ -193,7 +193,7 @@ class GrantTokenView(HomeAssistantView):
         access_token = hass.auth.async_create_access_token(refresh_token)
 
         return self.json({
-            'access_token': access_token.token,
+            'access_token': access_token,
             'token_type': 'Bearer',
             'expires_in':
                 int(refresh_token.access_token_expiration.total_seconds()),
@@ -1,6 +1,10 @@
 """Helpers to resolve client ID/secret."""
+import asyncio
+from html.parser import HTMLParser
 from ipaddress import ip_address, ip_network
-from urllib.parse import urlparse
+from urllib.parse import urlparse, urljoin
 
+from aiohttp.client_exceptions import ClientError
+
 # IP addresses of loopback interfaces
 ALLOWED_IPS = (
@@ -16,7 +20,7 @@ ALLOWED_NETWORKS = (
 )
 
 
-def verify_redirect_uri(client_id, redirect_uri):
+async def verify_redirect_uri(hass, client_id, redirect_uri):
     """Verify that the client and redirect uri match."""
     try:
         client_id_parts = _parse_client_id(client_id)
@@ -25,16 +29,75 @@ def verify_redirect_uri(client_id, redirect_uri):
 
     redirect_parts = _parse_url(redirect_uri)
 
-    # IndieAuth 4.2.2 allows for redirect_uri to be on different domain
-    # but needs to be specified in link tag when fetching `client_id`.
-    # This is not implemented.
-
     # Verify redirect url and client url have same scheme and domain.
-    return (
+    is_valid = (
         client_id_parts.scheme == redirect_parts.scheme and
         client_id_parts.netloc == redirect_parts.netloc
     )
 
+    if is_valid:
+        return True
+
+    # IndieAuth 4.2.2 allows for redirect_uri to be on different domain
+    # but needs to be specified in link tag when fetching `client_id`.
+    redirect_uris = await fetch_redirect_uris(hass, client_id)
+    return redirect_uri in redirect_uris
+
+
+class LinkTagParser(HTMLParser):
+    """Parser to find link tags."""
+
+    def __init__(self, rel):
+        """Initialize a link tag parser."""
+        super().__init__()
+        self.rel = rel
+        self.found = []
+
+    def handle_starttag(self, tag, attrs):
+        """Handle finding a start tag."""
+        if tag != 'link':
+            return
+
+        attrs = dict(attrs)
+
+        if attrs.get('rel') == self.rel:
+            self.found.append(attrs.get('href'))
+
+
+async def fetch_redirect_uris(hass, url):
+    """Find link tag with redirect_uri values.
+
+    IndieAuth 4.2.2
+
+    The client SHOULD publish one or more <link> tags or Link HTTP headers with
+    a rel attribute of redirect_uri at the client_id URL.
+
+    We limit to the first 10kB of the page.
+
+    We do not implement extracting redirect uris from headers.
+    """
+    session = hass.helpers.aiohttp_client.async_get_clientsession()
+    parser = LinkTagParser('redirect_uri')
+    chunks = 0
+    try:
+        resp = await session.get(url, timeout=5)
+
+        async for data in resp.content.iter_chunked(1024):
+            parser.feed(data.decode())
+            chunks += 1
+
+            if chunks == 10:
+                break
+
+    except (asyncio.TimeoutError, ClientError):
+        pass
+
+    # Authorization endpoints verifying that a redirect_uri is allowed for use
+    # by a client MUST look for an exact match of the given redirect_uri in the
+    # request against the list of redirect_uris discovered after resolving any
+    # relative URLs.
+    return [urljoin(url, found) for found in parser.found]
+
+
 def verify_client_id(client_id):
     """Verify that the client id is valid."""
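Note (not part of the commit): a standalone sketch of the link-tag discovery added above. It reuses the same LinkTagParser logic against a made-up HTML page and resolves relative hrefs with urljoin, as fetch_redirect_uris does.

# Standalone sketch of redirect_uri discovery; the HTML below is invented.
from html.parser import HTMLParser
from urllib.parse import urljoin

class LinkTagParser(HTMLParser):
    """Collect href values from <link> tags with a matching rel attribute."""

    def __init__(self, rel):
        super().__init__()
        self.rel = rel
        self.found = []

    def handle_starttag(self, tag, attrs):
        if tag != 'link':
            return
        attrs = dict(attrs)
        if attrs.get('rel') == self.rel:
            self.found.append(attrs.get('href'))

client_id = 'https://example.com/app'
page = '<html><head><link rel="redirect_uri" href="/callback"></head></html>'

parser = LinkTagParser('redirect_uri')
parser.feed(page)

# Relative hrefs are resolved against the client_id URL, as in the diff.
print([urljoin(client_id, href) for href in parser.found])
# ['https://example.com/callback']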
@@ -54,7 +54,6 @@ have type "create_entry" and "result" key will contain an authorization code.
     "flow_id": "8f7e42faab604bcab7ac43c44ca34d58",
     "handler": ["insecure_example", null],
     "result": "411ee2f916e648d691e937ae9344681e",
-    "source": "user",
     "title": "Example",
     "type": "create_entry",
     "version": 1
@@ -68,8 +67,6 @@ from homeassistant.components.http.ban import process_wrong_login, \
     log_invalid_auth
 from homeassistant.components.http.data_validator import RequestDataValidator
 from homeassistant.components.http.view import HomeAssistantView
-from homeassistant.helpers.data_entry_flow import (
-    FlowManagerIndexView, FlowManagerResourceView)
 from . import indieauth
 
 
@@ -97,13 +94,41 @@ class AuthProvidersView(HomeAssistantView):
         } for provider in request.app['hass'].auth.auth_providers])
 
 
-class LoginFlowIndexView(FlowManagerIndexView):
+def _prepare_result_json(result):
+    """Convert result to JSON."""
+    if result['type'] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY:
+        data = result.copy()
+        data.pop('result')
+        data.pop('data')
+        return data
+
+    if result['type'] != data_entry_flow.RESULT_TYPE_FORM:
+        return result
+
+    import voluptuous_serialize
+
+    data = result.copy()
+
+    schema = data['data_schema']
+    if schema is None:
+        data['data_schema'] = []
+    else:
+        data['data_schema'] = voluptuous_serialize.convert(schema)
+
+    return data
+
+
+class LoginFlowIndexView(HomeAssistantView):
     """View to create a config flow."""
 
     url = '/auth/login_flow'
     name = 'api:auth:login_flow'
     requires_auth = False
 
+    def __init__(self, flow_mgr):
+        """Initialize the flow manager index view."""
+        self._flow_mgr = flow_mgr
+
     async def get(self, request):
         """Do not allow index of flows in progress."""
         return aiohttp.web.Response(status=405)
@@ -116,15 +141,26 @@ class LoginFlowIndexView(FlowManagerIndexView):
     @log_invalid_auth
     async def post(self, request, data):
         """Create a new login flow."""
-        if not indieauth.verify_redirect_uri(data['client_id'],
-                                             data['redirect_uri']):
+        if not await indieauth.verify_redirect_uri(
+                request.app['hass'], data['client_id'], data['redirect_uri']):
             return self.json_message('invalid client id or redirect uri', 400)
 
-        # pylint: disable=no-value-for-parameter
-        return await super().post(request)
+        if isinstance(data['handler'], list):
+            handler = tuple(data['handler'])
+        else:
+            handler = data['handler']
+
+        try:
+            result = await self._flow_mgr.async_init(handler, context={})
+        except data_entry_flow.UnknownHandler:
+            return self.json_message('Invalid handler specified', 404)
+        except data_entry_flow.UnknownStep:
+            return self.json_message('Handler does not support init', 400)
+
+        return self.json(_prepare_result_json(result))
 
 
-class LoginFlowResourceView(FlowManagerResourceView):
+class LoginFlowResourceView(HomeAssistantView):
     """View to interact with the flow manager."""
 
     url = '/auth/login_flow/{flow_id}'
@@ -133,10 +169,10 @@ class LoginFlowResourceView(FlowManagerResourceView):
 
     def __init__(self, flow_mgr, store_credentials):
         """Initialize the login flow resource view."""
-        super().__init__(flow_mgr)
+        self._flow_mgr = flow_mgr
         self._store_credentials = store_credentials
 
-    async def get(self, request, flow_id):
+    async def get(self, request):
         """Do not allow getting status of a flow in progress."""
         return self.json_message('Invalid flow specified', 404)
 
@@ -164,9 +200,18 @@ class LoginFlowResourceView(FlowManagerResourceView):
             if result['errors'] is not None and \
                     result['errors'].get('base') == 'invalid_auth':
                 await process_wrong_login(request)
-            return self.json(self._prepare_result_json(result))
+            return self.json(_prepare_result_json(result))
 
         result.pop('data')
         result['result'] = self._store_credentials(client_id, result['result'])
 
         return self.json(result)
+
+    async def delete(self, request, flow_id):
+        """Cancel a flow in progress."""
+        try:
+            self._flow_mgr.async_abort(flow_id)
+        except data_entry_flow.UnknownFlow:
+            return self.json_message('Invalid flow specified', 404)
+
+        return self.json_message('Flow aborted')
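Note (not part of the commit): _prepare_result_json above serializes a form's voluptuous schema with voluptuous_serialize so the frontend can render the fields. A hedged sketch of that conversion, assuming the voluptuous and voluptuous-serialize packages are installed; the exact output keys may vary by version.

# Standalone sketch of schema serialization (output format is approximate).
import voluptuous as vol
import voluptuous_serialize

schema = vol.Schema({
    vol.Required('username'): str,
    vol.Required('password'): str,
})

# Convert the schema into a JSON-friendly list of field descriptions,
# which is what _prepare_result_json sends to the frontend for forms.
print(voluptuous_serialize.convert(schema))
# e.g. [{'name': 'username', 'required': True, 'type': 'string'}, ...]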
@@ -122,7 +122,6 @@ class BayesianBinarySensor(BinarySensorDevice):
     def async_added_to_hass(self):
         """Call when entity about to be added."""
         @callback
-        # pylint: disable=invalid-name
         def async_threshold_sensor_state_listener(entity, old_state,
                                                   new_state):
             """Handle sensor state changes."""
@@ -16,10 +16,7 @@ DEPENDENCIES = ['homematicip_cloud']
 
 _LOGGER = logging.getLogger(__name__)
 
-ATTR_WINDOW_STATE = 'window_state'
-ATTR_EVENT_DELAY = 'event_delay'
-ATTR_MOTION_DETECTED = 'motion_detected'
-ATTR_ILLUMINATION = 'illumination'
+STATE_SMOKE_OFF = 'IDLE_OFF'
 
 
 async def async_setup_platform(hass, config, async_add_devices,
@@ -30,15 +27,18 @@ async def async_setup_platform(hass, config, async_add_devices,
 
 async def async_setup_entry(hass, config_entry, async_add_devices):
     """Set up the HomematicIP binary sensor from a config entry."""
-    from homematicip.device import (ShutterContact, MotionDetectorIndoor)
+    from homematicip.aio.device import (
+        AsyncShutterContact, AsyncMotionDetectorIndoor, AsyncSmokeDetector)
 
     home = hass.data[HMIPC_DOMAIN][config_entry.data[HMIPC_HAPID]].home
     devices = []
     for device in home.devices:
-        if isinstance(device, ShutterContact):
+        if isinstance(device, AsyncShutterContact):
             devices.append(HomematicipShutterContact(home, device))
-        elif isinstance(device, MotionDetectorIndoor):
+        elif isinstance(device, AsyncMotionDetectorIndoor):
             devices.append(HomematicipMotionDetector(home, device))
+        elif isinstance(device, AsyncSmokeDetector):
+            devices.append(HomematicipSmokeDetector(home, device))
 
     if devices:
         async_add_devices(devices)
@@ -47,10 +47,6 @@ async def async_setup_entry(hass, config_entry, async_add_devices):
 class HomematicipShutterContact(HomematicipGenericDevice, BinarySensorDevice):
     """HomematicIP shutter contact."""
 
-    def __init__(self, home, device):
-        """Initialize the shutter contact."""
-        super().__init__(home, device)
-
     @property
     def device_class(self):
         """Return the class of this sensor."""
@@ -69,11 +65,7 @@ class HomematicipShutterContact(HomematicipGenericDevice, BinarySensorDevice):
 
 
 class HomematicipMotionDetector(HomematicipGenericDevice, BinarySensorDevice):
-    """MomematicIP motion detector."""
+    """HomematicIP motion detector."""
 
-    def __init__(self, home, device):
-        """Initialize the shutter contact."""
-        super().__init__(home, device)
-
     @property
     def device_class(self):
@@ -86,3 +78,17 @@ class HomematicipMotionDetector(HomematicipGenericDevice, BinarySensorDevice):
         if self._device.sabotage:
             return True
         return self._device.motionDetected
+
+
+class HomematicipSmokeDetector(HomematicipGenericDevice, BinarySensorDevice):
+    """HomematicIP smoke detector."""
+
+    @property
+    def device_class(self):
+        """Return the class of this sensor."""
+        return 'smoke'
+
+    @property
+    def is_on(self):
+        """Return true if smoke is detected."""
+        return self._device.smokeDetectorAlarmType != STATE_SMOKE_OFF
homeassistant/components/binary_sensor/openuv.py (new file, 103 lines)
@@ -0,0 +1,103 @@
+"""
+This platform provides binary sensors for OpenUV data.
+
+For more details about this platform, please refer to the documentation at
+https://home-assistant.io/components/binary_sensor.openuv/
+"""
+import logging
+
+from homeassistant.components.binary_sensor import BinarySensorDevice
+from homeassistant.const import CONF_MONITORED_CONDITIONS
+from homeassistant.core import callback
+from homeassistant.helpers.dispatcher import async_dispatcher_connect
+from homeassistant.components.openuv import (
+    BINARY_SENSORS, DATA_PROTECTION_WINDOW, DOMAIN, TOPIC_UPDATE,
+    TYPE_PROTECTION_WINDOW, OpenUvEntity)
+from homeassistant.util.dt import as_local, parse_datetime, utcnow
+
+DEPENDENCIES = ['openuv']
+_LOGGER = logging.getLogger(__name__)
+
+ATTR_PROTECTION_WINDOW_STARTING_TIME = 'start_time'
+ATTR_PROTECTION_WINDOW_STARTING_UV = 'start_uv'
+ATTR_PROTECTION_WINDOW_ENDING_TIME = 'end_time'
+ATTR_PROTECTION_WINDOW_ENDING_UV = 'end_uv'
+
+
+async def async_setup_platform(
+        hass, config, async_add_devices, discovery_info=None):
+    """Set up the OpenUV binary sensor platform."""
+    if discovery_info is None:
+        return
+
+    openuv = hass.data[DOMAIN]
+
+    binary_sensors = []
+    for sensor_type in discovery_info[CONF_MONITORED_CONDITIONS]:
+        name, icon = BINARY_SENSORS[sensor_type]
+        binary_sensors.append(
+            OpenUvBinarySensor(openuv, sensor_type, name, icon))
+
+    async_add_devices(binary_sensors, True)
+
+
+class OpenUvBinarySensor(OpenUvEntity, BinarySensorDevice):
+    """Define a binary sensor for OpenUV."""
+
+    def __init__(self, openuv, sensor_type, name, icon):
+        """Initialize the sensor."""
+        super().__init__(openuv)
+
+        self._icon = icon
+        self._latitude = openuv.client.latitude
+        self._longitude = openuv.client.longitude
+        self._name = name
+        self._sensor_type = sensor_type
+        self._state = None
+
+    @property
+    def icon(self):
+        """Return the icon."""
+        return self._icon
+
+    @property
+    def is_on(self):
+        """Return the status of the sensor."""
+        return self._state
+
+    @property
+    def should_poll(self):
+        """Disable polling."""
+        return False
+
+    @property
+    def unique_id(self) -> str:
+        """Return a unique, HASS-friendly identifier for this entity."""
+        return '{0}_{1}_{2}'.format(
+            self._latitude, self._longitude, self._sensor_type)
+
+    @callback
+    def _update_data(self):
+        """Update the state."""
+        self.async_schedule_update_ha_state(True)
+
+    async def async_added_to_hass(self):
+        """Register callbacks."""
+        async_dispatcher_connect(
+            self.hass, TOPIC_UPDATE, self._update_data)
+
+    async def async_update(self):
+        """Update the state."""
+        data = self.openuv.data[DATA_PROTECTION_WINDOW]['result']
+        if self._sensor_type == TYPE_PROTECTION_WINDOW:
+            self._state = parse_datetime(
+                data['from_time']) <= utcnow() <= parse_datetime(
+                    data['to_time'])
+            self._attrs.update({
+                ATTR_PROTECTION_WINDOW_ENDING_TIME:
+                    as_local(parse_datetime(data['to_time'])),
+                ATTR_PROTECTION_WINDOW_ENDING_UV: data['to_uv'],
+                ATTR_PROTECTION_WINDOW_STARTING_UV: data['from_uv'],
+                ATTR_PROTECTION_WINDOW_STARTING_TIME:
+                    as_local(parse_datetime(data['from_time'])),
+            })
@@ -86,7 +86,6 @@ class ThresholdSensor(BinarySensorDevice):
         self._state = False
         self.sensor_value = None
 
-        # pylint: disable=invalid-name
         @callback
         def async_threshold_sensor_state_listener(
                 entity, old_state, new_state):
@@ -4,93 +4,34 @@ Support for Velbus Binary Sensors.
 For more details about this platform, please refer to the documentation at
 https://home-assistant.io/components/binary_sensor.velbus/
 """
-import asyncio
 import logging
 
-import voluptuous as vol
-
-from homeassistant.const import CONF_NAME, CONF_DEVICES
 from homeassistant.components.binary_sensor import BinarySensorDevice
-from homeassistant.components.binary_sensor import PLATFORM_SCHEMA
-from homeassistant.components.velbus import DOMAIN
-import homeassistant.helpers.config_validation as cv
-
-DEPENDENCIES = ['velbus']
+from homeassistant.components.velbus import (
+    DOMAIN as VELBUS_DOMAIN, VelbusEntity)
 
 _LOGGER = logging.getLogger(__name__)
 
-PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
-    vol.Required(CONF_DEVICES): vol.All(cv.ensure_list, [
-        {
-            vol.Required('module'): cv.positive_int,
-            vol.Required('channel'): cv.positive_int,
-            vol.Required(CONF_NAME): cv.string,
-            vol.Optional('is_pushbutton'): cv.boolean
-        }
-    ])
-})
+DEPENDENCIES = ['velbus']
 
 
-def setup_platform(hass, config, add_devices, discovery_info=None):
+async def async_setup_platform(hass, config, async_add_devices,
+                               discovery_info=None):
     """Set up Velbus binary sensors."""
-    velbus = hass.data[DOMAIN]
-    add_devices(VelbusBinarySensor(sensor, velbus)
-                for sensor in config[CONF_DEVICES])
+    if discovery_info is None:
+        return
+    sensors = []
+    for sensor in discovery_info:
+        module = hass.data[VELBUS_DOMAIN].get_module(sensor[0])
+        channel = sensor[1]
+        sensors.append(VelbusBinarySensor(module, channel))
+    async_add_devices(sensors)
 
 
-class VelbusBinarySensor(BinarySensorDevice):
+class VelbusBinarySensor(VelbusEntity, BinarySensorDevice):
     """Representation of a Velbus Binary Sensor."""
 
-    def __init__(self, binary_sensor, velbus):
-        """Initialize a Velbus light."""
-        self._velbus = velbus
-        self._name = binary_sensor[CONF_NAME]
-        self._module = binary_sensor['module']
-        self._channel = binary_sensor['channel']
-        self._is_pushbutton = 'is_pushbutton' in binary_sensor \
-            and binary_sensor['is_pushbutton']
-        self._state = False
-
-    @asyncio.coroutine
-    def async_added_to_hass(self):
-        """Add listener for Velbus messages on bus."""
-        yield from self.hass.async_add_job(
-            self._velbus.subscribe, self._on_message)
-
-    def _on_message(self, message):
-        import velbus
-        if isinstance(message, velbus.PushButtonStatusMessage):
-            if message.address == self._module and \
-                    self._channel in message.get_channels():
-                if self._is_pushbutton:
-                    if self._channel in message.closed:
-                        self._toggle()
-                    else:
-                        pass
-                else:
-                    self._toggle()
-
-    def _toggle(self):
-        if self._state is True:
-            self._state = False
-        else:
-            self._state = True
-        self.schedule_update_ha_state()
-
-    @property
-    def should_poll(self):
-        """No polling needed."""
-        return False
-
-    @property
-    def name(self):
-        """Return the display name of this sensor."""
-        return self._name
-
     @property
     def is_on(self):
         """Return true if the sensor is on."""
-        return self._state
+        return self._module.is_closed(self._channel)
@@ -17,7 +17,7 @@ import homeassistant.helpers.config_validation as cv
 
 _LOGGER = logging.getLogger(__name__)
 
-REQUIREMENTS = ['holidays==0.9.5']
+REQUIREMENTS = ['holidays==0.9.6']
 
 # List of all countries currently supported by holidays
 # There seems to be no way to get the list out at runtime
@@ -25,9 +25,9 @@ ALL_COUNTRIES = ['Argentina', 'AR', 'Australia', 'AU', 'Austria', 'AT',
                  'Belgium', 'BE', 'Canada', 'CA', 'Colombia', 'CO', 'Czech',
                  'CZ', 'Denmark', 'DK', 'England', 'EuropeanCentralBank',
                  'ECB', 'TAR', 'Finland', 'FI', 'France', 'FRA', 'Germany',
-                 'DE', 'Hungary', 'HU', 'Ireland', 'Isle of Man', 'Italy',
-                 'IT', 'Japan', 'JP', 'Mexico', 'MX', 'Netherlands', 'NL',
-                 'NewZealand', 'NZ', 'Northern Ireland',
+                 'DE', 'Hungary', 'HU', 'India', 'IND', 'Ireland',
+                 'Isle of Man', 'Italy', 'IT', 'Japan', 'JP', 'Mexico', 'MX',
+                 'Netherlands', 'NL', 'NewZealand', 'NZ', 'Northern Ireland',
                  'Norway', 'NO', 'Polish', 'PL', 'Portugal', 'PT',
                  'PortugalExt', 'PTE', 'Scotland', 'Slovenia', 'SI',
                  'Slovakia', 'SK', 'South Africa', 'ZA', 'Spain', 'ES',
@@ -26,9 +26,6 @@ CONF_PROJECT_DUE_DATE = 'due_date_days'
 CONF_PROJECT_LABEL_WHITELIST = 'labels'
 CONF_PROJECT_WHITELIST = 'include_projects'
 
-# https://github.com/PyCQA/pylint/pull/2320
-# pylint: disable=fixme
-
 # Calendar Platform: Does this calendar event last all day?
 ALL_DAY = 'all_day'
 # Attribute: All tasks in this project
@@ -57,6 +57,7 @@ class YiCamera(Camera):
         self._last_url = None
         self._manager = hass.data[DATA_FFMPEG]
         self._name = config[CONF_NAME]
+        self._is_on = True
         self.host = config[CONF_HOST]
         self.port = config[CONF_PORT]
         self.path = config[CONF_PATH]
@@ -68,6 +69,11 @@ class YiCamera(Camera):
         """Camera brand."""
         return DEFAULT_BRAND
 
+    @property
+    def is_on(self):
+        """Determine whether the camera is on."""
+        return self._is_on
+
     @property
     def name(self):
         """Return the name of this camera."""
@@ -81,7 +87,7 @@ class YiCamera(Camera):
         try:
             await ftp.connect(self.host)
             await ftp.login(self.user, self.passwd)
-        except StatusCodeError as err:
+        except (ConnectionRefusedError, StatusCodeError) as err:
             raise PlatformNotReady(err)
 
         try:
@@ -101,12 +107,13 @@ class YiCamera(Camera):
                 return None
 
             await ftp.quit()
+            self._is_on = True
             return 'ftp://{0}:{1}@{2}:{3}{4}/{5}/{6}'.format(
                 self.user, self.passwd, self.host, self.port, self.path,
                 latest_dir, videos[-1])
         except (ConnectionRefusedError, StatusCodeError) as err:
             _LOGGER.error('Error while fetching video: %s', err)
+            self._is_on = False
             return None
 
     async def async_camera_image(self):
@@ -114,7 +121,7 @@ class YiCamera(Camera):
         from haffmpeg import ImageFrame, IMAGE_JPEG
 
         url = await self._get_latest_video_url()
-        if url != self._last_url:
+        if url and url != self._last_url:
             ffmpeg = ImageFrame(self._manager.binary, loop=self.hass.loop)
             self._last_image = await asyncio.shield(
                 ffmpeg.get_image(
@@ -130,6 +137,9 @@ class YiCamera(Camera):
         """Generate an HTTP MJPEG stream from the camera."""
         from haffmpeg import CameraMjpeg
 
+        if not self._is_on:
+            return
+
         stream = CameraMjpeg(self._manager.binary, loop=self.hass.loop)
         await stream.open_camera(
             self._last_url, extra_cmd=self._extra_arguments)
@@ -1,5 +1,5 @@
 """Component to embed Google Cast."""
-from homeassistant import data_entry_flow
+from homeassistant import config_entries
 from homeassistant.helpers import config_entry_flow
 
 
@@ -15,7 +15,7 @@ async def async_setup(hass, config):
 
     if conf is not None:
         hass.async_create_task(hass.config_entries.flow.async_init(
-            DOMAIN, source=data_entry_flow.SOURCE_IMPORT))
+            DOMAIN, context={'source': config_entries.SOURCE_IMPORT}))
 
     return True
 
@@ -18,7 +18,7 @@ from homeassistant.const import (
     CONF_HOST, TEMP_FAHRENHEIT, ATTR_TEMPERATURE, PRECISION_HALVES)
 import homeassistant.helpers.config_validation as cv
 
-REQUIREMENTS = ['radiotherm==1.3']
+REQUIREMENTS = ['radiotherm==1.4.1']
 
 _LOGGER = logging.getLogger(__name__)
 
@@ -7,7 +7,7 @@ from homeassistant.helpers.data_entry_flow import (
     FlowManagerIndexView, FlowManagerResourceView)
 
 
-REQUIREMENTS = ['voluptuous-serialize==1']
+REQUIREMENTS = ['voluptuous-serialize==2.0.0']
 
 
 @asyncio.coroutine
@@ -96,7 +96,7 @@ class ConfigManagerFlowIndexView(FlowManagerIndexView):
 
         return self.json([
             flw for flw in hass.config_entries.flow.async_progress()
-            if flw['source'] != data_entry_flow.SOURCE_USER])
+            if flw['context']['source'] != config_entries.SOURCE_USER])
 
 
 class ConfigManagerFlowResourceView(FlowManagerResourceView):
@@ -4,8 +4,10 @@ Support for Tahoma cover - shutters etc.
For more details about this platform, please refer to the documentation at
https://home-assistant.io/components/cover.tahoma/
"""
+from datetime import timedelta
import logging

+from homeassistant.util.dt import utcnow
from homeassistant.components.cover import CoverDevice, ATTR_POSITION
from homeassistant.components.tahoma import (
DOMAIN as TAHOMA_DOMAIN, TahomaDevice)
@@ -14,6 +16,13 @@ DEPENDENCIES = ['tahoma']

_LOGGER = logging.getLogger(__name__)

+ATTR_MEM_POS = 'memorized_position'
+ATTR_RSSI_LEVEL = 'rssi_level'
+ATTR_LOCK_START_TS = 'lock_start_ts'
+ATTR_LOCK_END_TS = 'lock_end_ts'
+ATTR_LOCK_LEVEL = 'lock_level'
+ATTR_LOCK_ORIG = 'lock_originator'
+

def setup_platform(hass, config, add_devices, discovery_info=None):
"""Set up the Tahoma covers."""
@@ -27,27 +36,107 @@ def setup_platform(hass, config, add_devices, discovery_info=None):
class TahomaCover(TahomaDevice, CoverDevice):
"""Representation a Tahoma Cover."""

+def __init__(self, tahoma_device, controller):
+"""Initialize the device."""
+super().__init__(tahoma_device, controller)
+
+self._closure = 0
+# 100 equals open
+self._position = 100
+self._closed = False
+self._rssi_level = None
+self._icon = None
+# Can be 0 and bigger
+self._lock_timer = 0
+self._lock_start_ts = None
+self._lock_end_ts = None
+# Can be 'comfortLevel1', 'comfortLevel2', 'comfortLevel3',
+# 'comfortLevel4', 'environmentProtection', 'humanProtection',
+# 'userLevel1', 'userLevel2'
+self._lock_level = None
+# Can be 'LSC', 'SAAC', 'SFC', 'UPS', 'externalGateway', 'localUser',
+# 'myself', 'rain', 'security', 'temperature', 'timer', 'user', 'wind'
+self._lock_originator = None
+
def update(self):
"""Update method."""
self.controller.get_states([self.tahoma_device])
+
+# For vertical covers
+self._closure = self.tahoma_device.active_states.get(
+'core:ClosureState')
+# For horizontal covers
+if self._closure is None:
+self._closure = self.tahoma_device.active_states.get(
+'core:DeploymentState')
+
+# For all, if available
+if 'core:PriorityLockTimerState' in self.tahoma_device.active_states:
+old_lock_timer = self._lock_timer
+self._lock_timer = \
+self.tahoma_device.active_states['core:PriorityLockTimerState']
+# Derive timestamps from _lock_timer, only if not already set or
+# something has changed
+if self._lock_timer > 0:
+_LOGGER.debug("Update %s, lock_timer: %d", self._name,
+self._lock_timer)
+if self._lock_start_ts is None:
+self._lock_start_ts = utcnow()
+if self._lock_end_ts is None or \
+old_lock_timer != self._lock_timer:
+self._lock_end_ts = utcnow() +\
+timedelta(seconds=self._lock_timer)
+else:
+self._lock_start_ts = None
+self._lock_end_ts = None
+else:
+self._lock_timer = 0
+self._lock_start_ts = None
+self._lock_end_ts = None
+
+self._lock_level = self.tahoma_device.active_states.get(
+'io:PriorityLockLevelState')
+
+self._lock_originator = self.tahoma_device.active_states.get(
+'io:PriorityLockOriginatorState')
+
+self._rssi_level = self.tahoma_device.active_states.get(
+'core:RSSILevelState')
+
+# Define which icon to use
+if self._lock_timer > 0:
+if self._lock_originator == 'wind':
+self._icon = 'mdi:weather-windy'
+else:
+self._icon = 'mdi:lock-alert'
+else:
+self._icon = None
+
+# Define current position.
+# _position: 0 is closed, 100 is fully open.
+# 'core:ClosureState': 100 is closed, 0 is fully open.
+if self._closure is not None:
+self._position = 100 - self._closure
+if self._position <= 5:
+self._position = 0
+if self._position >= 95:
+self._position = 100
+self._closed = self._position == 0
+else:
+self._position = None
+if 'core:OpenClosedState' in self.tahoma_device.active_states:
+self._closed = \
+self.tahoma_device.active_states['core:OpenClosedState']\
+== 'closed'
+else:
+self._closed = False
+
+_LOGGER.debug("Update %s, position: %d", self._name, self._position)
+
@property
def current_cover_position(self):
-"""
+"""Return current position of cover."""
-Return current position of cover.
+return self._position
-
-0 is closed, 100 is fully open.
-"""
-try:
-position = 100 - \
-self.tahoma_device.active_states['core:ClosureState']
-if position <= 5:
-return 0
-if position >= 95:
-return 100
-return position
-except KeyError:
-return None

def set_cover_position(self, **kwargs):
"""Move the cover to a specific position."""
@@ -56,8 +145,7 @@ class TahomaCover(TahomaDevice, CoverDevice):
@property
def is_closed(self):
"""Return if the cover is closed."""
-if self.current_cover_position is not None:
+return self._closed
-return self.current_cover_position == 0


@property
def device_class(self):
@@ -66,13 +154,47 @@ class TahomaCover(TahomaDevice, CoverDevice):
return 'window'
return None

+@property
+def device_state_attributes(self):
+"""Return the device state attributes."""
+attr = {}
+super_attr = super().device_state_attributes
+if super_attr is not None:
+attr.update(super_attr)
+
+if 'core:Memorized1PositionState' in self.tahoma_device.active_states:
+attr[ATTR_MEM_POS] = self.tahoma_device.active_states[
+'core:Memorized1PositionState']
+if self._rssi_level is not None:
+attr[ATTR_RSSI_LEVEL] = self._rssi_level
+if self._lock_start_ts is not None:
+attr[ATTR_LOCK_START_TS] = self._lock_start_ts.isoformat()
+if self._lock_end_ts is not None:
+attr[ATTR_LOCK_END_TS] = self._lock_end_ts.isoformat()
+if self._lock_level is not None:
+attr[ATTR_LOCK_LEVEL] = self._lock_level
+if self._lock_originator is not None:
+attr[ATTR_LOCK_ORIG] = self._lock_originator
+return attr
+
+@property
+def icon(self):
+"""Return the icon to use in the frontend, if any."""
+return self._icon
+
def open_cover(self, **kwargs):
"""Open the cover."""
-self.apply_action('open')
+if self.tahoma_device.type == 'io:HorizontalAwningIOComponent':
+self.apply_action('close')
+else:
+self.apply_action('open')

def close_cover(self, **kwargs):
"""Close the cover."""
-self.apply_action('close')
+if self.tahoma_device.type == 'io:HorizontalAwningIOComponent':
+self.apply_action('open')
+else:
+self.apply_action('close')

def stop_cover(self, **kwargs):
"""Stop the cover."""
@@ -87,5 +209,10 @@ class TahomaCover(TahomaDevice, CoverDevice):
'rts:ExteriorVenetianBlindRTSComponent',
'rts:BlindRTSComponent'):
self.apply_action('my')
+elif self.tahoma_device.type in \
+('io:HorizontalAwningIOComponent',
+'io:RollerShutterGenericIOComponent',
+'io:VerticalExteriorAwningIOComponent'):
+self.apply_action('stop')
else:
self.apply_action('stopIdentify')
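The position handling added above inverts Somfy's closure value (100 = closed) and snaps near-endpoints to the endpoints. A small stand-alone sketch of that arithmetic (function name is illustrative):

    def closure_to_position(closure):
        """Map a 'core:ClosureState' value (100 = closed) to an HA position (100 = open)."""
        if closure is None:
            return None
        position = 100 - closure
        if position <= 5:   # treat almost-closed as fully closed
            return 0
        if position >= 95:  # treat almost-open as fully open
            return 100
        return position

    assert closure_to_position(100) == 0
    assert closure_to_position(3) == 100
    assert closure_to_position(40) == 60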
@@ -24,7 +24,8 @@
"data": {
"allow_clip_sensor": "Import virtueller Sensoren zulassen",
"allow_deconz_groups": "Import von deCONZ-Gruppen zulassen"
-}
+},
+"title": "Weitere Konfigurationsoptionen f\u00fcr deCONZ"
}
},
"title": "deCONZ Zigbee Gateway"
@@ -6,6 +6,7 @@ https://home-assistant.io/components/deconz/
"""
import voluptuous as vol

+from homeassistant import config_entries
from homeassistant.const import (
CONF_API_KEY, CONF_EVENT, CONF_HOST,
CONF_ID, CONF_PORT, EVENT_HOMEASSISTANT_STOP)
@@ -22,7 +23,7 @@ from .const import (
CONF_ALLOW_CLIP_SENSOR, CONFIG_FILE, DATA_DECONZ_EVENT,
DATA_DECONZ_ID, DATA_DECONZ_UNSUB, DOMAIN, _LOGGER)

-REQUIREMENTS = ['pydeconz==42']
+REQUIREMENTS = ['pydeconz==43']

CONFIG_SCHEMA = vol.Schema({
DOMAIN: vol.Schema({
@@ -60,7 +61,9 @@ async def async_setup(hass, config):
deconz_config = config[DOMAIN]
if deconz_config and not configured_hosts(hass):
hass.async_add_job(hass.config_entries.flow.async_init(
-DOMAIN, source='import', data=deconz_config
+DOMAIN,
+context={'source': config_entries.SOURCE_IMPORT},
+data=deconz_config
))
return True

@@ -96,7 +99,7 @@ async def async_setup_entry(hass, config_entry):
hass.data[DATA_DECONZ_EVENT] = []
hass.data[DATA_DECONZ_UNSUB] = []

-for component in ['binary_sensor', 'light', 'scene', 'sensor']:
+for component in ['binary_sensor', 'light', 'scene', 'sensor', 'switch']:
hass.async_create_task(hass.config_entries.async_forward_entry_setup(
config_entry, component))

@@ -33,6 +33,10 @@ class DeconzFlowHandler(data_entry_flow.FlowHandler):
self.bridges = []
self.deconz_config = {}

+async def async_step_user(self, user_input=None):
+"""Handle a flow initialized by the user."""
+return await self.async_step_init(user_input)
+
async def async_step_init(self, user_input=None):
"""Handle a deCONZ config flow start.

@@ -14,3 +14,7 @@ CONF_ALLOW_DECONZ_GROUPS = 'allow_deconz_groups'

ATTR_DARK = 'dark'
ATTR_ON = 'on'
+
+POWER_PLUGS = ["On/Off plug-in unit", "Smart plug"]
+SIRENS = ["Warning device"]
+SWITCH_TYPES = POWER_PLUGS + SIRENS
@@ -5,24 +5,22 @@ For more details about this platform, please refer to the documentation at
https://home-assistant.io/components/device_tracker.bt_home_hub_5/
"""
import logging
-import re
-import xml.etree.ElementTree as ET
-import json
-from urllib.parse import unquote

-import requests
import voluptuous as vol

import homeassistant.helpers.config_validation as cv
-from homeassistant.components.device_tracker import (
+from homeassistant.components.device_tracker import (DOMAIN, PLATFORM_SCHEMA,
-DOMAIN, PLATFORM_SCHEMA, DeviceScanner)
+DeviceScanner)
from homeassistant.const import CONF_HOST

+REQUIREMENTS = ['bthomehub5-devicelist==0.1.1']
+
_LOGGER = logging.getLogger(__name__)
-_MAC_REGEX = re.compile(r'(([0-9A-Fa-f]{1,2}\:){5}[0-9A-Fa-f]{1,2})')
+
+CONF_DEFAULT_IP = '192.168.1.254'

PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
-vol.Required(CONF_HOST): cv.string
+vol.Optional(CONF_HOST, default=CONF_DEFAULT_IP): cv.string,
})


@@ -38,18 +36,19 @@ class BTHomeHub5DeviceScanner(DeviceScanner):

def __init__(self, config):
"""Initialise the scanner."""
+import bthomehub5_devicelist
+
_LOGGER.info("Initialising BT Home Hub 5")
-self.host = config.get(CONF_HOST, '192.168.1.254')
+self.host = config[CONF_HOST]
self.last_results = {}
-self.url = 'http://{}/nonAuth/home_status.xml'.format(self.host)

# Test the router is accessible
-data = _get_homehub_data(self.url)
+data = bthomehub5_devicelist.get_devicelist(self.host)
self.success_init = data is not None

def scan_devices(self):
"""Scan for new devices and return a list with found device IDs."""
-self._update_info()
+self.update_info()

return (device for device in self.last_results)

@@ -57,71 +56,23 @@ class BTHomeHub5DeviceScanner(DeviceScanner):
"""Return the name of the given device or None if we don't know."""
# If not initialised and not already scanned and not found.
if device not in self.last_results:
-self._update_info()
+self.update_info()

if not self.last_results:
return None

return self.last_results.get(device)

-def _update_info(self):
+def update_info(self):
-"""Ensure the information from the BT Home Hub 5 is up to date.
+"""Ensure the information from the BT Home Hub 5 is up to date."""
+import bthomehub5_devicelist
-
-Return boolean if scanning successful.
-"""
-if not self.success_init:
-return False

_LOGGER.info("Scanning")

-data = _get_homehub_data(self.url)
+data = bthomehub5_devicelist.get_devicelist(self.host)

if not data:
_LOGGER.warning("Error scanning devices")
-return False
+return

self.last_results = data

-return True
-
-
-def _get_homehub_data(url):
-"""Retrieve data from BT Home Hub 5 and return parsed result."""
-try:
-response = requests.get(url, timeout=5)
-except requests.exceptions.Timeout:
-_LOGGER.exception("Connection to the router timed out")
-return
-if response.status_code == 200:
-return _parse_homehub_response(response.text)
-_LOGGER.error("Invalid response from Home Hub: %s", response)
-
-
-def _parse_homehub_response(data_str):
-"""Parse the BT Home Hub 5 data format."""
-root = ET.fromstring(data_str)
-
-dirty_json = root.find('known_device_list').get('value')
-
-# Normalise the JavaScript data to JSON.
-clean_json = unquote(dirty_json.replace('\'', '\"')
-.replace('{', '{\"')
-.replace(':\"', '\":\"')
-.replace('\",', '\",\"'))
-
-known_devices = [x for x in json.loads(clean_json) if x]
-
-devices = {}
-
-for device in known_devices:
-name = device.get('name')
-mac = device.get('mac')
-
-if _MAC_REGEX.match(mac) or ',' in mac:
-for mac_addr in mac.split(','):
-if _MAC_REGEX.match(mac_addr):
-devices[mac_addr] = name
-else:
-devices[mac] = name
-
-return devices
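The hand-rolled XML/JSON scraping above is replaced by the bthomehub5-devicelist dependency. Judging by how the scanner stores and looks up results, get_devicelist appears to return a mapping of MAC address to device name; a minimal sketch of exercising it on its own (host value is an assumption):

    import bthomehub5_devicelist

    def list_devices(host='192.168.1.254'):
        """Print the MAC -> name entries the scanner keeps in last_results."""
        devices = bthomehub5_devicelist.get_devicelist(host)
        for mac, name in (devices or {}).items():
            print(mac, name)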
@@ -17,7 +17,7 @@ from homeassistant.helpers.event import track_time_interval
from homeassistant.helpers.typing import ConfigType
from homeassistant.util import slugify

-REQUIREMENTS = ['locationsharinglib==2.0.7']
+REQUIREMENTS = ['locationsharinglib==2.0.11']

_LOGGER = logging.getLogger(__name__)

@@ -26,18 +26,21 @@ ATTR_FULL_NAME = 'full_name'
ATTR_LAST_SEEN = 'last_seen'
ATTR_NICKNAME = 'nickname'

+CONF_MAX_GPS_ACCURACY = 'max_gps_accuracy'
+
CREDENTIALS_FILE = '.google_maps_location_sharing.cookies'

MIN_TIME_BETWEEN_SCANS = timedelta(seconds=30)

PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
-vol.Required(CONF_USERNAME): cv.string,
vol.Required(CONF_PASSWORD): cv.string,
+vol.Required(CONF_USERNAME): cv.string,
+vol.Optional(CONF_MAX_GPS_ACCURACY, default=100000): vol.Coerce(float),
})


def setup_scanner(hass, config: ConfigType, see, discovery_info=None):
-"""Set up the scanner."""
+"""Set up the Google Maps Location sharing scanner."""
scanner = GoogleMapsScanner(hass, config, see)
return scanner.success_init

@@ -53,6 +56,7 @@ class GoogleMapsScanner:
self.see = see
self.username = config[CONF_USERNAME]
self.password = config[CONF_PASSWORD]
+self.max_gps_accuracy = config[CONF_MAX_GPS_ACCURACY]

try:
self.service = Service(self.username, self.password,
@@ -76,6 +80,14 @@ class GoogleMapsScanner:
_LOGGER.warning("No location(s) shared with this account")
return

+if self.max_gps_accuracy is not None and \
+person.accuracy > self.max_gps_accuracy:
+_LOGGER.info("Ignoring %s update because expected GPS "
+"accuracy %s is not met: %s",
+person.nickname, self.max_gps_accuracy,
+person.accuracy)
+continue
+
attrs = {
ATTR_ADDRESS: person.address,
ATTR_FULL_NAME: person.full_name,
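The accuracy filter added above drops updates whose reported GPS accuracy (error radius) is worse than the configured threshold. A tiny sketch of the same check in isolation (names are illustrative):

    def accurate_enough(reported_accuracy, max_gps_accuracy=100000):
        """Return True when an update should be forwarded to see()."""
        # Larger accuracy values mean a larger error radius, so "worse".
        return max_gps_accuracy is None or reported_accuracy <= max_gps_accuracy

    assert accurate_enough(50, 100) is True
    assert accurate_enough(500, 100) is False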
@@ -85,8 +85,7 @@ class HuaweiDeviceScanner(DeviceScanner):
active_clients = [client for client in data if client.state]
self.last_results = active_clients

-# pylint: disable=logging-not-lazy
-_LOGGER.debug("Active clients: " + "\n"
+_LOGGER.debug("Active clients: %s", "\n"
.join((client.mac + " " + client.name)
for client in active_clients))
return True
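The Huawei change swaps string concatenation for %-style arguments, which is the pattern the logging-not-lazy lint rule asks for: the join argument is still evaluated, but the final message is only formatted if the record is actually emitted. A generic illustration:

    import logging

    _LOGGER = logging.getLogger(__name__)
    clients = [('aa:bb:cc:dd:ee:ff', 'laptop')]

    # Discouraged: the full message string is built before logging sees it.
    _LOGGER.debug("Active clients: " + "\n".join(m + " " + n for m, n in clients))

    # Preferred: formatting of the message is deferred to the logging framework.
    _LOGGER.debug("Active clients: %s", "\n".join(m + " " + n for m, n in clients))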
@@ -5,18 +5,18 @@ For more details about this platform, please refer to the documentation at
https://home-assistant.io/components/device_tracker.keenetic_ndms2/
"""
import logging
-from collections import namedtuple

-import requests
import voluptuous as vol

import homeassistant.helpers.config_validation as cv
from homeassistant.components.device_tracker import (
DOMAIN, PLATFORM_SCHEMA, DeviceScanner)
from homeassistant.const import (
-CONF_HOST, CONF_PASSWORD, CONF_USERNAME
+CONF_HOST, CONF_PORT, CONF_PASSWORD, CONF_USERNAME
)

+REQUIREMENTS = ['ndms2_client==0.0.3']
+
_LOGGER = logging.getLogger(__name__)

# Interface name to track devices for. Most likely one will not need to
@@ -25,11 +25,13 @@ _LOGGER = logging.getLogger(__name__)
CONF_INTERFACE = 'interface'

DEFAULT_INTERFACE = 'Home'
+DEFAULT_PORT = 23


PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
vol.Required(CONF_HOST): cv.string,
vol.Required(CONF_USERNAME): cv.string,
+vol.Required(CONF_PORT, default=DEFAULT_PORT): cv.port,
vol.Required(CONF_PASSWORD): cv.string,
vol.Required(CONF_INTERFACE, default=DEFAULT_INTERFACE): cv.string,
})
@@ -42,21 +44,22 @@ def get_scanner(_hass, config):
return scanner if scanner.success_init else None


-Device = namedtuple('Device', ['mac', 'name'])
-
-
class KeeneticNDMS2DeviceScanner(DeviceScanner):
"""This class scans for devices using keenetic NDMS2 web interface."""

def __init__(self, config):
"""Initialize the scanner."""
+from ndms2_client import Client, TelnetConnection
self.last_results = []

-self._url = 'http://%s/rci/show/ip/arp' % config[CONF_HOST]
self._interface = config[CONF_INTERFACE]

-self._username = config.get(CONF_USERNAME)
-self._password = config.get(CONF_PASSWORD)
+self._client = Client(TelnetConnection(
+config.get(CONF_HOST),
+config.get(CONF_PORT),
+config.get(CONF_USERNAME),
+config.get(CONF_PASSWORD),
+))

self.success_init = self._update_info()
_LOGGER.info("Scanner initialized")
@@ -69,53 +72,32 @@ class KeeneticNDMS2DeviceScanner(DeviceScanner):

def get_device_name(self, device):
"""Return the name of the given device or None if we don't know."""
-filter_named = [result.name for result in self.last_results
-if result.mac == device]
-
-if filter_named:
-return filter_named[0]
-return None
+name = next((
+result.name for result in self.last_results
+if result.mac == device), None)
+return name
+
+def get_extra_attributes(self, device):
+"""Return the IP of the given device."""
+attributes = next((
+{'ip': result.ip} for result in self.last_results
+if result.mac == device), {})
+return attributes

def _update_info(self):
"""Get ARP from keenetic router."""
-_LOGGER.info("Fetching...")
+_LOGGER.debug("Fetching devices from router...")

-last_results = []
+from ndms2_client import ConnectionException

-# doing a request
try:
-from requests.auth import HTTPDigestAuth
-res = requests.get(self._url, timeout=10, auth=HTTPDigestAuth(
-self._username, self._password
-))
-except requests.exceptions.Timeout:
-_LOGGER.error(
-"Connection to the router timed out at URL %s", self._url)
-return False
-if res.status_code != 200:
-_LOGGER.error(
-"Connection failed with http code %s", res.status_code)
-return False
-try:
-result = res.json()
-except ValueError:
-# If json decoder could not parse the response
-_LOGGER.error("Failed to parse response from router")
-return False
-
-# parsing response
-for info in result:
-if info.get('interface') != self._interface:
-continue
-mac = info.get('mac')
-name = info.get('name')
-# No address = no item :)
-if mac is None:
-continue
-
-last_results.append(Device(mac.upper(), name))
-
-self.last_results = last_results
-
-_LOGGER.info("Request successful")
-return True
+self.last_results = [
+dev
+for dev in self._client.get_devices()
+if dev.interface == self._interface
+]
+_LOGGER.debug("Successfully fetched data from router")
+return True
+
+except ConnectionException:
+_LOGGER.error("Error fetching data from router")
+return False
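The scanner now delegates transport and parsing to the ndms2_client package instead of scraping the web interface. A minimal sketch of the same client calls used above, outside the platform (host, port and credentials are placeholders):

    from ndms2_client import Client, ConnectionException, TelnetConnection

    def fetch_home_devices(host, port, username, password, interface='Home'):
        """Return the devices seen on one router interface, or [] on failure."""
        client = Client(TelnetConnection(host, port, username, password))
        try:
            return [dev for dev in client.get_devices()
                    if dev.interface == interface]
        except ConnectionException:
            return []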
@@ -15,7 +15,7 @@ from homeassistant.const import (
CONF_HOST, CONF_PASSWORD, CONF_USERNAME, CONF_PORT, CONF_SSL,
CONF_DEVICES, CONF_EXCLUDE)

-REQUIREMENTS = ['pynetgear==0.4.0']
+REQUIREMENTS = ['pynetgear==0.4.1']

_LOGGER = logging.getLogger(__name__)

@@ -38,7 +38,7 @@ class Host:
self.dev_id = dev_id
self._count = config[CONF_PING_COUNT]
if sys.platform == 'win32':
-self._ping_cmd = ['ping', '-n 1', '-w', '1000', self.ip_address]
+self._ping_cmd = ['ping', '-n', '1', '-w', '1000', self.ip_address]
else:
self._ping_cmd = ['ping', '-n', '-q', '-c1', '-W1',
self.ip_address]
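The Windows ping fix is purely about argument splitting: in an argv list, '-n 1' reaches the program as one argument ("-n 1"), not a flag plus a value, so Windows ping rejects it. A generic standard-library illustration:

    import subprocess

    # Broken: the flag and its value are fused into a single argv entry.
    bad_cmd = ['ping', '-n 1', '-w', '1000', '127.0.0.1']

    # Fixed: flag and value are separate argv entries.
    good_cmd = ['ping', '-n', '1', '-w', '1000', '127.0.0.1']

    # subprocess passes each list element through unchanged; there is no
    # shell word-splitting to rescue the fused form.
    subprocess.call(good_cmd)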
homeassistant/components/device_tracker/ritassist.py (new file, 87 lines)
@@ -0,0 +1,87 @@
"""
Support for RitAssist Platform.

For more details about this platform, please refer to the documentation at
https://home-assistant.io/components/device_tracker.ritassist/
"""
import logging

import requests
import voluptuous as vol

import homeassistant.helpers.config_validation as cv
from homeassistant.components.device_tracker import PLATFORM_SCHEMA
from homeassistant.const import CONF_USERNAME, CONF_PASSWORD
from homeassistant.helpers.event import track_utc_time_change

REQUIREMENTS = ['ritassist==0.5']

_LOGGER = logging.getLogger(__name__)

CONF_CLIENT_ID = 'client_id'
CONF_CLIENT_SECRET = 'client_secret'
CONF_INCLUDE = 'include'

PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
vol.Required(CONF_USERNAME): cv.string,
vol.Required(CONF_PASSWORD): cv.string,
vol.Required(CONF_CLIENT_ID): cv.string,
vol.Required(CONF_CLIENT_SECRET): cv.string,
vol.Optional(CONF_INCLUDE, default=[]):
vol.All(cv.ensure_list, [cv.string])
})


def setup_scanner(hass, config: dict, see, discovery_info=None):
"""Set up the DeviceScanner and check if login is valid."""
scanner = RitAssistDeviceScanner(config, see)
if not scanner.login(hass):
_LOGGER.error('RitAssist authentication failed')
return False
return True


class RitAssistDeviceScanner:
"""Define a scanner for the RitAssist platform."""

def __init__(self, config, see):
"""Initialize RitAssistDeviceScanner."""
from ritassist import API

self._include = config.get(CONF_INCLUDE)
self._see = see

self._api = API(config.get(CONF_CLIENT_ID),
config.get(CONF_CLIENT_SECRET),
config.get(CONF_USERNAME),
config.get(CONF_PASSWORD))

def setup(self, hass):
"""Setup a timer and start gathering devices."""
self._refresh()
track_utc_time_change(hass,
lambda now: self._refresh(),
second=range(0, 60, 30))

def login(self, hass):
"""Perform a login on the RitAssist API."""
if self._api.login():
self.setup(hass)
return True
return False

def _refresh(self) -> None:
"""Refresh device information from the platform."""
try:
devices = self._api.get_devices()

for device in devices:
if (not self._include or
device.license_plate in self._include):
self._see(dev_id=device.plate_as_id,
gps=(device.latitude, device.longitude),
attributes=device.state_attributes,
icon='mdi:car')

except requests.exceptions.ConnectionError:
_LOGGER.error('ConnectionError: Could not connect to RitAssist')
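The new platform polls on a fixed cadence by registering a time-change listener that fires at second 0 and second 30 of every minute. A stripped-down sketch of that scheduling pattern, assuming a caller-supplied refresh callable:

    from homeassistant.helpers.event import track_utc_time_change

    def schedule_refresh(hass, refresh):
        """Call refresh() immediately, then every 30 seconds."""
        refresh()  # prime the data once at startup
        track_utc_time_change(hass, lambda now: refresh(),
                              second=range(0, 60, 30))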
@@ -14,7 +14,7 @@ from homeassistant.components.device_tracker import (
DOMAIN, PLATFORM_SCHEMA, DeviceScanner)
from homeassistant.const import CONF_HOST

-REQUIREMENTS = ['pysnmp==4.4.4']
+REQUIREMENTS = ['pysnmp==4.4.5']

_LOGGER = logging.getLogger(__name__)

@@ -13,7 +13,7 @@ import os

import voluptuous as vol

-from homeassistant import data_entry_flow
+from homeassistant import config_entries
from homeassistant.core import callback
from homeassistant.const import EVENT_HOMEASSISTANT_START
import homeassistant.helpers.config_validation as cv
@@ -21,7 +21,7 @@ from homeassistant.helpers.event import async_track_point_in_utc_time
from homeassistant.helpers.discovery import async_load_platform, async_discover
import homeassistant.util.dt as dt_util

-REQUIREMENTS = ['netdisco==1.5.0']
+REQUIREMENTS = ['netdisco==2.0.0']

DOMAIN = 'discovery'

@@ -89,6 +89,7 @@ SERVICE_HANDLERS = {

OPTIONAL_SERVICE_HANDLERS = {
SERVICE_HOMEKIT: ('homekit_controller', None),
+'dlna_dmr': ('media_player', 'dlna_dmr'),
}

CONF_IGNORE = 'ignore'
@@ -137,7 +138,7 @@ async def async_setup(hass, config):
if service in CONFIG_ENTRY_HANDLERS:
await hass.config_entries.flow.async_init(
CONFIG_ENTRY_HANDLERS[service],
-source=data_entry_flow.SOURCE_DISCOVERY,
+context={'source': config_entries.SOURCE_DISCOVERY},
data=info
)
return
@@ -18,6 +18,9 @@ _LOGGER = logging.getLogger(__name__)

CONF_NIGHT_MODE = 'night_mode'

+ATTR_IS_NIGHT_MODE = 'is_night_mode'
+ATTR_IS_AUTO_MODE = 'is_auto_mode'
+
DEPENDENCIES = ['dyson']
DYSON_FAN_DEVICES = 'dyson_fan_devices'

@@ -158,7 +161,7 @@ class DysonPureCoolLinkDevice(FanEntity):
def is_on(self):
"""Return true if the entity is on."""
if self._device.state:
-return self._device.state.fan_state == "FAN"
+return self._device.state.fan_mode == "FAN"
return False

@property
@@ -232,3 +235,11 @@ class DysonPureCoolLinkDevice(FanEntity):
def supported_features(self) -> int:
"""Flag supported features."""
return SUPPORT_OSCILLATE | SUPPORT_SET_SPEED
+
+@property
+def device_state_attributes(self) -> dict:
+"""Return optional state attributes."""
+return {
+ATTR_IS_NIGHT_MODE: self.is_night_mode,
+ATTR_IS_AUTO_MODE: self.is_auto_mode
+}
@@ -1,187 +0,0 @@ (file removed)
"""
Support for Velbus platform.

For more details about this platform, please refer to the documentation at
https://home-assistant.io/components/fan.velbus/
"""
import asyncio
import logging
import voluptuous as vol

from homeassistant.components.fan import (
SPEED_LOW, SPEED_MEDIUM, SPEED_HIGH, FanEntity, SUPPORT_SET_SPEED,
PLATFORM_SCHEMA)
from homeassistant.components.velbus import DOMAIN
from homeassistant.const import CONF_NAME, CONF_DEVICES, STATE_OFF
import homeassistant.helpers.config_validation as cv

DEPENDENCIES = ['velbus']

_LOGGER = logging.getLogger(__name__)

PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
vol.Required(CONF_DEVICES): vol.All(cv.ensure_list, [
{
vol.Required('module'): cv.positive_int,
vol.Required('channel_low'): cv.positive_int,
vol.Required('channel_medium'): cv.positive_int,
vol.Required('channel_high'): cv.positive_int,
vol.Required(CONF_NAME): cv.string,
}
])
})


def setup_platform(hass, config, add_devices, discovery_info=None):
"""Set up Fans."""
velbus = hass.data[DOMAIN]
add_devices(VelbusFan(fan, velbus) for fan in config[CONF_DEVICES])


class VelbusFan(FanEntity):
"""Representation of a Velbus Fan."""

def __init__(self, fan, velbus):
"""Initialize a Velbus light."""
self._velbus = velbus
self._name = fan[CONF_NAME]
self._module = fan['module']
self._channel_low = fan['channel_low']
self._channel_medium = fan['channel_medium']
self._channel_high = fan['channel_high']
self._channels = [self._channel_low, self._channel_medium,
self._channel_high]
self._channels_state = [False, False, False]
self._speed = STATE_OFF

@asyncio.coroutine
def async_added_to_hass(self):
"""Add listener for Velbus messages on bus."""
def _init_velbus():
"""Initialize Velbus on startup."""
self._velbus.subscribe(self._on_message)
self.get_status()

yield from self.hass.async_add_job(_init_velbus)

def _on_message(self, message):
import velbus
if isinstance(message, velbus.RelayStatusMessage) and \
message.address == self._module and \
message.channel in self._channels:
if message.channel == self._channel_low:
self._channels_state[0] = message.is_on()
elif message.channel == self._channel_medium:
self._channels_state[1] = message.is_on()
elif message.channel == self._channel_high:
self._channels_state[2] = message.is_on()
self._calculate_speed()
self.schedule_update_ha_state()

def _calculate_speed(self):
if self._is_off():
self._speed = STATE_OFF
elif self._is_low():
self._speed = SPEED_LOW
elif self._is_medium():
self._speed = SPEED_MEDIUM
elif self._is_high():
self._speed = SPEED_HIGH

def _is_off(self):
return self._channels_state[0] is False and \
self._channels_state[1] is False and \
self._channels_state[2] is False

def _is_low(self):
return self._channels_state[0] is True and \
self._channels_state[1] is False and \
self._channels_state[2] is False

def _is_medium(self):
return self._channels_state[0] is True and \
self._channels_state[1] is True and \
self._channels_state[2] is False

def _is_high(self):
return self._channels_state[0] is True and \
self._channels_state[1] is False and \
self._channels_state[2] is True

@property
def name(self):
"""Return the display name of this light."""
return self._name

@property
def should_poll(self):
"""Disable polling."""
return False

@property
def speed(self):
"""Return the current speed."""
return self._speed

@property
def speed_list(self):
"""Get the list of available speeds."""
return [STATE_OFF, SPEED_LOW, SPEED_MEDIUM, SPEED_HIGH]

def turn_on(self, speed=None, **kwargs):
"""Turn on the entity."""
if speed is None:
speed = SPEED_MEDIUM
self.set_speed(speed)

def turn_off(self, **kwargs):
"""Turn off the entity."""
self.set_speed(STATE_OFF)

def set_speed(self, speed):
"""Set the speed of the fan."""
channels_off = []
channels_on = []
if speed == STATE_OFF:
channels_off = self._channels
elif speed == SPEED_LOW:
channels_off = [self._channel_medium, self._channel_high]
channels_on = [self._channel_low]
elif speed == SPEED_MEDIUM:
channels_off = [self._channel_high]
channels_on = [self._channel_low, self._channel_medium]
elif speed == SPEED_HIGH:
channels_off = [self._channel_medium]
channels_on = [self._channel_low, self._channel_high]
for channel in channels_off:
self._relay_off(channel)
for channel in channels_on:
self._relay_on(channel)
self.schedule_update_ha_state()

def _relay_on(self, channel):
import velbus
message = velbus.SwitchRelayOnMessage()
message.set_defaults(self._module)
message.relay_channels = [channel]
self._velbus.send(message)

def _relay_off(self, channel):
import velbus
message = velbus.SwitchRelayOffMessage()
message.set_defaults(self._module)
message.relay_channels = [channel]
self._velbus.send(message)

def get_status(self):
"""Retrieve current status."""
import velbus
message = velbus.ModuleStatusRequestMessage()
message.set_defaults(self._module)
message.channels = self._channels
self._velbus.send(message)

@property
def supported_features(self):
"""Flag supported features."""
return SUPPORT_SET_SPEED
@@ -26,7 +26,7 @@ from homeassistant.helpers.translation import async_get_translations
from homeassistant.loader import bind_hass
from homeassistant.util.yaml import load_yaml

-REQUIREMENTS = ['home-assistant-frontend==20180804.0']
+REQUIREMENTS = ['home-assistant-frontend==20180816.1']

DOMAIN = 'frontend'
DEPENDENCIES = ['api', 'websocket_api', 'http', 'system_log',
@@ -249,6 +249,7 @@ async def async_setup(hass, config):

index_view = IndexView(repo_path, js_version, hass.auth.active)
hass.http.register_view(index_view)
+hass.http.register_view(AuthorizeView(repo_path, js_version))

@callback
def async_finalize_panel(panel):
@@ -334,6 +335,35 @@ def _async_setup_themes(hass, themes):
hass.services.async_register(DOMAIN, SERVICE_RELOAD_THEMES, reload_themes)


+class AuthorizeView(HomeAssistantView):
+"""Serve the frontend."""
+
+url = '/auth/authorize'
+name = 'auth:authorize'
+requires_auth = False
+
+def __init__(self, repo_path, js_option):
+"""Initialize the frontend view."""
+self.repo_path = repo_path
+self.js_option = js_option
+
+async def get(self, request: web.Request):
+"""Redirect to the authorize page."""
+latest = self.repo_path is not None or \
+_is_latest(self.js_option, request)
+
+if latest:
+location = '/frontend_latest/authorize.html'
+else:
+location = '/frontend_es5/authorize.html'
+
+location += '?{}'.format(request.query_string)
+
+return web.Response(status=302, headers={
+'location': location
+})
+
+
class IndexView(HomeAssistantView):
"""Serve the frontend."""

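The new AuthorizeView is essentially a 302 redirect that forwards the incoming query string to the built authorize.html. An aiohttp-only sketch of the same idea, outside Home Assistant's view classes (handler name is illustrative; the query string is appended only when present here):

    from aiohttp import web

    async def authorize_redirect(request):
        """Send the browser to authorize.html, keeping the query parameters."""
        location = '/frontend_latest/authorize.html'
        if request.query_string:
            location += '?{}'.format(request.query_string)
        return web.Response(status=302, headers={'location': location})

    app = web.Application()
    app.router.add_get('/auth/authorize', authorize_redirect)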
@@ -178,7 +178,7 @@ def async_setup(hass, config):
refresh_token = None
if 'hassio_user' in data:
user = yield from hass.auth.async_get_user(data['hassio_user'])
-if user:
+if user and user.refresh_tokens:
refresh_token = list(user.refresh_tokens.values())[0]

if refresh_token is None:
@@ -17,9 +17,11 @@
"hapid": "Accesspoint ID (SGTIN)",
"name": "Name (optional, wird als Pr\u00e4fix f\u00fcr alle Ger\u00e4te verwendet)",
"pin": "PIN Code (optional)"
-}
+},
+"title": "HometicIP Accesspoint ausw\u00e4hlen"
},
"link": {
+"description": "Dr\u00fccken Sie den blauen Taster auf dem Accesspoint, sowie den Senden Button um HomematicIP mit Home Assistant zu verbinden.\n\n",
"title": "Verkn\u00fcpfe den Accesspoint"
}
},
@@ -22,7 +22,7 @@
},
"link": {
"description": "Trykk p\u00e5 den bl\u00e5 knappen p\u00e5 tilgangspunktet og send knappen for \u00e5 registrere HomematicIP med Home Assistant. \n\n",
-"title": "Link Tilgangspunkt"
+"title": "Link tilgangspunkt"
}
},
"title": "HomematicIP Sky"
@@ -21,7 +21,7 @@
"title": "Escolher ponto de acesso HomematicIP"
},
"link": {
-"description": "Pressione o bot\u00e3o azul no accesspoint e o bot\u00e3o enviar para registrar HomematicIP com Home Assistant.\n\n! [Localiza\u00e7\u00e3o do bot\u00e3o na ponte] (/ static/images/config_flows/config_homematicip_cloud.png)",
+"description": "Pressione o bot\u00e3o azul no ponto de acesso e o bot\u00e3o enviar para registrar HomematicIP com o Home Assistant.\n\n",
"title": "Associar ponto de acesso"
}
},
@@ -10,6 +10,7 @@ import logging
import voluptuous as vol

import homeassistant.helpers.config_validation as cv
+from homeassistant import config_entries

from .const import (
DOMAIN, HMIPC_HAPID, HMIPC_AUTHTOKEN, HMIPC_NAME,
@@ -41,7 +42,8 @@ async def async_setup(hass, config):
for conf in accesspoints:
if conf[CONF_ACCESSPOINT] not in configured_haps(hass):
hass.async_add_job(hass.config_entries.flow.async_init(
-DOMAIN, source='import', data={
+DOMAIN, context={'source': config_entries.SOURCE_IMPORT},
+data={
HMIPC_HAPID: conf[CONF_ACCESSPOINT],
HMIPC_AUTHTOKEN: conf[CONF_AUTHTOKEN],
HMIPC_NAME: conf[CONF_NAME],
@@ -27,6 +27,10 @@ class HomematicipCloudFlowHandler(data_entry_flow.FlowHandler):
"""Initialize HomematicIP Cloud config flow."""
self.auth = None

+async def async_step_user(self, user_input=None):
+"""Handle a flow initialized by the user."""
+return await self.async_step_init(user_input)
+
async def async_step_init(self, user_input=None):
"""Handle a flow start."""
errors = {}
@ -8,6 +8,7 @@ from ipaddress import ip_network
|
|||||||
import logging
|
import logging
|
||||||
import os
|
import os
|
||||||
import ssl
|
import ssl
|
||||||
|
from typing import Optional
|
||||||
|
|
||||||
from aiohttp import web
|
from aiohttp import web
|
||||||
from aiohttp.web_exceptions import HTTPMovedPermanently
|
from aiohttp.web_exceptions import HTTPMovedPermanently
|
||||||
@ -16,7 +17,6 @@ import voluptuous as vol
|
|||||||
from homeassistant.const import (
|
from homeassistant.const import (
|
||||||
EVENT_HOMEASSISTANT_START, EVENT_HOMEASSISTANT_STOP, SERVER_PORT)
|
EVENT_HOMEASSISTANT_START, EVENT_HOMEASSISTANT_STOP, SERVER_PORT)
|
||||||
import homeassistant.helpers.config_validation as cv
|
import homeassistant.helpers.config_validation as cv
|
||||||
import homeassistant.remote as rem
|
|
||||||
import homeassistant.util as hass_util
|
import homeassistant.util as hass_util
|
||||||
from homeassistant.util.logging import HideSensitiveDataFilter
|
from homeassistant.util.logging import HideSensitiveDataFilter
|
||||||
from homeassistant.util import ssl as ssl_util
|
from homeassistant.util import ssl as ssl_util
|
||||||
@ -49,6 +49,10 @@ CONF_TRUSTED_PROXIES = 'trusted_proxies'
|
|||||||
CONF_TRUSTED_NETWORKS = 'trusted_networks'
|
CONF_TRUSTED_NETWORKS = 'trusted_networks'
|
||||||
CONF_LOGIN_ATTEMPTS_THRESHOLD = 'login_attempts_threshold'
|
CONF_LOGIN_ATTEMPTS_THRESHOLD = 'login_attempts_threshold'
|
||||||
CONF_IP_BAN_ENABLED = 'ip_ban_enabled'
|
CONF_IP_BAN_ENABLED = 'ip_ban_enabled'
|
||||||
|
CONF_SSL_PROFILE = 'ssl_profile'
|
||||||
|
|
||||||
|
SSL_MODERN = 'modern'
|
||||||
|
SSL_INTERMEDIATE = 'intermediate'
|
||||||
|
|
||||||
_LOGGER = logging.getLogger(__name__)

@@ -66,15 +70,17 @@ HTTP_SCHEMA = vol.Schema({
vol.Optional(CONF_SSL_KEY): cv.isfile,
vol.Optional(CONF_CORS_ORIGINS, default=[]):
vol.All(cv.ensure_list, [cv.string]),
-vol.Optional(CONF_USE_X_FORWARDED_FOR, default=False): cv.boolean,
+vol.Inclusive(CONF_USE_X_FORWARDED_FOR, 'proxy'): cv.boolean,
-vol.Optional(CONF_TRUSTED_PROXIES, default=[]):
+vol.Inclusive(CONF_TRUSTED_PROXIES, 'proxy'):
vol.All(cv.ensure_list, [ip_network]),
vol.Optional(CONF_TRUSTED_NETWORKS, default=[]):
vol.All(cv.ensure_list, [ip_network]),
vol.Optional(CONF_LOGIN_ATTEMPTS_THRESHOLD,
default=NO_LOGIN_ATTEMPT_THRESHOLD):
vol.Any(cv.positive_int, NO_LOGIN_ATTEMPT_THRESHOLD),
-vol.Optional(CONF_IP_BAN_ENABLED, default=True): cv.boolean
+vol.Optional(CONF_IP_BAN_ENABLED, default=True): cv.boolean,
+vol.Optional(CONF_SSL_PROFILE, default=SSL_MODERN):
+vol.In([SSL_INTERMEDIATE, SSL_MODERN]),
})

CONFIG_SCHEMA = vol.Schema({
@@ -82,6 +88,28 @@ CONFIG_SCHEMA = vol.Schema({
}, extra=vol.ALLOW_EXTRA)


+class ApiConfig:
+"""Configuration settings for API server."""

+def __init__(self, host: str, port: Optional[int] = SERVER_PORT,
+use_ssl: bool = False,
+api_password: Optional[str] = None) -> None:
+"""Initialize a new API config object."""
+self.host = host
+self.port = port
+self.api_password = api_password

+if host.startswith(("http://", "https://")):
+self.base_url = host
+elif use_ssl:
+self.base_url = "https://{}".format(host)
+else:
+self.base_url = "http://{}".format(host)

+if port is not None:
+self.base_url += ':{}'.format(port)


async def async_setup(hass, config):
"""Set up the HTTP API and debug interface."""
conf = config.get(DOMAIN)
@@ -96,11 +124,12 @@ async def async_setup(hass, config):
ssl_peer_certificate = conf.get(CONF_SSL_PEER_CERTIFICATE)
ssl_key = conf.get(CONF_SSL_KEY)
cors_origins = conf[CONF_CORS_ORIGINS]
-use_x_forwarded_for = conf[CONF_USE_X_FORWARDED_FOR]
+use_x_forwarded_for = conf.get(CONF_USE_X_FORWARDED_FOR, False)
-trusted_proxies = conf[CONF_TRUSTED_PROXIES]
+trusted_proxies = conf.get(CONF_TRUSTED_PROXIES, [])
trusted_networks = conf[CONF_TRUSTED_NETWORKS]
is_ban_enabled = conf[CONF_IP_BAN_ENABLED]
login_threshold = conf[CONF_LOGIN_ATTEMPTS_THRESHOLD]
+ssl_profile = conf[CONF_SSL_PROFILE]

if api_password is not None:
logging.getLogger('aiohttp.access').addFilter(
@@ -119,7 +148,8 @@ async def async_setup(hass, config):
trusted_proxies=trusted_proxies,
trusted_networks=trusted_networks,
login_threshold=login_threshold,
-is_ban_enabled=is_ban_enabled
+is_ban_enabled=is_ban_enabled,
+ssl_profile=ssl_profile,
)

async def stop_server(event):
@@ -146,8 +176,8 @@ async def async_setup(hass, config):
host = hass_util.get_local_ip()
port = server_port

-hass.config.api = rem.API(host, api_password, port,
+hass.config.api = ApiConfig(host, port, ssl_certificate is not None,
-ssl_certificate is not None)
+api_password)

return True

@@ -159,7 +189,7 @@ class HomeAssistantHTTP:
ssl_certificate, ssl_peer_certificate,
ssl_key, server_host, server_port, cors_origins,
use_x_forwarded_for, trusted_proxies, trusted_networks,
-login_threshold, is_ban_enabled):
+login_threshold, is_ban_enabled, ssl_profile):
"""Initialize the HTTP Home Assistant server."""
app = self.app = web.Application(
middlewares=[staticresource_middleware])
@@ -199,6 +229,7 @@ class HomeAssistantHTTP:
self.server_host = server_host
self.server_port = server_port
self.is_ban_enabled = is_ban_enabled
+self.ssl_profile = ssl_profile
self._handler = None
self.server = None

@@ -285,7 +316,10 @@ class HomeAssistantHTTP:

if self.ssl_certificate:
try:
-context = ssl_util.server_context()
+if self.ssl_profile == SSL_INTERMEDIATE:
+context = ssl_util.server_context_intermediate()
+else:
+context = ssl_util.server_context_modern()
context.load_cert_chain(self.ssl_certificate, self.ssl_key)
except OSError as error:
_LOGGER.error("Could not read SSL certificate from %s: %s",
@@ -106,11 +106,11 @@ async def async_validate_auth_header(request, api_password=None):

if auth_type == 'Bearer':
hass = request.app['hass']
-access_token = hass.auth.async_get_access_token(auth_val)
+refresh_token = await hass.auth.async_validate_access_token(auth_val)
-if access_token is None:
+if refresh_token is None:
return False

-request['hass_user'] = access_token.refresh_token.user
+request['hass_user'] = refresh_token.user
return True

if auth_type == 'Basic' and api_password is not None:
@@ -24,6 +24,6 @@
"title": "Hub verbinden"
}
},
-"title": ""
+"title": "Philips Hue"
}
}
@@ -1,5 +1,9 @@
{
"config": {
+"abort": {
+"all_configured": "Toate pun\u021bile Philips Hue sunt deja configurate",
+"discover_timeout": "Imposibil de descoperit podurile Hue"
+},
"error": {
"linking": "A ap\u0103rut o eroare de leg\u0103tur\u0103 necunoscut\u0103.",
"register_failed": "Nu a reu\u0219it \u00eenregistrarea, \u00eencerca\u021bi din nou"
@@ -9,7 +9,7 @@ import logging

import voluptuous as vol

-from homeassistant import data_entry_flow
+from homeassistant import config_entries
from homeassistant.const import CONF_FILENAME, CONF_HOST
from homeassistant.helpers import aiohttp_client, config_validation as cv

@@ -108,7 +108,8 @@ async def async_setup(hass, config):
# deadlock: creating a config entry will set up the component but the
# setup would block till the entry is created!
hass.async_add_job(hass.config_entries.flow.async_init(
-DOMAIN, source=data_entry_flow.SOURCE_IMPORT, data={
+DOMAIN, context={'source': config_entries.SOURCE_IMPORT},
+data={
'host': bridge_conf[CONF_HOST],
'path': bridge_conf[CONF_FILENAME],
}
@@ -51,7 +51,8 @@ class HueBridge:
# linking procedure. When linking succeeds, it will remove the
# old config entry.
hass.async_add_job(hass.config_entries.flow.async_init(
-DOMAIN, source='import', data={
+DOMAIN, context={'source': config_entries.SOURCE_IMPORT},
+data={
'host': host,
}
))
@@ -50,6 +50,10 @@ class HueFlowHandler(data_entry_flow.FlowHandler):
"""Initialize the Hue flow."""
self.host = None

+async def async_step_user(self, user_input=None):
+"""Handle a flow initialized by the user."""
+return await self.async_step_init(user_input)

async def async_step_init(self, user_input=None):
"""Handle a flow start."""
from aiohue.discovery import discover_nupnp
@@ -17,25 +17,29 @@ import homeassistant.helpers.config_validation as cv
from homeassistant.components.image_processing import (
PLATFORM_SCHEMA, ImageProcessingFaceEntity, ATTR_CONFIDENCE, CONF_SOURCE,
CONF_ENTITY_ID, CONF_NAME, DOMAIN)
-from homeassistant.const import (CONF_IP_ADDRESS, CONF_PORT)
+from homeassistant.const import (
+CONF_IP_ADDRESS, CONF_PORT, CONF_PASSWORD, CONF_USERNAME,
+HTTP_BAD_REQUEST, HTTP_OK, HTTP_UNAUTHORIZED)

_LOGGER = logging.getLogger(__name__)

ATTR_BOUNDING_BOX = 'bounding_box'
ATTR_CLASSIFIER = 'classifier'
ATTR_IMAGE_ID = 'image_id'
+ATTR_ID = 'id'
ATTR_MATCHED = 'matched'
+FACEBOX_NAME = 'name'
CLASSIFIER = 'facebox'
DATA_FACEBOX = 'facebox_classifiers'
-EVENT_CLASSIFIER_TEACH = 'image_processing.teach_classifier'
FILE_PATH = 'file_path'
SERVICE_TEACH_FACE = 'facebox_teach_face'
-TIMEOUT = 9


PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
vol.Required(CONF_IP_ADDRESS): cv.string,
vol.Required(CONF_PORT): cv.port,
+vol.Optional(CONF_USERNAME): cv.string,
+vol.Optional(CONF_PASSWORD): cv.string,
})

SERVICE_TEACH_SCHEMA = vol.Schema({
@@ -45,6 +49,26 @@ SERVICE_TEACH_SCHEMA = vol.Schema({
})


+def check_box_health(url, username, password):
+"""Check the health of the classifier and return its id if healthy."""
+kwargs = {}
+if username:
+kwargs['auth'] = requests.auth.HTTPBasicAuth(username, password)
+try:
+response = requests.get(
+url,
+**kwargs
+)
+if response.status_code == HTTP_UNAUTHORIZED:
+_LOGGER.error("AuthenticationError on %s", CLASSIFIER)
+return None
+if response.status_code == HTTP_OK:
+return response.json()['hostname']
+except requests.exceptions.ConnectionError:
+_LOGGER.error("ConnectionError: Is %s running?", CLASSIFIER)
+return None


def encode_image(image):
"""base64 encode an image stream."""
base64_img = base64.b64encode(image).decode('ascii')
@@ -63,10 +87,10 @@ def parse_faces(api_faces):
for entry in api_faces:
face = {}
if entry['matched']:  # This data is only in matched faces.
-face[ATTR_NAME] = entry['name']
+face[FACEBOX_NAME] = entry['name']
face[ATTR_IMAGE_ID] = entry['id']
else:  # Lets be explicit.
-face[ATTR_NAME] = None
+face[FACEBOX_NAME] = None
face[ATTR_IMAGE_ID] = None
face[ATTR_CONFIDENCE] = round(100.0*entry['confidence'], 2)
face[ATTR_MATCHED] = entry['matched']
@@ -75,17 +99,46 @@ def parse_faces(api_faces):
return known_faces


-def post_image(url, image):
+def post_image(url, image, username, password):
"""Post an image to the classifier."""
+kwargs = {}
+if username:
+kwargs['auth'] = requests.auth.HTTPBasicAuth(username, password)
try:
response = requests.post(
url,
json={"base64": encode_image(image)},
-timeout=TIMEOUT
+**kwargs
)
+if response.status_code == HTTP_UNAUTHORIZED:
+_LOGGER.error("AuthenticationError on %s", CLASSIFIER)
+return None
return response
except requests.exceptions.ConnectionError:
_LOGGER.error("ConnectionError: Is %s running?", CLASSIFIER)
+return None


+def teach_file(url, name, file_path, username, password):
+"""Teach the classifier a name associated with a file."""
+kwargs = {}
+if username:
+kwargs['auth'] = requests.auth.HTTPBasicAuth(username, password)
+try:
+with open(file_path, 'rb') as open_file:
+response = requests.post(
+url,
+data={FACEBOX_NAME: name, ATTR_ID: file_path},
+files={'file': open_file},
+**kwargs
+)
+if response.status_code == HTTP_UNAUTHORIZED:
+_LOGGER.error("AuthenticationError on %s", CLASSIFIER)
+elif response.status_code == HTTP_BAD_REQUEST:
+_LOGGER.error("%s teaching of file %s failed with message:%s",
+CLASSIFIER, file_path, response.text)
+except requests.exceptions.ConnectionError:
+_LOGGER.error("ConnectionError: Is %s running?", CLASSIFIER)


def valid_file_path(file_path):
@@ -104,13 +157,20 @@ def setup_platform(hass, config, add_devices, discovery_info=None):
if DATA_FACEBOX not in hass.data:
hass.data[DATA_FACEBOX] = []

+ip_address = config[CONF_IP_ADDRESS]
+port = config[CONF_PORT]
+username = config.get(CONF_USERNAME)
+password = config.get(CONF_PASSWORD)
+url_health = "http://{}:{}/healthz".format(ip_address, port)
+hostname = check_box_health(url_health, username, password)
+if hostname is None:
+return

entities = []
for camera in config[CONF_SOURCE]:
facebox = FaceClassifyEntity(
-config[CONF_IP_ADDRESS],
+ip_address, port, username, password, hostname,
-config[CONF_PORT],
+camera[CONF_ENTITY_ID], camera.get(CONF_NAME))
-camera[CONF_ENTITY_ID],
-camera.get(CONF_NAME))
entities.append(facebox)
hass.data[DATA_FACEBOX].append(facebox)
add_devices(entities)
@@ -129,33 +189,37 @@ def setup_platform(hass, config, add_devices, discovery_info=None):
classifier.teach(name, file_path)

hass.services.register(
-DOMAIN,
+DOMAIN, SERVICE_TEACH_FACE, service_handle,
-SERVICE_TEACH_FACE,
-service_handle,
schema=SERVICE_TEACH_SCHEMA)


class FaceClassifyEntity(ImageProcessingFaceEntity):
"""Perform a face classification."""

-def __init__(self, ip, port, camera_entity, name=None):
+def __init__(self, ip_address, port, username, password, hostname,
+camera_entity, name=None):
"""Init with the API key and model id."""
super().__init__()
-self._url_check = "http://{}:{}/{}/check".format(ip, port, CLASSIFIER)
+self._url_check = "http://{}:{}/{}/check".format(
-self._url_teach = "http://{}:{}/{}/teach".format(ip, port, CLASSIFIER)
+ip_address, port, CLASSIFIER)
+self._url_teach = "http://{}:{}/{}/teach".format(
+ip_address, port, CLASSIFIER)
+self._username = username
+self._password = password
+self._hostname = hostname
self._camera = camera_entity
if name:
self._name = name
else:
camera_name = split_entity_id(camera_entity)[1]
-self._name = "{} {}".format(
+self._name = "{} {}".format(CLASSIFIER, camera_name)
-CLASSIFIER, camera_name)
self._matched = {}

def process_image(self, image):
"""Process an image."""
-response = post_image(self._url_check, image)
+response = post_image(
-if response is not None:
+self._url_check, image, self._username, self._password)
+if response:
response_json = response.json()
if response_json['success']:
total_faces = response_json['facesCount']
@@ -173,34 +237,8 @@ class FaceClassifyEntity(ImageProcessingFaceEntity):
if (not self.hass.config.is_allowed_path(file_path)
or not valid_file_path(file_path)):
return
-with open(file_path, 'rb') as open_file:
+teach_file(
-response = requests.post(
+self._url_teach, name, file_path, self._username, self._password)
-self._url_teach,
-data={ATTR_NAME: name, 'id': file_path},
-files={'file': open_file})

-if response.status_code == 200:
-self.hass.bus.fire(
-EVENT_CLASSIFIER_TEACH, {
-ATTR_CLASSIFIER: CLASSIFIER,
-ATTR_NAME: name,
-FILE_PATH: file_path,
-'success': True,
-'message': None
-})

-elif response.status_code == 400:
-_LOGGER.warning(
-"%s teaching of file %s failed with message:%s",
-CLASSIFIER, file_path, response.text)
-self.hass.bus.fire(
-EVENT_CLASSIFIER_TEACH, {
-ATTR_CLASSIFIER: CLASSIFIER,
-ATTR_NAME: name,
-FILE_PATH: file_path,
-'success': False,
-'message': response.text
-})

@property
def camera_entity(self):
@@ -218,4 +256,5 @@ class FaceClassifyEntity(ImageProcessingFaceEntity):
return {
'matched_faces': self._matched,
'total_matched_faces': len(self._matched),
+'hostname': self._hostname
}
@@ -4,9 +4,9 @@ Support for deCONZ light.
For more details about this component, please refer to the documentation at
https://home-assistant.io/components/light.deconz/
"""
-from homeassistant.components.deconz import (
+from homeassistant.components.deconz.const import (
-DOMAIN as DATA_DECONZ, DATA_DECONZ_ID, DATA_DECONZ_UNSUB)
+CONF_ALLOW_DECONZ_GROUPS, DOMAIN as DATA_DECONZ,
-from homeassistant.components.deconz.const import CONF_ALLOW_DECONZ_GROUPS
+DATA_DECONZ_ID, DATA_DECONZ_UNSUB, SWITCH_TYPES)
from homeassistant.components.light import (
ATTR_BRIGHTNESS, ATTR_COLOR_TEMP, ATTR_EFFECT, ATTR_FLASH, ATTR_HS_COLOR,
ATTR_TRANSITION, EFFECT_COLORLOOP, FLASH_LONG, FLASH_SHORT,
@@ -32,7 +32,8 @@ async def async_setup_entry(hass, config_entry, async_add_devices):
"""Add light from deCONZ."""
entities = []
for light in lights:
-entities.append(DeconzLight(light))
+if light.type not in SWITCH_TYPES:
+entities.append(DeconzLight(light))
async_add_devices(entities, True)

hass.data[DATA_DECONZ_UNSUB].append(
@@ -189,3 +190,12 @@ class DeconzLight(Light):
del data['on']

await self._light.async_set_state(data)

+@property
+def device_state_attributes(self):
+"""Return the device state attributes."""
+attributes = {}
+attributes['is_deconz_group'] = self._light.type == 'LightGroup'
+if self._light.type == 'LightGroup':
+attributes['all_on'] = self._light.all_on
+return attributes
@@ -254,8 +254,6 @@ def _mean_tuple(*args):
return tuple(sum(l) / len(l) for l in zip(*args))


-# https://github.com/PyCQA/pylint/issues/1831
-# pylint: disable=bad-whitespace
def _reduce_attribute(states: List[State],
key: str,
default: Optional[Any] = None,
@@ -1,104 +0,0 @@
-"""
-Support for Velbus lights.

-For more details about this platform, please refer to the documentation at
-https://home-assistant.io/components/light.velbus/
-"""
-import asyncio
-import logging

-import voluptuous as vol

-from homeassistant.const import CONF_NAME, CONF_DEVICES
-from homeassistant.components.light import Light, PLATFORM_SCHEMA
-from homeassistant.components.velbus import DOMAIN
-import homeassistant.helpers.config_validation as cv

-DEPENDENCIES = ['velbus']

-_LOGGER = logging.getLogger(__name__)

-PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
-vol.Required(CONF_DEVICES): vol.All(cv.ensure_list, [
-{
-vol.Required('module'): cv.positive_int,
-vol.Required('channel'): cv.positive_int,
-vol.Required(CONF_NAME): cv.string
-}
-])
-})


-def setup_platform(hass, config, add_devices, discovery_info=None):
-"""Set up Lights."""
-velbus = hass.data[DOMAIN]
-add_devices(VelbusLight(light, velbus) for light in config[CONF_DEVICES])


-class VelbusLight(Light):
-"""Representation of a Velbus Light."""

-def __init__(self, light, velbus):
-"""Initialize a Velbus light."""
-self._velbus = velbus
-self._name = light[CONF_NAME]
-self._module = light['module']
-self._channel = light['channel']
-self._state = False

-@asyncio.coroutine
-def async_added_to_hass(self):
-"""Add listener for Velbus messages on bus."""
-def _init_velbus():
-"""Initialize Velbus on startup."""
-self._velbus.subscribe(self._on_message)
-self.get_status()

-yield from self.hass.async_add_job(_init_velbus)

-def _on_message(self, message):
-import velbus
-if isinstance(message, velbus.RelayStatusMessage) and \
-message.address == self._module and \
-message.channel == self._channel:
-self._state = message.is_on()
-self.schedule_update_ha_state()

-@property
-def name(self):
-"""Return the display name of this light."""
-return self._name

-@property
-def should_poll(self):
-"""Disable polling."""
-return False

-@property
-def is_on(self):
-"""Return true if the light is on."""
-return self._state

-def turn_on(self, **kwargs):
-"""Instruct the light to turn on."""
-import velbus
-message = velbus.SwitchRelayOnMessage()
-message.set_defaults(self._module)
-message.relay_channels = [self._channel]
-self._velbus.send(message)

-def turn_off(self, **kwargs):
-"""Instruct the light to turn off."""
-import velbus
-message = velbus.SwitchRelayOffMessage()
-message.set_defaults(self._module)
-message.relay_channels = [self._channel]
-self._velbus.send(message)

-def get_status(self):
-"""Retrieve current status."""
-import velbus
-message = velbus.ModuleStatusRequestMessage()
-message.set_defaults(self._module)
-message.channels = [self._channel]
-self._velbus.send(message)
@@ -10,5 +10,5 @@ DOMAIN = 'map'
async def async_setup(hass, config):
"""Register the built-in map panel."""
await hass.components.frontend.async_register_built_in_panel(
-'map', 'map', 'mdi:account-location')
+'map', 'map', 'hass:account-location')
return True
@@ -14,7 +14,7 @@ from homeassistant.components.media_player import (
SERVICE_PLAY_MEDIA)
from homeassistant.helpers import config_validation as cv

-REQUIREMENTS = ['youtube_dl==2018.07.29']
+REQUIREMENTS = ['youtube_dl==2018.08.04']

_LOGGER = logging.getLogger(__name__)
homeassistant/components/media_player/dlna_dmr.py (new file, 400 lines)
@@ -0,0 +1,400 @@
+# -*- coding: utf-8 -*-
+"""
+Support for DLNA DMR (Device Media Renderer).

+For more details about this platform, please refer to the documentation at
+https://home-assistant.io/components/media_player.dlna_dmr/
+"""

+import asyncio
+import functools
+import logging
+from datetime import datetime

+import aiohttp
+import voluptuous as vol

+import homeassistant.helpers.config_validation as cv
+from homeassistant.components.media_player import (
+SUPPORT_PLAY, SUPPORT_PAUSE, SUPPORT_STOP,
+SUPPORT_VOLUME_MUTE, SUPPORT_VOLUME_SET,
+SUPPORT_PLAY_MEDIA,
+SUPPORT_PREVIOUS_TRACK, SUPPORT_NEXT_TRACK,
+MediaPlayerDevice,
+PLATFORM_SCHEMA)
+from homeassistant.const import (
+EVENT_HOMEASSISTANT_STOP,
+CONF_URL, CONF_NAME,
+STATE_OFF, STATE_ON, STATE_IDLE, STATE_PLAYING, STATE_PAUSED)
+from homeassistant.core import HomeAssistant
+from homeassistant.exceptions import PlatformNotReady
+from homeassistant.helpers.aiohttp_client import async_get_clientsession
+from homeassistant.util import get_local_ip


+DLNA_DMR_DATA = 'dlna_dmr'

+REQUIREMENTS = [
+'async-upnp-client==0.12.3',
+]

+DEFAULT_NAME = 'DLNA Digital Media Renderer'
+DEFAULT_LISTEN_PORT = 8301

+CONF_LISTEN_IP = 'listen_ip'
+CONF_LISTEN_PORT = 'listen_port'

+PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
+vol.Required(CONF_URL): cv.string,
+vol.Optional(CONF_LISTEN_IP): cv.string,
+vol.Optional(CONF_LISTEN_PORT, default=DEFAULT_LISTEN_PORT): cv.port,
+vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
+})

+HOME_ASSISTANT_UPNP_CLASS_MAPPING = {
+'music': 'object.item.audioItem',
+'tvshow': 'object.item.videoItem',
+'video': 'object.item.videoItem',
+'episode': 'object.item.videoItem',
+'channel': 'object.item.videoItem',
+'playlist': 'object.item.playlist',
+}
+HOME_ASSISTANT_UPNP_MIME_TYPE_MAPPING = {
+'music': 'audio/*',
+'tvshow': 'video/*',
+'video': 'video/*',
+'episode': 'video/*',
+'channel': 'video/*',
+'playlist': 'playlist/*',
+}

+_LOGGER = logging.getLogger(__name__)


+def catch_request_errors():
+"""Catch asyncio.TimeoutError, aiohttp.ClientError errors."""
+def call_wrapper(func):
+"""Call wrapper for decorator."""
+@functools.wraps(func)
+def wrapper(self, *args, **kwargs):
+"""Catch asyncio.TimeoutError, aiohttp.ClientError errors."""
+try:
+return func(self, *args, **kwargs)
+except (asyncio.TimeoutError, aiohttp.ClientError):
+_LOGGER.error("Error during call %s", func.__name__)

+return wrapper

+return call_wrapper


+async def async_start_event_handler(hass, server_host, server_port, requester):
+"""Register notify view."""
+hass_data = hass.data[DLNA_DMR_DATA]
+if 'event_handler' in hass_data:
+return hass_data['event_handler']

+# start event handler
+from async_upnp_client.aiohttp import AiohttpNotifyServer
+server = AiohttpNotifyServer(requester,
+server_port,
+server_host,
+hass.loop)
+await server.start_server()
+_LOGGER.info('UPNP/DLNA event handler listening on: %s',
+server.callback_url)
+hass_data['notify_server'] = server
+hass_data['event_handler'] = server.event_handler

+# register for graceful shutdown
+async def async_stop_server(event):
+"""Stop server."""
+_LOGGER.debug('Stopping UPNP/DLNA event handler')
+await server.stop_server()
+hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, async_stop_server)

+return hass_data['event_handler']


+async def async_setup_platform(hass: HomeAssistant,
+config,
+async_add_devices,
+discovery_info=None):
+"""Set up DLNA DMR platform."""
+if config.get(CONF_URL) is not None:
+url = config[CONF_URL]
+name = config.get(CONF_NAME)
+elif discovery_info is not None:
+url = discovery_info['ssdp_description']
+name = discovery_info.get('name')

+if DLNA_DMR_DATA not in hass.data:
+hass.data[DLNA_DMR_DATA] = {}

+if 'lock' not in hass.data[DLNA_DMR_DATA]:
+hass.data[DLNA_DMR_DATA]['lock'] = asyncio.Lock()

+# build upnp/aiohttp requester
+from async_upnp_client.aiohttp import AiohttpSessionRequester
+session = async_get_clientsession(hass)
+requester = AiohttpSessionRequester(session, True)

+# ensure event handler has been started
+with await hass.data[DLNA_DMR_DATA]['lock']:
+server_host = config.get(CONF_LISTEN_IP)
+if server_host is None:
+server_host = get_local_ip()
+server_port = config.get(CONF_LISTEN_PORT, DEFAULT_LISTEN_PORT)
+event_handler = await async_start_event_handler(hass,
+server_host,
+server_port,
+requester)

+# create upnp device
+from async_upnp_client import UpnpFactory
+factory = UpnpFactory(requester, disable_state_variable_validation=True)
+try:
+upnp_device = await factory.async_create_device(url)
+except (asyncio.TimeoutError, aiohttp.ClientError):
+raise PlatformNotReady()

+# wrap with DmrDevice
+from async_upnp_client.dlna import DmrDevice
+dlna_device = DmrDevice(upnp_device, event_handler)

+# create our own device
+device = DlnaDmrDevice(dlna_device, name)
+_LOGGER.debug("Adding device: %s", device)
+async_add_devices([device], True)


+class DlnaDmrDevice(MediaPlayerDevice):
+"""Representation of a DLNA DMR device."""

+def __init__(self, dmr_device, name=None):
+"""Initializer."""
+self._device = dmr_device
+self._name = name

+self._available = False
+self._subscription_renew_time = None

+async def async_added_to_hass(self):
+"""Callback when added."""
+self._device.on_event = self._on_event

+# register unsubscribe on stop
+bus = self.hass.bus
+bus.async_listen_once(EVENT_HOMEASSISTANT_STOP,
+self._async_on_hass_stop)

+@property
+def available(self):
+"""Device is available."""
+return self._available

+async def _async_on_hass_stop(self, event):
+"""Event handler on HASS stop."""
+with await self.hass.data[DLNA_DMR_DATA]['lock']:
+await self._device.async_unsubscribe_services()

+async def async_update(self):
+"""Retrieve the latest data."""
+was_available = self._available

+try:
+await self._device.async_update()
+self._available = True
+except (asyncio.TimeoutError, aiohttp.ClientError):
+self._available = False
+_LOGGER.debug("Device unavailable")
+return

+# do we need to (re-)subscribe?
+now = datetime.now()
+should_renew = self._subscription_renew_time and \
+now >= self._subscription_renew_time
+if should_renew or \
+not was_available and self._available:
+try:
+timeout = await self._device.async_subscribe_services()
+self._subscription_renew_time = datetime.now() + timeout / 2
+except (asyncio.TimeoutError, aiohttp.ClientError):
+self._available = False
+_LOGGER.debug("Could not (re)subscribe")

+def _on_event(self, service, state_variables):
+"""State variable(s) changed, let home-assistant know."""
+self.schedule_update_ha_state()

+@property
+def supported_features(self):
+"""Flag media player features that are supported."""
+supported_features = 0

+if self._device.has_volume_level:
+supported_features |= SUPPORT_VOLUME_SET
+if self._device.has_volume_mute:
+supported_features |= SUPPORT_VOLUME_MUTE
+if self._device.has_play:
+supported_features |= SUPPORT_PLAY
+if self._device.has_pause:
+supported_features |= SUPPORT_PAUSE
+if self._device.has_stop:
+supported_features |= SUPPORT_STOP
+if self._device.has_previous:
+supported_features |= SUPPORT_PREVIOUS_TRACK
+if self._device.has_next:
+supported_features |= SUPPORT_NEXT_TRACK
+if self._device.has_play_media:
+supported_features |= SUPPORT_PLAY_MEDIA

+return supported_features

+@property
+def volume_level(self):
+"""Volume level of the media player (0..1)."""
+return self._device.volume_level

+@catch_request_errors()
+async def async_set_volume_level(self, volume):
+"""Set volume level, range 0..1."""
+await self._device.async_set_volume_level(volume)

+@property
+def is_volume_muted(self):
+"""Boolean if volume is currently muted."""
+return self._device.is_volume_muted

+@catch_request_errors()
+async def async_mute_volume(self, mute):
+"""Mute the volume."""
+desired_mute = bool(mute)
+await self._device.async_mute_volume(desired_mute)

+@catch_request_errors()
+async def async_media_pause(self):
+"""Send pause command."""
+if not self._device.can_pause:
+_LOGGER.debug('Cannot do Pause')
+return

+await self._device.async_pause()

+@catch_request_errors()
+async def async_media_play(self):
+"""Send play command."""
+if not self._device.can_play:
+_LOGGER.debug('Cannot do Play')
+return

+await self._device.async_play()

+@catch_request_errors()
+async def async_media_stop(self):
+"""Send stop command."""
+if not self._device.can_stop:
+_LOGGER.debug('Cannot do Stop')
+return

+await self._device.async_stop()

+@catch_request_errors()
+async def async_play_media(self, media_type, media_id, **kwargs):
+"""Play a piece of media."""
+title = "Home Assistant"
+mime_type = HOME_ASSISTANT_UPNP_MIME_TYPE_MAPPING[media_type]
+upnp_class = HOME_ASSISTANT_UPNP_CLASS_MAPPING[media_type]

+# stop current playing media
+if self._device.can_stop:
+await self.async_media_stop()

+# queue media
+await self._device.async_set_transport_uri(media_id,
+title,
+mime_type,
+upnp_class)
+await self._device.async_wait_for_can_play()

+# if already playing, no need to call Play
+from async_upnp_client import dlna
+if self._device.state == dlna.STATE_PLAYING:
+return

+# play it
+await self.async_media_play()

+@catch_request_errors()
+async def async_media_previous_track(self):
+"""Send previous track command."""
+if not self._device.can_previous:
+_LOGGER.debug('Cannot do Previous')
+return

+await self._device.async_previous()

+@catch_request_errors()
+async def async_media_next_track(self):
+"""Send next track command."""
+if not self._device.can_next:
+_LOGGER.debug('Cannot do Next')
+return

+await self._device.async_next()

+@property
+def media_title(self):
+"""Title of current playing media."""
+return self._device.media_title

+@property
+def media_image_url(self):
+"""Image url of current playing media."""
+return self._device.media_image_url

+@property
+def state(self):
+"""State of the player."""
+if not self._available:
+return STATE_OFF

+from async_upnp_client import dlna
+if self._device.state is None:
+return STATE_ON
+if self._device.state == dlna.STATE_PLAYING:
+return STATE_PLAYING
+if self._device.state == dlna.STATE_PAUSED:
+return STATE_PAUSED

+return STATE_IDLE

+@property
+def media_duration(self):
+"""Duration of current playing media in seconds."""
+return self._device.media_duration

+@property
+def media_position(self):
+"""Position of current playing media in seconds."""
+return self._device.media_position

+@property
+def media_position_updated_at(self):
+"""When was the position of the current playing media valid.

+Returns value from homeassistant.util.dt.utcnow().
+"""
+return self._device.media_position_updated_at

+@property
+def name(self) -> str:
+"""Return the name of the device."""
+if self._name:
+return self._name
+return self._device.name

+@property
+def unique_id(self) -> str:
+"""Return an unique ID."""
+return self._device.udn
@@ -160,6 +160,7 @@ def async_setup_platform(hass, config, async_add_devices, discovery_info=None):
if DATA_KODI not in hass.data:
hass.data[DATA_KODI] = dict()

+unique_id = None
# Is this a manual configuration?
if discovery_info is None:
name = config.get(CONF_NAME)
@@ -175,6 +176,9 @@ def async_setup_platform(hass, config, async_add_devices, discovery_info=None):
tcp_port = DEFAULT_TCP_PORT
encryption = DEFAULT_PROXY_SSL
websocket = DEFAULT_ENABLE_WEBSOCKET
+properties = discovery_info.get('properties')
+if properties is not None:
+unique_id = properties.get('uuid', None)

# Only add a device once, so discovered devices do not override manual
# config.
@@ -182,6 +186,14 @@ def async_setup_platform(hass, config, async_add_devices, discovery_info=None):
if ip_addr in hass.data[DATA_KODI]:
return

+# If we got an unique id, check that it does not exist already.
+# This is necessary as netdisco does not deterministally return the same
+# advertisement when the service is offered over multiple IP addresses.
+if unique_id is not None:
+for device in hass.data[DATA_KODI].values():
+if device.unique_id == unique_id:
+return

entity = KodiDevice(
hass,
name=name,
@@ -190,7 +202,8 @@ def async_setup_platform(hass, config, async_add_devices, discovery_info=None):
password=config.get(CONF_PASSWORD),
turn_on_action=config.get(CONF_TURN_ON_ACTION),
turn_off_action=config.get(CONF_TURN_OFF_ACTION),
-timeout=config.get(CONF_TIMEOUT), websocket=websocket)
+timeout=config.get(CONF_TIMEOUT), websocket=websocket,
+unique_id=unique_id)

hass.data[DATA_KODI][ip_addr] = entity
async_add_devices([entity], update_before_add=True)
@@ -260,12 +273,14 @@ class KodiDevice(MediaPlayerDevice):
def __init__(self, hass, name, host, port, tcp_port, encryption=False,
username=None, password=None,
turn_on_action=None, turn_off_action=None,
-timeout=DEFAULT_TIMEOUT, websocket=True):
+timeout=DEFAULT_TIMEOUT, websocket=True,
+unique_id=None):
"""Initialize the Kodi device."""
import jsonrpc_async
import jsonrpc_websocket
self.hass = hass
self._name = name
+self._unique_id = unique_id

kwargs = {
'timeout': timeout,
@@ -384,6 +399,11 @@ class KodiDevice(MediaPlayerDevice):
_LOGGER.debug("Unable to fetch kodi data", exc_info=True)
return None

+@property
+def unique_id(self):
+"""Return the unique id of the device."""
+return self._unique_id

@property
def state(self):
"""Return the state of the device."""
@@ -25,7 +25,7 @@ from homeassistant.const import (
)
import homeassistant.helpers.config_validation as cv

-REQUIREMENTS = ['pymediaroom==0.6.3']
+REQUIREMENTS = ['pymediaroom==0.6.4']

_LOGGER = logging.getLogger(__name__)
homeassistant/components/media_player/pjlink.py (new file, 157 lines)
@@ -0,0 +1,157 @@
+"""
+Support for controlling projector via the PJLink protocol.

+For more details about this platform, please refer to the documentation at
+https://home-assistant.io/components/media_player.pjlink/
+"""
+import logging

+import voluptuous as vol

+from homeassistant.components.media_player import (
+SUPPORT_TURN_OFF, SUPPORT_TURN_ON, SUPPORT_VOLUME_MUTE,
+SUPPORT_SELECT_SOURCE, PLATFORM_SCHEMA, MediaPlayerDevice)
+from homeassistant.const import (
+STATE_OFF, STATE_ON, CONF_HOST,
+CONF_NAME, CONF_PASSWORD, CONF_PORT)
+import homeassistant.helpers.config_validation as cv

+REQUIREMENTS = ['pypjlink2==1.2.0']

+_LOGGER = logging.getLogger(__name__)

+CONF_ENCODING = 'encoding'

+DEFAULT_PORT = 4352
+DEFAULT_ENCODING = 'utf-8'

+PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
+vol.Required(CONF_HOST): cv.string,
+vol.Optional(CONF_PORT, default=DEFAULT_PORT): cv.port,
+vol.Optional(CONF_NAME): cv.string,
+vol.Optional(CONF_ENCODING, default=DEFAULT_ENCODING): cv.string,
+vol.Optional(CONF_PASSWORD): cv.string,
+})

+SUPPORT_PJLINK = SUPPORT_VOLUME_MUTE | \
+SUPPORT_TURN_ON | SUPPORT_TURN_OFF | SUPPORT_SELECT_SOURCE


+def setup_platform(hass, config, add_devices, discovery_info=None):
+"""Set up the PJLink platform."""
+host = config.get(CONF_HOST)
+port = config.get(CONF_PORT)
+name = config.get(CONF_NAME)
+encoding = config.get(CONF_ENCODING)
+password = config.get(CONF_PASSWORD)

+if 'pjlink' not in hass.data:
+hass.data['pjlink'] = {}
+hass_data = hass.data['pjlink']

+device_label = "{}:{}".format(host, port)
+if device_label in hass_data:
+return

+device = PjLinkDevice(host, port, name, encoding, password)
+hass_data[device_label] = device
+add_devices([device], True)


+def format_input_source(input_source_name, input_source_number):
+"""Format input source for display in UI."""
+return "{} {}".format(input_source_name, input_source_number)


+class PjLinkDevice(MediaPlayerDevice):
+"""Representation of a PJLink device."""

+def __init__(self, host, port, name, encoding, password):
+"""Iinitialize the PJLink device."""
+self._host = host
+self._port = port
+self._name = name
+self._password = password
+self._encoding = encoding
+self._muted = False
+self._pwstate = STATE_OFF
+self._current_source = None
+with self.projector() as projector:
+if not self._name:
+self._name = projector.get_name()
+inputs = projector.get_inputs()
+self._source_name_mapping = \
+{format_input_source(*x): x for x in inputs}
+self._source_list = sorted(self._source_name_mapping.keys())

+def projector(self):
+"""Create PJLink Projector instance."""
+from pypjlink import Projector
+projector = Projector.from_address(self._host, self._port,
+self._encoding)
+projector.authenticate(self._password)
+return projector

+def update(self):
+"""Get the latest state from the device."""
+with self.projector() as projector:
+pwstate = projector.get_power()
+if pwstate == 'off':
+self._pwstate = STATE_OFF
+else:
+self._pwstate = STATE_ON
+self._muted = projector.get_mute()[1]
+self._current_source = \
+format_input_source(*projector.get_input())

+@property
+def name(self):
+"""Return the name of the device."""
+return self._name

+@property
+def state(self):
+"""Return the state of the device."""
+return self._pwstate

+@property
+def is_volume_muted(self):
+"""Return boolean indicating mute status."""
+return self._muted

+@property
+def source(self):
+"""Return current input source."""
+return self._current_source

+@property
+def source_list(self):
+"""Return all available input sources."""
+return self._source_list

+@property
+def supported_features(self):
+"""Return projector supported features."""
+return SUPPORT_PJLINK

+def turn_off(self):
+"""Turn projector off."""
+with self.projector() as projector:
+projector.set_power('off')

+def turn_on(self):
+"""Turn projector on."""
+with self.projector() as projector:
+projector.set_power('on')

+def mute_volume(self, mute):
+"""Mute (true) of unmute (false) media player."""
+with self.projector() as projector:
+from pypjlink import MUTE_AUDIO
+projector.set_mute(MUTE_AUDIO, mute)

+def select_source(self, source):
+"""Set the input source."""
+source = self._source_name_mapping[source]
+with self.projector() as projector:
+projector.set_input(*source)
@@ -32,7 +32,8 @@ from homeassistant.util.async_ import (
from homeassistant.const import (
EVENT_HOMEASSISTANT_STOP, CONF_VALUE_TEMPLATE, CONF_USERNAME,
CONF_PASSWORD, CONF_PORT, CONF_PROTOCOL, CONF_PAYLOAD)
-from homeassistant.components.mqtt.server import HBMQTT_CONFIG_SCHEMA
+from .server import HBMQTT_CONFIG_SCHEMA

REQUIREMENTS = ['paho-mqtt==1.3.1']

@@ -306,7 +307,8 @@ async def _async_setup_server(hass: HomeAssistantType,
return None

success, broker_config = \
-await server.async_start(hass, conf.get(CONF_EMBEDDED))
+await server.async_start(
+hass, conf.get(CONF_PASSWORD), conf.get(CONF_EMBEDDED))

if not success:
return None
@@ -349,6 +351,16 @@ async def async_setup(hass: HomeAssistantType, config: ConfigType) -> bool:
if CONF_EMBEDDED not in conf and CONF_BROKER in conf:
broker_config = None
else:
+if (conf.get(CONF_PASSWORD) is None and
+config.get('http') is not None and
+config['http'].get('api_password') is not None):
+_LOGGER.error(
+"Starting from release 0.76, the embedded MQTT broker does not"
+" use api_password as default password anymore. Please set"
+" password configuration. See https://home-assistant.io/docs/"
+"mqtt/broker#embedded-broker for details")
+return False

broker_config = await _async_setup_server(hass, config)

if CONF_BROKER in conf:
@@ -27,27 +27,29 @@ HBMQTT_CONFIG_SCHEMA = vol.Any(None, vol.Schema({
})
}, extra=vol.ALLOW_EXTRA))

+_LOGGER = logging.getLogger(__name__)


@asyncio.coroutine
-def async_start(hass, server_config):
+def async_start(hass, password, server_config):
"""Initialize MQTT Server.

This method is a coroutine.
"""
from hbmqtt.broker import Broker, BrokerException

+passwd = tempfile.NamedTemporaryFile()
try:
-passwd = tempfile.NamedTemporaryFile()

if server_config is None:
-server_config, client_config = generate_config(hass, passwd)
+server_config, client_config = generate_config(
+hass, passwd, password)
else:
client_config = None

broker = Broker(server_config, hass.loop)
yield from broker.start()
except BrokerException:
-logging.getLogger(__name__).exception("Error initializing MQTT server")
+_LOGGER.exception("Error initializing MQTT server")
return False, None
finally:
passwd.close()
@@ -63,9 +65,10 @@ def async_start(hass, server_config):
return True, client_config


-def generate_config(hass, passwd):
+def generate_config(hass, passwd, password):
"""Generate a configuration based on current Home Assistant instance."""
-from homeassistant.components.mqtt import PROTOCOL_311
+from . import PROTOCOL_311

config = {
'listeners': {
'default': {
@@ -79,29 +82,26 @@ def generate_config(hass, passwd):
},
},
'auth': {
-'allow-anonymous': hass.config.api.api_password is None
+'allow-anonymous': password is None
},
'plugins': ['auth_anonymous'],
}

-if hass.config.api.api_password:
+if password:
username = 'homeassistant'
-password = hass.config.api.api_password

# Encrypt with what hbmqtt uses to verify
from passlib.apps import custom_app_context

passwd.write(
'homeassistant:{}\n'.format(
-custom_app_context.encrypt(
+custom_app_context.encrypt(password)).encode('utf-8'))
-hass.config.api.api_password)).encode('utf-8'))
passwd.flush()

config['auth']['password-file'] = passwd.name
config['plugins'].append('auth_file')
else:
username = None
-password = None

client_config = ('localhost', 1883, username, password, None, PROTOCOL_311)
|
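The password file written by generate_config() holds a single `homeassistant:<hash>` entry hashed with passlib's custom_app_context, which the diff's own comment notes is what hbmqtt uses to verify. A minimal sketch of producing and checking such an entry outside Home Assistant (the password below is made up for illustration; `hash()` is the current passlib spelling of the deprecated `encrypt()` used in the diff):

    from passlib.apps import custom_app_context

    # Hash a broker password the same way generate_config() does
    # (illustrative password only).
    entry = 'homeassistant:{}'.format(custom_app_context.hash('example-password'))

    # A stored hash can later be checked against a candidate password.
    stored_hash = entry.split(':', 1)[1]
    assert custom_app_context.verify('example-password', stored_hash)
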
@@ -22,7 +22,7 @@ from .const import (
 from .device import get_mysensors_devices
 from .gateway import get_mysensors_gateway, setup_gateways, finish_setup
 
-REQUIREMENTS = ['pymysensors==0.16.0']
+REQUIREMENTS = ['pymysensors==0.17.0']
 
 _LOGGER = logging.getLogger(__name__)
 
@@ -186,12 +186,16 @@ def _discover_mysensors_platform(hass, platform, new_devices):
 
 async def _gw_start(hass, gateway):
     """Start the gateway."""
+    # Don't use hass.async_create_task to avoid holding up setup indefinitely.
+    connect_task = hass.loop.create_task(gateway.start())
+
     @callback
     def gw_stop(event):
         """Trigger to stop the gateway."""
         hass.async_add_job(gateway.stop())
+        if not connect_task.done():
+            connect_task.cancel()
 
-    await gateway.start()
     hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, gw_stop)
     if gateway.device == 'mqtt':
         # Gatways connected via mqtt doesn't send gateway ready message.
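The comment above gives the reasoning: scheduling gateway.start() as a loop task keeps component setup from blocking on a connection that may never complete, and gw_stop() cancels the pending attempt at shutdown. A standalone sketch of that schedule-then-cancel pattern (the coroutine names here are illustrative, not part of the component):

    import asyncio

    async def connect_forever():
        # Stand-in for gateway.start(): a connect attempt that may never return.
        await asyncio.sleep(3600)

    async def main():
        # Schedule the attempt without awaiting it, as _gw_start() now does.
        connect_task = asyncio.ensure_future(connect_forever())

        await asyncio.sleep(0.1)  # setup carries on in the meantime

        # At shutdown, cancel the attempt if it has not finished, as gw_stop() does.
        if not connect_task.done():
            connect_task.cancel()

    asyncio.get_event_loop().run_until_complete(main())
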
@@ -2,6 +2,8 @@
     "config": {
         "abort": {
             "already_setup": "Sie k\u00f6nnen nur ein einziges Nest-Konto konfigurieren.",
+            "authorize_url_fail": "Unbekannter Fehler beim Erstellen der Authorisierungs-URL",
+            "authorize_url_timeout": "Zeit\u00fcberschreitung beim Erstellen der Authorisierungs-URL",
             "no_flows": "Sie m\u00fcssen Nest konfigurieren, bevor Sie sich authentifizieren k\u00f6nnen. [Bitte lesen Sie die Anweisungen] (https://www.home-assistant.io/components/nest/)."
         },
         "error": {
@@ -4,19 +4,21 @@ Support for Nest devices.
 For more details about this component, please refer to the documentation at
 https://home-assistant.io/components/nest/
 """
-from concurrent.futures import ThreadPoolExecutor
 import logging
 import socket
 from datetime import datetime, timedelta
+import threading
 
 import voluptuous as vol
 
+from homeassistant import config_entries
 from homeassistant.const import (
     CONF_STRUCTURE, CONF_FILENAME, CONF_BINARY_SENSORS, CONF_SENSORS,
     CONF_MONITORED_CONDITIONS,
     EVENT_HOMEASSISTANT_START, EVENT_HOMEASSISTANT_STOP)
+from homeassistant.core import callback
 from homeassistant.helpers import config_validation as cv
-from homeassistant.helpers.dispatcher import async_dispatcher_send, \
+from homeassistant.helpers.dispatcher import dispatcher_send, \
     async_dispatcher_connect
 from homeassistant.helpers.entity import Entity
 
@@ -70,24 +72,25 @@ CONFIG_SCHEMA = vol.Schema({
 }, extra=vol.ALLOW_EXTRA)
 
 
-async def async_nest_update_event_broker(hass, nest):
+def nest_update_event_broker(hass, nest):
     """
     Dispatch SIGNAL_NEST_UPDATE to devices when nest stream API received data.
 
-    nest.update_event.wait will block the thread in most of time,
-    so specific an executor to save default thread pool.
+    Runs in its own thread.
     """
     _LOGGER.debug("listening nest.update_event")
-    with ThreadPoolExecutor(max_workers=1) as executor:
-        while True:
-            await hass.loop.run_in_executor(executor, nest.update_event.wait)
-            if hass.is_running:
-                nest.update_event.clear()
-                _LOGGER.debug("dispatching nest data update")
-                async_dispatcher_send(hass, SIGNAL_NEST_UPDATE)
-            else:
-                _LOGGER.debug("stop listening nest.update_event")
-                return
+
+    while hass.is_running:
+        nest.update_event.wait()
+
+        if not hass.is_running:
+            break
+
+        nest.update_event.clear()
+        _LOGGER.debug("dispatching nest data update")
+        dispatcher_send(hass, SIGNAL_NEST_UPDATE)
+
+    _LOGGER.debug("stop listening nest.update_event")
 
 
 async def async_setup(hass, config):
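With the executor gone, the broker is a plain blocking loop around threading.Event.wait(); shutdown works because shut_down() (further down in this diff) sets that same event to wake the thread while hass.is_running is already false. A self-contained sketch of that wake-to-exit pattern, with an illustrative flag standing in for hass.is_running:

    import threading

    update_event = threading.Event()
    running = True  # stands in for hass.is_running

    def broker():
        # Mirrors nest_update_event_broker(): block until data arrives,
        # dispatch, and exit once the running flag is cleared.
        while running:
            update_event.wait()
            if not running:
                break
            update_event.clear()
            print('dispatching update')  # dispatcher_send(...) in the component

    listener = threading.Thread(name='update listener', target=broker)
    listener.start()

    update_event.set()   # simulate data arriving from the stream API
    running = False
    update_event.set()   # wake the thread so it can observe shutdown
    listener.join()
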
@@ -103,7 +106,8 @@ async def async_setup(hass, config):
     access_token_cache_file = hass.config.path(filename)
 
     hass.async_add_job(hass.config_entries.flow.async_init(
-        DOMAIN, source='import', data={
+        DOMAIN, context={'source': config_entries.SOURCE_IMPORT},
+        data={
             'nest_conf_path': access_token_cache_file,
         }
     ))
@@ -165,16 +169,21 @@ async def async_setup_entry(hass, entry):
     hass.services.async_register(
         DOMAIN, 'set_mode', set_mode, schema=AWAY_SCHEMA)
 
+    @callback
     def start_up(event):
         """Start Nest update event listener."""
-        hass.async_add_job(async_nest_update_event_broker, hass, nest)
+        threading.Thread(
+            name='Nest update listener',
+            target=nest_update_event_broker,
+            args=(hass, nest)
+        ).start()
 
     hass.bus.async_listen_once(EVENT_HOMEASSISTANT_START, start_up)
 
+    @callback
     def shut_down(event):
         """Stop Nest update event listener."""
-        if nest:
-            nest.update_event.set()
+        nest.update_event.set()
 
     hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, shut_down)
 
@@ -58,6 +58,10 @@ class NestFlowHandler(data_entry_flow.FlowHandler):
         """Initialize the Nest config flow."""
         self.flow_impl = None
 
+    async def async_step_user(self, user_input=None):
+        """Handle a flow initialized by the user."""
+        return await self.async_step_init(user_input)
+
     async def async_step_init(self, user_input=None):
         """Handle a flow start."""
         flows = self.hass.data.get(DATA_FLOW_IMPL, {})
@@ -26,7 +26,7 @@ from homeassistant.const import (
 from homeassistant.helpers import config_validation as cv
 from homeassistant.util import ensure_unique_string
 
-REQUIREMENTS = ['pywebpush==1.6.0', 'PyJWT==1.6.0']
+REQUIREMENTS = ['pywebpush==1.6.0']
 
 DEPENDENCIES = ['frontend']
 
@@ -13,7 +13,7 @@ from homeassistant.components.notify import (
 from homeassistant.const import CONF_ACCESS_TOKEN
 import homeassistant.helpers.config_validation as cv
 
-REQUIREMENTS = ['Mastodon.py==1.3.0']
+REQUIREMENTS = ['Mastodon.py==1.3.1']
 
 _LOGGER = logging.getLogger(__name__)
 
@@ -1,104 +0,0 @@
-"""
-Telstra API platform for notify component.
-
-For more details about this platform, please refer to the documentation at
-https://home-assistant.io/components/notify.telstra/
-"""
-import logging
-
-from aiohttp.hdrs import CONTENT_TYPE, AUTHORIZATION
-import requests
-import voluptuous as vol
-
-from homeassistant.components.notify import (
-    ATTR_TITLE, PLATFORM_SCHEMA, BaseNotificationService)
-from homeassistant.const import CONTENT_TYPE_JSON
-import homeassistant.helpers.config_validation as cv
-
-_LOGGER = logging.getLogger(__name__)
-
-CONF_CONSUMER_KEY = 'consumer_key'
-CONF_CONSUMER_SECRET = 'consumer_secret'
-CONF_PHONE_NUMBER = 'phone_number'
-
-PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
-    vol.Required(CONF_CONSUMER_KEY): cv.string,
-    vol.Required(CONF_CONSUMER_SECRET): cv.string,
-    vol.Required(CONF_PHONE_NUMBER): cv.string,
-})
-
-
-def get_service(hass, config, discovery_info=None):
-    """Get the Telstra SMS API notification service."""
-    consumer_key = config.get(CONF_CONSUMER_KEY)
-    consumer_secret = config.get(CONF_CONSUMER_SECRET)
-    phone_number = config.get(CONF_PHONE_NUMBER)
-
-    if _authenticate(consumer_key, consumer_secret) is False:
-        _LOGGER.exception("Error obtaining authorization from Telstra API")
-        return None
-
-    return TelstraNotificationService(
-        consumer_key, consumer_secret, phone_number)
-
-
-class TelstraNotificationService(BaseNotificationService):
-    """Implementation of a notification service for the Telstra SMS API."""
-
-    def __init__(self, consumer_key, consumer_secret, phone_number):
-        """Initialize the service."""
-        self._consumer_key = consumer_key
-        self._consumer_secret = consumer_secret
-        self._phone_number = phone_number
-
-    def send_message(self, message="", **kwargs):
-        """Send a message to a user."""
-        title = kwargs.get(ATTR_TITLE)
-
-        # Retrieve authorization first
-        token_response = _authenticate(
-            self._consumer_key, self._consumer_secret)
-        if token_response is False:
-            _LOGGER.exception("Error obtaining authorization from Telstra API")
-            return
-
-        # Send the SMS
-        if title:
-            text = '{} {}'.format(title, message)
-        else:
-            text = message
-
-        message_data = {
-            'to': self._phone_number,
-            'body': text,
-        }
-        message_resource = 'https://api.telstra.com/v1/sms/messages'
-        message_headers = {
-            CONTENT_TYPE: CONTENT_TYPE_JSON,
-            AUTHORIZATION: 'Bearer {}'.format(token_response['access_token']),
-        }
-        message_response = requests.post(
-            message_resource, headers=message_headers, json=message_data,
-            timeout=10)
-
-        if message_response.status_code != 202:
-            _LOGGER.exception("Failed to send SMS. Status code: %d",
-                              message_response.status_code)
-
-
-def _authenticate(consumer_key, consumer_secret):
-    """Authenticate with the Telstra API."""
-    token_data = {
-        'client_id': consumer_key,
-        'client_secret': consumer_secret,
-        'grant_type': 'client_credentials',
-        'scope': 'SMS'
-    }
-    token_resource = 'https://api.telstra.com/v1/oauth/token'
-    token_response = requests.get(
-        token_resource, params=token_data, timeout=10).json()
-
-    if 'error' in token_response:
-        return False
-
-    return token_response
@@ -2,9 +2,10 @@
 from homeassistant.core import callback
 from homeassistant.loader import bind_hass
 
-from .const import STEPS, STEP_USER, DOMAIN
+from .const import DOMAIN, STEP_USER, STEPS
 
 DEPENDENCIES = ['http']
 
 STORAGE_KEY = DOMAIN
 STORAGE_VERSION = 1
 
@@ -21,7 +22,7 @@ def async_is_onboarded(hass):
 
 
 async def async_setup(hass, config):
-    """Set up the onboard component."""
+    """Set up the onboarding component."""
     store = hass.helpers.storage.Store(STORAGE_VERSION, STORAGE_KEY)
     data = await store.async_load()
 
@@ -3,21 +3,21 @@ import asyncio
 
 import voluptuous as vol
 
-from homeassistant.core import callback
-from homeassistant.components.http.view import HomeAssistantView
 from homeassistant.components.http.data_validator import RequestDataValidator
+from homeassistant.components.http.view import HomeAssistantView
+from homeassistant.core import callback
 
-from .const import DOMAIN, STEPS, STEP_USER
+from .const import DOMAIN, STEP_USER, STEPS
 
 
 async def async_setup(hass, data, store):
-    """Setup onboarding."""
+    """Set up the onboarding view."""
     hass.http.register_view(OnboardingView(data, store))
     hass.http.register_view(UserOnboardingView(data, store))
 
 
 class OnboardingView(HomeAssistantView):
-    """Returns the onboarding status."""
+    """Return the onboarding status."""
 
     requires_auth = False
     url = '/api/onboarding'
homeassistant/components/openuv.py (new file, 182 lines)
@@ -0,0 +1,182 @@
+"""
+Support for data from openuv.io.
+
+For more details about this component, please refer to the documentation at
+https://home-assistant.io/components/openuv/
+"""
+import logging
+from datetime import timedelta
+
+import voluptuous as vol
+
+from homeassistant.const import (
+    ATTR_ATTRIBUTION, CONF_API_KEY, CONF_BINARY_SENSORS, CONF_ELEVATION,
+    CONF_LATITUDE, CONF_LONGITUDE, CONF_MONITORED_CONDITIONS,
+    CONF_SCAN_INTERVAL, CONF_SENSORS)
+from homeassistant.helpers import (
+    aiohttp_client, config_validation as cv, discovery)
+from homeassistant.helpers.dispatcher import async_dispatcher_send
+from homeassistant.helpers.entity import Entity
+from homeassistant.helpers.event import async_track_time_interval
+
+REQUIREMENTS = ['pyopenuv==1.0.1']
+_LOGGER = logging.getLogger(__name__)
+
+DOMAIN = 'openuv'
+
+DATA_PROTECTION_WINDOW = 'protection_window'
+DATA_UV = 'uv'
+
+DEFAULT_ATTRIBUTION = 'Data provided by OpenUV'
+DEFAULT_SCAN_INTERVAL = timedelta(minutes=30)
+
+NOTIFICATION_ID = 'openuv_notification'
+NOTIFICATION_TITLE = 'OpenUV Component Setup'
+
+TOPIC_UPDATE = '{0}_data_update'.format(DOMAIN)
+
+TYPE_CURRENT_OZONE_LEVEL = 'current_ozone_level'
+TYPE_CURRENT_UV_INDEX = 'current_uv_index'
+TYPE_MAX_UV_INDEX = 'max_uv_index'
+TYPE_PROTECTION_WINDOW = 'uv_protection_window'
+TYPE_SAFE_EXPOSURE_TIME_1 = 'safe_exposure_time_type_1'
+TYPE_SAFE_EXPOSURE_TIME_2 = 'safe_exposure_time_type_2'
+TYPE_SAFE_EXPOSURE_TIME_3 = 'safe_exposure_time_type_3'
+TYPE_SAFE_EXPOSURE_TIME_4 = 'safe_exposure_time_type_4'
+TYPE_SAFE_EXPOSURE_TIME_5 = 'safe_exposure_time_type_5'
+TYPE_SAFE_EXPOSURE_TIME_6 = 'safe_exposure_time_type_6'
+
+BINARY_SENSORS = {
+    TYPE_PROTECTION_WINDOW: ('Protection Window', 'mdi:sunglasses')
+}
+
+BINARY_SENSOR_SCHEMA = vol.Schema({
+    vol.Optional(CONF_MONITORED_CONDITIONS, default=list(BINARY_SENSORS)):
+        vol.All(cv.ensure_list, [vol.In(BINARY_SENSORS)])
+})
+
+SENSORS = {
+    TYPE_CURRENT_OZONE_LEVEL: (
+        'Current Ozone Level', 'mdi:vector-triangle', 'du'),
+    TYPE_CURRENT_UV_INDEX: ('Current UV Index', 'mdi:weather-sunny', 'index'),
+    TYPE_MAX_UV_INDEX: ('Max UV Index', 'mdi:weather-sunny', 'index'),
+    TYPE_SAFE_EXPOSURE_TIME_1: (
+        'Skin Type 1 Safe Exposure Time', 'mdi:timer', 'minutes'),
+    TYPE_SAFE_EXPOSURE_TIME_2: (
+        'Skin Type 2 Safe Exposure Time', 'mdi:timer', 'minutes'),
+    TYPE_SAFE_EXPOSURE_TIME_3: (
+        'Skin Type 3 Safe Exposure Time', 'mdi:timer', 'minutes'),
+    TYPE_SAFE_EXPOSURE_TIME_4: (
+        'Skin Type 4 Safe Exposure Time', 'mdi:timer', 'minutes'),
+    TYPE_SAFE_EXPOSURE_TIME_5: (
+        'Skin Type 5 Safe Exposure Time', 'mdi:timer', 'minutes'),
+    TYPE_SAFE_EXPOSURE_TIME_6: (
+        'Skin Type 6 Safe Exposure Time', 'mdi:timer', 'minutes'),
+}
+
+SENSOR_SCHEMA = vol.Schema({
+    vol.Optional(CONF_MONITORED_CONDITIONS, default=list(SENSORS)):
+        vol.All(cv.ensure_list, [vol.In(SENSORS)])
+})
+
+CONFIG_SCHEMA = vol.Schema({
+    DOMAIN: vol.Schema({
+        vol.Required(CONF_API_KEY): cv.string,
+        vol.Optional(CONF_ELEVATION): float,
+        vol.Optional(CONF_LATITUDE): cv.latitude,
+        vol.Optional(CONF_LONGITUDE): cv.longitude,
+        vol.Optional(CONF_BINARY_SENSORS, default={}): BINARY_SENSOR_SCHEMA,
+        vol.Optional(CONF_SENSORS, default={}): SENSOR_SCHEMA,
+        vol.Optional(CONF_SCAN_INTERVAL, default=DEFAULT_SCAN_INTERVAL):
+            cv.time_period,
+    })
+}, extra=vol.ALLOW_EXTRA)
+
+
+async def async_setup(hass, config):
+    """Set up the OpenUV component."""
+    from pyopenuv import Client
+    from pyopenuv.errors import OpenUvError
+
+    conf = config[DOMAIN]
+    api_key = conf[CONF_API_KEY]
+    elevation = conf.get(CONF_ELEVATION, hass.config.elevation)
+    latitude = conf.get(CONF_LATITUDE, hass.config.latitude)
+    longitude = conf.get(CONF_LONGITUDE, hass.config.longitude)
+
+    try:
+        websession = aiohttp_client.async_get_clientsession(hass)
+        openuv = OpenUV(
+            Client(
+                api_key, latitude, longitude, websession, altitude=elevation),
+            conf[CONF_BINARY_SENSORS][CONF_MONITORED_CONDITIONS] +
+            conf[CONF_SENSORS][CONF_MONITORED_CONDITIONS])
+        await openuv.async_update()
+        hass.data[DOMAIN] = openuv
+    except OpenUvError as err:
+        _LOGGER.error('An error occurred: %s', str(err))
+        hass.components.persistent_notification.create(
+            'Error: {0}<br />'
+            'You will need to restart hass after fixing.'
+            ''.format(err),
+            title=NOTIFICATION_TITLE,
+            notification_id=NOTIFICATION_ID)
+        return False
+
+    for component, schema in [
+            ('binary_sensor', conf[CONF_BINARY_SENSORS]),
+            ('sensor', conf[CONF_SENSORS]),
+    ]:
+        hass.async_create_task(
+            discovery.async_load_platform(
+                hass, component, DOMAIN, schema, config))
+
+    async def refresh_sensors(event_time):
+        """Refresh OpenUV data."""
+        _LOGGER.debug('Refreshing OpenUV data')
+        await openuv.async_update()
+        async_dispatcher_send(hass, TOPIC_UPDATE)
+
+    async_track_time_interval(hass, refresh_sensors, conf[CONF_SCAN_INTERVAL])
+
+    return True
+
+
+class OpenUV:
+    """Define a generic OpenUV object."""
+
+    def __init__(self, client, monitored_conditions):
+        """Initialize."""
+        self._monitored_conditions = monitored_conditions
+        self.client = client
+        self.data = {}
+
+    async def async_update(self):
+        """Update sensor/binary sensor data."""
+        if TYPE_PROTECTION_WINDOW in self._monitored_conditions:
+            data = await self.client.uv_protection_window()
+            self.data[DATA_PROTECTION_WINDOW] = data
+
+        if any(c in self._monitored_conditions for c in SENSORS):
+            data = await self.client.uv_index()
+            self.data[DATA_UV] = data
+
+
+class OpenUvEntity(Entity):
+    """Define a generic OpenUV entity."""
+
+    def __init__(self, openuv):
+        """Initialize."""
+        self._attrs = {ATTR_ATTRIBUTION: DEFAULT_ATTRIBUTION}
+        self._name = None
+        self.openuv = openuv
+
+    @property
+    def device_state_attributes(self):
+        """Return the state attributes."""
+        return self._attrs
+
+    @property
+    def name(self):
+        """Return the name of the entity."""
+        return self._name
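The component itself only fetches and caches data; refresh_sensors() fans the result out via the TOPIC_UPDATE dispatcher signal, and the binary_sensor/sensor platforms are expected to react to it. A hedged sketch of how a sensor built on OpenUvEntity might subscribe, assuming the platform imports OpenUvEntity and TOPIC_UPDATE from this module (the class below is illustrative and not part of this commit):

    from homeassistant.core import callback
    from homeassistant.helpers.dispatcher import async_dispatcher_connect

    class ExampleUvSensor(OpenUvEntity):
        """Illustrative sensor that refreshes when OpenUV data is updated."""

        async def async_added_to_hass(self):
            """Register for the TOPIC_UPDATE signal sent after each refresh."""
            @callback
            def refresh():
                # Pull the freshly cached hass.data[DOMAIN] values into state.
                self.async_schedule_update_ha_state(True)

            async_dispatcher_connect(self.hass, TOPIC_UPDATE, refresh)
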
@@ -6,10 +6,11 @@ https://home-assistant.io/components/persistent_notification/
 """
 import asyncio
 import logging
+from typing import Awaitable
 
 import voluptuous as vol
 
-from homeassistant.core import callback
+from homeassistant.core import callback, HomeAssistant
 from homeassistant.exceptions import TemplateError
 from homeassistant.loader import bind_hass
 from homeassistant.helpers import config_validation as cv
@@ -58,7 +59,8 @@ def dismiss(hass, notification_id):
 
 @callback
 @bind_hass
-def async_create(hass, message, title=None, notification_id=None):
+def async_create(hass: HomeAssistant, message: str, title: str = None,
+                 notification_id: str = None) -> None:
     """Generate a notification."""
     data = {
         key: value for key, value in [
@@ -68,7 +70,8 @@ def async_create(hass, message, title=None, notification_id=None):
         ] if value is not None
     }
 
-    hass.async_add_job(hass.services.async_call(DOMAIN, SERVICE_CREATE, data))
+    hass.async_create_task(
+        hass.services.async_call(DOMAIN, SERVICE_CREATE, data))
 
 
 @callback
@@ -81,7 +84,7 @@ def async_dismiss(hass, notification_id):
 
 
 @asyncio.coroutine
-def async_setup(hass, config):
+def async_setup(hass: HomeAssistant, config: dict) -> Awaitable[bool]:
     """Set up the persistent notification component."""
     @callback
     def create_service(call):
@@ -114,6 +114,27 @@ def _drop_index(engine, table_name, index_name):
                 "critical operation.", index_name, table_name)
 
 
+def _add_columns(engine, table_name, columns_def):
+    """Add columns to a table."""
+    from sqlalchemy import text
+    from sqlalchemy.exc import SQLAlchemyError
+
+    columns_def = ['ADD COLUMN {}'.format(col_def) for col_def in columns_def]
+
+    try:
+        engine.execute(text("ALTER TABLE {table} {columns_def}".format(
+            table=table_name,
+            columns_def=', '.join(columns_def))))
+        return
+    except SQLAlchemyError:
+        pass
+
+    for column_def in columns_def:
+        engine.execute(text("ALTER TABLE {table} {column_def}".format(
+            table=table_name,
+            column_def=column_def)))
+
+
 def _apply_update(engine, new_version, old_version):
     """Perform operations to bring schema up to date."""
     if new_version == 1:
@@ -146,6 +167,19 @@ def _apply_update(engine, new_version, old_version):
     elif new_version == 5:
         # Create supporting index for States.event_id foreign key
         _create_index(engine, "states", "ix_states_event_id")
+    elif new_version == 6:
+        _add_columns(engine, "events", [
+            'context_id CHARACTER(36)',
+            'context_user_id CHARACTER(36)',
+        ])
+        _create_index(engine, "events", "ix_events_context_id")
+        _create_index(engine, "events", "ix_events_context_user_id")
+        _add_columns(engine, "states", [
+            'context_id CHARACTER(36)',
+            'context_user_id CHARACTER(36)',
+        ])
+        _create_index(engine, "states", "ix_states_context_id")
+        _create_index(engine, "states", "ix_states_context_user_id")
     else:
         raise ValueError("No schema migration defined for version {}"
                          .format(new_version))
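For the version-6 upgrade above, _add_columns() first tries a single multi-column ALTER TABLE and only falls back to one statement per column if the database rejects it. The string it attempts for the events table can be reproduced directly from the code in this diff (illustrative print, not part of the migration):

    # Reproduce the first ALTER TABLE statement _add_columns() attempts.
    columns_def = ['context_id CHARACTER(36)', 'context_user_id CHARACTER(36)']
    columns_def = ['ADD COLUMN {}'.format(col_def) for col_def in columns_def]
    print("ALTER TABLE {table} {columns_def}".format(
        table='events', columns_def=', '.join(columns_def)))
    # Output (wrapped here for readability):
    # ALTER TABLE events ADD COLUMN context_id CHARACTER(36),
    #     ADD COLUMN context_user_id CHARACTER(36)
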
@@ -9,14 +9,15 @@ from sqlalchemy import (
 from sqlalchemy.ext.declarative import declarative_base
 
 import homeassistant.util.dt as dt_util
-from homeassistant.core import Event, EventOrigin, State, split_entity_id
+from homeassistant.core import (
+    Context, Event, EventOrigin, State, split_entity_id)
 from homeassistant.remote import JSONEncoder
 
 # SQLAlchemy Schema
 # pylint: disable=invalid-name
 Base = declarative_base()
 
-SCHEMA_VERSION = 5
+SCHEMA_VERSION = 6
 
 _LOGGER = logging.getLogger(__name__)
 
@@ -31,6 +32,8 @@ class Events(Base):  # type: ignore
     origin = Column(String(32))
     time_fired = Column(DateTime(timezone=True), index=True)
     created = Column(DateTime(timezone=True), default=datetime.utcnow)
+    context_id = Column(String(36), index=True)
+    context_user_id = Column(String(36), index=True)
 
     @staticmethod
     def from_event(event):
@@ -38,16 +41,23 @@ class Events(Base):  # type: ignore
         return Events(event_type=event.event_type,
                       event_data=json.dumps(event.data, cls=JSONEncoder),
                       origin=str(event.origin),
-                      time_fired=event.time_fired)
+                      time_fired=event.time_fired,
+                      context_id=event.context.id,
+                      context_user_id=event.context.user_id)
 
     def to_native(self):
         """Convert to a natve HA Event."""
+        context = Context(
+            id=self.context_id,
+            user_id=self.context_user_id
+        )
         try:
             return Event(
                 self.event_type,
                 json.loads(self.event_data),
                 EventOrigin(self.origin),
-                _process_timestamp(self.time_fired)
+                _process_timestamp(self.time_fired),
+                context=context,
             )
         except ValueError:
             # When json.loads fails
@@ -69,6 +79,8 @@ class States(Base):  # type: ignore
     last_updated = Column(DateTime(timezone=True), default=datetime.utcnow,
                           index=True)
     created = Column(DateTime(timezone=True), default=datetime.utcnow)
+    context_id = Column(String(36), index=True)
+    context_user_id = Column(String(36), index=True)
 
     __table_args__ = (
         # Used for fetching the state of entities at a specific time
@@ -82,7 +94,11 @@ class States(Base):  # type: ignore
         entity_id = event.data['entity_id']
         state = event.data.get('new_state')
 
-        dbstate = States(entity_id=entity_id)
+        dbstate = States(
+            entity_id=entity_id,
+            context_id=event.context.id,
+            context_user_id=event.context.user_id,
+        )
 
         # State got deleted
         if state is None:
@@ -103,12 +119,17 @@ class States(Base):  # type: ignore
 
     def to_native(self):
         """Convert to an HA state object."""
+        context = Context(
+            id=self.context_id,
+            user_id=self.context_user_id
+        )
         try:
             return State(
                 self.entity_id, self.state,
                 json.loads(self.attributes),
                 _process_timestamp(self.last_changed),
-                _process_timestamp(self.last_updated)
+                _process_timestamp(self.last_updated),
+                context=context,
             )
         except ValueError:
             # When json.loads fails
@@ -0,0 +1,6 @@
+{
+    "state": {
+        "first_quarter": "\u0627\u0644\u0631\u0628\u0639 \u0627\u0644\u0623\u0648\u0644",
+        "full_moon": "\u0627\u0644\u0642\u0645\u0631 \u0627\u0644\u0643\u0627\u0645\u0644"
+    }
+}
homeassistant/components/sensor/.translations/moon.ca.json (new file, 12 lines)
@@ -0,0 +1,12 @@
+{
+    "state": {
+        "first_quarter": "Quart creixent",
+        "full_moon": "Lluna plena",
+        "last_quarter": "Quart minvant",
+        "new_moon": "Lluna nova",
+        "waning_crescent": "Lluna vella minvant",
+        "waning_gibbous": "Gibosa minvant",
+        "waxing_crescent": "Lluna nova visible",
+        "waxing_gibbous": "Gibosa creixent"
+    }
+}
homeassistant/components/sensor/.translations/moon.de.json (new file, 12 lines)
@@ -0,0 +1,12 @@
+{
+    "state": {
+        "first_quarter": "Erstes Viertel",
+        "full_moon": "Vollmond",
+        "last_quarter": "Letztes Viertel",
+        "new_moon": "Neumond",
+        "waning_crescent": "Abnehmende Sichel",
+        "waning_gibbous": "Drittes Viertel",
+        "waxing_crescent": " Zunehmende Sichel",
+        "waxing_gibbous": "Zweites Viertel"
+    }
+}
homeassistant/components/sensor/.translations/moon.en.json (new file, 12 lines)
@@ -0,0 +1,12 @@
+{
+    "state": {
+        "first_quarter": "First quarter",
+        "full_moon": "Full moon",
+        "last_quarter": "Last quarter",
+        "new_moon": "New moon",
+        "waning_crescent": "Waning crescent",
+        "waning_gibbous": "Waning gibbous",
+        "waxing_crescent": "Waxing crescent",
+        "waxing_gibbous": "Waxing gibbous"
+    }
+}
@@ -0,0 +1,12 @@
+{
+    "state": {
+        "first_quarter": "Cuarto creciente",
+        "full_moon": "Luna llena",
+        "last_quarter": "Cuarto menguante",
+        "new_moon": "Luna nueva",
+        "waning_crescent": "Luna menguante",
+        "waning_gibbous": "Luna menguante gibosa",
+        "waxing_crescent": "Luna creciente",
+        "waxing_gibbous": "Luna creciente gibosa"
+    }
+}
homeassistant/components/sensor/.translations/moon.fr.json (new file, 12 lines)
@@ -0,0 +1,12 @@
+{
+    "state": {
+        "first_quarter": "Premier quartier",
+        "full_moon": "Pleine lune",
+        "last_quarter": "Dernier quartier",
+        "new_moon": "Nouvelle lune",
+        "waning_crescent": "Dernier croissant",
+        "waning_gibbous": "Gibbeuse d\u00e9croissante",
+        "waxing_crescent": "Premier croissant",
+        "waxing_gibbous": "Gibbeuse croissante"
+    }
+}
homeassistant/components/sensor/.translations/moon.ko.json (new file, 12 lines)
@@ -0,0 +1,12 @@
+{
+    "state": {
+        "first_quarter": "\ubc18\ub2ec(\ucc28\uc624\ub974\ub294)",
+        "full_moon": "\ubcf4\ub984\ub2ec",
+        "last_quarter": "\ubc18\ub2ec(\uc904\uc5b4\ub4dc\ub294)",
+        "new_moon": "\uc0ad\uc6d4",
+        "waning_crescent": "\uadf8\ubbd0\ub2ec",
+        "waning_gibbous": "\ud558\ud604\ub2ec",
+        "waxing_crescent": "\ucd08\uc2b9\ub2ec",
+        "waxing_gibbous": "\uc0c1\ud604\ub2ec"
+    }
+}
homeassistant/components/sensor/.translations/moon.nl.json (new file, 12 lines)
@@ -0,0 +1,12 @@
+{
+    "state": {
+        "first_quarter": "Eerste kwartier",
+        "full_moon": "Volle maan",
+        "last_quarter": "Laatste kwartier",
+        "new_moon": "Nieuwe maan",
+        "waning_crescent": "Krimpende, sikkelvormige maan",
+        "waning_gibbous": "Krimpende, vooruitspringende maan",
+        "waxing_crescent": "Wassende, sikkelvormige maan",
+        "waxing_gibbous": "Wassende, vooruitspringende maan"
+    }
+}
homeassistant/components/sensor/.translations/moon.no.json (new file, 12 lines)
@@ -0,0 +1,12 @@
+{
+    "state": {
+        "first_quarter": "F\u00f8rste kvartdel",
+        "full_moon": "Fullm\u00e5ne",
+        "last_quarter": "Siste kvartdel",
+        "new_moon": "Nym\u00e5ne",
+        "waning_crescent": "Minkende halvm\u00e5ne",
+        "waning_gibbous": "Minkende trekvartm\u00e5ne",
+        "waxing_crescent": "Voksende halvm\u00e5ne",
+        "waxing_gibbous": "Voksende trekvartm\u00e5ne"
+    }
+}
homeassistant/components/sensor/.translations/moon.ru.json (new file, 12 lines)
@@ -0,0 +1,12 @@
+{
+    "state": {
+        "first_quarter": "\u041f\u0435\u0440\u0432\u0430\u044f \u0447\u0435\u0442\u0432\u0435\u0440\u0442\u044c",
+        "full_moon": "\u041f\u043e\u043b\u043d\u043e\u043b\u0443\u043d\u0438\u0435",
+        "last_quarter": "\u041f\u043e\u0441\u043b\u0435\u0434\u043d\u044f\u044f \u0447\u0435\u0442\u0432\u0435\u0440\u0442\u044c",
+        "new_moon": "\u041d\u043e\u0432\u043e\u043b\u0443\u043d\u0438\u0435",
+        "waning_crescent": "\u0421\u0442\u0430\u0440\u0430\u044f \u043b\u0443\u043d\u0430",
+        "waning_gibbous": "\u0423\u0431\u044b\u0432\u0430\u044e\u0449\u0430\u044f \u043b\u0443\u043d\u0430",
+        "waxing_crescent": "\u041c\u043e\u043b\u043e\u0434\u0430\u044f \u043b\u0443\u043d\u0430",
+        "waxing_gibbous": "\u041f\u0440\u0438\u0431\u044b\u0432\u0430\u044e\u0449\u0430\u044f \u043b\u0443\u043d\u0430"
+    }
+}
homeassistant/components/sensor/.translations/moon.sl.json (new file, 12 lines)
@@ -0,0 +1,12 @@
+{
+    "state": {
+        "first_quarter": "Prvi krajec",
+        "full_moon": "Polna luna",
+        "last_quarter": "Zadnji krajec",
+        "new_moon": "Mlaj",
+        "waning_crescent": "Zadnji izbo\u010dec",
+        "waning_gibbous": "Zadnji srpec",
+        "waxing_crescent": " Prvi izbo\u010dec",
+        "waxing_gibbous": "Prvi srpec"
+    }
+}
@@ -0,0 +1,12 @@
+{
+    "state": {
+        "first_quarter": "\u4e0a\u5f26\u6708",
+        "full_moon": "\u6ee1\u6708",
+        "last_quarter": "\u4e0b\u5f26\u6708",
+        "new_moon": "\u65b0\u6708",
+        "waning_crescent": "\u6b8b\u6708",
+        "waning_gibbous": "\u4e8f\u51f8\u6708",
+        "waxing_crescent": "\u5ce8\u7709\u6708",
+        "waxing_gibbous": "\u76c8\u51f8\u6708"
+    }
+}
@@ -0,0 +1,12 @@
+{
+    "state": {
+        "first_quarter": "\u4e0a\u5f26\u6708",
+        "full_moon": "\u6eff\u6708",
+        "last_quarter": "\u4e0b\u5f26\u6708",
+        "new_moon": "\u65b0\u6708",
+        "waning_crescent": "\u6b98\u6708",
+        "waning_gibbous": "\u8667\u51f8\u6708",
+        "waxing_crescent": "\u86fe\u7709\u6708",
+        "waxing_gibbous": "\u76c8\u51f8\u6708"
+    }
+}
@@ -22,7 +22,6 @@ _LOGGER = logging.getLogger(__name__)
 ATTR_CLOSE = 'close'
 ATTR_HIGH = 'high'
 ATTR_LOW = 'low'
-ATTR_VOLUME = 'volume'
 
 CONF_ATTRIBUTION = "Stock market information provided by Alpha Vantage"
 CONF_FOREIGN_EXCHANGE = 'foreign_exchange'
@@ -148,7 +147,6 @@ class AlphaVantageSensor(Entity):
             ATTR_CLOSE: self.values['4. close'],
             ATTR_HIGH: self.values['2. high'],
             ATTR_LOW: self.values['3. low'],
-            ATTR_VOLUME: self.values['5. volume'],
         }
 
     @property
homeassistant/components/sensor/enphase_envoy.py (new file, 107 lines)
@@ -0,0 +1,107 @@
+"""
+Support for Enphase Envoy solar energy monitor.
+
+For more details about this platform, please refer to the documentation at
+https://home-assistant.io/components/sensor.enphase_envoy/
+"""
+import logging
+
+import voluptuous as vol
+
+from homeassistant.helpers.entity import Entity
+from homeassistant.components.sensor import PLATFORM_SCHEMA
+import homeassistant.helpers.config_validation as cv
+from homeassistant.const import (CONF_IP_ADDRESS, CONF_MONITORED_CONDITIONS)
+
+
+REQUIREMENTS = ['envoy_reader==0.1']
+_LOGGER = logging.getLogger(__name__)
+
+SENSORS = {
+    "production": ("Envoy Current Energy Production", 'W'),
+    "daily_production": ("Envoy Today's Energy Production", "Wh"),
+    "7_days_production": ("Envoy Last Seven Days Energy Production", "Wh"),
+    "lifetime_production": ("Envoy Lifetime Energy Production", "Wh"),
+    "consumption": ("Envoy Current Energy Consumption", "W"),
+    "daily_consumption": ("Envoy Today's Energy Consumption", "Wh"),
+    "7_days_consumption": ("Envoy Last Seven Days Energy Consumption", "Wh"),
+    "lifetime_consumption": ("Envoy Lifetime Energy Consumption", "Wh")
+}
+
+
+ICON = 'mdi:flash'
+
+PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
+    vol.Required(CONF_IP_ADDRESS): cv.string,
+    vol.Optional(CONF_MONITORED_CONDITIONS, default=list(SENSORS)):
+        vol.All(cv.ensure_list, [vol.In(list(SENSORS))])})
+
+
+def setup_platform(hass, config, add_devices, discovery_info=None):
+    """Set up the Enphase Envoy sensor."""
+    ip_address = config[CONF_IP_ADDRESS]
+    monitored_conditions = config[CONF_MONITORED_CONDITIONS]
+
+    # Iterate through the list of sensors
+    for condition in monitored_conditions:
+        add_devices([Envoy(ip_address, condition, SENSORS[condition][0],
+                           SENSORS[condition][1])], True)
+
+
+class Envoy(Entity):
+    """Implementation of the Enphase Envoy sensors."""
+
+    def __init__(self, ip_address, sensor_type, name, unit):
+        """Initialize the sensor."""
+        self._ip_address = ip_address
+        self._name = name
+        self._unit_of_measurement = unit
+        self._type = sensor_type
+        self._state = None
+
+    @property
+    def name(self):
+        """Return the name of the sensor."""
+        return self._name
+
+    @property
+    def state(self):
+        """Return the state of the sensor."""
+        return self._state
+
+    @property
+    def unit_of_measurement(self):
+        """Return the unit of measurement of this entity, if any."""
+        return self._unit_of_measurement
+
+    @property
+    def icon(self):
+        """Icon to use in the frontend, if any."""
+        return ICON
+
+    def update(self):
+        """Get the energy production data from the Enphase Envoy."""
+        import envoy_reader
+
+        if self._type == "production":
+            self._state = int(envoy_reader.production(self._ip_address))
+        elif self._type == "daily_production":
+            self._state = int(envoy_reader.daily_production(self._ip_address))
+        elif self._type == "7_days_production":
+            self._state = int(envoy_reader.seven_days_production(
+                self._ip_address))
+        elif self._type == "lifetime_production":
+            self._state = int(envoy_reader.lifetime_production(
+                self._ip_address))
+
+        elif self._type == "consumption":
+            self._state = int(envoy_reader.consumption(self._ip_address))
+        elif self._type == "daily_consumption":
+            self._state = int(envoy_reader.daily_consumption(
+                self._ip_address))
+        elif self._type == "7_days_consumption":
+            self._state = int(envoy_reader.seven_days_consumption(
+                self._ip_address))
+        elif self._type == "lifetime_consumption":
+            self._state = int(envoy_reader.lifetime_consumption(
+                self._ip_address))
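The platform polls the Envoy locally through the blocking envoy_reader helpers, one call per monitored condition, inside Envoy.update(). A quick standalone check of the same calls used above (the IP address is a placeholder):

    # Same envoy_reader==0.1 calls as Envoy.update() above, run by hand.
    import envoy_reader

    envoy_ip = '192.168.1.2'  # placeholder address of the local Envoy
    print(int(envoy_reader.production(envoy_ip)), 'W being produced now')
    print(int(envoy_reader.daily_production(envoy_ip)), 'Wh produced today')
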
@@ -164,7 +164,7 @@ class IrishRailTransportData:
                           ATTR_TRAIN_TYPE: train.get('type')}
                self.info.append(train_data)
 
-        if not self.info or not self.info:
+        if not self.info:
            self.info = self._empty_train_data()
 
    def _empty_train_data(self):
Some files were not shown because too many files have changed in this diff.