Mirror of https://github.com/home-assistant/core.git, synced 2025-09-23 03:49:31 +00:00

Compare commits: 44 commits
SHA1:

a309a00929
55be5bf880
7b37dcd8ed
e36bdd717a
fa650b648c
ac2310e7f9
aee5c16803
5f0816ea25
6a6037790f
d2b0c35319
d707a1b072
ca12db9271
346a4b399d
2090252936
a28091e94a
ae8cb0ccdf
06a608e342
9af95e8577
29a9781bf7
877eddf43d
88e3e73bb4
3aa1bcbb77
f973b35cef
4e08aa8b05
8e917ccf73
0b62011626
d520a02b8c
1e469b39ad
c2f615839d
657bf33e32
0ca87007fd
d0d9d853f2
8348878e7e
b70be5f2f2
fddb565e4c
f3e6820042
ae98f13181
ab38e7d98a
9797b09d44
4908d4358c
67d728fc50
912409ed0c
ac8c889b0f
67a721d39b

@@ -21,6 +21,10 @@ from homeassistant.helpers.dispatcher import async_dispatcher_connect
from .axis_base import AxisEntityBase
from .const import DOMAIN as AXIS_DOMAIN

AXIS_IMAGE = "http://{host}:{port}/axis-cgi/jpg/image.cgi"
AXIS_VIDEO = "http://{host}:{port}/axis-cgi/mjpg/video.cgi"
AXIS_STREAM = "rtsp://{user}:{password}@{host}/axis-media/media.amp?videocodec=h264"

async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up the Axis camera video stream."""
@@ -32,13 +36,11 @@ async def async_setup_entry(hass, config_entry, async_add_entities):
CONF_NAME: config_entry.data[CONF_NAME],
CONF_USERNAME: config_entry.data[CONF_USERNAME],
CONF_PASSWORD: config_entry.data[CONF_PASSWORD],
CONF_MJPEG_URL: (
f"http://{config_entry.data[CONF_HOST]}"
f":{config_entry.data[CONF_PORT]}/axis-cgi/mjpg/video.cgi"
CONF_MJPEG_URL: AXIS_VIDEO.format(
host=config_entry.data[CONF_HOST], port=config_entry.data[CONF_PORT],
),
CONF_STILL_IMAGE_URL: (
f"http://{config_entry.data[CONF_HOST]}"
f":{config_entry.data[CONF_PORT]}/axis-cgi/jpg/image.cgi"
CONF_STILL_IMAGE_URL: AXIS_IMAGE.format(
host=config_entry.data[CONF_HOST], port=config_entry.data[CONF_PORT],
),
CONF_AUTHENTICATION: HTTP_DIGEST_AUTHENTICATION,
}
@@ -70,19 +72,17 @@ class AxisCamera(AxisEntityBase, MjpegCamera):

async def stream_source(self):
"""Return the stream source."""
return (
f"rtsp://{self.device.config_entry.data[CONF_USERNAME]}´"
f":{self.device.config_entry.data[CONF_PASSWORD]}"
f"@{self.device.host}/axis-media/media.amp?videocodec=h264"
return AXIS_STREAM.format(
user=self.device.config_entry.data[CONF_USERNAME],
password=self.device.config_entry.data[CONF_PASSWORD],
host=self.device.host,
)

def _new_address(self):
"""Set new device address for video stream."""
port = self.device.config_entry.data[CONF_PORT]
self._mjpeg_url = (f"http://{self.device.host}:{port}/axis-cgi/mjpg/video.cgi",)
self._still_image_url = (
f"http://{self.device.host}:{port}/axis-cgi/jpg/image.cgi"
)
self._mjpeg_url = AXIS_VIDEO.format(host=self.device.host, port=port)
self._still_image_url = AXIS_IMAGE.format(host=self.device.host, port=port)

@property
def unique_id(self):

@@ -141,7 +141,7 @@ async def async_request_stream(hass, entity_id, fmt):
source,
fmt=fmt,
keepalive=camera_prefs.preload_stream,
options=camera.options,
options=camera.stream_options,
)

@@ -3,7 +3,7 @@
"name": "Google Cast",
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/cast",
"requirements": ["pychromecast==4.1.1"],
"requirements": ["pychromecast==4.2.0"],
"dependencies": [],
"after_dependencies": ["cloud"],
"zeroconf": ["_googlecast._tcp.local."],

@@ -7,9 +7,9 @@ from .const import ATTRIBUTION, OPTION_WORLDWIDE

SENSORS = {
"confirmed": "mdi:emoticon-neutral-outline",
"current": "mdi:emoticon-frown-outline",
"current": "mdi:emoticon-sad-outline",
"recovered": "mdi:emoticon-happy-outline",
"deaths": "mdi:emoticon-dead-outline",
"deaths": "mdi:emoticon-cry-outline",
}

@@ -3,11 +3,12 @@
"name": "Home Assistant Frontend",
"documentation": "https://www.home-assistant.io/integrations/frontend",
"requirements": [
"home-assistant-frontend==20200316.1"
"home-assistant-frontend==20200318.1"
],
"dependencies": [
"api",
"auth",
"device_automation",
"http",
"lovelace",
"onboarding",
@@ -19,4 +20,4 @@
"@home-assistant/frontend"
],
"quality_scale": "internal"
}
}

@@ -190,16 +190,15 @@ async def async_setup(hass, config):

hass.http.register_view(HassIOView(host, websession))

if "frontend" in hass.config.components:
await hass.components.panel_custom.async_register_panel(
frontend_url_path="hassio",
webcomponent_name="hassio-main",
sidebar_title="Supervisor",
sidebar_icon="hass:home-assistant",
js_url="/api/hassio/app/entrypoint.js",
embed_iframe=True,
require_admin=True,
)
await hass.components.panel_custom.async_register_panel(
frontend_url_path="hassio",
webcomponent_name="hassio-main",
sidebar_title="Supervisor",
sidebar_icon="hass:home-assistant",
js_url="/api/hassio/app/entrypoint.js",
embed_iframe=True,
require_admin=True,
)

await hassio.update_hass_api(config.get("http", {}), refresh_token)

@@ -14,7 +14,7 @@
"busy_error": "Device refused to add pairing as it is already pairing with another controller.",
"max_peers_error": "Device refused to add pairing as it has no free pairing storage.",
"max_tries_error": "Device refused to add pairing as it has received more than 100 unsuccessful authentication attempts.",
"pairing_failed": "An unhandled error occured while attempting to pair with this device. This may be a temporary failure or your device may not be supported currently.",
"pairing_failed": "An unhandled error occurred while attempting to pair with this device. This may be a temporary failure or your device may not be supported currently.",
"unable_to_pair": "Unable to pair, please try again.",
"unknown_error": "Device reported an unknown error. Pairing failed."
},

@@ -3,7 +3,7 @@
"name": "HomeKit Controller",
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/homekit_controller",
"requirements": ["aiohomekit[IP]==0.2.29.1"],
"requirements": ["aiohomekit[IP]==0.2.29.2"],
"dependencies": [],
"zeroconf": ["_hap._tcp.local."],
"codeowners": ["@Jc2k"]

@@ -25,7 +25,7 @@
"max_peers_error": "Device refused to add pairing as it has no free pairing storage.",
"busy_error": "Device refused to add pairing as it is already pairing with another controller.",
"max_tries_error": "Device refused to add pairing as it has received more than 100 unsuccessful authentication attempts.",
"pairing_failed": "An unhandled error occured while attempting to pair with this device. This may be a temporary failure or your device may not be supported currently."
"pairing_failed": "An unhandled error occurred while attempting to pair with this device. This may be a temporary failure or your device may not be supported currently."
},
"abort": {
"no_devices": "No unpaired devices could be found",

@@ -97,6 +97,7 @@ class IcloudAccount:
self._owner_fullname = None
self._family_members_fullname = {}
self._devices = {}
self._retried_fetch = False

self.listeners = []

@@ -122,10 +123,6 @@ class IcloudAccount:
_LOGGER.error("No iCloud device found")
raise ConfigEntryNotReady

if DEVICE_STATUS_CODES.get(list(api_devices)[0][DEVICE_STATUS]) == "pending":
_LOGGER.warning("Pending devices, trying again ...")
raise ConfigEntryNotReady

self._owner_fullname = f"{user_info['firstName']} {user_info['lastName']}"

self._family_members_fullname = {}
@@ -157,28 +154,15 @@ class IcloudAccount:
)
return

if DEVICE_STATUS_CODES.get(list(api_devices)[0][DEVICE_STATUS]) == "pending":
_LOGGER.warning("Pending devices, trying again in 15s")
self._fetch_interval = 0.25
dispatcher_send(self.hass, self.signal_device_update)
track_point_in_utc_time(
self.hass,
self.keep_alive,
utcnow() + timedelta(minutes=self._fetch_interval),
)
return

# Gets devices infos
new_device = False
for device in api_devices:
status = device.status(DEVICE_STATUS_SET)
device_id = status[DEVICE_ID]
device_name = status[DEVICE_NAME]
device_status = DEVICE_STATUS_CODES.get(status[DEVICE_STATUS], "error")

if (
device_status == "pending"
or status[DEVICE_BATTERY_STATUS] == "Unknown"
status[DEVICE_BATTERY_STATUS] == "Unknown"
or status.get(DEVICE_BATTERY_LEVEL) is None
):
continue
@@ -198,7 +182,16 @@ class IcloudAccount:
self._devices[device_id].update(status)
new_device = True

self._fetch_interval = self._determine_interval()
if (
DEVICE_STATUS_CODES.get(list(api_devices)[0][DEVICE_STATUS]) == "pending"
and not self._retried_fetch
):
_LOGGER.warning("Pending devices, trying again in 15s")
self._fetch_interval = 0.25
self._retried_fetch = True
else:
self._fetch_interval = self._determine_interval()
self._retried_fetch = False

dispatcher_send(self.hass, self.signal_device_update)
if new_device:

@@ -3,7 +3,7 @@
"name": "Apple iCloud",
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/icloud",
"requirements": ["pyicloud==0.9.4"],
"requirements": ["pyicloud==0.9.5"],
"dependencies": [],
"codeowners": ["@Quentame"]
}

@@ -88,24 +88,22 @@ def _cv_input_text(cfg):
CONFIG_SCHEMA = vol.Schema(
{
DOMAIN: cv.schema_with_slug_keys(
vol.Any(
vol.All(
{
vol.Optional(CONF_NAME): cv.string,
vol.Optional(CONF_MIN, default=CONF_MIN_VALUE): vol.Coerce(int),
vol.Optional(CONF_MAX, default=CONF_MAX_VALUE): vol.Coerce(int),
vol.Optional(CONF_INITIAL, ""): cv.string,
vol.Optional(CONF_ICON): cv.icon,
vol.Optional(CONF_UNIT_OF_MEASUREMENT): cv.string,
vol.Optional(CONF_PATTERN): cv.string,
vol.Optional(CONF_MODE, default=MODE_TEXT): vol.In(
[MODE_TEXT, MODE_PASSWORD]
),
},
_cv_input_text,
),
None,
)
vol.All(
lambda value: value or {},
{
vol.Optional(CONF_NAME): cv.string,
vol.Optional(CONF_MIN, default=CONF_MIN_VALUE): vol.Coerce(int),
vol.Optional(CONF_MAX, default=CONF_MAX_VALUE): vol.Coerce(int),
vol.Optional(CONF_INITIAL, ""): cv.string,
vol.Optional(CONF_ICON): cv.icon,
vol.Optional(CONF_UNIT_OF_MEASUREMENT): cv.string,
vol.Optional(CONF_PATTERN): cv.string,
vol.Optional(CONF_MODE, default=MODE_TEXT): vol.In(
[MODE_TEXT, MODE_PASSWORD]
),
},
_cv_input_text,
),
)
},
extra=vol.ALLOW_EXTRA,
@@ -203,13 +201,6 @@ class InputText(RestoreEntity):
@classmethod
def from_yaml(cls, config: typing.Dict) -> "InputText":
"""Return entity instance initialized from yaml storage."""
# set defaults for empty config
config = {
CONF_MAX: CONF_MAX_VALUE,
CONF_MIN: CONF_MIN_VALUE,
CONF_MODE: MODE_TEXT,
**config,
}
input_text = cls(config)
input_text.entity_id = f"{DOMAIN}.{config[CONF_ID]}"
input_text.editable = False

@@ -332,16 +332,17 @@ class MikrotikHub:
async def async_add_options(self):
"""Populate default options for Mikrotik."""
if not self.config_entry.options:
data = dict(self.config_entry.data)
options = {
CONF_ARP_PING: self.config_entry.data.pop(CONF_ARP_PING, False),
CONF_FORCE_DHCP: self.config_entry.data.pop(CONF_FORCE_DHCP, False),
CONF_DETECTION_TIME: self.config_entry.data.pop(
CONF_ARP_PING: data.pop(CONF_ARP_PING, False),
CONF_FORCE_DHCP: data.pop(CONF_FORCE_DHCP, False),
CONF_DETECTION_TIME: data.pop(
CONF_DETECTION_TIME, DEFAULT_DETECTION_TIME
),
}

self.hass.config_entries.async_update_entry(
self.config_entry, options=options
self.config_entry, data=data, options=options
)

async def request_update(self):

@@ -28,6 +28,7 @@ from . import api, config_flow
from .const import (
AUTH,
CONF_CLOUDHOOK_URL,
DATA_DEVICE_IDS,
DATA_PERSONS,
DOMAIN,
OAUTH2_AUTHORIZE,
@@ -65,6 +66,7 @@ async def async_setup(hass: HomeAssistant, config: dict):
"""Set up the Netatmo component."""
hass.data[DOMAIN] = {}
hass.data[DOMAIN][DATA_PERSONS] = {}
hass.data[DOMAIN][DATA_DEVICE_IDS] = {}

if DOMAIN not in config:
return True
@@ -104,7 +106,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry):
webhook_unregister(hass, entry.data[CONF_WEBHOOK_ID])

async def register_webhook(event):
# Wait for the could integration to be ready
# Wait for the cloud integration to be ready
await asyncio.sleep(WAIT_FOR_CLOUD)

if CONF_WEBHOOK_ID not in entry.data:
@@ -112,6 +114,9 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry):
hass.config_entries.async_update_entry(entry, data=data)

if hass.components.cloud.async_active_subscription():
# Wait for cloud connection to be established
await asyncio.sleep(WAIT_FOR_CLOUD)

if CONF_CLOUDHOOK_URL not in entry.data:
webhook_url = await hass.components.cloud.async_create_cloudhook(
entry.data[CONF_WEBHOOK_ID]
@@ -144,6 +149,11 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry):

async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry):
"""Unload a config entry."""
if CONF_WEBHOOK_ID in entry.data:
await hass.async_add_executor_job(
hass.data[DOMAIN][entry.entry_id][AUTH].dropwebhook
)

unload_ok = all(
await asyncio.gather(
*[
@@ -152,14 +162,10 @@ async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry):
]
)
)

if unload_ok:
hass.data[DOMAIN].pop(entry.entry_id)

if CONF_WEBHOOK_ID in entry.data:
await hass.async_add_executor_job(
hass.data[DOMAIN][entry.entry_id][AUTH].dropwebhook()
)

return unload_ok

@@ -84,21 +84,11 @@ class NetatmoCamera(Camera):
self._unique_id = f"{self._camera_id}-{self._camera_type}"
self._verify_ssl = verify_ssl
self._quality = quality

# URLs
self._vpnurl = None
self._localurl = None

# Monitoring status
self._status = None

# SD Card status
self._sd_status = None

# Power status
self._alim_status = None

# Is local
self._is_local = None

def camera_image(self):
@@ -219,8 +209,6 @@ class NetatmoCamera(Camera):

def update(self):
"""Update entity status."""

# Refresh camera data
self._data.update()

camera = self._data.camera_data.get_camera(cid=self._camera_id)

@@ -441,6 +441,11 @@ class ThermostatData:
except TypeError:
_LOGGER.error("ThermostatData::setup() got error")
return False
except pyatmo.exceptions.NoDevice:
_LOGGER.debug(
"No climate devices for %s (%s)", self.home_name, self.home_id
)
return False
return True

@Throttle(MIN_TIME_BETWEEN_UPDATES)

@@ -33,6 +33,7 @@ class NetatmoFlowHandler(
"read_station",
"read_thermostat",
"write_camera",
"write_presence",
"write_thermostat",
]

@@ -14,12 +14,12 @@ MODELS = {
"NOC": "Smart Outdoor Camera",
"NSD": "Smart Smoke Alarm",
"NACamDoorTag": "Smart Door and Window Sensors",
"NHC": "Smart Indoor Air Quality Monitor",
"NAMain": "Smart Home Weather station – indoor module",
"NAModule1": "Smart Home Weather station – outdoor module",
"NAModule4": "Smart Additional Indoor module",
"NAModule3": "Smart Rain Gauge",
"NAModule2": "Smart Anemometer",
"NHC": "Home Coach",
}

AUTH = "netatmo_auth"
@@ -32,6 +32,7 @@ CONF_CLOUDHOOK_URL = "cloudhook_url"
OAUTH2_AUTHORIZE = "https://api.netatmo.com/oauth2/authorize"
OAUTH2_TOKEN = "https://api.netatmo.com/oauth2/token"

DATA_DEVICE_IDS = "netatmo_device_ids"
DATA_PERSONS = "netatmo_persons"

NETATMO_WEBHOOK_URL = None

@@ -77,7 +77,11 @@ PERSON_SCHEMA = vol.Schema(
)

CONFIG_SCHEMA = vol.Schema(
{vol.Optional(DOMAIN): vol.All(cv.ensure_list, cv.remove_falsy, [PERSON_SCHEMA])},
{
vol.Optional(DOMAIN, default=[]): vol.All(
cv.ensure_list, cv.remove_falsy, [PERSON_SCHEMA]
)
},
extra=vol.ALLOW_EXTRA,
)

@@ -25,6 +25,7 @@ from homeassistant.helpers.event import async_track_time_interval
from homeassistant.helpers.service import verify_domain_control

from .const import (
CONF_ZONE_RUN_TIME,
DATA_CLIENT,
DATA_PROGRAMS,
DATA_PROVISION_SETTINGS,
@@ -33,6 +34,8 @@ from .const import (
DATA_ZONES,
DATA_ZONES_DETAILS,
DEFAULT_PORT,
DEFAULT_SCAN_INTERVAL,
DEFAULT_ZONE_RUN,
DOMAIN,
PROGRAM_UPDATE_TOPIC,
SENSOR_UPDATE_TOPIC,
@@ -41,19 +44,14 @@ from .const import (

_LOGGER = logging.getLogger(__name__)

DATA_LISTENER = "listener"

CONF_CONTROLLERS = "controllers"
CONF_PROGRAM_ID = "program_id"
CONF_SECONDS = "seconds"
CONF_ZONE_ID = "zone_id"
CONF_ZONE_RUN_TIME = "zone_run_time"

DEFAULT_ATTRIBUTION = "Data provided by Green Electronics LLC"
DEFAULT_ICON = "mdi:water"
DEFAULT_SCAN_INTERVAL = timedelta(seconds=60)
DEFAULT_SSL = True
DEFAULT_ZONE_RUN = 60 * 10

SERVICE_ALTER_PROGRAM = vol.Schema({vol.Required(CONF_PROGRAM_ID): cv.positive_int})

@@ -109,7 +107,6 @@ async def async_setup(hass, config):
"""Set up the RainMachine component."""
hass.data[DOMAIN] = {}
hass.data[DOMAIN][DATA_CLIENT] = {}
hass.data[DOMAIN][DATA_LISTENER] = {}

if DOMAIN not in config:
return True
@@ -143,7 +140,7 @@ async def async_setup_entry(hass, config_entry):
config_entry.data[CONF_IP_ADDRESS],
config_entry.data[CONF_PASSWORD],
port=config_entry.data[CONF_PORT],
ssl=config_entry.data[CONF_SSL],
ssl=config_entry.data.get(CONF_SSL, DEFAULT_SSL),
)
except RainMachineError as err:
_LOGGER.error("An error occurred: %s", err)
@@ -156,8 +153,10 @@ async def async_setup_entry(hass, config_entry):
rainmachine = RainMachine(
hass,
controller,
config_entry.data[CONF_ZONE_RUN_TIME],
config_entry.data[CONF_SCAN_INTERVAL],
config_entry.data.get(CONF_ZONE_RUN_TIME, DEFAULT_ZONE_RUN),
config_entry.data.get(
CONF_SCAN_INTERVAL, DEFAULT_SCAN_INTERVAL.total_seconds()
),
)

# Update the data object, which at this point (prior to any sensors registering
@@ -260,9 +259,6 @@ async def async_unload_entry(hass, config_entry):
"""Unload an OpenUV config entry."""
hass.data[DOMAIN][DATA_CLIENT].pop(config_entry.entry_id)

remove_listener = hass.data[DOMAIN][DATA_LISTENER].pop(config_entry.entry_id)
remove_listener()

tasks = [
hass.config_entries.async_forward_entry_unload(config_entry, component)
for component in ("binary_sensor", "sensor", "switch")

@@ -4,10 +4,22 @@ from regenmaschine.errors import RainMachineError
import voluptuous as vol

from homeassistant import config_entries
from homeassistant.const import CONF_IP_ADDRESS, CONF_PASSWORD, CONF_PORT
from homeassistant.const import (
CONF_IP_ADDRESS,
CONF_PASSWORD,
CONF_PORT,
CONF_SCAN_INTERVAL,
CONF_SSL,
)
from homeassistant.helpers import aiohttp_client

from .const import DEFAULT_PORT, DOMAIN # pylint: disable=unused-import
from .const import ( # pylint: disable=unused-import
CONF_ZONE_RUN_TIME,
DEFAULT_PORT,
DEFAULT_SCAN_INTERVAL,
DEFAULT_ZONE_RUN,
DOMAIN,
)

class RainMachineFlowHandler(config_entries.ConfigFlow, domain=DOMAIN):
@@ -53,8 +65,8 @@ class RainMachineFlowHandler(config_entries.ConfigFlow, domain=DOMAIN):
user_input[CONF_IP_ADDRESS],
user_input[CONF_PASSWORD],
websession,
port=user_input.get(CONF_PORT, DEFAULT_PORT),
ssl=True,
port=user_input[CONF_PORT],
ssl=user_input.get(CONF_SSL, True),
)
except RainMachineError:
return await self._show_form({CONF_PASSWORD: "invalid_credentials"})
@@ -63,5 +75,17 @@ class RainMachineFlowHandler(config_entries.ConfigFlow, domain=DOMAIN):
# access token without using the IP address and password, so we have to
# store it:
return self.async_create_entry(
title=user_input[CONF_IP_ADDRESS], data=user_input
title=user_input[CONF_IP_ADDRESS],
data={
CONF_IP_ADDRESS: user_input[CONF_IP_ADDRESS],
CONF_PASSWORD: user_input[CONF_PASSWORD],
CONF_PORT: user_input[CONF_PORT],
CONF_SSL: user_input.get(CONF_SSL, True),
CONF_SCAN_INTERVAL: user_input.get(
CONF_SCAN_INTERVAL, DEFAULT_SCAN_INTERVAL.total_seconds()
),
CONF_ZONE_RUN_TIME: user_input.get(
CONF_ZONE_RUN_TIME, DEFAULT_ZONE_RUN
),
},
)

@@ -1,6 +1,10 @@
"""Define constants for the SimpliSafe component."""
from datetime import timedelta

DOMAIN = "rainmachine"

CONF_ZONE_RUN_TIME = "zone_run_time"

DATA_CLIENT = "client"
DATA_PROGRAMS = "programs"
DATA_PROVISION_SETTINGS = "provision.settings"
@@ -10,6 +14,8 @@ DATA_ZONES = "zones"
DATA_ZONES_DETAILS = "zones_details"

DEFAULT_PORT = 8080
DEFAULT_SCAN_INTERVAL = timedelta(seconds=60)
DEFAULT_ZONE_RUN = 60 * 10

PROGRAM_UPDATE_TOPIC = f"{DOMAIN}_program_update"
SENSOR_UPDATE_TOPIC = f"{DOMAIN}_data_update"

@@ -46,6 +46,7 @@ class SamsungTVBridge(ABC):
self.method = method
self.host = host
self.token = None
self.default_port = None
self._remote = None
self._callback = None

@@ -191,6 +192,7 @@ class SamsungTVWSBridge(SamsungTVBridge):
"""Initialize Bridge."""
super().__init__(method, host, port)
self.token = token
self.default_port = 8001

def try_connect(self):
"""Try to connect to the Websocket TV."""

@@ -71,13 +71,27 @@ async def async_setup_entry(hass, config_entry, async_add_entities):
):
turn_on_action = hass.data[DOMAIN][ip_address][CONF_ON_ACTION]
on_script = Script(hass, turn_on_action)
async_add_entities([SamsungTVDevice(config_entry, on_script)])

# Initialize bridge
data = config_entry.data.copy()
bridge = SamsungTVBridge.get_bridge(
data[CONF_METHOD], data[CONF_HOST], data[CONF_PORT], data.get(CONF_TOKEN),
)
if bridge.port is None and bridge.default_port is not None:
# For backward compat, set default port for websocket tv
data[CONF_PORT] = bridge.default_port
hass.config_entries.async_update_entry(config_entry, data=data)
bridge = SamsungTVBridge.get_bridge(
data[CONF_METHOD], data[CONF_HOST], data[CONF_PORT], data.get(CONF_TOKEN),
)

async_add_entities([SamsungTVDevice(bridge, config_entry, on_script)])

class SamsungTVDevice(MediaPlayerDevice):
"""Representation of a Samsung TV."""

def __init__(self, config_entry, on_script):
def __init__(self, bridge, config_entry, on_script):
"""Initialize the Samsung device."""
self._config_entry = config_entry
self._manufacturer = config_entry.data.get(CONF_MANUFACTURER)
@@ -93,13 +107,7 @@ class SamsungTVDevice(MediaPlayerDevice):
# Mark the end of a shutdown command (need to wait 15 seconds before
# sending the next command to avoid turning the TV back ON).
self._end_of_power_off = None
# Initialize bridge
self._bridge = SamsungTVBridge.get_bridge(
config_entry.data[CONF_METHOD],
config_entry.data[CONF_HOST],
config_entry.data[CONF_PORT],
config_entry.data.get(CONF_TOKEN),
)
self._bridge = bridge
self._bridge.register_reauth_callback(self.access_denied)

def access_denied(self):

@@ -3,6 +3,7 @@
"name": "Sighthound",
"documentation": "https://www.home-assistant.io/integrations/sighthound",
"requirements": [
"pillow==7.0.0",
"simplehound==0.3"
],
"dependencies": [],

@@ -3,7 +3,7 @@
"name": "SimpliSafe",
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/simplisafe",
"requirements": ["simplisafe-python==9.0.2"],
"requirements": ["simplisafe-python==9.0.3"],
"dependencies": [],
"codeowners": ["@bachya"]
}

@@ -27,7 +27,7 @@ DOMAIN = "somfy"

CONF_CLIENT_ID = "client_id"
CONF_CLIENT_SECRET = "client_secret"
CONF_OPTIMISTIC = "optimisitic"
CONF_OPTIMISTIC = "optimistic"

SOMFY_AUTH_CALLBACK_PATH = "/auth/somfy/callback"
SOMFY_AUTH_START = "/auth/somfy"
@@ -36,8 +36,8 @@ CONFIG_SCHEMA = vol.Schema(
{
DOMAIN: vol.Schema(
{
vol.Required(CONF_CLIENT_ID): cv.string,
vol.Required(CONF_CLIENT_SECRET): cv.string,
vol.Inclusive(CONF_CLIENT_ID, "oauth"): cv.string,
vol.Inclusive(CONF_CLIENT_SECRET, "oauth"): cv.string,
vol.Optional(CONF_OPTIMISTIC, default=False): cv.boolean,
}
)
@@ -51,23 +51,21 @@ SOMFY_COMPONENTS = ["cover", "switch"]
async def async_setup(hass, config):
"""Set up the Somfy component."""
hass.data[DOMAIN] = {}
domain_config = config.get(DOMAIN, {})
hass.data[DOMAIN][CONF_OPTIMISTIC] = domain_config.get(CONF_OPTIMISTIC, False)

if DOMAIN not in config:
return True

hass.data[DOMAIN][CONF_OPTIMISTIC] = config[DOMAIN][CONF_OPTIMISTIC]

config_flow.SomfyFlowHandler.async_register_implementation(
hass,
config_entry_oauth2_flow.LocalOAuth2Implementation(
if CONF_CLIENT_ID in domain_config:
config_flow.SomfyFlowHandler.async_register_implementation(
hass,
DOMAIN,
config[DOMAIN][CONF_CLIENT_ID],
config[DOMAIN][CONF_CLIENT_SECRET],
"https://accounts.somfy.com/oauth/oauth/v2/auth",
"https://accounts.somfy.com/oauth/oauth/v2/token",
),
)
config_entry_oauth2_flow.LocalOAuth2Implementation(
hass,
DOMAIN,
config[DOMAIN][CONF_CLIENT_ID],
config[DOMAIN][CONF_CLIENT_SECRET],
"https://accounts.somfy.com/oauth/oauth/v2/auth",
"https://accounts.somfy.com/oauth/oauth/v2/token",
),
)

return True

@@ -91,7 +91,7 @@ class LogEntry:

def __init__(self, record, stack, source):
"""Initialize a log entry."""
self.first_occured = self.timestamp = record.created
self.first_occurred = self.timestamp = record.created
self.name = record.name
self.level = record.levelname
self.message = deque([record.getMessage()], maxlen=5)
@@ -117,7 +117,7 @@ class LogEntry:
"timestamp": self.timestamp,
"exception": self.exception,
"count": self.count,
"first_occured": self.first_occured,
"first_occurred": self.first_occurred,
}

@@ -5,7 +5,7 @@
"client_secret": "The client secret from the configuration is invalid.",
"no_agreements": "This account has no Toon displays.",
"no_app": "You need to configure Toon before being able to authenticate with it. [Please read the instructions](https://www.home-assistant.io/components/toon/).",
"unknown_auth_fail": "Unexpected error occured, while authenticating."
"unknown_auth_fail": "Unexpected error occurred, while authenticating."
},
"error": {
"credentials": "The provided credentials are invalid.",

@@ -26,7 +26,7 @@
"abort": {
"client_id": "The client ID from the configuration is invalid.",
"client_secret": "The client secret from the configuration is invalid.",
"unknown_auth_fail": "Unexpected error occured, while authenticating.",
"unknown_auth_fail": "Unexpected error occurred, while authenticating.",
"no_agreements": "This account has no Toon displays.",
"no_app": "You need to configure Toon before being able to authenticate with it. [Please read the instructions](https://www.home-assistant.io/components/toon/)."
}

@@ -2,7 +2,7 @@
"domain": "velbus",
"name": "Velbus",
"documentation": "https://www.home-assistant.io/integrations/velbus",
"requirements": ["python-velbus==2.0.42"],
"requirements": ["python-velbus==2.0.43"],
"config_flow": true,
"dependencies": [],
"codeowners": ["@Cereal2nd", "@brefra"]

@@ -1,4 +1,5 @@
"""Viessmann ViCare climate device."""
from datetime import timedelta
import logging

import requests
@@ -79,6 +80,9 @@ HA_TO_VICARE_PRESET_HEATING = {

PYVICARE_ERROR = "error"

# Scan interval of 15 minutes seems to be safe to not hit the ViCare server rate limit
SCAN_INTERVAL = timedelta(seconds=900)

def setup_platform(hass, config, add_entities, discovery_info=None):
"""Create the ViCare climate devices."""

@@ -1,4 +1,5 @@
"""Viessmann ViCare water_heater device."""
from datetime import timedelta
import logging

import requests
@@ -42,6 +43,9 @@ HA_TO_VICARE_HVAC_DHW = {

PYVICARE_ERROR = "error"

# Scan interval of 15 minutes seems to be safe to not hit the ViCare server rate limit
SCAN_INTERVAL = timedelta(seconds=900)

def setup_platform(hass, config, add_entities, discovery_info=None):
"""Create the ViCare water_heater devices."""

@@ -89,9 +89,19 @@ async def async_setup_entry(hass, config_entry):
Will automatically load components to support devices found on the network.
"""

hass.data[DATA_ZHA] = hass.data.get(DATA_ZHA, {})
hass.data[DATA_ZHA][DATA_ZHA_DISPATCHERS] = []
hass.data[DATA_ZHA][DATA_ZHA_PLATFORM_LOADED] = asyncio.Event()
zha_data = hass.data.setdefault(DATA_ZHA, {})
config = zha_data.get(DATA_ZHA_CONFIG, {})

if config.get(CONF_ENABLE_QUIRKS, True):
# needs to be done here so that the ZHA module is finished loading
# before zhaquirks is imported
import zhaquirks # noqa: F401 pylint: disable=unused-import, import-outside-toplevel, import-error

zha_gateway = ZHAGateway(hass, config, config_entry)
await zha_gateway.async_initialize()

zha_data[DATA_ZHA_DISPATCHERS] = []
zha_data[DATA_ZHA_PLATFORM_LOADED] = asyncio.Event()
platforms = []
for component in COMPONENTS:
platforms.append(
@@ -102,20 +112,10 @@ async def async_setup_entry(hass, config_entry):

async def _platforms_loaded():
await asyncio.gather(*platforms)
hass.data[DATA_ZHA][DATA_ZHA_PLATFORM_LOADED].set()
zha_data[DATA_ZHA_PLATFORM_LOADED].set()

hass.async_create_task(_platforms_loaded())

config = hass.data[DATA_ZHA].get(DATA_ZHA_CONFIG, {})

if config.get(CONF_ENABLE_QUIRKS, True):
# needs to be done here so that the ZHA module is finished loading
# before zhaquirks is imported
import zhaquirks # noqa: F401 pylint: disable=unused-import, import-outside-toplevel, import-error

zha_gateway = ZHAGateway(hass, config, config_entry)
await zha_gateway.async_initialize()

device_registry = await hass.helpers.device_registry.async_get_registry()
device_registry.async_get_or_create(
config_entry_id=config_entry.entry_id,
@@ -130,8 +130,8 @@ async def async_setup_entry(hass, config_entry):

async def async_zha_shutdown(event):
"""Handle shutdown tasks."""
await hass.data[DATA_ZHA][DATA_ZHA_GATEWAY].shutdown()
await hass.data[DATA_ZHA][DATA_ZHA_GATEWAY].async_update_device_storage()
await zha_data[DATA_ZHA_GATEWAY].shutdown()
await zha_data[DATA_ZHA_GATEWAY].async_update_device_storage()

hass.bus.async_listen_once(ha_const.EVENT_HOMEASSISTANT_STOP, async_zha_shutdown)
hass.async_create_task(zha_gateway.async_load_devices())

@@ -85,11 +85,11 @@ class ZigbeeChannel(LogMixin):
self, cluster: zha_typing.ZigpyClusterType, ch_pool: zha_typing.ChannelPoolType
) -> None:
"""Initialize ZigbeeChannel."""
self._channel_name = cluster.ep_attribute
self._generic_id = f"channel_0x{cluster.cluster_id:04x}"
self._channel_name = getattr(cluster, "ep_attribute", self._generic_id)
if self.CHANNEL_NAME:
self._channel_name = self.CHANNEL_NAME
self._ch_pool = ch_pool
self._generic_id = f"channel_0x{cluster.cluster_id:04x}"
self._cluster = cluster
self._id = f"{ch_pool.id}:0x{cluster.cluster_id:04x}"
unique_id = ch_pool.unique_id.replace("-", ":")

@@ -7,10 +7,12 @@ import logging
import os
import traceback

from serial import SerialException
import zigpy.device as zigpy_dev

from homeassistant.components.system_log import LogEntry, _figure_out_source
from homeassistant.core import callback
from homeassistant.exceptions import ConfigEntryNotReady
from homeassistant.helpers.device_registry import (
CONNECTION_ZIGBEE,
async_get_registry as get_dev_reg,
@@ -98,7 +100,6 @@ class ZHAGateway:
self.ha_entity_registry = None
self.application_controller = None
self.radio_description = None
hass.data[DATA_ZHA][DATA_ZHA_GATEWAY] = self
self._log_levels = {
DEBUG_LEVEL_ORIGINAL: async_capture_log_levels(),
DEBUG_LEVEL_CURRENT: async_capture_log_levels(),
@@ -122,7 +123,11 @@ class ZHAGateway:
radio_details = RADIO_TYPES[radio_type]
radio = radio_details[ZHA_GW_RADIO]()
self.radio_description = radio_details[ZHA_GW_RADIO_DESCRIPTION]
await radio.connect(usb_path, baudrate)
try:
await radio.connect(usb_path, baudrate)
except (SerialException, OSError) as exception:
_LOGGER.error("Couldn't open serial port for ZHA: %s", str(exception))
raise ConfigEntryNotReady

if CONF_DATABASE in self._config:
database = self._config[CONF_DATABASE]
@@ -133,7 +138,22 @@ class ZHAGateway:
apply_application_controller_patch(self)
self.application_controller.add_listener(self)
self.application_controller.groups.add_listener(self)
await self.application_controller.startup(auto_form=True)

try:
res = await self.application_controller.startup(auto_form=True)
if res is False:
await self.application_controller.shutdown()
raise ConfigEntryNotReady
except asyncio.TimeoutError as exception:
_LOGGER.error(
"Couldn't start %s coordinator",
radio_details[ZHA_GW_RADIO_DESCRIPTION],
exc_info=exception,
)
radio.close()
raise ConfigEntryNotReady from exception

self._hass.data[DATA_ZHA][DATA_ZHA_GATEWAY] = self
self._hass.data[DATA_ZHA][DATA_ZHA_BRIDGE_ID] = str(
self.application_controller.ieee
)

@@ -1,6 +1,6 @@
"""Support for the definition of zones."""
import logging
from typing import Dict, Optional, cast
from typing import Any, Dict, Optional, cast

import voluptuous as vol

@@ -18,6 +18,7 @@ from homeassistant.const import (
CONF_RADIUS,
EVENT_CORE_CONFIG_UPDATE,
SERVICE_RELOAD,
STATE_UNAVAILABLE,
)
from homeassistant.core import Event, HomeAssistant, ServiceCall, State, callback
from homeassistant.helpers import (
@@ -65,8 +66,20 @@ UPDATE_FIELDS = {
}

def empty_value(value: Any) -> Any:
"""Test if the user has the default config value from adding "zone:"."""
if isinstance(value, dict) and len(value) == 0:
return []

raise vol.Invalid("Not a default value")

CONFIG_SCHEMA = vol.Schema(
{vol.Optional(DOMAIN): vol.All(cv.ensure_list, [vol.Schema(CREATE_FIELDS)])},
{
vol.Optional(DOMAIN, default=[]): vol.Any(
vol.All(cv.ensure_list, [vol.Schema(CREATE_FIELDS)]), empty_value,
)
},
extra=vol.ALLOW_EXTRA,
)

@@ -93,7 +106,7 @@ def async_active_zone(
closest = None

for zone in zones:
if zone.attributes.get(ATTR_PASSIVE):
if zone.state == STATE_UNAVAILABLE or zone.attributes.get(ATTR_PASSIVE):
continue

zone_dist = distance(
@@ -126,6 +139,9 @@ def in_zone(zone: State, latitude: float, longitude: float, radius: float = 0) -

Async friendly.
"""
if zone.state == STATE_UNAVAILABLE:
return False

zone_dist = distance(
latitude,
longitude,
@@ -180,7 +196,7 @@ async def async_setup(hass: HomeAssistant, config: Dict) -> bool:
component, storage_collection, lambda conf: Zone(conf, True)
)

if DOMAIN in config:
if config[DOMAIN]:
await yaml_collection.async_load(config[DOMAIN])

await storage_collection.async_load()
@@ -206,7 +222,7 @@ async def async_setup(hass: HomeAssistant, config: Dict) -> bool:
conf = await component.async_prepare_reload(skip_reset=True)
if conf is None:
return
await yaml_collection.async_load(conf.get(DOMAIN, []))
await yaml_collection.async_load(conf[DOMAIN])

service.async_register_admin_service(
hass,

@@ -565,9 +565,25 @@ def _log_pkg_error(package: str, component: str, config: Dict, message: str) ->
def _identify_config_schema(module: ModuleType) -> Tuple[Optional[str], Optional[Dict]]:
"""Extract the schema and identify list or dict based."""
try:
schema = module.CONFIG_SCHEMA.schema[module.DOMAIN] # type: ignore
except (AttributeError, KeyError):
key = next(k for k in module.CONFIG_SCHEMA.schema if k == module.DOMAIN) # type: ignore
except (AttributeError, StopIteration):
return None, None

schema = module.CONFIG_SCHEMA.schema[key] # type: ignore

if hasattr(key, "default") and not isinstance(
key.default, vol.schema_builder.Undefined
):
default_value = schema(key.default())

if isinstance(default_value, dict):
return "dict", schema

if isinstance(default_value, list):
return "list", schema

return None, None

t_schema = str(schema)
if t_schema.startswith("{") or "schema_with_slug_keys" in t_schema:
return ("dict", schema)

@@ -1,7 +1,7 @@
"""Constants used by Home Assistant components."""
MAJOR_VERSION = 0
MINOR_VERSION = 107
PATCH_VERSION = "0b6"
PATCH_VERSION = "3"
__short_version__ = f"{MAJOR_VERSION}.{MINOR_VERSION}"
__version__ = f"{__short_version__}.{PATCH_VERSION}"
REQUIRED_PYTHON_VER = (3, 7, 0)

@@ -12,7 +12,7 @@ cryptography==2.8
defusedxml==0.6.0
distro==1.4.0
hass-nabucasa==0.32.2
home-assistant-frontend==20200316.1
home-assistant-frontend==20200318.1
importlib-metadata==1.5.0
jinja2>=2.10.3
netdisco==2.6.0

@@ -163,7 +163,7 @@ aioftp==0.12.0
aioharmony==0.1.13

# homeassistant.components.homekit_controller
aiohomekit[IP]==0.2.29.1
aiohomekit[IP]==0.2.29.2

# homeassistant.components.emulated_hue
# homeassistant.components.http
@@ -696,7 +696,7 @@ hole==0.5.0
holidays==0.10.1

# homeassistant.components.frontend
home-assistant-frontend==20200316.1
home-assistant-frontend==20200318.1

# homeassistant.components.zwave
homeassistant-pyozw==0.1.9
@@ -1017,6 +1017,7 @@ pilight==0.1.1
# homeassistant.components.proxy
# homeassistant.components.qrcode
# homeassistant.components.seven_segments
# homeassistant.components.sighthound
# homeassistant.components.tensorflow
pillow==7.0.0

@@ -1188,7 +1189,7 @@ pycfdns==0.0.1
pychannels==1.0.0

# homeassistant.components.cast
pychromecast==4.1.1
pychromecast==4.2.0

# homeassistant.components.cmus
pycmus==0.1.1
@@ -1315,7 +1316,7 @@ pyhomeworks==0.0.6
pyialarm==0.3

# homeassistant.components.icloud
pyicloud==0.9.4
pyicloud==0.9.5

# homeassistant.components.intesishome
pyintesishome==1.6
@@ -1665,7 +1666,7 @@ python-telnet-vlc==1.0.4
python-twitch-client==0.6.0

# homeassistant.components.velbus
python-velbus==2.0.42
python-velbus==2.0.43

# homeassistant.components.vlc
python-vlc==1.1.2
@@ -1855,7 +1856,7 @@ simplehound==0.3
simplepush==1.1.4

# homeassistant.components.simplisafe
simplisafe-python==9.0.2
simplisafe-python==9.0.3

# homeassistant.components.sisyphus
sisyphus-control==2.2.1

@@ -62,7 +62,7 @@ aiobotocore==0.11.1
aioesphomeapi==2.6.1

# homeassistant.components.homekit_controller
aiohomekit[IP]==0.2.29.1
aiohomekit[IP]==0.2.29.2

# homeassistant.components.emulated_hue
# homeassistant.components.http
@@ -263,7 +263,7 @@ hole==0.5.0
holidays==0.10.1

# homeassistant.components.frontend
home-assistant-frontend==20200316.1
home-assistant-frontend==20200318.1

# homeassistant.components.zwave
homeassistant-pyozw==0.1.9
@@ -365,6 +365,14 @@ pexpect==4.6.0
# homeassistant.components.pilight
pilight==0.1.1

# homeassistant.components.doods
# homeassistant.components.proxy
# homeassistant.components.qrcode
# homeassistant.components.seven_segments
# homeassistant.components.sighthound
# homeassistant.components.tensorflow
pillow==7.0.0

# homeassistant.components.plex
plexapi==3.3.0

@@ -443,7 +451,7 @@ pyblackbird==0.5
pybotvac==0.0.17

# homeassistant.components.cast
pychromecast==4.1.1
pychromecast==4.2.0

# homeassistant.components.coolmaster
pycoolmasternet==0.0.4
@@ -483,7 +491,7 @@ pyheos==0.6.0
pyhomematic==0.1.65

# homeassistant.components.icloud
pyicloud==0.9.4
pyicloud==0.9.5

# homeassistant.components.ipma
pyipma==2.0.5
@@ -587,7 +595,7 @@ python-nest==4.1.0
python-twitch-client==0.6.0

# homeassistant.components.velbus
python-velbus==2.0.42
python-velbus==2.0.43

# homeassistant.components.awair
python_awair==0.0.4
@@ -641,7 +649,7 @@ sentry-sdk==0.13.5
simplehound==0.3

# homeassistant.components.simplisafe
simplisafe-python==9.0.2
simplisafe-python==9.0.3

# homeassistant.components.sleepiq
sleepyq==0.7

@@ -1,5 +1,5 @@
"""Initializer helpers for HomematicIP fake server."""
from asynctest import CoroutineMock, MagicMock, Mock
from asynctest import CoroutineMock, MagicMock, Mock, patch
from homematicip.aio.auth import AsyncAuth
from homematicip.aio.connection import AsyncConnection
from homematicip.aio.home import AsyncHome
@@ -106,9 +106,10 @@ async def mock_hap_with_service_fixture(

@pytest.fixture(name="simple_mock_home")
def simple_mock_home_fixture() -> AsyncHome:
"""Return a simple AsyncHome Mock."""
return Mock(
def simple_mock_home_fixture():
"""Return a simple mocked connection."""

mock_home = Mock(
spec=AsyncHome,
name="Demo",
devices=[],
@@ -120,6 +121,27 @@ def simple_mock_home_fixture() -> AsyncHome:
connected=True,
)

with patch(
"homeassistant.components.homematicip_cloud.hap.AsyncHome",
autospec=True,
return_value=mock_home,
):
yield

@pytest.fixture(name="mock_connection_init")
def mock_connection_init_fixture():
"""Return a simple mocked connection."""

with patch(
"homeassistant.components.homematicip_cloud.hap.AsyncHome.init",
return_value=None,
), patch(
"homeassistant.components.homematicip_cloud.hap.AsyncAuth.init",
return_value=None,
):
yield

@pytest.fixture(name="simple_mock_auth")
def simple_mock_auth_fixture() -> AsyncAuth:

@@ -16,12 +16,15 @@ DEFAULT_CONFIG = {HMIPC_HAPID: "ABC123", HMIPC_PIN: "123", HMIPC_NAME: "hmip"}
IMPORT_CONFIG = {HMIPC_HAPID: "ABC123", HMIPC_AUTHTOKEN: "123", HMIPC_NAME: "hmip"}

async def test_flow_works(hass):
async def test_flow_works(hass, simple_mock_home):
"""Test config flow."""

with patch(
"homeassistant.components.homematicip_cloud.hap.HomematicipAuth.async_checkbutton",
return_value=False,
), patch(
"homeassistant.components.homematicip_cloud.hap.HomematicipAuth.get_auth",
return_value=True,
):
result = await hass.config_entries.flow.async_init(
HMIPC_DOMAIN, context={"source": "user"}, data=DEFAULT_CONFIG
@@ -137,7 +140,7 @@ async def test_init_already_configured(hass):
assert result["reason"] == "already_configured"

async def test_import_config(hass):
async def test_import_config(hass, simple_mock_home):
"""Test importing a host with an existing config file."""
with patch(
"homeassistant.components.homematicip_cloud.hap.HomematicipAuth.async_checkbutton",

@@ -125,14 +125,11 @@ async def test_hap_create(hass, hmip_config_entry, simple_mock_home):
hass.config.components.add(HMIPC_DOMAIN)
hap = HomematicipHAP(hass, hmip_config_entry)
assert hap
with patch(
"homeassistant.components.homematicip_cloud.hap.AsyncHome",
return_value=simple_mock_home,
), patch.object(hap, "async_connect"):
with patch.object(hap, "async_connect"):
assert await hap.async_setup()

async def test_hap_create_exception(hass, hmip_config_entry):
async def test_hap_create_exception(hass, hmip_config_entry, mock_connection_init):
"""Mock AsyncHome to execute get_hap."""
hass.config.components.add(HMIPC_DOMAIN)

@@ -24,7 +24,9 @@ from homeassistant.setup import async_setup_component
from tests.common import MockConfigEntry

async def test_config_with_accesspoint_passed_to_config_entry(hass):
async def test_config_with_accesspoint_passed_to_config_entry(
hass, mock_connection, simple_mock_home
):
"""Test that config for a accesspoint are loaded via config entry."""

entry_config = {
@@ -51,7 +53,9 @@ async def test_config_with_accesspoint_passed_to_config_entry(hass):
assert isinstance(hass.data[HMIPC_DOMAIN]["ABC123"], HomematicipHAP)

async def test_config_already_registered_not_passed_to_config_entry(hass):
async def test_config_already_registered_not_passed_to_config_entry(
hass, simple_mock_home
):
"""Test that an already registered accesspoint does not get imported."""

mock_config = {HMIPC_AUTHTOKEN: "123", HMIPC_HAPID: "ABC123", HMIPC_NAME: "name"}
@@ -87,7 +91,9 @@ async def test_config_already_registered_not_passed_to_config_entry(hass):
assert config_entries[0].unique_id == "ABC123"

async def test_load_entry_fails_due_to_connection_error(hass, hmip_config_entry):
async def test_load_entry_fails_due_to_connection_error(
hass, hmip_config_entry, mock_connection_init
):
"""Test load entry fails due to connection error."""
hmip_config_entry.add_to_hass(hass)

@@ -101,7 +107,9 @@ async def test_load_entry_fails_due_to_connection_error(hass, hmip_config_entry)
assert hmip_config_entry.state == ENTRY_STATE_SETUP_RETRY

async def test_load_entry_fails_due_to_generic_exception(hass, hmip_config_entry):
async def test_load_entry_fails_due_to_generic_exception(
hass, hmip_config_entry, simple_mock_home
):
"""Test load entry fails due to generic exception."""
hmip_config_entry.add_to_hass(hass)

@@ -140,16 +140,7 @@ async def test_webhook_update_registration(webhook_client, authed_api_client):
async def test_webhook_handle_get_zones(hass, create_registrations, webhook_client):
"""Test that we can get zones properly."""
await async_setup_component(
hass,
ZONE_DOMAIN,
{
ZONE_DOMAIN: {
"name": "test",
"latitude": 32.880837,
"longitude": -117.237561,
"radius": 250,
}
},
hass, ZONE_DOMAIN, {ZONE_DOMAIN: {}},
)

resp = await webhook_client.post(
@@ -161,7 +152,8 @@ async def test_webhook_handle_get_zones(hass, create_registrations, webhook_clie

json = await resp.json()
assert len(json) == 1
assert json[0]["entity_id"] == "zone.home"
zones = sorted(json, key=lambda entry: entry["entity_id"])
assert zones[0]["entity_id"] == "zone.home"

async def test_webhook_handle_get_config(hass, create_registrations, webhook_client):

@@ -65,6 +65,7 @@ async def test_full_flow(hass, aiohttp_client, aioclient_mock):
"read_station",
"read_thermostat",
"write_camera",
"write_presence",
"write_thermostat",
]
)

@@ -4,7 +4,7 @@ from unittest.mock import patch
from regenmaschine.errors import RainMachineError

from homeassistant import data_entry_flow
from homeassistant.components.rainmachine import DOMAIN, config_flow
from homeassistant.components.rainmachine import CONF_ZONE_RUN_TIME, DOMAIN, config_flow
from homeassistant.config_entries import SOURCE_USER
from homeassistant.const import (
CONF_IP_ADDRESS,
@@ -98,6 +98,7 @@ async def test_step_import(hass):
CONF_PORT: 8080,
CONF_SSL: True,
CONF_SCAN_INTERVAL: 60,
CONF_ZONE_RUN_TIME: 600,
}

@@ -129,4 +130,5 @@ async def test_step_user(hass):
CONF_PORT: 8080,
CONF_SSL: True,
CONF_SCAN_INTERVAL: 60,
CONF_ZONE_RUN_TIME: 600,
}

@@ -157,7 +157,7 @@ async def test_dedup_logs(hass, hass_client):
log_msg()
log = await get_error_log(hass, hass_client, 3)
assert_log(log[0], "", ["error message 2", "error message 2-2"], "ERROR")
assert log[0]["timestamp"] > log[0]["first_occured"]
assert log[0]["timestamp"] > log[0]["first_occurred"]

log_msg("2-3")
log_msg("2-4")

@@ -108,7 +108,7 @@ async def test_no_clients(hass):
    """Test the update_clients function when no clients are found."""
    await setup_unifi_integration(hass)

    assert len(hass.states.async_all()) == 1
    assert len(hass.states.async_entity_ids("device_tracker")) == 0


async def test_tracked_devices(hass):
@@ -123,7 +123,7 @@ async def test_tracked_devices(hass):
        devices_response=[DEVICE_1, DEVICE_2],
        known_wireless_clients=(CLIENT_4["mac"],),
    )
    assert len(hass.states.async_all()) == 7
    assert len(hass.states.async_entity_ids("device_tracker")) == 6

    client_1 = hass.states.get("device_tracker.client_1")
    assert client_1 is not None
@@ -184,7 +184,7 @@ async def test_controller_state_change(hass):
    controller = await setup_unifi_integration(
        hass, clients_response=[CLIENT_1], devices_response=[DEVICE_1],
    )
    assert len(hass.states.async_all()) == 3
    assert len(hass.states.async_entity_ids("device_tracker")) == 2

    # Controller unavailable
    controller.async_unifi_signalling_callback(
@@ -214,7 +214,7 @@ async def test_option_track_clients(hass):
    controller = await setup_unifi_integration(
        hass, clients_response=[CLIENT_1, CLIENT_2], devices_response=[DEVICE_1],
    )
    assert len(hass.states.async_all()) == 4
    assert len(hass.states.async_entity_ids("device_tracker")) == 3

    client_1 = hass.states.get("device_tracker.client_1")
    assert client_1 is not None
@@ -259,7 +259,7 @@ async def test_option_track_wired_clients(hass):
    controller = await setup_unifi_integration(
        hass, clients_response=[CLIENT_1, CLIENT_2], devices_response=[DEVICE_1],
    )
    assert len(hass.states.async_all()) == 4
    assert len(hass.states.async_entity_ids("device_tracker")) == 3

    client_1 = hass.states.get("device_tracker.client_1")
    assert client_1 is not None
@@ -304,7 +304,7 @@ async def test_option_track_devices(hass):
    controller = await setup_unifi_integration(
        hass, clients_response=[CLIENT_1, CLIENT_2], devices_response=[DEVICE_1],
    )
    assert len(hass.states.async_all()) == 4
    assert len(hass.states.async_entity_ids("device_tracker")) == 3

    client_1 = hass.states.get("device_tracker.client_1")
    assert client_1 is not None
@@ -349,7 +349,7 @@ async def test_option_ssid_filter(hass):
    controller = await setup_unifi_integration(
        hass, options={CONF_SSID_FILTER: ["ssid"]}, clients_response=[CLIENT_3],
    )
    assert len(hass.states.async_all()) == 2
    assert len(hass.states.async_entity_ids("device_tracker")) == 1

    # SSID filter active
    client_3 = hass.states.get("device_tracker.client_3")
@@ -387,7 +387,7 @@ async def test_wireless_client_go_wired_issue(hass):
    client_1_client["last_seen"] = dt_util.as_timestamp(dt_util.utcnow())

    controller = await setup_unifi_integration(hass, clients_response=[client_1_client])
    assert len(hass.states.async_all()) == 2
    assert len(hass.states.async_entity_ids("device_tracker")) == 1

    client_1 = hass.states.get("device_tracker.client_1")
    assert client_1 is not None
@@ -460,7 +460,7 @@ async def test_restoring_client(hass):
        clients_response=[CLIENT_2],
        clients_all_response=[CLIENT_1],
    )
    assert len(hass.states.async_all()) == 3
    assert len(hass.states.async_entity_ids("device_tracker")) == 2

    device_1 = hass.states.get("device_tracker.client_1")
    assert device_1 is not None
@@ -474,7 +474,7 @@ async def test_dont_track_clients(hass):
        clients_response=[CLIENT_1],
        devices_response=[DEVICE_1],
    )
    assert len(hass.states.async_all()) == 2
    assert len(hass.states.async_entity_ids("device_tracker")) == 1

    client_1 = hass.states.get("device_tracker.client_1")
    assert client_1 is None
@@ -492,7 +492,7 @@ async def test_dont_track_devices(hass):
        clients_response=[CLIENT_1],
        devices_response=[DEVICE_1],
    )
    assert len(hass.states.async_all()) == 2
    assert len(hass.states.async_entity_ids("device_tracker")) == 1

    client_1 = hass.states.get("device_tracker.client_1")
    assert client_1 is not None
@@ -509,7 +509,7 @@ async def test_dont_track_wired_clients(hass):
        options={unifi.controller.CONF_TRACK_WIRED_CLIENTS: False},
        clients_response=[CLIENT_1, CLIENT_2],
    )
    assert len(hass.states.async_all()) == 2
    assert len(hass.states.async_entity_ids("device_tracker")) == 1

    client_1 = hass.states.get("device_tracker.client_1")
    assert client_1 is not None

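Note: the two assertion styles in the hunks above count different things. hass.states.async_all() returns every state in the state machine, controller entities included, while hass.states.async_entity_ids("device_tracker") is limited to the tracker platform. A minimal sketch of the distinction, assuming the setup_unifi_integration helper and the CLIENT_1/DEVICE_1 fixtures from this test module:

# Sketch only: relies on the test helpers used in the hunks above.
async def test_counting_styles(hass):
    """Contrast state-machine-wide counts with per-platform counts."""
    await setup_unifi_integration(
        hass, clients_response=[CLIENT_1], devices_response=[DEVICE_1],
    )

    # Every state Home Assistant currently holds, regardless of platform.
    total_states = len(hass.states.async_all())

    # Only entity ids registered under the device_tracker platform.
    tracker_entities = len(hass.states.async_entity_ids("device_tracker"))

    assert tracker_entities <= total_states
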
@@ -55,7 +55,7 @@ async def test_no_clients(hass):
    )

    assert len(controller.mock_requests) == 4
    assert len(hass.states.async_all()) == 1
    assert len(hass.states.async_entity_ids("sensor")) == 0


async def test_sensors(hass):
@@ -71,7 +71,7 @@ async def test_sensors(hass):
    )

    assert len(controller.mock_requests) == 4
    assert len(hass.states.async_all()) == 5
    assert len(hass.states.async_entity_ids("sensor")) == 4

    wired_client_rx = hass.states.get("sensor.wired_client_name_rx")
    assert wired_client_rx.state == "1234.0"

@@ -209,7 +209,7 @@ async def test_no_clients(hass):
    )

    assert len(controller.mock_requests) == 4
    assert len(hass.states.async_all()) == 1
    assert len(hass.states.async_entity_ids("switch")) == 0


async def test_controller_not_client(hass):
@@ -222,7 +222,7 @@ async def test_controller_not_client(hass):
    )

    assert len(controller.mock_requests) == 4
    assert len(hass.states.async_all()) == 1
    assert len(hass.states.async_entity_ids("switch")) == 0
    cloudkey = hass.states.get("switch.cloud_key")
    assert cloudkey is None

@@ -240,7 +240,7 @@ async def test_not_admin(hass):
    )

    assert len(controller.mock_requests) == 4
    assert len(hass.states.async_all()) == 1
    assert len(hass.states.async_entity_ids("switch")) == 0


async def test_switches(hass):
@@ -258,7 +258,7 @@ async def test_switches(hass):
    )

    assert len(controller.mock_requests) == 4
    assert len(hass.states.async_all()) == 4
    assert len(hass.states.async_entity_ids("switch")) == 3

    switch_1 = hass.states.get("switch.poe_client_1")
    assert switch_1 is not None
@@ -312,7 +312,7 @@ async def test_new_client_discovered_on_block_control(hass):
    )

    assert len(controller.mock_requests) == 4
    assert len(hass.states.async_all()) == 1
    assert len(hass.states.async_entity_ids("switch")) == 0

    blocked = hass.states.get("switch.block_client_1")
    assert blocked is None
@@ -324,7 +324,7 @@ async def test_new_client_discovered_on_block_control(hass):
    controller.api.session_handler("data")
    await hass.async_block_till_done()

    assert len(hass.states.async_all()) == 2
    assert len(hass.states.async_entity_ids("switch")) == 1
    blocked = hass.states.get("switch.block_client_1")
    assert blocked is not None

@@ -336,7 +336,7 @@ async def test_option_block_clients(hass):
        options={CONF_BLOCK_CLIENT: [BLOCKED["mac"]]},
        clients_all_response=[BLOCKED, UNBLOCKED],
    )
    assert len(hass.states.async_all()) == 2
    assert len(hass.states.async_entity_ids("switch")) == 1

    # Add a second switch
    hass.config_entries.async_update_entry(
@@ -344,28 +344,28 @@ async def test_option_block_clients(hass):
        options={CONF_BLOCK_CLIENT: [BLOCKED["mac"], UNBLOCKED["mac"]]},
    )
    await hass.async_block_till_done()
    assert len(hass.states.async_all()) == 3
    assert len(hass.states.async_entity_ids("switch")) == 2

    # Remove the second switch again
    hass.config_entries.async_update_entry(
        controller.config_entry, options={CONF_BLOCK_CLIENT: [BLOCKED["mac"]]},
    )
    await hass.async_block_till_done()
    assert len(hass.states.async_all()) == 2
    assert len(hass.states.async_entity_ids("switch")) == 1

    # Enable one and remove another one
    hass.config_entries.async_update_entry(
        controller.config_entry, options={CONF_BLOCK_CLIENT: [UNBLOCKED["mac"]]},
    )
    await hass.async_block_till_done()
    assert len(hass.states.async_all()) == 2
    assert len(hass.states.async_entity_ids("switch")) == 1

    # Remove one
    hass.config_entries.async_update_entry(
        controller.config_entry, options={CONF_BLOCK_CLIENT: []},
    )
    await hass.async_block_till_done()
    assert len(hass.states.async_all()) == 1
    assert len(hass.states.async_entity_ids("switch")) == 0


async def test_new_client_discovered_on_poe_control(hass):
@@ -378,7 +378,7 @@ async def test_new_client_discovered_on_poe_control(hass):
    )

    assert len(controller.mock_requests) == 4
    assert len(hass.states.async_all()) == 2
    assert len(hass.states.async_entity_ids("switch")) == 1

    controller.api.websocket._data = {
        "meta": {"message": "sta:sync"},
@@ -391,7 +391,7 @@ async def test_new_client_discovered_on_poe_control(hass):
        "switch", "turn_off", {"entity_id": "switch.poe_client_1"}, blocking=True
    )
    assert len(controller.mock_requests) == 5
    assert len(hass.states.async_all()) == 3
    assert len(hass.states.async_entity_ids("switch")) == 2
    assert controller.mock_requests[4] == {
        "json": {
            "port_overrides": [{"port_idx": 1, "portconf_id": "1a1", "poe_mode": "off"}]
@@ -430,7 +430,7 @@ async def test_ignore_multiple_poe_clients_on_same_port(hass):
    )

    assert len(controller.mock_requests) == 4
    assert len(hass.states.async_all()) == 4
    assert len(hass.states.async_entity_ids("device_tracker")) == 3

    switch_1 = hass.states.get("switch.poe_client_1")
    switch_2 = hass.states.get("switch.poe_client_2")
@@ -481,7 +481,7 @@ async def test_restoring_client(hass):
    )

    assert len(controller.mock_requests) == 4
    assert len(hass.states.async_all()) == 3
    assert len(hass.states.async_entity_ids("switch")) == 2

    device_1 = hass.states.get("switch.client_1")
    assert device_1 is not None

@@ -487,3 +487,18 @@ async def test_import_config_entry(hass):
    assert state.attributes[zone.ATTR_RADIUS] == 3
    assert state.attributes[zone.ATTR_PASSIVE] is False
    assert state.attributes[ATTR_ICON] == "mdi:from-config-entry"


async def test_zone_empty_setup(hass):
    """Set up zone with empty config."""
    assert await setup.async_setup_component(hass, DOMAIN, {"zone": {}})


async def test_unavailable_zone(hass):
    """Test active zone with unavailable zones."""
    assert await setup.async_setup_component(hass, DOMAIN, {"zone": {}})
    hass.states.async_set("zone.bla", "unavailable", {"restored": True})

    assert zone.async_active_zone(hass, 0.0, 0.01) is None

    assert zone.in_zone(hass.states.get("zone.bla"), 0, 0) is False

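The test_unavailable_zone case above expects the zone helpers to skip zones whose state is unavailable. A rough sketch of such a guard, for illustration only; the helper name and iteration below are assumptions, not the integration's actual code:

# Hypothetical helper: yield only zone states that can take part in
# active-zone lookups, skipping restored-but-unavailable zones.
from homeassistant.const import STATE_UNAVAILABLE


def _usable_zones(hass):
    """Yield zone states that are not unavailable."""
    for entity_id in hass.states.async_entity_ids("zone"):
        state = hass.states.get(entity_id)
        if state is None or state.state == STATE_UNAVAILABLE:
            continue  # no usable coordinates on an unavailable zone
        yield state
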
@@ -10,6 +10,7 @@ from unittest.mock import Mock
import asynctest
from asynctest import CoroutineMock, patch
import pytest
import voluptuous as vol
from voluptuous import Invalid, MultipleInvalid
import yaml

@@ -989,3 +990,20 @@ async def test_component_config_exceptions(hass, caplog):
        "Unknown error validating config for test_platform platform for test_domain component with PLATFORM_SCHEMA"
        in caplog.text
    )


@pytest.mark.parametrize(
    "domain, schema, expected",
    [
        ("zone", vol.Schema({vol.Optional("zone", default=[]): list}), "list"),
        ("zone", vol.Schema({vol.Optional("zone", default=dict): dict}), "dict"),
    ],
)
def test_identify_config_schema(domain, schema, expected):
    """Test identify config schema."""
    assert (
        config_util._identify_config_schema(Mock(DOMAIN=domain, CONFIG_SCHEMA=schema))[
            0
        ]
        == expected
    )
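
The parametrized test above only checks the first element returned by config_util._identify_config_schema, i.e. whether a component declares its domain config as a "list" or a "dict". A rough sketch of that classification, assuming the helper inspects the default declared on the domain key of CONFIG_SCHEMA (the real implementation may differ):

# Illustrative only: mirrors what the two parametrized cases assert.
import voluptuous as vol


def guess_config_style(domain, config_schema):
    """Return "list" or "dict" based on the schema's declared default."""
    for key in config_schema.schema:
        if str(key) != domain:
            continue
        # voluptuous stores defaults as zero-argument callables.
        default = key.default() if callable(key.default) else key.default
        return "list" if isinstance(default, list) else "dict"
    return "dict"  # assume dict-style config when no domain key is declared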