mirror of https://github.com/home-assistant/core.git
synced 2025-09-24 20:39:28 +00:00

Compare commits

72 Commits (SHA1):

baf7fb7264
edfe8e1583
b36b1dbc70
253c848692
7a6ac578b4
95de94e53f
181b2803cd
e0f2fa33df
884c346bdf
c218ff5a75
fa43a218d2
f4cc64d289
4c31829832
ff32c1c3e9
677c276b41
ca1c696f54
78e5878247
470537bc5f
64556f6f69
2785b067e3
a129bc05ae
197736f66b
d82d7fa2e9
663db747e9
57998f6f0f
edbb995fff
312903025d
0ae5c325fe
a309a00929
55be5bf880
7b37dcd8ed
e36bdd717a
fa650b648c
ac2310e7f9
aee5c16803
5f0816ea25
6a6037790f
d2b0c35319
d707a1b072
ca12db9271
346a4b399d
2090252936
a28091e94a
ae8cb0ccdf
06a608e342
9af95e8577
29a9781bf7
877eddf43d
88e3e73bb4
3aa1bcbb77
f973b35cef
4e08aa8b05
8e917ccf73
0b62011626
d520a02b8c
1e469b39ad
c2f615839d
657bf33e32
0ca87007fd
d0d9d853f2
8348878e7e
b70be5f2f2
fddb565e4c
f3e6820042
ae98f13181
ab38e7d98a
9797b09d44
4908d4358c
67d728fc50
912409ed0c
ac8c889b0f
67a721d39b
@@ -2,9 +2,15 @@
.git
.github
config
docs

# Development
.devcontainer
.vscode

# Test related files
.tox
tests

# Other virtualization methods
venv

Dockerfile (new file, 17 lines)
@@ -0,0 +1,17 @@
ARG BUILD_FROM
FROM ${BUILD_FROM}

WORKDIR /usr/src

## Setup Home Assistant
COPY . homeassistant/
RUN pip3 install --no-cache-dir --no-index --only-binary=:all: --find-links "${WHEELS_LINKS}" \
-r homeassistant/requirements_all.txt -c homeassistant/homeassistant/package_constraints.txt \
&& pip3 install --no-cache-dir --no-index --only-binary=:all: --find-links "${WHEELS_LINKS}" \
-e ./homeassistant \
&& python3 -m compileall homeassistant/homeassistant

# Home Assistant S6-Overlay
COPY rootfs /

WORKDIR /config
@@ -14,7 +14,7 @@ schedules:
always: true
variables:
- name: versionBuilder
value: '6.9'
value: '7.2.0'
- group: docker
- group: github
- group: twine
@@ -108,11 +108,9 @@ stages:
docker run --rm --privileged \
-v ~/.docker:/root/.docker:rw \
-v /run/docker.sock:/run/docker.sock:rw \
-v $(pwd):/homeassistant:ro \
-v $(pwd):/data:ro \
homeassistant/amd64-builder:$(versionBuilder) \
--homeassistant $(homeassistantRelease) "--$(buildArch)" \
-r https://github.com/home-assistant/hassio-homeassistant \
-t generic --docker-hub homeassistant
--generic $(homeassistantRelease) "--$(buildArch)" -t /data \

docker run --rm --privileged \
-v ~/.docker:/root/.docker \

@@ -5,6 +5,7 @@ trigger:
branches:
include:
- dev
- rc
paths:
include:
- requirements_all.txt
@@ -18,7 +19,7 @@ schedules:
always: true
variables:
- name: versionWheels
value: '1.4-3.7-alpine3.10'
value: '1.10.1-3.7-alpine3.11'
resources:
repositories:
- repository: azure
@@ -32,8 +33,10 @@ jobs:
builderVersion: '$(versionWheels)'
builderApk: 'build-base;cmake;git;linux-headers;bluez-dev;libffi-dev;openssl-dev;glib-dev;eudev-dev;libxml2-dev;libxslt-dev;libpng-dev;libjpeg-turbo-dev;tiff-dev;autoconf;automake;cups-dev;gmp-dev;mpfr-dev;mpc1-dev;ffmpeg-dev;gammu-dev'
builderPip: 'Cython;numpy'
skipBinary: 'aiohttp'
wheelsRequirement: 'requirements_wheels.txt'
wheelsRequirementDiff: 'requirements_diff.txt'
wheelsConstraint: 'homeassistant/package_constraints.txt'
preBuild:
- script: |
cp requirements_all.txt requirements_wheels.txt
@@ -69,9 +72,5 @@ jobs:
sed -i "s|# py_noaa|py_noaa|g" ${requirement_file}
sed -i "s|# bme680|bme680|g" ${requirement_file}
sed -i "s|# python-gammu|python-gammu|g" ${requirement_file}

if [[ "$(buildArch)" =~ arm ]]; then
sed -i "s|# VL53L1X|VL53L1X|g" ${requirement_file}
fi
done
displayName: 'Prepare requirements files for Hass.io'
build.json (new file, 14 lines)
@@ -0,0 +1,14 @@
{
"image": "homeassistant/{arch}-homeassistant",
"build_from": {
"aarch64": "homeassistant/aarch64-homeassistant-base:7.0.1",
"armhf": "homeassistant/armhf-homeassistant-base:7.0.1",
"armv7": "homeassistant/armv7-homeassistant-base:7.0.1",
"amd64": "homeassistant/amd64-homeassistant-base:7.0.1",
"i386": "homeassistant/i386-homeassistant-base:7.0.1"
},
"labels": {
"io.hass.type": "core"
},
"version_tag": true
}

@@ -73,8 +73,8 @@ async def async_setup(hass, config):
conf.get("ssh_key", conf.get("pub_key", "")),
conf[CONF_MODE],
conf[CONF_REQUIRE_IP],
conf[CONF_INTERFACE],
conf[CONF_DNSMASQ],
interface=conf[CONF_INTERFACE],
dnsmasq=conf[CONF_DNSMASQ],
)

await api.connection.async_connect()

@@ -2,7 +2,7 @@
"domain": "asuswrt",
"name": "ASUSWRT",
"documentation": "https://www.home-assistant.io/integrations/asuswrt",
"requirements": ["aioasuswrt==1.2.2"],
"requirements": ["aioasuswrt==1.2.3"],
"dependencies": [],
"codeowners": ["@kennedyshead"]
}
@@ -21,6 +21,10 @@ from homeassistant.helpers.dispatcher import async_dispatcher_connect
from .axis_base import AxisEntityBase
from .const import DOMAIN as AXIS_DOMAIN

AXIS_IMAGE = "http://{host}:{port}/axis-cgi/jpg/image.cgi"
AXIS_VIDEO = "http://{host}:{port}/axis-cgi/mjpg/video.cgi"
AXIS_STREAM = "rtsp://{user}:{password}@{host}/axis-media/media.amp?videocodec=h264"


async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up the Axis camera video stream."""
@@ -32,13 +36,11 @@ async def async_setup_entry(hass, config_entry, async_add_entities):
CONF_NAME: config_entry.data[CONF_NAME],
CONF_USERNAME: config_entry.data[CONF_USERNAME],
CONF_PASSWORD: config_entry.data[CONF_PASSWORD],
CONF_MJPEG_URL: (
f"http://{config_entry.data[CONF_HOST]}"
f":{config_entry.data[CONF_PORT]}/axis-cgi/mjpg/video.cgi"
CONF_MJPEG_URL: AXIS_VIDEO.format(
host=config_entry.data[CONF_HOST], port=config_entry.data[CONF_PORT],
),
CONF_STILL_IMAGE_URL: (
f"http://{config_entry.data[CONF_HOST]}"
f":{config_entry.data[CONF_PORT]}/axis-cgi/jpg/image.cgi"
CONF_STILL_IMAGE_URL: AXIS_IMAGE.format(
host=config_entry.data[CONF_HOST], port=config_entry.data[CONF_PORT],
),
CONF_AUTHENTICATION: HTTP_DIGEST_AUTHENTICATION,
}
@@ -70,19 +72,17 @@ class AxisCamera(AxisEntityBase, MjpegCamera):

async def stream_source(self):
"""Return the stream source."""
return (
f"rtsp://{self.device.config_entry.data[CONF_USERNAME]}´"
f":{self.device.config_entry.data[CONF_PASSWORD]}"
f"@{self.device.host}/axis-media/media.amp?videocodec=h264"
return AXIS_STREAM.format(
user=self.device.config_entry.data[CONF_USERNAME],
password=self.device.config_entry.data[CONF_PASSWORD],
host=self.device.host,
)

def _new_address(self):
"""Set new device address for video stream."""
port = self.device.config_entry.data[CONF_PORT]
self._mjpeg_url = (f"http://{self.device.host}:{port}/axis-cgi/mjpg/video.cgi",)
self._still_image_url = (
f"http://{self.device.host}:{port}/axis-cgi/jpg/image.cgi"
)
self._mjpeg_url = AXIS_VIDEO.format(host=self.device.host, port=port)
self._still_image_url = AXIS_IMAGE.format(host=self.device.host, port=port)

@property
def unique_id(self):

@@ -141,7 +141,7 @@ async def async_request_stream(hass, entity_id, fmt):
source,
fmt=fmt,
keepalive=camera_prefs.preload_stream,
options=camera.options,
options=camera.stream_options,
)

@@ -3,7 +3,7 @@
"name": "Google Cast",
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/cast",
"requirements": ["pychromecast==4.1.1"],
"requirements": ["pychromecast==4.2.0"],
"dependencies": [],
"after_dependencies": ["cloud"],
"zeroconf": ["_googlecast._tcp.local."],
@@ -7,9 +7,9 @@ from .const import ATTRIBUTION, OPTION_WORLDWIDE

SENSORS = {
"confirmed": "mdi:emoticon-neutral-outline",
"current": "mdi:emoticon-frown-outline",
"current": "mdi:emoticon-sad-outline",
"recovered": "mdi:emoticon-happy-outline",
"deaths": "mdi:emoticon-dead-outline",
"deaths": "mdi:emoticon-cry-outline",
}

@@ -3,11 +3,12 @@
"name": "Home Assistant Frontend",
"documentation": "https://www.home-assistant.io/integrations/frontend",
"requirements": [
"home-assistant-frontend==20200316.1"
"home-assistant-frontend==20200318.1"
],
"dependencies": [
"api",
"auth",
"device_automation",
"http",
"lovelace",
"onboarding",
@@ -19,4 +20,4 @@
"@home-assistant/frontend"
],
"quality_scale": "internal"
}
}

@@ -190,16 +190,15 @@ async def async_setup(hass, config):

hass.http.register_view(HassIOView(host, websession))

if "frontend" in hass.config.components:
await hass.components.panel_custom.async_register_panel(
frontend_url_path="hassio",
webcomponent_name="hassio-main",
sidebar_title="Supervisor",
sidebar_icon="hass:home-assistant",
js_url="/api/hassio/app/entrypoint.js",
embed_iframe=True,
require_admin=True,
)
await hass.components.panel_custom.async_register_panel(
frontend_url_path="hassio",
webcomponent_name="hassio-main",
sidebar_title="Supervisor",
sidebar_icon="hass:home-assistant",
js_url="/api/hassio/app/entrypoint.js",
embed_iframe=True,
require_admin=True,
)

await hassio.update_hass_api(config.get("http", {}), refresh_token)
@@ -14,7 +14,7 @@
"busy_error": "Device refused to add pairing as it is already pairing with another controller.",
"max_peers_error": "Device refused to add pairing as it has no free pairing storage.",
"max_tries_error": "Device refused to add pairing as it has received more than 100 unsuccessful authentication attempts.",
"pairing_failed": "An unhandled error occured while attempting to pair with this device. This may be a temporary failure or your device may not be supported currently.",
"pairing_failed": "An unhandled error occurred while attempting to pair with this device. This may be a temporary failure or your device may not be supported currently.",
"unable_to_pair": "Unable to pair, please try again.",
"unknown_error": "Device reported an unknown error. Pairing failed."
},

@@ -3,7 +3,7 @@
"name": "HomeKit Controller",
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/homekit_controller",
"requirements": ["aiohomekit[IP]==0.2.29.1"],
"requirements": ["aiohomekit[IP]==0.2.29.2"],
"dependencies": [],
"zeroconf": ["_hap._tcp.local."],
"codeowners": ["@Jc2k"]

@@ -25,7 +25,7 @@
"max_peers_error": "Device refused to add pairing as it has no free pairing storage.",
"busy_error": "Device refused to add pairing as it is already pairing with another controller.",
"max_tries_error": "Device refused to add pairing as it has received more than 100 unsuccessful authentication attempts.",
"pairing_failed": "An unhandled error occured while attempting to pair with this device. This may be a temporary failure or your device may not be supported currently."
"pairing_failed": "An unhandled error occurred while attempting to pair with this device. This may be a temporary failure or your device may not be supported currently."
},
"abort": {
"no_devices": "No unpaired devices could be found",

@@ -5,7 +5,7 @@
"documentation": "https://www.home-assistant.io/integrations/huawei_lte",
"requirements": [
"getmac==0.8.1",
"huawei-lte-api==1.4.10",
"huawei-lte-api==1.4.11",
"stringcase==1.2.0",
"url-normalize==1.4.1"
],
@@ -97,6 +97,7 @@ class IcloudAccount:
self._owner_fullname = None
self._family_members_fullname = {}
self._devices = {}
self._retried_fetch = False

self.listeners = []

@@ -122,10 +123,6 @@ class IcloudAccount:
_LOGGER.error("No iCloud device found")
raise ConfigEntryNotReady

if DEVICE_STATUS_CODES.get(list(api_devices)[0][DEVICE_STATUS]) == "pending":
_LOGGER.warning("Pending devices, trying again ...")
raise ConfigEntryNotReady

self._owner_fullname = f"{user_info['firstName']} {user_info['lastName']}"

self._family_members_fullname = {}
@@ -157,28 +154,15 @@ class IcloudAccount:
)
return

if DEVICE_STATUS_CODES.get(list(api_devices)[0][DEVICE_STATUS]) == "pending":
_LOGGER.warning("Pending devices, trying again in 15s")
self._fetch_interval = 0.25
dispatcher_send(self.hass, self.signal_device_update)
track_point_in_utc_time(
self.hass,
self.keep_alive,
utcnow() + timedelta(minutes=self._fetch_interval),
)
return

# Gets devices infos
new_device = False
for device in api_devices:
status = device.status(DEVICE_STATUS_SET)
device_id = status[DEVICE_ID]
device_name = status[DEVICE_NAME]
device_status = DEVICE_STATUS_CODES.get(status[DEVICE_STATUS], "error")

if (
device_status == "pending"
or status[DEVICE_BATTERY_STATUS] == "Unknown"
status[DEVICE_BATTERY_STATUS] == "Unknown"
or status.get(DEVICE_BATTERY_LEVEL) is None
):
continue
@@ -198,7 +182,16 @@ class IcloudAccount:
self._devices[device_id].update(status)
new_device = True

self._fetch_interval = self._determine_interval()
if (
DEVICE_STATUS_CODES.get(list(api_devices)[0][DEVICE_STATUS]) == "pending"
and not self._retried_fetch
):
_LOGGER.warning("Pending devices, trying again in 15s")
self._fetch_interval = 0.25
self._retried_fetch = True
else:
self._fetch_interval = self._determine_interval()
self._retried_fetch = False

dispatcher_send(self.hass, self.signal_device_update)
if new_device:

@@ -3,7 +3,7 @@
"name": "Apple iCloud",
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/icloud",
"requirements": ["pyicloud==0.9.4"],
"requirements": ["pyicloud==0.9.5"],
"dependencies": [],
"codeowners": ["@Quentame"]
}
@@ -88,24 +88,22 @@ def _cv_input_text(cfg):
CONFIG_SCHEMA = vol.Schema(
{
DOMAIN: cv.schema_with_slug_keys(
vol.Any(
vol.All(
{
vol.Optional(CONF_NAME): cv.string,
vol.Optional(CONF_MIN, default=CONF_MIN_VALUE): vol.Coerce(int),
vol.Optional(CONF_MAX, default=CONF_MAX_VALUE): vol.Coerce(int),
vol.Optional(CONF_INITIAL, ""): cv.string,
vol.Optional(CONF_ICON): cv.icon,
vol.Optional(CONF_UNIT_OF_MEASUREMENT): cv.string,
vol.Optional(CONF_PATTERN): cv.string,
vol.Optional(CONF_MODE, default=MODE_TEXT): vol.In(
[MODE_TEXT, MODE_PASSWORD]
),
},
_cv_input_text,
),
None,
)
vol.All(
lambda value: value or {},
{
vol.Optional(CONF_NAME): cv.string,
vol.Optional(CONF_MIN, default=CONF_MIN_VALUE): vol.Coerce(int),
vol.Optional(CONF_MAX, default=CONF_MAX_VALUE): vol.Coerce(int),
vol.Optional(CONF_INITIAL, ""): cv.string,
vol.Optional(CONF_ICON): cv.icon,
vol.Optional(CONF_UNIT_OF_MEASUREMENT): cv.string,
vol.Optional(CONF_PATTERN): cv.string,
vol.Optional(CONF_MODE, default=MODE_TEXT): vol.In(
[MODE_TEXT, MODE_PASSWORD]
),
},
_cv_input_text,
),
)
},
extra=vol.ALLOW_EXTRA,
@@ -203,13 +201,6 @@ class InputText(RestoreEntity):
@classmethod
def from_yaml(cls, config: typing.Dict) -> "InputText":
"""Return entity instance initialized from yaml storage."""
# set defaults for empty config
config = {
CONF_MAX: CONF_MAX_VALUE,
CONF_MIN: CONF_MIN_VALUE,
CONF_MODE: MODE_TEXT,
**config,
}
input_text = cls(config)
input_text.entity_id = f"{DOMAIN}.{config[CONF_ID]}"
input_text.editable = False
@@ -332,16 +332,17 @@ class MikrotikHub:
async def async_add_options(self):
"""Populate default options for Mikrotik."""
if not self.config_entry.options:
data = dict(self.config_entry.data)
options = {
CONF_ARP_PING: self.config_entry.data.pop(CONF_ARP_PING, False),
CONF_FORCE_DHCP: self.config_entry.data.pop(CONF_FORCE_DHCP, False),
CONF_DETECTION_TIME: self.config_entry.data.pop(
CONF_ARP_PING: data.pop(CONF_ARP_PING, False),
CONF_FORCE_DHCP: data.pop(CONF_FORCE_DHCP, False),
CONF_DETECTION_TIME: data.pop(
CONF_DETECTION_TIME, DEFAULT_DETECTION_TIME
),
}

self.hass.config_entries.async_update_entry(
self.config_entry, options=options
self.config_entry, data=data, options=options
)

async def request_update(self):
@@ -28,6 +28,7 @@ from . import api, config_flow
from .const import (
AUTH,
CONF_CLOUDHOOK_URL,
DATA_DEVICE_IDS,
DATA_PERSONS,
DOMAIN,
OAUTH2_AUTHORIZE,
@@ -65,6 +66,7 @@ async def async_setup(hass: HomeAssistant, config: dict):
"""Set up the Netatmo component."""
hass.data[DOMAIN] = {}
hass.data[DOMAIN][DATA_PERSONS] = {}
hass.data[DOMAIN][DATA_DEVICE_IDS] = {}

if DOMAIN not in config:
return True
@@ -104,7 +106,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry):
webhook_unregister(hass, entry.data[CONF_WEBHOOK_ID])

async def register_webhook(event):
# Wait for the could integration to be ready
# Wait for the cloud integration to be ready
await asyncio.sleep(WAIT_FOR_CLOUD)

if CONF_WEBHOOK_ID not in entry.data:
@@ -112,6 +114,9 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry):
hass.config_entries.async_update_entry(entry, data=data)

if hass.components.cloud.async_active_subscription():
# Wait for cloud connection to be established
await asyncio.sleep(WAIT_FOR_CLOUD)

if CONF_CLOUDHOOK_URL not in entry.data:
webhook_url = await hass.components.cloud.async_create_cloudhook(
entry.data[CONF_WEBHOOK_ID]
@@ -144,6 +149,11 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry):

async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry):
"""Unload a config entry."""
if CONF_WEBHOOK_ID in entry.data:
await hass.async_add_executor_job(
hass.data[DOMAIN][entry.entry_id][AUTH].dropwebhook
)

unload_ok = all(
await asyncio.gather(
*[
@@ -152,14 +162,10 @@ async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry):
]
)
)

if unload_ok:
hass.data[DOMAIN].pop(entry.entry_id)

if CONF_WEBHOOK_ID in entry.data:
await hass.async_add_executor_job(
hass.data[DOMAIN][entry.entry_id][AUTH].dropwebhook()
)

return unload_ok
@@ -84,21 +84,11 @@ class NetatmoCamera(Camera):
self._unique_id = f"{self._camera_id}-{self._camera_type}"
self._verify_ssl = verify_ssl
self._quality = quality

# URLs
self._vpnurl = None
self._localurl = None

# Monitoring status
self._status = None

# SD Card status
self._sd_status = None

# Power status
self._alim_status = None

# Is local
self._is_local = None

def camera_image(self):
@@ -219,8 +209,6 @@ class NetatmoCamera(Camera):

def update(self):
"""Update entity status."""

# Refresh camera data
self._data.update()

camera = self._data.camera_data.get_camera(cid=self._camera_id)

@@ -441,6 +441,11 @@ class ThermostatData:
except TypeError:
_LOGGER.error("ThermostatData::setup() got error")
return False
except pyatmo.exceptions.NoDevice:
_LOGGER.debug(
"No climate devices for %s (%s)", self.home_name, self.home_id
)
return False
return True

@Throttle(MIN_TIME_BETWEEN_UPDATES)

@@ -33,6 +33,7 @@ class NetatmoFlowHandler(
"read_station",
"read_thermostat",
"write_camera",
"write_presence",
"write_thermostat",
]

@@ -14,12 +14,12 @@ MODELS = {
"NOC": "Smart Outdoor Camera",
"NSD": "Smart Smoke Alarm",
"NACamDoorTag": "Smart Door and Window Sensors",
"NHC": "Smart Indoor Air Quality Monitor",
"NAMain": "Smart Home Weather station – indoor module",
"NAModule1": "Smart Home Weather station – outdoor module",
"NAModule4": "Smart Additional Indoor module",
"NAModule3": "Smart Rain Gauge",
"NAModule2": "Smart Anemometer",
"NHC": "Home Coach",
}

AUTH = "netatmo_auth"
@@ -32,6 +32,7 @@ CONF_CLOUDHOOK_URL = "cloudhook_url"
OAUTH2_AUTHORIZE = "https://api.netatmo.com/oauth2/authorize"
OAUTH2_TOKEN = "https://api.netatmo.com/oauth2/token"

DATA_DEVICE_IDS = "netatmo_device_ids"
DATA_PERSONS = "netatmo_persons"

NETATMO_WEBHOOK_URL = None
@@ -505,4 +505,6 @@ class ONVIFHassCamera(Camera):
@property
def unique_id(self) -> Optional[str]:
"""Return a unique ID."""
if self._profile_index:
return f"{self._mac}_{self._profile_index}"
return self._mac

@@ -77,7 +77,11 @@ PERSON_SCHEMA = vol.Schema(
)

CONFIG_SCHEMA = vol.Schema(
{vol.Optional(DOMAIN): vol.All(cv.ensure_list, cv.remove_falsy, [PERSON_SCHEMA])},
{
vol.Optional(DOMAIN, default=[]): vol.All(
cv.ensure_list, cv.remove_falsy, [PERSON_SCHEMA]
)
},
extra=vol.ALLOW_EXTRA,
)
@@ -75,8 +75,9 @@ async def async_setup_entry(hass: HomeAssistantType, entry: ConfigEntry):

def token_saver(token):
_LOGGER.debug("Saving updated token")
entry.data[CONF_TOKEN] = token
hass.config_entries.async_update_entry(entry, data={**entry.data})
hass.config_entries.async_update_entry(
entry, data={**entry.data, CONF_TOKEN: token}
)

# Force token update.
entry.data[CONF_TOKEN]["expires_in"] = -1
@@ -105,12 +106,18 @@ async def async_setup_entry(hass: HomeAssistantType, entry: ConfigEntry):
async def async_setup_webhook(hass: HomeAssistantType, entry: ConfigEntry, session):
"""Set up a webhook to handle binary sensor events."""
if CONF_WEBHOOK_ID not in entry.data:
entry.data[CONF_WEBHOOK_ID] = hass.components.webhook.async_generate_id()
entry.data[CONF_WEBHOOK_URL] = hass.components.webhook.async_generate_url(
entry.data[CONF_WEBHOOK_ID]
webhook_id = hass.components.webhook.async_generate_id()
webhook_url = hass.components.webhook.async_generate_url(webhook_id)
_LOGGER.info("Registering new webhook at: %s", webhook_url)

hass.config_entries.async_update_entry(
entry,
data={
**entry.data,
CONF_WEBHOOK_ID: webhook_id,
CONF_WEBHOOK_URL: webhook_url,
},
)
_LOGGER.info("Registering new webhook at: %s", entry.data[CONF_WEBHOOK_URL])
hass.config_entries.async_update_entry(entry, data={**entry.data})
await hass.async_add_executor_job(
session.update_webhook,
entry.data[CONF_WEBHOOK_URL],
@@ -25,6 +25,7 @@ from homeassistant.helpers.event import async_track_time_interval
from homeassistant.helpers.service import verify_domain_control

from .const import (
CONF_ZONE_RUN_TIME,
DATA_CLIENT,
DATA_PROGRAMS,
DATA_PROVISION_SETTINGS,
@@ -33,6 +34,8 @@ from .const import (
DATA_ZONES,
DATA_ZONES_DETAILS,
DEFAULT_PORT,
DEFAULT_SCAN_INTERVAL,
DEFAULT_ZONE_RUN,
DOMAIN,
PROGRAM_UPDATE_TOPIC,
SENSOR_UPDATE_TOPIC,
@@ -41,19 +44,14 @@ from .const import (

_LOGGER = logging.getLogger(__name__)

DATA_LISTENER = "listener"

CONF_CONTROLLERS = "controllers"
CONF_PROGRAM_ID = "program_id"
CONF_SECONDS = "seconds"
CONF_ZONE_ID = "zone_id"
CONF_ZONE_RUN_TIME = "zone_run_time"

DEFAULT_ATTRIBUTION = "Data provided by Green Electronics LLC"
DEFAULT_ICON = "mdi:water"
DEFAULT_SCAN_INTERVAL = timedelta(seconds=60)
DEFAULT_SSL = True
DEFAULT_ZONE_RUN = 60 * 10

SERVICE_ALTER_PROGRAM = vol.Schema({vol.Required(CONF_PROGRAM_ID): cv.positive_int})

@@ -109,7 +107,6 @@ async def async_setup(hass, config):
"""Set up the RainMachine component."""
hass.data[DOMAIN] = {}
hass.data[DOMAIN][DATA_CLIENT] = {}
hass.data[DOMAIN][DATA_LISTENER] = {}

if DOMAIN not in config:
return True
@@ -143,7 +140,7 @@ async def async_setup_entry(hass, config_entry):
config_entry.data[CONF_IP_ADDRESS],
config_entry.data[CONF_PASSWORD],
port=config_entry.data[CONF_PORT],
ssl=config_entry.data[CONF_SSL],
ssl=config_entry.data.get(CONF_SSL, DEFAULT_SSL),
)
except RainMachineError as err:
_LOGGER.error("An error occurred: %s", err)
@@ -156,8 +153,10 @@ async def async_setup_entry(hass, config_entry):
rainmachine = RainMachine(
hass,
controller,
config_entry.data[CONF_ZONE_RUN_TIME],
config_entry.data[CONF_SCAN_INTERVAL],
config_entry.data.get(CONF_ZONE_RUN_TIME, DEFAULT_ZONE_RUN),
config_entry.data.get(
CONF_SCAN_INTERVAL, DEFAULT_SCAN_INTERVAL.total_seconds()
),
)

# Update the data object, which at this point (prior to any sensors registering
@@ -260,9 +259,6 @@ async def async_unload_entry(hass, config_entry):
"""Unload an OpenUV config entry."""
hass.data[DOMAIN][DATA_CLIENT].pop(config_entry.entry_id)

remove_listener = hass.data[DOMAIN][DATA_LISTENER].pop(config_entry.entry_id)
remove_listener()

tasks = [
hass.config_entries.async_forward_entry_unload(config_entry, component)
for component in ("binary_sensor", "sensor", "switch")
@@ -4,10 +4,22 @@ from regenmaschine.errors import RainMachineError
import voluptuous as vol

from homeassistant import config_entries
from homeassistant.const import CONF_IP_ADDRESS, CONF_PASSWORD, CONF_PORT
from homeassistant.const import (
CONF_IP_ADDRESS,
CONF_PASSWORD,
CONF_PORT,
CONF_SCAN_INTERVAL,
CONF_SSL,
)
from homeassistant.helpers import aiohttp_client

from .const import DEFAULT_PORT, DOMAIN # pylint: disable=unused-import
from .const import ( # pylint: disable=unused-import
CONF_ZONE_RUN_TIME,
DEFAULT_PORT,
DEFAULT_SCAN_INTERVAL,
DEFAULT_ZONE_RUN,
DOMAIN,
)


class RainMachineFlowHandler(config_entries.ConfigFlow, domain=DOMAIN):
@@ -53,8 +65,8 @@ class RainMachineFlowHandler(config_entries.ConfigFlow, domain=DOMAIN):
user_input[CONF_IP_ADDRESS],
user_input[CONF_PASSWORD],
websession,
port=user_input.get(CONF_PORT, DEFAULT_PORT),
ssl=True,
port=user_input[CONF_PORT],
ssl=user_input.get(CONF_SSL, True),
)
except RainMachineError:
return await self._show_form({CONF_PASSWORD: "invalid_credentials"})
@@ -63,5 +75,17 @@ class RainMachineFlowHandler(config_entries.ConfigFlow, domain=DOMAIN):
# access token without using the IP address and password, so we have to
# store it:
return self.async_create_entry(
title=user_input[CONF_IP_ADDRESS], data=user_input
title=user_input[CONF_IP_ADDRESS],
data={
CONF_IP_ADDRESS: user_input[CONF_IP_ADDRESS],
CONF_PASSWORD: user_input[CONF_PASSWORD],
CONF_PORT: user_input[CONF_PORT],
CONF_SSL: user_input.get(CONF_SSL, True),
CONF_SCAN_INTERVAL: user_input.get(
CONF_SCAN_INTERVAL, DEFAULT_SCAN_INTERVAL.total_seconds()
),
CONF_ZONE_RUN_TIME: user_input.get(
CONF_ZONE_RUN_TIME, DEFAULT_ZONE_RUN
),
},
)

@@ -1,6 +1,10 @@
"""Define constants for the SimpliSafe component."""
from datetime import timedelta

DOMAIN = "rainmachine"

CONF_ZONE_RUN_TIME = "zone_run_time"

DATA_CLIENT = "client"
DATA_PROGRAMS = "programs"
DATA_PROVISION_SETTINGS = "provision.settings"
@@ -10,6 +14,8 @@ DATA_ZONES = "zones"
DATA_ZONES_DETAILS = "zones_details"

DEFAULT_PORT = 8080
DEFAULT_SCAN_INTERVAL = timedelta(seconds=60)
DEFAULT_ZONE_RUN = 60 * 10

PROGRAM_UPDATE_TOPIC = f"{DOMAIN}_program_update"
SENSOR_UPDATE_TOPIC = f"{DOMAIN}_data_update"
@@ -46,6 +46,7 @@ class SamsungTVBridge(ABC):
self.method = method
self.host = host
self.token = None
self.default_port = None
self._remote = None
self._callback = None

@@ -191,6 +192,7 @@ class SamsungTVWSBridge(SamsungTVBridge):
"""Initialize Bridge."""
super().__init__(method, host, port)
self.token = token
self.default_port = 8001

def try_connect(self):
"""Try to connect to the Websocket TV."""
@@ -204,6 +206,7 @@ class SamsungTVWSBridge(SamsungTVBridge):
CONF_TIMEOUT: 31,
}

result = None
try:
LOGGER.debug("Try config: %s", config)
with SamsungTVWS(
@@ -221,9 +224,13 @@ class SamsungTVWSBridge(SamsungTVBridge):
return RESULT_SUCCESS
except WebSocketException:
LOGGER.debug("Working but unsupported config: %s", config)
return RESULT_NOT_SUPPORTED
result = RESULT_NOT_SUPPORTED
except (OSError, ConnectionFailure) as err:
LOGGER.debug("Failing config: %s, error: %s", config, err)
# pylint: disable=useless-else-on-loop
else:
if result:
return result

return RESULT_NOT_SUCCESSFUL
@@ -71,13 +71,27 @@ async def async_setup_entry(hass, config_entry, async_add_entities):
):
turn_on_action = hass.data[DOMAIN][ip_address][CONF_ON_ACTION]
on_script = Script(hass, turn_on_action)
async_add_entities([SamsungTVDevice(config_entry, on_script)])

# Initialize bridge
data = config_entry.data.copy()
bridge = SamsungTVBridge.get_bridge(
data[CONF_METHOD], data[CONF_HOST], data[CONF_PORT], data.get(CONF_TOKEN),
)
if bridge.port is None and bridge.default_port is not None:
# For backward compat, set default port for websocket tv
data[CONF_PORT] = bridge.default_port
hass.config_entries.async_update_entry(config_entry, data=data)
bridge = SamsungTVBridge.get_bridge(
data[CONF_METHOD], data[CONF_HOST], data[CONF_PORT], data.get(CONF_TOKEN),
)

async_add_entities([SamsungTVDevice(bridge, config_entry, on_script)])


class SamsungTVDevice(MediaPlayerDevice):
"""Representation of a Samsung TV."""

def __init__(self, config_entry, on_script):
def __init__(self, bridge, config_entry, on_script):
"""Initialize the Samsung device."""
self._config_entry = config_entry
self._manufacturer = config_entry.data.get(CONF_MANUFACTURER)
@@ -93,13 +107,7 @@ class SamsungTVDevice(MediaPlayerDevice):
# Mark the end of a shutdown command (need to wait 15 seconds before
# sending the next command to avoid turning the TV back ON).
self._end_of_power_off = None
# Initialize bridge
self._bridge = SamsungTVBridge.get_bridge(
config_entry.data[CONF_METHOD],
config_entry.data[CONF_HOST],
config_entry.data[CONF_PORT],
config_entry.data.get(CONF_TOKEN),
)
self._bridge = bridge
self._bridge.register_reauth_callback(self.access_denied)

def access_denied(self):
@@ -3,6 +3,7 @@
"name": "Sighthound",
"documentation": "https://www.home-assistant.io/integrations/sighthound",
"requirements": [
"pillow==7.0.0",
"simplehound==0.3"
],
"dependencies": [],

@@ -3,7 +3,7 @@
"name": "SimpliSafe",
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/simplisafe",
"requirements": ["simplisafe-python==9.0.2"],
"requirements": ["simplisafe-python==9.0.4"],
"dependencies": [],
"codeowners": ["@bachya"]
}
@@ -27,7 +27,7 @@ DOMAIN = "somfy"

CONF_CLIENT_ID = "client_id"
CONF_CLIENT_SECRET = "client_secret"
CONF_OPTIMISTIC = "optimisitic"
CONF_OPTIMISTIC = "optimistic"

SOMFY_AUTH_CALLBACK_PATH = "/auth/somfy/callback"
SOMFY_AUTH_START = "/auth/somfy"
@@ -36,8 +36,8 @@ CONFIG_SCHEMA = vol.Schema(
{
DOMAIN: vol.Schema(
{
vol.Required(CONF_CLIENT_ID): cv.string,
vol.Required(CONF_CLIENT_SECRET): cv.string,
vol.Inclusive(CONF_CLIENT_ID, "oauth"): cv.string,
vol.Inclusive(CONF_CLIENT_SECRET, "oauth"): cv.string,
vol.Optional(CONF_OPTIMISTIC, default=False): cv.boolean,
}
)
@@ -51,23 +51,21 @@ SOMFY_COMPONENTS = ["cover", "switch"]
async def async_setup(hass, config):
"""Set up the Somfy component."""
hass.data[DOMAIN] = {}
domain_config = config.get(DOMAIN, {})
hass.data[DOMAIN][CONF_OPTIMISTIC] = domain_config.get(CONF_OPTIMISTIC, False)

if DOMAIN not in config:
return True

hass.data[DOMAIN][CONF_OPTIMISTIC] = config[DOMAIN][CONF_OPTIMISTIC]

config_flow.SomfyFlowHandler.async_register_implementation(
hass,
config_entry_oauth2_flow.LocalOAuth2Implementation(
if CONF_CLIENT_ID in domain_config:
config_flow.SomfyFlowHandler.async_register_implementation(
hass,
DOMAIN,
config[DOMAIN][CONF_CLIENT_ID],
config[DOMAIN][CONF_CLIENT_SECRET],
"https://accounts.somfy.com/oauth/oauth/v2/auth",
"https://accounts.somfy.com/oauth/oauth/v2/token",
),
)
config_entry_oauth2_flow.LocalOAuth2Implementation(
hass,
DOMAIN,
config[DOMAIN][CONF_CLIENT_ID],
config[DOMAIN][CONF_CLIENT_SECRET],
"https://accounts.somfy.com/oauth/oauth/v2/auth",
"https://accounts.somfy.com/oauth/oauth/v2/token",
),
)

return True
@@ -91,7 +91,7 @@ class LogEntry:

def __init__(self, record, stack, source):
"""Initialize a log entry."""
self.first_occured = self.timestamp = record.created
self.first_occurred = self.timestamp = record.created
self.name = record.name
self.level = record.levelname
self.message = deque([record.getMessage()], maxlen=5)
@@ -117,7 +117,7 @@ class LogEntry:
"timestamp": self.timestamp,
"exception": self.exception,
"count": self.count,
"first_occured": self.first_occured,
"first_occurred": self.first_occurred,
}
@@ -1,6 +1,7 @@
"""Ask tankerkoenig.de for petrol price information."""
from datetime import timedelta
import logging
from math import ceil

import pytankerkoenig
import voluptuous as vol
@@ -164,27 +165,41 @@ class TankerkoenigData:
)
return False
self.add_station(additional_station_data["station"])
if len(self.stations) > 10:
_LOGGER.warning(
"Found more than 10 stations to check. "
"This might invalidate your api-key on the long run. "
"Try using a smaller radius"
)
return True

async def fetch_data(self):
"""Get the latest data from tankerkoenig.de."""
_LOGGER.debug("Fetching new data from tankerkoenig.de")
station_ids = list(self.stations)
data = await self._hass.async_add_executor_job(
pytankerkoenig.getPriceList, self._api_key, station_ids
)

if data["ok"]:
prices = {}

# The API seems to only return at most 10 results, so split the list in chunks of 10
# and merge it together.
for index in range(ceil(len(station_ids) / 10)):
data = await self._hass.async_add_executor_job(
pytankerkoenig.getPriceList,
self._api_key,
station_ids[index * 10 : (index + 1) * 10],
)

_LOGGER.debug("Received data: %s", data)
if not data["ok"]:
_LOGGER.error(
"Error fetching data from tankerkoenig.de: %s", data["message"]
)
raise TankerkoenigError(data["message"])
if "prices" not in data:
_LOGGER.error("Did not receive price information from tankerkoenig.de")
raise TankerkoenigError("No prices in data")
else:
_LOGGER.error(
"Error fetching data from tankerkoenig.de: %s", data["message"]
)
raise TankerkoenigError(data["message"])
return data["prices"]
prices.update(data["prices"])
return prices

def add_station(self, station: dict):
"""Add fuel station to the entity list."""
@@ -5,7 +5,7 @@
"client_secret": "The client secret from the configuration is invalid.",
"no_agreements": "This account has no Toon displays.",
"no_app": "You need to configure Toon before being able to authenticate with it. [Please read the instructions](https://www.home-assistant.io/components/toon/).",
"unknown_auth_fail": "Unexpected error occured, while authenticating."
"unknown_auth_fail": "Unexpected error occurred, while authenticating."
},
"error": {
"credentials": "The provided credentials are invalid.",

@@ -26,7 +26,7 @@
"abort": {
"client_id": "The client ID from the configuration is invalid.",
"client_secret": "The client secret from the configuration is invalid.",
"unknown_auth_fail": "Unexpected error occured, while authenticating.",
"unknown_auth_fail": "Unexpected error occurred, while authenticating.",
"no_agreements": "This account has no Toon displays.",
"no_app": "You need to configure Toon before being able to authenticate with it. [Please read the instructions](https://www.home-assistant.io/components/toon/)."
}
@@ -18,7 +18,7 @@ from homeassistant.const import (
STATE_ALARM_TRIGGERED,
)

from . import DOMAIN as TOTALCONNECT_DOMAIN
from .const import DOMAIN

_LOGGER = logging.getLogger(__name__)

@@ -30,7 +30,7 @@ def setup_platform(hass, config, add_entities, discovery_info=None):

alarms = []

client = hass.data[TOTALCONNECT_DOMAIN].client
client = hass.data[DOMAIN].client

for location_id, location in client.locations.items():
location_name = location.location_name
@@ -71,7 +71,7 @@ class TotalConnectAlarm(alarm.AlarmControlPanel):

def update(self):
"""Return the state of the device."""
status = self._client.get_armed_status(self._location_id)
self._client.get_armed_status(self._location_id)
attr = {
"location_name": self._name,
"location_id": self._location_id,
@@ -79,47 +79,36 @@ class TotalConnectAlarm(alarm.AlarmControlPanel):
"low_battery": self._client.locations[self._location_id].low_battery,
"cover_tampered": self._client.locations[
self._location_id
].is_cover_tampered,
].is_cover_tampered(),
"triggered_source": None,
"triggered_zone": None,
}

if status in (self._client.DISARMED, self._client.DISARMED_BYPASS):
if self._client.locations[self._location_id].is_disarmed():
state = STATE_ALARM_DISARMED
elif status in (
self._client.ARMED_STAY,
self._client.ARMED_STAY_INSTANT,
self._client.ARMED_STAY_INSTANT_BYPASS,
):
elif self._client.locations[self._location_id].is_armed_home():
state = STATE_ALARM_ARMED_HOME
elif status == self._client.ARMED_STAY_NIGHT:
elif self._client.locations[self._location_id].is_armed_night():
state = STATE_ALARM_ARMED_NIGHT
elif status in (
self._client.ARMED_AWAY,
self._client.ARMED_AWAY_BYPASS,
self._client.ARMED_AWAY_INSTANT,
self._client.ARMED_AWAY_INSTANT_BYPASS,
):
elif self._client.locations[self._location_id].is_armed_away():
state = STATE_ALARM_ARMED_AWAY
elif status == self._client.ARMED_CUSTOM_BYPASS:
elif self._client.locations[self._location_id].is_armed_custom_bypass():
state = STATE_ALARM_ARMED_CUSTOM_BYPASS
elif status == self._client.ARMING:
elif self._client.locations[self._location_id].is_arming():
state = STATE_ALARM_ARMING
elif status == self._client.DISARMING:
elif self._client.locations[self._location_id].is_disarming():
state = STATE_ALARM_DISARMING
elif status == self._client.ALARMING:
elif self._client.locations[self._location_id].is_triggered_police():
state = STATE_ALARM_TRIGGERED
attr["triggered_source"] = "Police/Medical"
elif status == self._client.ALARMING_FIRE_SMOKE:
elif self._client.locations[self._location_id].is_triggered_fire():
state = STATE_ALARM_TRIGGERED
attr["triggered_source"] = "Fire/Smoke"
elif status == self._client.ALARMING_CARBON_MONOXIDE:
elif self._client.locations[self._location_id].is_triggered_gas():
state = STATE_ALARM_TRIGGERED
attr["triggered_source"] = "Carbon Monoxide"
else:
logging.info(
"Total Connect Client returned unknown status code: %s", status
)
logging.info("Total Connect Client returned unknown status")
state = None

self._state = state

homeassistant/components/totalconnect/const.py (new file, 3 lines)
@@ -0,0 +1,3 @@
"""TotalConnect constants."""

DOMAIN = "totalconnect"
@@ -2,7 +2,7 @@
"domain": "velbus",
"name": "Velbus",
"documentation": "https://www.home-assistant.io/integrations/velbus",
"requirements": ["python-velbus==2.0.42"],
"requirements": ["python-velbus==2.0.43"],
"config_flow": true,
"dependencies": [],
"codeowners": ["@Cereal2nd", "@brefra"]
@@ -1,4 +1,5 @@
"""Viessmann ViCare climate device."""
from datetime import timedelta
import logging

import requests
@@ -79,6 +80,9 @@ HA_TO_VICARE_PRESET_HEATING = {

PYVICARE_ERROR = "error"

# Scan interval of 15 minutes seems to be safe to not hit the ViCare server rate limit
SCAN_INTERVAL = timedelta(seconds=900)


def setup_platform(hass, config, add_entities, discovery_info=None):
"""Create the ViCare climate devices."""

@@ -1,4 +1,5 @@
"""Viessmann ViCare water_heater device."""
from datetime import timedelta
import logging

import requests
@@ -42,6 +43,9 @@ HA_TO_VICARE_HVAC_DHW = {

PYVICARE_ERROR = "error"

# Scan interval of 15 minutes seems to be safe to not hit the ViCare server rate limit
SCAN_INTERVAL = timedelta(seconds=900)


def setup_platform(hass, config, add_entities, discovery_info=None):
"""Create the ViCare water_heater devices."""
@@ -8,6 +8,8 @@ import voluptuous as vol
from homeassistant import config_entries, const as ha_const
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.device_registry import CONNECTION_ZIGBEE
from homeassistant.helpers.dispatcher import async_dispatcher_send
from homeassistant.helpers.typing import HomeAssistantType

from . import api
from .core import ZHAGateway
@@ -27,6 +29,7 @@ from .core.const import (
DEFAULT_BAUDRATE,
DEFAULT_RADIO_TYPE,
DOMAIN,
SIGNAL_ADD_ENTITIES,
RadioType,
)

@@ -89,24 +92,15 @@ async def async_setup_entry(hass, config_entry):
Will automatically load components to support devices found on the network.
"""

hass.data[DATA_ZHA] = hass.data.get(DATA_ZHA, {})
hass.data[DATA_ZHA][DATA_ZHA_DISPATCHERS] = []
hass.data[DATA_ZHA][DATA_ZHA_PLATFORM_LOADED] = asyncio.Event()
platforms = []
zha_data = hass.data.setdefault(DATA_ZHA, {})
zha_data[DATA_ZHA_PLATFORM_LOADED] = {}
config = zha_data.get(DATA_ZHA_CONFIG, {})

zha_data[DATA_ZHA_DISPATCHERS] = []
for component in COMPONENTS:
platforms.append(
hass.async_create_task(
hass.config_entries.async_forward_entry_setup(config_entry, component)
)
)

async def _platforms_loaded():
await asyncio.gather(*platforms)
hass.data[DATA_ZHA][DATA_ZHA_PLATFORM_LOADED].set()

hass.async_create_task(_platforms_loaded())

config = hass.data[DATA_ZHA].get(DATA_ZHA_CONFIG, {})
zha_data[component] = []
coro = hass.config_entries.async_forward_entry_setup(config_entry, component)
zha_data[DATA_ZHA_PLATFORM_LOADED][component] = hass.async_create_task(coro)

if config.get(CONF_ENABLE_QUIRKS, True):
# needs to be done here so that the ZHA module is finished loading
@@ -130,11 +124,11 @@ async def async_setup_entry(hass, config_entry):

async def async_zha_shutdown(event):
"""Handle shutdown tasks."""
await hass.data[DATA_ZHA][DATA_ZHA_GATEWAY].shutdown()
await hass.data[DATA_ZHA][DATA_ZHA_GATEWAY].async_update_device_storage()
await zha_data[DATA_ZHA_GATEWAY].shutdown()
await zha_data[DATA_ZHA_GATEWAY].async_update_device_storage()

hass.bus.async_listen_once(ha_const.EVENT_HOMEASSISTANT_STOP, async_zha_shutdown)
hass.async_create_task(zha_gateway.async_load_devices())
hass.async_create_task(async_load_entities(hass, config_entry))
return True

@@ -152,3 +146,20 @@ async def async_unload_entry(hass, config_entry):
await hass.config_entries.async_forward_entry_unload(config_entry, component)

return True


async def async_load_entities(
hass: HomeAssistantType, config_entry: config_entries.ConfigEntry
) -> None:
"""Load entities after integration was setup."""
await hass.data[DATA_ZHA][DATA_ZHA_GATEWAY].async_prepare_entities()
to_setup = [
hass.data[DATA_ZHA][DATA_ZHA_PLATFORM_LOADED][comp]
for comp in COMPONENTS
if hass.data[DATA_ZHA][comp]
]
results = await asyncio.gather(*to_setup, return_exceptions=True)
for res in results:
if isinstance(res, Exception):
_LOGGER.warning("Couldn't setup zha platform: %s", res)
async_dispatcher_send(hass, SIGNAL_ADD_ENTITIES)
@@ -49,7 +49,7 @@ STRICT_MATCH = functools.partial(ZHA_ENTITIES.strict_match, DOMAIN)

async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up the Zigbee Home Automation binary sensor from config entry."""
entities_to_create = hass.data[DATA_ZHA][DOMAIN] = []
entities_to_create = hass.data[DATA_ZHA][DOMAIN]

unsub = async_dispatcher_connect(
hass,

@@ -85,11 +85,11 @@ class ZigbeeChannel(LogMixin):
self, cluster: zha_typing.ZigpyClusterType, ch_pool: zha_typing.ChannelPoolType
) -> None:
"""Initialize ZigbeeChannel."""
self._channel_name = cluster.ep_attribute
self._generic_id = f"channel_0x{cluster.cluster_id:04x}"
self._channel_name = getattr(cluster, "ep_attribute", self._generic_id)
if self.CHANNEL_NAME:
self._channel_name = self.CHANNEL_NAME
self._ch_pool = ch_pool
self._generic_id = f"channel_0x{cluster.cluster_id:04x}"
self._cluster = cluster
self._id = f"{ch_pool.id}:0x{cluster.cluster_id:04x}"
unique_id = ch_pool.unique_id.replace("-", ":")

@@ -8,6 +8,16 @@ from homeassistant.core import callback
from homeassistant.helpers.typing import HomeAssistantType

from . import const as zha_const, registries as zha_regs, typing as zha_typing
from .. import ( # noqa: F401 pylint: disable=unused-import,
binary_sensor,
cover,
device_tracker,
fan,
light,
lock,
sensor,
switch,
)
from .channels import base

_LOGGER = logging.getLogger(__name__)
@@ -7,10 +7,12 @@ import logging
import os
import traceback

from serial import SerialException
import zigpy.device as zigpy_dev

from homeassistant.components.system_log import LogEntry, _figure_out_source
from homeassistant.core import callback
from homeassistant.exceptions import ConfigEntryNotReady
from homeassistant.helpers.device_registry import (
CONNECTION_ZIGBEE,
async_get_registry as get_dev_reg,
@@ -34,7 +36,6 @@ from .const import (
DATA_ZHA,
DATA_ZHA_BRIDGE_ID,
DATA_ZHA_GATEWAY,
DATA_ZHA_PLATFORM_LOADED,
DEBUG_COMP_BELLOWS,
DEBUG_COMP_ZHA,
DEBUG_COMP_ZIGPY,
@@ -98,7 +99,6 @@ class ZHAGateway:
self.ha_entity_registry = None
self.application_controller = None
self.radio_description = None
hass.data[DATA_ZHA][DATA_ZHA_GATEWAY] = self
self._log_levels = {
DEBUG_LEVEL_ORIGINAL: async_capture_log_levels(),
DEBUG_LEVEL_CURRENT: async_capture_log_levels(),
@@ -122,7 +122,11 @@ class ZHAGateway:
radio_details = RADIO_TYPES[radio_type]
radio = radio_details[ZHA_GW_RADIO]()
self.radio_description = radio_details[ZHA_GW_RADIO_DESCRIPTION]
await radio.connect(usb_path, baudrate)
try:
await radio.connect(usb_path, baudrate)
except (SerialException, OSError) as exception:
_LOGGER.error("Couldn't open serial port for ZHA: %s", str(exception))
raise ConfigEntryNotReady

if CONF_DATABASE in self._config:
database = self._config[CONF_DATABASE]
@@ -133,38 +137,59 @@ class ZHAGateway:
apply_application_controller_patch(self)
self.application_controller.add_listener(self)
self.application_controller.groups.add_listener(self)
await self.application_controller.startup(auto_form=True)

try:
res = await self.application_controller.startup(auto_form=True)
if res is False:
await self.application_controller.shutdown()
raise ConfigEntryNotReady
except asyncio.TimeoutError as exception:
_LOGGER.error(
"Couldn't start %s coordinator",
radio_details[ZHA_GW_RADIO_DESCRIPTION],
exc_info=exception,
)
radio.close()
raise ConfigEntryNotReady from exception

self._hass.data[DATA_ZHA][DATA_ZHA_GATEWAY] = self
self._hass.data[DATA_ZHA][DATA_ZHA_BRIDGE_ID] = str(
self.application_controller.ieee
)
await self.async_load_devices()
self._initialize_groups()

async def async_load_devices(self) -> None:
"""Restore ZHA devices from zigpy application state."""
await self._hass.data[DATA_ZHA][DATA_ZHA_PLATFORM_LOADED].wait()
zigpy_devices = self.application_controller.devices.values()
for zigpy_device in zigpy_devices:
self._async_get_or_create_device(zigpy_device, restored=True)

async def async_prepare_entities(self) -> None:
"""Prepare entities by initializing device channels."""
semaphore = asyncio.Semaphore(2)

async def _throttle(device: zha_typing.ZigpyDeviceType):
async def _throttle(zha_device: zha_typing.ZhaDeviceType, cached: bool):
async with semaphore:
await self.async_device_restored(device)
await zha_device.async_initialize(from_cache=cached)

zigpy_devices = self.application_controller.devices.values()
_LOGGER.debug("Loading battery powered devices")
await asyncio.gather(
*[
_throttle(dev)
for dev in zigpy_devices
if not dev.node_desc.is_mains_powered
_throttle(dev, cached=True)
for dev in self.devices.values()
if not dev.is_mains_powered
]
)
async_dispatcher_send(self._hass, SIGNAL_ADD_ENTITIES)

_LOGGER.debug("Loading mains powered devices")
await asyncio.gather(
*[_throttle(dev) for dev in zigpy_devices if dev.node_desc.is_mains_powered]
*[
_throttle(dev, cached=False)
for dev in self.devices.values()
if dev.is_mains_powered
]
)
async_dispatcher_send(self._hass, SIGNAL_ADD_ENTITIES)

def device_joined(self, device):
"""Handle device joined.
@@ -29,7 +29,7 @@ STRICT_MATCH = functools.partial(ZHA_ENTITIES.strict_match, DOMAIN)

async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up the Zigbee Home Automation cover from config entry."""
entities_to_create = hass.data[DATA_ZHA][DOMAIN] = []
entities_to_create = hass.data[DATA_ZHA][DOMAIN]

unsub = async_dispatcher_connect(
hass,

@@ -26,7 +26,7 @@ _LOGGER = logging.getLogger(__name__)

async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up the Zigbee Home Automation device tracker from config entry."""
entities_to_create = hass.data[DATA_ZHA][DOMAIN] = []
entities_to_create = hass.data[DATA_ZHA][DOMAIN]

unsub = async_dispatcher_connect(
hass,

@@ -53,7 +53,7 @@ STRICT_MATCH = functools.partial(ZHA_ENTITIES.strict_match, DOMAIN)

async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up the Zigbee Home Automation fan from config entry."""
entities_to_create = hass.data[DATA_ZHA][DOMAIN] = []
entities_to_create = hass.data[DATA_ZHA][DOMAIN]

unsub = async_dispatcher_connect(
hass,

@@ -52,7 +52,7 @@ _REFRESH_INTERVAL = (45, 75)

async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up the Zigbee Home Automation light from config entry."""
entities_to_create = hass.data[DATA_ZHA][light.DOMAIN] = []
entities_to_create = hass.data[DATA_ZHA][light.DOMAIN]

unsub = async_dispatcher_connect(
hass,

@@ -36,7 +36,7 @@ VALUE_TO_STATE = dict(enumerate(STATE_LIST))

async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up the Zigbee Home Automation Door Lock from config entry."""
entities_to_create = hass.data[DATA_ZHA][DOMAIN] = []
entities_to_create = hass.data[DATA_ZHA][DOMAIN]

unsub = async_dispatcher_connect(
hass,

@@ -68,7 +68,7 @@ STRICT_MATCH = functools.partial(ZHA_ENTITIES.strict_match, DOMAIN)

async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up the Zigbee Home Automation sensor from config entry."""
entities_to_create = hass.data[DATA_ZHA][DOMAIN] = []
entities_to_create = hass.data[DATA_ZHA][DOMAIN]

unsub = async_dispatcher_connect(
hass,

@@ -26,7 +26,7 @@ STRICT_MATCH = functools.partial(ZHA_ENTITIES.strict_match, DOMAIN)

async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up the Zigbee Home Automation switch from config entry."""
entities_to_create = hass.data[DATA_ZHA][DOMAIN] = []
entities_to_create = hass.data[DATA_ZHA][DOMAIN]

unsub = async_dispatcher_connect(
hass,
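The identical one-line change across the cover, device tracker, fan, light, lock, sensor, and switch hunks above drops the `= []` re-binding, so each platform reuses the list the ZHA gateway already stored under `hass.data[DATA_ZHA][DOMAIN]` rather than replacing it, presumably keeping anything queued there before platform setup. A tiny illustration with plain dicts standing in for `hass.data`:

# Old pattern: the chained assignment re-binds the shared list to a new, empty one,
# so anything queued before platform setup is silently dropped.
data = {"zha": {"cover": ["queued_cover_entity"]}}
entities_to_create = data["zha"]["cover"] = []
assert data["zha"]["cover"] == []

# New pattern: reuse the list that is already there, keeping queued entities.
data = {"zha": {"cover": ["queued_cover_entity"]}}
entities_to_create = data["zha"]["cover"]
assert entities_to_create == ["queued_cover_entity"]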
@@ -1,6 +1,6 @@
"""Support for the definition of zones."""
import logging
from typing import Dict, Optional, cast
from typing import Any, Dict, Optional, cast

import voluptuous as vol

@@ -18,6 +18,7 @@ from homeassistant.const import (
CONF_RADIUS,
EVENT_CORE_CONFIG_UPDATE,
SERVICE_RELOAD,
STATE_UNAVAILABLE,
)
from homeassistant.core import Event, HomeAssistant, ServiceCall, State, callback
from homeassistant.helpers import (

@@ -65,8 +66,20 @@ UPDATE_FIELDS = {
}


def empty_value(value: Any) -> Any:
"""Test if the user has the default config value from adding "zone:"."""
if isinstance(value, dict) and len(value) == 0:
return []

raise vol.Invalid("Not a default value")


CONFIG_SCHEMA = vol.Schema(
{vol.Optional(DOMAIN): vol.All(cv.ensure_list, [vol.Schema(CREATE_FIELDS)])},
{
vol.Optional(DOMAIN, default=[]): vol.Any(
vol.All(cv.ensure_list, [vol.Schema(CREATE_FIELDS)]), empty_value,
)
},
extra=vol.ALLOW_EXTRA,
)

@@ -93,7 +106,7 @@ def async_active_zone(
closest = None

for zone in zones:
if zone.attributes.get(ATTR_PASSIVE):
if zone.state == STATE_UNAVAILABLE or zone.attributes.get(ATTR_PASSIVE):
continue

zone_dist = distance(

@@ -126,6 +139,9 @@ def in_zone(zone: State, latitude: float, longitude: float, radius: float = 0) -

Async friendly.
"""
if zone.state == STATE_UNAVAILABLE:
return False

zone_dist = distance(
latitude,
longitude,

@@ -180,7 +196,7 @@ async def async_setup(hass: HomeAssistant, config: Dict) -> bool:
component, storage_collection, lambda conf: Zone(conf, True)
)

if DOMAIN in config:
if config[DOMAIN]:
await yaml_collection.async_load(config[DOMAIN])

await storage_collection.async_load()

@@ -206,7 +222,7 @@ async def async_setup(hass: HomeAssistant, config: Dict) -> bool:
conf = await component.async_prepare_reload(skip_reset=True)
if conf is None:
return
await yaml_collection.async_load(conf.get(DOMAIN, []))
await yaml_collection.async_load(conf[DOMAIN])

service.async_register_admin_service(
hass,
@@ -3,7 +3,7 @@
"name": "Z-Wave",
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/zwave",
"requirements": ["homeassistant-pyozw==0.1.9", "pydispatcher==2.0.5"],
"requirements": ["homeassistant-pyozw==0.1.10", "pydispatcher==2.0.5"],
"dependencies": [],
"codeowners": ["@home-assistant/z-wave"]
}

@@ -562,18 +562,36 @@ def _log_pkg_error(package: str, component: str, config: Dict, message: str) ->
_LOGGER.error(message)


def _identify_config_schema(module: ModuleType) -> Tuple[Optional[str], Optional[Dict]]:
def _identify_config_schema(module: ModuleType) -> Optional[str]:
"""Extract the schema and identify list or dict based."""
try:
schema = module.CONFIG_SCHEMA.schema[module.DOMAIN]  # type: ignore
except (AttributeError, KeyError):
return None, None
key = next(k for k in module.CONFIG_SCHEMA.schema if k == module.DOMAIN)  # type: ignore
except (AttributeError, StopIteration):
return None

schema = module.CONFIG_SCHEMA.schema[key]  # type: ignore

if hasattr(key, "default") and not isinstance(
key.default, vol.schema_builder.Undefined
):
default_value = module.CONFIG_SCHEMA({module.DOMAIN: key.default()})[  # type: ignore
module.DOMAIN  # type: ignore
]

if isinstance(default_value, dict):
return "dict"

if isinstance(default_value, list):
return "list"

return None

t_schema = str(schema)
if t_schema.startswith("{") or "schema_with_slug_keys" in t_schema:
return ("dict", schema)
return "dict"
if t_schema.startswith(("[", "All(<function ensure_list")):
return ("list", schema)
return "", schema
return "list"
return None


def _recursive_merge(conf: Dict[str, Any], package: Dict[str, Any]) -> Union[bool, str]:

@@ -626,8 +644,7 @@ async def merge_packages_config(
merge_list = hasattr(component, "PLATFORM_SCHEMA")

if not merge_list and hasattr(component, "CONFIG_SCHEMA"):
merge_type, _ = _identify_config_schema(component)
merge_list = merge_type == "list"
merge_list = _identify_config_schema(component) == "list"

if merge_list:
config[comp_name] = cv.remove_falsy(
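The reworked `_identify_config_schema` above probes the `vol.Optional`/`vol.Required` marker key: its default is stored as a callable, and evaluating it reveals whether the integration expects a dict or a list under its domain. A small standalone sketch of that probe (the `schema_shape` helper name is made up for illustration):

from typing import Optional

import voluptuous as vol


def schema_shape(schema: vol.Schema, domain: str) -> Optional[str]:
    """Return "dict", "list", or None for the value expected under `domain`."""
    try:
        key = next(k for k in schema.schema if k == domain)
    except StopIteration:
        return None

    # Marker keys (vol.Optional / vol.Required) store their default as a callable.
    if hasattr(key, "default") and not isinstance(
        key.default, vol.schema_builder.Undefined
    ):
        default_value = key.default()
        if isinstance(default_value, dict):
            return "dict"
        if isinstance(default_value, list):
            return "list"
    return None


print(schema_shape(vol.Schema({vol.Optional("zone", default=list): [int]}), "zone"))   # list
print(schema_shape(vol.Schema({vol.Optional("zone", default={}): {str: int}}), "zone"))  # dict
print(schema_shape(vol.Schema({"zone": int}), "zone"))                                  # None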
@@ -1,7 +1,7 @@
"""Constants used by Home Assistant components."""
MAJOR_VERSION = 0
MINOR_VERSION = 107
PATCH_VERSION = "0b6"
PATCH_VERSION = "6"
__short_version__ = f"{MAJOR_VERSION}.{MINOR_VERSION}"
__version__ = f"{__short_version__}.{PATCH_VERSION}"
REQUIRED_PYTHON_VER = (3, 7, 0)

@@ -701,7 +701,9 @@ class Script:

def _log(self, msg, *args, level=logging.INFO):
if self.name:
msg = f"{self.name}: {msg}"
msg = f"%s: {msg}"
args = [self.name, *args]

if level == _LOG_EXCEPTION:
self._logger.exception(msg, *args)
else:
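The `Script._log` change above stops interpolating the script name into the log format string; a name containing `%` (as exercised by the new test near the end of this diff) would otherwise be parsed as a printf placeholder by the logging call. A minimal reproduction of the idea outside Home Assistant:

import logging

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger("demo")

name = "Script with % Name"
msg = "Test message with name %s"

# Fragile: the % inside `name` becomes part of the format string, and logging
# reports a formatting error and drops the message.
# logger.info(f"{name}: {msg}", 1)

# Robust: keep %s in the format string and let logging substitute the name itself.
logger.info(f"%s: {msg}", name, 1)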
@@ -12,7 +12,7 @@ cryptography==2.8
defusedxml==0.6.0
distro==1.4.0
hass-nabucasa==0.32.2
home-assistant-frontend==20200316.1
home-assistant-frontend==20200318.1
importlib-metadata==1.5.0
jinja2>=2.10.3
netdisco==2.6.0

@@ -139,7 +139,7 @@ aio_georss_gdacs==0.3
aioambient==1.0.4

# homeassistant.components.asuswrt
aioasuswrt==1.2.2
aioasuswrt==1.2.3

# homeassistant.components.automatic
aioautomatic==0.6.5

@@ -163,7 +163,7 @@ aioftp==0.12.0
aioharmony==0.1.13

# homeassistant.components.homekit_controller
aiohomekit[IP]==0.2.29.1
aiohomekit[IP]==0.2.29.2

# homeassistant.components.emulated_hue
# homeassistant.components.http

@@ -696,10 +696,10 @@ hole==0.5.0
holidays==0.10.1

# homeassistant.components.frontend
home-assistant-frontend==20200316.1
home-assistant-frontend==20200318.1

# homeassistant.components.zwave
homeassistant-pyozw==0.1.9
homeassistant-pyozw==0.1.10

# homeassistant.components.homematicip_cloud
homematicip==0.10.17

@@ -712,7 +712,7 @@ horimote==0.4.1
httplib2==0.10.3

# homeassistant.components.huawei_lte
huawei-lte-api==1.4.10
huawei-lte-api==1.4.11

# homeassistant.components.hydrawise
hydrawiser==0.1.1

@@ -1017,6 +1017,7 @@ pilight==0.1.1
# homeassistant.components.proxy
# homeassistant.components.qrcode
# homeassistant.components.seven_segments
# homeassistant.components.sighthound
# homeassistant.components.tensorflow
pillow==7.0.0

@@ -1188,7 +1189,7 @@ pycfdns==0.0.1
pychannels==1.0.0

# homeassistant.components.cast
pychromecast==4.1.1
pychromecast==4.2.0

# homeassistant.components.cmus
pycmus==0.1.1

@@ -1315,7 +1316,7 @@ pyhomeworks==0.0.6
pyialarm==0.3

# homeassistant.components.icloud
pyicloud==0.9.4
pyicloud==0.9.5

# homeassistant.components.intesishome
pyintesishome==1.6

@@ -1665,7 +1666,7 @@ python-telnet-vlc==1.0.4
python-twitch-client==0.6.0

# homeassistant.components.velbus
python-velbus==2.0.42
python-velbus==2.0.43

# homeassistant.components.vlc
python-vlc==1.1.2

@@ -1855,7 +1856,7 @@ simplehound==0.3
simplepush==1.1.4

# homeassistant.components.simplisafe
simplisafe-python==9.0.2
simplisafe-python==9.0.4

# homeassistant.components.sisyphus
sisyphus-control==2.2.1

@@ -50,7 +50,7 @@ aio_georss_gdacs==0.3
aioambient==1.0.4

# homeassistant.components.asuswrt
aioasuswrt==1.2.2
aioasuswrt==1.2.3

# homeassistant.components.automatic
aioautomatic==0.6.5

@@ -62,7 +62,7 @@ aiobotocore==0.11.1
aioesphomeapi==2.6.1

# homeassistant.components.homekit_controller
aiohomekit[IP]==0.2.29.1
aiohomekit[IP]==0.2.29.2

# homeassistant.components.emulated_hue
# homeassistant.components.http

@@ -263,10 +263,10 @@ hole==0.5.0
holidays==0.10.1

# homeassistant.components.frontend
home-assistant-frontend==20200316.1
home-assistant-frontend==20200318.1

# homeassistant.components.zwave
homeassistant-pyozw==0.1.9
homeassistant-pyozw==0.1.10

# homeassistant.components.homematicip_cloud
homematicip==0.10.17

@@ -276,7 +276,7 @@ homematicip==0.10.17
httplib2==0.10.3

# homeassistant.components.huawei_lte
huawei-lte-api==1.4.10
huawei-lte-api==1.4.11

# homeassistant.components.iaqualink
iaqualink==0.3.1

@@ -365,6 +365,14 @@ pexpect==4.6.0
# homeassistant.components.pilight
pilight==0.1.1

# homeassistant.components.doods
# homeassistant.components.proxy
# homeassistant.components.qrcode
# homeassistant.components.seven_segments
# homeassistant.components.sighthound
# homeassistant.components.tensorflow
pillow==7.0.0

# homeassistant.components.plex
plexapi==3.3.0

@@ -443,7 +451,7 @@ pyblackbird==0.5
pybotvac==0.0.17

# homeassistant.components.cast
pychromecast==4.1.1
pychromecast==4.2.0

# homeassistant.components.coolmaster
pycoolmasternet==0.0.4

@@ -483,7 +491,7 @@ pyheos==0.6.0
pyhomematic==0.1.65

# homeassistant.components.icloud
pyicloud==0.9.4
pyicloud==0.9.5

# homeassistant.components.ipma
pyipma==2.0.5

@@ -587,7 +595,7 @@ python-nest==4.1.0
python-twitch-client==0.6.0

# homeassistant.components.velbus
python-velbus==2.0.42
python-velbus==2.0.43

# homeassistant.components.awair
python_awair==0.0.4

@@ -641,7 +649,7 @@ sentry-sdk==0.13.5
simplehound==0.3

# homeassistant.components.simplisafe
simplisafe-python==9.0.2
simplisafe-python==9.0.4

# homeassistant.components.sleepiq
sleepyq==0.7
5
rootfs/etc/services.d/home-assistant/finish
Normal file
@@ -0,0 +1,5 @@
#!/usr/bin/execlineb -S0
# ==============================================================================
# Take down the S6 supervision tree when Home Assistant fails
# ==============================================================================
s6-svscanctl -t /var/run/s6/services

7
rootfs/etc/services.d/home-assistant/run
Normal file
@@ -0,0 +1,7 @@
#!/usr/bin/with-contenv bashio
# ==============================================================================
# Start Home Assistant service
# ==============================================================================
cd /config || bashio::exit.nok "Can't find config folder!"

exec python3 -m homeassistant --config /config
@@ -1,5 +1,5 @@
"""Initializer helpers for HomematicIP fake server."""
from asynctest import CoroutineMock, MagicMock, Mock
from asynctest import CoroutineMock, MagicMock, Mock, patch
from homematicip.aio.auth import AsyncAuth
from homematicip.aio.connection import AsyncConnection
from homematicip.aio.home import AsyncHome

@@ -106,9 +106,10 @@ async def mock_hap_with_service_fixture(


@pytest.fixture(name="simple_mock_home")
def simple_mock_home_fixture() -> AsyncHome:
"""Return a simple AsyncHome Mock."""
return Mock(
def simple_mock_home_fixture():
"""Return a simple mocked connection."""

mock_home = Mock(
spec=AsyncHome,
name="Demo",
devices=[],

@@ -120,6 +121,27 @@ def simple_mock_home_fixture() -> AsyncHome:
connected=True,
)

with patch(
"homeassistant.components.homematicip_cloud.hap.AsyncHome",
autospec=True,
return_value=mock_home,
):
yield


@pytest.fixture(name="mock_connection_init")
def mock_connection_init_fixture():
"""Return a simple mocked connection."""

with patch(
"homeassistant.components.homematicip_cloud.hap.AsyncHome.init",
return_value=None,
), patch(
"homeassistant.components.homematicip_cloud.hap.AsyncAuth.init",
return_value=None,
):
yield


@pytest.fixture(name="simple_mock_auth")
def simple_mock_auth_fixture() -> AsyncAuth:
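The reworked `simple_mock_home` fixture above follows a common pytest pattern: build a `Mock` with `spec=`, patch the real class (`autospec=True`) for the duration of the test, and `yield` so the patch is undone on teardown. A generic sketch of that pattern with made-up names (`Client` is not the HomematicIP API):

import sys
from unittest.mock import Mock, patch

import pytest


class Client:
    """Stand-in for a real class that would normally hit the network."""

    def fetch(self) -> str:
        raise RuntimeError("no network in tests")


@pytest.fixture(name="mock_client")
def mock_client_fixture():
    """Patch Client for the duration of a test and undo the patch on teardown."""
    client = Mock(spec=Client)
    client.fetch.return_value = "canned response"
    # autospec keeps the constructor signature honest; return_value hands back our mock.
    with patch.object(
        sys.modules[__name__], "Client", autospec=True, return_value=client
    ):
        yield client


def test_uses_mock(mock_client):
    assert Client().fetch() == "canned response"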
@@ -16,12 +16,15 @@ DEFAULT_CONFIG = {HMIPC_HAPID: "ABC123", HMIPC_PIN: "123", HMIPC_NAME: "hmip"}
IMPORT_CONFIG = {HMIPC_HAPID: "ABC123", HMIPC_AUTHTOKEN: "123", HMIPC_NAME: "hmip"}


async def test_flow_works(hass):
async def test_flow_works(hass, simple_mock_home):
"""Test config flow."""

with patch(
"homeassistant.components.homematicip_cloud.hap.HomematicipAuth.async_checkbutton",
return_value=False,
), patch(
"homeassistant.components.homematicip_cloud.hap.HomematicipAuth.get_auth",
return_value=True,
):
result = await hass.config_entries.flow.async_init(
HMIPC_DOMAIN, context={"source": "user"}, data=DEFAULT_CONFIG

@@ -137,7 +140,7 @@ async def test_init_already_configured(hass):
assert result["reason"] == "already_configured"


async def test_import_config(hass):
async def test_import_config(hass, simple_mock_home):
"""Test importing a host with an existing config file."""
with patch(
"homeassistant.components.homematicip_cloud.hap.HomematicipAuth.async_checkbutton",

@@ -125,14 +125,11 @@ async def test_hap_create(hass, hmip_config_entry, simple_mock_home):
hass.config.components.add(HMIPC_DOMAIN)
hap = HomematicipHAP(hass, hmip_config_entry)
assert hap
with patch(
"homeassistant.components.homematicip_cloud.hap.AsyncHome",
return_value=simple_mock_home,
), patch.object(hap, "async_connect"):
with patch.object(hap, "async_connect"):
assert await hap.async_setup()


async def test_hap_create_exception(hass, hmip_config_entry):
async def test_hap_create_exception(hass, hmip_config_entry, mock_connection_init):
"""Mock AsyncHome to execute get_hap."""
hass.config.components.add(HMIPC_DOMAIN)


@@ -24,7 +24,9 @@ from homeassistant.setup import async_setup_component
from tests.common import MockConfigEntry


async def test_config_with_accesspoint_passed_to_config_entry(hass):
async def test_config_with_accesspoint_passed_to_config_entry(
hass, mock_connection, simple_mock_home
):
"""Test that config for a accesspoint are loaded via config entry."""

entry_config = {

@@ -51,7 +53,9 @@ async def test_config_with_accesspoint_passed_to_config_entry(hass):
assert isinstance(hass.data[HMIPC_DOMAIN]["ABC123"], HomematicipHAP)


async def test_config_already_registered_not_passed_to_config_entry(hass):
async def test_config_already_registered_not_passed_to_config_entry(
hass, simple_mock_home
):
"""Test that an already registered accesspoint does not get imported."""

mock_config = {HMIPC_AUTHTOKEN: "123", HMIPC_HAPID: "ABC123", HMIPC_NAME: "name"}

@@ -87,7 +91,9 @@ async def test_config_already_registered_not_passed_to_config_entry(hass):
assert config_entries[0].unique_id == "ABC123"


async def test_load_entry_fails_due_to_connection_error(hass, hmip_config_entry):
async def test_load_entry_fails_due_to_connection_error(
hass, hmip_config_entry, mock_connection_init
):
"""Test load entry fails due to connection error."""
hmip_config_entry.add_to_hass(hass)

@@ -101,7 +107,9 @@ async def test_load_entry_fails_due_to_connection_error(hass, hmip_config_entry)
assert hmip_config_entry.state == ENTRY_STATE_SETUP_RETRY


async def test_load_entry_fails_due_to_generic_exception(hass, hmip_config_entry):
async def test_load_entry_fails_due_to_generic_exception(
hass, hmip_config_entry, simple_mock_home
):
"""Test load entry fails due to generic exception."""
hmip_config_entry.add_to_hass(hass)
@@ -140,16 +140,7 @@ async def test_webhook_update_registration(webhook_client, authed_api_client):
async def test_webhook_handle_get_zones(hass, create_registrations, webhook_client):
"""Test that we can get zones properly."""
await async_setup_component(
hass,
ZONE_DOMAIN,
{
ZONE_DOMAIN: {
"name": "test",
"latitude": 32.880837,
"longitude": -117.237561,
"radius": 250,
}
},
hass, ZONE_DOMAIN, {ZONE_DOMAIN: {}},
)

resp = await webhook_client.post(

@@ -161,7 +152,8 @@ async def test_webhook_handle_get_zones(hass, create_registrations, webhook_clie

json = await resp.json()
assert len(json) == 1
assert json[0]["entity_id"] == "zone.home"
zones = sorted(json, key=lambda entry: entry["entity_id"])
assert zones[0]["entity_id"] == "zone.home"


async def test_webhook_handle_get_config(hass, create_registrations, webhook_client):

@@ -65,6 +65,7 @@ async def test_full_flow(hass, aiohttp_client, aioclient_mock):
"read_station",
"read_thermostat",
"write_camera",
"write_presence",
"write_thermostat",
]
)
@@ -4,7 +4,7 @@ from unittest.mock import patch
from regenmaschine.errors import RainMachineError

from homeassistant import data_entry_flow
from homeassistant.components.rainmachine import DOMAIN, config_flow
from homeassistant.components.rainmachine import CONF_ZONE_RUN_TIME, DOMAIN, config_flow
from homeassistant.config_entries import SOURCE_USER
from homeassistant.const import (
CONF_IP_ADDRESS,

@@ -98,6 +98,7 @@ async def test_step_import(hass):
CONF_PORT: 8080,
CONF_SSL: True,
CONF_SCAN_INTERVAL: 60,
CONF_ZONE_RUN_TIME: 600,
}


@@ -129,4 +130,5 @@ async def test_step_user(hass):
CONF_PORT: 8080,
CONF_SSL: True,
CONF_SCAN_INTERVAL: 60,
CONF_ZONE_RUN_TIME: 600,
}
@@ -1,5 +1,5 @@
"""Tests for Samsung TV config flow."""
from unittest.mock import call, patch
from unittest.mock import Mock, PropertyMock, call, patch

from asynctest import mock
import pytest

@@ -19,7 +19,7 @@ from homeassistant.components.ssdp import (
ATTR_UPNP_MODEL_NAME,
ATTR_UPNP_UDN,
)
from homeassistant.const import CONF_HOST, CONF_ID, CONF_METHOD, CONF_NAME
from homeassistant.const import CONF_HOST, CONF_ID, CONF_METHOD, CONF_NAME, CONF_TOKEN

MOCK_USER_DATA = {CONF_HOST: "fake_host", CONF_NAME: "fake_name"}
MOCK_SSDP_DATA = {

@@ -46,6 +46,20 @@ AUTODETECT_LEGACY = {
"host": "fake_host",
"timeout": 31,
}
AUTODETECT_WEBSOCKET_PLAIN = {
"host": "fake_host",
"name": "HomeAssistant",
"port": 8001,
"timeout": 31,
"token": None,
}
AUTODETECT_WEBSOCKET_SSL = {
"host": "fake_host",
"name": "HomeAssistant",
"port": 8002,
"timeout": 31,
"token": None,
}


@pytest.fixture(name="remote")

@@ -446,20 +460,48 @@ async def test_autodetect_websocket(hass, remote, remotews):
with patch(
"homeassistant.components.samsungtv.bridge.Remote", side_effect=OSError("Boom"),
), patch("homeassistant.components.samsungtv.bridge.SamsungTVWS") as remotews:
enter = Mock()
type(enter).token = PropertyMock(return_value="123456789")
remote = Mock()
remote.__enter__ = Mock(return_value=enter)
remote.__exit__ = Mock(return_value=False)
remotews.return_value = remote

result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": "user"}, data=MOCK_USER_DATA
)
assert result["type"] == "create_entry"
assert result["data"][CONF_METHOD] == "websocket"
assert result["data"][CONF_TOKEN] == "123456789"
assert remotews.call_count == 1
assert remotews.call_args_list == [call(**AUTODETECT_WEBSOCKET_PLAIN)]


async def test_autodetect_websocket_ssl(hass, remote, remotews):
"""Test for send key with autodetection of protocol."""
with patch(
"homeassistant.components.samsungtv.bridge.Remote", side_effect=OSError("Boom"),
), patch(
"homeassistant.components.samsungtv.bridge.SamsungTVWS",
side_effect=[WebSocketProtocolException("Boom"), mock.DEFAULT],
) as remotews:
enter = Mock()
type(enter).token = PropertyMock(return_value="123456789")
remote = Mock()
remote.__enter__ = Mock(return_value=enter)
remote.__exit__ = Mock(return_value=False)
remotews.return_value = remote

result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": "user"}, data=MOCK_USER_DATA
)
assert result["type"] == "create_entry"
assert result["data"][CONF_METHOD] == "websocket"
assert result["data"][CONF_TOKEN] == "123456789"
assert remotews.call_count == 2
assert remotews.call_args_list == [
call(
host="fake_host",
name="HomeAssistant",
port=8001,
timeout=31,
token=None,
)
call(**AUTODETECT_WEBSOCKET_PLAIN),
call(**AUTODETECT_WEBSOCKET_SSL),
]


@@ -524,18 +566,6 @@ async def test_autodetect_none(hass, remote, remotews):
]
assert remotews.call_count == 2
assert remotews.call_args_list == [
call(
host="fake_host",
name="HomeAssistant",
port=8001,
timeout=31,
token=None,
),
call(
host="fake_host",
name="HomeAssistant",
port=8002,
timeout=31,
token=None,
),
call(**AUTODETECT_WEBSOCKET_PLAIN),
call(**AUTODETECT_WEBSOCKET_SSL),
]
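The Samsung TV tests above fake a library object that is used as a context manager: `__enter__` hands back an object whose `token` is a `PropertyMock`. A stripped-down sketch of the technique with plain `unittest.mock`; it uses `MagicMock`, which pre-wires the context-manager protocol, and all names are illustrative:

from unittest.mock import MagicMock, PropertyMock


def connect(factory):
    """Use the factory the way code under test would: as a context manager."""
    with factory(host="fake_host") as session:
        return session.token


# The object yielded by __enter__ exposes a token via PropertyMock.
session = MagicMock()
type(session).token = PropertyMock(return_value="123456789")

# MagicMock already supports `with`; only the __enter__ result needs wiring.
manager = MagicMock()
manager.__enter__.return_value = session

factory = MagicMock(return_value=manager)

assert connect(factory) == "123456789"
factory.assert_called_once_with(host="fake_host")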
@@ -34,8 +34,11 @@ from homeassistant.const import (
ATTR_FRIENDLY_NAME,
ATTR_SUPPORTED_FEATURES,
CONF_HOST,
CONF_IP_ADDRESS,
CONF_METHOD,
CONF_NAME,
CONF_PORT,
CONF_TOKEN,
SERVICE_MEDIA_NEXT_TRACK,
SERVICE_MEDIA_PAUSE,
SERVICE_MEDIA_PLAY,

@@ -51,7 +54,7 @@ from homeassistant.const import (
from homeassistant.setup import async_setup_component
import homeassistant.util.dt as dt_util

from tests.common import async_fire_time_changed
from tests.common import MockConfigEntry, async_fire_time_changed

ENTITY_ID = f"{DOMAIN}.fake"
MOCK_CONFIG = {

@@ -64,17 +67,40 @@ MOCK_CONFIG = {
}
]
}

MOCK_CONFIGWS = {
SAMSUNGTV_DOMAIN: [
{
CONF_HOST: "fake",
CONF_NAME: "fake",
CONF_PORT: 8001,
CONF_TOKEN: "123456789",
CONF_ON_ACTION: [{"delay": "00:00:01"}],
}
]
}
MOCK_CALLS_WS = {
"host": "fake",
"port": 8001,
"token": None,
"timeout": 31,
"name": "HomeAssistant",
}

MOCK_ENTRY_WS = {
CONF_IP_ADDRESS: "test",
CONF_HOST: "fake",
CONF_METHOD: "websocket",
CONF_NAME: "fake",
CONF_PORT: 8001,
CONF_TOKEN: "abcde",
}
MOCK_CALLS_ENTRY_WS = {
"host": "fake",
"name": "HomeAssistant",
"port": 8001,
"timeout": 1,
"token": "abcde",
}

ENTITY_ID_NOTURNON = f"{DOMAIN}.fake_noturnon"
MOCK_CONFIG_NOTURNON = {

@@ -155,6 +181,52 @@ async def test_setup_without_turnon(hass, remote):
assert hass.states.get(ENTITY_ID_NOTURNON)


async def test_setup_websocket(hass, remotews, mock_now):
"""Test setup of platform."""
with patch("homeassistant.components.samsungtv.bridge.SamsungTVWS") as remote_class:
enter = mock.Mock()
type(enter).token = mock.PropertyMock(return_value="987654321")
remote = mock.Mock()
remote.__enter__ = mock.Mock(return_value=enter)
remote.__exit__ = mock.Mock()
remote_class.return_value = remote

await setup_samsungtv(hass, MOCK_CONFIGWS)

assert remote_class.call_count == 1
assert remote_class.call_args_list == [call(**MOCK_CALLS_WS)]
assert hass.states.get(ENTITY_ID)


async def test_setup_websocket_2(hass, mock_now):
"""Test setup of platform from config entry."""
entity_id = f"{DOMAIN}.fake"

entry = MockConfigEntry(
domain=SAMSUNGTV_DOMAIN, data=MOCK_ENTRY_WS, unique_id=entity_id,
)
entry.add_to_hass(hass)

config_entries = hass.config_entries.async_entries(SAMSUNGTV_DOMAIN)
assert len(config_entries) == 1
assert entry is config_entries[0]

assert await async_setup_component(hass, SAMSUNGTV_DOMAIN, {})
await hass.async_block_till_done()

next_update = mock_now + timedelta(minutes=5)
with patch(
"homeassistant.components.samsungtv.bridge.SamsungTVWS"
) as remote, patch("homeassistant.util.dt.utcnow", return_value=next_update):
async_fire_time_changed(hass, next_update)
await hass.async_block_till_done()

state = hass.states.get(entity_id)
assert state
assert remote.call_count == 1
assert remote.call_args_list == [call(**MOCK_CALLS_ENTRY_WS)]


async def test_update_on(hass, remote, mock_now):
"""Testing update tv on."""
await setup_samsungtv(hass, MOCK_CONFIG)
@@ -157,7 +157,7 @@ async def test_dedup_logs(hass, hass_client):
log_msg()
log = await get_error_log(hass, hass_client, 3)
assert_log(log[0], "", ["error message 2", "error message 2-2"], "ERROR")
assert log[0]["timestamp"] > log[0]["first_occured"]
assert log[0]["timestamp"] > log[0]["first_occurred"]

log_msg("2-3")
log_msg("2-4")
@@ -108,7 +108,7 @@ async def test_no_clients(hass):
"""Test the update_clients function when no clients are found."""
await setup_unifi_integration(hass)

assert len(hass.states.async_all()) == 1
assert len(hass.states.async_entity_ids("device_tracker")) == 0


async def test_tracked_devices(hass):

@@ -123,7 +123,7 @@ async def test_tracked_devices(hass):
devices_response=[DEVICE_1, DEVICE_2],
known_wireless_clients=(CLIENT_4["mac"],),
)
assert len(hass.states.async_all()) == 7
assert len(hass.states.async_entity_ids("device_tracker")) == 6

client_1 = hass.states.get("device_tracker.client_1")
assert client_1 is not None

@@ -184,7 +184,7 @@ async def test_controller_state_change(hass):
controller = await setup_unifi_integration(
hass, clients_response=[CLIENT_1], devices_response=[DEVICE_1],
)
assert len(hass.states.async_all()) == 3
assert len(hass.states.async_entity_ids("device_tracker")) == 2

# Controller unavailable
controller.async_unifi_signalling_callback(

@@ -214,7 +214,7 @@ async def test_option_track_clients(hass):
controller = await setup_unifi_integration(
hass, clients_response=[CLIENT_1, CLIENT_2], devices_response=[DEVICE_1],
)
assert len(hass.states.async_all()) == 4
assert len(hass.states.async_entity_ids("device_tracker")) == 3

client_1 = hass.states.get("device_tracker.client_1")
assert client_1 is not None

@@ -259,7 +259,7 @@ async def test_option_track_wired_clients(hass):
controller = await setup_unifi_integration(
hass, clients_response=[CLIENT_1, CLIENT_2], devices_response=[DEVICE_1],
)
assert len(hass.states.async_all()) == 4
assert len(hass.states.async_entity_ids("device_tracker")) == 3

client_1 = hass.states.get("device_tracker.client_1")
assert client_1 is not None

@@ -304,7 +304,7 @@ async def test_option_track_devices(hass):
controller = await setup_unifi_integration(
hass, clients_response=[CLIENT_1, CLIENT_2], devices_response=[DEVICE_1],
)
assert len(hass.states.async_all()) == 4
assert len(hass.states.async_entity_ids("device_tracker")) == 3

client_1 = hass.states.get("device_tracker.client_1")
assert client_1 is not None

@@ -349,7 +349,7 @@ async def test_option_ssid_filter(hass):
controller = await setup_unifi_integration(
hass, options={CONF_SSID_FILTER: ["ssid"]}, clients_response=[CLIENT_3],
)
assert len(hass.states.async_all()) == 2
assert len(hass.states.async_entity_ids("device_tracker")) == 1

# SSID filter active
client_3 = hass.states.get("device_tracker.client_3")

@@ -387,7 +387,7 @@ async def test_wireless_client_go_wired_issue(hass):
client_1_client["last_seen"] = dt_util.as_timestamp(dt_util.utcnow())

controller = await setup_unifi_integration(hass, clients_response=[client_1_client])
assert len(hass.states.async_all()) == 2
assert len(hass.states.async_entity_ids("device_tracker")) == 1

client_1 = hass.states.get("device_tracker.client_1")
assert client_1 is not None

@@ -460,7 +460,7 @@ async def test_restoring_client(hass):
clients_response=[CLIENT_2],
clients_all_response=[CLIENT_1],
)
assert len(hass.states.async_all()) == 3
assert len(hass.states.async_entity_ids("device_tracker")) == 2

device_1 = hass.states.get("device_tracker.client_1")
assert device_1 is not None

@@ -474,7 +474,7 @@ async def test_dont_track_clients(hass):
clients_response=[CLIENT_1],
devices_response=[DEVICE_1],
)
assert len(hass.states.async_all()) == 2
assert len(hass.states.async_entity_ids("device_tracker")) == 1

client_1 = hass.states.get("device_tracker.client_1")
assert client_1 is None

@@ -492,7 +492,7 @@ async def test_dont_track_devices(hass):
clients_response=[CLIENT_1],
devices_response=[DEVICE_1],
)
assert len(hass.states.async_all()) == 2
assert len(hass.states.async_entity_ids("device_tracker")) == 1

client_1 = hass.states.get("device_tracker.client_1")
assert client_1 is not None

@@ -509,7 +509,7 @@ async def test_dont_track_wired_clients(hass):
options={unifi.controller.CONF_TRACK_WIRED_CLIENTS: False},
clients_response=[CLIENT_1, CLIENT_2],
)
assert len(hass.states.async_all()) == 2
assert len(hass.states.async_entity_ids("device_tracker")) == 1

client_1 = hass.states.get("device_tracker.client_1")
assert client_1 is not None

@@ -55,7 +55,7 @@ async def test_no_clients(hass):
)

assert len(controller.mock_requests) == 4
assert len(hass.states.async_all()) == 1
assert len(hass.states.async_entity_ids("sensor")) == 0


async def test_sensors(hass):

@@ -71,7 +71,7 @@ async def test_sensors(hass):
)

assert len(controller.mock_requests) == 4
assert len(hass.states.async_all()) == 5
assert len(hass.states.async_entity_ids("sensor")) == 4

wired_client_rx = hass.states.get("sensor.wired_client_name_rx")
assert wired_client_rx.state == "1234.0"

@@ -209,7 +209,7 @@ async def test_no_clients(hass):
)

assert len(controller.mock_requests) == 4
assert len(hass.states.async_all()) == 1
assert len(hass.states.async_entity_ids("switch")) == 0


async def test_controller_not_client(hass):

@@ -222,7 +222,7 @@ async def test_controller_not_client(hass):
)

assert len(controller.mock_requests) == 4
assert len(hass.states.async_all()) == 1
assert len(hass.states.async_entity_ids("switch")) == 0
cloudkey = hass.states.get("switch.cloud_key")
assert cloudkey is None

@@ -240,7 +240,7 @@ async def test_not_admin(hass):
)

assert len(controller.mock_requests) == 4
assert len(hass.states.async_all()) == 1
assert len(hass.states.async_entity_ids("switch")) == 0


async def test_switches(hass):

@@ -258,7 +258,7 @@ async def test_switches(hass):
)

assert len(controller.mock_requests) == 4
assert len(hass.states.async_all()) == 4
assert len(hass.states.async_entity_ids("switch")) == 3

switch_1 = hass.states.get("switch.poe_client_1")
assert switch_1 is not None

@@ -312,7 +312,7 @@ async def test_new_client_discovered_on_block_control(hass):
)

assert len(controller.mock_requests) == 4
assert len(hass.states.async_all()) == 1
assert len(hass.states.async_entity_ids("switch")) == 0

blocked = hass.states.get("switch.block_client_1")
assert blocked is None

@@ -324,7 +324,7 @@ async def test_new_client_discovered_on_block_control(hass):
controller.api.session_handler("data")
await hass.async_block_till_done()

assert len(hass.states.async_all()) == 2
assert len(hass.states.async_entity_ids("switch")) == 1
blocked = hass.states.get("switch.block_client_1")
assert blocked is not None

@@ -336,7 +336,7 @@ async def test_option_block_clients(hass):
options={CONF_BLOCK_CLIENT: [BLOCKED["mac"]]},
clients_all_response=[BLOCKED, UNBLOCKED],
)
assert len(hass.states.async_all()) == 2
assert len(hass.states.async_entity_ids("switch")) == 1

# Add a second switch
hass.config_entries.async_update_entry(

@@ -344,28 +344,28 @@ async def test_option_block_clients(hass):
options={CONF_BLOCK_CLIENT: [BLOCKED["mac"], UNBLOCKED["mac"]]},
)
await hass.async_block_till_done()
assert len(hass.states.async_all()) == 3
assert len(hass.states.async_entity_ids("switch")) == 2

# Remove the second switch again
hass.config_entries.async_update_entry(
controller.config_entry, options={CONF_BLOCK_CLIENT: [BLOCKED["mac"]]},
)
await hass.async_block_till_done()
assert len(hass.states.async_all()) == 2
assert len(hass.states.async_entity_ids("switch")) == 1

# Enable one and remove another one
hass.config_entries.async_update_entry(
controller.config_entry, options={CONF_BLOCK_CLIENT: [UNBLOCKED["mac"]]},
)
await hass.async_block_till_done()
assert len(hass.states.async_all()) == 2
assert len(hass.states.async_entity_ids("switch")) == 1

# Remove one
hass.config_entries.async_update_entry(
controller.config_entry, options={CONF_BLOCK_CLIENT: []},
)
await hass.async_block_till_done()
assert len(hass.states.async_all()) == 1
assert len(hass.states.async_entity_ids("switch")) == 0


async def test_new_client_discovered_on_poe_control(hass):

@@ -378,7 +378,7 @@ async def test_new_client_discovered_on_poe_control(hass):
)

assert len(controller.mock_requests) == 4
assert len(hass.states.async_all()) == 2
assert len(hass.states.async_entity_ids("switch")) == 1

controller.api.websocket._data = {
"meta": {"message": "sta:sync"},

@@ -391,7 +391,7 @@ async def test_new_client_discovered_on_poe_control(hass):
"switch", "turn_off", {"entity_id": "switch.poe_client_1"}, blocking=True
)
assert len(controller.mock_requests) == 5
assert len(hass.states.async_all()) == 3
assert len(hass.states.async_entity_ids("switch")) == 2
assert controller.mock_requests[4] == {
"json": {
"port_overrides": [{"port_idx": 1, "portconf_id": "1a1", "poe_mode": "off"}]

@@ -430,7 +430,7 @@ async def test_ignore_multiple_poe_clients_on_same_port(hass):
)

assert len(controller.mock_requests) == 4
assert len(hass.states.async_all()) == 4
assert len(hass.states.async_entity_ids("device_tracker")) == 3

switch_1 = hass.states.get("switch.poe_client_1")
switch_2 = hass.states.get("switch.poe_client_2")

@@ -481,7 +481,7 @@ async def test_restoring_client(hass):
)

assert len(controller.mock_requests) == 4
assert len(hass.states.async_all()) == 3
assert len(hass.states.async_entity_ids("switch")) == 2

device_1 = hass.states.get("switch.client_1")
assert device_1 is not None
@@ -487,3 +487,18 @@ async def test_import_config_entry(hass):
assert state.attributes[zone.ATTR_RADIUS] == 3
assert state.attributes[zone.ATTR_PASSIVE] is False
assert state.attributes[ATTR_ICON] == "mdi:from-config-entry"


async def test_zone_empty_setup(hass):
"""Set up zone with empty config."""
assert await setup.async_setup_component(hass, DOMAIN, {"zone": {}})


async def test_unavailable_zone(hass):
"""Test active zone with unavailable zones."""
assert await setup.async_setup_component(hass, DOMAIN, {"zone": {}})
hass.states.async_set("zone.bla", "unavailable", {"restored": True})

assert zone.async_active_zone(hass, 0.0, 0.01) is None

assert zone.in_zone(hass.states.get("zone.bla"), 0, 0) is False
@@ -1743,3 +1743,15 @@ async def test_if_running_parallel(hass):
assert len(events) == 4
assert events[2].data["value"] == 2
assert events[3].data["value"] == 2


async def test_script_logging(caplog):
"""Test script logging."""
script_obj = script.Script(None, [], "Script with % Name")
script_obj._log("Test message with name %s", 1)

assert "Script with % Name: Test message with name 1" in caplog.text

script_obj = script.Script(None, [])
script_obj._log("Test message without name %s", 2)
assert "Test message without name 2" in caplog.text
@@ -10,6 +10,7 @@ from unittest.mock import Mock
import asynctest
from asynctest import CoroutineMock, patch
import pytest
import voluptuous as vol
from voluptuous import Invalid, MultipleInvalid
import yaml

@@ -721,7 +722,7 @@ async def test_merge_id_schema(hass):
for domain, expected_type in types.items():
integration = await async_get_integration(hass, domain)
module = integration.get_component()
typ, _ = config_util._identify_config_schema(module)
typ = config_util._identify_config_schema(module)
assert typ == expected_type, f"{domain} expected {expected_type}, got {typ}"


@@ -989,3 +990,35 @@ async def test_component_config_exceptions(hass, caplog):
"Unknown error validating config for test_platform platform for test_domain component with PLATFORM_SCHEMA"
in caplog.text
)


@pytest.mark.parametrize(
"domain, schema, expected",
[
("zone", vol.Schema({vol.Optional("zone", default=list): [int]}), "list"),
("zone", vol.Schema({vol.Optional("zone", default=[]): [int]}), "list"),
(
"zone",
vol.Schema({vol.Optional("zone", default={}): {vol.Optional("hello"): 1}}),
"dict",
),
(
"zone",
vol.Schema(
{vol.Optional("zone", default=dict): {vol.Optional("hello"): 1}}
),
"dict",
),
("zone", vol.Schema({vol.Optional("zone"): int}), None),
("zone", vol.Schema({"zone": int}), None),
("not_existing", vol.Schema({vol.Optional("zone", default=dict): dict}), None,),
("non_existing", vol.Schema({"zone": int}), None),
("zone", vol.Schema({}), None),
],
)
def test_identify_config_schema(domain, schema, expected):
"""Test identify config schema."""
assert (
config_util._identify_config_schema(Mock(DOMAIN=domain, CONFIG_SCHEMA=schema))
== expected
)