Mirror of https://github.com/home-assistant/core.git (synced 2025-09-22 11:29:33 +00:00)
Compare commits
55 Commits
07ce284acd
eac03ef4be
a38e075bda
cced74740f
1236c2b91e
d54d7c6958
9365ba3fcf
baf7fb7264
edfe8e1583
b36b1dbc70
253c848692
7a6ac578b4
95de94e53f
181b2803cd
e0f2fa33df
884c346bdf
c218ff5a75
fa43a218d2
f4cc64d289
4c31829832
ff32c1c3e9
677c276b41
ca1c696f54
78e5878247
470537bc5f
64556f6f69
2785b067e3
a129bc05ae
197736f66b
d82d7fa2e9
663db747e9
57998f6f0f
edbb995fff
312903025d
0ae5c325fe
a309a00929
55be5bf880
7b37dcd8ed
e36bdd717a
fa650b648c
ac2310e7f9
aee5c16803
5f0816ea25
6a6037790f
d2b0c35319
d707a1b072
ca12db9271
346a4b399d
2090252936
a28091e94a
ae8cb0ccdf
06a608e342
9af95e8577
29a9781bf7
877eddf43d
@@ -2,9 +2,15 @@
.git
.github
config
docs

# Development
.devcontainer
.vscode

# Test related files
.tox
tests

# Other virtualization methods
venv
17 Dockerfile Normal file
@@ -0,0 +1,17 @@
ARG BUILD_FROM
FROM ${BUILD_FROM}

WORKDIR /usr/src

## Setup Home Assistant
COPY . homeassistant/
RUN pip3 install --no-cache-dir --no-index --only-binary=:all: --find-links "${WHEELS_LINKS}" \
-r homeassistant/requirements_all.txt -c homeassistant/homeassistant/package_constraints.txt \
&& pip3 install --no-cache-dir --no-index --only-binary=:all: --find-links "${WHEELS_LINKS}" \
-e ./homeassistant \
&& python3 -m compileall homeassistant/homeassistant

# Home Assistant S6-Overlay
COPY rootfs /

WORKDIR /config
@@ -14,7 +14,7 @@ schedules:
always: true
variables:
- name: versionBuilder
value: '6.9'
value: '7.2.0'
- group: docker
- group: github
- group: twine
@@ -108,11 +108,9 @@ stages:
docker run --rm --privileged \
-v ~/.docker:/root/.docker:rw \
-v /run/docker.sock:/run/docker.sock:rw \
-v $(pwd):/homeassistant:ro \
-v $(pwd):/data:ro \
homeassistant/amd64-builder:$(versionBuilder) \
--homeassistant $(homeassistantRelease) "--$(buildArch)" \
-r https://github.com/home-assistant/hassio-homeassistant \
-t generic --docker-hub homeassistant
--generic $(homeassistantRelease) "--$(buildArch)" -t /data \

docker run --rm --privileged \
-v ~/.docker:/root/.docker \
@@ -5,6 +5,7 @@ trigger:
branches:
include:
- dev
- rc
paths:
include:
- requirements_all.txt
@@ -18,7 +19,7 @@ schedules:
always: true
variables:
- name: versionWheels
value: '1.4-3.7-alpine3.10'
value: '1.10.1-3.7-alpine3.11'
resources:
repositories:
- repository: azure
@@ -32,8 +33,10 @@ jobs:
builderVersion: '$(versionWheels)'
builderApk: 'build-base;cmake;git;linux-headers;bluez-dev;libffi-dev;openssl-dev;glib-dev;eudev-dev;libxml2-dev;libxslt-dev;libpng-dev;libjpeg-turbo-dev;tiff-dev;autoconf;automake;cups-dev;gmp-dev;mpfr-dev;mpc1-dev;ffmpeg-dev;gammu-dev'
builderPip: 'Cython;numpy'
skipBinary: 'aiohttp'
wheelsRequirement: 'requirements_wheels.txt'
wheelsRequirementDiff: 'requirements_diff.txt'
wheelsConstraint: 'homeassistant/package_constraints.txt'
preBuild:
- script: |
cp requirements_all.txt requirements_wheels.txt
@@ -69,9 +72,5 @@ jobs:
sed -i "s|# py_noaa|py_noaa|g" ${requirement_file}
sed -i "s|# bme680|bme680|g" ${requirement_file}
sed -i "s|# python-gammu|python-gammu|g" ${requirement_file}

if [[ "$(buildArch)" =~ arm ]]; then
sed -i "s|# VL53L1X|VL53L1X|g" ${requirement_file}
fi
done
displayName: 'Prepare requirements files for Hass.io'
14 build.json Normal file
@@ -0,0 +1,14 @@
{
"image": "homeassistant/{arch}-homeassistant",
"build_from": {
"aarch64": "homeassistant/aarch64-homeassistant-base:7.0.1",
"armhf": "homeassistant/armhf-homeassistant-base:7.0.1",
"armv7": "homeassistant/armv7-homeassistant-base:7.0.1",
"amd64": "homeassistant/amd64-homeassistant-base:7.0.1",
"i386": "homeassistant/i386-homeassistant-base:7.0.1"
},
"labels": {
"io.hass.type": "core"
},
"version_tag": true
}
@@ -73,8 +73,8 @@ async def async_setup(hass, config):
conf.get("ssh_key", conf.get("pub_key", "")),
conf[CONF_MODE],
conf[CONF_REQUIRE_IP],
conf[CONF_INTERFACE],
conf[CONF_DNSMASQ],
interface=conf[CONF_INTERFACE],
dnsmasq=conf[CONF_DNSMASQ],
)

await api.connection.async_connect()
@@ -2,7 +2,7 @@
"domain": "asuswrt",
"name": "ASUSWRT",
"documentation": "https://www.home-assistant.io/integrations/asuswrt",
"requirements": ["aioasuswrt==1.2.2"],
"requirements": ["aioasuswrt==1.2.3"],
"dependencies": [],
"codeowners": ["@kennedyshead"]
}
@@ -21,6 +21,10 @@ from homeassistant.helpers.dispatcher import async_dispatcher_connect
from .axis_base import AxisEntityBase
from .const import DOMAIN as AXIS_DOMAIN

AXIS_IMAGE = "http://{host}:{port}/axis-cgi/jpg/image.cgi"
AXIS_VIDEO = "http://{host}:{port}/axis-cgi/mjpg/video.cgi"
AXIS_STREAM = "rtsp://{user}:{password}@{host}/axis-media/media.amp?videocodec=h264"


async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up the Axis camera video stream."""
@@ -32,13 +36,11 @@ async def async_setup_entry(hass, config_entry, async_add_entities):
CONF_NAME: config_entry.data[CONF_NAME],
CONF_USERNAME: config_entry.data[CONF_USERNAME],
CONF_PASSWORD: config_entry.data[CONF_PASSWORD],
CONF_MJPEG_URL: (
f"http://{config_entry.data[CONF_HOST]}"
f":{config_entry.data[CONF_PORT]}/axis-cgi/mjpg/video.cgi"
CONF_MJPEG_URL: AXIS_VIDEO.format(
host=config_entry.data[CONF_HOST], port=config_entry.data[CONF_PORT],
),
CONF_STILL_IMAGE_URL: (
f"http://{config_entry.data[CONF_HOST]}"
f":{config_entry.data[CONF_PORT]}/axis-cgi/jpg/image.cgi"
CONF_STILL_IMAGE_URL: AXIS_IMAGE.format(
host=config_entry.data[CONF_HOST], port=config_entry.data[CONF_PORT],
),
CONF_AUTHENTICATION: HTTP_DIGEST_AUTHENTICATION,
}
@@ -70,19 +72,17 @@ class AxisCamera(AxisEntityBase, MjpegCamera):

async def stream_source(self):
"""Return the stream source."""
return (
f"rtsp://{self.device.config_entry.data[CONF_USERNAME]}´"
f":{self.device.config_entry.data[CONF_PASSWORD]}"
f"@{self.device.host}/axis-media/media.amp?videocodec=h264"
return AXIS_STREAM.format(
user=self.device.config_entry.data[CONF_USERNAME],
password=self.device.config_entry.data[CONF_PASSWORD],
host=self.device.host,
)

def _new_address(self):
"""Set new device address for video stream."""
port = self.device.config_entry.data[CONF_PORT]
self._mjpeg_url = (f"http://{self.device.host}:{port}/axis-cgi/mjpg/video.cgi",)
self._still_image_url = (
f"http://{self.device.host}:{port}/axis-cgi/jpg/image.cgi"
)
self._mjpeg_url = AXIS_VIDEO.format(host=self.device.host, port=port)
self._still_image_url = AXIS_IMAGE.format(host=self.device.host, port=port)

@property
def unique_id(self):
@@ -141,7 +141,7 @@ async def async_request_stream(hass, entity_id, fmt):
source,
fmt=fmt,
keepalive=camera_prefs.preload_stream,
options=camera.options,
options=camera.stream_options,
)
@@ -3,7 +3,7 @@
"name": "HomeKit Controller",
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/homekit_controller",
"requirements": ["aiohomekit[IP]==0.2.29.1"],
"requirements": ["aiohomekit[IP]==0.2.29.2"],
"dependencies": [],
"zeroconf": ["_hap._tcp.local."],
"codeowners": ["@Jc2k"]
@@ -5,7 +5,7 @@
"documentation": "https://www.home-assistant.io/integrations/huawei_lte",
"requirements": [
"getmac==0.8.1",
"huawei-lte-api==1.4.10",
"huawei-lte-api==1.4.11",
"stringcase==1.2.0",
"url-normalize==1.4.1"
],
@@ -104,7 +104,7 @@ async def async_get_api(hass):

async def async_get_location(hass, api, latitude, longitude):
"""Retrieve pyipma location, location name to be used as the entity name."""
with async_timeout.timeout(10):
with async_timeout.timeout(30):
location = await Location.get(api, float(latitude), float(longitude))

_LOGGER.debug(
@@ -28,6 +28,7 @@ from . import api, config_flow
from .const import (
AUTH,
CONF_CLOUDHOOK_URL,
DATA_DEVICE_IDS,
DATA_PERSONS,
DOMAIN,
OAUTH2_AUTHORIZE,
@@ -65,6 +66,7 @@ async def async_setup(hass: HomeAssistant, config: dict):
"""Set up the Netatmo component."""
hass.data[DOMAIN] = {}
hass.data[DOMAIN][DATA_PERSONS] = {}
hass.data[DOMAIN][DATA_DEVICE_IDS] = {}

if DOMAIN not in config:
return True
@@ -104,7 +106,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry):
webhook_unregister(hass, entry.data[CONF_WEBHOOK_ID])

async def register_webhook(event):
# Wait for the could integration to be ready
# Wait for the cloud integration to be ready
await asyncio.sleep(WAIT_FOR_CLOUD)

if CONF_WEBHOOK_ID not in entry.data:
@@ -112,6 +114,9 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry):
hass.config_entries.async_update_entry(entry, data=data)

if hass.components.cloud.async_active_subscription():
# Wait for cloud connection to be established
await asyncio.sleep(WAIT_FOR_CLOUD)

if CONF_CLOUDHOOK_URL not in entry.data:
webhook_url = await hass.components.cloud.async_create_cloudhook(
entry.data[CONF_WEBHOOK_ID]
@@ -144,6 +149,11 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry):

async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry):
"""Unload a config entry."""
if CONF_WEBHOOK_ID in entry.data:
await hass.async_add_executor_job(
hass.data[DOMAIN][entry.entry_id][AUTH].dropwebhook
)

unload_ok = all(
await asyncio.gather(
*[
@@ -152,14 +162,10 @@ async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry):
]
)
)

if unload_ok:
hass.data[DOMAIN].pop(entry.entry_id)

if CONF_WEBHOOK_ID in entry.data:
await hass.async_add_executor_job(
hass.data[DOMAIN][entry.entry_id][AUTH].dropwebhook()
)

return unload_ok
@@ -84,21 +84,11 @@ class NetatmoCamera(Camera):
self._unique_id = f"{self._camera_id}-{self._camera_type}"
self._verify_ssl = verify_ssl
self._quality = quality

# URLs
self._vpnurl = None
self._localurl = None

# Monitoring status
self._status = None

# SD Card status
self._sd_status = None

# Power status
self._alim_status = None

# Is local
self._is_local = None

def camera_image(self):
@@ -219,8 +209,6 @@ class NetatmoCamera(Camera):

def update(self):
"""Update entity status."""

# Refresh camera data
self._data.update()

camera = self._data.camera_data.get_camera(cid=self._camera_id)
@@ -441,6 +441,11 @@ class ThermostatData:
except TypeError:
_LOGGER.error("ThermostatData::setup() got error")
return False
except pyatmo.exceptions.NoDevice:
_LOGGER.debug(
"No climate devices for %s (%s)", self.home_name, self.home_id
)
return False
return True

@Throttle(MIN_TIME_BETWEEN_UPDATES)
@@ -33,6 +33,7 @@ class NetatmoFlowHandler(
"read_station",
"read_thermostat",
"write_camera",
"write_presence",
"write_thermostat",
]
@@ -14,12 +14,12 @@ MODELS = {
"NOC": "Smart Outdoor Camera",
"NSD": "Smart Smoke Alarm",
"NACamDoorTag": "Smart Door and Window Sensors",
"NHC": "Smart Indoor Air Quality Monitor",
"NAMain": "Smart Home Weather station – indoor module",
"NAModule1": "Smart Home Weather station – outdoor module",
"NAModule4": "Smart Additional Indoor module",
"NAModule3": "Smart Rain Gauge",
"NAModule2": "Smart Anemometer",
"NHC": "Home Coach",
}

AUTH = "netatmo_auth"
@@ -32,6 +32,7 @@ CONF_CLOUDHOOK_URL = "cloudhook_url"
OAUTH2_AUTHORIZE = "https://api.netatmo.com/oauth2/authorize"
OAUTH2_TOKEN = "https://api.netatmo.com/oauth2/token"

DATA_DEVICE_IDS = "netatmo_device_ids"
DATA_PERSONS = "netatmo_persons"

NETATMO_WEBHOOK_URL = None
@@ -505,4 +505,6 @@ class ONVIFHassCamera(Camera):
@property
def unique_id(self) -> Optional[str]:
"""Return a unique ID."""
if self._profile_index:
return f"{self._mac}_{self._profile_index}"
return self._mac
@@ -77,7 +77,11 @@ PERSON_SCHEMA = vol.Schema(
)

CONFIG_SCHEMA = vol.Schema(
{vol.Optional(DOMAIN): vol.All(cv.ensure_list, cv.remove_falsy, [PERSON_SCHEMA])},
{
vol.Optional(DOMAIN, default=[]): vol.All(
cv.ensure_list, cv.remove_falsy, [PERSON_SCHEMA]
)
},
extra=vol.ALLOW_EXTRA,
)
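The change above gives the person domain key a default of an empty list, so validation still produces the key when a configuration never mentions it. A minimal sketch of that voluptuous behavior under the same assumptions (the inner schema here is a hypothetical stand-in, not Home Assistant's PERSON_SCHEMA):

```python
import voluptuous as vol

# Hypothetical stand-in for PERSON_SCHEMA.
PERSON_SCHEMA = vol.Schema({vol.Required("id"): str}, extra=vol.ALLOW_EXTRA)

CONFIG_SCHEMA = vol.Schema(
    {vol.Optional("person", default=[]): [PERSON_SCHEMA]},
    extra=vol.ALLOW_EXTRA,
)

# With the default in place, a config without a "person" section still
# validates to {"person": []} instead of dropping the key entirely.
print(CONFIG_SCHEMA({}))  # {'person': []}
```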
@@ -75,8 +75,9 @@ async def async_setup_entry(hass: HomeAssistantType, entry: ConfigEntry):

def token_saver(token):
_LOGGER.debug("Saving updated token")
entry.data[CONF_TOKEN] = token
hass.config_entries.async_update_entry(entry, data={**entry.data})
hass.config_entries.async_update_entry(
entry, data={**entry.data, CONF_TOKEN: token}
)

# Force token update.
entry.data[CONF_TOKEN]["expires_in"] = -1
@@ -105,12 +106,18 @@ async def async_setup_entry(hass: HomeAssistantType, entry: ConfigEntry):
async def async_setup_webhook(hass: HomeAssistantType, entry: ConfigEntry, session):
"""Set up a webhook to handle binary sensor events."""
if CONF_WEBHOOK_ID not in entry.data:
entry.data[CONF_WEBHOOK_ID] = hass.components.webhook.async_generate_id()
entry.data[CONF_WEBHOOK_URL] = hass.components.webhook.async_generate_url(
entry.data[CONF_WEBHOOK_ID]
webhook_id = hass.components.webhook.async_generate_id()
webhook_url = hass.components.webhook.async_generate_url(webhook_id)
_LOGGER.info("Registering new webhook at: %s", webhook_url)

hass.config_entries.async_update_entry(
entry,
data={
**entry.data,
CONF_WEBHOOK_ID: webhook_id,
CONF_WEBHOOK_URL: webhook_url,
},
)
_LOGGER.info("Registering new webhook at: %s", entry.data[CONF_WEBHOOK_URL])
hass.config_entries.async_update_entry(entry, data={**entry.data})
await hass.async_add_executor_job(
session.update_webhook,
entry.data[CONF_WEBHOOK_URL],
@@ -25,6 +25,7 @@ from homeassistant.helpers.event import async_track_time_interval
from homeassistant.helpers.service import verify_domain_control

from .const import (
CONF_ZONE_RUN_TIME,
DATA_CLIENT,
DATA_PROGRAMS,
DATA_PROVISION_SETTINGS,
@@ -33,6 +34,8 @@ from .const import (
DATA_ZONES,
DATA_ZONES_DETAILS,
DEFAULT_PORT,
DEFAULT_SCAN_INTERVAL,
DEFAULT_ZONE_RUN,
DOMAIN,
PROGRAM_UPDATE_TOPIC,
SENSOR_UPDATE_TOPIC,
@@ -41,19 +44,14 @@ from .const import (

_LOGGER = logging.getLogger(__name__)

DATA_LISTENER = "listener"

CONF_CONTROLLERS = "controllers"
CONF_PROGRAM_ID = "program_id"
CONF_SECONDS = "seconds"
CONF_ZONE_ID = "zone_id"
CONF_ZONE_RUN_TIME = "zone_run_time"

DEFAULT_ATTRIBUTION = "Data provided by Green Electronics LLC"
DEFAULT_ICON = "mdi:water"
DEFAULT_SCAN_INTERVAL = timedelta(seconds=60)
DEFAULT_SSL = True
DEFAULT_ZONE_RUN = 60 * 10

SERVICE_ALTER_PROGRAM = vol.Schema({vol.Required(CONF_PROGRAM_ID): cv.positive_int})

@@ -109,7 +107,6 @@ async def async_setup(hass, config):
"""Set up the RainMachine component."""
hass.data[DOMAIN] = {}
hass.data[DOMAIN][DATA_CLIENT] = {}
hass.data[DOMAIN][DATA_LISTENER] = {}

if DOMAIN not in config:
return True
@@ -143,7 +140,7 @@ async def async_setup_entry(hass, config_entry):
config_entry.data[CONF_IP_ADDRESS],
config_entry.data[CONF_PASSWORD],
port=config_entry.data[CONF_PORT],
ssl=config_entry.data[CONF_SSL],
ssl=config_entry.data.get(CONF_SSL, DEFAULT_SSL),
)
except RainMachineError as err:
_LOGGER.error("An error occurred: %s", err)
@@ -156,8 +153,10 @@ async def async_setup_entry(hass, config_entry):
rainmachine = RainMachine(
hass,
controller,
config_entry.data[CONF_ZONE_RUN_TIME],
config_entry.data[CONF_SCAN_INTERVAL],
config_entry.data.get(CONF_ZONE_RUN_TIME, DEFAULT_ZONE_RUN),
config_entry.data.get(
CONF_SCAN_INTERVAL, DEFAULT_SCAN_INTERVAL.total_seconds()
),
)

# Update the data object, which at this point (prior to any sensors registering
@@ -260,9 +259,6 @@ async def async_unload_entry(hass, config_entry):
"""Unload an OpenUV config entry."""
hass.data[DOMAIN][DATA_CLIENT].pop(config_entry.entry_id)

remove_listener = hass.data[DOMAIN][DATA_LISTENER].pop(config_entry.entry_id)
remove_listener()

tasks = [
hass.config_entries.async_forward_entry_unload(config_entry, component)
for component in ("binary_sensor", "sensor", "switch")
@@ -4,10 +4,22 @@ from regenmaschine.errors import RainMachineError
import voluptuous as vol

from homeassistant import config_entries
from homeassistant.const import CONF_IP_ADDRESS, CONF_PASSWORD, CONF_PORT
from homeassistant.const import (
CONF_IP_ADDRESS,
CONF_PASSWORD,
CONF_PORT,
CONF_SCAN_INTERVAL,
CONF_SSL,
)
from homeassistant.helpers import aiohttp_client

from .const import DEFAULT_PORT, DOMAIN # pylint: disable=unused-import
from .const import ( # pylint: disable=unused-import
CONF_ZONE_RUN_TIME,
DEFAULT_PORT,
DEFAULT_SCAN_INTERVAL,
DEFAULT_ZONE_RUN,
DOMAIN,
)


class RainMachineFlowHandler(config_entries.ConfigFlow, domain=DOMAIN):
@@ -53,8 +65,8 @@ class RainMachineFlowHandler(config_entries.ConfigFlow, domain=DOMAIN):
user_input[CONF_IP_ADDRESS],
user_input[CONF_PASSWORD],
websession,
port=user_input.get(CONF_PORT, DEFAULT_PORT),
ssl=True,
port=user_input[CONF_PORT],
ssl=user_input.get(CONF_SSL, True),
)
except RainMachineError:
return await self._show_form({CONF_PASSWORD: "invalid_credentials"})
@@ -63,5 +75,17 @@ class RainMachineFlowHandler(config_entries.ConfigFlow, domain=DOMAIN):
# access token without using the IP address and password, so we have to
# store it:
return self.async_create_entry(
title=user_input[CONF_IP_ADDRESS], data=user_input
title=user_input[CONF_IP_ADDRESS],
data={
CONF_IP_ADDRESS: user_input[CONF_IP_ADDRESS],
CONF_PASSWORD: user_input[CONF_PASSWORD],
CONF_PORT: user_input[CONF_PORT],
CONF_SSL: user_input.get(CONF_SSL, True),
CONF_SCAN_INTERVAL: user_input.get(
CONF_SCAN_INTERVAL, DEFAULT_SCAN_INTERVAL.total_seconds()
),
CONF_ZONE_RUN_TIME: user_input.get(
CONF_ZONE_RUN_TIME, DEFAULT_ZONE_RUN
),
},
)
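The config flow above now writes every option, including defaulted ones, into the entry data at creation time, so later reads do not need per-call fallbacks. A minimal sketch of that idea with hypothetical key names and default values:

```python
def build_entry_data(user_input: dict) -> dict:
    """Fill in defaults once so the stored entry always carries every key."""
    return {
        "ip_address": user_input["ip_address"],
        "password": user_input["password"],
        "port": user_input.get("port", 8080),
        "ssl": user_input.get("ssl", True),
        "scan_interval": user_input.get("scan_interval", 60),
        "zone_run_time": user_input.get("zone_run_time", 600),
    }
```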
@@ -1,6 +1,10 @@
"""Define constants for the SimpliSafe component."""
from datetime import timedelta

DOMAIN = "rainmachine"

CONF_ZONE_RUN_TIME = "zone_run_time"

DATA_CLIENT = "client"
DATA_PROGRAMS = "programs"
DATA_PROVISION_SETTINGS = "provision.settings"
@@ -10,6 +14,8 @@ DATA_ZONES = "zones"
DATA_ZONES_DETAILS = "zones_details"

DEFAULT_PORT = 8080
DEFAULT_SCAN_INTERVAL = timedelta(seconds=60)
DEFAULT_ZONE_RUN = 60 * 10

PROGRAM_UPDATE_TOPIC = f"{DOMAIN}_program_update"
SENSOR_UPDATE_TOPIC = f"{DOMAIN}_data_update"
@@ -342,7 +342,6 @@ class Recorder(threading.Thread):
# has changed. This reduces the disk io.
while True:
event = self.queue.get()

if event is None:
self._close_run()
self._close_connection()
@@ -356,7 +355,7 @@ class Recorder(threading.Thread):
self.queue.task_done()
if self.commit_interval:
self._timechanges_seen += 1
if self.commit_interval >= self._timechanges_seen:
if self._timechanges_seen >= self.commit_interval:
self._timechanges_seen = 0
self._commit_event_session_or_retry()
continue
@@ -376,6 +375,9 @@ class Recorder(threading.Thread):
self.event_session.flush()
except (TypeError, ValueError):
_LOGGER.warning("Event is not JSON serializable: %s", event)
except Exception as err: # pylint: disable=broad-except
# Must catch the exception to prevent the loop from collapsing
_LOGGER.exception("Error adding event: %s", err)

if dbevent and event.event_type == EVENT_STATE_CHANGED:
try:
@@ -387,6 +389,9 @@ class Recorder(threading.Thread):
"State is not JSON serializable: %s",
event.data.get("new_state"),
)
except Exception as err: # pylint: disable=broad-except
# Must catch the exception to prevent the loop from collapsing
_LOGGER.exception("Error adding state change: %s", err)

# If they do not have a commit interval
# than we commit right away
@@ -404,17 +409,26 @@ class Recorder(threading.Thread):
try:
self._commit_event_session()
return

except exc.OperationalError as err:
_LOGGER.error(
"Error in database connectivity: %s. " "(retrying in %s seconds)",
err,
self.db_retry_wait,
)
except (exc.InternalError, exc.OperationalError) as err:
if err.connection_invalidated:
_LOGGER.error(
"Database connection invalidated: %s. "
"(retrying in %s seconds)",
err,
self.db_retry_wait,
)
else:
_LOGGER.error(
"Error in database connectivity: %s. "
"(retrying in %s seconds)",
err,
self.db_retry_wait,
)
tries += 1

except exc.SQLAlchemyError:
_LOGGER.exception("Error saving events")
except Exception as err: # pylint: disable=broad-except
# Must catch the exception to prevent the loop from collapsing
_LOGGER.exception("Error saving events: %s", err)
return

_LOGGER.error(
@@ -423,10 +437,15 @@ class Recorder(threading.Thread):
)
try:
self.event_session.close()
except exc.SQLAlchemyError:
_LOGGER.exception("Failed to close event session.")
except Exception as err: # pylint: disable=broad-except
# Must catch the exception to prevent the loop from collapsing
_LOGGER.exception("Error while closing event session: %s", err)

self.event_session = self.get_session()
try:
self.event_session = self.get_session()
except Exception as err: # pylint: disable=broad-except
# Must catch the exception to prevent the loop from collapsing
_LOGGER.exception("Error while creating new event session: %s", err)

def _commit_event_session(self):
try:
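The one-line change in the recorder run loop flips the comparison so the session is committed once the number of observed time changes reaches commit_interval, instead of only when the interval is at least as large as the counter. A rough, standalone sketch of the corrected counter logic (not the Recorder class itself):

```python
# Sketch only: illustrates the corrected counter check from the diff above.
class CommitCounter:
    def __init__(self, commit_interval: int) -> None:
        self.commit_interval = commit_interval
        self._timechanges_seen = 0

    def on_time_changed(self) -> bool:
        """Return True when enough time-changed events were seen to commit."""
        if not self.commit_interval:
            return False
        self._timechanges_seen += 1
        if self._timechanges_seen >= self.commit_interval:
            self._timechanges_seen = 0
            return True
        return False
```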
@@ -206,6 +206,7 @@ class SamsungTVWSBridge(SamsungTVBridge):
CONF_TIMEOUT: 31,
}

result = None
try:
LOGGER.debug("Try config: %s", config)
with SamsungTVWS(
@@ -223,9 +224,13 @@ class SamsungTVWSBridge(SamsungTVBridge):
return RESULT_SUCCESS
except WebSocketException:
LOGGER.debug("Working but unsupported config: %s", config)
return RESULT_NOT_SUPPORTED
result = RESULT_NOT_SUPPORTED
except (OSError, ConnectionFailure) as err:
LOGGER.debug("Failing config: %s, error: %s", config, err)
# pylint: disable=useless-else-on-loop
else:
if result:
return result

return RESULT_NOT_SUCCESSFUL
@@ -3,6 +3,7 @@
"name": "Sighthound",
"documentation": "https://www.home-assistant.io/integrations/sighthound",
"requirements": [
"pillow==7.0.0",
"simplehound==0.3"
],
"dependencies": [],
@@ -3,7 +3,7 @@
"name": "SimpliSafe",
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/simplisafe",
"requirements": ["simplisafe-python==9.0.2"],
"requirements": ["simplisafe-python==9.0.4"],
"dependencies": [],
"codeowners": ["@bachya"]
}
@@ -27,7 +27,7 @@ DOMAIN = "somfy"

CONF_CLIENT_ID = "client_id"
CONF_CLIENT_SECRET = "client_secret"
CONF_OPTIMISTIC = "optimisitic"
CONF_OPTIMISTIC = "optimistic"

SOMFY_AUTH_CALLBACK_PATH = "/auth/somfy/callback"
SOMFY_AUTH_START = "/auth/somfy"
@@ -36,8 +36,8 @@ CONFIG_SCHEMA = vol.Schema(
{
DOMAIN: vol.Schema(
{
vol.Required(CONF_CLIENT_ID): cv.string,
vol.Required(CONF_CLIENT_SECRET): cv.string,
vol.Inclusive(CONF_CLIENT_ID, "oauth"): cv.string,
vol.Inclusive(CONF_CLIENT_SECRET, "oauth"): cv.string,
vol.Optional(CONF_OPTIMISTIC, default=False): cv.boolean,
}
)
@@ -51,23 +51,21 @@ SOMFY_COMPONENTS = ["cover", "switch"]
async def async_setup(hass, config):
"""Set up the Somfy component."""
hass.data[DOMAIN] = {}
domain_config = config.get(DOMAIN, {})
hass.data[DOMAIN][CONF_OPTIMISTIC] = domain_config.get(CONF_OPTIMISTIC, False)

if DOMAIN not in config:
return True

hass.data[DOMAIN][CONF_OPTIMISTIC] = config[DOMAIN][CONF_OPTIMISTIC]

config_flow.SomfyFlowHandler.async_register_implementation(
hass,
config_entry_oauth2_flow.LocalOAuth2Implementation(
if CONF_CLIENT_ID in domain_config:
config_flow.SomfyFlowHandler.async_register_implementation(
hass,
DOMAIN,
config[DOMAIN][CONF_CLIENT_ID],
config[DOMAIN][CONF_CLIENT_SECRET],
"https://accounts.somfy.com/oauth/oauth/v2/auth",
"https://accounts.somfy.com/oauth/oauth/v2/token",
),
)
config_entry_oauth2_flow.LocalOAuth2Implementation(
hass,
DOMAIN,
config[DOMAIN][CONF_CLIENT_ID],
config[DOMAIN][CONF_CLIENT_SECRET],
"https://accounts.somfy.com/oauth/oauth/v2/auth",
"https://accounts.somfy.com/oauth/oauth/v2/token",
),
)

return True
@@ -1,6 +1,7 @@
"""Ask tankerkoenig.de for petrol price information."""
from datetime import timedelta
import logging
from math import ceil

import pytankerkoenig
import voluptuous as vol
@@ -164,27 +165,41 @@ class TankerkoenigData:
)
return False
self.add_station(additional_station_data["station"])
if len(self.stations) > 10:
_LOGGER.warning(
"Found more than 10 stations to check. "
"This might invalidate your api-key on the long run. "
"Try using a smaller radius"
)
return True

async def fetch_data(self):
"""Get the latest data from tankerkoenig.de."""
_LOGGER.debug("Fetching new data from tankerkoenig.de")
station_ids = list(self.stations)
data = await self._hass.async_add_executor_job(
pytankerkoenig.getPriceList, self._api_key, station_ids
)

if data["ok"]:
prices = {}

# The API seems to only return at most 10 results, so split the list in chunks of 10
# and merge it together.
for index in range(ceil(len(station_ids) / 10)):
data = await self._hass.async_add_executor_job(
pytankerkoenig.getPriceList,
self._api_key,
station_ids[index * 10 : (index + 1) * 10],
)

_LOGGER.debug("Received data: %s", data)
if not data["ok"]:
_LOGGER.error(
"Error fetching data from tankerkoenig.de: %s", data["message"]
)
raise TankerkoenigError(data["message"])
if "prices" not in data:
_LOGGER.error("Did not receive price information from tankerkoenig.de")
raise TankerkoenigError("No prices in data")
else:
_LOGGER.error(
"Error fetching data from tankerkoenig.de: %s", data["message"]
)
raise TankerkoenigError(data["message"])
return data["prices"]
prices.update(data["prices"])
return prices

def add_station(self, station: dict):
"""Add fuel station to the entity list."""
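The rewritten fetch_data splits the station list into batches of ten because the API appears to return at most ten prices per call, then merges the partial results. A standalone sketch of the same chunk-and-merge idea (the get_price_list callable here is a hypothetical stand-in for the pytankerkoenig call shown in the diff):

```python
from math import ceil

def fetch_prices(station_ids, get_price_list):
    """Query prices in chunks of 10 and merge the results into one dict."""
    prices = {}
    for index in range(ceil(len(station_ids) / 10)):
        chunk = station_ids[index * 10 : (index + 1) * 10]
        data = get_price_list(chunk)  # e.g. a wrapper around pytankerkoenig.getPriceList
        if not data["ok"]:
            raise RuntimeError(data["message"])
        prices.update(data["prices"])
    return prices
```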
@@ -18,7 +18,7 @@ from homeassistant.const import (
STATE_ALARM_TRIGGERED,
)

from . import DOMAIN as TOTALCONNECT_DOMAIN
from .const import DOMAIN

_LOGGER = logging.getLogger(__name__)

@@ -30,7 +30,7 @@ def setup_platform(hass, config, add_entities, discovery_info=None):

alarms = []

client = hass.data[TOTALCONNECT_DOMAIN].client
client = hass.data[DOMAIN].client

for location_id, location in client.locations.items():
location_name = location.location_name
@@ -71,7 +71,7 @@ class TotalConnectAlarm(alarm.AlarmControlPanel):

def update(self):
"""Return the state of the device."""
status = self._client.get_armed_status(self._location_id)
self._client.get_armed_status(self._location_id)
attr = {
"location_name": self._name,
"location_id": self._location_id,
@@ -79,47 +79,36 @@ class TotalConnectAlarm(alarm.AlarmControlPanel):
"low_battery": self._client.locations[self._location_id].low_battery,
"cover_tampered": self._client.locations[
self._location_id
].is_cover_tampered,
].is_cover_tampered(),
"triggered_source": None,
"triggered_zone": None,
}

if status in (self._client.DISARMED, self._client.DISARMED_BYPASS):
if self._client.locations[self._location_id].is_disarmed():
state = STATE_ALARM_DISARMED
elif status in (
self._client.ARMED_STAY,
self._client.ARMED_STAY_INSTANT,
self._client.ARMED_STAY_INSTANT_BYPASS,
):
elif self._client.locations[self._location_id].is_armed_home():
state = STATE_ALARM_ARMED_HOME
elif status == self._client.ARMED_STAY_NIGHT:
elif self._client.locations[self._location_id].is_armed_night():
state = STATE_ALARM_ARMED_NIGHT
elif status in (
self._client.ARMED_AWAY,
self._client.ARMED_AWAY_BYPASS,
self._client.ARMED_AWAY_INSTANT,
self._client.ARMED_AWAY_INSTANT_BYPASS,
):
elif self._client.locations[self._location_id].is_armed_away():
state = STATE_ALARM_ARMED_AWAY
elif status == self._client.ARMED_CUSTOM_BYPASS:
elif self._client.locations[self._location_id].is_armed_custom_bypass():
state = STATE_ALARM_ARMED_CUSTOM_BYPASS
elif status == self._client.ARMING:
elif self._client.locations[self._location_id].is_arming():
state = STATE_ALARM_ARMING
elif status == self._client.DISARMING:
elif self._client.locations[self._location_id].is_disarming():
state = STATE_ALARM_DISARMING
elif status == self._client.ALARMING:
elif self._client.locations[self._location_id].is_triggered_police():
state = STATE_ALARM_TRIGGERED
attr["triggered_source"] = "Police/Medical"
elif status == self._client.ALARMING_FIRE_SMOKE:
elif self._client.locations[self._location_id].is_triggered_fire():
state = STATE_ALARM_TRIGGERED
attr["triggered_source"] = "Fire/Smoke"
elif status == self._client.ALARMING_CARBON_MONOXIDE:
elif self._client.locations[self._location_id].is_triggered_gas():
state = STATE_ALARM_TRIGGERED
attr["triggered_source"] = "Carbon Monoxide"
else:
logging.info(
"Total Connect Client returned unknown status code: %s", status
)
logging.info("Total Connect Client returned unknown status")
state = None

self._state = state
3 homeassistant/components/totalconnect/const.py Normal file
@@ -0,0 +1,3 @@
"""TotalConnect constants."""

DOMAIN = "totalconnect"
@@ -83,6 +83,8 @@ async def async_setup_entry(hass, config_entry):
controller_id = get_controller_id_from_config_entry(config_entry)
hass.data[DOMAIN][controller_id] = controller

hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, controller.shutdown)

if controller.mac is None:
return True

@@ -96,8 +98,6 @@ async def async_setup_entry(hass, config_entry):
# sw_version=config.raw['swversion'],
)

hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, controller.shutdown)

return True
@@ -64,7 +64,7 @@ class VelbusLight(VelbusEntity, Light):
@property
def brightness(self):
"""Return the brightness of the light."""
return self._module.get_dimmer_state(self._channel)
return int((self._module.get_dimmer_state(self._channel) * 255) / 100)

def turn_on(self, **kwargs):
"""Instruct the Velbus light to turn on."""
@@ -80,10 +80,15 @@ class VelbusLight(VelbusEntity, Light):
attr, *args = "set_led_state", self._channel, "on"
else:
if ATTR_BRIGHTNESS in kwargs:
# Make sure a low but non-zero value is not rounded down to zero
if kwargs[ATTR_BRIGHTNESS] == 0:
brightness = 0
else:
brightness = max(int((kwargs[ATTR_BRIGHTNESS] * 100) / 255), 1)
attr, *args = (
"set_dimmer_state",
self._channel,
kwargs[ATTR_BRIGHTNESS],
brightness,
kwargs.get(ATTR_TRANSITION, 0),
)
else:
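Velbus dimmers report 0-100 while Home Assistant lights use 0-255, so the diff converts in both directions and keeps a low but non-zero requested brightness from rounding down to off. A small sketch of the two conversions implied by the change:

```python
def velbus_to_hass(dimmer_state: int) -> int:
    """Map a Velbus 0-100 dimmer value onto Home Assistant's 0-255 scale."""
    return int((dimmer_state * 255) / 100)


def hass_to_velbus(brightness: int) -> int:
    """Map 0-255 brightness to 0-100, never rounding a non-zero value to 0."""
    if brightness == 0:
        return 0
    return max(int((brightness * 100) / 255), 1)


assert hass_to_velbus(1) == 1      # would round to 0 without the guard
assert velbus_to_hass(100) == 255  # full brightness maps to full scale
```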
@@ -2,7 +2,7 @@
"domain": "velbus",
"name": "Velbus",
"documentation": "https://www.home-assistant.io/integrations/velbus",
"requirements": ["python-velbus==2.0.42"],
"requirements": ["python-velbus==2.0.43"],
"config_flow": true,
"dependencies": [],
"codeowners": ["@Cereal2nd", "@brefra"]
@@ -8,6 +8,8 @@ import voluptuous as vol
from homeassistant import config_entries, const as ha_const
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.device_registry import CONNECTION_ZIGBEE
from homeassistant.helpers.dispatcher import async_dispatcher_send
from homeassistant.helpers.typing import HomeAssistantType

from . import api
from .core import ZHAGateway
@@ -27,6 +29,7 @@ from .core.const import (
DEFAULT_BAUDRATE,
DEFAULT_RADIO_TYPE,
DOMAIN,
SIGNAL_ADD_ENTITIES,
RadioType,
)

@@ -89,24 +92,11 @@ async def async_setup_entry(hass, config_entry):
Will automatically load components to support devices found on the network.
"""

hass.data[DATA_ZHA] = hass.data.get(DATA_ZHA, {})
hass.data[DATA_ZHA][DATA_ZHA_DISPATCHERS] = []
hass.data[DATA_ZHA][DATA_ZHA_PLATFORM_LOADED] = asyncio.Event()
platforms = []
zha_data = hass.data.setdefault(DATA_ZHA, {})
config = zha_data.get(DATA_ZHA_CONFIG, {})

for component in COMPONENTS:
platforms.append(
hass.async_create_task(
hass.config_entries.async_forward_entry_setup(config_entry, component)
)
)

async def _platforms_loaded():
await asyncio.gather(*platforms)
hass.data[DATA_ZHA][DATA_ZHA_PLATFORM_LOADED].set()

hass.async_create_task(_platforms_loaded())

config = hass.data[DATA_ZHA].get(DATA_ZHA_CONFIG, {})
zha_data.setdefault(component, [])

if config.get(CONF_ENABLE_QUIRKS, True):
# needs to be done here so that the ZHA module is finished loading
@@ -116,6 +106,12 @@ async def async_setup_entry(hass, config_entry):
zha_gateway = ZHAGateway(hass, config, config_entry)
await zha_gateway.async_initialize()

zha_data[DATA_ZHA_DISPATCHERS] = []
zha_data[DATA_ZHA_PLATFORM_LOADED] = []
for component in COMPONENTS:
coro = hass.config_entries.async_forward_entry_setup(config_entry, component)
zha_data[DATA_ZHA_PLATFORM_LOADED].append(hass.async_create_task(coro))

device_registry = await hass.helpers.device_registry.async_get_registry()
device_registry.async_get_or_create(
config_entry_id=config_entry.entry_id,
@@ -130,11 +126,11 @@ async def async_setup_entry(hass, config_entry):

async def async_zha_shutdown(event):
"""Handle shutdown tasks."""
await hass.data[DATA_ZHA][DATA_ZHA_GATEWAY].shutdown()
await hass.data[DATA_ZHA][DATA_ZHA_GATEWAY].async_update_device_storage()
await zha_data[DATA_ZHA_GATEWAY].shutdown()
await zha_data[DATA_ZHA_GATEWAY].async_update_device_storage()

hass.bus.async_listen_once(ha_const.EVENT_HOMEASSISTANT_STOP, async_zha_shutdown)
hass.async_create_task(zha_gateway.async_load_devices())
asyncio.create_task(async_load_entities(hass, config_entry))
return True

@@ -152,3 +148,16 @@ async def async_unload_entry(hass, config_entry):
await hass.config_entries.async_forward_entry_unload(config_entry, component)

return True


async def async_load_entities(
hass: HomeAssistantType, config_entry: config_entries.ConfigEntry
) -> None:
"""Load entities after integration was setup."""
await hass.data[DATA_ZHA][DATA_ZHA_GATEWAY].async_prepare_entities()
to_setup = hass.data[DATA_ZHA][DATA_ZHA_PLATFORM_LOADED]
results = await asyncio.gather(*to_setup, return_exceptions=True)
for res in results:
if isinstance(res, Exception):
_LOGGER.warning("Couldn't setup zha platform: %s", res)
async_dispatcher_send(hass, SIGNAL_ADD_ENTITIES)
@@ -49,7 +49,7 @@ STRICT_MATCH = functools.partial(ZHA_ENTITIES.strict_match, DOMAIN)

async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up the Zigbee Home Automation binary sensor from config entry."""
entities_to_create = hass.data[DATA_ZHA][DOMAIN] = []
entities_to_create = hass.data[DATA_ZHA][DOMAIN]

unsub = async_dispatcher_connect(
hass,
@@ -85,11 +85,11 @@ class ZigbeeChannel(LogMixin):
self, cluster: zha_typing.ZigpyClusterType, ch_pool: zha_typing.ChannelPoolType
) -> None:
"""Initialize ZigbeeChannel."""
self._channel_name = cluster.ep_attribute
self._generic_id = f"channel_0x{cluster.cluster_id:04x}"
self._channel_name = getattr(cluster, "ep_attribute", self._generic_id)
if self.CHANNEL_NAME:
self._channel_name = self.CHANNEL_NAME
self._ch_pool = ch_pool
self._generic_id = f"channel_0x{cluster.cluster_id:04x}"
self._cluster = cluster
self._id = f"{ch_pool.id}:0x{cluster.cluster_id:04x}"
unique_id = ch_pool.unique_id.replace("-", ":")
@@ -8,6 +8,16 @@ from homeassistant.core import callback
from homeassistant.helpers.typing import HomeAssistantType

from . import const as zha_const, registries as zha_regs, typing as zha_typing
from .. import ( # noqa: F401 pylint: disable=unused-import,
binary_sensor,
cover,
device_tracker,
fan,
light,
lock,
sensor,
switch,
)
from .channels import base

_LOGGER = logging.getLogger(__name__)
@@ -7,10 +7,12 @@ import logging
import os
import traceback

from serial import SerialException
import zigpy.device as zigpy_dev

from homeassistant.components.system_log import LogEntry, _figure_out_source
from homeassistant.core import callback
from homeassistant.exceptions import ConfigEntryNotReady
from homeassistant.helpers.device_registry import (
CONNECTION_ZIGBEE,
async_get_registry as get_dev_reg,
@@ -34,7 +36,6 @@ from .const import (
DATA_ZHA,
DATA_ZHA_BRIDGE_ID,
DATA_ZHA_GATEWAY,
DATA_ZHA_PLATFORM_LOADED,
DEBUG_COMP_BELLOWS,
DEBUG_COMP_ZHA,
DEBUG_COMP_ZIGPY,
@@ -98,7 +99,6 @@ class ZHAGateway:
self.ha_entity_registry = None
self.application_controller = None
self.radio_description = None
hass.data[DATA_ZHA][DATA_ZHA_GATEWAY] = self
self._log_levels = {
DEBUG_LEVEL_ORIGINAL: async_capture_log_levels(),
DEBUG_LEVEL_CURRENT: async_capture_log_levels(),
@@ -122,7 +122,11 @@ class ZHAGateway:
radio_details = RADIO_TYPES[radio_type]
radio = radio_details[ZHA_GW_RADIO]()
self.radio_description = radio_details[ZHA_GW_RADIO_DESCRIPTION]
await radio.connect(usb_path, baudrate)
try:
await radio.connect(usb_path, baudrate)
except (SerialException, OSError) as exception:
_LOGGER.error("Couldn't open serial port for ZHA: %s", str(exception))
raise ConfigEntryNotReady

if CONF_DATABASE in self._config:
database = self._config[CONF_DATABASE]
@@ -133,38 +137,59 @@ class ZHAGateway:
apply_application_controller_patch(self)
self.application_controller.add_listener(self)
self.application_controller.groups.add_listener(self)
await self.application_controller.startup(auto_form=True)

try:
res = await self.application_controller.startup(auto_form=True)
if res is False:
await self.application_controller.shutdown()
raise ConfigEntryNotReady
except asyncio.TimeoutError as exception:
_LOGGER.error(
"Couldn't start %s coordinator",
radio_details[ZHA_GW_RADIO_DESCRIPTION],
exc_info=exception,
)
radio.close()
raise ConfigEntryNotReady from exception

self._hass.data[DATA_ZHA][DATA_ZHA_GATEWAY] = self
self._hass.data[DATA_ZHA][DATA_ZHA_BRIDGE_ID] = str(
self.application_controller.ieee
)
await self.async_load_devices()
self._initialize_groups()

async def async_load_devices(self) -> None:
"""Restore ZHA devices from zigpy application state."""
await self._hass.data[DATA_ZHA][DATA_ZHA_PLATFORM_LOADED].wait()
zigpy_devices = self.application_controller.devices.values()
for zigpy_device in zigpy_devices:
self._async_get_or_create_device(zigpy_device, restored=True)

async def async_prepare_entities(self) -> None:
"""Prepare entities by initializing device channels."""
semaphore = asyncio.Semaphore(2)

async def _throttle(device: zha_typing.ZigpyDeviceType):
async def _throttle(zha_device: zha_typing.ZhaDeviceType, cached: bool):
async with semaphore:
await self.async_device_restored(device)
await zha_device.async_initialize(from_cache=cached)

zigpy_devices = self.application_controller.devices.values()
_LOGGER.debug("Loading battery powered devices")
await asyncio.gather(
*[
_throttle(dev)
for dev in zigpy_devices
if not dev.node_desc.is_mains_powered
_throttle(dev, cached=True)
for dev in self.devices.values()
if not dev.is_mains_powered
]
)
async_dispatcher_send(self._hass, SIGNAL_ADD_ENTITIES)

_LOGGER.debug("Loading mains powered devices")
await asyncio.gather(
*[_throttle(dev) for dev in zigpy_devices if dev.node_desc.is_mains_powered]
*[
_throttle(dev, cached=False)
for dev in self.devices.values()
if dev.is_mains_powered
]
)
async_dispatcher_send(self._hass, SIGNAL_ADD_ENTITIES)

def device_joined(self, device):
"""Handle device joined.
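The gateway change above wraps both the serial connect and the controller startup so that failures raise ConfigEntryNotReady, which tells Home Assistant to retry the config entry later instead of failing setup permanently. A condensed sketch of that pattern, with names following the diff and the surrounding class omitted:

```python
import asyncio

from serial import SerialException

from homeassistant.exceptions import ConfigEntryNotReady


async def connect_radio(radio, usb_path, baudrate, application_controller):
    """Connect the radio and start the coordinator, deferring setup on failure."""
    try:
        await radio.connect(usb_path, baudrate)
    except (SerialException, OSError) as exception:
        # ConfigEntryNotReady makes Home Assistant retry this entry later.
        raise ConfigEntryNotReady from exception

    try:
        if await application_controller.startup(auto_form=True) is False:
            await application_controller.shutdown()
            raise ConfigEntryNotReady
    except asyncio.TimeoutError as exception:
        radio.close()
        raise ConfigEntryNotReady from exception
```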
@@ -29,7 +29,7 @@ STRICT_MATCH = functools.partial(ZHA_ENTITIES.strict_match, DOMAIN)

async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up the Zigbee Home Automation cover from config entry."""
entities_to_create = hass.data[DATA_ZHA][DOMAIN] = []
entities_to_create = hass.data[DATA_ZHA][DOMAIN]

unsub = async_dispatcher_connect(
hass,
@@ -26,7 +26,7 @@ _LOGGER = logging.getLogger(__name__)

async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up the Zigbee Home Automation device tracker from config entry."""
entities_to_create = hass.data[DATA_ZHA][DOMAIN] = []
entities_to_create = hass.data[DATA_ZHA][DOMAIN]

unsub = async_dispatcher_connect(
hass,
@@ -53,7 +53,7 @@ STRICT_MATCH = functools.partial(ZHA_ENTITIES.strict_match, DOMAIN)

async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up the Zigbee Home Automation fan from config entry."""
entities_to_create = hass.data[DATA_ZHA][DOMAIN] = []
entities_to_create = hass.data[DATA_ZHA][DOMAIN]

unsub = async_dispatcher_connect(
hass,
@@ -52,7 +52,7 @@ _REFRESH_INTERVAL = (45, 75)

async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up the Zigbee Home Automation light from config entry."""
entities_to_create = hass.data[DATA_ZHA][light.DOMAIN] = []
entities_to_create = hass.data[DATA_ZHA][light.DOMAIN]

unsub = async_dispatcher_connect(
hass,
@@ -36,7 +36,7 @@ VALUE_TO_STATE = dict(enumerate(STATE_LIST))

async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up the Zigbee Home Automation Door Lock from config entry."""
entities_to_create = hass.data[DATA_ZHA][DOMAIN] = []
entities_to_create = hass.data[DATA_ZHA][DOMAIN]

unsub = async_dispatcher_connect(
hass,
@@ -68,7 +68,7 @@ STRICT_MATCH = functools.partial(ZHA_ENTITIES.strict_match, DOMAIN)

async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up the Zigbee Home Automation sensor from config entry."""
entities_to_create = hass.data[DATA_ZHA][DOMAIN] = []
entities_to_create = hass.data[DATA_ZHA][DOMAIN]

unsub = async_dispatcher_connect(
hass,
@@ -26,7 +26,7 @@ STRICT_MATCH = functools.partial(ZHA_ENTITIES.strict_match, DOMAIN)

async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up the Zigbee Home Automation switch from config entry."""
entities_to_create = hass.data[DATA_ZHA][DOMAIN] = []
entities_to_create = hass.data[DATA_ZHA][DOMAIN]

unsub = async_dispatcher_connect(
hass,
@@ -3,7 +3,7 @@
"name": "Z-Wave",
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/zwave",
"requirements": ["homeassistant-pyozw==0.1.9", "pydispatcher==2.0.5"],
"requirements": ["homeassistant-pyozw==0.1.10", "pydispatcher==2.0.5"],
"dependencies": [],
"codeowners": ["@home-assistant/z-wave"]
}
@@ -562,18 +562,36 @@ def _log_pkg_error(package: str, component: str, config: Dict, message: str) ->
_LOGGER.error(message)


def _identify_config_schema(module: ModuleType) -> Tuple[Optional[str], Optional[Dict]]:
def _identify_config_schema(module: ModuleType) -> Optional[str]:
"""Extract the schema and identify list or dict based."""
try:
schema = module.CONFIG_SCHEMA.schema[module.DOMAIN] # type: ignore
except (AttributeError, KeyError):
return None, None
key = next(k for k in module.CONFIG_SCHEMA.schema if k == module.DOMAIN) # type: ignore
except (AttributeError, StopIteration):
return None

schema = module.CONFIG_SCHEMA.schema[key] # type: ignore

if hasattr(key, "default") and not isinstance(
key.default, vol.schema_builder.Undefined
):
default_value = module.CONFIG_SCHEMA({module.DOMAIN: key.default()})[ # type: ignore
module.DOMAIN # type: ignore
]

if isinstance(default_value, dict):
return "dict"

if isinstance(default_value, list):
return "list"

return None

t_schema = str(schema)
if t_schema.startswith("{") or "schema_with_slug_keys" in t_schema:
return ("dict", schema)
return "dict"
if t_schema.startswith(("[", "All(<function ensure_list")):
return ("list", schema)
return "", schema
return "list"
return None


def _recursive_merge(conf: Dict[str, Any], package: Dict[str, Any]) -> Union[bool, str]:
@@ -626,8 +644,7 @@ async def merge_packages_config(
merge_list = hasattr(component, "PLATFORM_SCHEMA")

if not merge_list and hasattr(component, "CONFIG_SCHEMA"):
merge_type, _ = _identify_config_schema(component)
merge_list = merge_type == "list"
merge_list = _identify_config_schema(component) == "list"

if merge_list:
config[comp_name] = cv.remove_falsy(
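_identify_config_schema now returns just "dict" or "list", deciding first from the schema key's default value and only then from the schema's string form. A rough standalone illustration of the default-based branch, using a hypothetical domain rather than a real integration:

```python
import voluptuous as vol

CONFIG_SCHEMA = vol.Schema(
    {vol.Optional("example_domain", default=[]): [str]}, extra=vol.ALLOW_EXTRA
)

# Mirror the helper: find the marker key, validate its default, inspect the type.
key = next(k for k in CONFIG_SCHEMA.schema if k == "example_domain")
default_value = CONFIG_SCHEMA({"example_domain": key.default()})["example_domain"]
merge_style = "list" if isinstance(default_value, list) else "dict"
print(merge_style)  # "list" -> package config for this domain is merged as a list
```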
@@ -1,7 +1,7 @@
"""Constants used by Home Assistant components."""
MAJOR_VERSION = 0
MINOR_VERSION = 107
PATCH_VERSION = "1"
PATCH_VERSION = "7"
__short_version__ = f"{MAJOR_VERSION}.{MINOR_VERSION}"
__version__ = f"{__short_version__}.{PATCH_VERSION}"
REQUIRED_PYTHON_VER = (3, 7, 0)
@@ -701,7 +701,9 @@ class Script:

def _log(self, msg, *args, level=logging.INFO):
if self.name:
msg = f"{self.name}: {msg}"
msg = f"%s: {msg}"
args = [self.name, *args]

if level == _LOG_EXCEPTION:
self._logger.exception(msg, *args)
else:
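The _log change keeps the script name out of the format string and passes it as the first logging argument instead, so %-style formatting stays lazy and a literal "%" in the name can no longer break it. A brief sketch of the before and after behavior with an assumed logger:

```python
import logging

logger = logging.getLogger("script_demo")
name = "100% brightness"  # a script name containing a literal %

# Old approach: the name was interpolated into the format string itself,
# so the stray "%" could raise a formatting error once args follow.
# msg = f"{name}: step %s failed"

# New approach: keep the name as a lazy logging argument.
msg = "%s: step %s failed"
logger.warning(msg, name, "turn_on")
```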
@@ -139,7 +139,7 @@ aio_georss_gdacs==0.3
aioambient==1.0.4

# homeassistant.components.asuswrt
aioasuswrt==1.2.2
aioasuswrt==1.2.3

# homeassistant.components.automatic
aioautomatic==0.6.5
@@ -163,7 +163,7 @@ aioftp==0.12.0
aioharmony==0.1.13

# homeassistant.components.homekit_controller
aiohomekit[IP]==0.2.29.1
aiohomekit[IP]==0.2.29.2

# homeassistant.components.emulated_hue
# homeassistant.components.http
@@ -699,7 +699,7 @@ holidays==0.10.1
home-assistant-frontend==20200318.1

# homeassistant.components.zwave
homeassistant-pyozw==0.1.9
homeassistant-pyozw==0.1.10

# homeassistant.components.homematicip_cloud
homematicip==0.10.17
@@ -712,7 +712,7 @@ horimote==0.4.1
httplib2==0.10.3

# homeassistant.components.huawei_lte
huawei-lte-api==1.4.10
huawei-lte-api==1.4.11

# homeassistant.components.hydrawise
hydrawiser==0.1.1
@@ -1017,6 +1017,7 @@ pilight==0.1.1
# homeassistant.components.proxy
# homeassistant.components.qrcode
# homeassistant.components.seven_segments
# homeassistant.components.sighthound
# homeassistant.components.tensorflow
pillow==7.0.0

@@ -1665,7 +1666,7 @@ python-telnet-vlc==1.0.4
python-twitch-client==0.6.0

# homeassistant.components.velbus
python-velbus==2.0.42
python-velbus==2.0.43

# homeassistant.components.vlc
python-vlc==1.1.2
@@ -1855,7 +1856,7 @@ simplehound==0.3
simplepush==1.1.4

# homeassistant.components.simplisafe
simplisafe-python==9.0.2
simplisafe-python==9.0.4

# homeassistant.components.sisyphus
sisyphus-control==2.2.1
@@ -50,7 +50,7 @@ aio_georss_gdacs==0.3
aioambient==1.0.4

# homeassistant.components.asuswrt
aioasuswrt==1.2.2
aioasuswrt==1.2.3

# homeassistant.components.automatic
aioautomatic==0.6.5
@@ -62,7 +62,7 @@ aiobotocore==0.11.1
aioesphomeapi==2.6.1

# homeassistant.components.homekit_controller
aiohomekit[IP]==0.2.29.1
aiohomekit[IP]==0.2.29.2

# homeassistant.components.emulated_hue
# homeassistant.components.http
@@ -266,7 +266,7 @@ holidays==0.10.1
home-assistant-frontend==20200318.1

# homeassistant.components.zwave
homeassistant-pyozw==0.1.9
homeassistant-pyozw==0.1.10

# homeassistant.components.homematicip_cloud
homematicip==0.10.17
@@ -276,7 +276,7 @@ homematicip==0.10.17
httplib2==0.10.3

# homeassistant.components.huawei_lte
huawei-lte-api==1.4.10
huawei-lte-api==1.4.11

# homeassistant.components.iaqualink
iaqualink==0.3.1
@@ -365,6 +365,14 @@ pexpect==4.6.0
# homeassistant.components.pilight
pilight==0.1.1

# homeassistant.components.doods
# homeassistant.components.proxy
# homeassistant.components.qrcode
# homeassistant.components.seven_segments
# homeassistant.components.sighthound
# homeassistant.components.tensorflow
pillow==7.0.0

# homeassistant.components.plex
plexapi==3.3.0

@@ -587,7 +595,7 @@ python-nest==4.1.0
python-twitch-client==0.6.0

# homeassistant.components.velbus
python-velbus==2.0.42
python-velbus==2.0.43

# homeassistant.components.awair
python_awair==0.0.4
@@ -641,7 +649,7 @@ sentry-sdk==0.13.5
simplehound==0.3

# homeassistant.components.simplisafe
simplisafe-python==9.0.2
simplisafe-python==9.0.4

# homeassistant.components.sleepiq
sleepyq==0.7
5 rootfs/etc/services.d/home-assistant/finish Normal file
@@ -0,0 +1,5 @@
#!/usr/bin/execlineb -S0
# ==============================================================================
# Take down the S6 supervision tree when Home Assistant fails
# ==============================================================================
s6-svscanctl -t /var/run/s6/services
rootfs/etc/services.d/home-assistant/run (Normal file, 7 lines)
@@ -0,0 +1,7 @@
#!/usr/bin/with-contenv bashio
# ==============================================================================
# Start Home Assistant service
# ==============================================================================
cd /config || bashio::exit.nok "Can't find config folder!"

exec python3 -m homeassistant --config /config
@@ -65,6 +65,7 @@ async def test_full_flow(hass, aiohttp_client, aioclient_mock):
            "read_station",
            "read_thermostat",
            "write_camera",
            "write_presence",
            "write_thermostat",
        ]
    )
@@ -4,7 +4,7 @@ from unittest.mock import patch
from regenmaschine.errors import RainMachineError

from homeassistant import data_entry_flow
from homeassistant.components.rainmachine import DOMAIN, config_flow
from homeassistant.components.rainmachine import CONF_ZONE_RUN_TIME, DOMAIN, config_flow
from homeassistant.config_entries import SOURCE_USER
from homeassistant.const import (
    CONF_IP_ADDRESS,
@@ -98,6 +98,7 @@ async def test_step_import(hass):
        CONF_PORT: 8080,
        CONF_SSL: True,
        CONF_SCAN_INTERVAL: 60,
        CONF_ZONE_RUN_TIME: 600,
    }


@@ -129,4 +130,5 @@ async def test_step_user(hass):
        CONF_PORT: 8080,
        CONF_SSL: True,
        CONF_SCAN_INTERVAL: 60,
        CONF_ZONE_RUN_TIME: 600,
    }
@@ -1,5 +1,5 @@
"""Tests for Samsung TV config flow."""
from unittest.mock import call, patch
from unittest.mock import Mock, PropertyMock, call, patch

from asynctest import mock
import pytest
@@ -19,7 +19,7 @@ from homeassistant.components.ssdp import (
    ATTR_UPNP_MODEL_NAME,
    ATTR_UPNP_UDN,
)
from homeassistant.const import CONF_HOST, CONF_ID, CONF_METHOD, CONF_NAME
from homeassistant.const import CONF_HOST, CONF_ID, CONF_METHOD, CONF_NAME, CONF_TOKEN

MOCK_USER_DATA = {CONF_HOST: "fake_host", CONF_NAME: "fake_name"}
MOCK_SSDP_DATA = {
@@ -46,6 +46,20 @@ AUTODETECT_LEGACY = {
    "host": "fake_host",
    "timeout": 31,
}
AUTODETECT_WEBSOCKET_PLAIN = {
    "host": "fake_host",
    "name": "HomeAssistant",
    "port": 8001,
    "timeout": 31,
    "token": None,
}
AUTODETECT_WEBSOCKET_SSL = {
    "host": "fake_host",
    "name": "HomeAssistant",
    "port": 8002,
    "timeout": 31,
    "token": None,
}


@pytest.fixture(name="remote")
@@ -446,20 +460,48 @@ async def test_autodetect_websocket(hass, remote, remotews):
    with patch(
        "homeassistant.components.samsungtv.bridge.Remote", side_effect=OSError("Boom"),
    ), patch("homeassistant.components.samsungtv.bridge.SamsungTVWS") as remotews:
        enter = Mock()
        type(enter).token = PropertyMock(return_value="123456789")
        remote = Mock()
        remote.__enter__ = Mock(return_value=enter)
        remote.__exit__ = Mock(return_value=False)
        remotews.return_value = remote

        result = await hass.config_entries.flow.async_init(
            DOMAIN, context={"source": "user"}, data=MOCK_USER_DATA
        )
        assert result["type"] == "create_entry"
        assert result["data"][CONF_METHOD] == "websocket"
        assert result["data"][CONF_TOKEN] == "123456789"
        assert remotews.call_count == 1
        assert remotews.call_args_list == [call(**AUTODETECT_WEBSOCKET_PLAIN)]


async def test_autodetect_websocket_ssl(hass, remote, remotews):
    """Test for send key with autodetection of protocol."""
    with patch(
        "homeassistant.components.samsungtv.bridge.Remote", side_effect=OSError("Boom"),
    ), patch(
        "homeassistant.components.samsungtv.bridge.SamsungTVWS",
        side_effect=[WebSocketProtocolException("Boom"), mock.DEFAULT],
    ) as remotews:
        enter = Mock()
        type(enter).token = PropertyMock(return_value="123456789")
        remote = Mock()
        remote.__enter__ = Mock(return_value=enter)
        remote.__exit__ = Mock(return_value=False)
        remotews.return_value = remote

        result = await hass.config_entries.flow.async_init(
            DOMAIN, context={"source": "user"}, data=MOCK_USER_DATA
        )
        assert result["type"] == "create_entry"
        assert result["data"][CONF_METHOD] == "websocket"
        assert result["data"][CONF_TOKEN] == "123456789"
        assert remotews.call_count == 2
        assert remotews.call_args_list == [
            call(
                host="fake_host",
                name="HomeAssistant",
                port=8001,
                timeout=31,
                token=None,
            )
            call(**AUTODETECT_WEBSOCKET_PLAIN),
            call(**AUTODETECT_WEBSOCKET_SSL),
        ]


@@ -524,18 +566,6 @@ async def test_autodetect_none(hass, remote, remotews):
        ]
        assert remotews.call_count == 2
        assert remotews.call_args_list == [
            call(
                host="fake_host",
                name="HomeAssistant",
                port=8001,
                timeout=31,
                token=None,
            ),
            call(
                host="fake_host",
                name="HomeAssistant",
                port=8002,
                timeout=31,
                token=None,
            ),
            call(**AUTODETECT_WEBSOCKET_PLAIN),
            call(**AUTODETECT_WEBSOCKET_SSL),
        ]
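The config-flow tests above stand in for the SamsungTVWS websocket client with a hand-built double: the patched class returns an object whose context manager yields something exposing a token property. A minimal Python sketch of that same pattern in isolation follows; the fetch_token helper is hypothetical and only illustrates what the mocks emulate, not the bridge's actual code.

from unittest.mock import Mock, PropertyMock


def fetch_token(remote_class, host):
    """Open the websocket remote and return the pairing token it exposes."""
    with remote_class(
        host=host, port=8001, timeout=31, token=None, name="HomeAssistant"
    ) as remote:
        return remote.token


# Wire up the same double the tests build by hand.
enter = Mock()
type(enter).token = PropertyMock(return_value="123456789")
instance = Mock()
instance.__enter__ = Mock(return_value=enter)
instance.__exit__ = Mock(return_value=False)
remote_class = Mock(return_value=instance)

assert fetch_token(remote_class, "fake_host") == "123456789"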
@@ -34,8 +34,11 @@ from homeassistant.const import (
    ATTR_FRIENDLY_NAME,
    ATTR_SUPPORTED_FEATURES,
    CONF_HOST,
    CONF_IP_ADDRESS,
    CONF_METHOD,
    CONF_NAME,
    CONF_PORT,
    CONF_TOKEN,
    SERVICE_MEDIA_NEXT_TRACK,
    SERVICE_MEDIA_PAUSE,
    SERVICE_MEDIA_PLAY,
@@ -51,7 +54,7 @@ from homeassistant.const import (
from homeassistant.setup import async_setup_component
import homeassistant.util.dt as dt_util

from tests.common import async_fire_time_changed
from tests.common import MockConfigEntry, async_fire_time_changed

ENTITY_ID = f"{DOMAIN}.fake"
MOCK_CONFIG = {
@@ -64,17 +67,40 @@ MOCK_CONFIG = {
        }
    ]
}

MOCK_CONFIGWS = {
    SAMSUNGTV_DOMAIN: [
        {
            CONF_HOST: "fake",
            CONF_NAME: "fake",
            CONF_PORT: 8001,
            CONF_TOKEN: "123456789",
            CONF_ON_ACTION: [{"delay": "00:00:01"}],
        }
    ]
}
MOCK_CALLS_WS = {
    "host": "fake",
    "port": 8001,
    "token": None,
    "timeout": 31,
    "name": "HomeAssistant",
}

MOCK_ENTRY_WS = {
    CONF_IP_ADDRESS: "test",
    CONF_HOST: "fake",
    CONF_METHOD: "websocket",
    CONF_NAME: "fake",
    CONF_PORT: 8001,
    CONF_TOKEN: "abcde",
}
MOCK_CALLS_ENTRY_WS = {
    "host": "fake",
    "name": "HomeAssistant",
    "port": 8001,
    "timeout": 1,
    "token": "abcde",
}

ENTITY_ID_NOTURNON = f"{DOMAIN}.fake_noturnon"
MOCK_CONFIG_NOTURNON = {
@@ -155,6 +181,52 @@ async def test_setup_without_turnon(hass, remote):
    assert hass.states.get(ENTITY_ID_NOTURNON)


async def test_setup_websocket(hass, remotews, mock_now):
    """Test setup of platform."""
    with patch("homeassistant.components.samsungtv.bridge.SamsungTVWS") as remote_class:
        enter = mock.Mock()
        type(enter).token = mock.PropertyMock(return_value="987654321")
        remote = mock.Mock()
        remote.__enter__ = mock.Mock(return_value=enter)
        remote.__exit__ = mock.Mock()
        remote_class.return_value = remote

        await setup_samsungtv(hass, MOCK_CONFIGWS)

        assert remote_class.call_count == 1
        assert remote_class.call_args_list == [call(**MOCK_CALLS_WS)]
        assert hass.states.get(ENTITY_ID)


async def test_setup_websocket_2(hass, mock_now):
    """Test setup of platform from config entry."""
    entity_id = f"{DOMAIN}.fake"

    entry = MockConfigEntry(
        domain=SAMSUNGTV_DOMAIN, data=MOCK_ENTRY_WS, unique_id=entity_id,
    )
    entry.add_to_hass(hass)

    config_entries = hass.config_entries.async_entries(SAMSUNGTV_DOMAIN)
    assert len(config_entries) == 1
    assert entry is config_entries[0]

    assert await async_setup_component(hass, SAMSUNGTV_DOMAIN, {})
    await hass.async_block_till_done()

    next_update = mock_now + timedelta(minutes=5)
    with patch(
        "homeassistant.components.samsungtv.bridge.SamsungTVWS"
    ) as remote, patch("homeassistant.util.dt.utcnow", return_value=next_update):
        async_fire_time_changed(hass, next_update)
        await hass.async_block_till_done()

    state = hass.states.get(entity_id)
    assert state
    assert remote.call_count == 1
    assert remote.call_args_list == [call(**MOCK_CALLS_ENTRY_WS)]


async def test_update_on(hass, remote, mock_now):
    """Testing update tv on."""
    await setup_samsungtv(hass, MOCK_CONFIG)
@@ -123,7 +123,7 @@ async def test_tracked_devices(hass):
        devices_response=[DEVICE_1, DEVICE_2],
        known_wireless_clients=(CLIENT_4["mac"],),
    )
    assert len(hass.states.async_entity_ids("device_tracker")) == 5
    assert len(hass.states.async_entity_ids("device_tracker")) == 6

    client_1 = hass.states.get("device_tracker.client_1")
    assert client_1 is not None
@@ -349,7 +349,7 @@ async def test_option_ssid_filter(hass):
    controller = await setup_unifi_integration(
        hass, options={CONF_SSID_FILTER: ["ssid"]}, clients_response=[CLIENT_3],
    )
    assert len(hass.states.async_entity_ids("device_tracker")) == 0
    assert len(hass.states.async_entity_ids("device_tracker")) == 1

    # SSID filter active
    client_3 = hass.states.get("device_tracker.client_3")
@@ -1743,3 +1743,15 @@ async def test_if_running_parallel(hass):
    assert len(events) == 4
    assert events[2].data["value"] == 2
    assert events[3].data["value"] == 2


async def test_script_logging(caplog):
    """Test script logging."""
    script_obj = script.Script(None, [], "Script with % Name")
    script_obj._log("Test message with name %s", 1)

    assert "Script with % Name: Test message with name 1" in caplog.text

    script_obj = script.Script(None, [])
    script_obj._log("Test message without name %s", 2)
    assert "Test message without name 2" in caplog.text
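The new test_script_logging above uses a script whose name contains a literal "%". A plausible reading of its intent (an assumption on our part, not stated in the diff) is that the logger must never fold the name into the printf-style format string. A minimal stdlib sketch of the hazard, independent of the Script class:

import logging

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger("script_logging_demo")

name = "Script with % Name"

# Hazardous: folding user-supplied text into the format string means the
# literal "%" is parsed as a conversion specifier, and logging reports a
# formatting error to stderr instead of emitting the message.
logger.info(name + ": Test message with name %s", 1)

# Safe: keep the name out of the format string and pass it as an argument.
logger.info("%s: Test message with name %s", name, 1)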
@@ -10,6 +10,7 @@ from unittest.mock import Mock
import asynctest
from asynctest import CoroutineMock, patch
import pytest
import voluptuous as vol
from voluptuous import Invalid, MultipleInvalid
import yaml

@@ -721,7 +722,7 @@ async def test_merge_id_schema(hass):
    for domain, expected_type in types.items():
        integration = await async_get_integration(hass, domain)
        module = integration.get_component()
        typ, _ = config_util._identify_config_schema(module)
        typ = config_util._identify_config_schema(module)
        assert typ == expected_type, f"{domain} expected {expected_type}, got {typ}"


@@ -989,3 +990,35 @@ async def test_component_config_exceptions(hass, caplog):
        "Unknown error validating config for test_platform platform for test_domain component with PLATFORM_SCHEMA"
        in caplog.text
    )


@pytest.mark.parametrize(
    "domain, schema, expected",
    [
        ("zone", vol.Schema({vol.Optional("zone", default=list): [int]}), "list"),
        ("zone", vol.Schema({vol.Optional("zone", default=[]): [int]}), "list"),
        (
            "zone",
            vol.Schema({vol.Optional("zone", default={}): {vol.Optional("hello"): 1}}),
            "dict",
        ),
        (
            "zone",
            vol.Schema(
                {vol.Optional("zone", default=dict): {vol.Optional("hello"): 1}}
            ),
            "dict",
        ),
        ("zone", vol.Schema({vol.Optional("zone"): int}), None),
        ("zone", vol.Schema({"zone": int}), None),
        ("not_existing", vol.Schema({vol.Optional("zone", default=dict): dict}), None,),
        ("non_existing", vol.Schema({"zone": int}), None),
        ("zone", vol.Schema({}), None),
    ],
)
def test_identify_config_schema(domain, schema, expected):
    """Test identify config schema."""
    assert (
        config_util._identify_config_schema(Mock(DOMAIN=domain, CONFIG_SCHEMA=schema))
        == expected
    )
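Per the hunk above, _identify_config_schema now returns a single value ("dict", "list", or None) instead of a tuple that callers had to unpack. A minimal sketch of the new calling convention, mirroring one row of the parametrized test (the Mock stands in for an integration module, exactly as the test does; this is not the helper's implementation):

from unittest.mock import Mock

import voluptuous as vol

from homeassistant import config as config_util

schema = vol.Schema({vol.Optional("zone", default=list): [int]})
module = Mock(DOMAIN="zone", CONFIG_SCHEMA=schema)

# Old convention (the removed line above): typ, _ = config_util._identify_config_schema(module)
# New convention: a bare "dict", "list", or None comes back directly.
typ = config_util._identify_config_schema(module)
assert typ == "list"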