Mirror of https://github.com/home-assistant/core.git (synced 2025-09-22 03:19:33 +00:00)
Compare commits
35 Commits
SHA1:
07ce284acd
eac03ef4be
a38e075bda
cced74740f
1236c2b91e
d54d7c6958
9365ba3fcf
baf7fb7264
edfe8e1583
b36b1dbc70
253c848692
7a6ac578b4
95de94e53f
181b2803cd
e0f2fa33df
884c346bdf
c218ff5a75
fa43a218d2
f4cc64d289
4c31829832
ff32c1c3e9
677c276b41
ca1c696f54
78e5878247
470537bc5f
64556f6f69
2785b067e3
a129bc05ae
197736f66b
d82d7fa2e9
663db747e9
57998f6f0f
edbb995fff
312903025d
0ae5c325fe
@@ -2,9 +2,15 @@
.git
.github
config
docs

# Development
.devcontainer
.vscode

# Test related files
.tox
tests

# Other virtualization methods
venv
Dockerfile (new file, 17 lines)
@@ -0,0 +1,17 @@
ARG BUILD_FROM
FROM ${BUILD_FROM}

WORKDIR /usr/src

## Setup Home Assistant
COPY . homeassistant/
RUN pip3 install --no-cache-dir --no-index --only-binary=:all: --find-links "${WHEELS_LINKS}" \
-r homeassistant/requirements_all.txt -c homeassistant/homeassistant/package_constraints.txt \
&& pip3 install --no-cache-dir --no-index --only-binary=:all: --find-links "${WHEELS_LINKS}" \
-e ./homeassistant \
&& python3 -m compileall homeassistant/homeassistant

# Home Assistant S6-Overlay
COPY rootfs /

WORKDIR /config
@@ -14,7 +14,7 @@ schedules:
always: true
variables:
- name: versionBuilder
value: '6.9'
value: '7.2.0'
- group: docker
- group: github
- group: twine
@@ -108,11 +108,9 @@ stages:
docker run --rm --privileged \
-v ~/.docker:/root/.docker:rw \
-v /run/docker.sock:/run/docker.sock:rw \
-v $(pwd):/homeassistant:ro \
-v $(pwd):/data:ro \
homeassistant/amd64-builder:$(versionBuilder) \
--homeassistant $(homeassistantRelease) "--$(buildArch)" \
-r https://github.com/home-assistant/hassio-homeassistant \
-t generic --docker-hub homeassistant
--generic $(homeassistantRelease) "--$(buildArch)" -t /data \

docker run --rm --privileged \
-v ~/.docker:/root/.docker \
@@ -5,6 +5,7 @@ trigger:
branches:
include:
- dev
- rc
paths:
include:
- requirements_all.txt
@@ -18,7 +19,7 @@ schedules:
always: true
variables:
- name: versionWheels
value: '1.4-3.7-alpine3.10'
value: '1.10.1-3.7-alpine3.11'
resources:
repositories:
- repository: azure
@@ -32,8 +33,10 @@ jobs:
builderVersion: '$(versionWheels)'
builderApk: 'build-base;cmake;git;linux-headers;bluez-dev;libffi-dev;openssl-dev;glib-dev;eudev-dev;libxml2-dev;libxslt-dev;libpng-dev;libjpeg-turbo-dev;tiff-dev;autoconf;automake;cups-dev;gmp-dev;mpfr-dev;mpc1-dev;ffmpeg-dev;gammu-dev'
builderPip: 'Cython;numpy'
skipBinary: 'aiohttp'
wheelsRequirement: 'requirements_wheels.txt'
wheelsRequirementDiff: 'requirements_diff.txt'
wheelsConstraint: 'homeassistant/package_constraints.txt'
preBuild:
- script: |
cp requirements_all.txt requirements_wheels.txt
@@ -69,9 +72,5 @@ jobs:
sed -i "s|# py_noaa|py_noaa|g" ${requirement_file}
sed -i "s|# bme680|bme680|g" ${requirement_file}
sed -i "s|# python-gammu|python-gammu|g" ${requirement_file}

if [[ "$(buildArch)" =~ arm ]]; then
sed -i "s|# VL53L1X|VL53L1X|g" ${requirement_file}
fi
done
displayName: 'Prepare requirements files for Hass.io'
build.json (new file, 14 lines)
@@ -0,0 +1,14 @@
{
"image": "homeassistant/{arch}-homeassistant",
"build_from": {
"aarch64": "homeassistant/aarch64-homeassistant-base:7.0.1",
"armhf": "homeassistant/armhf-homeassistant-base:7.0.1",
"armv7": "homeassistant/armv7-homeassistant-base:7.0.1",
"amd64": "homeassistant/amd64-homeassistant-base:7.0.1",
"i386": "homeassistant/i386-homeassistant-base:7.0.1"
},
"labels": {
"io.hass.type": "core"
},
"version_tag": true
}
@@ -73,8 +73,8 @@ async def async_setup(hass, config):
conf.get("ssh_key", conf.get("pub_key", "")),
conf[CONF_MODE],
conf[CONF_REQUIRE_IP],
conf[CONF_INTERFACE],
conf[CONF_DNSMASQ],
interface=conf[CONF_INTERFACE],
dnsmasq=conf[CONF_DNSMASQ],
)

await api.connection.async_connect()
@@ -2,7 +2,7 @@
"domain": "asuswrt",
"name": "ASUSWRT",
"documentation": "https://www.home-assistant.io/integrations/asuswrt",
"requirements": ["aioasuswrt==1.2.2"],
"requirements": ["aioasuswrt==1.2.3"],
"dependencies": [],
"codeowners": ["@kennedyshead"]
}
@@ -5,7 +5,7 @@
"documentation": "https://www.home-assistant.io/integrations/huawei_lte",
"requirements": [
"getmac==0.8.1",
"huawei-lte-api==1.4.10",
"huawei-lte-api==1.4.11",
"stringcase==1.2.0",
"url-normalize==1.4.1"
],
@@ -104,7 +104,7 @@ async def async_get_api(hass):

async def async_get_location(hass, api, latitude, longitude):
"""Retrieve pyipma location, location name to be used as the entity name."""
with async_timeout.timeout(10):
with async_timeout.timeout(30):
location = await Location.get(api, float(latitude), float(longitude))

_LOGGER.debug(
@@ -505,4 +505,6 @@ class ONVIFHassCamera(Camera):
@property
def unique_id(self) -> Optional[str]:
"""Return a unique ID."""
if self._profile_index:
return f"{self._mac}_{self._profile_index}"
return self._mac
@@ -75,8 +75,9 @@ async def async_setup_entry(hass: HomeAssistantType, entry: ConfigEntry):

def token_saver(token):
_LOGGER.debug("Saving updated token")
entry.data[CONF_TOKEN] = token
hass.config_entries.async_update_entry(entry, data={**entry.data})
hass.config_entries.async_update_entry(
entry, data={**entry.data, CONF_TOKEN: token}
)

# Force token update.
entry.data[CONF_TOKEN]["expires_in"] = -1
@@ -105,12 +106,18 @@ async def async_setup_entry(hass: HomeAssistantType, entry: ConfigEntry):
async def async_setup_webhook(hass: HomeAssistantType, entry: ConfigEntry, session):
"""Set up a webhook to handle binary sensor events."""
if CONF_WEBHOOK_ID not in entry.data:
entry.data[CONF_WEBHOOK_ID] = hass.components.webhook.async_generate_id()
entry.data[CONF_WEBHOOK_URL] = hass.components.webhook.async_generate_url(
entry.data[CONF_WEBHOOK_ID]
webhook_id = hass.components.webhook.async_generate_id()
webhook_url = hass.components.webhook.async_generate_url(webhook_id)
_LOGGER.info("Registering new webhook at: %s", webhook_url)

hass.config_entries.async_update_entry(
entry,
data={
**entry.data,
CONF_WEBHOOK_ID: webhook_id,
CONF_WEBHOOK_URL: webhook_url,
},
)
_LOGGER.info("Registering new webhook at: %s", entry.data[CONF_WEBHOOK_URL])
hass.config_entries.async_update_entry(entry, data={**entry.data})
await hass.async_add_executor_job(
session.update_webhook,
entry.data[CONF_WEBHOOK_URL],
@@ -342,7 +342,6 @@ class Recorder(threading.Thread):
# has changed. This reduces the disk io.
while True:
event = self.queue.get()

if event is None:
self._close_run()
self._close_connection()
@@ -356,7 +355,7 @@ class Recorder(threading.Thread):
self.queue.task_done()
if self.commit_interval:
self._timechanges_seen += 1
if self.commit_interval >= self._timechanges_seen:
if self._timechanges_seen >= self.commit_interval:
self._timechanges_seen = 0
self._commit_event_session_or_retry()
continue
@@ -376,6 +375,9 @@ class Recorder(threading.Thread):
self.event_session.flush()
except (TypeError, ValueError):
_LOGGER.warning("Event is not JSON serializable: %s", event)
except Exception as err: # pylint: disable=broad-except
# Must catch the exception to prevent the loop from collapsing
_LOGGER.exception("Error adding event: %s", err)

if dbevent and event.event_type == EVENT_STATE_CHANGED:
try:
@@ -387,6 +389,9 @@ class Recorder(threading.Thread):
"State is not JSON serializable: %s",
event.data.get("new_state"),
)
except Exception as err: # pylint: disable=broad-except
# Must catch the exception to prevent the loop from collapsing
_LOGGER.exception("Error adding state change: %s", err)

# If they do not have a commit interval
# than we commit right away
@@ -404,17 +409,26 @@ class Recorder(threading.Thread):
try:
self._commit_event_session()
return

except exc.OperationalError as err:
_LOGGER.error(
"Error in database connectivity: %s. " "(retrying in %s seconds)",
err,
self.db_retry_wait,
)
except (exc.InternalError, exc.OperationalError) as err:
if err.connection_invalidated:
_LOGGER.error(
"Database connection invalidated: %s. "
"(retrying in %s seconds)",
err,
self.db_retry_wait,
)
else:
_LOGGER.error(
"Error in database connectivity: %s. "
"(retrying in %s seconds)",
err,
self.db_retry_wait,
)
tries += 1

except exc.SQLAlchemyError:
_LOGGER.exception("Error saving events")
except Exception as err: # pylint: disable=broad-except
# Must catch the exception to prevent the loop from collapsing
_LOGGER.exception("Error saving events: %s", err)
return

_LOGGER.error(
@@ -423,10 +437,15 @@ class Recorder(threading.Thread):
)
try:
self.event_session.close()
except exc.SQLAlchemyError:
_LOGGER.exception("Failed to close event session.")
except Exception as err: # pylint: disable=broad-except
# Must catch the exception to prevent the loop from collapsing
_LOGGER.exception("Error while closing event session: %s", err)

self.event_session = self.get_session()
try:
self.event_session = self.get_session()
except Exception as err: # pylint: disable=broad-except
# Must catch the exception to prevent the loop from collapsing
_LOGGER.exception("Error while creating new event session: %s", err)

def _commit_event_session(self):
try:
@@ -206,6 +206,7 @@ class SamsungTVWSBridge(SamsungTVBridge):
CONF_TIMEOUT: 31,
}

result = None
try:
LOGGER.debug("Try config: %s", config)
with SamsungTVWS(
@@ -223,9 +224,13 @@
return RESULT_SUCCESS
except WebSocketException:
LOGGER.debug("Working but unsupported config: %s", config)
return RESULT_NOT_SUPPORTED
result = RESULT_NOT_SUPPORTED
except (OSError, ConnectionFailure) as err:
LOGGER.debug("Failing config: %s, error: %s", config, err)
# pylint: disable=useless-else-on-loop
else:
if result:
return result

return RESULT_NOT_SUCCESSFUL
@@ -3,7 +3,7 @@
"name": "SimpliSafe",
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/simplisafe",
"requirements": ["simplisafe-python==9.0.3"],
"requirements": ["simplisafe-python==9.0.4"],
"dependencies": [],
"codeowners": ["@bachya"]
}
@@ -1,6 +1,7 @@
"""Ask tankerkoenig.de for petrol price information."""
from datetime import timedelta
import logging
from math import ceil

import pytankerkoenig
import voluptuous as vol
@@ -164,27 +165,41 @@ class TankerkoenigData:
)
return False
self.add_station(additional_station_data["station"])
if len(self.stations) > 10:
_LOGGER.warning(
"Found more than 10 stations to check. "
"This might invalidate your api-key on the long run. "
"Try using a smaller radius"
)
return True

async def fetch_data(self):
"""Get the latest data from tankerkoenig.de."""
_LOGGER.debug("Fetching new data from tankerkoenig.de")
station_ids = list(self.stations)
data = await self._hass.async_add_executor_job(
pytankerkoenig.getPriceList, self._api_key, station_ids
)

if data["ok"]:
prices = {}

# The API seems to only return at most 10 results, so split the list in chunks of 10
# and merge it together.
for index in range(ceil(len(station_ids) / 10)):
data = await self._hass.async_add_executor_job(
pytankerkoenig.getPriceList,
self._api_key,
station_ids[index * 10 : (index + 1) * 10],
)

_LOGGER.debug("Received data: %s", data)
if not data["ok"]:
_LOGGER.error(
"Error fetching data from tankerkoenig.de: %s", data["message"]
)
raise TankerkoenigError(data["message"])
if "prices" not in data:
_LOGGER.error("Did not receive price information from tankerkoenig.de")
raise TankerkoenigError("No prices in data")
else:
_LOGGER.error(
"Error fetching data from tankerkoenig.de: %s", data["message"]
)
raise TankerkoenigError(data["message"])
return data["prices"]
prices.update(data["prices"])
return prices

def add_station(self, station: dict):
"""Add fuel station to the entity list."""
@@ -18,7 +18,7 @@ from homeassistant.const import (
STATE_ALARM_TRIGGERED,
)

from . import DOMAIN as TOTALCONNECT_DOMAIN
from .const import DOMAIN

_LOGGER = logging.getLogger(__name__)

@@ -30,7 +30,7 @@ def setup_platform(hass, config, add_entities, discovery_info=None):

alarms = []

client = hass.data[TOTALCONNECT_DOMAIN].client
client = hass.data[DOMAIN].client

for location_id, location in client.locations.items():
location_name = location.location_name
@@ -71,7 +71,7 @@ class TotalConnectAlarm(alarm.AlarmControlPanel):

def update(self):
"""Return the state of the device."""
status = self._client.get_armed_status(self._location_id)
self._client.get_armed_status(self._location_id)
attr = {
"location_name": self._name,
"location_id": self._location_id,
@@ -79,47 +79,36 @@ class TotalConnectAlarm(alarm.AlarmControlPanel):
"low_battery": self._client.locations[self._location_id].low_battery,
"cover_tampered": self._client.locations[
self._location_id
].is_cover_tampered,
].is_cover_tampered(),
"triggered_source": None,
"triggered_zone": None,
}

if status in (self._client.DISARMED, self._client.DISARMED_BYPASS):
if self._client.locations[self._location_id].is_disarmed():
state = STATE_ALARM_DISARMED
elif status in (
self._client.ARMED_STAY,
self._client.ARMED_STAY_INSTANT,
self._client.ARMED_STAY_INSTANT_BYPASS,
):
elif self._client.locations[self._location_id].is_armed_home():
state = STATE_ALARM_ARMED_HOME
elif status == self._client.ARMED_STAY_NIGHT:
elif self._client.locations[self._location_id].is_armed_night():
state = STATE_ALARM_ARMED_NIGHT
elif status in (
self._client.ARMED_AWAY,
self._client.ARMED_AWAY_BYPASS,
self._client.ARMED_AWAY_INSTANT,
self._client.ARMED_AWAY_INSTANT_BYPASS,
):
elif self._client.locations[self._location_id].is_armed_away():
state = STATE_ALARM_ARMED_AWAY
elif status == self._client.ARMED_CUSTOM_BYPASS:
elif self._client.locations[self._location_id].is_armed_custom_bypass():
state = STATE_ALARM_ARMED_CUSTOM_BYPASS
elif status == self._client.ARMING:
elif self._client.locations[self._location_id].is_arming():
state = STATE_ALARM_ARMING
elif status == self._client.DISARMING:
elif self._client.locations[self._location_id].is_disarming():
state = STATE_ALARM_DISARMING
elif status == self._client.ALARMING:
elif self._client.locations[self._location_id].is_triggered_police():
state = STATE_ALARM_TRIGGERED
attr["triggered_source"] = "Police/Medical"
elif status == self._client.ALARMING_FIRE_SMOKE:
elif self._client.locations[self._location_id].is_triggered_fire():
state = STATE_ALARM_TRIGGERED
attr["triggered_source"] = "Fire/Smoke"
elif status == self._client.ALARMING_CARBON_MONOXIDE:
elif self._client.locations[self._location_id].is_triggered_gas():
state = STATE_ALARM_TRIGGERED
attr["triggered_source"] = "Carbon Monoxide"
else:
logging.info(
"Total Connect Client returned unknown status code: %s", status
)
logging.info("Total Connect Client returned unknown status")
state = None

self._state = state
homeassistant/components/totalconnect/const.py (new file, 3 lines)
@@ -0,0 +1,3 @@
"""TotalConnect constants."""

DOMAIN = "totalconnect"
@@ -83,6 +83,8 @@ async def async_setup_entry(hass, config_entry):
controller_id = get_controller_id_from_config_entry(config_entry)
hass.data[DOMAIN][controller_id] = controller

hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, controller.shutdown)

if controller.mac is None:
return True

@@ -96,8 +98,6 @@ async def async_setup_entry(hass, config_entry):
# sw_version=config.raw['swversion'],
)

hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, controller.shutdown)

return True
@@ -64,7 +64,7 @@ class VelbusLight(VelbusEntity, Light):
@property
def brightness(self):
"""Return the brightness of the light."""
return self._module.get_dimmer_state(self._channel)
return int((self._module.get_dimmer_state(self._channel) * 255) / 100)

def turn_on(self, **kwargs):
"""Instruct the Velbus light to turn on."""
@@ -80,10 +80,15 @@ class VelbusLight(VelbusEntity, Light):
attr, *args = "set_led_state", self._channel, "on"
else:
if ATTR_BRIGHTNESS in kwargs:
# Make sure a low but non-zero value is not rounded down to zero
if kwargs[ATTR_BRIGHTNESS] == 0:
brightness = 0
else:
brightness = max(int((kwargs[ATTR_BRIGHTNESS] * 100) / 255), 1)
attr, *args = (
"set_dimmer_state",
self._channel,
kwargs[ATTR_BRIGHTNESS],
brightness,
kwargs.get(ATTR_TRANSITION, 0),
)
else:
@@ -8,6 +8,8 @@ import voluptuous as vol
from homeassistant import config_entries, const as ha_const
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.device_registry import CONNECTION_ZIGBEE
from homeassistant.helpers.dispatcher import async_dispatcher_send
from homeassistant.helpers.typing import HomeAssistantType

from . import api
from .core import ZHAGateway
@@ -27,6 +29,7 @@ from .core.const import (
DEFAULT_BAUDRATE,
DEFAULT_RADIO_TYPE,
DOMAIN,
SIGNAL_ADD_ENTITIES,
RadioType,
)

@@ -92,6 +95,9 @@ async def async_setup_entry(hass, config_entry):
zha_data = hass.data.setdefault(DATA_ZHA, {})
config = zha_data.get(DATA_ZHA_CONFIG, {})

for component in COMPONENTS:
zha_data.setdefault(component, [])

if config.get(CONF_ENABLE_QUIRKS, True):
# needs to be done here so that the ZHA module is finished loading
# before zhaquirks is imported
@@ -101,20 +107,10 @@ async def async_setup_entry(hass, config_entry):
await zha_gateway.async_initialize()

zha_data[DATA_ZHA_DISPATCHERS] = []
zha_data[DATA_ZHA_PLATFORM_LOADED] = asyncio.Event()
platforms = []
zha_data[DATA_ZHA_PLATFORM_LOADED] = []
for component in COMPONENTS:
platforms.append(
hass.async_create_task(
hass.config_entries.async_forward_entry_setup(config_entry, component)
)
)

async def _platforms_loaded():
await asyncio.gather(*platforms)
zha_data[DATA_ZHA_PLATFORM_LOADED].set()

hass.async_create_task(_platforms_loaded())
coro = hass.config_entries.async_forward_entry_setup(config_entry, component)
zha_data[DATA_ZHA_PLATFORM_LOADED].append(hass.async_create_task(coro))

device_registry = await hass.helpers.device_registry.async_get_registry()
device_registry.async_get_or_create(
@@ -134,7 +130,7 @@ async def async_setup_entry(hass, config_entry):
await zha_data[DATA_ZHA_GATEWAY].async_update_device_storage()

hass.bus.async_listen_once(ha_const.EVENT_HOMEASSISTANT_STOP, async_zha_shutdown)
hass.async_create_task(zha_gateway.async_load_devices())
asyncio.create_task(async_load_entities(hass, config_entry))
return True


@@ -152,3 +148,16 @@ async def async_unload_entry(hass, config_entry):
await hass.config_entries.async_forward_entry_unload(config_entry, component)

return True


async def async_load_entities(
hass: HomeAssistantType, config_entry: config_entries.ConfigEntry
) -> None:
"""Load entities after integration was setup."""
await hass.data[DATA_ZHA][DATA_ZHA_GATEWAY].async_prepare_entities()
to_setup = hass.data[DATA_ZHA][DATA_ZHA_PLATFORM_LOADED]
results = await asyncio.gather(*to_setup, return_exceptions=True)
for res in results:
if isinstance(res, Exception):
_LOGGER.warning("Couldn't setup zha platform: %s", res)
async_dispatcher_send(hass, SIGNAL_ADD_ENTITIES)
@@ -49,7 +49,7 @@ STRICT_MATCH = functools.partial(ZHA_ENTITIES.strict_match, DOMAIN)

async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up the Zigbee Home Automation binary sensor from config entry."""
entities_to_create = hass.data[DATA_ZHA][DOMAIN] = []
entities_to_create = hass.data[DATA_ZHA][DOMAIN]

unsub = async_dispatcher_connect(
hass,
@@ -8,6 +8,16 @@ from homeassistant.core import callback
from homeassistant.helpers.typing import HomeAssistantType

from . import const as zha_const, registries as zha_regs, typing as zha_typing
from .. import ( # noqa: F401 pylint: disable=unused-import,
binary_sensor,
cover,
device_tracker,
fan,
light,
lock,
sensor,
switch,
)
from .channels import base

_LOGGER = logging.getLogger(__name__)
@@ -36,7 +36,6 @@ from .const import (
DATA_ZHA,
DATA_ZHA_BRIDGE_ID,
DATA_ZHA_GATEWAY,
DATA_ZHA_PLATFORM_LOADED,
DEBUG_COMP_BELLOWS,
DEBUG_COMP_ZHA,
DEBUG_COMP_ZIGPY,
@@ -157,34 +156,40 @@ class ZHAGateway:
self._hass.data[DATA_ZHA][DATA_ZHA_BRIDGE_ID] = str(
self.application_controller.ieee
)
await self.async_load_devices()
self._initialize_groups()

async def async_load_devices(self) -> None:
"""Restore ZHA devices from zigpy application state."""
await self._hass.data[DATA_ZHA][DATA_ZHA_PLATFORM_LOADED].wait()
zigpy_devices = self.application_controller.devices.values()
for zigpy_device in zigpy_devices:
self._async_get_or_create_device(zigpy_device, restored=True)

async def async_prepare_entities(self) -> None:
"""Prepare entities by initializing device channels."""
semaphore = asyncio.Semaphore(2)

async def _throttle(device: zha_typing.ZigpyDeviceType):
async def _throttle(zha_device: zha_typing.ZhaDeviceType, cached: bool):
async with semaphore:
await self.async_device_restored(device)
await zha_device.async_initialize(from_cache=cached)

zigpy_devices = self.application_controller.devices.values()
_LOGGER.debug("Loading battery powered devices")
await asyncio.gather(
*[
_throttle(dev)
for dev in zigpy_devices
if not dev.node_desc.is_mains_powered
_throttle(dev, cached=True)
for dev in self.devices.values()
if not dev.is_mains_powered
]
)
async_dispatcher_send(self._hass, SIGNAL_ADD_ENTITIES)

_LOGGER.debug("Loading mains powered devices")
await asyncio.gather(
*[_throttle(dev) for dev in zigpy_devices if dev.node_desc.is_mains_powered]
*[
_throttle(dev, cached=False)
for dev in self.devices.values()
if dev.is_mains_powered
]
)
async_dispatcher_send(self._hass, SIGNAL_ADD_ENTITIES)

def device_joined(self, device):
"""Handle device joined.
@@ -29,7 +29,7 @@ STRICT_MATCH = functools.partial(ZHA_ENTITIES.strict_match, DOMAIN)

async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up the Zigbee Home Automation cover from config entry."""
entities_to_create = hass.data[DATA_ZHA][DOMAIN] = []
entities_to_create = hass.data[DATA_ZHA][DOMAIN]

unsub = async_dispatcher_connect(
hass,

@@ -26,7 +26,7 @@ _LOGGER = logging.getLogger(__name__)

async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up the Zigbee Home Automation device tracker from config entry."""
entities_to_create = hass.data[DATA_ZHA][DOMAIN] = []
entities_to_create = hass.data[DATA_ZHA][DOMAIN]

unsub = async_dispatcher_connect(
hass,

@@ -53,7 +53,7 @@ STRICT_MATCH = functools.partial(ZHA_ENTITIES.strict_match, DOMAIN)

async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up the Zigbee Home Automation fan from config entry."""
entities_to_create = hass.data[DATA_ZHA][DOMAIN] = []
entities_to_create = hass.data[DATA_ZHA][DOMAIN]

unsub = async_dispatcher_connect(
hass,

@@ -52,7 +52,7 @@ _REFRESH_INTERVAL = (45, 75)

async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up the Zigbee Home Automation light from config entry."""
entities_to_create = hass.data[DATA_ZHA][light.DOMAIN] = []
entities_to_create = hass.data[DATA_ZHA][light.DOMAIN]

unsub = async_dispatcher_connect(
hass,

@@ -36,7 +36,7 @@ VALUE_TO_STATE = dict(enumerate(STATE_LIST))

async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up the Zigbee Home Automation Door Lock from config entry."""
entities_to_create = hass.data[DATA_ZHA][DOMAIN] = []
entities_to_create = hass.data[DATA_ZHA][DOMAIN]

unsub = async_dispatcher_connect(
hass,

@@ -68,7 +68,7 @@ STRICT_MATCH = functools.partial(ZHA_ENTITIES.strict_match, DOMAIN)

async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up the Zigbee Home Automation sensor from config entry."""
entities_to_create = hass.data[DATA_ZHA][DOMAIN] = []
entities_to_create = hass.data[DATA_ZHA][DOMAIN]

unsub = async_dispatcher_connect(
hass,

@@ -26,7 +26,7 @@ STRICT_MATCH = functools.partial(ZHA_ENTITIES.strict_match, DOMAIN)

async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up the Zigbee Home Automation switch from config entry."""
entities_to_create = hass.data[DATA_ZHA][DOMAIN] = []
entities_to_create = hass.data[DATA_ZHA][DOMAIN]

unsub = async_dispatcher_connect(
hass,
@@ -3,7 +3,7 @@
"name": "Z-Wave",
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/zwave",
"requirements": ["homeassistant-pyozw==0.1.9", "pydispatcher==2.0.5"],
"requirements": ["homeassistant-pyozw==0.1.10", "pydispatcher==2.0.5"],
"dependencies": [],
"codeowners": ["@home-assistant/z-wave"]
}
@@ -562,34 +562,36 @@ def _log_pkg_error(package: str, component: str, config: Dict, message: str) ->
_LOGGER.error(message)


def _identify_config_schema(module: ModuleType) -> Tuple[Optional[str], Optional[Dict]]:
def _identify_config_schema(module: ModuleType) -> Optional[str]:
"""Extract the schema and identify list or dict based."""
try:
key = next(k for k in module.CONFIG_SCHEMA.schema if k == module.DOMAIN) # type: ignore
except (AttributeError, StopIteration):
return None, None
return None

schema = module.CONFIG_SCHEMA.schema[key] # type: ignore

if hasattr(key, "default") and not isinstance(
key.default, vol.schema_builder.Undefined
):
default_value = schema(key.default())
default_value = module.CONFIG_SCHEMA({module.DOMAIN: key.default()})[ # type: ignore
module.DOMAIN # type: ignore
]

if isinstance(default_value, dict):
return "dict", schema
return "dict"

if isinstance(default_value, list):
return "list", schema
return "list"

return None, None
return None

t_schema = str(schema)
if t_schema.startswith("{") or "schema_with_slug_keys" in t_schema:
return ("dict", schema)
return "dict"
if t_schema.startswith(("[", "All(<function ensure_list")):
return ("list", schema)
return "", schema
return "list"
return None


def _recursive_merge(conf: Dict[str, Any], package: Dict[str, Any]) -> Union[bool, str]:
@@ -642,8 +644,7 @@ async def merge_packages_config(
merge_list = hasattr(component, "PLATFORM_SCHEMA")

if not merge_list and hasattr(component, "CONFIG_SCHEMA"):
merge_type, _ = _identify_config_schema(component)
merge_list = merge_type == "list"
merge_list = _identify_config_schema(component) == "list"

if merge_list:
config[comp_name] = cv.remove_falsy(
@@ -1,7 +1,7 @@
"""Constants used by Home Assistant components."""
MAJOR_VERSION = 0
MINOR_VERSION = 107
PATCH_VERSION = "3"
PATCH_VERSION = "7"
__short_version__ = f"{MAJOR_VERSION}.{MINOR_VERSION}"
__version__ = f"{__short_version__}.{PATCH_VERSION}"
REQUIRED_PYTHON_VER = (3, 7, 0)
@@ -701,7 +701,9 @@ class Script:

def _log(self, msg, *args, level=logging.INFO):
if self.name:
msg = f"{self.name}: {msg}"
msg = f"%s: {msg}"
args = [self.name, *args]

if level == _LOG_EXCEPTION:
self._logger.exception(msg, *args)
else:
@@ -139,7 +139,7 @@ aio_georss_gdacs==0.3
aioambient==1.0.4

# homeassistant.components.asuswrt
aioasuswrt==1.2.2
aioasuswrt==1.2.3

# homeassistant.components.automatic
aioautomatic==0.6.5
@@ -699,7 +699,7 @@ holidays==0.10.1
home-assistant-frontend==20200318.1

# homeassistant.components.zwave
homeassistant-pyozw==0.1.9
homeassistant-pyozw==0.1.10

# homeassistant.components.homematicip_cloud
homematicip==0.10.17
@@ -712,7 +712,7 @@ horimote==0.4.1
httplib2==0.10.3

# homeassistant.components.huawei_lte
huawei-lte-api==1.4.10
huawei-lte-api==1.4.11

# homeassistant.components.hydrawise
hydrawiser==0.1.1
@@ -1856,7 +1856,7 @@ simplehound==0.3
simplepush==1.1.4

# homeassistant.components.simplisafe
simplisafe-python==9.0.3
simplisafe-python==9.0.4

# homeassistant.components.sisyphus
sisyphus-control==2.2.1
@@ -50,7 +50,7 @@ aio_georss_gdacs==0.3
aioambient==1.0.4

# homeassistant.components.asuswrt
aioasuswrt==1.2.2
aioasuswrt==1.2.3

# homeassistant.components.automatic
aioautomatic==0.6.5
@@ -266,7 +266,7 @@ holidays==0.10.1
home-assistant-frontend==20200318.1

# homeassistant.components.zwave
homeassistant-pyozw==0.1.9
homeassistant-pyozw==0.1.10

# homeassistant.components.homematicip_cloud
homematicip==0.10.17
@@ -276,7 +276,7 @@ homematicip==0.10.17
httplib2==0.10.3

# homeassistant.components.huawei_lte
huawei-lte-api==1.4.10
huawei-lte-api==1.4.11

# homeassistant.components.iaqualink
iaqualink==0.3.1
@@ -649,7 +649,7 @@ sentry-sdk==0.13.5
simplehound==0.3

# homeassistant.components.simplisafe
simplisafe-python==9.0.3
simplisafe-python==9.0.4

# homeassistant.components.sleepiq
sleepyq==0.7
rootfs/etc/services.d/home-assistant/finish (new file, 5 lines)
@@ -0,0 +1,5 @@
#!/usr/bin/execlineb -S0
# ==============================================================================
# Take down the S6 supervision tree when Home Assistant fails
# ==============================================================================
s6-svscanctl -t /var/run/s6/services
rootfs/etc/services.d/home-assistant/run (new file, 7 lines)
@@ -0,0 +1,7 @@
#!/usr/bin/with-contenv bashio
# ==============================================================================
# Start Home Assistant service
# ==============================================================================
cd /config || bashio::exit.nok "Can't find config folder!"

exec python3 -m homeassistant --config /config
@@ -1,5 +1,5 @@
"""Tests for Samsung TV config flow."""
from unittest.mock import call, patch
from unittest.mock import Mock, PropertyMock, call, patch

from asynctest import mock
import pytest
@@ -19,7 +19,7 @@ from homeassistant.components.ssdp import (
ATTR_UPNP_MODEL_NAME,
ATTR_UPNP_UDN,
)
from homeassistant.const import CONF_HOST, CONF_ID, CONF_METHOD, CONF_NAME
from homeassistant.const import CONF_HOST, CONF_ID, CONF_METHOD, CONF_NAME, CONF_TOKEN

MOCK_USER_DATA = {CONF_HOST: "fake_host", CONF_NAME: "fake_name"}
MOCK_SSDP_DATA = {
@@ -46,6 +46,20 @@ AUTODETECT_LEGACY = {
"host": "fake_host",
"timeout": 31,
}
AUTODETECT_WEBSOCKET_PLAIN = {
"host": "fake_host",
"name": "HomeAssistant",
"port": 8001,
"timeout": 31,
"token": None,
}
AUTODETECT_WEBSOCKET_SSL = {
"host": "fake_host",
"name": "HomeAssistant",
"port": 8002,
"timeout": 31,
"token": None,
}


@pytest.fixture(name="remote")
@@ -446,20 +460,48 @@ async def test_autodetect_websocket(hass, remote, remotews):
with patch(
"homeassistant.components.samsungtv.bridge.Remote", side_effect=OSError("Boom"),
), patch("homeassistant.components.samsungtv.bridge.SamsungTVWS") as remotews:
enter = Mock()
type(enter).token = PropertyMock(return_value="123456789")
remote = Mock()
remote.__enter__ = Mock(return_value=enter)
remote.__exit__ = Mock(return_value=False)
remotews.return_value = remote

result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": "user"}, data=MOCK_USER_DATA
)
assert result["type"] == "create_entry"
assert result["data"][CONF_METHOD] == "websocket"
assert result["data"][CONF_TOKEN] == "123456789"
assert remotews.call_count == 1
assert remotews.call_args_list == [call(**AUTODETECT_WEBSOCKET_PLAIN)]


async def test_autodetect_websocket_ssl(hass, remote, remotews):
"""Test for send key with autodetection of protocol."""
with patch(
"homeassistant.components.samsungtv.bridge.Remote", side_effect=OSError("Boom"),
), patch(
"homeassistant.components.samsungtv.bridge.SamsungTVWS",
side_effect=[WebSocketProtocolException("Boom"), mock.DEFAULT],
) as remotews:
enter = Mock()
type(enter).token = PropertyMock(return_value="123456789")
remote = Mock()
remote.__enter__ = Mock(return_value=enter)
remote.__exit__ = Mock(return_value=False)
remotews.return_value = remote

result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": "user"}, data=MOCK_USER_DATA
)
assert result["type"] == "create_entry"
assert result["data"][CONF_METHOD] == "websocket"
assert result["data"][CONF_TOKEN] == "123456789"
assert remotews.call_count == 2
assert remotews.call_args_list == [
call(
host="fake_host",
name="HomeAssistant",
port=8001,
timeout=31,
token=None,
)
call(**AUTODETECT_WEBSOCKET_PLAIN),
call(**AUTODETECT_WEBSOCKET_SSL),
]


@@ -524,18 +566,6 @@ async def test_autodetect_none(hass, remote, remotews):
]
assert remotews.call_count == 2
assert remotews.call_args_list == [
call(
host="fake_host",
name="HomeAssistant",
port=8001,
timeout=31,
token=None,
),
call(
host="fake_host",
name="HomeAssistant",
port=8002,
timeout=31,
token=None,
),
call(**AUTODETECT_WEBSOCKET_PLAIN),
call(**AUTODETECT_WEBSOCKET_SSL),
]
@@ -34,8 +34,11 @@ from homeassistant.const import (
ATTR_FRIENDLY_NAME,
ATTR_SUPPORTED_FEATURES,
CONF_HOST,
CONF_IP_ADDRESS,
CONF_METHOD,
CONF_NAME,
CONF_PORT,
CONF_TOKEN,
SERVICE_MEDIA_NEXT_TRACK,
SERVICE_MEDIA_PAUSE,
SERVICE_MEDIA_PLAY,
@@ -51,7 +54,7 @@ from homeassistant.const import (
from homeassistant.setup import async_setup_component
import homeassistant.util.dt as dt_util

from tests.common import async_fire_time_changed
from tests.common import MockConfigEntry, async_fire_time_changed

ENTITY_ID = f"{DOMAIN}.fake"
MOCK_CONFIG = {
@@ -64,17 +67,40 @@ MOCK_CONFIG = {
}
]
}

MOCK_CONFIGWS = {
SAMSUNGTV_DOMAIN: [
{
CONF_HOST: "fake",
CONF_NAME: "fake",
CONF_PORT: 8001,
CONF_TOKEN: "123456789",
CONF_ON_ACTION: [{"delay": "00:00:01"}],
}
]
}
MOCK_CALLS_WS = {
"host": "fake",
"port": 8001,
"token": None,
"timeout": 31,
"name": "HomeAssistant",
}

MOCK_ENTRY_WS = {
CONF_IP_ADDRESS: "test",
CONF_HOST: "fake",
CONF_METHOD: "websocket",
CONF_NAME: "fake",
CONF_PORT: 8001,
CONF_TOKEN: "abcde",
}
MOCK_CALLS_ENTRY_WS = {
"host": "fake",
"name": "HomeAssistant",
"port": 8001,
"timeout": 1,
"token": "abcde",
}

ENTITY_ID_NOTURNON = f"{DOMAIN}.fake_noturnon"
MOCK_CONFIG_NOTURNON = {
@@ -155,6 +181,52 @@ async def test_setup_without_turnon(hass, remote):
assert hass.states.get(ENTITY_ID_NOTURNON)


async def test_setup_websocket(hass, remotews, mock_now):
"""Test setup of platform."""
with patch("homeassistant.components.samsungtv.bridge.SamsungTVWS") as remote_class:
enter = mock.Mock()
type(enter).token = mock.PropertyMock(return_value="987654321")
remote = mock.Mock()
remote.__enter__ = mock.Mock(return_value=enter)
remote.__exit__ = mock.Mock()
remote_class.return_value = remote

await setup_samsungtv(hass, MOCK_CONFIGWS)

assert remote_class.call_count == 1
assert remote_class.call_args_list == [call(**MOCK_CALLS_WS)]
assert hass.states.get(ENTITY_ID)


async def test_setup_websocket_2(hass, mock_now):
"""Test setup of platform from config entry."""
entity_id = f"{DOMAIN}.fake"

entry = MockConfigEntry(
domain=SAMSUNGTV_DOMAIN, data=MOCK_ENTRY_WS, unique_id=entity_id,
)
entry.add_to_hass(hass)

config_entries = hass.config_entries.async_entries(SAMSUNGTV_DOMAIN)
assert len(config_entries) == 1
assert entry is config_entries[0]

assert await async_setup_component(hass, SAMSUNGTV_DOMAIN, {})
await hass.async_block_till_done()

next_update = mock_now + timedelta(minutes=5)
with patch(
"homeassistant.components.samsungtv.bridge.SamsungTVWS"
) as remote, patch("homeassistant.util.dt.utcnow", return_value=next_update):
async_fire_time_changed(hass, next_update)
await hass.async_block_till_done()

state = hass.states.get(entity_id)
assert state
assert remote.call_count == 1
assert remote.call_args_list == [call(**MOCK_CALLS_ENTRY_WS)]


async def test_update_on(hass, remote, mock_now):
"""Testing update tv on."""
await setup_samsungtv(hass, MOCK_CONFIG)
@@ -1743,3 +1743,15 @@ async def test_if_running_parallel(hass):
assert len(events) == 4
assert events[2].data["value"] == 2
assert events[3].data["value"] == 2


async def test_script_logging(caplog):
"""Test script logging."""
script_obj = script.Script(None, [], "Script with % Name")
script_obj._log("Test message with name %s", 1)

assert "Script with % Name: Test message with name 1" in caplog.text

script_obj = script.Script(None, [])
script_obj._log("Test message without name %s", 2)
assert "Test message without name 2" in caplog.text
@@ -722,7 +722,7 @@ async def test_merge_id_schema(hass):
for domain, expected_type in types.items():
integration = await async_get_integration(hass, domain)
module = integration.get_component()
typ, _ = config_util._identify_config_schema(module)
typ = config_util._identify_config_schema(module)
assert typ == expected_type, f"{domain} expected {expected_type}, got {typ}"


@@ -995,15 +995,30 @@ async def test_component_config_exceptions(hass, caplog):
@pytest.mark.parametrize(
"domain, schema, expected",
[
("zone", vol.Schema({vol.Optional("zone", default=[]): list}), "list"),
("zone", vol.Schema({vol.Optional("zone", default=dict): dict}), "dict"),
("zone", vol.Schema({vol.Optional("zone", default=list): [int]}), "list"),
("zone", vol.Schema({vol.Optional("zone", default=[]): [int]}), "list"),
(
"zone",
vol.Schema({vol.Optional("zone", default={}): {vol.Optional("hello"): 1}}),
"dict",
),
(
"zone",
vol.Schema(
{vol.Optional("zone", default=dict): {vol.Optional("hello"): 1}}
),
"dict",
),
("zone", vol.Schema({vol.Optional("zone"): int}), None),
("zone", vol.Schema({"zone": int}), None),
("not_existing", vol.Schema({vol.Optional("zone", default=dict): dict}), None,),
("non_existing", vol.Schema({"zone": int}), None),
("zone", vol.Schema({}), None),
],
)
def test_identify_config_schema(domain, schema, expected):
"""Test identify config schema."""
assert (
config_util._identify_config_schema(Mock(DOMAIN=domain, CONFIG_SCHEMA=schema))[
0
]
config_util._identify_config_schema(Mock(DOMAIN=domain, CONFIG_SCHEMA=schema))
== expected
)