mirror of
https://github.com/home-assistant/core.git
synced 2025-09-27 22:09:24 +00:00
Compare commits
86 Commits
Author | SHA1 | Date | |
---|---|---|---|
![]() |
3bcf2f9fd5 | ||
![]() |
a72dd6b917 | ||
![]() |
36a1fe5f54 | ||
![]() |
303b7efd4b | ||
![]() |
0323aeb660 | ||
![]() |
b2f50d0a3a | ||
![]() |
58e8162d18 | ||
![]() |
8036b709e0 | ||
![]() |
e80ffe16f7 | ||
![]() |
99010bab18 | ||
![]() |
8d5e15df3d | ||
![]() |
efffc82414 | ||
![]() |
06b99a1ff9 | ||
![]() |
fd79c141b2 | ||
![]() |
b541b08d2d | ||
![]() |
42c9776c97 | ||
![]() |
897340b115 | ||
![]() |
e072d8b694 | ||
![]() |
52d8c66a8b | ||
![]() |
a26413c989 | ||
![]() |
c9d53dec8d | ||
![]() |
dae2117078 | ||
![]() |
72b95f7529 | ||
![]() |
56436e6387 | ||
![]() |
7cdbbc248f | ||
![]() |
cca2594225 | ||
![]() |
22bef1d0d3 | ||
![]() |
b2885f6cc5 | ||
![]() |
3a1ee2f654 | ||
![]() |
26ba956242 | ||
![]() |
90d8874825 | ||
![]() |
22f76a9363 | ||
![]() |
cc4d71f942 | ||
![]() |
0a55f024a4 | ||
![]() |
60fe64d119 | ||
![]() |
a3692859e9 | ||
![]() |
55958bcfb7 | ||
![]() |
cde6400482 | ||
![]() |
d2077acc92 | ||
![]() |
f4991794d4 | ||
![]() |
088fb7eff3 | ||
![]() |
c3e679f69b | ||
![]() |
aa8e336af5 | ||
![]() |
ac87c0eea2 | ||
![]() |
281456b252 | ||
![]() |
aefa305f77 | ||
![]() |
a11fa832ef | ||
![]() |
0470142701 | ||
![]() |
3c22834751 | ||
![]() |
570d1e7d8f | ||
![]() |
23be039b7f | ||
![]() |
859632d636 | ||
![]() |
874cbf808e | ||
![]() |
56b5ddb06d | ||
![]() |
df371d99dc | ||
![]() |
74b5db9ca5 | ||
![]() |
6f4225b51d | ||
![]() |
b524cc9c56 | ||
![]() |
a6d50ba89b | ||
![]() |
228de5807c | ||
![]() |
d4b40154e5 | ||
![]() |
6e3aa004c4 | ||
![]() |
149cc5cbeb | ||
![]() |
37acf9b165 | ||
![]() |
5c4ba23ca9 | ||
![]() |
abc42efe08 | ||
![]() |
17ebc85b62 | ||
![]() |
681cd92627 | ||
![]() |
7fc4b196bd | ||
![]() |
f114419359 | ||
![]() |
751f041009 | ||
![]() |
44b1b87e13 | ||
![]() |
1ef3d856a6 | ||
![]() |
2707bbeb23 | ||
![]() |
f89ba74410 | ||
![]() |
5f088c7ff0 | ||
![]() |
0be678c21c | ||
![]() |
953a1ddc8c | ||
![]() |
fd91d21b28 | ||
![]() |
6e29eb3165 | ||
![]() |
10bb399449 | ||
![]() |
9312fd141e | ||
![]() |
8bfddbc4b2 | ||
![]() |
c0d4e1eaf4 | ||
![]() |
0902caa7e4 | ||
![]() |
b9931aabe7 |
@@ -1050,6 +1050,7 @@ omit =
|
||||
homeassistant/components/zhong_hong/climate.py
|
||||
homeassistant/components/xbee/*
|
||||
homeassistant/components/ziggo_mediabox_xl/media_player.py
|
||||
homeassistant/components/zoneminder/*
|
||||
homeassistant/components/supla/*
|
||||
homeassistant/components/zwave/util.py
|
||||
homeassistant/components/ozw/__init__.py
|
||||
|
@@ -512,7 +512,7 @@ homeassistant/components/zerproc/* @emlove
|
||||
homeassistant/components/zha/* @dmulcahey @adminiuga
|
||||
homeassistant/components/zodiac/* @JulienTant
|
||||
homeassistant/components/zone/* @home-assistant/core
|
||||
homeassistant/components/zoneminder/* @rohankapoorcom @vangorra
|
||||
homeassistant/components/zoneminder/* @rohankapoorcom
|
||||
homeassistant/components/zwave/* @home-assistant/z-wave
|
||||
|
||||
# Individual files
|
||||
|
@@ -115,10 +115,10 @@ stages:
|
||||
docker run --rm --privileged \
|
||||
-v ~/.docker:/root/.docker \
|
||||
-v /run/docker.sock:/run/docker.sock:rw \
|
||||
-v $(pwd):/data:ro \
|
||||
homeassistant/amd64-builder:$(versionBuilder) \
|
||||
--homeassistant-machine "$(homeassistantRelease)=$(buildMachine)" \
|
||||
-r https://github.com/home-assistant/hassio-homeassistant \
|
||||
-t machine --docker-hub homeassistant
|
||||
-t /data/machine --docker-hub homeassistant
|
||||
displayName: 'Build Release'
|
||||
|
||||
- stage: 'Publish'
|
||||
|
@@ -33,6 +33,7 @@ from homeassistant.const import (
|
||||
CONF_NAME,
|
||||
TEMP_CELSIUS,
|
||||
TEMP_FAHRENHEIT,
|
||||
__version__,
|
||||
)
|
||||
from homeassistant.core import HomeAssistant, State, callback
|
||||
from homeassistant.helpers import network
|
||||
@@ -286,6 +287,12 @@ class AlexaEntity:
|
||||
"friendlyName": self.friendly_name(),
|
||||
"description": self.description(),
|
||||
"manufacturerName": "Home Assistant",
|
||||
"additionalAttributes": {
|
||||
"manufacturer": "Home Assistant",
|
||||
"model": self.entity.domain,
|
||||
"softwareVersion": __version__,
|
||||
"customIdentifier": self.entity_id,
|
||||
},
|
||||
}
|
||||
|
||||
locale = self.config.locale
|
||||
|
@@ -2,6 +2,6 @@
|
||||
"domain": "apprise",
|
||||
"name": "Apprise",
|
||||
"documentation": "https://www.home-assistant.io/integrations/apprise",
|
||||
"requirements": ["apprise==0.8.8"],
|
||||
"requirements": ["apprise==0.8.9"],
|
||||
"codeowners": ["@caronc"]
|
||||
}
|
||||
|
@@ -28,12 +28,8 @@ PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
|
||||
|
||||
def get_service(hass, config, discovery_info=None):
|
||||
"""Get the Apprise notification service."""
|
||||
|
||||
# Create our Apprise Asset Object
|
||||
asset = apprise.AppriseAsset(async_mode=False)
|
||||
|
||||
# Create our Apprise Instance (reference our asset)
|
||||
a_obj = apprise.Apprise(asset=asset)
|
||||
a_obj = apprise.Apprise()
|
||||
|
||||
if config.get(CONF_FILE):
|
||||
# Sourced from a Configuration File
|
||||
|
@@ -3,7 +3,7 @@
|
||||
"name": "Arris TG2492LG",
|
||||
"documentation": "https://www.home-assistant.io/integrations/arris_tg2492lg",
|
||||
"requirements": [
|
||||
"arris-tg2492lg==1.0.0"
|
||||
"arris-tg2492lg==1.1.0"
|
||||
],
|
||||
"codeowners": [
|
||||
"@vanbalken"
|
||||
|
@@ -10,7 +10,7 @@ from homeassistant.config import async_log_exception, config_without_domain
|
||||
from homeassistant.exceptions import HomeAssistantError
|
||||
from homeassistant.helpers import config_per_platform
|
||||
from homeassistant.helpers.condition import async_validate_condition_config
|
||||
from homeassistant.helpers.script import async_validate_action_config
|
||||
from homeassistant.helpers.script import async_validate_actions_config
|
||||
from homeassistant.helpers.trigger import async_validate_trigger_config
|
||||
from homeassistant.loader import IntegrationNotFound
|
||||
|
||||
@@ -36,9 +36,7 @@ async def async_validate_config_item(hass, config, full_config=None):
|
||||
]
|
||||
)
|
||||
|
||||
config[CONF_ACTION] = await asyncio.gather(
|
||||
*[async_validate_action_config(hass, action) for action in config[CONF_ACTION]]
|
||||
)
|
||||
config[CONF_ACTION] = await async_validate_actions_config(hass, config[CONF_ACTION])
|
||||
|
||||
return config
|
||||
|
||||
|
@@ -39,7 +39,6 @@ from homeassistant.components.media_player.const import (
|
||||
SUPPORT_VOLUME_SET,
|
||||
)
|
||||
from homeassistant.const import (
|
||||
CAST_APP_ID_HOMEASSISTANT,
|
||||
CONF_HOST,
|
||||
EVENT_HOMEASSISTANT_STOP,
|
||||
STATE_IDLE,
|
||||
@@ -87,7 +86,7 @@ SUPPORT_CAST = (
|
||||
|
||||
|
||||
ENTITY_SCHEMA = vol.All(
|
||||
cv.deprecated(CONF_HOST, invalidation_version="0.116"),
|
||||
cv.deprecated(CONF_HOST),
|
||||
vol.Schema(
|
||||
{
|
||||
vol.Exclusive(CONF_HOST, "device_identifier"): cv.string,
|
||||
@@ -98,7 +97,7 @@ ENTITY_SCHEMA = vol.All(
|
||||
)
|
||||
|
||||
PLATFORM_SCHEMA = vol.All(
|
||||
cv.deprecated(CONF_HOST, invalidation_version="0.116"),
|
||||
cv.deprecated(CONF_HOST),
|
||||
PLATFORM_SCHEMA.extend(
|
||||
{
|
||||
vol.Exclusive(CONF_HOST, "device_identifier"): cv.string,
|
||||
@@ -292,7 +291,6 @@ class CastDevice(MediaPlayerEntity):
|
||||
),
|
||||
ChromeCastZeroconf.get_zeroconf(),
|
||||
)
|
||||
chromecast.media_controller.app_id = CAST_APP_ID_HOMEASSISTANT
|
||||
self._chromecast = chromecast
|
||||
|
||||
if CAST_MULTIZONE_MANAGER_KEY not in self.hass.data:
|
||||
|
@@ -1,4 +1,5 @@
|
||||
"""The devolo_home_control integration."""
|
||||
import asyncio
|
||||
from functools import partial
|
||||
|
||||
from devolo_home_control_api.homecontrol import HomeControl
|
||||
@@ -71,8 +72,13 @@ async def async_setup_entry(hass: HomeAssistantType, entry: ConfigEntry) -> bool
|
||||
|
||||
async def async_unload_entry(hass, config_entry):
|
||||
"""Unload a config entry."""
|
||||
unload = await hass.config_entries.async_forward_entry_unload(
|
||||
config_entry, "switch"
|
||||
unload = all(
|
||||
await asyncio.gather(
|
||||
*[
|
||||
hass.config_entries.async_forward_entry_unload(config_entry, platform)
|
||||
for platform in PLATFORMS
|
||||
]
|
||||
)
|
||||
)
|
||||
|
||||
await hass.async_add_executor_job(
|
||||
|
@@ -155,18 +155,16 @@ class ElkArea(ElkAttachedEntity, AlarmControlPanelEntity, RestoreEntity):
|
||||
self.async_write_ha_state()
|
||||
|
||||
def _watch_area(self, area, changeset):
|
||||
if not changeset.get("log_event"):
|
||||
last_log = changeset.get("last_log")
|
||||
if not last_log:
|
||||
return
|
||||
# user_number only set for arm/disarm logs
|
||||
if not last_log.get("user_number"):
|
||||
return
|
||||
self._changed_by_keypad = None
|
||||
self._changed_by_id = area.log_number
|
||||
self._changed_by = username(self._elk, area.log_number - 1)
|
||||
self._changed_by_time = "%04d-%02d-%02dT%02d:%02d" % (
|
||||
area.log_year,
|
||||
area.log_month,
|
||||
area.log_day,
|
||||
area.log_hour,
|
||||
area.log_minute,
|
||||
)
|
||||
self._changed_by_id = last_log["user_number"]
|
||||
self._changed_by = username(self._elk, self._changed_by_id - 1)
|
||||
self._changed_by_time = last_log["timestamp"]
|
||||
self.async_write_ha_state()
|
||||
|
||||
@property
|
||||
|
@@ -2,7 +2,7 @@
|
||||
"domain": "elkm1",
|
||||
"name": "Elk-M1 Control",
|
||||
"documentation": "https://www.home-assistant.io/integrations/elkm1",
|
||||
"requirements": ["elkm1-lib==0.7.19"],
|
||||
"requirements": ["elkm1-lib==0.8.0"],
|
||||
"codeowners": ["@gwww", "@bdraco"],
|
||||
"config_flow": true
|
||||
}
|
||||
|
@@ -2,7 +2,7 @@
|
||||
"domain": "frontend",
|
||||
"name": "Home Assistant Frontend",
|
||||
"documentation": "https://www.home-assistant.io/integrations/frontend",
|
||||
"requirements": ["home-assistant-frontend==20200930.0"],
|
||||
"requirements": ["home-assistant-frontend==20201001.2"],
|
||||
"dependencies": [
|
||||
"api",
|
||||
"auth",
|
||||
|
@@ -91,6 +91,7 @@ class GroupIntegrationRegistry:
|
||||
"""Class to hold a registry of integrations."""
|
||||
|
||||
on_off_mapping: Dict[str, str] = {STATE_ON: STATE_OFF}
|
||||
off_on_mapping: Dict[str, str] = {STATE_OFF: STATE_ON}
|
||||
on_states_by_domain: Dict[str, Set] = {}
|
||||
exclude_domains: Set = set()
|
||||
|
||||
@@ -99,11 +100,14 @@ class GroupIntegrationRegistry:
|
||||
self.exclude_domains.add(current_domain.get())
|
||||
|
||||
def on_off_states(self, on_states: Set, off_state: str) -> None:
|
||||
"""Registry on and off states for the current domain."""
|
||||
"""Register on and off states for the current domain."""
|
||||
for on_state in on_states:
|
||||
if on_state not in self.on_off_mapping:
|
||||
self.on_off_mapping[on_state] = off_state
|
||||
|
||||
if len(on_states) == 1 and off_state not in self.off_on_mapping:
|
||||
self.off_on_mapping[off_state] = list(on_states)[0]
|
||||
|
||||
self.on_states_by_domain[current_domain.get()] = set(on_states)
|
||||
|
||||
|
||||
@@ -543,6 +547,7 @@ class Group(Entity):
|
||||
data = {ATTR_ENTITY_ID: self.tracking, ATTR_ORDER: self._order}
|
||||
if not self.user_defined:
|
||||
data[ATTR_AUTO] = True
|
||||
|
||||
return data
|
||||
|
||||
@property
|
||||
@@ -577,6 +582,7 @@ class Group(Entity):
|
||||
return
|
||||
|
||||
excluded_domains = self.hass.data[REG_KEY].exclude_domains
|
||||
|
||||
tracking = []
|
||||
trackable = []
|
||||
for ent_id in entity_ids:
|
||||
@@ -592,6 +598,7 @@ class Group(Entity):
|
||||
@callback
|
||||
def _async_start(self, *_):
|
||||
"""Start tracking members and write state."""
|
||||
self._reset_tracked_state()
|
||||
self._async_start_tracking()
|
||||
self.async_write_ha_state()
|
||||
|
||||
@@ -625,15 +632,14 @@ class Group(Entity):
|
||||
|
||||
async def async_added_to_hass(self):
|
||||
"""Handle addition to Home Assistant."""
|
||||
if self.tracking:
|
||||
self._reset_tracked_state()
|
||||
|
||||
if self.hass.state != CoreState.running:
|
||||
self.hass.bus.async_listen_once(
|
||||
EVENT_HOMEASSISTANT_START, self._async_start
|
||||
)
|
||||
return
|
||||
|
||||
if self.tracking:
|
||||
self._reset_tracked_state()
|
||||
self._async_start_tracking()
|
||||
|
||||
async def async_will_remove_from_hass(self):
|
||||
@@ -671,19 +677,26 @@ class Group(Entity):
|
||||
if state is not None:
|
||||
self._see_state(state)
|
||||
|
||||
def _see_state(self, state):
|
||||
def _see_state(self, new_state):
|
||||
"""Keep track of the the state."""
|
||||
entity_id = state.entity_id
|
||||
domain = state.domain
|
||||
entity_id = new_state.entity_id
|
||||
domain = new_state.domain
|
||||
state = new_state.state
|
||||
registry = self.hass.data[REG_KEY]
|
||||
self._assumed[entity_id] = new_state.attributes.get(ATTR_ASSUMED_STATE)
|
||||
|
||||
domain_on_state = self.hass.data[REG_KEY].on_states_by_domain.get(
|
||||
domain, {STATE_ON}
|
||||
)
|
||||
self._on_off[entity_id] = state.state in domain_on_state
|
||||
self._assumed[entity_id] = state.attributes.get(ATTR_ASSUMED_STATE)
|
||||
|
||||
if domain in self.hass.data[REG_KEY].on_states_by_domain:
|
||||
self._on_states.update(domain_on_state)
|
||||
if domain not in registry.on_states_by_domain:
|
||||
# Handle the group of a group case
|
||||
if state in registry.on_off_mapping:
|
||||
self._on_states.add(state)
|
||||
elif state in registry.off_on_mapping:
|
||||
self._on_states.add(registry.off_on_mapping[state])
|
||||
self._on_off[entity_id] = state in registry.on_off_mapping
|
||||
else:
|
||||
entity_on_state = registry.on_states_by_domain[domain]
|
||||
if domain in self.hass.data[REG_KEY].on_states_by_domain:
|
||||
self._on_states.update(entity_on_state)
|
||||
self._on_off[entity_id] = state in entity_on_state
|
||||
|
||||
@callback
|
||||
def _async_update_group_state(self, tr_state=None):
|
||||
@@ -726,7 +739,6 @@ class Group(Entity):
|
||||
# on state, we use STATE_ON/STATE_OFF
|
||||
else:
|
||||
on_state = STATE_ON
|
||||
|
||||
group_is_on = self.mode(self._on_off.values())
|
||||
if group_is_on:
|
||||
self._state = on_state
|
||||
|
@@ -65,6 +65,7 @@ class HassIOView(HomeAssistantView):
|
||||
|
||||
return await self._command_proxy(path, request)
|
||||
|
||||
delete = _handle
|
||||
get = _handle
|
||||
post = _handle
|
||||
|
||||
|
@@ -2,6 +2,6 @@
|
||||
"domain": "hive",
|
||||
"name": "Hive",
|
||||
"documentation": "https://www.home-assistant.io/integrations/hive",
|
||||
"requirements": ["pyhiveapi==0.2.20.1"],
|
||||
"requirements": ["pyhiveapi==0.2.20.2"],
|
||||
"codeowners": ["@Rendili", "@KJonline"]
|
||||
}
|
||||
|
@@ -1,7 +1,7 @@
|
||||
"""Offer state listening automation rules."""
|
||||
from datetime import timedelta
|
||||
import logging
|
||||
from typing import Dict, Optional
|
||||
from typing import Any, Dict, Optional
|
||||
|
||||
import voluptuous as vol
|
||||
|
||||
@@ -25,18 +25,43 @@ CONF_ENTITY_ID = "entity_id"
|
||||
CONF_FROM = "from"
|
||||
CONF_TO = "to"
|
||||
|
||||
TRIGGER_SCHEMA = vol.Schema(
|
||||
BASE_SCHEMA = {
|
||||
vol.Required(CONF_PLATFORM): "state",
|
||||
vol.Required(CONF_ENTITY_ID): cv.entity_ids,
|
||||
vol.Optional(CONF_FOR): cv.positive_time_period_template,
|
||||
vol.Optional(CONF_ATTRIBUTE): cv.match_all,
|
||||
}
|
||||
|
||||
TRIGGER_STATE_SCHEMA = vol.Schema(
|
||||
{
|
||||
vol.Required(CONF_PLATFORM): "state",
|
||||
vol.Required(CONF_ENTITY_ID): cv.entity_ids,
|
||||
**BASE_SCHEMA,
|
||||
# These are str on purpose. Want to catch YAML conversions
|
||||
vol.Optional(CONF_FROM): vol.Any(str, [str]),
|
||||
vol.Optional(CONF_TO): vol.Any(str, [str]),
|
||||
vol.Optional(CONF_FOR): cv.positive_time_period_template,
|
||||
vol.Optional(CONF_ATTRIBUTE): cv.match_all,
|
||||
}
|
||||
)
|
||||
|
||||
TRIGGER_ATTRIBUTE_SCHEMA = vol.Schema(
|
||||
{
|
||||
**BASE_SCHEMA,
|
||||
vol.Optional(CONF_FROM): cv.match_all,
|
||||
vol.Optional(CONF_TO): cv.match_all,
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
def TRIGGER_SCHEMA(value: Any) -> dict: # pylint: disable=invalid-name
|
||||
"""Validate trigger."""
|
||||
if not isinstance(value, dict):
|
||||
raise vol.Invalid("Expected a dictionary")
|
||||
|
||||
# We use this approach instead of vol.Any because
|
||||
# this gives better error messages.
|
||||
if CONF_ATTRIBUTE in value:
|
||||
return TRIGGER_ATTRIBUTE_SCHEMA(value)
|
||||
|
||||
return TRIGGER_STATE_SCHEMA(value)
|
||||
|
||||
|
||||
async def async_attach_trigger(
|
||||
hass: HomeAssistant,
|
||||
|
@@ -3,5 +3,5 @@
|
||||
"name": "KEF",
|
||||
"documentation": "https://www.home-assistant.io/integrations/kef",
|
||||
"codeowners": ["@basnijholt"],
|
||||
"requirements": ["aiokef==0.2.13", "getmac==0.8.2"]
|
||||
"requirements": ["aiokef==0.2.16", "getmac==0.8.2"]
|
||||
}
|
||||
|
@@ -1,6 +1,5 @@
|
||||
"""Platform for the KEF Wireless Speakers."""
|
||||
|
||||
import asyncio
|
||||
from datetime import timedelta
|
||||
from functools import partial
|
||||
import ipaddress
|
||||
@@ -164,7 +163,11 @@ async def async_setup_platform(hass, config, async_add_entities, discovery_info=
|
||||
dtype = type(options[0]) # int or float
|
||||
platform.async_register_entity_service(
|
||||
name,
|
||||
{vol.Required(option): vol.All(vol.Coerce(dtype), vol.In(options))},
|
||||
{
|
||||
vol.Required(option): vol.All(
|
||||
vol.Coerce(float), vol.Coerce(dtype), vol.In(options)
|
||||
)
|
||||
},
|
||||
f"set_{which}",
|
||||
)
|
||||
|
||||
@@ -365,17 +368,16 @@ class KefMediaPlayer(MediaPlayerEntity):
|
||||
# The LSX is able to respond when off the LS50 has to be on.
|
||||
return
|
||||
|
||||
(mode, *rest) = await asyncio.gather(
|
||||
self._speaker.get_mode(),
|
||||
self._speaker.get_desk_db(),
|
||||
self._speaker.get_wall_db(),
|
||||
self._speaker.get_treble_db(),
|
||||
self._speaker.get_high_hz(),
|
||||
self._speaker.get_low_hz(),
|
||||
self._speaker.get_sub_db(),
|
||||
mode = await self._speaker.get_mode()
|
||||
self._dsp = dict(
|
||||
desk_db=await self._speaker.get_desk_db(),
|
||||
wall_db=await self._speaker.get_wall_db(),
|
||||
treble_db=await self._speaker.get_treble_db(),
|
||||
high_hz=await self._speaker.get_high_hz(),
|
||||
low_hz=await self._speaker.get_low_hz(),
|
||||
sub_db=await self._speaker.get_sub_db(),
|
||||
**mode._asdict(),
|
||||
)
|
||||
keys = ["desk_db", "wall_db", "treble_db", "high_hz", "low_hz", "sub_db"]
|
||||
self._dsp = dict(zip(keys, rest), **mode._asdict())
|
||||
|
||||
async def async_added_to_hass(self):
|
||||
"""Subscribe to DSP updates."""
|
||||
|
@@ -2,16 +2,22 @@
|
||||
|
||||
|
||||
from homeassistant.components.group import GroupIntegrationRegistry
|
||||
from homeassistant.const import STATE_OFF
|
||||
from homeassistant.const import (
|
||||
STATE_IDLE,
|
||||
STATE_OFF,
|
||||
STATE_ON,
|
||||
STATE_PAUSED,
|
||||
STATE_PLAYING,
|
||||
)
|
||||
from homeassistant.core import callback
|
||||
from homeassistant.helpers.typing import HomeAssistantType
|
||||
|
||||
from . import STATE_IDLE, STATE_PLAYING
|
||||
|
||||
|
||||
@callback
|
||||
def async_describe_on_off_states(
|
||||
hass: HomeAssistantType, registry: GroupIntegrationRegistry
|
||||
) -> None:
|
||||
"""Describe group on off states."""
|
||||
registry.on_off_states({STATE_PLAYING, STATE_IDLE}, STATE_OFF)
|
||||
registry.on_off_states(
|
||||
{STATE_ON, STATE_PAUSED, STATE_PLAYING, STATE_IDLE}, STATE_OFF
|
||||
)
|
||||
|
@@ -3,7 +3,7 @@
|
||||
"name": "MQTT",
|
||||
"config_flow": true,
|
||||
"documentation": "https://www.home-assistant.io/integrations/mqtt",
|
||||
"requirements": ["paho-mqtt==1.5.1"],
|
||||
"requirements": ["paho-mqtt==1.5.0"],
|
||||
"dependencies": ["http"],
|
||||
"codeowners": ["@home-assistant/core", "@emontnemery"]
|
||||
}
|
||||
|
@@ -315,7 +315,7 @@ class NetatmoThermostat(NetatmoBase, ClimateEntity):
|
||||
@property
|
||||
def hvac_action(self) -> Optional[str]:
|
||||
"""Return the current running hvac operation if supported."""
|
||||
if self._model == NA_THERM:
|
||||
if self._model == NA_THERM and self._boilerstatus is not None:
|
||||
return CURRENT_HVAC_MAP_NETATMO[self._boilerstatus]
|
||||
# Maybe it is a valve
|
||||
if self._room_status and self._room_status.get("heating_power_request", 0) > 0:
|
||||
|
@@ -41,7 +41,7 @@ DEFAULT_INTERVALS = {
|
||||
HOMEDATA_DATA_CLASS_NAME: 900,
|
||||
HOMESTATUS_DATA_CLASS_NAME: 300,
|
||||
CAMERA_DATA_CLASS_NAME: 900,
|
||||
WEATHERSTATION_DATA_CLASS_NAME: 300,
|
||||
WEATHERSTATION_DATA_CLASS_NAME: 600,
|
||||
HOMECOACH_DATA_CLASS_NAME: 300,
|
||||
PUBLICDATA_DATA_CLASS_NAME: 600,
|
||||
}
|
||||
|
@@ -3,7 +3,7 @@
|
||||
"name": "Netatmo",
|
||||
"documentation": "https://www.home-assistant.io/integrations/netatmo",
|
||||
"requirements": [
|
||||
"pyatmo==4.0.0"
|
||||
"pyatmo==4.1.0"
|
||||
],
|
||||
"after_dependencies": [
|
||||
"cloud",
|
||||
|
@@ -136,6 +136,7 @@ async def async_setup_entry(hass, entry, async_add_entities):
|
||||
conditions = [
|
||||
c.lower()
|
||||
for c in data_class.get_monitored_conditions(module_id=module["_id"])
|
||||
if c.lower() in SENSOR_TYPES
|
||||
]
|
||||
for condition in conditions:
|
||||
if f"{condition}_value" in SENSOR_TYPES:
|
||||
|
@@ -119,7 +119,7 @@ class OmniLogicTemperatureSensor(OmnilogicSensor):
|
||||
state = sensor_data
|
||||
|
||||
if self._unit_type == "Metric":
|
||||
hayward_state = round((hayward_state - 32) * 5 / 9, 1)
|
||||
hayward_state = round((int(hayward_state) - 32) * 5 / 9, 1)
|
||||
hayward_unit_of_measure = TEMP_CELSIUS
|
||||
|
||||
if int(sensor_data) == -1:
|
||||
@@ -175,7 +175,7 @@ class OmniLogicSaltLevelSensor(OmnilogicSensor):
|
||||
unit_of_measurement = self._unit
|
||||
|
||||
if self._unit_type == "Metric":
|
||||
salt_return = round(salt_return / 1000, 2)
|
||||
salt_return = round(int(salt_return) / 1000, 2)
|
||||
unit_of_measurement = f"{MASS_GRAMS}/{VOLUME_LITERS}"
|
||||
|
||||
self._unit = unit_of_measurement
|
||||
@@ -279,7 +279,7 @@ SENSOR_TYPES = {
|
||||
"icon": "mdi:speedometer",
|
||||
"unit": PERCENTAGE,
|
||||
"guard_condition": [
|
||||
{"Type": "FMT_SINGLE_SPEED"},
|
||||
{"Filter-Type": "FMT_SINGLE_SPEED"},
|
||||
],
|
||||
},
|
||||
],
|
||||
|
@@ -2,7 +2,11 @@
|
||||
"domain": "onvif",
|
||||
"name": "ONVIF",
|
||||
"documentation": "https://www.home-assistant.io/integrations/onvif",
|
||||
"requirements": ["onvif-zeep-async==0.5.0", "WSDiscovery==2.0.0"],
|
||||
"requirements": [
|
||||
"onvif-zeep-async==0.6.0",
|
||||
"WSDiscovery==2.0.0",
|
||||
"zeep[async]==3.4.0"
|
||||
],
|
||||
"dependencies": ["ffmpeg"],
|
||||
"codeowners": ["@hunterjm"],
|
||||
"config_flow": true
|
||||
|
15
homeassistant/components/plant/group.py
Normal file
15
homeassistant/components/plant/group.py
Normal file
@@ -0,0 +1,15 @@
|
||||
"""Describe group states."""
|
||||
|
||||
|
||||
from homeassistant.components.group import GroupIntegrationRegistry
|
||||
from homeassistant.const import STATE_OK, STATE_PROBLEM
|
||||
from homeassistant.core import callback
|
||||
from homeassistant.helpers.typing import HomeAssistantType
|
||||
|
||||
|
||||
@callback
|
||||
def async_describe_on_off_states(
|
||||
hass: HomeAssistantType, registry: GroupIntegrationRegistry
|
||||
) -> None:
|
||||
"""Describe group on off states."""
|
||||
registry.on_off_states({STATE_PROBLEM}, STATE_OK)
|
@@ -5,7 +5,14 @@ import json
|
||||
import logging
|
||||
|
||||
import plexapi.exceptions
|
||||
from plexwebsocket import PlexWebsocket
|
||||
from plexwebsocket import (
|
||||
SIGNAL_CONNECTION_STATE,
|
||||
SIGNAL_DATA,
|
||||
STATE_CONNECTED,
|
||||
STATE_DISCONNECTED,
|
||||
STATE_STOPPED,
|
||||
PlexWebsocket,
|
||||
)
|
||||
import requests.exceptions
|
||||
import voluptuous as vol
|
||||
|
||||
@@ -14,7 +21,7 @@ from homeassistant.components.media_player.const import (
|
||||
ATTR_MEDIA_CONTENT_ID,
|
||||
ATTR_MEDIA_CONTENT_TYPE,
|
||||
)
|
||||
from homeassistant.config_entries import SOURCE_REAUTH
|
||||
from homeassistant.config_entries import ENTRY_STATE_SETUP_RETRY, SOURCE_REAUTH
|
||||
from homeassistant.const import (
|
||||
ATTR_ENTITY_ID,
|
||||
CONF_SOURCE,
|
||||
@@ -22,6 +29,7 @@ from homeassistant.const import (
|
||||
CONF_VERIFY_SSL,
|
||||
EVENT_HOMEASSISTANT_STOP,
|
||||
)
|
||||
from homeassistant.core import callback
|
||||
from homeassistant.exceptions import ConfigEntryNotReady, HomeAssistantError
|
||||
from homeassistant.helpers import config_validation as cv
|
||||
from homeassistant.helpers.aiohttp_client import async_get_clientsession
|
||||
@@ -95,11 +103,12 @@ async def async_setup_entry(hass, entry):
|
||||
entry, data={**entry.data, PLEX_SERVER_CONFIG: new_server_data}
|
||||
)
|
||||
except requests.exceptions.ConnectionError as error:
|
||||
_LOGGER.error(
|
||||
"Plex server (%s) could not be reached: [%s]",
|
||||
server_config[CONF_URL],
|
||||
error,
|
||||
)
|
||||
if entry.state != ENTRY_STATE_SETUP_RETRY:
|
||||
_LOGGER.error(
|
||||
"Plex server (%s) could not be reached: [%s]",
|
||||
server_config[CONF_URL],
|
||||
error,
|
||||
)
|
||||
raise ConfigEntryNotReady from error
|
||||
except plexapi.exceptions.Unauthorized:
|
||||
hass.async_create_task(
|
||||
@@ -142,13 +151,36 @@ async def async_setup_entry(hass, entry):
|
||||
hass.data[PLEX_DOMAIN][DISPATCHERS].setdefault(server_id, [])
|
||||
hass.data[PLEX_DOMAIN][DISPATCHERS][server_id].append(unsub)
|
||||
|
||||
def update_plex():
|
||||
async_dispatcher_send(hass, PLEX_UPDATE_PLATFORMS_SIGNAL.format(server_id))
|
||||
@callback
|
||||
def plex_websocket_callback(signal, data, error):
|
||||
"""Handle callbacks from plexwebsocket library."""
|
||||
if signal == SIGNAL_CONNECTION_STATE:
|
||||
|
||||
if data == STATE_CONNECTED:
|
||||
_LOGGER.debug("Websocket to %s successful", entry.data[CONF_SERVER])
|
||||
elif data == STATE_DISCONNECTED:
|
||||
_LOGGER.debug(
|
||||
"Websocket to %s disconnected, retrying", entry.data[CONF_SERVER]
|
||||
)
|
||||
# Stopped websockets without errors are expected during shutdown and ignored
|
||||
elif data == STATE_STOPPED and error:
|
||||
_LOGGER.error(
|
||||
"Websocket to %s failed, aborting [Error: %s]",
|
||||
entry.data[CONF_SERVER],
|
||||
error,
|
||||
)
|
||||
hass.async_create_task(hass.config_entries.async_reload(entry.entry_id))
|
||||
|
||||
elif signal == SIGNAL_DATA:
|
||||
async_dispatcher_send(hass, PLEX_UPDATE_PLATFORMS_SIGNAL.format(server_id))
|
||||
|
||||
session = async_get_clientsession(hass)
|
||||
verify_ssl = server_config.get(CONF_VERIFY_SSL)
|
||||
websocket = PlexWebsocket(
|
||||
plex_server.plex_server, update_plex, session=session, verify_ssl=verify_ssl
|
||||
plex_server.plex_server,
|
||||
plex_websocket_callback,
|
||||
session=session,
|
||||
verify_ssl=verify_ssl,
|
||||
)
|
||||
hass.data[PLEX_DOMAIN][WEBSOCKETS][server_id] = websocket
|
||||
|
||||
|
@@ -6,7 +6,7 @@
|
||||
"requirements": [
|
||||
"plexapi==4.1.1",
|
||||
"plexauth==0.0.5",
|
||||
"plexwebsocket==0.0.11"
|
||||
"plexwebsocket==0.0.12"
|
||||
],
|
||||
"dependencies": ["http"],
|
||||
"after_dependencies": ["sonos"],
|
||||
|
@@ -3,7 +3,7 @@
|
||||
"step": {
|
||||
"user": {
|
||||
"title": "PoolSense",
|
||||
"description": "[%key:common::config_flow::description%]",
|
||||
"description": "[%key:common::config_flow::description::confirm_setup%]",
|
||||
"data": {
|
||||
"email": "[%key:common::config_flow::data::email%]",
|
||||
"password": "[%key:common::config_flow::data::password%]"
|
||||
|
@@ -240,6 +240,7 @@ class Recorder(threading.Thread):
|
||||
self._timechanges_seen = 0
|
||||
self._keepalive_count = 0
|
||||
self._old_states = {}
|
||||
self._pending_expunge = []
|
||||
self.event_session = None
|
||||
self.get_session = None
|
||||
self._completed_database_setup = False
|
||||
@@ -403,6 +404,7 @@ class Recorder(threading.Thread):
|
||||
self.event_session.add(dbstate)
|
||||
if has_new_state:
|
||||
self._old_states[dbstate.entity_id] = dbstate
|
||||
self._pending_expunge.append(dbstate)
|
||||
except (TypeError, ValueError):
|
||||
_LOGGER.warning(
|
||||
"State is not JSON serializable: %s",
|
||||
@@ -488,6 +490,13 @@ class Recorder(threading.Thread):
|
||||
|
||||
def _commit_event_session(self):
|
||||
try:
|
||||
self.event_session.flush()
|
||||
for dbstate in self._pending_expunge:
|
||||
# Expunge the state so its not expired
|
||||
# until we use it later for dbstate.old_state
|
||||
if dbstate in self.event_session:
|
||||
self.event_session.expunge(dbstate)
|
||||
self._pending_expunge = []
|
||||
self.event_session.commit()
|
||||
except Exception as err:
|
||||
_LOGGER.error("Error executing query: %s", err)
|
||||
|
@@ -23,7 +23,12 @@ def temperature_unit(block_info: dict) -> str:
|
||||
def shelly_naming(self, block, entity_type: str):
|
||||
"""Naming for switch and sensors."""
|
||||
|
||||
entity_name = self.wrapper.name
|
||||
if not block:
|
||||
return f"{entity_name} {self.description.name}"
|
||||
|
||||
channels = 0
|
||||
mode = block.type + "s"
|
||||
if "num_outputs" in self.wrapper.device.shelly:
|
||||
channels = self.wrapper.device.shelly["num_outputs"]
|
||||
if (
|
||||
@@ -31,12 +36,20 @@ def shelly_naming(self, block, entity_type: str):
|
||||
and self.wrapper.device.settings["mode"] == "roller"
|
||||
):
|
||||
channels = 1
|
||||
|
||||
entity_name = self.wrapper.name
|
||||
if block.type == "emeter" and "num_emeters" in self.wrapper.device.shelly:
|
||||
channels = self.wrapper.device.shelly["num_emeters"]
|
||||
if channels > 1 and block.type != "device":
|
||||
entity_name = self.wrapper.device.settings["relays"][int(block.channel)]["name"]
|
||||
# Shelly EM (SHEM) with firmware v1.8.1 doesn't have "name" key; will be fixed in next firmware release
|
||||
if "name" in self.wrapper.device.settings[mode][int(block.channel)]:
|
||||
entity_name = self.wrapper.device.settings[mode][int(block.channel)]["name"]
|
||||
else:
|
||||
entity_name = None
|
||||
if not entity_name:
|
||||
entity_name = f"{self.wrapper.name} channel {int(block.channel)+1}"
|
||||
if self.wrapper.model == "SHEM-3":
|
||||
base = ord("A")
|
||||
else:
|
||||
base = ord("1")
|
||||
entity_name = f"{self.wrapper.name} channel {chr(int(block.channel)+base)}"
|
||||
|
||||
if entity_type == "switch":
|
||||
return entity_name
|
||||
|
@@ -3,7 +3,7 @@
|
||||
"name": "Shelly",
|
||||
"config_flow": true,
|
||||
"documentation": "https://www.home-assistant.io/integrations/shelly",
|
||||
"requirements": ["aioshelly==0.3.3"],
|
||||
"zeroconf": [{"type": "_http._tcp.local.", "name":"shelly*"}],
|
||||
"requirements": ["aioshelly==0.3.4"],
|
||||
"zeroconf": [{ "type": "_http._tcp.local.", "name": "shelly*" }],
|
||||
"codeowners": ["@balloob", "@bieniu"]
|
||||
}
|
||||
|
@@ -16,7 +16,7 @@ from simplipy.websocket import (
|
||||
)
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.config_entries import SOURCE_IMPORT
|
||||
from homeassistant.config_entries import SOURCE_IMPORT, SOURCE_REAUTH
|
||||
from homeassistant.const import (
|
||||
ATTR_CODE,
|
||||
CONF_CODE,
|
||||
@@ -365,8 +365,7 @@ async def async_unload_entry(hass, entry):
|
||||
|
||||
async def async_update_options(hass, config_entry):
|
||||
"""Handle an options update."""
|
||||
simplisafe = hass.data[DOMAIN][DATA_CLIENT][config_entry.entry_id]
|
||||
simplisafe.options = config_entry.options
|
||||
await hass.config_entries.async_reload(config_entry.entry_id)
|
||||
|
||||
|
||||
class SimpliSafeWebsocket:
|
||||
@@ -530,17 +529,26 @@ class SimpliSafe:
|
||||
for result in results:
|
||||
if isinstance(result, InvalidCredentialsError):
|
||||
if self._emergency_refresh_token_used:
|
||||
LOGGER.error(
|
||||
"Token disconnected or invalid. Please re-auth the "
|
||||
"SimpliSafe integration in HASS"
|
||||
)
|
||||
self._hass.async_create_task(
|
||||
self._hass.config_entries.flow.async_init(
|
||||
DOMAIN,
|
||||
context={"source": "reauth"},
|
||||
data=self._config_entry.data,
|
||||
matching_flows = [
|
||||
flow
|
||||
for flow in self._hass.config_entries.flow.async_progress()
|
||||
if flow["context"].get("source") == SOURCE_REAUTH
|
||||
and flow["context"].get("unique_id")
|
||||
== self._config_entry.unique_id
|
||||
]
|
||||
|
||||
if not matching_flows:
|
||||
self._hass.async_create_task(
|
||||
self._hass.config_entries.flow.async_init(
|
||||
DOMAIN,
|
||||
context={
|
||||
"source": SOURCE_REAUTH,
|
||||
"unique_id": self._config_entry.unique_id,
|
||||
},
|
||||
data=self._config_entry.data,
|
||||
)
|
||||
)
|
||||
)
|
||||
|
||||
return
|
||||
|
||||
LOGGER.warning("SimpliSafe cloud error; trying stored refresh token")
|
||||
|
@@ -3,6 +3,6 @@
|
||||
"name": "SimpliSafe",
|
||||
"config_flow": true,
|
||||
"documentation": "https://www.home-assistant.io/integrations/simplisafe",
|
||||
"requirements": ["simplisafe-python==9.3.3"],
|
||||
"requirements": ["simplisafe-python==9.4.1"],
|
||||
"codeowners": ["@bachya"]
|
||||
}
|
||||
|
@@ -34,15 +34,18 @@ async def async_setup(hass, config) -> bool:
|
||||
"""Set up the SmartHab platform."""
|
||||
|
||||
hass.data.setdefault(DOMAIN, {})
|
||||
sh_conf = config.get(DOMAIN)
|
||||
|
||||
hass.async_create_task(
|
||||
hass.config_entries.flow.async_init(
|
||||
DOMAIN,
|
||||
context={"source": SOURCE_IMPORT},
|
||||
data=sh_conf,
|
||||
if DOMAIN not in config:
|
||||
return True
|
||||
|
||||
if not hass.config_entries.async_entries(DOMAIN):
|
||||
hass.async_create_task(
|
||||
hass.config_entries.flow.async_init(
|
||||
DOMAIN,
|
||||
context={"source": SOURCE_IMPORT},
|
||||
data=config[DOMAIN],
|
||||
)
|
||||
)
|
||||
)
|
||||
|
||||
return True
|
||||
|
||||
|
@@ -16,6 +16,9 @@ _LOGGER = logging.getLogger(__name__)
|
||||
class SmartHabConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):
|
||||
"""SmartHab config flow."""
|
||||
|
||||
VERSION = 1
|
||||
CONNECTION_CLASS = config_entries.CONN_CLASS_CLOUD_POLL
|
||||
|
||||
def _show_setup_form(self, user_input=None, errors=None):
|
||||
"""Show the setup form to the user."""
|
||||
|
||||
@@ -72,6 +75,6 @@ class SmartHabConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):
|
||||
|
||||
return self._show_setup_form(user_input, errors)
|
||||
|
||||
async def async_step_import(self, user_input):
|
||||
async def async_step_import(self, import_info):
|
||||
"""Handle import from legacy config."""
|
||||
return await self.async_step_user(user_input)
|
||||
return await self.async_step_user(import_info)
|
||||
|
@@ -3,7 +3,7 @@
|
||||
"name": "SmartThings",
|
||||
"config_flow": true,
|
||||
"documentation": "https://www.home-assistant.io/integrations/smartthings",
|
||||
"requirements": ["pysmartapp==0.3.2", "pysmartthings==0.7.3"],
|
||||
"requirements": ["pysmartapp==0.3.2", "pysmartthings==0.7.4"],
|
||||
"dependencies": ["webhook"],
|
||||
"after_dependencies": ["cloud"],
|
||||
"codeowners": ["@andrewsayre"]
|
||||
|
@@ -28,7 +28,7 @@ DEVICES = "devices"
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
SCAN_INTERVAL = timedelta(seconds=30)
|
||||
SCAN_INTERVAL = timedelta(minutes=1)
|
||||
|
||||
|
||||
CONF_OPTIMISTIC = "optimistic"
|
||||
|
@@ -70,7 +70,8 @@ def get_codec_string(segment: io.BytesIO) -> str:
|
||||
):
|
||||
profile = stsd_box[111:112].hex()
|
||||
compatibility = stsd_box[112:113].hex()
|
||||
level = stsd_box[113:114].hex()
|
||||
# Cap level at 4.1 for compatibility with some Google Cast devices
|
||||
level = hex(min(stsd_box[113], 41))[2:]
|
||||
codec += "." + profile + compatibility + level
|
||||
|
||||
# Handle H265
|
||||
|
@@ -32,10 +32,11 @@ class HlsMasterPlaylistView(StreamView):
|
||||
def render(track):
|
||||
"""Render M3U8 file."""
|
||||
# Need to calculate max bandwidth as input_container.bit_rate doesn't seem to work
|
||||
# Calculate file size / duration and use a multiplier to account for variation
|
||||
# Calculate file size / duration and use a small multiplier to account for variation
|
||||
# hls spec already allows for 25% variation
|
||||
segment = track.get_segment(track.segments[-1])
|
||||
bandwidth = round(
|
||||
segment.segment.seek(0, io.SEEK_END) * 8 / segment.duration * 3
|
||||
segment.segment.seek(0, io.SEEK_END) * 8 / segment.duration * 1.2
|
||||
)
|
||||
codecs = get_codec_string(segment.segment)
|
||||
lines = [
|
||||
|
@@ -2,12 +2,13 @@
|
||||
from typing import Dict
|
||||
|
||||
from synology_dsm.api.surveillance_station import SynoSurveillanceStation
|
||||
from synology_dsm.api.surveillance_station.camera import SynoCamera
|
||||
|
||||
from homeassistant.components.camera import SUPPORT_STREAM, Camera
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.helpers.typing import HomeAssistantType
|
||||
|
||||
from . import SynologyDSMEntity
|
||||
from . import SynoApi, SynologyDSMEntity
|
||||
from .const import (
|
||||
DOMAIN,
|
||||
ENTITY_CLASS,
|
||||
@@ -40,7 +41,7 @@ async def async_setup_entry(
|
||||
class SynoDSMCamera(SynologyDSMEntity, Camera):
|
||||
"""Representation a Synology camera."""
|
||||
|
||||
def __init__(self, api, camera):
|
||||
def __init__(self, api: SynoApi, camera: SynoCamera):
|
||||
"""Initialize a Synology camera."""
|
||||
super().__init__(
|
||||
api,
|
||||
@@ -69,6 +70,11 @@ class SynoDSMCamera(SynologyDSMEntity, Camera):
|
||||
),
|
||||
}
|
||||
|
||||
@property
|
||||
def available(self) -> bool:
|
||||
"""Return the availability of the camera."""
|
||||
return self._camera.is_enabled
|
||||
|
||||
@property
|
||||
def supported_features(self) -> int:
|
||||
"""Return supported features of this camera."""
|
||||
@@ -86,10 +92,14 @@ class SynoDSMCamera(SynologyDSMEntity, Camera):
|
||||
|
||||
def camera_image(self) -> bytes:
|
||||
"""Return bytes of camera image."""
|
||||
if not self.available:
|
||||
return None
|
||||
return self._api.surveillance_station.get_camera_image(self._camera.id)
|
||||
|
||||
async def stream_source(self) -> str:
|
||||
"""Return the source of the stream."""
|
||||
if not self.available:
|
||||
return None
|
||||
return self._camera.live_view.rtsp
|
||||
|
||||
def enable_motion_detection(self):
|
||||
|
@@ -126,7 +126,7 @@ class SynologyDSMFlowHandler(config_entries.ConfigFlow, domain=DOMAIN):
|
||||
else:
|
||||
port = DEFAULT_PORT
|
||||
|
||||
api = SynologyDSM(host, port, username, password, use_ssl)
|
||||
api = SynologyDSM(host, port, username, password, use_ssl, timeout=30)
|
||||
|
||||
try:
|
||||
serial = await self.hass.async_add_executor_job(
|
||||
|
@@ -87,7 +87,7 @@ SENSOR_TYPES = {
|
||||
None,
|
||||
False,
|
||||
],
|
||||
"swap_free": ["Swap free", DATA_MEBIBYTES, "mdi:harddisk", None, True],
|
||||
"swap_free": ["Swap free", DATA_MEBIBYTES, "mdi:harddisk", None, False],
|
||||
"swap_use": ["Swap use", DATA_MEBIBYTES, "mdi:harddisk", None, False],
|
||||
"swap_use_percent": ["Swap use (percent)", PERCENTAGE, "mdi:harddisk", None, False],
|
||||
}
|
||||
|
@@ -234,9 +234,7 @@ class TemplateEntity(Entity):
|
||||
else:
|
||||
self._self_ref_update_count = 0
|
||||
|
||||
# If we need to make this less sensitive in the future,
|
||||
# change the '>=' to a '>' here.
|
||||
if self._self_ref_update_count >= len(self._template_attrs):
|
||||
if self._self_ref_update_count > len(self._template_attrs):
|
||||
for update in updates:
|
||||
_LOGGER.warning(
|
||||
"Template loop detected while processing event: %s, skipping template render for Template[%s]",
|
||||
|
@@ -11,7 +11,7 @@ from typing import Dict, Optional
|
||||
|
||||
from aiohttp import web
|
||||
import mutagen
|
||||
from mutagen.id3 import TextFrame as ID3Text
|
||||
from mutagen.id3 import ID3FileType, TextFrame as ID3Text
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.components.http import HomeAssistantView
|
||||
@@ -468,9 +468,14 @@ class SpeechManager:
|
||||
try:
|
||||
tts_file = mutagen.File(data_bytes)
|
||||
if tts_file is not None:
|
||||
tts_file["artist"] = ID3Text(encoding=3, text=artist)
|
||||
tts_file["album"] = ID3Text(encoding=3, text=album)
|
||||
tts_file["title"] = ID3Text(encoding=3, text=message)
|
||||
if isinstance(tts_file, ID3FileType):
|
||||
tts_file["artist"] = ID3Text(encoding=3, text=artist)
|
||||
tts_file["album"] = ID3Text(encoding=3, text=album)
|
||||
tts_file["title"] = ID3Text(encoding=3, text=message)
|
||||
else:
|
||||
tts_file["artist"] = artist
|
||||
tts_file["album"] = album
|
||||
tts_file["title"] = message
|
||||
tts_file.save(data_bytes)
|
||||
except mutagen.MutagenError as err:
|
||||
_LOGGER.error("ID3 tag error: %s", err)
|
||||
|
@@ -56,7 +56,9 @@ async def async_discover_and_construct(
|
||||
filtered = [di for di in discovery_infos if di[DISCOVERY_ST] == st]
|
||||
if not filtered:
|
||||
_LOGGER.warning(
|
||||
'Wanted UPnP/IGD device with UDN "%s" not found, aborting', udn
|
||||
'Wanted UPnP/IGD device with UDN/ST "%s"/"%s" not found, aborting',
|
||||
udn,
|
||||
st,
|
||||
)
|
||||
return None
|
||||
|
||||
@@ -104,7 +106,7 @@ async def async_setup_entry(hass: HomeAssistantType, config_entry: ConfigEntry)
|
||||
"""Set up UPnP/IGD device from a config entry."""
|
||||
_LOGGER.debug("async_setup_entry, config_entry: %s", config_entry.data)
|
||||
|
||||
# discover and construct
|
||||
# Discover and construct.
|
||||
udn = config_entry.data.get(CONFIG_ENTRY_UDN)
|
||||
st = config_entry.data.get(CONFIG_ENTRY_ST) # pylint: disable=invalid-name
|
||||
try:
|
||||
@@ -116,11 +118,11 @@ async def async_setup_entry(hass: HomeAssistantType, config_entry: ConfigEntry)
|
||||
_LOGGER.info("Unable to create UPnP/IGD, aborting")
|
||||
raise ConfigEntryNotReady
|
||||
|
||||
# Save device
|
||||
# Save device.
|
||||
hass.data[DOMAIN][DOMAIN_DEVICES][device.udn] = device
|
||||
|
||||
# Ensure entry has proper unique_id.
|
||||
if config_entry.unique_id != device.unique_id:
|
||||
# Ensure entry has a unique_id.
|
||||
if not config_entry.unique_id:
|
||||
hass.config_entries.async_update_entry(
|
||||
entry=config_entry,
|
||||
unique_id=device.unique_id,
|
||||
|
@@ -104,19 +104,10 @@ class UpnpFlowHandler(config_entries.ConfigFlow, domain=DOMAIN):
|
||||
"""
|
||||
_LOGGER.debug("async_step_import: import_info: %s", import_info)
|
||||
|
||||
if import_info is None:
|
||||
# Landed here via configuration.yaml entry.
|
||||
# Any device already added, then abort.
|
||||
if self._async_current_entries():
|
||||
_LOGGER.debug("aborting, already configured")
|
||||
return self.async_abort(reason="already_configured")
|
||||
|
||||
# Test if import_info isn't already configured.
|
||||
if import_info is not None and any(
|
||||
import_info["udn"] == entry.data[CONFIG_ENTRY_UDN]
|
||||
and import_info["st"] == entry.data[CONFIG_ENTRY_ST]
|
||||
for entry in self._async_current_entries()
|
||||
):
|
||||
# Landed here via configuration.yaml entry.
|
||||
# Any device already added, then abort.
|
||||
if self._async_current_entries():
|
||||
_LOGGER.debug("Already configured, aborting")
|
||||
return self.async_abort(reason="already_configured")
|
||||
|
||||
# Discover devices.
|
||||
@@ -127,8 +118,17 @@ class UpnpFlowHandler(config_entries.ConfigFlow, domain=DOMAIN):
|
||||
_LOGGER.info("No UPnP devices discovered, aborting")
|
||||
return self.async_abort(reason="no_devices_found")
|
||||
|
||||
discovery = self._discoveries[0]
|
||||
return await self._async_create_entry_from_discovery(discovery)
|
||||
# Ensure complete discovery.
|
||||
discovery_info = self._discoveries[0]
|
||||
if DISCOVERY_USN not in discovery_info:
|
||||
_LOGGER.debug("Incomplete discovery, ignoring")
|
||||
return self.async_abort(reason="incomplete_discovery")
|
||||
|
||||
# Ensure not already configuring/configured.
|
||||
usn = discovery_info[DISCOVERY_USN]
|
||||
await self.async_set_unique_id(usn)
|
||||
|
||||
return await self._async_create_entry_from_discovery(discovery_info)
|
||||
|
||||
async def async_step_ssdp(self, discovery_info: Mapping):
|
||||
"""Handle a discovered UPnP/IGD device.
|
||||
@@ -191,7 +191,7 @@ class UpnpFlowHandler(config_entries.ConfigFlow, domain=DOMAIN):
|
||||
):
|
||||
"""Create an entry from discovery."""
|
||||
_LOGGER.debug(
|
||||
"_async_create_entry_from_data: discovery: %s",
|
||||
"_async_create_entry_from_discovery: discovery: %s",
|
||||
discovery,
|
||||
)
|
||||
# Get name from device, if not found already.
|
||||
|
@@ -257,13 +257,20 @@ async def handle_render_template(hass, connection, msg):
|
||||
timeout = msg.get("timeout")
|
||||
info = None
|
||||
|
||||
if timeout and await template.async_render_will_timeout(timeout):
|
||||
connection.send_error(
|
||||
msg["id"],
|
||||
const.ERR_TEMPLATE_ERROR,
|
||||
f"Exceeded maximum execution time of {timeout}s",
|
||||
)
|
||||
return
|
||||
if timeout:
|
||||
try:
|
||||
timed_out = await template.async_render_will_timeout(timeout)
|
||||
except TemplateError as ex:
|
||||
connection.send_error(msg["id"], const.ERR_TEMPLATE_ERROR, str(ex))
|
||||
return
|
||||
|
||||
if timed_out:
|
||||
connection.send_error(
|
||||
msg["id"],
|
||||
const.ERR_TEMPLATE_ERROR,
|
||||
f"Exceeded maximum execution time of {timeout}s",
|
||||
)
|
||||
return
|
||||
|
||||
@callback
|
||||
def _template_listener(event, updates):
|
||||
|
@@ -137,7 +137,7 @@ EFFECTS_MAP = {
|
||||
EFFECT_POLICE2: yee_transitions.police2,
|
||||
EFFECT_CHRISTMAS: yee_transitions.christmas,
|
||||
EFFECT_RGB: yee_transitions.rgb,
|
||||
EFFECT_RANDOM_LOOP: yee_transitions.randomloop,
|
||||
EFFECT_RANDOM_LOOP: yee_transitions.random_loop,
|
||||
EFFECT_LSD: yee_transitions.lsd,
|
||||
EFFECT_SLOWDOWN: yee_transitions.slowdown,
|
||||
}
|
||||
@@ -661,7 +661,7 @@ class YeelightGenericLight(YeelightEntity, LightEntity):
|
||||
elif effect in EFFECTS_MAP:
|
||||
flow = Flow(count=0, transitions=EFFECTS_MAP[effect]())
|
||||
elif effect == EFFECT_FAST_RANDOM_LOOP:
|
||||
flow = Flow(count=0, transitions=yee_transitions.randomloop(duration=250))
|
||||
flow = Flow(count=0, transitions=yee_transitions.random_loop(duration=250))
|
||||
elif effect == EFFECT_WHATSAPP:
|
||||
flow = Flow(count=2, transitions=yee_transitions.pulse(37, 211, 102))
|
||||
elif effect == EFFECT_FACEBOOK:
|
||||
|
@@ -3,7 +3,7 @@
|
||||
"name": "Yeelight",
|
||||
"documentation": "https://www.home-assistant.io/integrations/yeelight",
|
||||
"requirements": [
|
||||
"yeelight==0.5.3"
|
||||
"yeelight==0.5.4"
|
||||
],
|
||||
"codeowners": [
|
||||
"@rytilahti",
|
||||
|
@@ -57,7 +57,13 @@ def decorate_command(channel, command):
|
||||
return result
|
||||
|
||||
except (zigpy.exceptions.ZigbeeException, asyncio.TimeoutError) as ex:
|
||||
channel.debug("command failed: %s exception: %s", command.__name__, str(ex))
|
||||
channel.debug(
|
||||
"command failed: '%s' args: '%s' kwargs '%s' exception: '%s'",
|
||||
command.__name__,
|
||||
args,
|
||||
kwds,
|
||||
str(ex),
|
||||
)
|
||||
return ex
|
||||
|
||||
return wrapper
|
||||
|
@@ -9,7 +9,7 @@ from typing import Any, Dict
|
||||
|
||||
from zigpy import types
|
||||
import zigpy.exceptions
|
||||
from zigpy.profiles import zha, zll
|
||||
from zigpy.profiles import PROFILES
|
||||
import zigpy.quirks
|
||||
from zigpy.zcl.clusters.general import Groups
|
||||
import zigpy.zdo.types as zdo_types
|
||||
@@ -456,27 +456,20 @@ class ZHADevice(LogMixin):
|
||||
]
|
||||
|
||||
# Return endpoint device type Names
|
||||
try:
|
||||
device_info[ATTR_ENDPOINT_NAMES] = [
|
||||
{
|
||||
"name": endpoint.device_type.name,
|
||||
}
|
||||
for (ep_id, endpoint) in self._zigpy_device.endpoints.items()
|
||||
if ep_id != 0
|
||||
and endpoint.profile_id in (zha.PROFILE_ID, zll.PROFILE_ID)
|
||||
]
|
||||
except AttributeError as ex:
|
||||
# Some device types are not using an enumeration
|
||||
self.warning(
|
||||
"Failed to identify endpoint name in '%s' with exception '%s'",
|
||||
self._zigpy_device.endpoints.items(),
|
||||
ex,
|
||||
)
|
||||
device_info[ATTR_ENDPOINT_NAMES] = [
|
||||
{
|
||||
"name": "unknown",
|
||||
}
|
||||
]
|
||||
names = []
|
||||
for endpoint in (ep for epid, ep in self.device.endpoints.items() if epid):
|
||||
profile = PROFILES.get(endpoint.profile_id)
|
||||
if profile and endpoint.device_type is not None:
|
||||
# DeviceType provides undefined enums
|
||||
names.append({ATTR_NAME: profile.DeviceType(endpoint.device_type).name})
|
||||
else:
|
||||
names.append(
|
||||
{
|
||||
ATTR_NAME: f"unknown {endpoint.device_type} device_type "
|
||||
"of 0x{endpoint.profile_id:04x} profile id"
|
||||
}
|
||||
)
|
||||
device_info[ATTR_ENDPOINT_NAMES] = names
|
||||
|
||||
reg_device = self.gateway.ha_device_registry.async_get(self.device_id)
|
||||
if reg_device is not None:
|
||||
@@ -516,7 +509,7 @@ class ZHADevice(LogMixin):
|
||||
CLUSTER_TYPE_OUT: endpoint.out_clusters,
|
||||
}
|
||||
for (ep_id, endpoint) in self._zigpy_device.endpoints.items()
|
||||
if ep_id != 0 and endpoint.profile_id in (zha.PROFILE_ID, zll.PROFILE_ID)
|
||||
if ep_id != 0 and endpoint.profile_id in PROFILES
|
||||
}
|
||||
|
||||
@callback
|
||||
|
@@ -9,10 +9,10 @@
|
||||
"zha-quirks==0.0.45",
|
||||
"zigpy-cc==0.5.2",
|
||||
"zigpy-deconz==0.10.0",
|
||||
"zigpy==0.24.3",
|
||||
"zigpy==0.26.0",
|
||||
"zigpy-xbee==0.13.0",
|
||||
"zigpy-zigate==0.6.2",
|
||||
"zigpy-znp==0.2.0"
|
||||
"zigpy-znp==0.2.1"
|
||||
],
|
||||
"codeowners": ["@dmulcahey", "@adminiuga"]
|
||||
}
|
||||
|
@@ -2,169 +2,97 @@
|
||||
import logging
|
||||
|
||||
import voluptuous as vol
|
||||
from zoneminder.zm import ZoneMinder
|
||||
|
||||
from homeassistant.components.binary_sensor import DOMAIN as BINARY_SENSOR_DOMAIN
|
||||
from homeassistant.components.camera import DOMAIN as CAMERA_DOMAIN
|
||||
from homeassistant.components.sensor import DOMAIN as SENSOR_DOMAIN
|
||||
from homeassistant.components.switch import DOMAIN as SWITCH_DOMAIN
|
||||
import homeassistant.config_entries as config_entries
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import (
|
||||
ATTR_ID,
|
||||
ATTR_NAME,
|
||||
CONF_HOST,
|
||||
CONF_PASSWORD,
|
||||
CONF_PATH,
|
||||
CONF_PLATFORM,
|
||||
CONF_SOURCE,
|
||||
CONF_SSL,
|
||||
CONF_USERNAME,
|
||||
CONF_VERIFY_SSL,
|
||||
)
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.exceptions import ConfigEntryNotReady
|
||||
from homeassistant.helpers import config_validation as cv
|
||||
|
||||
from . import const
|
||||
from .common import (
|
||||
ClientAvailabilityResult,
|
||||
async_test_client_availability,
|
||||
create_client_from_config,
|
||||
del_client_from_data,
|
||||
get_client_from_data,
|
||||
is_client_in_data,
|
||||
set_client_to_data,
|
||||
set_platform_configs,
|
||||
)
|
||||
import homeassistant.helpers.config_validation as cv
|
||||
from homeassistant.helpers.discovery import async_load_platform
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
PLATFORM_DOMAINS = tuple(
|
||||
[BINARY_SENSOR_DOMAIN, CAMERA_DOMAIN, SENSOR_DOMAIN, SWITCH_DOMAIN]
|
||||
)
|
||||
|
||||
CONF_PATH_ZMS = "path_zms"
|
||||
|
||||
DEFAULT_PATH = "/zm/"
|
||||
DEFAULT_PATH_ZMS = "/zm/cgi-bin/nph-zms"
|
||||
DEFAULT_SSL = False
|
||||
DEFAULT_TIMEOUT = 10
|
||||
DEFAULT_VERIFY_SSL = True
|
||||
DOMAIN = "zoneminder"
|
||||
|
||||
HOST_CONFIG_SCHEMA = vol.Schema(
|
||||
{
|
||||
vol.Required(CONF_HOST): cv.string,
|
||||
vol.Optional(CONF_PASSWORD): cv.string,
|
||||
vol.Optional(CONF_PATH, default=const.DEFAULT_PATH): cv.string,
|
||||
vol.Optional(const.CONF_PATH_ZMS, default=const.DEFAULT_PATH_ZMS): cv.string,
|
||||
vol.Optional(CONF_SSL, default=const.DEFAULT_SSL): cv.boolean,
|
||||
vol.Optional(CONF_PATH, default=DEFAULT_PATH): cv.string,
|
||||
vol.Optional(CONF_PATH_ZMS, default=DEFAULT_PATH_ZMS): cv.string,
|
||||
vol.Optional(CONF_SSL, default=DEFAULT_SSL): cv.boolean,
|
||||
vol.Optional(CONF_USERNAME): cv.string,
|
||||
vol.Optional(CONF_VERIFY_SSL, default=const.DEFAULT_VERIFY_SSL): cv.boolean,
|
||||
vol.Optional(CONF_VERIFY_SSL, default=DEFAULT_VERIFY_SSL): cv.boolean,
|
||||
}
|
||||
)
|
||||
|
||||
CONFIG_SCHEMA = vol.All(
|
||||
cv.deprecated(const.DOMAIN, invalidation_version="0.118"),
|
||||
vol.Schema(
|
||||
{const.DOMAIN: vol.All(cv.ensure_list, [HOST_CONFIG_SCHEMA])},
|
||||
extra=vol.ALLOW_EXTRA,
|
||||
),
|
||||
CONFIG_SCHEMA = vol.Schema(
|
||||
{DOMAIN: vol.All(cv.ensure_list, [HOST_CONFIG_SCHEMA])}, extra=vol.ALLOW_EXTRA
|
||||
)
|
||||
|
||||
SERVICE_SET_RUN_STATE = "set_run_state"
|
||||
SET_RUN_STATE_SCHEMA = vol.Schema(
|
||||
{vol.Required(ATTR_ID): cv.string, vol.Required(ATTR_NAME): cv.string}
|
||||
)
|
||||
|
||||
|
||||
async def async_setup(hass: HomeAssistant, base_config: dict):
|
||||
def setup(hass, config):
|
||||
"""Set up the ZoneMinder component."""
|
||||
|
||||
# Collect the platform specific configs. It's necessary to collect these configs
|
||||
# here instead of the platform's setup_platform function because the invocation order
|
||||
# of setup_platform and async_setup_entry is not consistent.
|
||||
set_platform_configs(
|
||||
hass,
|
||||
SENSOR_DOMAIN,
|
||||
[
|
||||
platform_config
|
||||
for platform_config in base_config.get(SENSOR_DOMAIN, [])
|
||||
if platform_config[CONF_PLATFORM] == const.DOMAIN
|
||||
],
|
||||
)
|
||||
set_platform_configs(
|
||||
hass,
|
||||
SWITCH_DOMAIN,
|
||||
[
|
||||
platform_config
|
||||
for platform_config in base_config.get(SWITCH_DOMAIN, [])
|
||||
if platform_config[CONF_PLATFORM] == const.DOMAIN
|
||||
],
|
||||
hass.data[DOMAIN] = {}
|
||||
|
||||
success = True
|
||||
|
||||
for conf in config[DOMAIN]:
|
||||
protocol = "https" if conf[CONF_SSL] else "http"
|
||||
|
||||
host_name = conf[CONF_HOST]
|
||||
server_origin = f"{protocol}://{host_name}"
|
||||
zm_client = ZoneMinder(
|
||||
server_origin,
|
||||
conf.get(CONF_USERNAME),
|
||||
conf.get(CONF_PASSWORD),
|
||||
conf.get(CONF_PATH),
|
||||
conf.get(CONF_PATH_ZMS),
|
||||
conf.get(CONF_VERIFY_SSL),
|
||||
)
|
||||
hass.data[DOMAIN][host_name] = zm_client
|
||||
|
||||
success = zm_client.login() and success
|
||||
|
||||
def set_active_state(call):
|
||||
"""Set the ZoneMinder run state to the given state name."""
|
||||
zm_id = call.data[ATTR_ID]
|
||||
state_name = call.data[ATTR_NAME]
|
||||
if zm_id not in hass.data[DOMAIN]:
|
||||
_LOGGER.error("Invalid ZoneMinder host provided: %s", zm_id)
|
||||
if not hass.data[DOMAIN][zm_id].set_active_state(state_name):
|
||||
_LOGGER.error(
|
||||
"Unable to change ZoneMinder state. Host: %s, state: %s",
|
||||
zm_id,
|
||||
state_name,
|
||||
)
|
||||
|
||||
hass.services.register(
|
||||
DOMAIN, SERVICE_SET_RUN_STATE, set_active_state, schema=SET_RUN_STATE_SCHEMA
|
||||
)
|
||||
|
||||
config = base_config.get(const.DOMAIN)
|
||||
hass.async_create_task(
|
||||
async_load_platform(hass, "binary_sensor", DOMAIN, {}, config)
|
||||
)
|
||||
|
||||
if not config:
|
||||
return True
|
||||
|
||||
for config_item in config:
|
||||
hass.async_create_task(
|
||||
hass.config_entries.flow.async_init(
|
||||
const.DOMAIN,
|
||||
context={CONF_SOURCE: config_entries.SOURCE_IMPORT},
|
||||
data=config_item,
|
||||
)
|
||||
)
|
||||
|
||||
return True
|
||||
|
||||
|
||||
async def async_setup_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> bool:
|
||||
"""Set up Zoneminder config entry."""
|
||||
zm_client = create_client_from_config(config_entry.data)
|
||||
|
||||
result = await async_test_client_availability(hass, zm_client)
|
||||
if result != ClientAvailabilityResult.AVAILABLE:
|
||||
raise ConfigEntryNotReady
|
||||
|
||||
set_client_to_data(hass, config_entry.unique_id, zm_client)
|
||||
|
||||
for platform_domain in PLATFORM_DOMAINS:
|
||||
hass.async_create_task(
|
||||
hass.config_entries.async_forward_entry_setup(config_entry, platform_domain)
|
||||
)
|
||||
|
||||
if not hass.services.has_service(const.DOMAIN, const.SERVICE_SET_RUN_STATE):
|
||||
|
||||
@callback
|
||||
def set_active_state(call):
|
||||
"""Set the ZoneMinder run state to the given state name."""
|
||||
zm_id = call.data[ATTR_ID]
|
||||
state_name = call.data[ATTR_NAME]
|
||||
if not is_client_in_data(hass, zm_id):
|
||||
_LOGGER.error("Invalid ZoneMinder host provided: %s", zm_id)
|
||||
return
|
||||
|
||||
if not get_client_from_data(hass, zm_id).set_active_state(state_name):
|
||||
_LOGGER.error(
|
||||
"Unable to change ZoneMinder state. Host: %s, state: %s",
|
||||
zm_id,
|
||||
state_name,
|
||||
)
|
||||
|
||||
hass.services.async_register(
|
||||
const.DOMAIN,
|
||||
const.SERVICE_SET_RUN_STATE,
|
||||
set_active_state,
|
||||
schema=SET_RUN_STATE_SCHEMA,
|
||||
)
|
||||
|
||||
return True
|
||||
|
||||
|
||||
async def async_unload_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> bool:
|
||||
"""Unload Zoneminder config entry."""
|
||||
for platform_domain in PLATFORM_DOMAINS:
|
||||
hass.async_create_task(
|
||||
hass.config_entries.async_forward_entry_unload(
|
||||
config_entry, platform_domain
|
||||
)
|
||||
)
|
||||
|
||||
# If this is the last config to exist, remove the service too.
|
||||
if len(hass.config_entries.async_entries(const.DOMAIN)) <= 1:
|
||||
hass.services.async_remove(const.DOMAIN, const.SERVICE_SET_RUN_STATE)
|
||||
|
||||
del_client_from_data(hass, config_entry.unique_id)
|
||||
|
||||
return True
|
||||
return success
|
||||
|
@@ -1,43 +1,29 @@
|
||||
"""Support for ZoneMinder binary sensors."""
|
||||
from typing import Callable, List, Optional
|
||||
|
||||
from zoneminder.zm import ZoneMinder
|
||||
|
||||
from homeassistant.components.binary_sensor import (
|
||||
DEVICE_CLASS_CONNECTIVITY,
|
||||
BinarySensorEntity,
|
||||
)
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.entity import Entity
|
||||
|
||||
from .common import get_client_from_data
|
||||
from . import DOMAIN as ZONEMINDER_DOMAIN
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
hass: HomeAssistant,
|
||||
config_entry: ConfigEntry,
|
||||
async_add_entities: Callable[[List[Entity], Optional[bool]], None],
|
||||
) -> None:
|
||||
"""Set up the sensor config entry."""
|
||||
zm_client = get_client_from_data(hass, config_entry.unique_id)
|
||||
async_add_entities([ZMAvailabilitySensor(zm_client, config_entry)])
|
||||
async def async_setup_platform(hass, config, add_entities, discovery_info=None):
|
||||
"""Set up the ZoneMinder binary sensor platform."""
|
||||
sensors = []
|
||||
for host_name, zm_client in hass.data[ZONEMINDER_DOMAIN].items():
|
||||
sensors.append(ZMAvailabilitySensor(host_name, zm_client))
|
||||
add_entities(sensors)
|
||||
return True
|
||||
|
||||
|
||||
class ZMAvailabilitySensor(BinarySensorEntity):
|
||||
"""Representation of the availability of ZoneMinder as a binary sensor."""
|
||||
|
||||
def __init__(self, client: ZoneMinder, config_entry: ConfigEntry):
|
||||
def __init__(self, host_name, client):
|
||||
"""Initialize availability sensor."""
|
||||
self._state = None
|
||||
self._name = config_entry.unique_id
|
||||
self._name = host_name
|
||||
self._client = client
|
||||
self._config_entry = config_entry
|
||||
|
||||
@property
|
||||
def unique_id(self) -> Optional[str]:
|
||||
"""Return a unique ID."""
|
||||
return f"{self._config_entry.unique_id}_availability"
|
||||
|
||||
@property
|
||||
def name(self):
|
||||
|
@@ -1,8 +1,5 @@
|
||||
"""Support for ZoneMinder camera streaming."""
|
||||
import logging
|
||||
from typing import Callable, List, Optional
|
||||
|
||||
from zoneminder.monitor import Monitor
|
||||
|
||||
from homeassistant.components.mjpeg.camera import (
|
||||
CONF_MJPEG_URL,
|
||||
@@ -10,12 +7,9 @@ from homeassistant.components.mjpeg.camera import (
|
||||
MjpegCamera,
|
||||
filter_urllib3_logging,
|
||||
)
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import CONF_NAME, CONF_VERIFY_SSL
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.entity import Entity
|
||||
|
||||
from .common import get_client_from_data
|
||||
from . import DOMAIN as ZONEMINDER_DOMAIN
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
@@ -23,28 +17,23 @@ _LOGGER = logging.getLogger(__name__)
|
||||
def setup_platform(hass, config, add_entities, discovery_info=None):
|
||||
"""Set up the ZoneMinder cameras."""
|
||||
filter_urllib3_logging()
|
||||
cameras = []
|
||||
for zm_client in hass.data[ZONEMINDER_DOMAIN].values():
|
||||
monitors = zm_client.get_monitors()
|
||||
if not monitors:
|
||||
_LOGGER.warning("Could not fetch monitors from ZoneMinder host: %s")
|
||||
return
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
hass: HomeAssistant,
|
||||
config_entry: ConfigEntry,
|
||||
async_add_entities: Callable[[List[Entity], Optional[bool]], None],
|
||||
) -> None:
|
||||
"""Set up the sensor config entry."""
|
||||
zm_client = get_client_from_data(hass, config_entry.unique_id)
|
||||
|
||||
async_add_entities(
|
||||
[
|
||||
ZoneMinderCamera(monitor, zm_client.verify_ssl, config_entry)
|
||||
for monitor in await hass.async_add_job(zm_client.get_monitors)
|
||||
]
|
||||
)
|
||||
for monitor in monitors:
|
||||
_LOGGER.info("Initializing camera %s", monitor.id)
|
||||
cameras.append(ZoneMinderCamera(monitor, zm_client.verify_ssl))
|
||||
add_entities(cameras)
|
||||
|
||||
|
||||
class ZoneMinderCamera(MjpegCamera):
|
||||
"""Representation of a ZoneMinder Monitor Stream."""
|
||||
|
||||
def __init__(self, monitor: Monitor, verify_ssl: bool, config_entry: ConfigEntry):
|
||||
def __init__(self, monitor, verify_ssl):
|
||||
"""Initialize as a subclass of MjpegCamera."""
|
||||
device_info = {
|
||||
CONF_NAME: monitor.name,
|
||||
@@ -56,12 +45,6 @@ class ZoneMinderCamera(MjpegCamera):
|
||||
self._is_recording = None
|
||||
self._is_available = None
|
||||
self._monitor = monitor
|
||||
self._config_entry = config_entry
|
||||
|
||||
@property
|
||||
def unique_id(self) -> Optional[str]:
|
||||
"""Return a unique ID."""
|
||||
return f"{self._config_entry.unique_id}_{self._monitor.id}_camera"
|
||||
|
||||
@property
|
||||
def should_poll(self):
|
||||
|
@@ -1,110 +0,0 @@
|
||||
"""Common code for the ZoneMinder component."""
|
||||
from enum import Enum
|
||||
from typing import List
|
||||
|
||||
import requests
|
||||
from zoneminder.zm import ZoneMinder
|
||||
|
||||
from homeassistant.const import (
|
||||
CONF_HOST,
|
||||
CONF_PASSWORD,
|
||||
CONF_PATH,
|
||||
CONF_SSL,
|
||||
CONF_USERNAME,
|
||||
CONF_VERIFY_SSL,
|
||||
)
|
||||
from homeassistant.core import HomeAssistant
|
||||
|
||||
from . import const
|
||||
|
||||
|
||||
def prime_domain_data(hass: HomeAssistant) -> None:
|
||||
"""Prime the data structures."""
|
||||
hass.data.setdefault(const.DOMAIN, {})
|
||||
|
||||
|
||||
def prime_platform_configs(hass: HomeAssistant, domain: str) -> None:
|
||||
"""Prime the data structures."""
|
||||
prime_domain_data(hass)
|
||||
hass.data[const.DOMAIN].setdefault(const.PLATFORM_CONFIGS, {})
|
||||
hass.data[const.DOMAIN][const.PLATFORM_CONFIGS].setdefault(domain, [])
|
||||
|
||||
|
||||
def set_platform_configs(hass: HomeAssistant, domain: str, configs: List[dict]) -> None:
|
||||
"""Set platform configs."""
|
||||
prime_platform_configs(hass, domain)
|
||||
hass.data[const.DOMAIN][const.PLATFORM_CONFIGS][domain] = configs
|
||||
|
||||
|
||||
def get_platform_configs(hass: HomeAssistant, domain: str) -> List[dict]:
|
||||
"""Get platform configs."""
|
||||
prime_platform_configs(hass, domain)
|
||||
return hass.data[const.DOMAIN][const.PLATFORM_CONFIGS][domain]
|
||||
|
||||
|
||||
def prime_config_data(hass: HomeAssistant, unique_id: str) -> None:
|
||||
"""Prime the data structures."""
|
||||
prime_domain_data(hass)
|
||||
hass.data[const.DOMAIN].setdefault(const.CONFIG_DATA, {})
|
||||
hass.data[const.DOMAIN][const.CONFIG_DATA].setdefault(unique_id, {})
|
||||
|
||||
|
||||
def set_client_to_data(hass: HomeAssistant, unique_id: str, client: ZoneMinder) -> None:
|
||||
"""Put a ZoneMinder client in the Home Assistant data."""
|
||||
prime_config_data(hass, unique_id)
|
||||
hass.data[const.DOMAIN][const.CONFIG_DATA][unique_id][const.API_CLIENT] = client
|
||||
|
||||
|
||||
def is_client_in_data(hass: HomeAssistant, unique_id: str) -> bool:
|
||||
"""Check if ZoneMinder client is in the Home Assistant data."""
|
||||
prime_config_data(hass, unique_id)
|
||||
return const.API_CLIENT in hass.data[const.DOMAIN][const.CONFIG_DATA][unique_id]
|
||||
|
||||
|
||||
def get_client_from_data(hass: HomeAssistant, unique_id: str) -> ZoneMinder:
|
||||
"""Get a ZoneMinder client from the Home Assistant data."""
|
||||
prime_config_data(hass, unique_id)
|
||||
return hass.data[const.DOMAIN][const.CONFIG_DATA][unique_id][const.API_CLIENT]
|
||||
|
||||
|
||||
def del_client_from_data(hass: HomeAssistant, unique_id: str) -> None:
|
||||
"""Delete a ZoneMinder client from the Home Assistant data."""
|
||||
prime_config_data(hass, unique_id)
|
||||
del hass.data[const.DOMAIN][const.CONFIG_DATA][unique_id][const.API_CLIENT]
|
||||
|
||||
|
||||
def create_client_from_config(conf: dict) -> ZoneMinder:
|
||||
"""Create a new ZoneMinder client from a config."""
|
||||
protocol = "https" if conf[CONF_SSL] else "http"
|
||||
|
||||
host_name = conf[CONF_HOST]
|
||||
server_origin = f"{protocol}://{host_name}"
|
||||
|
||||
return ZoneMinder(
|
||||
server_origin,
|
||||
conf.get(CONF_USERNAME),
|
||||
conf.get(CONF_PASSWORD),
|
||||
conf.get(CONF_PATH),
|
||||
conf.get(const.CONF_PATH_ZMS),
|
||||
conf.get(CONF_VERIFY_SSL),
|
||||
)
|
||||
|
||||
|
||||
class ClientAvailabilityResult(Enum):
|
||||
"""Client availability test result."""
|
||||
|
||||
AVAILABLE = "available"
|
||||
ERROR_AUTH_FAIL = "auth_fail"
|
||||
ERROR_CONNECTION_ERROR = "connection_error"
|
||||
|
||||
|
||||
async def async_test_client_availability(
|
||||
hass: HomeAssistant, client: ZoneMinder
|
||||
) -> ClientAvailabilityResult:
|
||||
"""Test the availability of a ZoneMinder client."""
|
||||
try:
|
||||
if await hass.async_add_job(client.login):
|
||||
return ClientAvailabilityResult.AVAILABLE
|
||||
return ClientAvailabilityResult.ERROR_AUTH_FAIL
|
||||
except requests.exceptions.ConnectionError:
|
||||
return ClientAvailabilityResult.ERROR_CONNECTION_ERROR
|
@@ -1,99 +0,0 @@
|
||||
"""ZoneMinder config flow."""
|
||||
from urllib.parse import urlparse
|
||||
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant import config_entries
|
||||
from homeassistant.const import (
|
||||
CONF_HOST,
|
||||
CONF_PASSWORD,
|
||||
CONF_PATH,
|
||||
CONF_SOURCE,
|
||||
CONF_SSL,
|
||||
CONF_USERNAME,
|
||||
CONF_VERIFY_SSL,
|
||||
)
|
||||
|
||||
from .common import (
|
||||
ClientAvailabilityResult,
|
||||
async_test_client_availability,
|
||||
create_client_from_config,
|
||||
)
|
||||
from .const import (
|
||||
CONF_PATH_ZMS,
|
||||
DEFAULT_PATH,
|
||||
DEFAULT_PATH_ZMS,
|
||||
DEFAULT_SSL,
|
||||
DEFAULT_VERIFY_SSL,
|
||||
)
|
||||
from .const import DOMAIN # pylint: disable=unused-import
|
||||
|
||||
|
||||
class ZoneminderFlowHandler(config_entries.ConfigFlow, domain=DOMAIN):
|
||||
"""Flow handler for zoneminder integration."""
|
||||
|
||||
VERSION = 1
|
||||
CONNECTION_CLASS = config_entries.CONN_CLASS_LOCAL_POLL
|
||||
|
||||
async def async_step_import(self, config: dict):
|
||||
"""Handle a flow initialized by import."""
|
||||
return await self.async_step_finish(
|
||||
{**config, **{CONF_SOURCE: config_entries.SOURCE_IMPORT}}
|
||||
)
|
||||
|
||||
async def async_step_user(self, user_input: dict = None):
|
||||
"""Handle user step."""
|
||||
user_input = user_input or {}
|
||||
errors = {}
|
||||
|
||||
if user_input:
|
||||
zm_client = create_client_from_config(user_input)
|
||||
result = await async_test_client_availability(self.hass, zm_client)
|
||||
if result == ClientAvailabilityResult.AVAILABLE:
|
||||
return await self.async_step_finish(user_input)
|
||||
|
||||
errors["base"] = result.value
|
||||
|
||||
return self.async_show_form(
|
||||
step_id=config_entries.SOURCE_USER,
|
||||
data_schema=vol.Schema(
|
||||
{
|
||||
vol.Required(CONF_HOST, default=user_input.get(CONF_HOST)): str,
|
||||
vol.Optional(
|
||||
CONF_USERNAME, default=user_input.get(CONF_USERNAME)
|
||||
): str,
|
||||
vol.Optional(
|
||||
CONF_PASSWORD, default=user_input.get(CONF_PASSWORD)
|
||||
): str,
|
||||
vol.Optional(
|
||||
CONF_PATH, default=user_input.get(CONF_PATH, DEFAULT_PATH)
|
||||
): str,
|
||||
vol.Optional(
|
||||
CONF_PATH_ZMS,
|
||||
default=user_input.get(CONF_PATH_ZMS, DEFAULT_PATH_ZMS),
|
||||
): str,
|
||||
vol.Optional(
|
||||
CONF_SSL, default=user_input.get(CONF_SSL, DEFAULT_SSL)
|
||||
): bool,
|
||||
vol.Optional(
|
||||
CONF_VERIFY_SSL,
|
||||
default=user_input.get(CONF_VERIFY_SSL, DEFAULT_VERIFY_SSL),
|
||||
): bool,
|
||||
}
|
||||
),
|
||||
errors=errors,
|
||||
)
|
||||
|
||||
async def async_step_finish(self, config: dict):
|
||||
"""Finish config flow."""
|
||||
zm_client = create_client_from_config(config)
|
||||
hostname = urlparse(zm_client.get_zms_url()).hostname
|
||||
result = await async_test_client_availability(self.hass, zm_client)
|
||||
|
||||
if result != ClientAvailabilityResult.AVAILABLE:
|
||||
return self.async_abort(reason=str(result.value))
|
||||
|
||||
await self.async_set_unique_id(hostname)
|
||||
self._abort_if_unique_id_configured(config)
|
||||
|
||||
return self.async_create_entry(title=hostname, data=config)
|
@@ -1,14 +0,0 @@
|
||||
"""Constants for zoneminder component."""
|
||||
|
||||
CONF_PATH_ZMS = "path_zms"
|
||||
|
||||
DEFAULT_PATH = "/zm/"
|
||||
DEFAULT_PATH_ZMS = "/zm/cgi-bin/nph-zms"
|
||||
DEFAULT_SSL = False
|
||||
DEFAULT_VERIFY_SSL = True
|
||||
DOMAIN = "zoneminder"
|
||||
SERVICE_SET_RUN_STATE = "set_run_state"
|
||||
|
||||
PLATFORM_CONFIGS = "platform_configs"
|
||||
CONFIG_DATA = "config_data"
|
||||
API_CLIENT = "api_client"
|
@@ -1,8 +1,7 @@
|
||||
{
|
||||
"domain": "zoneminder",
|
||||
"name": "ZoneMinder",
|
||||
"config_flow": true,
|
||||
"documentation": "https://www.home-assistant.io/integrations/zoneminder",
|
||||
"requirements": ["zm-py==0.4.0"],
|
||||
"codeowners": ["@rohankapoorcom", "@vangorra"]
|
||||
"codeowners": ["@rohankapoorcom"]
|
||||
}
|
||||
|
@@ -1,19 +1,15 @@
|
||||
"""Support for ZoneMinder sensors."""
|
||||
import logging
|
||||
from typing import Callable, List, Optional
|
||||
|
||||
import voluptuous as vol
|
||||
from zoneminder.monitor import Monitor, TimePeriod
|
||||
from zoneminder.zm import ZoneMinder
|
||||
from zoneminder.monitor import TimePeriod
|
||||
|
||||
from homeassistant.components.sensor import DOMAIN as SENSOR_DOMAIN, PLATFORM_SCHEMA
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.components.sensor import PLATFORM_SCHEMA
|
||||
from homeassistant.const import CONF_MONITORED_CONDITIONS
|
||||
from homeassistant.core import HomeAssistant
|
||||
import homeassistant.helpers.config_validation as cv
|
||||
from homeassistant.helpers.entity import Entity
|
||||
|
||||
from .common import get_client_from_data, get_platform_configs
|
||||
from . import DOMAIN as ZONEMINDER_DOMAIN
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
@@ -41,50 +37,35 @@ PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
|
||||
)
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
hass: HomeAssistant,
|
||||
config_entry: ConfigEntry,
|
||||
async_add_entities: Callable[[List[Entity], Optional[bool]], None],
|
||||
) -> None:
|
||||
"""Set up the sensor config entry."""
|
||||
zm_client = get_client_from_data(hass, config_entry.unique_id)
|
||||
monitors = await hass.async_add_job(zm_client.get_monitors)
|
||||
|
||||
if not monitors:
|
||||
_LOGGER.warning("Did not fetch any monitors from ZoneMinder")
|
||||
def setup_platform(hass, config, add_entities, discovery_info=None):
|
||||
"""Set up the ZoneMinder sensor platform."""
|
||||
include_archived = config.get(CONF_INCLUDE_ARCHIVED)
|
||||
|
||||
sensors = []
|
||||
for monitor in monitors:
|
||||
sensors.append(ZMSensorMonitors(monitor, config_entry))
|
||||
for zm_client in hass.data[ZONEMINDER_DOMAIN].values():
|
||||
monitors = zm_client.get_monitors()
|
||||
if not monitors:
|
||||
_LOGGER.warning("Could not fetch any monitors from ZoneMinder")
|
||||
|
||||
for config in get_platform_configs(hass, SENSOR_DOMAIN):
|
||||
include_archived = config.get(CONF_INCLUDE_ARCHIVED)
|
||||
for monitor in monitors:
|
||||
sensors.append(ZMSensorMonitors(monitor))
|
||||
|
||||
for sensor in config[CONF_MONITORED_CONDITIONS]:
|
||||
sensors.append(
|
||||
ZMSensorEvents(monitor, include_archived, sensor, config_entry)
|
||||
)
|
||||
sensors.append(ZMSensorEvents(monitor, include_archived, sensor))
|
||||
|
||||
sensors.append(ZMSensorRunState(zm_client, config_entry))
|
||||
|
||||
async_add_entities(sensors, True)
|
||||
sensors.append(ZMSensorRunState(zm_client))
|
||||
add_entities(sensors)
|
||||
|
||||
|
||||
class ZMSensorMonitors(Entity):
|
||||
"""Get the status of each ZoneMinder monitor."""
|
||||
|
||||
def __init__(self, monitor: Monitor, config_entry: ConfigEntry):
|
||||
def __init__(self, monitor):
|
||||
"""Initialize monitor sensor."""
|
||||
self._monitor = monitor
|
||||
self._config_entry = config_entry
|
||||
self._state = None
|
||||
self._is_available = None
|
||||
|
||||
@property
|
||||
def unique_id(self) -> Optional[str]:
|
||||
"""Return a unique ID."""
|
||||
return f"{self._config_entry.unique_id}_{self._monitor.id}_status"
|
||||
|
||||
@property
|
||||
def name(self):
|
||||
"""Return the name of the sensor."""
|
||||
@@ -113,26 +94,14 @@ class ZMSensorMonitors(Entity):
|
||||
class ZMSensorEvents(Entity):
|
||||
"""Get the number of events for each monitor."""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
monitor: Monitor,
|
||||
include_archived: bool,
|
||||
sensor_type: str,
|
||||
config_entry: ConfigEntry,
|
||||
):
|
||||
def __init__(self, monitor, include_archived, sensor_type):
|
||||
"""Initialize event sensor."""
|
||||
|
||||
self._monitor = monitor
|
||||
self._include_archived = include_archived
|
||||
self.time_period = TimePeriod.get_time_period(sensor_type)
|
||||
self._config_entry = config_entry
|
||||
self._state = None
|
||||
|
||||
@property
|
||||
def unique_id(self) -> Optional[str]:
|
||||
"""Return a unique ID."""
|
||||
return f"{self._config_entry.unique_id}_{self._monitor.id}_{self.time_period.value}_{self._include_archived}_events"
|
||||
|
||||
@property
|
||||
def name(self):
|
||||
"""Return the name of the sensor."""
|
||||
@@ -156,17 +125,11 @@ class ZMSensorEvents(Entity):
|
||||
class ZMSensorRunState(Entity):
|
||||
"""Get the ZoneMinder run state."""
|
||||
|
||||
def __init__(self, client: ZoneMinder, config_entry: ConfigEntry):
|
||||
def __init__(self, client):
|
||||
"""Initialize run state sensor."""
|
||||
self._state = None
|
||||
self._is_available = None
|
||||
self._client = client
|
||||
self._config_entry = config_entry
|
||||
|
||||
@property
|
||||
def unique_id(self) -> Optional[str]:
|
||||
"""Return a unique ID."""
|
||||
return f"{self._config_entry.unique_id}_runstate"
|
||||
|
||||
@property
|
||||
def name(self):
|
||||
|
@@ -1,9 +1,6 @@
|
||||
set_run_state:
|
||||
description: "Set the ZoneMinder run state"
|
||||
description: Set the ZoneMinder run state
|
||||
fields:
|
||||
id:
|
||||
description: "The host name or IP address of the ZoneMinder instance."
|
||||
example: "10.10.0.2"
|
||||
name:
|
||||
description: "The string name of the ZoneMinder run state to set as active."
|
||||
description: The string name of the ZoneMinder run state to set as active.
|
||||
example: "Home"
|
||||
|
@@ -1,28 +0,0 @@
|
||||
{
|
||||
"config": {
|
||||
"flow_title": "ZoneMinder",
|
||||
"step": {
|
||||
"user": {
|
||||
"title": "Add ZoneMinder Server.",
|
||||
"data": {
|
||||
"host": "Host and Port (ex 10.10.0.4:8010)",
|
||||
"username": "[%key:common::config_flow::data::username%]",
|
||||
"password": "[%key:common::config_flow::data::password%]",
|
||||
"path": "ZM Path",
|
||||
"path_zms": "ZMS Path",
|
||||
"ssl": "Use SSL for connections to ZoneMinder",
|
||||
"verify_ssl": "Verify SSL Certificate"
|
||||
}
|
||||
}
|
||||
},
|
||||
"abort": {
|
||||
"auth_fail": "Username or password is incorrect.",
|
||||
"connection_error": "Failed to connect to a ZoneMinder server."
|
||||
},
|
||||
"error": {
|
||||
"auth_fail": "Username or password is incorrect.",
|
||||
"connection_error": "Failed to connect to a ZoneMinder server."
|
||||
},
|
||||
"create_entry": { "default": "ZoneMinder server added." }
|
||||
}
|
||||
}
|
@@ -1,61 +1,41 @@
|
||||
"""Support for ZoneMinder switches."""
|
||||
import logging
|
||||
from typing import Callable, List, Optional
|
||||
|
||||
import voluptuous as vol
|
||||
from zoneminder.monitor import Monitor, MonitorState
|
||||
from zoneminder.monitor import MonitorState
|
||||
|
||||
from homeassistant.components.switch import (
|
||||
DOMAIN as SWITCH_DOMAIN,
|
||||
PLATFORM_SCHEMA,
|
||||
SwitchEntity,
|
||||
)
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.components.switch import PLATFORM_SCHEMA, SwitchEntity
|
||||
from homeassistant.const import CONF_COMMAND_OFF, CONF_COMMAND_ON
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.entity import Entity
|
||||
import homeassistant.helpers.config_validation as cv
|
||||
|
||||
from .common import get_client_from_data, get_platform_configs
|
||||
from . import DOMAIN as ZONEMINDER_DOMAIN
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
MONITOR_STATES = {
|
||||
MonitorState[name].value: MonitorState[name]
|
||||
for name in dir(MonitorState)
|
||||
if not name.startswith("_")
|
||||
}
|
||||
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
|
||||
{
|
||||
vol.Required(CONF_COMMAND_ON): vol.All(vol.In(MONITOR_STATES.keys())),
|
||||
vol.Required(CONF_COMMAND_OFF): vol.All(vol.In(MONITOR_STATES.keys())),
|
||||
vol.Required(CONF_COMMAND_ON): cv.string,
|
||||
vol.Required(CONF_COMMAND_OFF): cv.string,
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
hass: HomeAssistant,
|
||||
config_entry: ConfigEntry,
|
||||
async_add_entities: Callable[[List[Entity], Optional[bool]], None],
|
||||
) -> None:
|
||||
"""Set up the sensor config entry."""
|
||||
zm_client = get_client_from_data(hass, config_entry.unique_id)
|
||||
monitors = await hass.async_add_job(zm_client.get_monitors)
|
||||
def setup_platform(hass, config, add_entities, discovery_info=None):
|
||||
"""Set up the ZoneMinder switch platform."""
|
||||
|
||||
if not monitors:
|
||||
_LOGGER.warning("Could not fetch monitors from ZoneMinder")
|
||||
return
|
||||
on_state = MonitorState(config.get(CONF_COMMAND_ON))
|
||||
off_state = MonitorState(config.get(CONF_COMMAND_OFF))
|
||||
|
||||
switches = []
|
||||
for monitor in monitors:
|
||||
for config in get_platform_configs(hass, SWITCH_DOMAIN):
|
||||
on_state = MONITOR_STATES[config[CONF_COMMAND_ON]]
|
||||
off_state = MONITOR_STATES[config[CONF_COMMAND_OFF]]
|
||||
for zm_client in hass.data[ZONEMINDER_DOMAIN].values():
|
||||
monitors = zm_client.get_monitors()
|
||||
if not monitors:
|
||||
_LOGGER.warning("Could not fetch monitors from ZoneMinder")
|
||||
return
|
||||
|
||||
switches.append(
|
||||
ZMSwitchMonitors(monitor, on_state, off_state, config_entry)
|
||||
)
|
||||
|
||||
async_add_entities(switches, True)
|
||||
for monitor in monitors:
|
||||
switches.append(ZMSwitchMonitors(monitor, on_state, off_state))
|
||||
add_entities(switches)
|
||||
|
||||
|
||||
class ZMSwitchMonitors(SwitchEntity):
|
||||
@@ -63,25 +43,13 @@ class ZMSwitchMonitors(SwitchEntity):
|
||||
|
||||
icon = "mdi:record-rec"
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
monitor: Monitor,
|
||||
on_state: MonitorState,
|
||||
off_state: MonitorState,
|
||||
config_entry: ConfigEntry,
|
||||
):
|
||||
def __init__(self, monitor, on_state, off_state):
|
||||
"""Initialize the switch."""
|
||||
self._monitor = monitor
|
||||
self._on_state = on_state
|
||||
self._off_state = off_state
|
||||
self._config_entry = config_entry
|
||||
self._state = None
|
||||
|
||||
@property
|
||||
def unique_id(self) -> Optional[str]:
|
||||
"""Return a unique ID."""
|
||||
return f"{self._config_entry.unique_id}_{self._monitor.id}_switch_{self._on_state.value}_{self._off_state.value}"
|
||||
|
||||
@property
|
||||
def name(self):
|
||||
"""Return the name of the switch."""
|
||||
|
@@ -488,7 +488,6 @@ async def async_process_ha_core_config(hass: HomeAssistant, config: Dict) -> Non
|
||||
CONF_UNIT_SYSTEM,
|
||||
CONF_EXTERNAL_URL,
|
||||
CONF_INTERNAL_URL,
|
||||
CONF_MEDIA_DIRS,
|
||||
]
|
||||
):
|
||||
hac.config_source = SOURCE_YAML
|
||||
|
@@ -1,13 +1,13 @@
|
||||
"""Constants used by Home Assistant components."""
|
||||
MAJOR_VERSION = 0
|
||||
MINOR_VERSION = 116
|
||||
PATCH_VERSION = "0b0"
|
||||
PATCH_VERSION = "4"
|
||||
__short_version__ = f"{MAJOR_VERSION}.{MINOR_VERSION}"
|
||||
__version__ = f"{__short_version__}.{PATCH_VERSION}"
|
||||
REQUIRED_PYTHON_VER = (3, 7, 1)
|
||||
# Truthy date string triggers showing related deprecation warning messages.
|
||||
REQUIRED_NEXT_PYTHON_VER = (3, 8, 0)
|
||||
REQUIRED_NEXT_PYTHON_DATE = ""
|
||||
REQUIRED_NEXT_PYTHON_DATE = "December 7, 2020"
|
||||
|
||||
# Format for platform files
|
||||
PLATFORM_FORMAT = "{platform}.{domain}"
|
||||
@@ -623,3 +623,7 @@ CLOUD_NEVER_EXPOSED_ENTITIES = ["group.all_locks"]
|
||||
|
||||
# The ID of the Home Assistant Cast App
|
||||
CAST_APP_ID_HOMEASSISTANT = "B12CE3CA"
|
||||
|
||||
# The tracker error allow when converting
|
||||
# loop time to human readable time
|
||||
MAX_TIME_TRACKING_ERROR = 0.001
|
||||
|
@@ -538,7 +538,7 @@ class Event:
|
||||
event_type: str,
|
||||
data: Optional[Dict[str, Any]] = None,
|
||||
origin: EventOrigin = EventOrigin.local,
|
||||
time_fired: Optional[int] = None,
|
||||
time_fired: Optional[datetime.datetime] = None,
|
||||
context: Optional[Context] = None,
|
||||
) -> None:
|
||||
"""Initialize a new event."""
|
||||
|
@@ -1,8 +1,6 @@
|
||||
"""The exceptions used by Home Assistant."""
|
||||
from typing import TYPE_CHECKING, Optional
|
||||
|
||||
import jinja2
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from .core import Context # noqa: F401 pylint: disable=unused-import
|
||||
|
||||
@@ -22,7 +20,7 @@ class NoEntitySpecifiedError(HomeAssistantError):
|
||||
class TemplateError(HomeAssistantError):
|
||||
"""Error during template rendering."""
|
||||
|
||||
def __init__(self, exception: jinja2.TemplateError) -> None:
|
||||
def __init__(self, exception: Exception) -> None:
|
||||
"""Init the error."""
|
||||
super().__init__(f"{exception.__class__.__name__}: {exception}")
|
||||
|
||||
|
@@ -217,6 +217,5 @@ FLOWS = [
|
||||
"yeelight",
|
||||
"zerproc",
|
||||
"zha",
|
||||
"zoneminder",
|
||||
"zwave"
|
||||
]
|
||||
|
@@ -297,7 +297,7 @@ def async_numeric_state_from_config(
|
||||
def state(
|
||||
hass: HomeAssistant,
|
||||
entity: Union[None, str, State],
|
||||
req_state: Union[str, List[str]],
|
||||
req_state: Any,
|
||||
for_period: Optional[timedelta] = None,
|
||||
attribute: Optional[str] = None,
|
||||
) -> bool:
|
||||
@@ -314,17 +314,20 @@ def state(
|
||||
assert isinstance(entity, State)
|
||||
|
||||
if attribute is None:
|
||||
value = entity.state
|
||||
value: Any = entity.state
|
||||
else:
|
||||
value = str(entity.attributes.get(attribute))
|
||||
value = entity.attributes.get(attribute)
|
||||
|
||||
if isinstance(req_state, str):
|
||||
if not isinstance(req_state, list):
|
||||
req_state = [req_state]
|
||||
|
||||
is_state = False
|
||||
for req_state_value in req_state:
|
||||
state_value = req_state_value
|
||||
if INPUT_ENTITY_ID.match(req_state_value) is not None:
|
||||
if (
|
||||
isinstance(req_state_value, str)
|
||||
and INPUT_ENTITY_ID.match(req_state_value) is not None
|
||||
):
|
||||
state_entity = hass.states.get(req_state_value)
|
||||
if not state_entity:
|
||||
continue
|
||||
|
@@ -929,22 +929,44 @@ NUMERIC_STATE_CONDITION_SCHEMA = vol.All(
|
||||
has_at_least_one_key(CONF_BELOW, CONF_ABOVE),
|
||||
)
|
||||
|
||||
STATE_CONDITION_SCHEMA = vol.All(
|
||||
vol.Schema(
|
||||
{
|
||||
vol.Required(CONF_CONDITION): "state",
|
||||
vol.Required(CONF_ENTITY_ID): entity_ids,
|
||||
vol.Optional(CONF_ATTRIBUTE): str,
|
||||
vol.Required(CONF_STATE): vol.Any(str, [str]),
|
||||
vol.Optional(CONF_FOR): positive_time_period,
|
||||
# To support use_trigger_value in automation
|
||||
# Deprecated 2016/04/25
|
||||
vol.Optional("from"): str,
|
||||
}
|
||||
),
|
||||
key_dependency("for", "state"),
|
||||
STATE_CONDITION_BASE_SCHEMA = {
|
||||
vol.Required(CONF_CONDITION): "state",
|
||||
vol.Required(CONF_ENTITY_ID): entity_ids,
|
||||
vol.Optional(CONF_ATTRIBUTE): str,
|
||||
vol.Optional(CONF_FOR): positive_time_period,
|
||||
# To support use_trigger_value in automation
|
||||
# Deprecated 2016/04/25
|
||||
vol.Optional("from"): str,
|
||||
}
|
||||
|
||||
STATE_CONDITION_STATE_SCHEMA = vol.Schema(
|
||||
{
|
||||
**STATE_CONDITION_BASE_SCHEMA,
|
||||
vol.Required(CONF_STATE): vol.Any(str, [str]),
|
||||
}
|
||||
)
|
||||
|
||||
STATE_CONDITION_ATTRIBUTE_SCHEMA = vol.Schema(
|
||||
{
|
||||
**STATE_CONDITION_BASE_SCHEMA,
|
||||
vol.Required(CONF_STATE): match_all,
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
def STATE_CONDITION_SCHEMA(value: Any) -> dict: # pylint: disable=invalid-name
|
||||
"""Validate a state condition."""
|
||||
if not isinstance(value, dict):
|
||||
raise vol.Invalid("Expected a dictionary")
|
||||
|
||||
if CONF_ATTRIBUTE in value:
|
||||
validated: dict = STATE_CONDITION_ATTRIBUTE_SCHEMA(value)
|
||||
else:
|
||||
validated = STATE_CONDITION_STATE_SCHEMA(value)
|
||||
|
||||
return key_dependency("for", "state")(validated)
|
||||
|
||||
|
||||
SUN_CONDITION_SCHEMA = vol.All(
|
||||
vol.Schema(
|
||||
{
|
||||
|
@@ -27,6 +27,7 @@ from homeassistant.const import (
|
||||
EVENT_STATE_CHANGED,
|
||||
EVENT_TIME_CHANGED,
|
||||
MATCH_ALL,
|
||||
MAX_TIME_TRACKING_ERROR,
|
||||
SUN_EVENT_SUNRISE,
|
||||
SUN_EVENT_SUNSET,
|
||||
)
|
||||
@@ -40,6 +41,7 @@ from homeassistant.core import (
|
||||
)
|
||||
from homeassistant.exceptions import TemplateError
|
||||
from homeassistant.helpers.entity_registry import EVENT_ENTITY_REGISTRY_UPDATED
|
||||
from homeassistant.helpers.ratelimit import KeyedRateLimit
|
||||
from homeassistant.helpers.sun import get_astral_event_next
|
||||
from homeassistant.helpers.template import RenderInfo, Template, result_as_boolean
|
||||
from homeassistant.helpers.typing import TemplateVarsType
|
||||
@@ -47,8 +49,6 @@ from homeassistant.loader import bind_hass
|
||||
from homeassistant.util import dt as dt_util
|
||||
from homeassistant.util.async_ import run_callback_threadsafe
|
||||
|
||||
MAX_TIME_TRACKING_ERROR = 0.001
|
||||
|
||||
TRACK_STATE_CHANGE_CALLBACKS = "track_state_change_callbacks"
|
||||
TRACK_STATE_CHANGE_LISTENER = "track_state_change_listener"
|
||||
|
||||
@@ -61,23 +61,39 @@ TRACK_STATE_REMOVED_DOMAIN_LISTENER = "track_state_removed_domain_listener"
|
||||
TRACK_ENTITY_REGISTRY_UPDATED_CALLBACKS = "track_entity_registry_updated_callbacks"
|
||||
TRACK_ENTITY_REGISTRY_UPDATED_LISTENER = "track_entity_registry_updated_listener"
|
||||
|
||||
_TEMPLATE_ALL_LISTENER = "all"
|
||||
_TEMPLATE_DOMAINS_LISTENER = "domains"
|
||||
_TEMPLATE_ENTITIES_LISTENER = "entities"
|
||||
_ALL_LISTENER = "all"
|
||||
_DOMAINS_LISTENER = "domains"
|
||||
_ENTITIES_LISTENER = "entities"
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
||||
@dataclass
|
||||
class TrackStates:
|
||||
"""Class for keeping track of states being tracked.
|
||||
|
||||
all_states: All states on the system are being tracked
|
||||
entities: Entities to track
|
||||
domains: Domains to track
|
||||
"""
|
||||
|
||||
all_states: bool
|
||||
entities: Set
|
||||
domains: Set
|
||||
|
||||
|
||||
@dataclass
|
||||
class TrackTemplate:
|
||||
"""Class for keeping track of a template with variables.
|
||||
|
||||
The template is template to calculate.
|
||||
The variables are variables to pass to the template.
|
||||
The rate_limit is a rate limit on how often the template is re-rendered.
|
||||
"""
|
||||
|
||||
template: Template
|
||||
variables: TemplateVarsType
|
||||
rate_limit: Optional[timedelta] = None
|
||||
|
||||
|
||||
@dataclass
|
||||
@@ -217,6 +233,9 @@ def async_track_state_change_event(
|
||||
care about the state change events so we can
|
||||
do a fast dict lookup to route events.
|
||||
"""
|
||||
entity_ids = _async_string_to_lower_list(entity_ids)
|
||||
if not entity_ids:
|
||||
return _remove_empty_listener
|
||||
|
||||
entity_callbacks = hass.data.setdefault(TRACK_STATE_CHANGE_CALLBACKS, {})
|
||||
|
||||
@@ -261,6 +280,11 @@ def async_track_state_change_event(
|
||||
return remove_listener
|
||||
|
||||
|
||||
@callback
|
||||
def _remove_empty_listener() -> None:
|
||||
"""Remove a listener that does nothing."""
|
||||
|
||||
|
||||
@callback
|
||||
def _async_remove_indexed_listeners(
|
||||
hass: HomeAssistant,
|
||||
@@ -293,6 +317,9 @@ def async_track_entity_registry_updated_event(
|
||||
|
||||
Similar to async_track_state_change_event.
|
||||
"""
|
||||
entity_ids = _async_string_to_lower_list(entity_ids)
|
||||
if not entity_ids:
|
||||
return _remove_empty_listener
|
||||
|
||||
entity_callbacks = hass.data.setdefault(TRACK_ENTITY_REGISTRY_UPDATED_CALLBACKS, {})
|
||||
|
||||
@@ -365,6 +392,9 @@ def async_track_state_added_domain(
|
||||
action: Callable[[Event], Any],
|
||||
) -> Callable[[], None]:
|
||||
"""Track state change events when an entity is added to domains."""
|
||||
domains = _async_string_to_lower_list(domains)
|
||||
if not domains:
|
||||
return _remove_empty_listener
|
||||
|
||||
domain_callbacks = hass.data.setdefault(TRACK_STATE_ADDED_DOMAIN_CALLBACKS, {})
|
||||
|
||||
@@ -408,6 +438,9 @@ def async_track_state_removed_domain(
|
||||
action: Callable[[Event], Any],
|
||||
) -> Callable[[], None]:
|
||||
"""Track state change events when an entity is removed from domains."""
|
||||
domains = _async_string_to_lower_list(domains)
|
||||
if not domains:
|
||||
return _remove_empty_listener
|
||||
|
||||
domain_callbacks = hass.data.setdefault(TRACK_STATE_REMOVED_DOMAIN_CALLBACKS, {})
|
||||
|
||||
@@ -452,6 +485,158 @@ def _async_string_to_lower_list(instr: Union[str, Iterable[str]]) -> List[str]:
|
||||
return [mstr.lower() for mstr in instr]
|
||||
|
||||
|
||||
class _TrackStateChangeFiltered:
|
||||
"""Handle removal / refresh of tracker."""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
hass: HomeAssistant,
|
||||
track_states: TrackStates,
|
||||
action: Callable[[Event], Any],
|
||||
):
|
||||
"""Handle removal / refresh of tracker init."""
|
||||
self.hass = hass
|
||||
self._action = action
|
||||
self._listeners: Dict[str, Callable] = {}
|
||||
self._last_track_states: TrackStates = track_states
|
||||
|
||||
@callback
|
||||
def async_setup(self) -> None:
|
||||
"""Create listeners to track states."""
|
||||
track_states = self._last_track_states
|
||||
|
||||
if (
|
||||
not track_states.all_states
|
||||
and not track_states.domains
|
||||
and not track_states.entities
|
||||
):
|
||||
return
|
||||
|
||||
if track_states.all_states:
|
||||
self._setup_all_listener()
|
||||
return
|
||||
|
||||
self._setup_domains_listener(track_states.domains)
|
||||
self._setup_entities_listener(track_states.domains, track_states.entities)
|
||||
|
||||
@property
|
||||
def listeners(self) -> Dict:
|
||||
"""State changes that will cause a re-render."""
|
||||
track_states = self._last_track_states
|
||||
return {
|
||||
_ALL_LISTENER: track_states.all_states,
|
||||
_ENTITIES_LISTENER: track_states.entities,
|
||||
_DOMAINS_LISTENER: track_states.domains,
|
||||
}
|
||||
|
||||
@callback
|
||||
def async_update_listeners(self, new_track_states: TrackStates) -> None:
|
||||
"""Update the listeners based on the new TrackStates."""
|
||||
last_track_states = self._last_track_states
|
||||
self._last_track_states = new_track_states
|
||||
|
||||
had_all_listener = last_track_states.all_states
|
||||
|
||||
if new_track_states.all_states:
|
||||
if had_all_listener:
|
||||
return
|
||||
self._cancel_listener(_DOMAINS_LISTENER)
|
||||
self._cancel_listener(_ENTITIES_LISTENER)
|
||||
self._setup_all_listener()
|
||||
return
|
||||
|
||||
if had_all_listener:
|
||||
self._cancel_listener(_ALL_LISTENER)
|
||||
|
||||
domains_changed = new_track_states.domains != last_track_states.domains
|
||||
|
||||
if had_all_listener or domains_changed:
|
||||
domains_changed = True
|
||||
self._cancel_listener(_DOMAINS_LISTENER)
|
||||
self._setup_domains_listener(new_track_states.domains)
|
||||
|
||||
if (
|
||||
had_all_listener
|
||||
or domains_changed
|
||||
or new_track_states.entities != last_track_states.entities
|
||||
):
|
||||
self._cancel_listener(_ENTITIES_LISTENER)
|
||||
self._setup_entities_listener(
|
||||
new_track_states.domains, new_track_states.entities
|
||||
)
|
||||
|
||||
@callback
|
||||
def async_remove(self) -> None:
|
||||
"""Cancel the listeners."""
|
||||
for key in list(self._listeners):
|
||||
self._listeners.pop(key)()
|
||||
|
||||
@callback
|
||||
def _cancel_listener(self, listener_name: str) -> None:
|
||||
if listener_name not in self._listeners:
|
||||
return
|
||||
|
||||
self._listeners.pop(listener_name)()
|
||||
|
||||
@callback
|
||||
def _setup_entities_listener(self, domains: Set, entities: Set) -> None:
|
||||
if domains:
|
||||
entities = entities.copy()
|
||||
entities.update(self.hass.states.async_entity_ids(domains))
|
||||
|
||||
# Entities has changed to none
|
||||
if not entities:
|
||||
return
|
||||
|
||||
self._listeners[_ENTITIES_LISTENER] = async_track_state_change_event(
|
||||
self.hass, entities, self._action
|
||||
)
|
||||
|
||||
@callback
|
||||
def _setup_domains_listener(self, domains: Set) -> None:
|
||||
if not domains:
|
||||
return
|
||||
|
||||
self._listeners[_DOMAINS_LISTENER] = async_track_state_added_domain(
|
||||
self.hass, domains, self._action
|
||||
)
|
||||
|
||||
@callback
|
||||
def _setup_all_listener(self) -> None:
|
||||
self._listeners[_ALL_LISTENER] = self.hass.bus.async_listen(
|
||||
EVENT_STATE_CHANGED, self._action
|
||||
)
|
||||
|
||||
|
||||
@callback
|
||||
@bind_hass
|
||||
def async_track_state_change_filtered(
|
||||
hass: HomeAssistant,
|
||||
track_states: TrackStates,
|
||||
action: Callable[[Event], Any],
|
||||
) -> _TrackStateChangeFiltered:
|
||||
"""Track state changes with a TrackStates filter that can be updated.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
hass
|
||||
Home assistant object.
|
||||
track_states
|
||||
A TrackStates data class.
|
||||
action
|
||||
Callable to call with results.
|
||||
|
||||
Returns
|
||||
-------
|
||||
Object used to update the listeners (async_update_listeners) with a new TrackStates or
|
||||
cancel the tracking (async_remove).
|
||||
|
||||
"""
|
||||
tracker = _TrackStateChangeFiltered(hass, track_states, action)
|
||||
tracker.async_setup()
|
||||
return tracker
|
||||
|
||||
|
||||
@callback
|
||||
@bind_hass
|
||||
def async_track_template(
|
||||
@@ -557,12 +742,11 @@ class _TrackTemplateResultInfo:
|
||||
track_template_.template.hass = hass
|
||||
self._track_templates = track_templates
|
||||
|
||||
self._listeners: Dict[str, Callable] = {}
|
||||
|
||||
self._last_result: Dict[Template, Union[str, TemplateError]] = {}
|
||||
|
||||
self._rate_limit = KeyedRateLimit(hass)
|
||||
self._info: Dict[Template, RenderInfo] = {}
|
||||
self._last_domains: Set = set()
|
||||
self._last_entities: Set = set()
|
||||
self._track_state_changes: Optional[_TrackStateChangeFiltered] = None
|
||||
|
||||
def async_setup(self, raise_on_template_error: bool) -> None:
|
||||
"""Activation of template tracking."""
|
||||
@@ -580,7 +764,9 @@ class _TrackTemplateResultInfo:
|
||||
exc_info=self._info[template].exception,
|
||||
)
|
||||
|
||||
self._create_listeners()
|
||||
self._track_state_changes = async_track_state_change_filtered(
|
||||
self.hass, _render_infos_to_track_states(self._info.values()), self._refresh
|
||||
)
|
||||
_LOGGER.debug(
|
||||
"Template group %s listens for %s",
|
||||
self._track_templates,
|
||||
@@ -590,182 +776,101 @@ class _TrackTemplateResultInfo:
|
||||
@property
|
||||
def listeners(self) -> Dict:
|
||||
"""State changes that will cause a re-render."""
|
||||
return {
|
||||
"all": _TEMPLATE_ALL_LISTENER in self._listeners,
|
||||
"entities": self._last_entities,
|
||||
"domains": self._last_domains,
|
||||
}
|
||||
|
||||
@property
|
||||
def _needs_all_listener(self) -> bool:
|
||||
for info in self._info.values():
|
||||
# Tracking all states
|
||||
if info.all_states or info.all_states_lifecycle:
|
||||
return True
|
||||
|
||||
# Previous call had an exception
|
||||
# so we do not know which states
|
||||
# to track
|
||||
if info.exception:
|
||||
return True
|
||||
|
||||
return False
|
||||
|
||||
@property
|
||||
def _all_templates_are_static(self) -> bool:
|
||||
for info in self._info.values():
|
||||
if not info.is_static:
|
||||
return False
|
||||
|
||||
return True
|
||||
|
||||
@callback
|
||||
def _create_listeners(self) -> None:
|
||||
if self._all_templates_are_static:
|
||||
return
|
||||
|
||||
if self._needs_all_listener:
|
||||
self._setup_all_listener()
|
||||
return
|
||||
|
||||
self._last_entities, self._last_domains = _entities_domains_from_info(
|
||||
self._info.values()
|
||||
)
|
||||
self._setup_domains_listener(self._last_domains)
|
||||
self._setup_entities_listener(self._last_domains, self._last_entities)
|
||||
|
||||
@callback
|
||||
def _cancel_listener(self, listener_name: str) -> None:
|
||||
if listener_name not in self._listeners:
|
||||
return
|
||||
|
||||
self._listeners.pop(listener_name)()
|
||||
|
||||
@callback
|
||||
def _update_listeners(self) -> None:
|
||||
had_all_listener = _TEMPLATE_ALL_LISTENER in self._listeners
|
||||
|
||||
if self._needs_all_listener:
|
||||
if had_all_listener:
|
||||
return
|
||||
self._last_domains = set()
|
||||
self._last_entities = set()
|
||||
self._cancel_listener(_TEMPLATE_DOMAINS_LISTENER)
|
||||
self._cancel_listener(_TEMPLATE_ENTITIES_LISTENER)
|
||||
self._setup_all_listener()
|
||||
return
|
||||
|
||||
if had_all_listener:
|
||||
self._cancel_listener(_TEMPLATE_ALL_LISTENER)
|
||||
|
||||
entities, domains = _entities_domains_from_info(self._info.values())
|
||||
domains_changed = domains != self._last_domains
|
||||
|
||||
if had_all_listener or domains_changed:
|
||||
domains_changed = True
|
||||
self._cancel_listener(_TEMPLATE_DOMAINS_LISTENER)
|
||||
self._setup_domains_listener(domains)
|
||||
|
||||
if had_all_listener or domains_changed or entities != self._last_entities:
|
||||
self._cancel_listener(_TEMPLATE_ENTITIES_LISTENER)
|
||||
self._setup_entities_listener(domains, entities)
|
||||
|
||||
self._last_domains = domains
|
||||
self._last_entities = entities
|
||||
|
||||
@callback
|
||||
def _setup_entities_listener(self, domains: Set, entities: Set) -> None:
|
||||
if domains:
|
||||
entities = entities.copy()
|
||||
entities.update(self.hass.states.async_entity_ids(domains))
|
||||
|
||||
# Entities has changed to none
|
||||
if not entities:
|
||||
return
|
||||
|
||||
self._listeners[_TEMPLATE_ENTITIES_LISTENER] = async_track_state_change_event(
|
||||
self.hass, entities, self._refresh
|
||||
)
|
||||
|
||||
@callback
|
||||
def _setup_domains_listener(self, domains: Set) -> None:
|
||||
if not domains:
|
||||
return
|
||||
|
||||
self._listeners[_TEMPLATE_DOMAINS_LISTENER] = async_track_state_added_domain(
|
||||
self.hass, domains, self._refresh
|
||||
)
|
||||
|
||||
@callback
|
||||
def _setup_all_listener(self) -> None:
|
||||
self._listeners[_TEMPLATE_ALL_LISTENER] = self.hass.bus.async_listen(
|
||||
EVENT_STATE_CHANGED, self._refresh
|
||||
)
|
||||
assert self._track_state_changes
|
||||
return self._track_state_changes.listeners
|
||||
|
||||
@callback
|
||||
def async_remove(self) -> None:
|
||||
"""Cancel the listener."""
|
||||
for key in list(self._listeners):
|
||||
self._listeners.pop(key)()
|
||||
assert self._track_state_changes
|
||||
self._track_state_changes.async_remove()
|
||||
self._rate_limit.async_remove()
|
||||
|
||||
@callback
|
||||
def async_refresh(self) -> None:
|
||||
"""Force recalculate the template."""
|
||||
self._refresh(None)
|
||||
|
||||
@callback
|
||||
def _event_triggers_template(self, template: Template, event: Event) -> bool:
|
||||
"""Determine if a template should be re-rendered from an event."""
|
||||
entity_id = event.data.get(ATTR_ENTITY_ID)
|
||||
return (
|
||||
self._info[template].filter(entity_id)
|
||||
or event.data.get("new_state") is None
|
||||
or event.data.get("old_state") is None
|
||||
and self._info[template].filter_lifecycle(entity_id)
|
||||
)
|
||||
def _render_template_if_ready(
|
||||
self,
|
||||
track_template_: TrackTemplate,
|
||||
now: datetime,
|
||||
event: Optional[Event],
|
||||
) -> Union[bool, TrackTemplateResult]:
|
||||
"""Re-render the template if conditions match.
|
||||
|
||||
Returns False if the template was not be re-rendered
|
||||
|
||||
Returns True if the template re-rendered and did not
|
||||
change.
|
||||
|
||||
Returns TrackTemplateResult if the template re-render
|
||||
generates a new result.
|
||||
"""
|
||||
template = track_template_.template
|
||||
|
||||
if event:
|
||||
info = self._info[template]
|
||||
|
||||
if not self._rate_limit.async_has_timer(
|
||||
template
|
||||
) and not _event_triggers_rerender(event, info):
|
||||
return False
|
||||
|
||||
if self._rate_limit.async_schedule_action(
|
||||
template,
|
||||
_rate_limit_for_event(event, info, track_template_),
|
||||
now,
|
||||
self._refresh,
|
||||
event,
|
||||
):
|
||||
return False
|
||||
|
||||
_LOGGER.debug(
|
||||
"Template update %s triggered by event: %s",
|
||||
template.template,
|
||||
event,
|
||||
)
|
||||
|
||||
self._rate_limit.async_triggered(template, now)
|
||||
self._info[template] = template.async_render_to_info(track_template_.variables)
|
||||
|
||||
try:
|
||||
result: Union[str, TemplateError] = self._info[template].result()
|
||||
except TemplateError as ex:
|
||||
result = ex
|
||||
|
||||
last_result = self._last_result.get(template)
|
||||
|
||||
# Check to see if the result has changed
|
||||
if result == last_result:
|
||||
return True
|
||||
|
||||
if isinstance(result, TemplateError) and isinstance(last_result, TemplateError):
|
||||
return True
|
||||
|
||||
return TrackTemplateResult(template, last_result, result)
|
||||
|
||||
@callback
|
||||
def _refresh(self, event: Optional[Event]) -> None:
|
||||
updates = []
|
||||
info_changed = False
|
||||
now = dt_util.utcnow()
|
||||
|
||||
for track_template_ in self._track_templates:
|
||||
template = track_template_.template
|
||||
if event:
|
||||
if not self._event_triggers_template(template, event):
|
||||
continue
|
||||
update = self._render_template_if_ready(track_template_, now, event)
|
||||
if not update:
|
||||
continue
|
||||
|
||||
_LOGGER.debug(
|
||||
"Template update %s triggered by event: %s",
|
||||
template.template,
|
||||
event,
|
||||
)
|
||||
|
||||
self._info[template] = template.async_render_to_info(
|
||||
track_template_.variables
|
||||
)
|
||||
info_changed = True
|
||||
|
||||
try:
|
||||
result: Union[str, TemplateError] = self._info[template].result()
|
||||
except TemplateError as ex:
|
||||
result = ex
|
||||
|
||||
last_result = self._last_result.get(template)
|
||||
|
||||
# Check to see if the result has changed
|
||||
if result == last_result:
|
||||
continue
|
||||
|
||||
if isinstance(result, TemplateError) and isinstance(
|
||||
last_result, TemplateError
|
||||
):
|
||||
continue
|
||||
|
||||
updates.append(TrackTemplateResult(template, last_result, result))
|
||||
if isinstance(update, TrackTemplateResult):
|
||||
updates.append(update)
|
||||
|
||||
if info_changed:
|
||||
self._update_listeners()
|
||||
assert self._track_state_changes
|
||||
self._track_state_changes.async_update_listeners(
|
||||
_render_infos_to_track_states(self._info.values()),
|
||||
)
|
||||
_LOGGER.debug(
|
||||
"Template group %s listens for %s",
|
||||
self._track_templates,
|
||||
@@ -1229,7 +1334,10 @@ def process_state_match(
|
||||
return lambda state: state in parameter_set
|
||||
|
||||
|
||||
def _entities_domains_from_info(render_infos: Iterable[RenderInfo]) -> Tuple[Set, Set]:
|
||||
@callback
|
||||
def _entities_domains_from_render_infos(
|
||||
render_infos: Iterable[RenderInfo],
|
||||
) -> Tuple[Set, Set]:
|
||||
"""Combine from multiple RenderInfo."""
|
||||
entities = set()
|
||||
domains = set()
|
||||
@@ -1242,3 +1350,65 @@ def _entities_domains_from_info(render_infos: Iterable[RenderInfo]) -> Tuple[Set
|
||||
if render_info.domains_lifecycle:
|
||||
domains.update(render_info.domains_lifecycle)
|
||||
return entities, domains
|
||||
|
||||
|
||||
@callback
|
||||
def _render_infos_needs_all_listener(render_infos: Iterable[RenderInfo]) -> bool:
|
||||
"""Determine if an all listener is needed from RenderInfo."""
|
||||
for render_info in render_infos:
|
||||
# Tracking all states
|
||||
if render_info.all_states or render_info.all_states_lifecycle:
|
||||
return True
|
||||
|
||||
# Previous call had an exception
|
||||
# so we do not know which states
|
||||
# to track
|
||||
if render_info.exception:
|
||||
return True
|
||||
|
||||
return False
|
||||
|
||||
|
||||
@callback
|
||||
def _render_infos_to_track_states(render_infos: Iterable[RenderInfo]) -> TrackStates:
|
||||
"""Create a TrackStates dataclass from the latest RenderInfo."""
|
||||
if _render_infos_needs_all_listener(render_infos):
|
||||
return TrackStates(True, set(), set())
|
||||
|
||||
return TrackStates(False, *_entities_domains_from_render_infos(render_infos))
|
||||
|
||||
|
||||
@callback
|
||||
def _event_triggers_rerender(event: Event, info: RenderInfo) -> bool:
|
||||
"""Determine if a template should be re-rendered from an event."""
|
||||
entity_id = event.data.get(ATTR_ENTITY_ID)
|
||||
|
||||
if info.filter(entity_id):
|
||||
return True
|
||||
|
||||
if (
|
||||
event.data.get("new_state") is not None
|
||||
and event.data.get("old_state") is not None
|
||||
):
|
||||
return False
|
||||
|
||||
return bool(info.filter_lifecycle(entity_id))
|
||||
|
||||
|
||||
@callback
|
||||
def _rate_limit_for_event(
|
||||
event: Event, info: RenderInfo, track_template_: TrackTemplate
|
||||
) -> Optional[timedelta]:
|
||||
"""Determine the rate limit for an event."""
|
||||
entity_id = event.data.get(ATTR_ENTITY_ID)
|
||||
|
||||
# Specifically referenced entities are excluded
|
||||
# from the rate limit
|
||||
if entity_id in info.entities:
|
||||
return None
|
||||
|
||||
if track_template_.rate_limit is not None:
|
||||
return track_template_.rate_limit
|
||||
|
||||
rate_limit: Optional[timedelta] = info.rate_limit
|
||||
return rate_limit
|
||||
|
97
homeassistant/helpers/ratelimit.py
Normal file
97
homeassistant/helpers/ratelimit.py
Normal file
@@ -0,0 +1,97 @@
|
||||
"""Ratelimit helper."""
|
||||
import asyncio
|
||||
from datetime import datetime, timedelta
|
||||
import logging
|
||||
from typing import Any, Callable, Dict, Hashable, Optional
|
||||
|
||||
from homeassistant.const import MAX_TIME_TRACKING_ERROR
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
import homeassistant.util.dt as dt_util
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class KeyedRateLimit:
|
||||
"""Class to track rate limits."""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
hass: HomeAssistant,
|
||||
):
|
||||
"""Initialize ratelimit tracker."""
|
||||
self.hass = hass
|
||||
self._last_triggered: Dict[Hashable, datetime] = {}
|
||||
self._rate_limit_timers: Dict[Hashable, asyncio.TimerHandle] = {}
|
||||
|
||||
@callback
|
||||
def async_has_timer(self, key: Hashable) -> bool:
|
||||
"""Check if a rate limit timer is running."""
|
||||
return key in self._rate_limit_timers
|
||||
|
||||
@callback
|
||||
def async_triggered(self, key: Hashable, now: Optional[datetime] = None) -> None:
|
||||
"""Call when the action we are tracking was triggered."""
|
||||
self.async_cancel_timer(key)
|
||||
self._last_triggered[key] = now or dt_util.utcnow()
|
||||
|
||||
@callback
|
||||
def async_cancel_timer(self, key: Hashable) -> None:
|
||||
"""Cancel a rate limit time that will call the action."""
|
||||
if not self.async_has_timer(key):
|
||||
return
|
||||
|
||||
self._rate_limit_timers.pop(key).cancel()
|
||||
|
||||
@callback
|
||||
def async_remove(self) -> None:
|
||||
"""Remove all timers."""
|
||||
for timer in self._rate_limit_timers.values():
|
||||
timer.cancel()
|
||||
self._rate_limit_timers.clear()
|
||||
|
||||
@callback
|
||||
def async_schedule_action(
|
||||
self,
|
||||
key: Hashable,
|
||||
rate_limit: Optional[timedelta],
|
||||
now: datetime,
|
||||
action: Callable,
|
||||
*args: Any,
|
||||
) -> Optional[datetime]:
|
||||
"""Check rate limits and schedule an action if we hit the limit.
|
||||
|
||||
If the rate limit is hit:
|
||||
Schedules the action for when the rate limit expires
|
||||
if there are no pending timers. The action must
|
||||
be called in async.
|
||||
|
||||
Returns the time the rate limit will expire
|
||||
|
||||
If the rate limit is not hit:
|
||||
|
||||
Return None
|
||||
"""
|
||||
if rate_limit is None or key not in self._last_triggered:
|
||||
return None
|
||||
|
||||
next_call_time = self._last_triggered[key] + rate_limit
|
||||
|
||||
if next_call_time <= now:
|
||||
self.async_cancel_timer(key)
|
||||
return None
|
||||
|
||||
_LOGGER.debug(
|
||||
"Reached rate limit of %s for %s and deferred action until %s",
|
||||
rate_limit,
|
||||
key,
|
||||
next_call_time,
|
||||
)
|
||||
|
||||
if key not in self._rate_limit_timers:
|
||||
self._rate_limit_timers[key] = self.hass.loop.call_later(
|
||||
(next_call_time - now).total_seconds() + MAX_TIME_TRACKING_ERROR,
|
||||
action,
|
||||
*args,
|
||||
)
|
||||
|
||||
return next_call_time
|
@@ -123,30 +123,71 @@ def make_script_schema(schema, default_script_mode, extra=vol.PREVENT_EXTRA):
|
||||
)
|
||||
|
||||
|
||||
STATIC_VALIDATION_ACTION_TYPES = (
|
||||
cv.SCRIPT_ACTION_CALL_SERVICE,
|
||||
cv.SCRIPT_ACTION_DELAY,
|
||||
cv.SCRIPT_ACTION_WAIT_TEMPLATE,
|
||||
cv.SCRIPT_ACTION_FIRE_EVENT,
|
||||
cv.SCRIPT_ACTION_ACTIVATE_SCENE,
|
||||
cv.SCRIPT_ACTION_VARIABLES,
|
||||
)
|
||||
|
||||
|
||||
async def async_validate_actions_config(
|
||||
hass: HomeAssistant, actions: List[ConfigType]
|
||||
) -> List[ConfigType]:
|
||||
"""Validate a list of actions."""
|
||||
return await asyncio.gather(
|
||||
*[async_validate_action_config(hass, action) for action in actions]
|
||||
)
|
||||
|
||||
|
||||
async def async_validate_action_config(
|
||||
hass: HomeAssistant, config: ConfigType
|
||||
) -> ConfigType:
|
||||
"""Validate config."""
|
||||
action_type = cv.determine_script_action(config)
|
||||
|
||||
if action_type == cv.SCRIPT_ACTION_DEVICE_AUTOMATION:
|
||||
if action_type in STATIC_VALIDATION_ACTION_TYPES:
|
||||
pass
|
||||
|
||||
elif action_type == cv.SCRIPT_ACTION_DEVICE_AUTOMATION:
|
||||
platform = await device_automation.async_get_device_automation_platform(
|
||||
hass, config[CONF_DOMAIN], "action"
|
||||
)
|
||||
config = platform.ACTION_SCHEMA(config) # type: ignore
|
||||
elif (
|
||||
action_type == cv.SCRIPT_ACTION_CHECK_CONDITION
|
||||
and config[CONF_CONDITION] == "device"
|
||||
):
|
||||
platform = await device_automation.async_get_device_automation_platform(
|
||||
hass, config[CONF_DOMAIN], "condition"
|
||||
)
|
||||
config = platform.CONDITION_SCHEMA(config) # type: ignore
|
||||
|
||||
elif action_type == cv.SCRIPT_ACTION_CHECK_CONDITION:
|
||||
if config[CONF_CONDITION] == "device":
|
||||
platform = await device_automation.async_get_device_automation_platform(
|
||||
hass, config[CONF_DOMAIN], "condition"
|
||||
)
|
||||
config = platform.CONDITION_SCHEMA(config) # type: ignore
|
||||
|
||||
elif action_type == cv.SCRIPT_ACTION_WAIT_FOR_TRIGGER:
|
||||
config[CONF_WAIT_FOR_TRIGGER] = await async_validate_trigger_config(
|
||||
hass, config[CONF_WAIT_FOR_TRIGGER]
|
||||
)
|
||||
|
||||
elif action_type == cv.SCRIPT_ACTION_REPEAT:
|
||||
config[CONF_SEQUENCE] = await async_validate_actions_config(
|
||||
hass, config[CONF_REPEAT][CONF_SEQUENCE]
|
||||
)
|
||||
|
||||
elif action_type == cv.SCRIPT_ACTION_CHOOSE:
|
||||
if CONF_DEFAULT in config:
|
||||
config[CONF_DEFAULT] = await async_validate_actions_config(
|
||||
hass, config[CONF_DEFAULT]
|
||||
)
|
||||
|
||||
for choose_conf in config[CONF_CHOOSE]:
|
||||
choose_conf[CONF_SEQUENCE] = await async_validate_actions_config(
|
||||
hass, choose_conf[CONF_SEQUENCE]
|
||||
)
|
||||
|
||||
else:
|
||||
raise ValueError(f"No validation for {action_type}")
|
||||
|
||||
return config
|
||||
|
||||
|
||||
@@ -850,7 +891,7 @@ class Script:
|
||||
|
||||
entity_ids = data.get(ATTR_ENTITY_ID)
|
||||
|
||||
if entity_ids is None:
|
||||
if entity_ids is None or isinstance(entity_ids, template.Template):
|
||||
continue
|
||||
|
||||
if isinstance(entity_ids, str):
|
||||
|
@@ -72,6 +72,8 @@ _COLLECTABLE_STATE_ATTRIBUTES = {
|
||||
"name",
|
||||
}
|
||||
|
||||
DEFAULT_RATE_LIMIT = timedelta(minutes=1)
|
||||
|
||||
|
||||
@bind_hass
|
||||
def attach(hass: HomeAssistantType, obj: Any) -> None:
|
||||
@@ -198,10 +200,11 @@ class RenderInfo:
|
||||
self.domains = set()
|
||||
self.domains_lifecycle = set()
|
||||
self.entities = set()
|
||||
self.rate_limit = None
|
||||
|
||||
def __repr__(self) -> str:
|
||||
"""Representation of RenderInfo."""
|
||||
return f"<RenderInfo {self.template} all_states={self.all_states} all_states_lifecycle={self.all_states_lifecycle} domains={self.domains} domains_lifecycle={self.domains_lifecycle} entities={self.entities}>"
|
||||
return f"<RenderInfo {self.template} all_states={self.all_states} all_states_lifecycle={self.all_states_lifecycle} domains={self.domains} domains_lifecycle={self.domains_lifecycle} entities={self.entities} rate_limit={self.rate_limit}>"
|
||||
|
||||
def _filter_domains_and_entities(self, entity_id: str) -> bool:
|
||||
"""Template should re-render if the entity state changes when we match specific domains or entities."""
|
||||
@@ -221,16 +224,24 @@ class RenderInfo:
|
||||
|
||||
def _freeze_static(self) -> None:
|
||||
self.is_static = True
|
||||
self.entities = frozenset(self.entities)
|
||||
self.domains = frozenset(self.domains)
|
||||
self.domains_lifecycle = frozenset(self.domains_lifecycle)
|
||||
self._freeze_sets()
|
||||
self.all_states = False
|
||||
|
||||
def _freeze(self) -> None:
|
||||
def _freeze_sets(self) -> None:
|
||||
self.entities = frozenset(self.entities)
|
||||
self.domains = frozenset(self.domains)
|
||||
self.domains_lifecycle = frozenset(self.domains_lifecycle)
|
||||
|
||||
def _freeze(self) -> None:
|
||||
self._freeze_sets()
|
||||
|
||||
if self.rate_limit is None and (
|
||||
self.domains or self.domains_lifecycle or self.all_states or self.exception
|
||||
):
|
||||
# If the template accesses all states or an entire
|
||||
# domain, and no rate limit is set, we use the default.
|
||||
self.rate_limit = DEFAULT_RATE_LIMIT
|
||||
|
||||
if self.exception:
|
||||
return
|
||||
|
||||
@@ -319,7 +330,7 @@ class Template:
|
||||
|
||||
try:
|
||||
return compiled.render(kwargs).strip()
|
||||
except jinja2.TemplateError as err:
|
||||
except Exception as err: # pylint: disable=broad-except
|
||||
raise TemplateError(err) from err
|
||||
|
||||
async def async_render_will_timeout(
|
||||
@@ -576,17 +587,18 @@ class DomainStates:
|
||||
class TemplateState(State):
|
||||
"""Class to represent a state object in a template."""
|
||||
|
||||
__slots__ = ("_hass", "_state")
|
||||
__slots__ = ("_hass", "_state", "_collect")
|
||||
|
||||
# Inheritance is done so functions that check against State keep working
|
||||
# pylint: disable=super-init-not-called
|
||||
def __init__(self, hass, state):
|
||||
def __init__(self, hass, state, collect=True):
|
||||
"""Initialize template state."""
|
||||
self._hass = hass
|
||||
self._state = state
|
||||
self._collect = collect
|
||||
|
||||
def _collect_state(self):
|
||||
if _RENDER_INFO in self._hass.data:
|
||||
if self._collect and _RENDER_INFO in self._hass.data:
|
||||
self._hass.data[_RENDER_INFO].entities.add(self._state.entity_id)
|
||||
|
||||
# Jinja will try __getitem__ first and it avoids the need
|
||||
@@ -595,7 +607,7 @@ class TemplateState(State):
|
||||
"""Return a property as an attribute for jinja."""
|
||||
if item in _COLLECTABLE_STATE_ATTRIBUTES:
|
||||
# _collect_state inlined here for performance
|
||||
if _RENDER_INFO in self._hass.data:
|
||||
if self._collect and _RENDER_INFO in self._hass.data:
|
||||
self._hass.data[_RENDER_INFO].entities.add(self._state.entity_id)
|
||||
return getattr(self._state, item)
|
||||
if item == "entity_id":
|
||||
@@ -686,7 +698,7 @@ def _collect_state(hass: HomeAssistantType, entity_id: str) -> None:
|
||||
def _state_generator(hass: HomeAssistantType, domain: Optional[str]) -> Generator:
|
||||
"""State generator for a domain or all states."""
|
||||
for state in sorted(hass.states.async_all(domain), key=attrgetter("entity_id")):
|
||||
yield TemplateState(hass, state)
|
||||
yield TemplateState(hass, state, collect=False)
|
||||
|
||||
|
||||
def _get_state_if_valid(
|
||||
|
@@ -13,11 +13,11 @@ defusedxml==0.6.0
|
||||
distro==1.5.0
|
||||
emoji==0.5.4
|
||||
hass-nabucasa==0.37.0
|
||||
home-assistant-frontend==20200930.0
|
||||
home-assistant-frontend==20201001.2
|
||||
importlib-metadata==1.6.0;python_version<'3.8'
|
||||
jinja2>=2.11.2
|
||||
netdisco==2.8.2
|
||||
paho-mqtt==1.5.1
|
||||
paho-mqtt==1.5.0
|
||||
pillow==7.2.0
|
||||
pip>=8.0.3
|
||||
python-slugify==4.0.1
|
||||
|
34
machine/intel-nuc
Normal file
34
machine/intel-nuc
Normal file
@@ -0,0 +1,34 @@
|
||||
ARG BUILD_VERSION
|
||||
FROM homeassistant/amd64-homeassistant:$BUILD_VERSION
|
||||
|
||||
RUN apk --no-cache add \
|
||||
libva-intel-driver \
|
||||
usbutils
|
||||
|
||||
##
|
||||
# Build libcec for HDMI-CEC
|
||||
ARG LIBCEC_VERSION=6.0.2
|
||||
RUN apk add --no-cache \
|
||||
eudev-libs \
|
||||
p8-platform \
|
||||
&& apk add --no-cache --virtual .build-dependencies \
|
||||
build-base \
|
||||
cmake \
|
||||
eudev-dev \
|
||||
swig \
|
||||
p8-platform-dev \
|
||||
linux-headers \
|
||||
&& git clone --depth 1 -b libcec-${LIBCEC_VERSION} https://github.com/Pulse-Eight/libcec /usr/src/libcec \
|
||||
&& cd /usr/src/libcec \
|
||||
&& mkdir -p /usr/src/libcec/build \
|
||||
&& cd /usr/src/libcec/build \
|
||||
&& cmake -DCMAKE_INSTALL_PREFIX:PATH=/usr/local \
|
||||
-DPYTHON_LIBRARY="/usr/local/lib/libpython3.8.so" \
|
||||
-DPYTHON_INCLUDE_DIR="/usr/local/include/python3.8" \
|
||||
-DHAVE_LINUX_API=1 \
|
||||
.. \
|
||||
&& make -j$(nproc) \
|
||||
&& make install \
|
||||
&& echo "cec" > "/usr/local/lib/python3.8/site-packages/cec.pth" \
|
||||
&& apk del .build-dependencies \
|
||||
&& rm -rf /usr/src/libcec*
|
34
machine/odroid-c2
Normal file
34
machine/odroid-c2
Normal file
@@ -0,0 +1,34 @@
|
||||
ARG BUILD_VERSION
|
||||
FROM homeassistant/aarch64-homeassistant:$BUILD_VERSION
|
||||
|
||||
RUN apk --no-cache add \
|
||||
usbutils
|
||||
|
||||
##
|
||||
# Build libcec for HDMI-CEC
|
||||
ARG LIBCEC_VERSION=6.0.2
|
||||
RUN apk add --no-cache \
|
||||
eudev-libs \
|
||||
p8-platform \
|
||||
&& apk add --no-cache --virtual .build-dependencies \
|
||||
build-base \
|
||||
cmake \
|
||||
eudev-dev \
|
||||
swig \
|
||||
p8-platform-dev \
|
||||
linux-headers \
|
||||
&& git clone --depth 1 -b libcec-${LIBCEC_VERSION} https://github.com/Pulse-Eight/libcec /usr/src/libcec \
|
||||
&& cd /usr/src/libcec \
|
||||
&& mkdir -p /usr/src/libcec/build \
|
||||
&& cd /usr/src/libcec/build \
|
||||
&& cmake -DCMAKE_INSTALL_PREFIX:PATH=/usr/local \
|
||||
-DPYTHON_LIBRARY="/usr/local/lib/libpython3.8.so" \
|
||||
-DPYTHON_INCLUDE_DIR="/usr/local/include/python3.8" \
|
||||
-DHAVE_LINUX_API=1 \
|
||||
-DHAVE_AOCEC_API=1 \
|
||||
.. \
|
||||
&& make -j$(nproc) \
|
||||
&& make install \
|
||||
&& echo "cec" > "/usr/local/lib/python3.8/site-packages/cec.pth" \
|
||||
&& apk del .build-dependencies \
|
||||
&& rm -rf /usr/src/libcec*
|
34
machine/odroid-n2
Normal file
34
machine/odroid-n2
Normal file
@@ -0,0 +1,34 @@
|
||||
ARG BUILD_VERSION
|
||||
FROM homeassistant/aarch64-homeassistant:$BUILD_VERSION
|
||||
|
||||
RUN apk --no-cache add \
|
||||
usbutils
|
||||
|
||||
##
|
||||
# Build libcec for HDMI-CEC
|
||||
ARG LIBCEC_VERSION=6.0.2
|
||||
RUN apk add --no-cache \
|
||||
eudev-libs \
|
||||
p8-platform \
|
||||
&& apk add --no-cache --virtual .build-dependencies \
|
||||
build-base \
|
||||
cmake \
|
||||
eudev-dev \
|
||||
swig \
|
||||
p8-platform-dev \
|
||||
linux-headers \
|
||||
&& git clone --depth 1 -b libcec-${LIBCEC_VERSION} https://github.com/Pulse-Eight/libcec /usr/src/libcec \
|
||||
&& cd /usr/src/libcec \
|
||||
&& mkdir -p /usr/src/libcec/build \
|
||||
&& cd /usr/src/libcec/build \
|
||||
&& cmake -DCMAKE_INSTALL_PREFIX:PATH=/usr/local \
|
||||
-DPYTHON_LIBRARY="/usr/local/lib/libpython3.8.so" \
|
||||
-DPYTHON_INCLUDE_DIR="/usr/local/include/python3.8" \
|
||||
-DHAVE_LINUX_API=1 \
|
||||
-DHAVE_AOCEC_API=1 \
|
||||
.. \
|
||||
&& make -j$(nproc) \
|
||||
&& make install \
|
||||
&& echo "cec" > "/usr/local/lib/python3.8/site-packages/cec.pth" \
|
||||
&& apk del .build-dependencies \
|
||||
&& rm -rf /usr/src/libcec*
|
34
machine/odroid-xu
Normal file
34
machine/odroid-xu
Normal file
@@ -0,0 +1,34 @@
|
||||
ARG BUILD_VERSION
|
||||
FROM homeassistant/armv7-homeassistant:$BUILD_VERSION
|
||||
|
||||
RUN apk --no-cache add \
|
||||
usbutils
|
||||
|
||||
##
|
||||
# Build libcec for HDMI-CEC
|
||||
ARG LIBCEC_VERSION=6.0.2
|
||||
RUN apk add --no-cache \
|
||||
eudev-libs \
|
||||
p8-platform \
|
||||
&& apk add --no-cache --virtual .build-dependencies \
|
||||
build-base \
|
||||
cmake \
|
||||
eudev-dev \
|
||||
swig \
|
||||
p8-platform-dev \
|
||||
linux-headers \
|
||||
&& git clone --depth 1 -b libcec-${LIBCEC_VERSION} https://github.com/Pulse-Eight/libcec /usr/src/libcec \
|
||||
&& cd /usr/src/libcec \
|
||||
&& mkdir -p /usr/src/libcec/build \
|
||||
&& cd /usr/src/libcec/build \
|
||||
&& cmake -DCMAKE_INSTALL_PREFIX:PATH=/usr/local \
|
||||
-DPYTHON_LIBRARY="/usr/local/lib/libpython3.8.so" \
|
||||
-DPYTHON_INCLUDE_DIR="/usr/local/include/python3.8" \
|
||||
-DHAVE_LINUX_API=1 \
|
||||
-DHAVE_EXYNOS_API=1 \
|
||||
.. \
|
||||
&& make -j$(nproc) \
|
||||
&& make install \
|
||||
&& echo "cec" > "/usr/local/lib/python3.8/site-packages/cec.pth" \
|
||||
&& apk del .build-dependencies \
|
||||
&& rm -rf /usr/src/libcec*
|
33
machine/qemuarm
Normal file
33
machine/qemuarm
Normal file
@@ -0,0 +1,33 @@
|
||||
ARG BUILD_VERSION
|
||||
FROM homeassistant/armhf-homeassistant:$BUILD_VERSION
|
||||
|
||||
RUN apk --no-cache add \
|
||||
usbutils
|
||||
|
||||
##
|
||||
# Build libcec for HDMI-CEC
|
||||
ARG LIBCEC_VERSION=6.0.2
|
||||
RUN apk add --no-cache \
|
||||
eudev-libs \
|
||||
p8-platform \
|
||||
&& apk add --no-cache --virtual .build-dependencies \
|
||||
build-base \
|
||||
cmake \
|
||||
eudev-dev \
|
||||
swig \
|
||||
p8-platform-dev \
|
||||
linux-headers \
|
||||
&& git clone --depth 1 -b libcec-${LIBCEC_VERSION} https://github.com/Pulse-Eight/libcec /usr/src/libcec \
|
||||
&& cd /usr/src/libcec \
|
||||
&& mkdir -p /usr/src/libcec/build \
|
||||
&& cd /usr/src/libcec/build \
|
||||
&& cmake -DCMAKE_INSTALL_PREFIX:PATH=/usr/local \
|
||||
-DPYTHON_LIBRARY="/usr/local/lib/libpython3.8.so" \
|
||||
-DPYTHON_INCLUDE_DIR="/usr/local/include/python3.8" \
|
||||
-DHAVE_LINUX_API=1 \
|
||||
.. \
|
||||
&& make -j$(nproc) \
|
||||
&& make install \
|
||||
&& echo "cec" > "/usr/local/lib/python3.8/site-packages/cec.pth" \
|
||||
&& apk del .build-dependencies \
|
||||
&& rm -rf /usr/src/libcec*
|
33
machine/qemuarm-64
Normal file
33
machine/qemuarm-64
Normal file
@@ -0,0 +1,33 @@
|
||||
ARG BUILD_VERSION
|
||||
FROM homeassistant/aarch64-homeassistant:$BUILD_VERSION
|
||||
|
||||
RUN apk --no-cache add \
|
||||
usbutils
|
||||
|
||||
##
|
||||
# Build libcec for HDMI-CEC
|
||||
ARG LIBCEC_VERSION=6.0.2
|
||||
RUN apk add --no-cache \
|
||||
eudev-libs \
|
||||
p8-platform \
|
||||
&& apk add --no-cache --virtual .build-dependencies \
|
||||
build-base \
|
||||
cmake \
|
||||
eudev-dev \
|
||||
swig \
|
||||
p8-platform-dev \
|
||||
linux-headers \
|
||||
&& git clone --depth 1 -b libcec-${LIBCEC_VERSION} https://github.com/Pulse-Eight/libcec /usr/src/libcec \
|
||||
&& cd /usr/src/libcec \
|
||||
&& mkdir -p /usr/src/libcec/build \
|
||||
&& cd /usr/src/libcec/build \
|
||||
&& cmake -DCMAKE_INSTALL_PREFIX:PATH=/usr/local \
|
||||
-DPYTHON_LIBRARY="/usr/local/lib/libpython3.8.so" \
|
||||
-DPYTHON_INCLUDE_DIR="/usr/local/include/python3.8" \
|
||||
-DHAVE_LINUX_API=1 \
|
||||
.. \
|
||||
&& make -j$(nproc) \
|
||||
&& make install \
|
||||
&& echo "cec" > "/usr/local/lib/python3.8/site-packages/cec.pth" \
|
||||
&& apk del .build-dependencies \
|
||||
&& rm -rf /usr/src/libcec*
|
33
machine/qemux86
Normal file
33
machine/qemux86
Normal file
@@ -0,0 +1,33 @@
|
||||
ARG BUILD_VERSION
|
||||
FROM homeassistant/i386-homeassistant:$BUILD_VERSION
|
||||
|
||||
RUN apk --no-cache add \
|
||||
usbutils
|
||||
|
||||
##
|
||||
# Build libcec for HDMI-CEC
|
||||
ARG LIBCEC_VERSION=6.0.2
|
||||
RUN apk add --no-cache \
|
||||
eudev-libs \
|
||||
p8-platform \
|
||||
&& apk add --no-cache --virtual .build-dependencies \
|
||||
build-base \
|
||||
cmake \
|
||||
eudev-dev \
|
||||
swig \
|
||||
p8-platform-dev \
|
||||
linux-headers \
|
||||
&& git clone --depth 1 -b libcec-${LIBCEC_VERSION} https://github.com/Pulse-Eight/libcec /usr/src/libcec \
|
||||
&& cd /usr/src/libcec \
|
||||
&& mkdir -p /usr/src/libcec/build \
|
||||
&& cd /usr/src/libcec/build \
|
||||
&& cmake -DCMAKE_INSTALL_PREFIX:PATH=/usr/local \
|
||||
-DPYTHON_LIBRARY="/usr/local/lib/libpython3.8.so" \
|
||||
-DPYTHON_INCLUDE_DIR="/usr/local/include/python3.8" \
|
||||
-DHAVE_LINUX_API=1 \
|
||||
.. \
|
||||
&& make -j$(nproc) \
|
||||
&& make install \
|
||||
&& echo "cec" > "/usr/local/lib/python3.8/site-packages/cec.pth" \
|
||||
&& apk del .build-dependencies \
|
||||
&& rm -rf /usr/src/libcec*
|
33
machine/qemux86-64
Normal file
33
machine/qemux86-64
Normal file
@@ -0,0 +1,33 @@
|
||||
ARG BUILD_VERSION
|
||||
FROM homeassistant/amd64-homeassistant:$BUILD_VERSION
|
||||
|
||||
RUN apk --no-cache add \
|
||||
usbutils
|
||||
|
||||
##
|
||||
# Build libcec for HDMI-CEC
|
||||
ARG LIBCEC_VERSION=6.0.2
|
||||
RUN apk add --no-cache \
|
||||
eudev-libs \
|
||||
p8-platform \
|
||||
&& apk add --no-cache --virtual .build-dependencies \
|
||||
build-base \
|
||||
cmake \
|
||||
eudev-dev \
|
||||
swig \
|
||||
p8-platform-dev \
|
||||
linux-headers \
|
||||
&& git clone --depth 1 -b libcec-${LIBCEC_VERSION} https://github.com/Pulse-Eight/libcec /usr/src/libcec \
|
||||
&& cd /usr/src/libcec \
|
||||
&& mkdir -p /usr/src/libcec/build \
|
||||
&& cd /usr/src/libcec/build \
|
||||
&& cmake -DCMAKE_INSTALL_PREFIX:PATH=/usr/local \
|
||||
-DPYTHON_LIBRARY="/usr/local/lib/libpython3.8.so" \
|
||||
-DPYTHON_INCLUDE_DIR="/usr/local/include/python3.8" \
|
||||
-DHAVE_LINUX_API=1 \
|
||||
.. \
|
||||
&& make -j$(nproc) \
|
||||
&& make install \
|
||||
&& echo "cec" > "/usr/local/lib/python3.8/site-packages/cec.pth" \
|
||||
&& apk del .build-dependencies \
|
||||
&& rm -rf /usr/src/libcec*
|
61
machine/raspberrypi
Normal file
61
machine/raspberrypi
Normal file
@@ -0,0 +1,61 @@
|
||||
ARG BUILD_VERSION
|
||||
FROM homeassistant/armhf-homeassistant:$BUILD_VERSION
|
||||
|
||||
RUN apk --no-cache add \
|
||||
raspberrypi \
|
||||
raspberrypi-libs \
|
||||
usbutils \
|
||||
&& sed -i "s|# RPi.GPIO|RPi.GPIO|g" /usr/src/homeassistant/requirements_all.txt \
|
||||
&& pip3 install --no-cache-dir --no-index --only-binary=:all: --find-links "${WHEELS_LINKS}" \
|
||||
RPi.GPIO -c /usr/src/homeassistant/requirements_all.txt
|
||||
|
||||
##
|
||||
# Set symlinks for raspberry pi camera binaries.
|
||||
RUN ln -sv /opt/vc/bin/raspistill /usr/local/bin/raspistill \
|
||||
&& ln -sv /opt/vc/bin/raspivid /usr/local/bin/raspivid \
|
||||
&& ln -sv /opt/vc/bin/raspividyuv /usr/local/bin/raspividyuv \
|
||||
&& ln -sv /opt/vc/bin/raspiyuv /usr/local/bin/raspiyuv
|
||||
|
||||
##
|
||||
# Build libcec with RPi support for HDMI-CEC
|
||||
ARG LIBCEC_VERSION=6.0.2
|
||||
RUN apk add --no-cache \
|
||||
eudev-libs \
|
||||
p8-platform \
|
||||
&& apk add --no-cache --virtual .build-dependencies \
|
||||
build-base \
|
||||
cmake \
|
||||
eudev-dev \
|
||||
swig \
|
||||
raspberrypi-dev \
|
||||
p8-platform-dev \
|
||||
&& git clone --depth 1 -b libcec-${LIBCEC_VERSION} https://github.com/Pulse-Eight/libcec /usr/src/libcec \
|
||||
&& mkdir -p /usr/src/libcec/build \
|
||||
&& cd /usr/src/libcec/build \
|
||||
&& cmake -DCMAKE_INSTALL_PREFIX:PATH=/usr/local \
|
||||
-DRPI_INCLUDE_DIR=/opt/vc/include \
|
||||
-DRPI_LIB_DIR=/opt/vc/lib \
|
||||
-DPYTHON_LIBRARY="/usr/local/lib/libpython3.8.so" \
|
||||
-DPYTHON_INCLUDE_DIR="/usr/local/include/python3.8" \
|
||||
.. \
|
||||
&& make -j$(nproc) \
|
||||
&& make install \
|
||||
&& echo "cec" > "/usr/local/lib/python3.8/site-packages/cec.pth" \
|
||||
&& apk del .build-dependencies \
|
||||
&& rm -rf /usr/src/libcec
|
||||
ENV LD_LIBRARY_PATH=/opt/vc/lib:${LD_LIBRARY_PATH}
|
||||
|
||||
##
|
||||
# Install DHT
|
||||
RUN apk add --no-cache --virtual .build-dependencies \
|
||||
gcc libc-dev raspberrypi-dev \
|
||||
&& export DHT_VERSION="$(cat /usr/src/homeassistant/requirements_all.txt | sed -n 's|.*Adafruit-DHT==\([0-9\.]*\).*|\1|p')" \
|
||||
&& git clone --depth 1 -b ${DHT_VERSION} https://github.com/adafruit/Adafruit_Python_DHT /usr/src/dht \
|
||||
&& cd /usr/src/dht \
|
||||
&& sed -i 's/^pi_version\ =\ None/pi_version\ =\ 1/' setup.py \
|
||||
&& sed -i 's/^platform\ =\ platform_detect.UNKNOWN/platform\ =\ platform_detect.RASPBERRY_PI/' setup.py \
|
||||
&& sed -i 's/platform\ =\ platform_detect.platform_detect()/pass/' setup.py \
|
||||
&& export MAKEFLAGS="-j$(nproc)" \
|
||||
&& pip3 install . \
|
||||
&& apk del .build-dependencies \
|
||||
&& rm -rf /usr/src/dht
|
61
machine/raspberrypi2
Normal file
61
machine/raspberrypi2
Normal file
@@ -0,0 +1,61 @@
|
||||
ARG BUILD_VERSION
|
||||
FROM homeassistant/armv7-homeassistant:$BUILD_VERSION
|
||||
|
||||
RUN apk --no-cache add \
|
||||
raspberrypi \
|
||||
raspberrypi-libs \
|
||||
usbutils \
|
||||
&& sed -i "s|# RPi.GPIO|RPi.GPIO|g" /usr/src/homeassistant/requirements_all.txt \
|
||||
&& pip3 install --no-cache-dir --no-index --only-binary=:all: --find-links "${WHEELS_LINKS}" \
|
||||
RPi.GPIO -c /usr/src/homeassistant/requirements_all.txt
|
||||
|
||||
##
|
||||
# Set symlinks for raspberry pi binaries.
|
||||
RUN ln -sv /opt/vc/bin/raspistill /usr/local/bin/raspistill \
|
||||
&& ln -sv /opt/vc/bin/raspivid /usr/local/bin/raspivid \
|
||||
&& ln -sv /opt/vc/bin/raspividyuv /usr/local/bin/raspividyuv \
|
||||
&& ln -sv /opt/vc/bin/raspiyuv /usr/local/bin/raspiyuv
|
||||
|
||||
##
|
||||
# Build libcec with RPi support for HDMI-CEC
|
||||
ARG LIBCEC_VERSION=6.0.2
|
||||
RUN apk add --no-cache \
|
||||
eudev-libs \
|
||||
p8-platform \
|
||||
&& apk add --no-cache --virtual .build-dependencies \
|
||||
build-base \
|
||||
cmake \
|
||||
eudev-dev \
|
||||
swig \
|
||||
raspberrypi-dev \
|
||||
p8-platform-dev \
|
||||
&& git clone --depth 1 -b libcec-${LIBCEC_VERSION} https://github.com/Pulse-Eight/libcec /usr/src/libcec \
|
||||
&& mkdir -p /usr/src/libcec/build \
|
||||
&& cd /usr/src/libcec/build \
|
||||
&& cmake -DCMAKE_INSTALL_PREFIX:PATH=/usr/local \
|
||||
-DRPI_INCLUDE_DIR=/opt/vc/include \
|
||||
-DRPI_LIB_DIR=/opt/vc/lib \
|
||||
-DPYTHON_LIBRARY="/usr/local/lib/libpython3.8.so" \
|
||||
-DPYTHON_INCLUDE_DIR="/usr/local/include/python3.8" \
|
||||
.. \
|
||||
&& make -j$(nproc) \
|
||||
&& make install \
|
||||
&& echo "cec" > "/usr/local/lib/python3.8/site-packages/cec.pth" \
|
||||
&& apk del .build-dependencies \
|
||||
&& rm -rf /usr/src/libcec
|
||||
ENV LD_LIBRARY_PATH=/opt/vc/lib:${LD_LIBRARY_PATH}
|
||||
|
||||
##
|
||||
# Install DHT
|
||||
RUN apk add --no-cache --virtual .build-dependencies \
|
||||
gcc libc-dev raspberrypi-dev \
|
||||
&& export DHT_VERSION="$(cat /usr/src/homeassistant/requirements_all.txt | sed -n 's|.*Adafruit-DHT==\([0-9\.]*\).*|\1|p')" \
|
||||
&& git clone --depth 1 -b ${DHT_VERSION} https://github.com/adafruit/Adafruit_Python_DHT /usr/src/dht \
|
||||
&& cd /usr/src/dht \
|
||||
&& sed -i 's/^pi_version\ =\ None/pi_version\ =\ 2/' setup.py \
|
||||
&& sed -i 's/^platform\ =\ platform_detect.UNKNOWN/platform\ =\ platform_detect.RASPBERRY_PI/' setup.py \
|
||||
&& sed -i 's/platform\ =\ platform_detect.platform_detect()/pass/' setup.py \
|
||||
&& export MAKEFLAGS="-j$(nproc)" \
|
||||
&& pip3 install . \
|
||||
&& apk del .build-dependencies \
|
||||
&& rm -rf /usr/src/dht
|
61
machine/raspberrypi3
Normal file
61
machine/raspberrypi3
Normal file
@@ -0,0 +1,61 @@
|
||||
ARG BUILD_VERSION
|
||||
FROM homeassistant/armv7-homeassistant:$BUILD_VERSION
|
||||
|
||||
RUN apk --no-cache add \
|
||||
raspberrypi \
|
||||
raspberrypi-libs \
|
||||
usbutils \
|
||||
&& sed -i "s|# RPi.GPIO|RPi.GPIO|g" /usr/src/homeassistant/requirements_all.txt \
|
||||
&& pip3 install --no-cache-dir --no-index --only-binary=:all: --find-links "${WHEELS_LINKS}" \
|
||||
RPi.GPIO bluepy pybluez -c /usr/src/homeassistant/requirements_all.txt
|
||||
|
||||
##
|
||||
# Set symlinks for raspberry pi binaries.
|
||||
RUN ln -sv /opt/vc/bin/raspistill /usr/local/bin/raspistill \
|
||||
&& ln -sv /opt/vc/bin/raspivid /usr/local/bin/raspivid \
|
||||
&& ln -sv /opt/vc/bin/raspividyuv /usr/local/bin/raspividyuv \
|
||||
&& ln -sv /opt/vc/bin/raspiyuv /usr/local/bin/raspiyuv
|
||||
|
||||
##
|
||||
# Build libcec with RPi support for HDMI-CEC
|
||||
ARG LIBCEC_VERSION=6.0.2
|
||||
RUN apk add --no-cache \
|
||||
eudev-libs \
|
||||
p8-platform \
|
||||
&& apk add --no-cache --virtual .build-dependencies \
|
||||
build-base \
|
||||
cmake \
|
||||
eudev-dev \
|
||||
swig \
|
||||
raspberrypi-dev \
|
||||
p8-platform-dev \
|
||||
&& git clone --depth 1 -b libcec-${LIBCEC_VERSION} https://github.com/Pulse-Eight/libcec /usr/src/libcec \
|
||||
&& mkdir -p /usr/src/libcec/build \
|
||||
&& cd /usr/src/libcec/build \
|
||||
&& cmake -DCMAKE_INSTALL_PREFIX:PATH=/usr/local \
|
||||
-DRPI_INCLUDE_DIR=/opt/vc/include \
|
||||
-DRPI_LIB_DIR=/opt/vc/lib \
|
||||
-DPYTHON_LIBRARY="/usr/local/lib/libpython3.8.so" \
|
||||
-DPYTHON_INCLUDE_DIR="/usr/local/include/python3.8" \
|
||||
.. \
|
||||
&& make -j$(nproc) \
|
||||
&& make install \
|
||||
&& echo "cec" > "/usr/local/lib/python3.8/site-packages/cec.pth" \
|
||||
&& apk del .build-dependencies \
|
||||
&& rm -rf /usr/src/libcec
|
||||
ENV LD_LIBRARY_PATH=/opt/vc/lib:${LD_LIBRARY_PATH}
|
||||
|
||||
##
|
||||
# Install DHT
|
||||
RUN apk add --no-cache --virtual .build-dependencies \
|
||||
gcc libc-dev raspberrypi-dev \
|
||||
&& export DHT_VERSION="$(cat /usr/src/homeassistant/requirements_all.txt | sed -n 's|.*Adafruit-DHT==\([0-9\.]*\).*|\1|p')" \
|
||||
&& git clone --depth 1 -b ${DHT_VERSION} https://github.com/adafruit/Adafruit_Python_DHT /usr/src/dht \
|
||||
&& cd /usr/src/dht \
|
||||
&& sed -i 's/^pi_version\ =\ None/pi_version\ =\ 3/' setup.py \
|
||||
&& sed -i 's/^platform\ =\ platform_detect.UNKNOWN/platform\ =\ platform_detect.RASPBERRY_PI/' setup.py \
|
||||
&& sed -i 's/platform\ =\ platform_detect.platform_detect()/pass/' setup.py \
|
||||
&& export MAKEFLAGS="-j$(nproc)" \
|
||||
&& pip3 install . \
|
||||
&& apk del .build-dependencies \
|
||||
&& rm -rf /usr/src/dht
|
61
machine/raspberrypi3-64
Normal file
61
machine/raspberrypi3-64
Normal file
@@ -0,0 +1,61 @@
|
||||
ARG BUILD_VERSION
|
||||
FROM homeassistant/aarch64-homeassistant:$BUILD_VERSION
|
||||
|
||||
RUN apk --no-cache add \
|
||||
raspberrypi \
|
||||
raspberrypi-libs \
|
||||
usbutils \
|
||||
&& sed -i "s|# RPi.GPIO|RPi.GPIO|g" /usr/src/homeassistant/requirements_all.txt \
|
||||
&& pip3 install --no-cache-dir --no-index --only-binary=:all: --find-links "${WHEELS_LINKS}" \
|
||||
RPi.GPIO bluepy pybluez -c /usr/src/homeassistant/requirements_all.txt
|
||||
|
||||
##
|
||||
# Set symlinks for raspberry pi binaries.
|
||||
RUN ln -sv /opt/vc/bin/raspistill /usr/local/bin/raspistill \
|
||||
&& ln -sv /opt/vc/bin/raspivid /usr/local/bin/raspivid \
|
||||
&& ln -sv /opt/vc/bin/raspividyuv /usr/local/bin/raspividyuv \
|
||||
&& ln -sv /opt/vc/bin/raspiyuv /usr/local/bin/raspiyuv
|
||||
|
||||
##
|
||||
# Build libcec with RPi support for HDMI-CEC
|
||||
ARG LIBCEC_VERSION=6.0.2
|
||||
RUN apk add --no-cache \
|
||||
eudev-libs \
|
||||
p8-platform \
|
||||
&& apk add --no-cache --virtual .build-dependencies \
|
||||
build-base \
|
||||
cmake \
|
||||
eudev-dev \
|
||||
swig \
|
||||
raspberrypi-dev \
|
||||
p8-platform-dev \
|
||||
&& git clone --depth 1 -b libcec-${LIBCEC_VERSION} https://github.com/Pulse-Eight/libcec /usr/src/libcec \
|
||||
&& mkdir -p /usr/src/libcec/build \
|
||||
&& cd /usr/src/libcec/build \
|
||||
&& cmake -DCMAKE_INSTALL_PREFIX:PATH=/usr/local \
|
||||
-DRPI_INCLUDE_DIR=/opt/vc/include \
|
||||
-DRPI_LIB_DIR=/opt/vc/lib \
|
||||
-DPYTHON_LIBRARY="/usr/local/lib/libpython3.8.so" \
|
||||
-DPYTHON_INCLUDE_DIR="/usr/local/include/python3.8" \
|
||||
.. \
|
||||
&& make -j$(nproc) \
|
||||
&& make install \
|
||||
&& echo "cec" > "/usr/local/lib/python3.8/site-packages/cec.pth" \
|
||||
&& apk del .build-dependencies \
|
||||
&& rm -rf /usr/src/libcec
|
||||
ENV LD_LIBRARY_PATH=/opt/vc/lib:${LD_LIBRARY_PATH}
|
||||
|
||||
##
|
||||
# Install DHT
|
||||
RUN apk add --no-cache --virtual .build-dependencies \
|
||||
gcc libc-dev raspberrypi-dev \
|
||||
&& export DHT_VERSION="$(cat /usr/src/homeassistant/requirements_all.txt | sed -n 's|.*Adafruit-DHT==\([0-9\.]*\).*|\1|p')" \
|
||||
&& git clone --depth 1 -b ${DHT_VERSION} https://github.com/adafruit/Adafruit_Python_DHT /usr/src/dht \
|
||||
&& cd /usr/src/dht \
|
||||
&& sed -i 's/^pi_version\ =\ None/pi_version\ =\ 3/' setup.py \
|
||||
&& sed -i 's/^platform\ =\ platform_detect.UNKNOWN/platform\ =\ platform_detect.RASPBERRY_PI/' setup.py \
|
||||
&& sed -i 's/platform\ =\ platform_detect.platform_detect()/pass/' setup.py \
|
||||
&& export MAKEFLAGS="-j$(nproc)" \
|
||||
&& pip3 install . \
|
||||
&& apk del .build-dependencies \
|
||||
&& rm -rf /usr/src/dht
|
61
machine/raspberrypi4
Normal file
61
machine/raspberrypi4
Normal file
@@ -0,0 +1,61 @@
|
||||
ARG BUILD_VERSION
|
||||
FROM homeassistant/armv7-homeassistant:$BUILD_VERSION
|
||||
|
||||
RUN apk --no-cache add \
|
||||
raspberrypi \
|
||||
raspberrypi-libs \
|
||||
usbutils \
|
||||
&& sed -i "s|# RPi.GPIO|RPi.GPIO|g" /usr/src/homeassistant/requirements_all.txt \
|
||||
&& pip3 install --no-cache-dir --no-index --only-binary=:all: --find-links "${WHEELS_LINKS}" \
|
||||
RPi.GPIO bluepy pybluez -c /usr/src/homeassistant/requirements_all.txt
|
||||
|
||||
##
|
||||
# Set symlinks for raspberry pi binaries.
|
||||
RUN ln -sv /opt/vc/bin/raspistill /usr/local/bin/raspistill \
|
||||
&& ln -sv /opt/vc/bin/raspivid /usr/local/bin/raspivid \
|
||||
&& ln -sv /opt/vc/bin/raspividyuv /usr/local/bin/raspividyuv \
|
||||
&& ln -sv /opt/vc/bin/raspiyuv /usr/local/bin/raspiyuv
|
||||
|
||||
##
|
||||
# Build libcec with RPi support for HDMI-CEC
|
||||
ARG LIBCEC_VERSION=6.0.2
|
||||
RUN apk add --no-cache \
|
||||
eudev-libs \
|
||||
p8-platform \
|
||||
&& apk add --no-cache --virtual .build-dependencies \
|
||||
build-base \
|
||||
cmake \
|
||||
eudev-dev \
|
||||
swig \
|
||||
raspberrypi-dev \
|
||||
p8-platform-dev \
|
||||
&& git clone --depth 1 -b libcec-${LIBCEC_VERSION} https://github.com/Pulse-Eight/libcec /usr/src/libcec \
|
||||
&& mkdir -p /usr/src/libcec/build \
|
||||
&& cd /usr/src/libcec/build \
|
||||
&& cmake -DCMAKE_INSTALL_PREFIX:PATH=/usr/local \
|
||||
-DRPI_INCLUDE_DIR=/opt/vc/include \
|
||||
-DRPI_LIB_DIR=/opt/vc/lib \
|
||||
-DPYTHON_LIBRARY="/usr/local/lib/libpython3.8.so" \
|
||||
-DPYTHON_INCLUDE_DIR="/usr/local/include/python3.8" \
|
||||
.. \
|
||||
&& make -j$(nproc) \
|
||||
&& make install \
|
||||
&& echo "cec" > "/usr/local/lib/python3.8/site-packages/cec.pth" \
|
||||
&& apk del .build-dependencies \
|
||||
&& rm -rf /usr/src/libcec
|
||||
ENV LD_LIBRARY_PATH=/opt/vc/lib:${LD_LIBRARY_PATH}
|
||||
|
||||
##
|
||||
# Install DHT
|
||||
RUN apk add --no-cache --virtual .build-dependencies \
|
||||
gcc libc-dev raspberrypi-dev \
|
||||
&& export DHT_VERSION="$(cat /usr/src/homeassistant/requirements_all.txt | sed -n 's|.*Adafruit-DHT==\([0-9\.]*\).*|\1|p')" \
|
||||
&& git clone --depth 1 -b ${DHT_VERSION} https://github.com/adafruit/Adafruit_Python_DHT /usr/src/dht \
|
||||
&& cd /usr/src/dht \
|
||||
&& sed -i 's/^pi_version\ =\ None/pi_version\ =\ 3/' setup.py \
|
||||
&& sed -i 's/^platform\ =\ platform_detect.UNKNOWN/platform\ =\ platform_detect.RASPBERRY_PI/' setup.py \
|
||||
&& sed -i 's/platform\ =\ platform_detect.platform_detect()/pass/' setup.py \
|
||||
&& export MAKEFLAGS="-j$(nproc)" \
|
||||
&& pip3 install . \
|
||||
&& apk del .build-dependencies \
|
||||
&& rm -rf /usr/src/dht
|
61
machine/raspberrypi4-64
Normal file
61
machine/raspberrypi4-64
Normal file
@@ -0,0 +1,61 @@
|
||||
ARG BUILD_VERSION
|
||||
FROM homeassistant/aarch64-homeassistant:$BUILD_VERSION
|
||||
|
||||
RUN apk --no-cache add \
|
||||
raspberrypi \
|
||||
raspberrypi-libs \
|
||||
usbutils \
|
||||
&& sed -i "s|# RPi.GPIO|RPi.GPIO|g" /usr/src/homeassistant/requirements_all.txt \
|
||||
&& pip3 install --no-cache-dir --no-index --only-binary=:all: --find-links "${WHEELS_LINKS}" \
|
||||
RPi.GPIO bluepy pybluez -c /usr/src/homeassistant/requirements_all.txt
|
||||
|
||||
##
|
||||
# Set symlinks for raspberry pi binaries.
|
||||
RUN ln -sv /opt/vc/bin/raspistill /usr/local/bin/raspistill \
|
||||
&& ln -sv /opt/vc/bin/raspivid /usr/local/bin/raspivid \
|
||||
&& ln -sv /opt/vc/bin/raspividyuv /usr/local/bin/raspividyuv \
|
||||
&& ln -sv /opt/vc/bin/raspiyuv /usr/local/bin/raspiyuv
|
||||
|
||||
##
|
||||
# Build libcec with RPi support for HDMI-CEC
|
||||
ARG LIBCEC_VERSION=6.0.2
|
||||
RUN apk add --no-cache \
|
||||
eudev-libs \
|
||||
p8-platform \
|
||||
&& apk add --no-cache --virtual .build-dependencies \
|
||||
build-base \
|
||||
cmake \
|
||||
eudev-dev \
|
||||
swig \
|
||||
raspberrypi-dev \
|
||||
p8-platform-dev \
|
||||
&& git clone --depth 1 -b libcec-${LIBCEC_VERSION} https://github.com/Pulse-Eight/libcec /usr/src/libcec \
|
||||
&& mkdir -p /usr/src/libcec/build \
|
||||
&& cd /usr/src/libcec/build \
|
||||
&& cmake -DCMAKE_INSTALL_PREFIX:PATH=/usr/local \
|
||||
-DRPI_INCLUDE_DIR=/opt/vc/include \
|
||||
-DRPI_LIB_DIR=/opt/vc/lib \
|
||||
-DPYTHON_LIBRARY="/usr/local/lib/libpython3.8.so" \
|
||||
-DPYTHON_INCLUDE_DIR="/usr/local/include/python3.8" \
|
||||
.. \
|
||||
&& make -j$(nproc) \
|
||||
&& make install \
|
||||
&& echo "cec" > "/usr/local/lib/python3.8/site-packages/cec.pth" \
|
||||
&& apk del .build-dependencies \
|
||||
&& rm -rf /usr/src/libcec
|
||||
ENV LD_LIBRARY_PATH=/opt/vc/lib:${LD_LIBRARY_PATH}
|
||||
|
||||
##
|
||||
# Install DHT
|
||||
RUN apk add --no-cache --virtual .build-dependencies \
|
||||
gcc libc-dev raspberrypi-dev \
|
||||
&& export DHT_VERSION="$(cat /usr/src/homeassistant/requirements_all.txt | sed -n 's|.*Adafruit-DHT==\([0-9\.]*\).*|\1|p')" \
|
||||
&& git clone --depth 1 -b ${DHT_VERSION} https://github.com/adafruit/Adafruit_Python_DHT /usr/src/dht \
|
||||
&& cd /usr/src/dht \
|
||||
&& sed -i 's/^pi_version\ =\ None/pi_version\ =\ 3/' setup.py \
|
||||
&& sed -i 's/^platform\ =\ platform_detect.UNKNOWN/platform\ =\ platform_detect.RASPBERRY_PI/' setup.py \
|
||||
&& sed -i 's/platform\ =\ platform_detect.platform_detect()/pass/' setup.py \
|
||||
&& export MAKEFLAGS="-j$(nproc)" \
|
||||
&& pip3 install . \
|
||||
&& apk del .build-dependencies \
|
||||
&& rm -rf /usr/src/dht
|
48
machine/tinker
Normal file
48
machine/tinker
Normal file
@@ -0,0 +1,48 @@
|
||||
ARG BUILD_VERSION
|
||||
FROM homeassistant/armv7-homeassistant:$BUILD_VERSION
|
||||
|
||||
RUN apk --no-cache add usbutils \
|
||||
&& pip3 install --no-cache-dir --no-index --only-binary=:all: --find-links "${WHEELS_LINKS}" \
|
||||
-c /usr/src/homeassistant/homeassistant/package_constraints.txt \
|
||||
bluepy \
|
||||
pybluez \
|
||||
pygatt[GATTTOOL]
|
||||
|
||||
# Install GPIO support
|
||||
RUN apk add --no-cache --virtual .build-dependencies \
|
||||
gcc libc-dev musl-dev \
|
||||
&& git clone --depth 1 https://github.com/TinkerBoard/gpio_lib_python /usr/src/gpio \
|
||||
&& cd /usr/src/gpio \
|
||||
&& sed -i "s/caddr_t/void*/g" source/wiringTB.c \
|
||||
&& export MAKEFLAGS="-j$(nproc)" \
|
||||
&& python3 setup.py install \
|
||||
&& apk del .build-dependencies \
|
||||
&& rm -rf /usr/src/gpio
|
||||
|
||||
##
|
||||
# Build libcec for HDMI-CEC
|
||||
ARG LIBCEC_VERSION=6.0.2
|
||||
RUN apk add --no-cache \
|
||||
eudev-libs \
|
||||
p8-platform \
|
||||
&& apk add --no-cache --virtual .build-dependencies \
|
||||
build-base \
|
||||
cmake \
|
||||
eudev-dev \
|
||||
swig \
|
||||
p8-platform-dev \
|
||||
linux-headers \
|
||||
&& git clone --depth 1 -b libcec-${LIBCEC_VERSION} https://github.com/Pulse-Eight/libcec /usr/src/libcec \
|
||||
&& cd /usr/src/libcec \
|
||||
&& mkdir -p /usr/src/libcec/build \
|
||||
&& cd /usr/src/libcec/build \
|
||||
&& cmake -DCMAKE_INSTALL_PREFIX:PATH=/usr/local \
|
||||
-DPYTHON_LIBRARY="/usr/local/lib/libpython3.8.so" \
|
||||
-DPYTHON_INCLUDE_DIR="/usr/local/include/python3.8" \
|
||||
-DHAVE_LINUX_API=1 \
|
||||
.. \
|
||||
&& make -j$(nproc) \
|
||||
&& make install \
|
||||
&& echo "cec" > "/usr/local/lib/python3.8/site-packages/cec.pth" \
|
||||
&& apk del .build-dependencies \
|
||||
&& rm -rf /usr/src/libcec*
|
@@ -194,7 +194,7 @@ aioimaplib==0.7.15
|
||||
aiokafka==0.6.0
|
||||
|
||||
# homeassistant.components.kef
|
||||
aiokef==0.2.13
|
||||
aiokef==0.2.16
|
||||
|
||||
# homeassistant.components.lifx
|
||||
aiolifx==0.6.7
|
||||
@@ -221,7 +221,7 @@ aiopvpc==2.0.2
|
||||
aiopylgtv==0.3.3
|
||||
|
||||
# homeassistant.components.shelly
|
||||
aioshelly==0.3.3
|
||||
aioshelly==0.3.4
|
||||
|
||||
# homeassistant.components.switcher_kis
|
||||
aioswitcher==1.2.1
|
||||
@@ -263,7 +263,7 @@ apcaccess==0.0.13
|
||||
apns2==0.3.0
|
||||
|
||||
# homeassistant.components.apprise
|
||||
apprise==0.8.8
|
||||
apprise==0.8.9
|
||||
|
||||
# homeassistant.components.aprs
|
||||
aprslib==0.6.46
|
||||
@@ -275,7 +275,7 @@ aqualogic==1.0
|
||||
arcam-fmj==0.5.3
|
||||
|
||||
# homeassistant.components.arris_tg2492lg
|
||||
arris-tg2492lg==1.0.0
|
||||
arris-tg2492lg==1.1.0
|
||||
|
||||
# homeassistant.components.ampio
|
||||
asmog==0.0.6
|
||||
@@ -538,7 +538,7 @@ elgato==0.2.0
|
||||
eliqonline==1.2.2
|
||||
|
||||
# homeassistant.components.elkm1
|
||||
elkm1-lib==0.7.19
|
||||
elkm1-lib==0.8.0
|
||||
|
||||
# homeassistant.components.mobile_app
|
||||
emoji==0.5.4
|
||||
@@ -753,7 +753,7 @@ hole==0.5.1
|
||||
holidays==0.10.3
|
||||
|
||||
# homeassistant.components.frontend
|
||||
home-assistant-frontend==20200930.0
|
||||
home-assistant-frontend==20201001.2
|
||||
|
||||
# homeassistant.components.zwave
|
||||
homeassistant-pyozw==0.1.10
|
||||
@@ -1022,7 +1022,7 @@ omnilogic==0.4.0
|
||||
onkyo-eiscp==1.2.7
|
||||
|
||||
# homeassistant.components.onvif
|
||||
onvif-zeep-async==0.5.0
|
||||
onvif-zeep-async==0.6.0
|
||||
|
||||
# homeassistant.components.opengarage
|
||||
open-garage==0.1.4
|
||||
@@ -1058,6 +1058,8 @@ orvibo==1.1.1
|
||||
ovoenergy==1.1.7
|
||||
|
||||
# homeassistant.components.mqtt
|
||||
paho-mqtt==1.5.0
|
||||
|
||||
# homeassistant.components.shiftr
|
||||
paho-mqtt==1.5.1
|
||||
|
||||
@@ -1116,7 +1118,7 @@ plexapi==4.1.1
|
||||
plexauth==0.0.5
|
||||
|
||||
# homeassistant.components.plex
|
||||
plexwebsocket==0.0.11
|
||||
plexwebsocket==0.0.12
|
||||
|
||||
# homeassistant.components.plum_lightpad
|
||||
plumlightpad==0.0.11
|
||||
@@ -1250,7 +1252,7 @@ pyarlo==0.2.3
|
||||
pyatag==0.3.4.4
|
||||
|
||||
# homeassistant.components.netatmo
|
||||
pyatmo==4.0.0
|
||||
pyatmo==4.1.0
|
||||
|
||||
# homeassistant.components.atome
|
||||
pyatome==0.1.1
|
||||
@@ -1401,7 +1403,7 @@ pyheos==0.6.0
|
||||
pyhik==0.2.7
|
||||
|
||||
# homeassistant.components.hive
|
||||
pyhiveapi==0.2.20.1
|
||||
pyhiveapi==0.2.20.2
|
||||
|
||||
# homeassistant.components.homematic
|
||||
pyhomematic==0.1.68
|
||||
@@ -1653,7 +1655,7 @@ pysmappee==0.2.13
|
||||
pysmartapp==0.3.2
|
||||
|
||||
# homeassistant.components.smartthings
|
||||
pysmartthings==0.7.3
|
||||
pysmartthings==0.7.4
|
||||
|
||||
# homeassistant.components.smarty
|
||||
pysmarty==0.8
|
||||
@@ -2000,7 +2002,7 @@ simplehound==0.3
|
||||
simplepush==1.1.4
|
||||
|
||||
# homeassistant.components.simplisafe
|
||||
simplisafe-python==9.3.3
|
||||
simplisafe-python==9.4.1
|
||||
|
||||
# homeassistant.components.sisyphus
|
||||
sisyphus-control==2.2.1
|
||||
@@ -2293,7 +2295,7 @@ xs1-api-client==3.0.0
|
||||
yalesmartalarmclient==0.1.6
|
||||
|
||||
# homeassistant.components.yeelight
|
||||
yeelight==0.5.3
|
||||
yeelight==0.5.4
|
||||
|
||||
# homeassistant.components.yeelightsunflower
|
||||
yeelightsunflower==0.0.10
|
||||
@@ -2301,6 +2303,9 @@ yeelightsunflower==0.0.10
|
||||
# homeassistant.components.media_extractor
|
||||
youtube_dl==2020.09.20
|
||||
|
||||
# homeassistant.components.onvif
|
||||
zeep[async]==3.4.0
|
||||
|
||||
# homeassistant.components.zengge
|
||||
zengge==0.2
|
||||
|
||||
@@ -2329,10 +2334,10 @@ zigpy-xbee==0.13.0
|
||||
zigpy-zigate==0.6.2
|
||||
|
||||
# homeassistant.components.zha
|
||||
zigpy-znp==0.2.0
|
||||
zigpy-znp==0.2.1
|
||||
|
||||
# homeassistant.components.zha
|
||||
zigpy==0.24.3
|
||||
zigpy==0.26.0
|
||||
|
||||
# homeassistant.components.zoneminder
|
||||
zm-py==0.4.0
|
||||
|
@@ -134,7 +134,7 @@ aiopvpc==2.0.2
|
||||
aiopylgtv==0.3.3
|
||||
|
||||
# homeassistant.components.shelly
|
||||
aioshelly==0.3.3
|
||||
aioshelly==0.3.4
|
||||
|
||||
# homeassistant.components.switcher_kis
|
||||
aioswitcher==1.2.1
|
||||
@@ -158,7 +158,7 @@ androidtv[async]==0.0.50
|
||||
apns2==0.3.0
|
||||
|
||||
# homeassistant.components.apprise
|
||||
apprise==0.8.8
|
||||
apprise==0.8.9
|
||||
|
||||
# homeassistant.components.aprs
|
||||
aprslib==0.6.46
|
||||
@@ -278,7 +278,7 @@ eebrightbox==0.0.4
|
||||
elgato==0.2.0
|
||||
|
||||
# homeassistant.components.elkm1
|
||||
elkm1-lib==0.7.19
|
||||
elkm1-lib==0.8.0
|
||||
|
||||
# homeassistant.components.mobile_app
|
||||
emoji==0.5.4
|
||||
@@ -376,7 +376,7 @@ hole==0.5.1
|
||||
holidays==0.10.3
|
||||
|
||||
# homeassistant.components.frontend
|
||||
home-assistant-frontend==20200930.0
|
||||
home-assistant-frontend==20201001.2
|
||||
|
||||
# homeassistant.components.zwave
|
||||
homeassistant-pyozw==0.1.10
|
||||
@@ -490,7 +490,7 @@ oauth2client==4.0.0
|
||||
omnilogic==0.4.0
|
||||
|
||||
# homeassistant.components.onvif
|
||||
onvif-zeep-async==0.5.0
|
||||
onvif-zeep-async==0.6.0
|
||||
|
||||
# homeassistant.components.openerz
|
||||
openerz-api==0.1.0
|
||||
@@ -499,8 +499,7 @@ openerz-api==0.1.0
|
||||
ovoenergy==1.1.7
|
||||
|
||||
# homeassistant.components.mqtt
|
||||
# homeassistant.components.shiftr
|
||||
paho-mqtt==1.5.1
|
||||
paho-mqtt==1.5.0
|
||||
|
||||
# homeassistant.components.panasonic_viera
|
||||
panasonic_viera==0.3.6
|
||||
@@ -533,7 +532,7 @@ plexapi==4.1.1
|
||||
plexauth==0.0.5
|
||||
|
||||
# homeassistant.components.plex
|
||||
plexwebsocket==0.0.11
|
||||
plexwebsocket==0.0.12
|
||||
|
||||
# homeassistant.components.plum_lightpad
|
||||
plumlightpad==0.0.11
|
||||
@@ -616,7 +615,7 @@ pyarlo==0.2.3
|
||||
pyatag==0.3.4.4
|
||||
|
||||
# homeassistant.components.netatmo
|
||||
pyatmo==4.0.0
|
||||
pyatmo==4.1.0
|
||||
|
||||
# homeassistant.components.blackbird
|
||||
pyblackbird==0.5
|
||||
@@ -800,7 +799,7 @@ pysmappee==0.2.13
|
||||
pysmartapp==0.3.2
|
||||
|
||||
# homeassistant.components.smartthings
|
||||
pysmartthings==0.7.3
|
||||
pysmartthings==0.7.4
|
||||
|
||||
# homeassistant.components.soma
|
||||
pysoma==0.0.10
|
||||
@@ -936,7 +935,7 @@ sharkiqpy==0.1.8
|
||||
simplehound==0.3
|
||||
|
||||
# homeassistant.components.simplisafe
|
||||
simplisafe-python==9.3.3
|
||||
simplisafe-python==9.4.1
|
||||
|
||||
# homeassistant.components.sleepiq
|
||||
sleepyq==0.7
|
||||
@@ -1065,7 +1064,10 @@ wolf_smartset==0.1.6
|
||||
xmltodict==0.12.0
|
||||
|
||||
# homeassistant.components.yeelight
|
||||
yeelight==0.5.3
|
||||
yeelight==0.5.4
|
||||
|
||||
# homeassistant.components.onvif
|
||||
zeep[async]==3.4.0
|
||||
|
||||
# homeassistant.components.zeroconf
|
||||
zeroconf==0.28.5
|
||||
@@ -1086,10 +1088,7 @@ zigpy-xbee==0.13.0
|
||||
zigpy-zigate==0.6.2
|
||||
|
||||
# homeassistant.components.zha
|
||||
zigpy-znp==0.2.0
|
||||
zigpy-znp==0.2.1
|
||||
|
||||
# homeassistant.components.zha
|
||||
zigpy==0.24.3
|
||||
|
||||
# homeassistant.components.zoneminder
|
||||
zm-py==0.4.0
|
||||
zigpy==0.26.0
|
||||
|
@@ -763,7 +763,6 @@ async def test_group_climate_all_cool(hass):
|
||||
hass.states.async_set("climate.two", "cool")
|
||||
hass.states.async_set("climate.three", "cool")
|
||||
|
||||
assert await async_setup_component(hass, "climate", {})
|
||||
assert await async_setup_component(
|
||||
hass,
|
||||
"group",
|
||||
@@ -773,6 +772,7 @@ async def test_group_climate_all_cool(hass):
|
||||
}
|
||||
},
|
||||
)
|
||||
assert await async_setup_component(hass, "climate", {})
|
||||
await hass.async_block_till_done()
|
||||
|
||||
assert hass.states.get("group.group_zero").state == STATE_ON
|
||||
@@ -804,8 +804,8 @@ async def test_group_alarm(hass):
|
||||
hass.states.async_set("alarm_control_panel.one", "armed_away")
|
||||
hass.states.async_set("alarm_control_panel.two", "armed_home")
|
||||
hass.states.async_set("alarm_control_panel.three", "armed_away")
|
||||
hass.state = CoreState.stopped
|
||||
|
||||
assert await async_setup_component(hass, "alarm_control_panel", {})
|
||||
assert await async_setup_component(
|
||||
hass,
|
||||
"group",
|
||||
@@ -817,8 +817,10 @@ async def test_group_alarm(hass):
|
||||
}
|
||||
},
|
||||
)
|
||||
assert await async_setup_component(hass, "alarm_control_panel", {})
|
||||
await hass.async_block_till_done()
|
||||
hass.bus.async_fire(EVENT_HOMEASSISTANT_START)
|
||||
await hass.async_block_till_done()
|
||||
|
||||
assert hass.states.get("group.group_zero").state == STATE_ON
|
||||
|
||||
|
||||
@@ -850,8 +852,8 @@ async def test_group_vacuum_off(hass):
|
||||
hass.states.async_set("vacuum.one", "docked")
|
||||
hass.states.async_set("vacuum.two", "off")
|
||||
hass.states.async_set("vacuum.three", "off")
|
||||
hass.state = CoreState.stopped
|
||||
|
||||
assert await async_setup_component(hass, "vacuum", {})
|
||||
assert await async_setup_component(
|
||||
hass,
|
||||
"group",
|
||||
@@ -861,8 +863,11 @@ async def test_group_vacuum_off(hass):
|
||||
}
|
||||
},
|
||||
)
|
||||
assert await async_setup_component(hass, "vacuum", {})
|
||||
await hass.async_block_till_done()
|
||||
|
||||
hass.bus.async_fire(EVENT_HOMEASSISTANT_START)
|
||||
await hass.async_block_till_done()
|
||||
assert hass.states.get("group.group_zero").state == STATE_OFF
|
||||
|
||||
|
||||
@@ -893,7 +898,6 @@ async def test_device_tracker_not_home(hass):
|
||||
hass.states.async_set("device_tracker.two", "not_home")
|
||||
hass.states.async_set("device_tracker.three", "not_home")
|
||||
|
||||
assert await async_setup_component(hass, "device_tracker", {})
|
||||
assert await async_setup_component(
|
||||
hass,
|
||||
"group",
|
||||
@@ -916,7 +920,6 @@ async def test_light_removed(hass):
|
||||
hass.states.async_set("light.two", "off")
|
||||
hass.states.async_set("light.three", "on")
|
||||
|
||||
assert await async_setup_component(hass, "light", {})
|
||||
assert await async_setup_component(
|
||||
hass,
|
||||
"group",
|
||||
@@ -943,7 +946,6 @@ async def test_switch_removed(hass):
|
||||
hass.states.async_set("switch.three", "on")
|
||||
|
||||
hass.state = CoreState.stopped
|
||||
assert await async_setup_component(hass, "switch", {})
|
||||
assert await async_setup_component(
|
||||
hass,
|
||||
"group",
|
||||
@@ -956,6 +958,8 @@ async def test_switch_removed(hass):
|
||||
await hass.async_block_till_done()
|
||||
|
||||
assert hass.states.get("group.group_zero").state == "unknown"
|
||||
assert await async_setup_component(hass, "switch", {})
|
||||
await hass.async_block_till_done()
|
||||
|
||||
hass.bus.async_fire(EVENT_HOMEASSISTANT_START)
|
||||
await hass.async_block_till_done()
|
||||
@@ -965,3 +969,310 @@ async def test_switch_removed(hass):
|
||||
await hass.async_block_till_done()
|
||||
|
||||
assert hass.states.get("group.group_zero").state == "off"
|
||||
|
||||
|
||||
async def test_lights_added_after_group(hass):
|
||||
"""Test lights added after group."""
|
||||
|
||||
entity_ids = [
|
||||
"light.living_front_ri",
|
||||
"light.living_back_lef",
|
||||
"light.living_back_cen",
|
||||
"light.living_front_le",
|
||||
"light.living_front_ce",
|
||||
"light.living_back_rig",
|
||||
]
|
||||
|
||||
assert await async_setup_component(
|
||||
hass,
|
||||
"group",
|
||||
{
|
||||
"group": {
|
||||
"living_room_downlights": {"entities": entity_ids},
|
||||
}
|
||||
},
|
||||
)
|
||||
await hass.async_block_till_done()
|
||||
|
||||
assert hass.states.get("group.living_room_downlights").state == "unknown"
|
||||
|
||||
for entity_id in entity_ids:
|
||||
hass.states.async_set(entity_id, "off")
|
||||
await hass.async_block_till_done()
|
||||
|
||||
assert hass.states.get("group.living_room_downlights").state == "off"
|
||||
|
||||
|
||||
async def test_lights_added_before_group(hass):
|
||||
"""Test lights added before group."""
|
||||
|
||||
entity_ids = [
|
||||
"light.living_front_ri",
|
||||
"light.living_back_lef",
|
||||
"light.living_back_cen",
|
||||
"light.living_front_le",
|
||||
"light.living_front_ce",
|
||||
"light.living_back_rig",
|
||||
]
|
||||
|
||||
for entity_id in entity_ids:
|
||||
hass.states.async_set(entity_id, "off")
|
||||
await hass.async_block_till_done()
|
||||
|
||||
assert await async_setup_component(
|
||||
hass,
|
||||
"group",
|
||||
{
|
||||
"group": {
|
||||
"living_room_downlights": {"entities": entity_ids},
|
||||
}
|
||||
},
|
||||
)
|
||||
await hass.async_block_till_done()
|
||||
|
||||
assert hass.states.get("group.living_room_downlights").state == "off"
|
||||
|
||||
|
||||
async def test_cover_added_after_group(hass):
|
||||
"""Test cover added after group."""
|
||||
|
||||
entity_ids = [
|
||||
"cover.upstairs",
|
||||
"cover.downstairs",
|
||||
]
|
||||
|
||||
assert await async_setup_component(
|
||||
hass,
|
||||
"group",
|
||||
{
|
||||
"group": {
|
||||
"shades": {"entities": entity_ids},
|
||||
}
|
||||
},
|
||||
)
|
||||
await hass.async_block_till_done()
|
||||
|
||||
for entity_id in entity_ids:
|
||||
hass.states.async_set(entity_id, "open")
|
||||
await hass.async_block_till_done()
|
||||
await hass.async_block_till_done()
|
||||
|
||||
assert hass.states.get("group.shades").state == "open"
|
||||
|
||||
for entity_id in entity_ids:
|
||||
hass.states.async_set(entity_id, "closed")
|
||||
|
||||
await hass.async_block_till_done()
|
||||
assert hass.states.get("group.shades").state == "closed"
|
||||
|
||||
|
||||
async def test_group_that_references_a_group_of_lights(hass):
    """Group that references a group of lights."""
    light_ids = [
        "light.living_front_ri",
        "light.living_back_lef",
    ]
    # Simulate pre-start setup so nested group resolution happens on start.
    hass.state = CoreState.stopped

    for light_id in light_ids:
        hass.states.async_set(light_id, "off")
    await hass.async_block_till_done()

    group_config = {
        "group": {
            "living_room_downlights": {"entities": light_ids},
            "grouped_group": {
                "entities": ["group.living_room_downlights", *light_ids]
            },
        }
    }
    assert await async_setup_component(hass, "group", group_config)
    await hass.async_block_till_done()

    # Starting HA resolves the group-within-a-group membership.
    hass.bus.async_fire(EVENT_HOMEASSISTANT_START)
    await hass.async_block_till_done()

    assert hass.states.get("group.living_room_downlights").state == "off"
    assert hass.states.get("group.grouped_group").state == "off"
||||
async def test_group_that_references_a_group_of_covers(hass):
    """Group that references a group of covers.

    The nested group must reference the cover group defined in the same
    config ("group.living_room_downcover"); the previous reference to
    "group.living_room_downlights" named a group that does not exist in
    this test (copy-paste slip from the lights variant above).
    """
    entity_ids = [
        "cover.living_front_ri",
        "cover.living_back_lef",
    ]
    # Simulate pre-start setup so nested group resolution happens on start.
    hass.state = CoreState.stopped

    for entity_id in entity_ids:
        hass.states.async_set(entity_id, "closed")
    await hass.async_block_till_done()

    assert await async_setup_component(
        hass,
        "group",
        {
            "group": {
                "living_room_downcover": {"entities": entity_ids},
                "grouped_group": {
                    # Fixed: reference the cover group created above, not a
                    # non-existent "living_room_downlights" group.
                    "entities": ["group.living_room_downcover", *entity_ids]
                },
            }
        },
    )
    await hass.async_block_till_done()

    hass.bus.async_fire(EVENT_HOMEASSISTANT_START)
    await hass.async_block_till_done()

    assert hass.states.get("group.living_room_downcover").state == "closed"
    assert hass.states.get("group.grouped_group").state == "closed"
||||
async def test_group_that_references_two_groups_of_covers(hass):
    """Group that references two groups of covers.

    The nested group must reference the two cover groups defined in the
    same config; the previous reference to "group.living_room_downlights"
    named a group that does not exist in this test (copy-paste slip from
    the lights variant above).
    """
    entity_ids = [
        "cover.living_front_ri",
        "cover.living_back_lef",
    ]
    # Simulate pre-start setup so nested group resolution happens on start.
    hass.state = CoreState.stopped

    for entity_id in entity_ids:
        hass.states.async_set(entity_id, "closed")
    await hass.async_block_till_done()

    assert await async_setup_component(
        hass,
        "group",
        {
            "group": {
                "living_room_downcover": {"entities": entity_ids},
                "living_room_upcover": {"entities": entity_ids},
                "grouped_group": {
                    "entities": [
                        # Fixed: reference the down-cover group created above,
                        # not a non-existent "living_room_downlights" group.
                        "group.living_room_downcover",
                        "group.living_room_upcover",
                    ]
                },
            }
        },
    )
    await hass.async_block_till_done()

    hass.bus.async_fire(EVENT_HOMEASSISTANT_START)
    await hass.async_block_till_done()

    assert hass.states.get("group.living_room_downcover").state == "closed"
    assert hass.states.get("group.living_room_upcover").state == "closed"
    assert hass.states.get("group.grouped_group").state == "closed"
||||
async def test_group_that_references_two_types_of_groups(hass):
    """Group that references a group of covers and device_trackers."""
    cover_ids = [
        "cover.living_front_ri",
        "cover.living_back_lef",
    ]
    tracker_ids = [
        "device_tracker.living_front_ri",
        "device_tracker.living_back_lef",
    ]
    # Simulate pre-start setup so nested group resolution happens on start.
    hass.state = CoreState.stopped

    for cover_id in cover_ids:
        hass.states.async_set(cover_id, "closed")
    for tracker_id in tracker_ids:
        hass.states.async_set(tracker_id, "home")
    await hass.async_block_till_done()

    group_config = {
        "group": {
            "covers": {"entities": cover_ids},
            "device_trackers": {"entities": tracker_ids},
            "grouped_group": {
                "entities": ["group.covers", "group.device_trackers"]
            },
        }
    }
    assert await async_setup_component(hass, "group", group_config)
    await hass.async_block_till_done()

    hass.bus.async_fire(EVENT_HOMEASSISTANT_START)
    await hass.async_block_till_done()

    # Each single-domain group keeps its domain-specific state; the
    # mixed-domain wrapper group falls back to generic on/off semantics.
    assert hass.states.get("group.covers").state == "closed"
    assert hass.states.get("group.device_trackers").state == "home"
    assert hass.states.get("group.grouped_group").state == "on"
||||
async def test_plant_group(hass):
    """Test plant states can be grouped."""
    plant_ids = [
        "plant.upstairs",
        "plant.downstairs",
    ]

    plant_config = {
        "plant": {
            "plantname": {
                "sensors": {
                    "moisture": "sensor.mqtt_plant_moisture",
                    "battery": "sensor.mqtt_plant_battery",
                    "temperature": "sensor.mqtt_plant_temperature",
                    "conductivity": "sensor.mqtt_plant_conductivity",
                    "brightness": "sensor.mqtt_plant_brightness",
                },
                "min_moisture": 20,
                "max_moisture": 60,
                "min_battery": 17,
                "min_conductivity": 500,
                "min_temperature": 15,
                "min_brightness": 500,
            }
        }
    }
    assert await async_setup_component(hass, "plant", plant_config)

    group_config = {
        "group": {
            "plants": {"entities": plant_ids},
            "plant_with_binary_sensors": {
                "entities": [*plant_ids, "binary_sensor.planter"]
            },
        }
    }
    assert await async_setup_component(hass, "group", group_config)
    await hass.async_block_till_done()

    # Healthy case: plants "ok", binary sensor "off" — the all-plant group
    # keeps the plant state, the mixed group follows on/off semantics.
    hass.states.async_set("binary_sensor.planter", "off")
    for plant_id in plant_ids:
        hass.states.async_set(plant_id, "ok")
        await hass.async_block_till_done()
    await hass.async_block_till_done()

    assert hass.states.get("group.plants").state == "ok"
    assert hass.states.get("group.plant_with_binary_sensors").state == "off"

    # Problem case: flip everything and both groups follow.
    hass.states.async_set("binary_sensor.planter", "on")
    for plant_id in plant_ids:
        hass.states.async_set(plant_id, "problem")
    await hass.async_block_till_done()

    assert hass.states.get("group.plants").state == "problem"
    assert hass.states.get("group.plant_with_binary_sensors").state == "on"
|
||||
|
@@ -1240,3 +1240,32 @@ async def test_attribute_if_not_fires_on_entities_change_with_for_after_stop(
|
||||
async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=10))
|
||||
await hass.async_block_till_done()
|
||||
assert len(calls) == 1
|
||||
|
||||
|
||||
async def test_attribute_if_fires_on_entity_change_with_both_filters_boolean(
    hass, calls
):
    """Test for firing if both filters are match attribute."""
    hass.states.async_set("test.entity", "bla", {"happening": False})

    automation_config = {
        automation.DOMAIN: {
            "trigger": {
                "platform": "state",
                "entity_id": "test.entity",
                "from": False,
                "to": True,
                "attribute": "happening",
            },
            "action": {"service": "test.automation"},
        }
    }
    assert await async_setup_component(hass, automation.DOMAIN, automation_config)
    await hass.async_block_till_done()

    # The boolean attribute flips False -> True, matching both the "from"
    # and "to" filters, so the automation fires exactly once.
    hass.states.async_set("test.entity", "bla", {"happening": True})
    await hass.async_block_till_done()
    assert len(calls) == 1
|
||||
|
@@ -1,7 +1,8 @@
|
||||
"""Helper methods for Plex tests."""
|
||||
from plexwebsocket import SIGNAL_DATA
|
||||
|
||||
|
||||
def trigger_plex_update(mock_websocket):
    """Call the websocket callback method.

    The callback registered with plexwebsocket takes
    (msgtype, data, error); invoke it with SIGNAL_DATA and no payload so
    the Plex integration refreshes. The legacy zero-argument ``callback()``
    call was diff residue from before the signature change and would not
    match the new callback signature.
    """
    callback = mock_websocket.call_args[0][1]
    callback(SIGNAL_DATA, None, None)
||||
|
@@ -4,6 +4,7 @@ from datetime import datetime, timedelta
|
||||
import unittest
|
||||
|
||||
import pytest
|
||||
from sqlalchemy.exc import OperationalError
|
||||
|
||||
from homeassistant.components.recorder import (
|
||||
CONFIG_SCHEMA,
|
||||
@@ -452,3 +453,41 @@ def test_run_information(hass_recorder):
|
||||
|
||||
# NOTE(review): presumably used as a poison value to exercise the recorder's
# JSON-serialization error path — confirm against callers (not visible here).
class CannotSerializeMe:
    """A class that the JSONEncoder cannot serialize."""
||||
|
||||
|
||||
def test_saving_state_with_exception(hass, hass_recorder, caplog):
    """Test that a state-flush failure is logged and the recorder recovers.

    A forced OperationalError while flushing a States row must be logged
    as a query error (not an event-saving error), and the recorder must
    keep persisting states once the fault is removed. (Docstring fixed:
    it previously said "saving and restoring a state", which describes a
    different test.)
    """
    # The ``hass`` fixture parameter is immediately replaced by the
    # recorder-enabled instance; it is kept only to pull in the fixture.
    hass = hass_recorder()

    entity_id = "test.recorder"
    state = "restoring_from_db"
    attributes = {"test_attr": 5, "test_attr_10": "nice"}

    def _throw_if_state_in_session(*args, **kwargs):
        # Fail the flush only while a States row is pending, so the error
        # is attributed to state saving rather than event saving.
        for obj in hass.data[DATA_INSTANCE].event_session:
            if isinstance(obj, States):
                raise OperationalError(
                    "insert the state", "fake params", "forced to fail"
                )

    # Patch time.sleep so the recorder's retry back-off does not slow the test.
    with patch("time.sleep"), patch.object(
        hass.data[DATA_INSTANCE].event_session,
        "flush",
        side_effect=_throw_if_state_in_session,
    ):
        hass.states.set(entity_id, "fail", attributes)
        wait_recording_done(hass)

    assert "Error executing query" in caplog.text
    assert "Error saving events" not in caplog.text

    # With the fault removed, saving works again.
    caplog.clear()
    hass.states.set(entity_id, state, attributes)
    wait_recording_done(hass)

    with session_scope(hass=hass) as session:
        db_states = list(session.query(States))
        assert len(db_states) >= 1

    assert "Error executing query" not in caplog.text
    assert "Error saving events" not in caplog.text
|
||||
|
@@ -115,6 +115,7 @@ async def test_template_state_boolean(hass, calls):
|
||||
|
||||
async def test_template_position(hass, calls):
|
||||
"""Test the position_template attribute."""
|
||||
hass.states.async_set("cover.test", STATE_OPEN)
|
||||
with assert_setup_component(1, "cover"):
|
||||
assert await setup.async_setup_component(
|
||||
hass,
|
||||
@@ -1120,3 +1121,48 @@ async def test_state_gets_lowercased(hass):
|
||||
hass.states.async_set("binary_sensor.garage_door_sensor", "on")
|
||||
await hass.async_block_till_done()
|
||||
assert hass.states.get("cover.garage_door").state == STATE_CLOSED
|
||||
|
||||
|
||||
async def test_self_referencing_icon_with_no_template_is_not_a_loop(hass, caplog):
    """Test a self referencing icon with no value template is not a loop."""
    icon_template_str = """{% if is_state('cover.office', 'open') %}
            mdi:window-shutter-open
        {% else %}
            mdi:window-shutter
        {% endif %}"""

    cover_config = {
        "cover": {
            "platform": "template",
            "covers": {
                "office": {
                    "icon_template": icon_template_str,
                    "open_cover": {
                        "service": "switch.turn_on",
                        "entity_id": "switch.office_blinds_up",
                    },
                    "close_cover": {
                        "service": "switch.turn_on",
                        "entity_id": "switch.office_blinds_down",
                    },
                    "stop_cover": {
                        "service": "switch.turn_on",
                        "entity_id": "switch.office_blinds_up",
                    },
                },
            },
        }
    }
    await setup.async_setup_component(hass, "cover", cover_config)

    await hass.async_block_till_done()
    await hass.async_start()
    await hass.async_block_till_done()

    # Only the cover entity itself was created, and the icon template that
    # references its own entity did not trigger loop detection.
    assert len(hass.states.async_all()) == 1
    assert "Template loop detected" not in caplog.text
|
||||
|
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user