Mirror of https://github.com/home-assistant/core.git (synced 2025-07-23 13:17:32 +00:00)

commit c10bf079f7: Merge pull request #46383 from home-assistant/rc
homeassistant/components/alexa/state_report.py
@@ -8,7 +8,7 @@ import aiohttp
 import async_timeout
 
 from homeassistant.const import HTTP_ACCEPTED, MATCH_ALL, STATE_ON
-from homeassistant.core import State
+from homeassistant.core import HomeAssistant, State, callback
 from homeassistant.helpers.significant_change import create_checker
 import homeassistant.util.dt as dt_util
 
@@ -28,7 +28,20 @@ async def async_enable_proactive_mode(hass, smart_home_config):
     # Validate we can get access token.
     await smart_home_config.async_get_access_token()
 
-    checker = await create_checker(hass, DOMAIN)
+    @callback
+    def extra_significant_check(
+        hass: HomeAssistant,
+        old_state: str,
+        old_attrs: dict,
+        old_extra_arg: dict,
+        new_state: str,
+        new_attrs: dict,
+        new_extra_arg: dict,
+    ):
+        """Check if the serialized data has changed."""
+        return old_extra_arg is not None and old_extra_arg != new_extra_arg
+
+    checker = await create_checker(hass, DOMAIN, extra_significant_check)
 
     async def async_entity_state_listener(
         changed_entity: str,
@@ -70,15 +83,22 @@ async def async_enable_proactive_mode(hass, smart_home_config):
         if not should_report and not should_doorbell:
             return
 
-        if not checker.async_is_significant_change(new_state):
-            return
-
         if should_doorbell:
             should_report = False
 
+        if should_report:
+            alexa_properties = list(alexa_changed_entity.serialize_properties())
+        else:
+            alexa_properties = None
+
+        if not checker.async_is_significant_change(
+            new_state, extra_arg=alexa_properties
+        ):
+            return
+
         if should_report:
             await async_send_changereport_message(
-                hass, smart_home_config, alexa_changed_entity
+                hass, smart_home_config, alexa_changed_entity, alexa_properties
             )
 
         elif should_doorbell:
@@ -92,7 +112,7 @@ async def async_enable_proactive_mode(hass, smart_home_config):
 
 
 async def async_send_changereport_message(
-    hass, config, alexa_entity, *, invalidate_access_token=True
+    hass, config, alexa_entity, alexa_properties, *, invalidate_access_token=True
 ):
     """Send a ChangeReport message for an Alexa entity.
 
@@ -107,7 +127,7 @@ async def async_send_changereport_message(
     payload = {
         API_CHANGE: {
             "cause": {"type": Cause.APP_INTERACTION},
-            "properties": list(alexa_entity.serialize_properties()),
+            "properties": alexa_properties,
         }
     }
 
@@ -146,7 +166,7 @@ async def async_send_changereport_message(
     ):
         config.async_invalidate_access_token()
         return await async_send_changereport_message(
-            hass, config, alexa_entity, invalidate_access_token=False
+            hass, config, alexa_entity, alexa_properties, invalidate_access_token=False
         )
 
     _LOGGER.error(
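In both reporting integrations touched by this commit the pattern is the same: serialize the entity once, hand the serialized payload to the significant-change checker as extra_arg, and reuse it for the outgoing message. A minimal, self-contained sketch of the comparison the Alexa extra check performs (illustrative only; a reduced two-argument form of the callback in the hunk above, not Home Assistant API):

    def extra_significant_check(old_extra_arg, new_extra_arg):
        # Report only when a previous serialization exists and it differs from the new one.
        return old_extra_arg is not None and old_extra_arg != new_extra_arg

    assert extra_significant_check(None, [{"name": "powerState", "value": "ON"}]) is False
    assert extra_significant_check([{"value": "ON"}], [{"value": "OFF"}]) is True
    assert extra_significant_check([{"value": "ON"}], [{"value": "ON"}]) is False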
homeassistant/components/brother/__init__.py
@@ -6,12 +6,13 @@ import logging
 from brother import Brother, SnmpError, UnsupportedModel
 
 from homeassistant.config_entries import ConfigEntry
-from homeassistant.const import CONF_HOST, CONF_TYPE, EVENT_HOMEASSISTANT_STOP
+from homeassistant.const import CONF_HOST, CONF_TYPE
 from homeassistant.core import Config, HomeAssistant
 from homeassistant.exceptions import ConfigEntryNotReady
 from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
 
-from .const import DOMAIN
+from .const import DATA_CONFIG_ENTRY, DOMAIN, SNMP
+from .utils import get_snmp_engine
 
 PLATFORMS = ["sensor"]
 
@@ -30,15 +31,20 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry):
     host = entry.data[CONF_HOST]
     kind = entry.data[CONF_TYPE]
 
-    coordinator = BrotherDataUpdateCoordinator(hass, host=host, kind=kind)
+    snmp_engine = get_snmp_engine(hass)
+
+    coordinator = BrotherDataUpdateCoordinator(
+        hass, host=host, kind=kind, snmp_engine=snmp_engine
+    )
     await coordinator.async_refresh()
 
     if not coordinator.last_update_success:
-        coordinator.shutdown()
         raise ConfigEntryNotReady
 
     hass.data.setdefault(DOMAIN, {})
-    hass.data[DOMAIN][entry.entry_id] = coordinator
+    hass.data[DOMAIN].setdefault(DATA_CONFIG_ENTRY, {})
+    hass.data[DOMAIN][DATA_CONFIG_ENTRY][entry.entry_id] = coordinator
+    hass.data[DOMAIN][SNMP] = snmp_engine
 
     for component in PLATFORMS:
         hass.async_create_task(
@@ -59,7 +65,10 @@ async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry):
         )
     )
     if unload_ok:
-        hass.data[DOMAIN].pop(entry.entry_id).shutdown()
+        hass.data[DOMAIN][DATA_CONFIG_ENTRY].pop(entry.entry_id)
+        if not hass.data[DOMAIN][DATA_CONFIG_ENTRY]:
+            hass.data[DOMAIN].pop(SNMP)
+            hass.data[DOMAIN].pop(DATA_CONFIG_ENTRY)
 
     return unload_ok
 
@@ -67,12 +76,9 @@ async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry):
 class BrotherDataUpdateCoordinator(DataUpdateCoordinator):
     """Class to manage fetching Brother data from the printer."""
 
-    def __init__(self, hass, host, kind):
+    def __init__(self, hass, host, kind, snmp_engine):
         """Initialize."""
-        self.brother = Brother(host, kind=kind)
-        self._unsub_stop = hass.bus.async_listen(
-            EVENT_HOMEASSISTANT_STOP, self._handle_ha_stop
-        )
+        self.brother = Brother(host, kind=kind, snmp_engine=snmp_engine)
 
         super().__init__(
             hass,
@@ -83,22 +89,8 @@ class BrotherDataUpdateCoordinator(DataUpdateCoordinator):
 
     async def _async_update_data(self):
         """Update data via library."""
-        # Race condition on shutdown. Stop all the fetches.
-        if self._unsub_stop is None:
-            return None
-
         try:
             await self.brother.async_update()
         except (ConnectionError, SnmpError, UnsupportedModel) as error:
             raise UpdateFailed(error) from error
         return self.brother.data
-
-    def shutdown(self):
-        """Shutdown the Brother coordinator."""
-        self._unsub_stop()
-        self._unsub_stop = None
-        self.brother.shutdown()
-
-    def _handle_ha_stop(self, _):
-        """Handle Home Assistant stopping."""
-        self.shutdown()
homeassistant/components/brother/config_flow.py
@@ -9,6 +9,7 @@ from homeassistant import config_entries, exceptions
 from homeassistant.const import CONF_HOST, CONF_TYPE
 
 from .const import DOMAIN, PRINTER_TYPES  # pylint:disable=unused-import
+from .utils import get_snmp_engine
 
 DATA_SCHEMA = vol.Schema(
     {
@@ -48,9 +49,10 @@ class BrotherConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):
                 if not host_valid(user_input[CONF_HOST]):
                     raise InvalidHost()
 
-                brother = Brother(user_input[CONF_HOST])
+                snmp_engine = get_snmp_engine(self.hass)
+
+                brother = Brother(user_input[CONF_HOST], snmp_engine=snmp_engine)
                 await brother.async_update()
-                brother.shutdown()
 
                 await self.async_set_unique_id(brother.serial.lower())
                 self._abort_if_unique_id_configured()
@@ -83,7 +85,9 @@ class BrotherConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):
         # Hostname is format: brother.local.
         self.host = discovery_info["hostname"].rstrip(".")
 
-        self.brother = Brother(self.host)
+        snmp_engine = get_snmp_engine(self.hass)
+
+        self.brother = Brother(self.host, snmp_engine=snmp_engine)
         try:
             await self.brother.async_update()
         except (ConnectionError, SnmpError, UnsupportedModel):
homeassistant/components/brother/const.py
@@ -41,12 +41,16 @@ ATTR_YELLOW_DRUM_REMAINING_PAGES = "yellow_drum_remaining_pages"
 ATTR_YELLOW_INK_REMAINING = "yellow_ink_remaining"
 ATTR_YELLOW_TONER_REMAINING = "yellow_toner_remaining"
 
+DATA_CONFIG_ENTRY = "config_entry"
+
 DOMAIN = "brother"
 
 UNIT_PAGES = "p"
 
 PRINTER_TYPES = ["laser", "ink"]
 
+SNMP = "snmp"
+
 SENSOR_TYPES = {
     ATTR_STATUS: {
         ATTR_ICON: "mdi:printer",
homeassistant/components/brother/manifest.json
@@ -3,7 +3,7 @@
   "name": "Brother Printer",
   "documentation": "https://www.home-assistant.io/integrations/brother",
   "codeowners": ["@bieniu"],
-  "requirements": ["brother==0.1.20"],
+  "requirements": ["brother==0.1.21"],
   "zeroconf": [{ "type": "_printer._tcp.local.", "name": "brother*" }],
   "config_flow": true,
   "quality_scale": "platinum"
homeassistant/components/brother/sensor.py
@@ -24,6 +24,7 @@ from .const import (
     ATTR_YELLOW_DRUM_COUNTER,
     ATTR_YELLOW_DRUM_REMAINING_LIFE,
     ATTR_YELLOW_DRUM_REMAINING_PAGES,
+    DATA_CONFIG_ENTRY,
     DOMAIN,
     SENSOR_TYPES,
 )
@@ -37,7 +38,7 @@ ATTR_SERIAL = "serial"
 
 async def async_setup_entry(hass, config_entry, async_add_entities):
     """Add Brother entities from a config_entry."""
-    coordinator = hass.data[DOMAIN][config_entry.entry_id]
+    coordinator = hass.data[DOMAIN][DATA_CONFIG_ENTRY][config_entry.entry_id]
 
     sensors = []
 
homeassistant/components/brother/utils.py (new file, 30 lines)
@@ -0,0 +1,30 @@
+"""Brother helpers functions."""
+import logging
+
+import pysnmp.hlapi.asyncio as hlapi
+from pysnmp.hlapi.asyncio.cmdgen import lcd
+
+from homeassistant.const import EVENT_HOMEASSISTANT_STOP
+from homeassistant.core import callback
+from homeassistant.helpers import singleton
+
+from .const import DOMAIN, SNMP
+
+_LOGGER = logging.getLogger(__name__)
+
+
+@singleton.singleton("snmp_engine")
+def get_snmp_engine(hass):
+    """Get SNMP engine."""
+    _LOGGER.debug("Creating SNMP engine")
+    snmp_engine = hlapi.SnmpEngine()
+
+    @callback
+    def shutdown_listener(ev):
+        if hass.data.get(DOMAIN):
+            _LOGGER.debug("Unconfiguring SNMP engine")
+            lcd.unconfigure(hass.data[DOMAIN][SNMP], None)
+
+    hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, shutdown_listener)
+
+    return snmp_engine
homeassistant/components/google_assistant/report_state.py
@@ -38,42 +38,59 @@ def async_enable_report_state(hass: HomeAssistant, google_config: AbstractConfig
         if not entity.is_supported():
             return
 
-        if not checker.async_is_significant_change(new_state):
-            return
-
         try:
             entity_data = entity.query_serialize()
         except SmartHomeError as err:
             _LOGGER.debug("Not reporting state for %s: %s", changed_entity, err.code)
             return
 
+        if not checker.async_is_significant_change(new_state, extra_arg=entity_data):
+            return
+
         _LOGGER.debug("Reporting state for %s: %s", changed_entity, entity_data)
 
         await google_config.async_report_state_all(
             {"devices": {"states": {changed_entity: entity_data}}}
         )
 
+    @callback
+    def extra_significant_check(
+        hass: HomeAssistant,
+        old_state: str,
+        old_attrs: dict,
+        old_extra_arg: dict,
+        new_state: str,
+        new_attrs: dict,
+        new_extra_arg: dict,
+    ):
+        """Check if the serialized data has changed."""
+        return old_extra_arg != new_extra_arg
+
     async def inital_report(_now):
         """Report initially all states."""
         nonlocal unsub, checker
         entities = {}
 
-        checker = await create_checker(hass, DOMAIN)
+        checker = await create_checker(hass, DOMAIN, extra_significant_check)
 
         for entity in async_get_entities(hass, google_config):
            if not entity.should_expose():
                continue
 
-            # Tell our significant change checker that we're reporting
-            # So it knows with subsequent changes what was already reported.
-            if not checker.async_is_significant_change(entity.state):
-                continue
-
             try:
-                entities[entity.entity_id] = entity.query_serialize()
+                entity_data = entity.query_serialize()
             except SmartHomeError:
                 continue
 
+            # Tell our significant change checker that we're reporting
+            # So it knows with subsequent changes what was already reported.
+            if not checker.async_is_significant_change(
+                entity.state, extra_arg=entity_data
+            ):
+                continue
+
+            entities[entity.entity_id] = entity_data
+
         if not entities:
             return
 
homeassistant/components/lutron_caseta/manifest.json
@@ -3,7 +3,7 @@
   "name": "Lutron Caséta",
   "documentation": "https://www.home-assistant.io/integrations/lutron_caseta",
   "requirements": [
-    "pylutron-caseta==0.9.0", "aiolip==1.0.1"
+    "pylutron-caseta==0.9.0", "aiolip==1.1.4"
   ],
   "config_flow": true,
   "zeroconf": ["_leap._tcp.local."],
homeassistant/components/myq/__init__.py
@@ -11,7 +11,7 @@ from homeassistant.const import CONF_PASSWORD, CONF_USERNAME
 from homeassistant.core import HomeAssistant
 from homeassistant.exceptions import ConfigEntryNotReady
 from homeassistant.helpers import aiohttp_client
-from homeassistant.helpers.update_coordinator import DataUpdateCoordinator
+from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
 
 from .const import DOMAIN, MYQ_COORDINATOR, MYQ_GATEWAY, PLATFORMS, UPDATE_INTERVAL
 
@@ -40,11 +40,19 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry):
     except MyQError as err:
         raise ConfigEntryNotReady from err
 
+    # Called by DataUpdateCoordinator, allows to capture any MyQError exceptions and to throw an HASS UpdateFailed
+    # exception instead, preventing traceback in HASS logs.
+    async def async_update_data():
+        try:
+            return await myq.update_device_info()
+        except MyQError as err:
+            raise UpdateFailed(str(err)) from err
+
     coordinator = DataUpdateCoordinator(
         hass,
         _LOGGER,
         name="myq devices",
-        update_method=myq.update_device_info,
+        update_method=async_update_data,
         update_interval=timedelta(seconds=UPDATE_INTERVAL),
     )
 
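Wrapping the library coroutine this way keeps DataUpdateCoordinator from logging a full traceback on every transient MyQ outage; raising UpdateFailed makes the coordinator mark the poll as failed and simply retry on the next interval. The same wrapper shape in isolation (generic stand-ins, not pymyq's API):

    class UpdateFailed(Exception):
        """Signal a failed poll without dumping a traceback."""

    def wrap_update_method(fetch, library_error):
        """Return a coordinator-style update method for *fetch*."""

        async def async_update_data():
            try:
                return await fetch()
            except library_error as err:
                raise UpdateFailed(str(err)) from err

        return async_update_data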
homeassistant/components/myq/binary_sensor.py
@@ -1,7 +1,5 @@
 """Support for MyQ gateways."""
 from pymyq.const import (
-    DEVICE_FAMILY as MYQ_DEVICE_FAMILY,
-    DEVICE_FAMILY_GATEWAY as MYQ_DEVICE_FAMILY_GATEWAY,
     DEVICE_STATE as MYQ_DEVICE_STATE,
     DEVICE_STATE_ONLINE as MYQ_DEVICE_STATE_ONLINE,
     KNOWN_MODELS,
@@ -25,8 +23,7 @@ async def async_setup_entry(hass, config_entry, async_add_entities):
 
     entities = []
 
-    for device in myq.devices.values():
-        if device.device_json[MYQ_DEVICE_FAMILY] == MYQ_DEVICE_FAMILY_GATEWAY:
-            entities.append(MyQBinarySensorEntity(coordinator, device))
+    for device in myq.gateways.values():
+        entities.append(MyQBinarySensorEntity(coordinator, device))
 
     async_add_entities(entities, True)
homeassistant/components/myq/const.py
@@ -1,9 +1,9 @@
 """The MyQ integration."""
-from pymyq.device import (
-    STATE_CLOSED as MYQ_STATE_CLOSED,
-    STATE_CLOSING as MYQ_STATE_CLOSING,
-    STATE_OPEN as MYQ_STATE_OPEN,
-    STATE_OPENING as MYQ_STATE_OPENING,
+from pymyq.garagedoor import (
+    STATE_CLOSED as MYQ_COVER_STATE_CLOSED,
+    STATE_CLOSING as MYQ_COVER_STATE_CLOSING,
+    STATE_OPEN as MYQ_COVER_STATE_OPEN,
+    STATE_OPENING as MYQ_COVER_STATE_OPENING,
 )
 
 from homeassistant.const import STATE_CLOSED, STATE_CLOSING, STATE_OPEN, STATE_OPENING
@@ -13,10 +13,10 @@ DOMAIN = "myq"
 PLATFORMS = ["cover", "binary_sensor"]
 
 MYQ_TO_HASS = {
-    MYQ_STATE_CLOSED: STATE_CLOSED,
-    MYQ_STATE_CLOSING: STATE_CLOSING,
-    MYQ_STATE_OPEN: STATE_OPEN,
-    MYQ_STATE_OPENING: STATE_OPENING,
+    MYQ_COVER_STATE_CLOSED: STATE_CLOSED,
+    MYQ_COVER_STATE_CLOSING: STATE_CLOSING,
+    MYQ_COVER_STATE_OPEN: STATE_OPEN,
+    MYQ_COVER_STATE_OPENING: STATE_OPENING,
 }
 
 MYQ_GATEWAY = "myq_gateway"
@@ -24,7 +24,7 @@ MYQ_COORDINATOR = "coordinator"
 
 # myq has some ratelimits in place
 # and 61 seemed to be work every time
-UPDATE_INTERVAL = 61
+UPDATE_INTERVAL = 15
 
 # Estimated time it takes myq to start transition from one
 # state to the next.
|
|||||||
"""Support for MyQ-Enabled Garage Doors."""
|
"""Support for MyQ-Enabled Garage Doors."""
|
||||||
import time
|
import logging
|
||||||
|
|
||||||
from pymyq.const import (
|
from pymyq.const import (
|
||||||
DEVICE_STATE as MYQ_DEVICE_STATE,
|
DEVICE_STATE as MYQ_DEVICE_STATE,
|
||||||
DEVICE_STATE_ONLINE as MYQ_DEVICE_STATE_ONLINE,
|
DEVICE_STATE_ONLINE as MYQ_DEVICE_STATE_ONLINE,
|
||||||
DEVICE_TYPE as MYQ_DEVICE_TYPE,
|
|
||||||
DEVICE_TYPE_GATE as MYQ_DEVICE_TYPE_GATE,
|
DEVICE_TYPE_GATE as MYQ_DEVICE_TYPE_GATE,
|
||||||
KNOWN_MODELS,
|
KNOWN_MODELS,
|
||||||
MANUFACTURER,
|
MANUFACTURER,
|
||||||
)
|
)
|
||||||
|
from pymyq.errors import MyQError
|
||||||
|
|
||||||
from homeassistant.components.cover import (
|
from homeassistant.components.cover import (
|
||||||
DEVICE_CLASS_GARAGE,
|
DEVICE_CLASS_GARAGE,
|
||||||
@ -17,19 +17,12 @@ from homeassistant.components.cover import (
|
|||||||
SUPPORT_OPEN,
|
SUPPORT_OPEN,
|
||||||
CoverEntity,
|
CoverEntity,
|
||||||
)
|
)
|
||||||
from homeassistant.const import STATE_CLOSED, STATE_CLOSING, STATE_OPENING
|
from homeassistant.const import STATE_CLOSED, STATE_CLOSING, STATE_OPEN, STATE_OPENING
|
||||||
from homeassistant.core import callback
|
|
||||||
from homeassistant.helpers.event import async_call_later
|
|
||||||
from homeassistant.helpers.update_coordinator import CoordinatorEntity
|
from homeassistant.helpers.update_coordinator import CoordinatorEntity
|
||||||
|
|
||||||
from .const import (
|
from .const import DOMAIN, MYQ_COORDINATOR, MYQ_GATEWAY, MYQ_TO_HASS
|
||||||
DOMAIN,
|
|
||||||
MYQ_COORDINATOR,
|
_LOGGER = logging.getLogger(__name__)
|
||||||
MYQ_GATEWAY,
|
|
||||||
MYQ_TO_HASS,
|
|
||||||
TRANSITION_COMPLETE_DURATION,
|
|
||||||
TRANSITION_START_DURATION,
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
async def async_setup_entry(hass, config_entry, async_add_entities):
|
async def async_setup_entry(hass, config_entry, async_add_entities):
|
||||||
@ -50,13 +43,11 @@ class MyQDevice(CoordinatorEntity, CoverEntity):
|
|||||||
"""Initialize with API object, device id."""
|
"""Initialize with API object, device id."""
|
||||||
super().__init__(coordinator)
|
super().__init__(coordinator)
|
||||||
self._device = device
|
self._device = device
|
||||||
self._last_action_timestamp = 0
|
|
||||||
self._scheduled_transition_update = None
|
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def device_class(self):
|
def device_class(self):
|
||||||
"""Define this cover as a garage door."""
|
"""Define this cover as a garage door."""
|
||||||
device_type = self._device.device_json.get(MYQ_DEVICE_TYPE)
|
device_type = self._device.device_type
|
||||||
if device_type is not None and device_type == MYQ_DEVICE_TYPE_GATE:
|
if device_type is not None and device_type == MYQ_DEVICE_TYPE_GATE:
|
||||||
return DEVICE_CLASS_GATE
|
return DEVICE_CLASS_GATE
|
||||||
return DEVICE_CLASS_GARAGE
|
return DEVICE_CLASS_GARAGE
|
||||||
@ -87,6 +78,11 @@ class MyQDevice(CoordinatorEntity, CoverEntity):
|
|||||||
"""Return if the cover is closing or not."""
|
"""Return if the cover is closing or not."""
|
||||||
return MYQ_TO_HASS.get(self._device.state) == STATE_CLOSING
|
return MYQ_TO_HASS.get(self._device.state) == STATE_CLOSING
|
||||||
|
|
||||||
|
@property
|
||||||
|
def is_open(self):
|
||||||
|
"""Return if the cover is opening or not."""
|
||||||
|
return MYQ_TO_HASS.get(self._device.state) == STATE_OPEN
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def is_opening(self):
|
def is_opening(self):
|
||||||
"""Return if the cover is opening or not."""
|
"""Return if the cover is opening or not."""
|
||||||
@ -104,37 +100,48 @@ class MyQDevice(CoordinatorEntity, CoverEntity):
|
|||||||
|
|
||||||
async def async_close_cover(self, **kwargs):
|
async def async_close_cover(self, **kwargs):
|
||||||
"""Issue close command to cover."""
|
"""Issue close command to cover."""
|
||||||
self._last_action_timestamp = time.time()
|
if self.is_closing or self.is_closed:
|
||||||
await self._device.close()
|
return
|
||||||
self._async_schedule_update_for_transition()
|
|
||||||
|
try:
|
||||||
|
wait_task = await self._device.close(wait_for_state=False)
|
||||||
|
except MyQError as err:
|
||||||
|
_LOGGER.error(
|
||||||
|
"Closing of cover %s failed with error: %s", self._device.name, str(err)
|
||||||
|
)
|
||||||
|
|
||||||
|
return
|
||||||
|
|
||||||
|
# Write closing state to HASS
|
||||||
|
self.async_write_ha_state()
|
||||||
|
|
||||||
|
if not await wait_task:
|
||||||
|
_LOGGER.error("Closing of cover %s failed", self._device.name)
|
||||||
|
|
||||||
|
# Write final state to HASS
|
||||||
|
self.async_write_ha_state()
|
||||||
|
|
||||||
async def async_open_cover(self, **kwargs):
|
async def async_open_cover(self, **kwargs):
|
||||||
"""Issue open command to cover."""
|
"""Issue open command to cover."""
|
||||||
self._last_action_timestamp = time.time()
|
if self.is_opening or self.is_open:
|
||||||
await self._device.open()
|
return
|
||||||
self._async_schedule_update_for_transition()
|
|
||||||
|
|
||||||
@callback
|
try:
|
||||||
def _async_schedule_update_for_transition(self):
|
wait_task = await self._device.open(wait_for_state=False)
|
||||||
|
except MyQError as err:
|
||||||
|
_LOGGER.error(
|
||||||
|
"Opening of cover %s failed with error: %s", self._device.name, str(err)
|
||||||
|
)
|
||||||
|
return
|
||||||
|
|
||||||
|
# Write opening state to HASS
|
||||||
self.async_write_ha_state()
|
self.async_write_ha_state()
|
||||||
|
|
||||||
# Cancel any previous updates
|
if not await wait_task:
|
||||||
if self._scheduled_transition_update:
|
_LOGGER.error("Opening of cover %s failed", self._device.name)
|
||||||
self._scheduled_transition_update()
|
|
||||||
|
|
||||||
# Schedule an update for when we expect the transition
|
# Write final state to HASS
|
||||||
# to be completed so the garage door or gate does not
|
self.async_write_ha_state()
|
||||||
# seem like its closing or opening for a long time
|
|
||||||
self._scheduled_transition_update = async_call_later(
|
|
||||||
self.hass,
|
|
||||||
TRANSITION_COMPLETE_DURATION,
|
|
||||||
self._async_complete_schedule_update,
|
|
||||||
)
|
|
||||||
|
|
||||||
async def _async_complete_schedule_update(self, _):
|
|
||||||
"""Update status of the cover via coordinator."""
|
|
||||||
self._scheduled_transition_update = None
|
|
||||||
await self.coordinator.async_request_refresh()
|
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def device_info(self):
|
def device_info(self):
|
||||||
@ -152,22 +159,8 @@ class MyQDevice(CoordinatorEntity, CoverEntity):
|
|||||||
device_info["via_device"] = (DOMAIN, self._device.parent_device_id)
|
device_info["via_device"] = (DOMAIN, self._device.parent_device_id)
|
||||||
return device_info
|
return device_info
|
||||||
|
|
||||||
@callback
|
|
||||||
def _async_consume_update(self):
|
|
||||||
if time.time() - self._last_action_timestamp <= TRANSITION_START_DURATION:
|
|
||||||
# If we just started a transition we need
|
|
||||||
# to prevent a bouncy state
|
|
||||||
return
|
|
||||||
|
|
||||||
self.async_write_ha_state()
|
|
||||||
|
|
||||||
async def async_added_to_hass(self):
|
async def async_added_to_hass(self):
|
||||||
"""Subscribe to updates."""
|
"""Subscribe to updates."""
|
||||||
self.async_on_remove(
|
self.async_on_remove(
|
||||||
self.coordinator.async_add_listener(self._async_consume_update)
|
self.coordinator.async_add_listener(self.async_write_ha_state)
|
||||||
)
|
)
|
||||||
|
|
||||||
async def async_will_remove_from_hass(self):
|
|
||||||
"""Undo subscription."""
|
|
||||||
if self._scheduled_transition_update:
|
|
||||||
self._scheduled_transition_update()
|
|
||||||
|
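The rewritten cover methods replace the timer-based transition bookkeeping with a two-phase flow: start the command with wait_for_state=False, write the transitional state immediately, then await the task the library returned and write the final state. A stripped-down sketch of that flow (hypothetical device object and exception type, not pymyq's API):

    async def close_cover(device, write_state, logger):
        """Optimistic close: write the transitional state, then the settled one."""
        if device.state in ("closing", "closed"):
            return
        try:
            # wait_for_state=False returns a task that resolves when the door settles.
            wait_task = await device.close(wait_for_state=False)
        except RuntimeError as err:  # stand-in for pymyq's MyQError
            logger.error("Closing of cover %s failed with error: %s", device.name, err)
            return
        write_state()  # show "closing" right away
        if not await wait_task:
            logger.error("Closing of cover %s failed", device.name)
        write_state()  # show whatever state the library reports now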
homeassistant/components/myq/manifest.json
@@ -2,7 +2,7 @@
   "domain": "myq",
   "name": "MyQ",
   "documentation": "https://www.home-assistant.io/integrations/myq",
-  "requirements": ["pymyq==2.0.14"],
+  "requirements": ["pymyq==3.0.1"],
   "codeowners": ["@bdraco"],
   "config_flow": true,
   "homekit": {
homeassistant/components/powerwall/__init__.py
@@ -4,10 +4,15 @@ from datetime import timedelta
 import logging
 
 import requests
-from tesla_powerwall import MissingAttributeError, Powerwall, PowerwallUnreachableError
+from tesla_powerwall import (
+    AccessDeniedError,
+    MissingAttributeError,
+    Powerwall,
+    PowerwallUnreachableError,
+)
 
 from homeassistant.config_entries import ConfigEntry
-from homeassistant.const import CONF_IP_ADDRESS
+from homeassistant.const import CONF_IP_ADDRESS, CONF_PASSWORD
 from homeassistant.core import HomeAssistant, callback
 from homeassistant.exceptions import ConfigEntryNotReady
 from homeassistant.helpers import entity_registry
@@ -93,11 +98,13 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry):
 
     hass.data[DOMAIN].setdefault(entry_id, {})
     http_session = requests.Session()
+
+    password = entry.data.get(CONF_PASSWORD)
     power_wall = Powerwall(entry.data[CONF_IP_ADDRESS], http_session=http_session)
     try:
-        await hass.async_add_executor_job(power_wall.detect_and_pin_version)
-        await hass.async_add_executor_job(_fetch_powerwall_data, power_wall)
-        powerwall_data = await hass.async_add_executor_job(call_base_info, power_wall)
+        powerwall_data = await hass.async_add_executor_job(
+            _login_and_fetch_base_info, power_wall, password
+        )
     except PowerwallUnreachableError as err:
         http_session.close()
         raise ConfigEntryNotReady from err
@@ -105,6 +112,11 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry):
         http_session.close()
         await _async_handle_api_changed_error(hass, err)
         return False
+    except AccessDeniedError as err:
+        _LOGGER.debug("Authentication failed", exc_info=err)
+        http_session.close()
+        _async_start_reauth(hass, entry)
+        return False
 
     await _migrate_old_unique_ids(hass, entry_id, powerwall_data)
 
@@ -112,21 +124,19 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry):
         """Fetch data from API endpoint."""
         # Check if we had an error before
         _LOGGER.debug("Checking if update failed")
-        if not hass.data[DOMAIN][entry.entry_id][POWERWALL_API_CHANGED]:
-            _LOGGER.debug("Updating data")
-            try:
-                return await hass.async_add_executor_job(
-                    _fetch_powerwall_data, power_wall
-                )
-            except PowerwallUnreachableError as err:
-                raise UpdateFailed("Unable to fetch data from powerwall") from err
-            except MissingAttributeError as err:
-                await _async_handle_api_changed_error(hass, err)
-                hass.data[DOMAIN][entry.entry_id][POWERWALL_API_CHANGED] = True
-                # Returns the cached data. This data can also be None
-                return hass.data[DOMAIN][entry.entry_id][POWERWALL_COORDINATOR].data
-        else:
-            return hass.data[DOMAIN][entry.entry_id][POWERWALL_COORDINATOR].data
+        if hass.data[DOMAIN][entry.entry_id][POWERWALL_API_CHANGED]:
+            return hass.data[DOMAIN][entry.entry_id][POWERWALL_COORDINATOR].data
+
+        _LOGGER.debug("Updating data")
+        try:
+            return await _async_update_powerwall_data(hass, entry, power_wall)
+        except AccessDeniedError:
+            if password is None:
+                raise
+
+            # If the session expired, relogin, and try again
+            await hass.async_add_executor_job(power_wall.login, "", password)
+            return await _async_update_powerwall_data(hass, entry, power_wall)
 
     coordinator = DataUpdateCoordinator(
         hass,
@@ -156,6 +166,40 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry):
     return True
 
 
+async def _async_update_powerwall_data(
+    hass: HomeAssistant, entry: ConfigEntry, power_wall: Powerwall
+):
+    """Fetch updated powerwall data."""
+    try:
+        return await hass.async_add_executor_job(_fetch_powerwall_data, power_wall)
+    except PowerwallUnreachableError as err:
+        raise UpdateFailed("Unable to fetch data from powerwall") from err
+    except MissingAttributeError as err:
+        await _async_handle_api_changed_error(hass, err)
+        hass.data[DOMAIN][entry.entry_id][POWERWALL_API_CHANGED] = True
+        # Returns the cached data. This data can also be None
+        return hass.data[DOMAIN][entry.entry_id][POWERWALL_COORDINATOR].data
+
+
+def _async_start_reauth(hass: HomeAssistant, entry: ConfigEntry):
+    hass.async_create_task(
+        hass.config_entries.flow.async_init(
+            DOMAIN,
+            context={"source": "reauth"},
+            data=entry.data,
+        )
+    )
+    _LOGGER.error("Password is no longer valid. Please reauthenticate")
+
+
+def _login_and_fetch_base_info(power_wall: Powerwall, password: str):
+    """Login to the powerwall and fetch the base info."""
+    if password is not None:
+        power_wall.login("", password)
+    power_wall.detect_and_pin_version()
+    return call_base_info(power_wall)
+
+
 def call_base_info(power_wall):
     """Wrap powerwall properties to be a callable."""
     serial_numbers = power_wall.get_serial_numbers()
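The Powerwall update path now distinguishes two authentication situations: with a stored password it re-logs in and retries once when the session has expired, and without one it re-raises so the error bubbles up and the reauth flow is started. The retry shape on its own (plain-Python stand-ins for the tesla_powerwall calls):

    def update_with_relogin(fetch, login, password):
        try:
            return fetch()
        except PermissionError:  # stand-in for tesla_powerwall.AccessDeniedError
            if password is None:
                raise  # nothing to retry with; the caller starts the reauth flow
            login("", password)  # the integration logs in with an empty username
            return fetch()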
homeassistant/components/powerwall/config_flow.py
@@ -1,12 +1,17 @@
 """Config flow for Tesla Powerwall integration."""
 import logging
 
-from tesla_powerwall import MissingAttributeError, Powerwall, PowerwallUnreachableError
+from tesla_powerwall import (
+    AccessDeniedError,
+    MissingAttributeError,
+    Powerwall,
+    PowerwallUnreachableError,
+)
 import voluptuous as vol
 
 from homeassistant import config_entries, core, exceptions
 from homeassistant.components.dhcp import IP_ADDRESS
-from homeassistant.const import CONF_IP_ADDRESS
+from homeassistant.const import CONF_IP_ADDRESS, CONF_PASSWORD
 from homeassistant.core import callback
 
 from .const import DOMAIN  # pylint:disable=unused-import
@@ -14,6 +19,14 @@ from .const import DOMAIN  # pylint:disable=unused-import
 _LOGGER = logging.getLogger(__name__)
 
 
+def _login_and_fetch_site_info(power_wall: Powerwall, password: str):
+    """Login to the powerwall and fetch the base info."""
+    if password is not None:
+        power_wall.login("", password)
+    power_wall.detect_and_pin_version()
+    return power_wall.get_site_info()
+
+
 async def validate_input(hass: core.HomeAssistant, data):
     """Validate the user input allows us to connect.
 
@@ -21,12 +34,12 @@ async def validate_input(hass: core.HomeAssistant, data):
     """
 
     power_wall = Powerwall(data[CONF_IP_ADDRESS])
+    password = data[CONF_PASSWORD]
 
     try:
-        await hass.async_add_executor_job(power_wall.detect_and_pin_version)
-        site_info = await hass.async_add_executor_job(power_wall.get_site_info)
-    except PowerwallUnreachableError as err:
-        raise CannotConnect from err
+        site_info = await hass.async_add_executor_job(
+            _login_and_fetch_site_info, power_wall, password
+        )
     except MissingAttributeError as err:
         # Only log the exception without the traceback
         _LOGGER.error(str(err))
@@ -62,27 +75,44 @@ class ConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):
         if user_input is not None:
             try:
                 info = await validate_input(self.hass, user_input)
-            except CannotConnect:
-                errors["base"] = "cannot_connect"
+            except PowerwallUnreachableError:
+                errors[CONF_IP_ADDRESS] = "cannot_connect"
             except WrongVersion:
                 errors["base"] = "wrong_version"
+            except AccessDeniedError:
+                errors[CONF_PASSWORD] = "invalid_auth"
             except Exception:  # pylint: disable=broad-except
                 _LOGGER.exception("Unexpected exception")
                 errors["base"] = "unknown"
 
-            if "base" not in errors:
-                await self.async_set_unique_id(user_input[CONF_IP_ADDRESS])
-                self._abort_if_unique_id_configured()
+            if not errors:
+                existing_entry = await self.async_set_unique_id(
+                    user_input[CONF_IP_ADDRESS]
+                )
+                if existing_entry:
+                    self.hass.config_entries.async_update_entry(
+                        existing_entry, data=user_input
+                    )
+                    await self.hass.config_entries.async_reload(existing_entry.entry_id)
+                    return self.async_abort(reason="reauth_successful")
                 return self.async_create_entry(title=info["title"], data=user_input)
 
         return self.async_show_form(
             step_id="user",
             data_schema=vol.Schema(
-                {vol.Required(CONF_IP_ADDRESS, default=self.ip_address): str}
+                {
+                    vol.Required(CONF_IP_ADDRESS, default=self.ip_address): str,
+                    vol.Optional(CONF_PASSWORD): str,
+                }
            ),
            errors=errors,
        )
 
+    async def async_step_reauth(self, data):
+        """Handle configuration by re-auth."""
+        self.ip_address = data[CONF_IP_ADDRESS]
+        return await self.async_step_user()
+
     @callback
     def _async_ip_address_already_configured(self, ip_address):
         """See if we already have an entry matching the ip_address."""
@@ -92,9 +122,5 @@ class ConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):
         return False
 
 
-class CannotConnect(exceptions.HomeAssistantError):
-    """Error to indicate we cannot connect."""
-
-
 class WrongVersion(exceptions.HomeAssistantError):
     """Error to indicate the powerwall uses a software version we cannot interact with."""
homeassistant/components/powerwall/manifest.json
@@ -3,7 +3,7 @@
   "name": "Tesla Powerwall",
   "config_flow": true,
   "documentation": "https://www.home-assistant.io/integrations/powerwall",
-  "requirements": ["tesla-powerwall==0.3.3"],
+  "requirements": ["tesla-powerwall==0.3.5"],
   "codeowners": ["@bdraco", "@jrester"],
   "dhcp": [
     {"hostname":"1118431-*","macaddress":"88DA1A*"},
homeassistant/components/powerwall/strings.json
@@ -4,18 +4,22 @@
     "step": {
       "user": {
         "title": "Connect to the powerwall",
+        "description": "The password is usually the last 5 characters of the serial number for Backup Gateway and can be found in the Telsa app; or the last 5 characters of the password found inside the door for Backup Gateway 2.",
         "data": {
-          "ip_address": "[%key:common::config_flow::data::ip%]"
+          "ip_address": "[%key:common::config_flow::data::ip%]",
+          "password": "[%key:common::config_flow::data::password%]"
         }
       }
     },
     "error": {
       "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
       "wrong_version": "Your powerwall uses a software version that is not supported. Please consider upgrading or reporting this issue so it can be resolved.",
-      "unknown": "[%key:common::config_flow::error::unknown%]"
+      "unknown": "[%key:common::config_flow::error::unknown%]",
+      "invalid_auth": "[%key:common::config_flow::error::invalid_auth%]"
     },
     "abort": {
-      "already_configured": "[%key:common::config_flow::abort::already_configured_device%]"
+      "already_configured": "[%key:common::config_flow::abort::already_configured_device%]",
+      "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]"
     }
   }
 }
homeassistant/components/powerwall/translations/en.json
@@ -1,10 +1,12 @@
 {
     "config": {
         "abort": {
-            "already_configured": "Device is already configured"
+            "already_configured": "Device is already configured",
+            "reauth_successful": "Re-authentication was successful"
         },
         "error": {
             "cannot_connect": "Failed to connect",
+            "invalid_auth": "Invalid authentication",
             "unknown": "Unexpected error",
             "wrong_version": "Your powerwall uses a software version that is not supported. Please consider upgrading or reporting this issue so it can be resolved."
         },
@@ -12,8 +14,10 @@
         "step": {
             "user": {
                 "data": {
-                    "ip_address": "IP Address"
+                    "ip_address": "IP Address",
+                    "password": "Password"
                 },
+                "description": "The password is usually the last 5 characters of the serial number for Backup Gateway and can be found in the Telsa app; or the last 5 characters of the password found inside the door for Backup Gateway 2.",
                 "title": "Connect to the powerwall"
             }
         }
@@ -19,6 +19,7 @@
     },
     "abort": {
       "already_configured": "[%key:common::config_flow::abort::already_configured_device%]",
+      "already_in_progress": "[%key:common::config_flow::abort::already_in_progress%]",
      "unknown": "[%key:common::config_flow::error::unknown%]"
    }
  }
@@ -2,6 +2,7 @@
     "config": {
         "abort": {
             "already_configured": "Device is already configured",
+            "already_in_progress": "Configuration flow is already in progress",
             "unknown": "Unexpected error"
         },
         "error": {
homeassistant/components/roon/manifest.json
@@ -4,7 +4,7 @@
   "config_flow": true,
   "documentation": "https://www.home-assistant.io/integrations/roon",
   "requirements": [
-    "roonapi==0.0.31"
+    "roonapi==0.0.32"
   ],
   "codeowners": [
     "@pavoni"
homeassistant/components/roon/server.py
@@ -141,17 +141,6 @@ class RoonServer:
                 async_dispatcher_send(self.hass, "roon_media_player", player_data)
                 self.offline_devices.add(dev_id)
 
-    async def async_update_playlists(self):
-        """Store lists in memory with all playlists - could be used by a custom lovelace card."""
-        all_playlists = []
-        roon_playlists = self.roonapi.playlists()
-        if roon_playlists and "items" in roon_playlists:
-            all_playlists += [item["title"] for item in roon_playlists["items"]]
-        roon_playlists = self.roonapi.internet_radio()
-        if roon_playlists and "items" in roon_playlists:
-            all_playlists += [item["title"] for item in roon_playlists["items"]]
-        self.all_playlists = all_playlists
-
     async def async_create_player_data(self, zone, output):
         """Create player object dict by combining zone with output."""
         new_dict = zone.copy()
homeassistant/components/shelly/light.py
@@ -187,6 +187,11 @@ class ShellyLight(ShellyBlockEntity, LightEntity):
 
     async def async_turn_on(self, **kwargs) -> None:
         """Turn on light."""
+        if self.block.type == "relay":
+            self.control_result = await self.block.set_state(turn="on")
+            self.async_write_ha_state()
+            return
+
         params = {"turn": "on"}
         if ATTR_BRIGHTNESS in kwargs:
             tmp_brightness = int(kwargs[ATTR_BRIGHTNESS] / 255 * 100)
homeassistant/components/tesla/__init__.py
@@ -5,10 +5,11 @@ from datetime import timedelta
 import logging
 
 import async_timeout
-from teslajsonpy import Controller as TeslaAPI, TeslaException
+from teslajsonpy import Controller as TeslaAPI
+from teslajsonpy.exceptions import IncompleteCredentials, TeslaException
 import voluptuous as vol
 
-from homeassistant.config_entries import SOURCE_IMPORT
+from homeassistant.config_entries import SOURCE_IMPORT, ConfigEntry
 from homeassistant.const import (
     ATTR_BATTERY_CHARGING,
     ATTR_BATTERY_LEVEL,
@@ -17,8 +18,9 @@ from homeassistant.const import (
     CONF_SCAN_INTERVAL,
     CONF_TOKEN,
     CONF_USERNAME,
+    HTTP_UNAUTHORIZED,
 )
-from homeassistant.core import callback
+from homeassistant.core import HomeAssistant, callback
 from homeassistant.exceptions import ConfigEntryNotReady
 from homeassistant.helpers import aiohttp_client, config_validation as cv
 from homeassistant.helpers.update_coordinator import (
@@ -28,12 +30,7 @@ from homeassistant.helpers.update_coordinator import (
 )
 from homeassistant.util import slugify
 
-from .config_flow import (
-    CannotConnect,
-    InvalidAuth,
-    configured_instances,
-    validate_input,
-)
+from .config_flow import CannotConnect, InvalidAuth, validate_input
 from .const import (
     CONF_WAKE_ON_START,
     DATA_LISTENER,
@@ -75,6 +72,16 @@ def _async_save_tokens(hass, config_entry, access_token, refresh_token):
     )
 
 
+@callback
+def _async_configured_emails(hass):
+    """Return a set of configured Tesla emails."""
+    return {
+        entry.data[CONF_USERNAME]
+        for entry in hass.config_entries.async_entries(DOMAIN)
+        if CONF_USERNAME in entry.data
+    }
+
+
 async def async_setup(hass, base_config):
     """Set up of Tesla component."""
 
@@ -95,7 +102,7 @@ async def async_setup(hass, base_config):
     email = config[CONF_USERNAME]
     password = config[CONF_PASSWORD]
     scan_interval = config[CONF_SCAN_INTERVAL]
-    if email in configured_instances(hass):
+    if email in _async_configured_emails(hass):
         try:
             info = await validate_input(hass, config)
         except (CannotConnect, InvalidAuth):
@@ -103,6 +110,8 @@ async def async_setup(hass, base_config):
         _update_entry(
             email,
             data={
+                CONF_USERNAME: email,
+                CONF_PASSWORD: password,
                 CONF_ACCESS_TOKEN: info[CONF_ACCESS_TOKEN],
                 CONF_TOKEN: info[CONF_TOKEN],
             },
@@ -136,6 +145,8 async def async_setup_entry(hass, config_entry):
     try:
         controller = TeslaAPI(
             websession,
+            email=config.get(CONF_USERNAME),
+            password=config.get(CONF_PASSWORD),
             refresh_token=config[CONF_TOKEN],
             access_token=config[CONF_ACCESS_TOKEN],
             update_interval=config_entry.options.get(
@@ -147,7 +158,12 @@ async def async_setup_entry(hass, config_entry):
                 CONF_WAKE_ON_START, DEFAULT_WAKE_ON_START
             )
         )
+    except IncompleteCredentials:
+        _async_start_reauth(hass, config_entry)
+        return False
     except TeslaException as ex:
+        if ex.code == HTTP_UNAUTHORIZED:
+            _async_start_reauth(hass, config_entry)
         _LOGGER.error("Unable to communicate with Tesla API: %s", ex.message)
         return False
     _async_save_tokens(hass, config_entry, access_token, refresh_token)
@@ -202,6 +218,17 @@ async def async_unload_entry(hass, config_entry) -> bool:
     return False
 
 
+def _async_start_reauth(hass: HomeAssistant, entry: ConfigEntry):
+    hass.async_create_task(
+        hass.config_entries.flow.async_init(
+            DOMAIN,
+            context={"source": "reauth"},
+            data=entry.data,
+        )
+    )
+    _LOGGER.error("Credentials are no longer valid. Please reauthenticate")
+
+
 async def update_listener(hass, config_entry):
     """Update when config_entry options update."""
     controller = hass.data[DOMAIN][config_entry.entry_id]["coordinator"].controller
|
|||||||
CONF_WAKE_ON_START,
|
CONF_WAKE_ON_START,
|
||||||
DEFAULT_SCAN_INTERVAL,
|
DEFAULT_SCAN_INTERVAL,
|
||||||
DEFAULT_WAKE_ON_START,
|
DEFAULT_WAKE_ON_START,
|
||||||
DOMAIN,
|
|
||||||
MIN_SCAN_INTERVAL,
|
MIN_SCAN_INTERVAL,
|
||||||
)
|
)
|
||||||
|
from .const import DOMAIN # pylint:disable=unused-import
|
||||||
|
|
||||||
_LOGGER = logging.getLogger(__name__)
|
_LOGGER = logging.getLogger(__name__)
|
||||||
|
|
||||||
DATA_SCHEMA = vol.Schema(
|
|
||||||
{vol.Required(CONF_USERNAME): str, vol.Required(CONF_PASSWORD): str}
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
@callback
|
|
||||||
def configured_instances(hass):
|
|
||||||
"""Return a set of configured Tesla instances."""
|
|
||||||
return {entry.title for entry in hass.config_entries.async_entries(DOMAIN)}
|
|
||||||
|
|
||||||
|
|
||||||
class TeslaConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):
|
class TeslaConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):
|
||||||
"""Handle a config flow for Tesla."""
|
"""Handle a config flow for Tesla."""
|
||||||
@ -43,46 +33,56 @@ class TeslaConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):
|
|||||||
VERSION = 1
|
VERSION = 1
|
||||||
CONNECTION_CLASS = config_entries.CONN_CLASS_CLOUD_POLL
|
CONNECTION_CLASS = config_entries.CONN_CLASS_CLOUD_POLL
|
||||||
|
|
||||||
|
def __init__(self):
|
||||||
|
"""Initialize the tesla flow."""
|
||||||
|
self.username = None
|
||||||
|
|
||||||
async def async_step_import(self, import_config):
|
async def async_step_import(self, import_config):
|
||||||
"""Import a config entry from configuration.yaml."""
|
"""Import a config entry from configuration.yaml."""
|
||||||
return await self.async_step_user(import_config)
|
return await self.async_step_user(import_config)
|
||||||
|
|
||||||
async def async_step_user(self, user_input=None):
|
async def async_step_user(self, user_input=None):
|
||||||
"""Handle the start of the config flow."""
|
"""Handle the start of the config flow."""
|
||||||
|
errors = {}
|
||||||
|
|
||||||
if not user_input:
|
if user_input is not None:
|
||||||
return self.async_show_form(
|
existing_entry = self._async_entry_for_username(user_input[CONF_USERNAME])
|
||||||
step_id="user",
|
if (
|
||||||
data_schema=DATA_SCHEMA,
|
existing_entry
|
||||||
errors={},
|
and existing_entry.data[CONF_PASSWORD] == user_input[CONF_PASSWORD]
|
||||||
description_placeholders={},
|
):
|
||||||
)
|
return self.async_abort(reason="already_configured")
|
||||||
|
|
||||||
if user_input[CONF_USERNAME] in configured_instances(self.hass):
|
|
||||||
return self.async_show_form(
|
|
||||||
step_id="user",
|
|
||||||
data_schema=DATA_SCHEMA,
|
|
||||||
errors={CONF_USERNAME: "already_configured"},
|
|
||||||
description_placeholders={},
|
|
||||||
)
|
|
||||||
|
|
||||||
try:
|
try:
|
||||||
info = await validate_input(self.hass, user_input)
|
info = await validate_input(self.hass, user_input)
|
||||||
except CannotConnect:
|
except CannotConnect:
|
||||||
return self.async_show_form(
|
errors["base"] = "cannot_connect"
|
||||||
step_id="user",
|
|
||||||
data_schema=DATA_SCHEMA,
|
|
||||||
errors={"base": "cannot_connect"},
|
|
||||||
description_placeholders={},
|
|
||||||
)
|
|
||||||
except InvalidAuth:
|
except InvalidAuth:
|
||||||
|
errors["base"] = "invalid_auth"
|
||||||
|
|
||||||
|
if not errors:
|
||||||
|
if existing_entry:
|
||||||
|
self.hass.config_entries.async_update_entry(
|
||||||
|
existing_entry, data=info
|
||||||
|
)
|
||||||
|
await self.hass.config_entries.async_reload(existing_entry.entry_id)
|
||||||
|
return self.async_abort(reason="reauth_successful")
|
||||||
|
|
||||||
|
return self.async_create_entry(
|
||||||
|
title=user_input[CONF_USERNAME], data=info
|
||||||
|
)
|
||||||
|
|
||||||
return self.async_show_form(
|
return self.async_show_form(
|
||||||
step_id="user",
|
step_id="user",
|
||||||
data_schema=DATA_SCHEMA,
|
data_schema=self._async_schema(),
|
||||||
errors={"base": "invalid_auth"},
|
errors=errors,
|
||||||
description_placeholders={},
|
description_placeholders={},
|
||||||
)
|
)
|
||||||
return self.async_create_entry(title=user_input[CONF_USERNAME], data=info)
|
|
||||||
|
async def async_step_reauth(self, data):
|
||||||
|
"""Handle configuration by re-auth."""
|
||||||
|
self.username = data[CONF_USERNAME]
|
||||||
|
return await self.async_step_user()
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
@callback
|
@callback
|
||||||
@ -90,6 +90,23 @@ class TeslaConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):
|
|||||||
"""Get the options flow for this handler."""
|
"""Get the options flow for this handler."""
|
||||||
return OptionsFlowHandler(config_entry)
|
return OptionsFlowHandler(config_entry)
|
||||||
|
|
||||||
|
@callback
|
||||||
|
def _async_schema(self):
|
||||||
|
"""Fetch schema with defaults."""
|
||||||
|
return vol.Schema(
|
||||||
|
{
|
||||||
|
vol.Required(CONF_USERNAME, default=self.username): str,
|
||||||
|
vol.Required(CONF_PASSWORD): str,
|
||||||
|
}
|
||||||
|
)
|
||||||
|
|
||||||
|
@callback
|
||||||
|
def _async_entry_for_username(self, username):
|
||||||
|
"""Find an existing entry for a username."""
|
||||||
|
for entry in self._async_current_entries():
|
||||||
|
if entry.data.get(CONF_USERNAME) == username:
|
||||||
|
return entry
|
||||||
|
|
||||||
|
|
||||||
class OptionsFlowHandler(config_entries.OptionsFlow):
|
class OptionsFlowHandler(config_entries.OptionsFlow):
|
||||||
"""Handle a option flow for Tesla."""
|
"""Handle a option flow for Tesla."""
|
||||||
@ -140,6 +157,8 @@ async def validate_input(hass: core.HomeAssistant, data):
|
|||||||
(config[CONF_TOKEN], config[CONF_ACCESS_TOKEN]) = await controller.connect(
|
(config[CONF_TOKEN], config[CONF_ACCESS_TOKEN]) = await controller.connect(
|
||||||
test_login=True
|
test_login=True
|
||||||
)
|
)
|
||||||
|
config[CONF_USERNAME] = data[CONF_USERNAME]
|
||||||
|
config[CONF_PASSWORD] = data[CONF_PASSWORD]
|
||||||
except TeslaException as ex:
|
except TeslaException as ex:
|
||||||
if ex.code == HTTP_UNAUTHORIZED:
|
if ex.code == HTTP_UNAUTHORIZED:
|
||||||
_LOGGER.error("Invalid credentials: %s", ex)
|
_LOGGER.error("Invalid credentials: %s", ex)
|
||||||
|
@@ -3,7 +3,7 @@
   "name": "Tesla",
   "config_flow": true,
   "documentation": "https://www.home-assistant.io/integrations/tesla",
-  "requirements": ["teslajsonpy==0.10.4"],
+  "requirements": ["teslajsonpy==0.11.5"],
   "codeowners": ["@zabuldon", "@alandtse"],
   "dhcp": [
     { "hostname": "tesla_*", "macaddress": "4CFCAA*" },
@@ -5,6 +5,10 @@
       "already_configured": "[%key:common::config_flow::abort::already_configured_account%]",
       "invalid_auth": "[%key:common::config_flow::error::invalid_auth%]"
     },
+    "abort": {
+      "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]",
+      "already_configured": "[%key:common::config_flow::abort::already_configured_account%]"
+    },
     "step": {
       "user": {
         "data": {
@@ -1,5 +1,9 @@
 {
     "config": {
+        "abort": {
+            "already_configured": "Account is already configured",
+            "reauth_successful": "Re-authentication was successful"
+        },
         "error": {
             "already_configured": "Account is already configured",
             "cannot_connect": "Failed to connect",
@@ -397,43 +397,50 @@ class TransmissionData:
 
     def check_completed_torrent(self):
         """Get completed torrent functionality."""
+        old_completed_torrent_names = {
+            torrent.name for torrent in self._completed_torrents
+        }
+
         current_completed_torrents = [
             torrent for torrent in self._torrents if torrent.status == "seeding"
         ]
-        freshly_completed_torrents = set(current_completed_torrents).difference(
-            self._completed_torrents
-        )
-        self._completed_torrents = current_completed_torrents
 
-        for torrent in freshly_completed_torrents:
-            self.hass.bus.fire(
-                EVENT_DOWNLOADED_TORRENT, {"name": torrent.name, "id": torrent.id}
-            )
+        for torrent in current_completed_torrents:
+            if torrent.name not in old_completed_torrent_names:
+                self.hass.bus.fire(
+                    EVENT_DOWNLOADED_TORRENT, {"name": torrent.name, "id": torrent.id}
+                )
+
+        self._completed_torrents = current_completed_torrents
 
     def check_started_torrent(self):
         """Get started torrent functionality."""
+        old_started_torrent_names = {torrent.name for torrent in self._started_torrents}
+
         current_started_torrents = [
             torrent for torrent in self._torrents if torrent.status == "downloading"
         ]
-        freshly_started_torrents = set(current_started_torrents).difference(
-            self._started_torrents
-        )
-        self._started_torrents = current_started_torrents
 
-        for torrent in freshly_started_torrents:
-            self.hass.bus.fire(
-                EVENT_STARTED_TORRENT, {"name": torrent.name, "id": torrent.id}
-            )
+        for torrent in current_started_torrents:
+            if torrent.name not in old_started_torrent_names:
+                self.hass.bus.fire(
+                    EVENT_STARTED_TORRENT, {"name": torrent.name, "id": torrent.id}
+                )
+
+        self._started_torrents = current_started_torrents
 
     def check_removed_torrent(self):
         """Get removed torrent functionality."""
-        freshly_removed_torrents = set(self._all_torrents).difference(self._torrents)
-        self._all_torrents = self._torrents
-        for torrent in freshly_removed_torrents:
-            self.hass.bus.fire(
-                EVENT_REMOVED_TORRENT, {"name": torrent.name, "id": torrent.id}
-            )
+        current_torrent_names = {torrent.name for torrent in self._torrents}
+
+        for torrent in self._all_torrents:
+            if torrent.name not in current_torrent_names:
+                self.hass.bus.fire(
+                    EVENT_REMOVED_TORRENT, {"name": torrent.name, "id": torrent.id}
+                )
+
+        self._all_torrents = self._torrents.copy()
 
     def start_torrents(self):
         """Start all torrents."""
         if len(self._torrents) <= 0:
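Note on the Transmission change above: the event logic switches from set-difference on the torrent objects themselves to comparing torrent names across polls. A hedged, self-contained illustration of why that matters (toy classes only, not Home Assistant or transmission client code; it assumes each poll hands back fresh objects, which is what the change implies):

    # Toy illustration: if every poll returns new objects, identity-based set
    # difference treats every torrent as "new", while comparing names across
    # polls only flags genuinely new ones.
    class Torrent:
        def __init__(self, name):
            self.name = name

    poll_1 = [Torrent("ubuntu.iso"), Torrent("podcast.mp3")]
    poll_2 = [Torrent("ubuntu.iso"), Torrent("podcast.mp3")]  # same names, new objects

    print(len(set(poll_2).difference(poll_1)))  # 2 -> would fire spurious events
    old_names = {t.name for t in poll_1}
    print([t.name for t in poll_2 if t.name not in old_names])  # [] -> no events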
@@ -258,8 +258,10 @@ class ZWaveBooleanBinarySensor(ZWaveBaseEntity, BinarySensorEntity):
     """Representation of a Z-Wave binary_sensor."""
 
     @property
-    def is_on(self) -> bool:
+    def is_on(self) -> Optional[bool]:
         """Return if the sensor is on or off."""
+        if self.info.primary_value.value is None:
+            return None
         return bool(self.info.primary_value.value)
 
     @property
@@ -301,8 +303,10 @@ class ZWaveNotificationBinarySensor(ZWaveBaseEntity, BinarySensorEntity):
         self._mapping_info = self._get_sensor_mapping()
 
     @property
-    def is_on(self) -> bool:
+    def is_on(self) -> Optional[bool]:
         """Return if the sensor is on or off."""
+        if self.info.primary_value.value is None:
+            return None
         return int(self.info.primary_value.value) == int(self.state_key)
 
     @property
@@ -349,8 +353,10 @@ class ZWavePropertyBinarySensor(ZWaveBaseEntity, BinarySensorEntity):
         self._mapping_info = self._get_sensor_mapping()
 
     @property
-    def is_on(self) -> bool:
+    def is_on(self) -> Optional[bool]:
         """Return if the sensor is on or off."""
+        if self.info.primary_value.value is None:
+            return None
        return self.info.primary_value.value in self._mapping_info["on_states"]
 
     @property
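Note on the Z-Wave JS change above: the guard lets is_on return None while the driver has not reported a value yet, instead of evaluating None (for example, int(None) would raise a TypeError), and the Optional[bool] annotation reflects that. A minimal stand-alone sketch of the same pattern (DemoBinarySensor is a placeholder, not the integration class):

    from typing import Optional

    class DemoBinarySensor:
        """Toy stand-in for the patched Z-Wave JS binary sensors."""

        def __init__(self, raw_value):
            self._raw_value = raw_value  # driver value, possibly still None

        @property
        def is_on(self) -> Optional[bool]:
            if self._raw_value is None:
                return None  # value not reported yet; don't coerce None
            return bool(self._raw_value)

    print(DemoBinarySensor(None).is_on)  # None
    print(DemoBinarySensor(1).is_on)     # True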
@@ -1,7 +1,7 @@
 """Constants used by Home Assistant components."""
 MAJOR_VERSION = 2021
 MINOR_VERSION = 2
-PATCH_VERSION = "2"
+PATCH_VERSION = "3"
 __short_version__ = f"{MAJOR_VERSION}.{MINOR_VERSION}"
 __version__ = f"{__short_version__}.{PATCH_VERSION}"
 REQUIRED_PYTHON_VER = (3, 8, 0)
@@ -27,7 +27,7 @@ The following cases will never be passed to your function:
 - state adding/removing
 """
 from types import MappingProxyType
-from typing import Any, Callable, Dict, Optional, Union
+from typing import Any, Callable, Dict, Optional, Tuple, Union
 
 from homeassistant.const import STATE_UNAVAILABLE, STATE_UNKNOWN
 from homeassistant.core import HomeAssistant, State, callback
@@ -47,13 +47,28 @@ CheckTypeFunc = Callable[
     Optional[bool],
 ]
 
+ExtraCheckTypeFunc = Callable[
+    [
+        HomeAssistant,
+        str,
+        Union[dict, MappingProxyType],
+        Any,
+        str,
+        Union[dict, MappingProxyType],
+        Any,
+    ],
+    Optional[bool],
+]
+
 
 async def create_checker(
-    hass: HomeAssistant, _domain: str
+    hass: HomeAssistant,
+    _domain: str,
+    extra_significant_check: Optional[ExtraCheckTypeFunc] = None,
 ) -> "SignificantlyChangedChecker":
     """Create a significantly changed checker for a domain."""
     await _initialize(hass)
-    return SignificantlyChangedChecker(hass)
+    return SignificantlyChangedChecker(hass, extra_significant_check)
 
 
 # Marked as singleton so multiple calls all wait for same output.
@@ -105,34 +120,46 @@ class SignificantlyChangedChecker:
     Will always compare the entity to the last entity that was considered significant.
     """
 
-    def __init__(self, hass: HomeAssistant) -> None:
+    def __init__(
+        self,
+        hass: HomeAssistant,
+        extra_significant_check: Optional[ExtraCheckTypeFunc] = None,
+    ) -> None:
         """Test if an entity has significantly changed."""
         self.hass = hass
-        self.last_approved_entities: Dict[str, State] = {}
+        self.last_approved_entities: Dict[str, Tuple[State, Any]] = {}
+        self.extra_significant_check = extra_significant_check
 
     @callback
-    def async_is_significant_change(self, new_state: State) -> bool:
-        """Return if this was a significant change."""
-        old_state: Optional[State] = self.last_approved_entities.get(
+    def async_is_significant_change(
+        self, new_state: State, *, extra_arg: Optional[Any] = None
+    ) -> bool:
+        """Return if this was a significant change.
+
+        Extra kwargs are passed to the extra significant checker.
+        """
+        old_data: Optional[Tuple[State, Any]] = self.last_approved_entities.get(
             new_state.entity_id
         )
 
         # First state change is always ok to report
-        if old_state is None:
-            self.last_approved_entities[new_state.entity_id] = new_state
+        if old_data is None:
+            self.last_approved_entities[new_state.entity_id] = (new_state, extra_arg)
             return True
 
+        old_state, old_extra_arg = old_data
+
         # Handle state unknown or unavailable
         if new_state.state in (STATE_UNKNOWN, STATE_UNAVAILABLE):
             if new_state.state == old_state.state:
                 return False
 
-            self.last_approved_entities[new_state.entity_id] = new_state
+            self.last_approved_entities[new_state.entity_id] = (new_state, extra_arg)
             return True
 
         # If last state was unknown/unavailable, also significant.
         if old_state.state in (STATE_UNKNOWN, STATE_UNAVAILABLE):
-            self.last_approved_entities[new_state.entity_id] = new_state
+            self.last_approved_entities[new_state.entity_id] = (new_state, extra_arg)
             return True
 
         functions: Optional[Dict[str, CheckTypeFunc]] = self.hass.data.get(
@@ -144,11 +171,7 @@ class SignificantlyChangedChecker:
 
         check_significantly_changed = functions.get(new_state.domain)
 
-        # No platform available means always true.
-        if check_significantly_changed is None:
-            self.last_approved_entities[new_state.entity_id] = new_state
-            return True
-
+        if check_significantly_changed is not None:
             result = check_significantly_changed(
                 self.hass,
                 old_state.state,
@@ -160,7 +183,24 @@ class SignificantlyChangedChecker:
             if result is False:
                 return False
 
+        if self.extra_significant_check is not None:
+            result = self.extra_significant_check(
+                self.hass,
+                old_state.state,
+                old_state.attributes,
+                old_extra_arg,
+                new_state.state,
+                new_state.attributes,
+                extra_arg,
+            )
+
+            if result is False:
+                return False
+
         # Result is either True or None.
         # None means the function doesn't know. For now assume it's True
-        self.last_approved_entities[new_state.entity_id] = new_state
+        self.last_approved_entities[new_state.entity_id] = (
+            new_state,
+            extra_arg,
+        )
         return True
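Usage sketch for the extended checker API above (illustrative only, not part of this commit; the domain string, function names, and the serialized argument are placeholders, and it assumes a running hass instance and a State object supplied by the caller):

    from homeassistant.core import callback
    from homeassistant.helpers.significant_change import create_checker


    @callback
    def _extra_check(
        hass, old_state, old_attrs, old_extra_arg, new_state, new_attrs, new_extra_arg
    ):
        """Treat a change as significant only when the serialized payload changed."""
        return old_extra_arg is not None and old_extra_arg != new_extra_arg


    async def _example(hass, new_state, serialized):
        # create_checker now accepts an optional extra check as a third argument.
        checker = await create_checker(hass, "example_domain", _extra_check)

        # extra_arg is stored alongside the approved state and handed back to the
        # extra check on the next call.
        if checker.async_is_significant_change(new_state, extra_arg=serialized):
            ...  # report the state to the cloud service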
|
@ -197,7 +197,7 @@ aiolifx==0.6.9
|
|||||||
aiolifx_effects==0.2.2
|
aiolifx_effects==0.2.2
|
||||||
|
|
||||||
# homeassistant.components.lutron_caseta
|
# homeassistant.components.lutron_caseta
|
||||||
aiolip==1.0.1
|
aiolip==1.1.4
|
||||||
|
|
||||||
# homeassistant.components.keyboard_remote
|
# homeassistant.components.keyboard_remote
|
||||||
aionotify==0.2.0
|
aionotify==0.2.0
|
||||||
@ -383,7 +383,7 @@ bravia-tv==1.0.8
|
|||||||
broadlink==0.16.0
|
broadlink==0.16.0
|
||||||
|
|
||||||
# homeassistant.components.brother
|
# homeassistant.components.brother
|
||||||
brother==0.1.20
|
brother==0.1.21
|
||||||
|
|
||||||
# homeassistant.components.brottsplatskartan
|
# homeassistant.components.brottsplatskartan
|
||||||
brottsplatskartan==0.0.1
|
brottsplatskartan==0.0.1
|
||||||
@ -1545,7 +1545,7 @@ pymsteams==0.1.12
|
|||||||
pymusiccast==0.1.6
|
pymusiccast==0.1.6
|
||||||
|
|
||||||
# homeassistant.components.myq
|
# homeassistant.components.myq
|
||||||
pymyq==2.0.14
|
pymyq==3.0.1
|
||||||
|
|
||||||
# homeassistant.components.mysensors
|
# homeassistant.components.mysensors
|
||||||
pymysensors==0.18.0
|
pymysensors==0.18.0
|
||||||
@ -1961,7 +1961,7 @@ rokuecp==0.6.0
|
|||||||
roombapy==1.6.2
|
roombapy==1.6.2
|
||||||
|
|
||||||
# homeassistant.components.roon
|
# homeassistant.components.roon
|
||||||
roonapi==0.0.31
|
roonapi==0.0.32
|
||||||
|
|
||||||
# homeassistant.components.rova
|
# homeassistant.components.rova
|
||||||
rova==0.1.0
|
rova==0.1.0
|
||||||
@ -2178,10 +2178,10 @@ temperusb==1.5.3
|
|||||||
# tensorflow==2.3.0
|
# tensorflow==2.3.0
|
||||||
|
|
||||||
# homeassistant.components.powerwall
|
# homeassistant.components.powerwall
|
||||||
tesla-powerwall==0.3.3
|
tesla-powerwall==0.3.5
|
||||||
|
|
||||||
# homeassistant.components.tesla
|
# homeassistant.components.tesla
|
||||||
teslajsonpy==0.10.4
|
teslajsonpy==0.11.5
|
||||||
|
|
||||||
# homeassistant.components.tensorflow
|
# homeassistant.components.tensorflow
|
||||||
# tf-models-official==2.3.0
|
# tf-models-official==2.3.0
|
||||||
|
@@ -116,7 +116,7 @@ aiohue==2.1.0
 aiokafka==0.6.0
 
 # homeassistant.components.lutron_caseta
-aiolip==1.0.1
+aiolip==1.1.4
 
 # homeassistant.components.notion
 aionotion==1.1.0
@@ -210,7 +210,7 @@ bravia-tv==1.0.8
 broadlink==0.16.0
 
 # homeassistant.components.brother
-brother==0.1.20
+brother==0.1.21
 
 # homeassistant.components.bsblan
 bsblan==0.4.0
@@ -802,7 +802,7 @@ pymodbus==2.3.0
 pymonoprice==0.3
 
 # homeassistant.components.myq
-pymyq==2.0.14
+pymyq==3.0.1
 
 # homeassistant.components.nut
 pynut2==2.1.2
@@ -983,7 +983,7 @@ rokuecp==0.6.0
 roombapy==1.6.2
 
 # homeassistant.components.roon
-roonapi==0.0.31
+roonapi==0.0.32
 
 # homeassistant.components.rpi_power
 rpi-bad-power==0.1.0
@@ -1087,10 +1087,10 @@ synologydsm-api==1.0.1
 tellduslive==0.10.11
 
 # homeassistant.components.powerwall
-tesla-powerwall==0.3.3
+tesla-powerwall==0.3.5
 
 # homeassistant.components.tesla
-teslajsonpy==0.10.4
+teslajsonpy==0.11.5
 
 # homeassistant.components.toon
 toonapi==0.2.0
@@ -178,6 +178,7 @@ async def test_doorbell_event(hass, aioclient_mock):
 
 async def test_proactive_mode_filter_states(hass, aioclient_mock):
     """Test all the cases that filter states."""
+    aioclient_mock.post(TEST_URL, text="", status=202)
     await state_report.async_enable_proactive_mode(hass, DEFAULT_CONFIG)
 
     # First state should report
@@ -186,7 +187,8 @@ async def test_proactive_mode_filter_states(hass, aioclient_mock):
         "on",
         {"friendly_name": "Test Contact Sensor", "device_class": "door"},
     )
-    assert len(aioclient_mock.mock_calls) == 0
+    await hass.async_block_till_done()
+    assert len(aioclient_mock.mock_calls) == 1
 
     aioclient_mock.clear_requests()
 
@@ -238,3 +240,24 @@ async def test_proactive_mode_filter_states(hass, aioclient_mock):
     await hass.async_block_till_done()
     await hass.async_block_till_done()
     assert len(aioclient_mock.mock_calls) == 0
+
+    # If serializes to same properties, it should not report
+    aioclient_mock.post(TEST_URL, text="", status=202)
+    with patch(
+        "homeassistant.components.alexa.entities.AlexaEntity.serialize_properties",
+        return_value=[{"same": "info"}],
+    ):
+        hass.states.async_set(
+            "binary_sensor.same_serialize",
+            "off",
+            {"friendly_name": "Test Contact Sensor", "device_class": "door"},
+        )
+        await hass.async_block_till_done()
+        hass.states.async_set(
+            "binary_sensor.same_serialize",
+            "off",
+            {"friendly_name": "Test Contact Sensor", "device_class": "door"},
+        )
+
+        await hass.async_block_till_done()
+    assert len(aioclient_mock.mock_calls) == 1
@@ -46,6 +46,24 @@ async def test_report_state(hass, caplog, legacy_patchable_time):
         "devices": {"states": {"light.kitchen": {"on": True, "online": True}}}
     }
 
+    # Test that if serialize returns same value, we don't send
+    with patch(
+        "homeassistant.components.google_assistant.report_state.GoogleEntity.query_serialize",
+        return_value={"same": "info"},
+    ), patch.object(BASIC_CONFIG, "async_report_state_all", AsyncMock()) as mock_report:
+        # New state, so reported
+        hass.states.async_set("light.double_report", "on")
+        await hass.async_block_till_done()
+
+        # Changed, but serialize is same, so filtered out by extra check
+        hass.states.async_set("light.double_report", "off")
+        await hass.async_block_till_done()
+
+    assert len(mock_report.mock_calls) == 1
+    assert mock_report.mock_calls[0][1][0] == {
+        "devices": {"states": {"light.double_report": {"same": "info"}}}
+    }
+
     # Test that only significant state changes are reported
     with patch.object(
         BASIC_CONFIG, "async_report_state_all", AsyncMock()
@@ -1,14 +1,18 @@
 """Tests for the myq integration."""
 
 import json
+import logging
 from unittest.mock import patch
 
+from pymyq.const import ACCOUNTS_ENDPOINT, DEVICES_ENDPOINT
+
 from homeassistant.components.myq.const import DOMAIN
 from homeassistant.const import CONF_PASSWORD, CONF_USERNAME
 from homeassistant.core import HomeAssistant
 
 from tests.common import MockConfigEntry, load_fixture
 
+_LOGGER = logging.getLogger(__name__)
+
 
 async def async_init_integration(
     hass: HomeAssistant,
@@ -20,16 +24,24 @@ async def async_init_integration(
     devices_json = load_fixture(devices_fixture)
     devices_dict = json.loads(devices_json)
 
-    def _handle_mock_api_request(method, endpoint, **kwargs):
-        if endpoint == "Login":
-            return {"SecurityToken": 1234}
-        if endpoint == "My":
-            return {"Account": {"Id": 1}}
-        if endpoint == "Accounts/1/Devices":
-            return devices_dict
-        return {}
+    def _handle_mock_api_oauth_authenticate():
+        return 1234, 1800
+
+    def _handle_mock_api_request(method, returns, url, **kwargs):
+        _LOGGER.debug("URL: %s", url)
+        if url == ACCOUNTS_ENDPOINT:
+            _LOGGER.debug("Accounts")
+            return None, {"accounts": [{"id": 1, "name": "mock"}]}
+        if url == DEVICES_ENDPOINT.format(account_id=1):
+            _LOGGER.debug("Devices")
+            return None, devices_dict
+        _LOGGER.debug("Something else")
+        return None, {}
 
-    with patch("pymyq.api.API.request", side_effect=_handle_mock_api_request):
+    with patch(
+        "pymyq.api.API._oauth_authenticate",
+        side_effect=_handle_mock_api_oauth_authenticate,
+    ), patch("pymyq.api.API.request", side_effect=_handle_mock_api_request):
         entry = MockConfigEntry(
             domain=DOMAIN, data={CONF_USERNAME: "mock", CONF_PASSWORD: "mock"}
         )
@@ -2,17 +2,23 @@
 
 from unittest.mock import patch
 
-from tesla_powerwall import MissingAttributeError, PowerwallUnreachableError
+from tesla_powerwall import (
+    AccessDeniedError,
+    MissingAttributeError,
+    PowerwallUnreachableError,
+)
 
 from homeassistant import config_entries, setup
 from homeassistant.components.dhcp import HOSTNAME, IP_ADDRESS, MAC_ADDRESS
 from homeassistant.components.powerwall.const import DOMAIN
-from homeassistant.const import CONF_IP_ADDRESS
+from homeassistant.const import CONF_IP_ADDRESS, CONF_PASSWORD
 
 from .mocks import _mock_powerwall_side_effect, _mock_powerwall_site_name
 
 from tests.common import MockConfigEntry
 
+VALID_CONFIG = {CONF_IP_ADDRESS: "1.2.3.4", CONF_PASSWORD: "00GGX"}
+
 
 async def test_form_source_user(hass):
     """Test we get config flow setup form as a user."""
@@ -36,13 +42,13 @@ async def test_form_source_user(hass):
     ) as mock_setup_entry:
         result2 = await hass.config_entries.flow.async_configure(
             result["flow_id"],
-            {CONF_IP_ADDRESS: "1.2.3.4"},
+            VALID_CONFIG,
         )
         await hass.async_block_till_done()
 
     assert result2["type"] == "create_entry"
     assert result2["title"] == "My site"
-    assert result2["data"] == {CONF_IP_ADDRESS: "1.2.3.4"}
+    assert result2["data"] == VALID_CONFIG
     assert len(mock_setup.mock_calls) == 1
     assert len(mock_setup_entry.mock_calls) == 1
 
@@ -61,11 +67,32 @@ async def test_form_cannot_connect(hass):
     ):
         result2 = await hass.config_entries.flow.async_configure(
             result["flow_id"],
-            {CONF_IP_ADDRESS: "1.2.3.4"},
+            VALID_CONFIG,
        )
 
     assert result2["type"] == "form"
-    assert result2["errors"] == {"base": "cannot_connect"}
+    assert result2["errors"] == {CONF_IP_ADDRESS: "cannot_connect"}
+
+
+async def test_invalid_auth(hass):
+    """Test we handle invalid auth error."""
+    result = await hass.config_entries.flow.async_init(
+        DOMAIN, context={"source": config_entries.SOURCE_USER}
+    )
+
+    mock_powerwall = _mock_powerwall_side_effect(site_info=AccessDeniedError("any"))
+
+    with patch(
+        "homeassistant.components.powerwall.config_flow.Powerwall",
+        return_value=mock_powerwall,
+    ):
+        result2 = await hass.config_entries.flow.async_configure(
+            result["flow_id"],
+            VALID_CONFIG,
+        )
+
+    assert result2["type"] == "form"
+    assert result2["errors"] == {CONF_PASSWORD: "invalid_auth"}
 
 
 async def test_form_unknown_exeption(hass):
@@ -81,8 +108,7 @@ async def test_form_unknown_exeption(hass):
         return_value=mock_powerwall,
     ):
         result2 = await hass.config_entries.flow.async_configure(
-            result["flow_id"],
-            {CONF_IP_ADDRESS: "1.2.3.4"},
+            result["flow_id"], VALID_CONFIG
         )
 
     assert result2["type"] == "form"
@@ -105,7 +131,7 @@ async def test_form_wrong_version(hass):
     ):
         result3 = await hass.config_entries.flow.async_configure(
             result["flow_id"],
-            {CONF_IP_ADDRESS: "1.2.3.4"},
+            VALID_CONFIG,
         )
 
     assert result3["type"] == "form"
@@ -178,16 +204,54 @@ async def test_dhcp_discovery(hass):
     ) as mock_setup_entry:
         result2 = await hass.config_entries.flow.async_configure(
             result["flow_id"],
-            {
-                CONF_IP_ADDRESS: "1.1.1.1",
-            },
+            VALID_CONFIG,
         )
         await hass.async_block_till_done()
 
     assert result2["type"] == "create_entry"
     assert result2["title"] == "Some site"
-    assert result2["data"] == {
-        CONF_IP_ADDRESS: "1.1.1.1",
-    }
+    assert result2["data"] == VALID_CONFIG
+    assert len(mock_setup.mock_calls) == 1
+    assert len(mock_setup_entry.mock_calls) == 1
+
+
+async def test_form_reauth(hass):
+    """Test reauthenticate."""
+
+    entry = MockConfigEntry(
+        domain=DOMAIN,
+        data=VALID_CONFIG,
+        unique_id="1.2.3.4",
+    )
+    entry.add_to_hass(hass)
+
+    result = await hass.config_entries.flow.async_init(
+        DOMAIN, context={"source": "reauth"}, data=entry.data
+    )
+    assert result["type"] == "form"
+    assert result["errors"] == {}
+
+    mock_powerwall = await _mock_powerwall_site_name(hass, "My site")
+
+    with patch(
+        "homeassistant.components.powerwall.config_flow.Powerwall",
+        return_value=mock_powerwall,
+    ), patch(
+        "homeassistant.components.powerwall.async_setup", return_value=True
+    ) as mock_setup, patch(
+        "homeassistant.components.powerwall.async_setup_entry",
+        return_value=True,
+    ) as mock_setup_entry:
+        result2 = await hass.config_entries.flow.async_configure(
+            result["flow_id"],
+            {
+                CONF_IP_ADDRESS: "1.2.3.4",
+                CONF_PASSWORD: "new-test-password",
+            },
+        )
+        await hass.async_block_till_done()
+
+    assert result2["type"] == "abort"
+    assert result2["reason"] == "reauth_successful"
     assert len(mock_setup.mock_calls) == 1
     assert len(mock_setup_entry.mock_calls) == 1
@@ -48,6 +48,8 @@ async def test_form(hass):
     assert result2["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
     assert result2["title"] == "test@email.com"
     assert result2["data"] == {
+        CONF_USERNAME: "test@email.com",
+        CONF_PASSWORD: "test",
         CONF_TOKEN: "test-refresh-token",
         CONF_ACCESS_TOKEN: "test-access-token",
     }
@@ -95,7 +97,12 @@ async def test_form_cannot_connect(hass):
 
 async def test_form_repeat_identifier(hass):
     """Test we handle repeat identifiers."""
-    entry = MockConfigEntry(domain=DOMAIN, title="test-username", data={}, options=None)
+    entry = MockConfigEntry(
+        domain=DOMAIN,
+        title="test-username",
+        data={"username": "test-username", "password": "test-password"},
+        options=None,
+    )
     entry.add_to_hass(hass)
 
     result = await hass.config_entries.flow.async_init(
@@ -110,8 +117,36 @@ async def test_form_repeat_identifier(hass):
         {CONF_USERNAME: "test-username", CONF_PASSWORD: "test-password"},
     )
 
-    assert result2["type"] == "form"
-    assert result2["errors"] == {CONF_USERNAME: "already_configured"}
+    assert result2["type"] == "abort"
+    assert result2["reason"] == "already_configured"
+
+
+async def test_form_reauth(hass):
+    """Test we handle reauth."""
+    entry = MockConfigEntry(
+        domain=DOMAIN,
+        title="test-username",
+        data={"username": "test-username", "password": "same"},
+        options=None,
+    )
+    entry.add_to_hass(hass)
+
+    result = await hass.config_entries.flow.async_init(
+        DOMAIN,
+        context={"source": config_entries.SOURCE_REAUTH},
+        data={"username": "test-username"},
+    )
+    with patch(
+        "homeassistant.components.tesla.config_flow.TeslaAPI.connect",
+        return_value=("test-refresh-token", "test-access-token"),
+    ):
+        result2 = await hass.config_entries.flow.async_configure(
+            result["flow_id"],
+            {CONF_USERNAME: "test-username", CONF_PASSWORD: "new-password"},
+        )
+
+    assert result2["type"] == "abort"
+    assert result2["reason"] == "reauth_successful"
 
 
 async def test_import(hass):
@@ -5,7 +5,6 @@ from homeassistant.components.sensor import DEVICE_CLASS_BATTERY
 from homeassistant.const import ATTR_DEVICE_CLASS, STATE_UNAVAILABLE, STATE_UNKNOWN
 from homeassistant.core import State
 from homeassistant.helpers import significant_change
-from homeassistant.setup import async_setup_component
 
 
 @pytest.fixture(name="checker")
@@ -26,8 +25,6 @@ async def checker_fixture(hass):
 
 async def test_signicant_change(hass, checker):
     """Test initialize helper works."""
-    assert await async_setup_component(hass, "sensor", {})
-
     ent_id = "test_domain.test_entity"
     attrs = {ATTR_DEVICE_CLASS: DEVICE_CLASS_BATTERY}
 
@@ -48,3 +45,30 @@ async def test_signicant_change(hass, checker):
     # State turned unavailable
     assert checker.async_is_significant_change(State(ent_id, "100", attrs))
     assert checker.async_is_significant_change(State(ent_id, STATE_UNAVAILABLE, attrs))
+
+
+async def test_significant_change_extra(hass, checker):
+    """Test extra significant checker works."""
+    ent_id = "test_domain.test_entity"
+    attrs = {ATTR_DEVICE_CLASS: DEVICE_CLASS_BATTERY}
+
+    assert checker.async_is_significant_change(State(ent_id, "100", attrs), extra_arg=1)
+    assert checker.async_is_significant_change(State(ent_id, "200", attrs), extra_arg=1)
+
+    # Reset the last significiant change to 100 to repeat test but with
+    # extra checker installed.
+    assert checker.async_is_significant_change(State(ent_id, "100", attrs), extra_arg=1)
+
+    def extra_significant_check(
+        hass, old_state, old_attrs, old_extra_arg, new_state, new_attrs, new_extra_arg
+    ):
+        return old_extra_arg != new_extra_arg
+
+    checker.extra_significant_check = extra_significant_check
+
+    # This is normally a significant change (100 -> 200), but the extra arg check marks it
+    # as insignificant.
+    assert not checker.async_is_significant_change(
+        State(ent_id, "200", attrs), extra_arg=1
+    )
+    assert checker.async_is_significant_change(State(ent_id, "200", attrs), extra_arg=2)