Merge pull request #36070 from home-assistant/rc

Franck Nijhof 2020-05-24 14:58:42 +02:00 committed by GitHub
commit f897264c7f
GPG Key ID: 4AEE18F83AFDEB23
45 changed files with 363 additions and 137 deletions


@ -434,7 +434,13 @@ omit =
homeassistant/components/luftdaten/*
homeassistant/components/lupusec/*
homeassistant/components/lutron/*
homeassistant/components/lutron_caseta/*
homeassistant/components/lutron_caseta/__init__.py
homeassistant/components/lutron_caseta/binary_sensor.py
homeassistant/components/lutron_caseta/cover.py
homeassistant/components/lutron_caseta/fan.py
homeassistant/components/lutron_caseta/light.py
homeassistant/components/lutron_caseta/scene.py
homeassistant/components/lutron_caseta/switch.py
homeassistant/components/lw12wifi/light.py
homeassistant/components/lyft/sensor.py
homeassistant/components/magicseaweed/sensor.py
@ -727,6 +733,7 @@ omit =
homeassistant/components/steam_online/sensor.py
homeassistant/components/stiebel_eltron/*
homeassistant/components/stookalert/*
homeassistant/components/stream/*
homeassistant/components/streamlabswater/*
homeassistant/components/suez_water/*
homeassistant/components/supervisord/sensor.py


@ -1,6 +1,7 @@
"""The airvisual component."""
import asyncio
from datetime import timedelta
from math import ceil
from pyairvisual import Client
from pyairvisual.errors import AirVisualError, NodeProError
@ -37,7 +38,6 @@ from .const import (
PLATFORMS = ["air_quality", "sensor"]
DEFAULT_ATTRIBUTION = "Data provided by AirVisual"
DEFAULT_GEOGRAPHY_SCAN_INTERVAL = timedelta(minutes=10)
DEFAULT_NODE_PRO_SCAN_INTERVAL = timedelta(minutes=1)
DEFAULT_OPTIONS = {CONF_SHOW_ON_MAP: True}
@ -88,6 +88,37 @@ def async_get_geography_id(geography_dict):
)
@callback
def async_get_cloud_api_update_interval(hass, api_key):
"""Get a leveled scan interval for a particular cloud API key.
This will shift based on the number of active consumers, thus keeping the user
under the monthly API limit.
"""
num_consumers = len(
{
config_entry
for config_entry in hass.config_entries.async_entries(DOMAIN)
if config_entry.data.get(CONF_API_KEY) == api_key
}
)
# Assuming 10,000 calls per month and a "smallest possible month" of 28 days; note
# that we give a buffer of 1500 API calls for any drift, restarts, etc.:
minutes_between_api_calls = ceil(1 / (8500 / 28 / 24 / 60 / num_consumers))
return timedelta(minutes=minutes_between_api_calls)
@callback
def async_reset_coordinator_update_intervals(hass, update_interval):
"""Update any existing data coordinators with a new update interval."""
if not hass.data[DOMAIN][DATA_COORDINATOR]:
return
for coordinator in hass.data[DOMAIN][DATA_COORDINATOR].values():
coordinator.update_interval = update_interval
async def async_setup(hass, config):
"""Set up the AirVisual component."""
hass.data[DOMAIN] = {DATA_COORDINATOR: {}}
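The rate-limit helper above spreads a shared AirVisual API key's monthly budget across all config entries that use it. A standalone sketch of the arithmetic, using the 8,500-call budget and 28-day month assumed in the code comment (illustrative only; the helper name exists just for this sketch and is not part of the diff):

from math import ceil

def minutes_between_calls(num_consumers: int) -> int:
    # Calls per minute available to each consumer of the shared key.
    calls_per_minute = 8500 / 28 / 24 / 60 / num_consumers
    return ceil(1 / calls_per_minute)

print([minutes_between_calls(n) for n in (1, 2, 3)])  # [5, 10, 15]

With a single entry the coordinator polls every 5 minutes (roughly 8,064 calls in a 28-day month), and each additional entry sharing the key stretches the interval proportionally.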
@ -163,6 +194,10 @@ async def async_setup_entry(hass, config_entry):
client = Client(api_key=config_entry.data[CONF_API_KEY], session=websession)
update_interval = async_get_cloud_api_update_interval(
hass, config_entry.data[CONF_API_KEY]
)
async def async_update_data():
"""Get new data from the API."""
if CONF_CITY in config_entry.data:
@ -185,10 +220,14 @@ async def async_setup_entry(hass, config_entry):
hass,
LOGGER,
name="geography data",
update_interval=DEFAULT_GEOGRAPHY_SCAN_INTERVAL,
update_interval=update_interval,
update_method=async_update_data,
)
# Ensure any other, existing config entries that use this API key are updated
# with the new scan interval:
async_reset_coordinator_update_intervals(hass, update_interval)
# Only geography-based entries have options:
config_entry.add_update_listener(async_update_options)
else:


@ -6,7 +6,7 @@ from datetime import timedelta
import logging
import re
from broadlink.exceptions import BroadlinkException, ReadError
from broadlink.exceptions import BroadlinkException, ReadError, StorageError
import voluptuous as vol
from homeassistant.const import CONF_HOST
@ -85,10 +85,11 @@ async def async_setup_service(hass, host, device):
_LOGGER.info("Press the key you want Home Assistant to learn")
start_time = utcnow()
while (utcnow() - start_time) < timedelta(seconds=20):
await asyncio.sleep(1)
try:
packet = await device.async_request(device.api.check_data)
except ReadError:
await asyncio.sleep(1)
except (ReadError, StorageError):
continue
except BroadlinkException as err_msg:
_LOGGER.error("Failed to learn: %s", err_msg)
return


@ -14,6 +14,7 @@ from broadlink.exceptions import (
BroadlinkException,
DeviceOfflineError,
ReadError,
StorageError,
)
import voluptuous as vol
@ -321,10 +322,11 @@ class BroadlinkRemote(RemoteEntity):
code = None
start_time = utcnow()
while (utcnow() - start_time) < timedelta(seconds=timeout):
await asyncio.sleep(1)
try:
code = await self.device.async_request(self.device.api.check_data)
except ReadError:
await asyncio.sleep(1)
except (ReadError, StorageError):
continue
else:
break


@ -2,7 +2,7 @@
"domain": "cloud",
"name": "Home Assistant Cloud",
"documentation": "https://www.home-assistant.io/integrations/cloud",
"requirements": ["hass-nabucasa==0.34.2"],
"requirements": ["hass-nabucasa==0.34.3"],
"dependencies": ["http", "webhook", "alexa"],
"after_dependencies": ["google_assistant"],
"codeowners": ["@home-assistant/cloud"]


@ -3,7 +3,7 @@
"name": "Daikin AC",
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/daikin",
"requirements": ["pydaikin==2.0.2"],
"requirements": ["pydaikin==2.0.4"],
"codeowners": ["@fredrike"],
"quality_scale": "platinum"
}


@ -2,7 +2,7 @@
"domain": "frontend",
"name": "Home Assistant Frontend",
"documentation": "https://www.home-assistant.io/integrations/frontend",
"requirements": ["home-assistant-frontend==20200519.1"],
"requirements": ["home-assistant-frontend==20200519.4"],
"dependencies": [
"api",
"auth",


@ -2,8 +2,6 @@
import logging
from turbojpeg import TurboJPEG
SUPPORTED_SCALING_FACTORS = [(7, 8), (3, 4), (5, 8), (1, 2), (3, 8), (1, 4), (1, 8)]
_LOGGER = logging.getLogger(__name__)
@ -54,6 +52,12 @@ class TurboJPEGSingleton:
def __init__(self):
"""Try to create TurboJPEG only once."""
try:
# TurboJPEG checks for libturbojpeg
# when its created, but it imports
# numpy which may or may not work so
# we have to guard the import here.
from turbojpeg import TurboJPEG # pylint: disable=import-outside-toplevel
TurboJPEGSingleton.__instance = TurboJPEG()
except Exception: # pylint: disable=broad-except
_LOGGER.exception(


@ -89,10 +89,6 @@ class HKDevice:
# mapped to a HA entity.
self.entities = []
# There are multiple entities sharing a single connection - only
# allow one entity to use pairing at once.
self.pairing_lock = asyncio.Lock()
self.available = True
self.signal_state_updated = "_".join((DOMAIN, self.unique_id, "state_updated"))
@ -333,13 +329,11 @@ class HKDevice:
async def get_characteristics(self, *args, **kwargs):
"""Read latest state from homekit accessory."""
async with self.pairing_lock:
return await self.pairing.get_characteristics(*args, **kwargs)
return await self.pairing.get_characteristics(*args, **kwargs)
async def put_characteristics(self, characteristics):
"""Control a HomeKit device state from Home Assistant."""
async with self.pairing_lock:
results = await self.pairing.put_characteristics(characteristics)
results = await self.pairing.put_characteristics(characteristics)
# Feed characteristics back into HA and update the current state
# results will only contain failures, so anything in characteristics


@ -3,7 +3,7 @@
"name": "HomeKit Controller",
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/homekit_controller",
"requirements": ["aiohomekit[IP]==0.2.37"],
"requirements": ["aiohomekit[IP]==0.2.38"],
"zeroconf": ["_hap._tcp.local."],
"codeowners": ["@Jc2k"]
}


@ -68,6 +68,12 @@ async def async_setup_entry(hass, entry, async_add_entities):
except asyncio.TimeoutError:
# Forced refresh is not required for setup
pass
if ATTR_POSITION_DATA not in shade.raw_data:
_LOGGER.info(
"The %s shade was skipped because it is missing position data",
name_before_refresh,
)
continue
entities.append(
PowerViewShade(
shade, name_before_refresh, room_data, coordinator, device_info


@ -74,6 +74,17 @@ class ShadeEntity(HDEntity):
@property
def device_info(self):
"""Return the device_info of the device."""
device_info = {
"identifiers": {(DOMAIN, self._shade.id)},
"name": self._shade_name,
"manufacturer": MANUFACTURER,
"via_device": (DOMAIN, self._device_info[DEVICE_SERIAL_NUMBER]),
}
if FIRMWARE_IN_SHADE not in self._shade.raw_data:
return device_info
firmware = self._shade.raw_data[FIRMWARE_IN_SHADE]
sw_version = f"{firmware[FIRMWARE_REVISION]}.{firmware[FIRMWARE_SUB_REVISION]}.{firmware[FIRMWARE_BUILD]}"
model = self._shade.raw_data[ATTR_TYPE]
@ -82,11 +93,6 @@ class ShadeEntity(HDEntity):
model = shade.description
break
return {
"identifiers": {(DOMAIN, self._shade.id)},
"name": self._shade_name,
"model": str(model),
"sw_version": sw_version,
"manufacturer": MANUFACTURER,
"via_device": (DOMAIN, self._device_info[DEVICE_SERIAL_NUMBER]),
}
device_info["sw_version"] = sw_version
device_info["model"] = model
return device_info


@ -85,12 +85,12 @@ class IPPFlowHandler(ConfigFlow, domain=DOMAIN):
unique_id = user_input[CONF_UUID] = info[CONF_UUID]
if unique_id is None and info[CONF_SERIAL] is not None:
if not unique_id and info[CONF_SERIAL]:
_LOGGER.debug(
"Printer UUID is missing from IPP response. Falling back to IPP serial number"
)
unique_id = info[CONF_SERIAL]
elif unique_id is None:
elif not unique_id:
_LOGGER.debug("Unable to determine unique id from IPP response")
await self.async_set_unique_id(unique_id)
@ -138,17 +138,17 @@ class IPPFlowHandler(ConfigFlow, domain=DOMAIN):
return self.async_abort(reason="ipp_error")
unique_id = self.discovery_info[CONF_UUID]
if unique_id is None and info[CONF_UUID] is not None:
if not unique_id and info[CONF_UUID]:
_LOGGER.debug(
"Printer UUID is missing from discovery info. Falling back to IPP UUID"
)
unique_id = self.discovery_info[CONF_UUID] = info[CONF_UUID]
elif unique_id is None and info[CONF_SERIAL] is not None:
elif not unique_id and info[CONF_SERIAL]:
_LOGGER.debug(
"Printer UUID is missing from discovery info and IPP response. Falling back to IPP serial number"
)
unique_id = info[CONF_SERIAL]
elif unique_id is None:
elif not unique_id:
_LOGGER.debug(
"Unable to determine unique id from discovery info and IPP response"
)
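The switch from the identity check (is None) to a truthiness check (not unique_id) also covers printers that report an empty-string UUID, which the new zeroconf test later in this commit exercises. A quick plain-Python illustration (not the config flow itself):

unique_id = ""              # a printer advertising an empty UUID over zeroconf
print(unique_id is None)    # False -> the old guard kept the empty string
print(not unique_id)        # True  -> the new guard falls back to the serial number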


@ -39,11 +39,7 @@ LUTRON_CASETA_COMPONENTS = ["light", "switch", "cover", "scene", "fan", "binary_
async def async_setup(hass, base_config):
"""Set up the Lutron component."""
bridge_configs = base_config.get(DOMAIN)
if not bridge_configs:
return True
bridge_configs = base_config[DOMAIN]
hass.data.setdefault(DOMAIN, {})
for config in bridge_configs:


@ -94,11 +94,6 @@ class LutronCasetaFlowHandler(config_entries.ConfigFlow, domain=DOMAIN):
await bridge.close()
return True
except (KeyError, ValueError):
_LOGGER.error(
"Error while checking connectivity to bridge %s", self.data[CONF_HOST],
)
return False
except Exception: # pylint: disable=broad-except
_LOGGER.exception(
"Unknown exception while checking connectivity to bridge %s",


@ -2,7 +2,10 @@
"domain": "lutron_caseta",
"name": "Lutron Caséta",
"documentation": "https://www.home-assistant.io/integrations/lutron_caseta",
"requirements": ["pylutron-caseta==0.6.1"],
"codeowners": ["@swails"],
"config_flow": true
"requirements": [
"pylutron-caseta==0.6.1"
],
"codeowners": [
"@swails"
]
}


@ -82,13 +82,14 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry):
if device.capabilities.events and await device.events.async_start():
platforms += ["binary_sensor", "sensor"]
hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, device.events.async_stop)
for component in platforms:
hass.async_create_task(
hass.config_entries.async_forward_entry_setup(entry, component)
)
hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, device.async_stop)
return True


@ -219,7 +219,8 @@ class OnvifFlowHandler(config_entries.ConfigFlow, domain=DOMAIN):
media_service = device.create_media_service()
profiles = await media_service.GetProfiles()
h264 = any(
profile.VideoEncoderConfiguration.Encoding == "H264"
profile.VideoEncoderConfiguration
and profile.VideoEncoderConfiguration.Encoding == "H264"
for profile in profiles
)
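The added truthiness check guards against ONVIF profiles that carry no VideoEncoderConfiguration at all, which previously raised an AttributeError on None. A small illustration with a toy object (not the real zeep types):

class FakeProfile:
    VideoEncoderConfiguration = None

profile = FakeProfile()
# Old expression: profile.VideoEncoderConfiguration.Encoding == "H264" -> AttributeError
is_h264 = bool(
    profile.VideoEncoderConfiguration
    and profile.VideoEncoderConfiguration.Encoding == "H264"
)
print(is_h264)  # False -> the profile is skipped instead of crashing the flow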


@ -8,7 +8,6 @@ from aiohttp.client_exceptions import ClientConnectionError, ServerDisconnectedE
import onvif
from onvif import ONVIFCamera
from onvif.exceptions import ONVIFError
from zeep.asyncio import AsyncTransport
from zeep.exceptions import Fault
from homeassistant.config_entries import ConfigEntry
@ -20,7 +19,6 @@ from homeassistant.const import (
CONF_USERNAME,
)
from homeassistant.core import HomeAssistant
from homeassistant.helpers.aiohttp_client import async_get_clientsession
import homeassistant.util.dt as dt_util
from .const import (
@ -141,6 +139,12 @@ class ONVIFDevice:
return True
async def async_stop(self, event=None):
"""Shut it all down."""
if self.events:
await self.events.async_stop()
await self.device.close()
async def async_check_date_and_time(self) -> None:
"""Warns if device and system date not synced."""
LOGGER.debug("Setting up the ONVIF device management service")
@ -251,7 +255,10 @@ class ONVIFDevice:
profiles = []
for key, onvif_profile in enumerate(result):
# Only add H264 profiles
if onvif_profile.VideoEncoderConfiguration.Encoding != "H264":
if (
not onvif_profile.VideoEncoderConfiguration
or onvif_profile.VideoEncoderConfiguration.Encoding != "H264"
):
continue
profile = Profile(
@ -278,9 +285,13 @@ class ONVIFDevice:
is not None,
)
ptz_service = self.device.get_service("ptz")
presets = await ptz_service.GetPresets(profile.token)
profile.ptz.presets = [preset.token for preset in presets]
try:
ptz_service = self.device.create_ptz_service()
presets = await ptz_service.GetPresets(profile.token)
profile.ptz.presets = [preset.token for preset in presets]
except (Fault, ServerDisconnectedError):
# It's OK if Presets aren't supported
profile.ptz.presets = []
profiles.append(profile)
@ -326,7 +337,7 @@ class ONVIFDevice:
LOGGER.warning("PTZ actions are not supported on device '%s'", self.name)
return
ptz_service = self.device.get_service("ptz")
ptz_service = self.device.create_ptz_service()
pan_val = distance * PAN_FACTOR.get(pan, 0)
tilt_val = distance * TILT_FACTOR.get(tilt, 0)
@ -423,13 +434,11 @@ class ONVIFDevice:
def get_device(hass, host, port, username, password) -> ONVIFCamera:
"""Get ONVIFCamera instance."""
session = async_get_clientsession(hass)
transport = AsyncTransport(None, session=session)
return ONVIFCamera(
host,
port,
username,
password,
f"{os.path.dirname(onvif.__file__)}/wsdl/",
transport=transport,
no_cache=True,
)


@ -91,7 +91,7 @@ class EventManager:
return self.started
async def async_stop(self, event=None) -> None:
async def async_stop(self) -> None:
"""Unsubscribe from events."""
if not self._subscription:
return
@ -110,7 +110,7 @@ class EventManager:
async def async_pull_messages(self, _now: dt = None) -> None:
"""Pull messages from device."""
try:
pullpoint = self.device.get_service("pullpoint")
pullpoint = self.device.create_pullpoint_service()
req = pullpoint.create_type("PullMessages")
req.MessageLimit = 100
req.Timeout = dt.timedelta(seconds=60)


@ -2,7 +2,7 @@
"domain": "onvif",
"name": "ONVIF",
"documentation": "https://www.home-assistant.io/integrations/onvif",
"requirements": ["onvif-zeep-async==0.3.0", "WSDiscovery==2.0.0"],
"requirements": ["onvif-zeep-async==0.4.0", "WSDiscovery==2.0.0"],
"dependencies": ["ffmpeg"],
"codeowners": ["@hunterjm"],
"config_flow": true


@ -3,7 +3,15 @@
"name": "OpenZWave (beta)",
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/ozw",
"requirements": ["python-openzwave-mqtt==1.0.1"],
"after_dependencies": ["mqtt"],
"codeowners": ["@cgarwood", "@marcelveldt", "@MartinHjelmare"]
"requirements": [
"python-openzwave-mqtt==1.0.2"
],
"after_dependencies": [
"mqtt"
],
"codeowners": [
"@cgarwood",
"@marcelveldt",
"@MartinHjelmare"
]
}


@ -42,7 +42,7 @@ PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
)
async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the PrezziBenzina sensor platform."""
station = config[CONF_STATION]
@ -65,7 +65,7 @@ async def async_setup_platform(hass, config, async_add_entities, discovery_info=
)
)
async_add_entities(dev, True)
add_entities(dev, True)
class PrezziBenzinaSensor(Entity):
@ -114,6 +114,6 @@ class PrezziBenzinaSensor(Entity):
}
return attrs
async def async_update(self):
def update(self):
"""Get the latest data and updates the states."""
self._data = self._client.get_by_id(self._station)[self._index]


@ -2,7 +2,7 @@
"domain": "roku",
"name": "Roku",
"documentation": "https://www.home-assistant.io/integrations/roku",
"requirements": ["rokuecp==0.4.0"],
"requirements": ["rokuecp==0.4.1"],
"ssdp": [
{
"st": "roku:ecp",


@ -7,6 +7,7 @@ from homeassistant.components.media_player.const import (
MEDIA_TYPE_APP,
MEDIA_TYPE_CHANNEL,
SUPPORT_NEXT_TRACK,
SUPPORT_PAUSE,
SUPPORT_PLAY,
SUPPORT_PLAY_MEDIA,
SUPPORT_PREVIOUS_TRACK,
@ -29,6 +30,7 @@ SUPPORT_ROKU = (
| SUPPORT_VOLUME_STEP
| SUPPORT_VOLUME_MUTE
| SUPPORT_SELECT_SOURCE
| SUPPORT_PAUSE
| SUPPORT_PLAY
| SUPPORT_PLAY_MEDIA
| SUPPORT_TURN_ON
@ -167,6 +169,14 @@ class RokuMediaPlayer(RokuEntity, MediaPlayerEntity):
"""Turn off the Roku."""
await self.coordinator.roku.remote("poweroff")
async def async_media_pause(self) -> None:
"""Send pause command."""
await self.coordinator.roku.remote("play")
async def async_media_play(self) -> None:
"""Send play command."""
await self.coordinator.roku.remote("play")
async def async_media_play_pause(self) -> None:
"""Send play/pause command."""
await self.coordinator.roku.remote("play")


@ -17,7 +17,7 @@ from homeassistant.components.light import (
SUPPORT_BRIGHTNESS,
SUPPORT_COLOR,
SUPPORT_TRANSITION,
Light,
LightEntity,
)
from homeassistant.const import CONF_ADDRESS, CONF_HOST, CONF_NAME, CONF_TYPE, STATE_ON
import homeassistant.helpers.config_validation as cv
@ -104,7 +104,7 @@ def setup_platform(hass, config, add_entities, discovery_info=None):
add_entities(leds)
class PwmSimpleLed(Light, RestoreEntity):
class PwmSimpleLed(LightEntity, RestoreEntity):
"""Representation of a simple one-color PWM LED."""
def __init__(self, led, name):


@ -79,8 +79,11 @@ class StreamOutput:
@property
def target_duration(self) -> int:
"""Return the average duration of the segments in seconds."""
segment_length = len(self._segments)
if not segment_length:
return 0
durations = [s.duration for s in self._segments]
return round(sum(durations) // len(self._segments)) or 1
return round(sum(durations) // segment_length) or 1
def get_segment(self, sequence: int = None) -> Any:
"""Retrieve a specific segment, or the whole list."""


@ -2,7 +2,7 @@
"domain": "stream",
"name": "Stream",
"documentation": "https://www.home-assistant.io/integrations/stream",
"requirements": ["av==7.0.1"],
"requirements": ["av==8.0.1"],
"dependencies": ["http"],
"codeowners": ["@hunterjm"],
"quality_scale": "internal"


@ -164,3 +164,7 @@ def stream_worker(hass, stream, quit_event):
# Assign the video packet to the new stream & mux
packet.stream = buffer.vstream
buffer.output.mux(packet)
# Close stream
buffer.output.close()
container.close()


@ -3,7 +3,7 @@
"name": "Telldus Live",
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/tellduslive",
"requirements": ["tellduslive==0.10.10"],
"requirements": ["tellduslive==0.10.11"],
"codeowners": ["@fredrike"],
"quality_scale": "gold"
}


@ -40,7 +40,7 @@ def setup(hass, config):
"""Set up the Zabbix component."""
conf = config[DOMAIN]
protocol = "https" if config[CONF_SSL] else "http"
protocol = "https" if conf[CONF_SSL] else "http"
url = urljoin(f"{protocol}://{conf[CONF_HOST]}", conf[CONF_PATH])
username = conf.get(CONF_USERNAME)


@ -1,7 +1,7 @@
"""Constants used by Home Assistant components."""
MAJOR_VERSION = 0
MINOR_VERSION = 110
PATCH_VERSION = "1"
PATCH_VERSION = "2"
__short_version__ = f"{MAJOR_VERSION}.{MINOR_VERSION}"
__version__ = f"{__short_version__}.{PATCH_VERSION}"
REQUIRED_PYTHON_VER = (3, 7, 0)


@ -83,7 +83,6 @@ FLOWS = [
"locative",
"logi_circle",
"luftdaten",
"lutron_caseta",
"mailgun",
"melcloud",
"met",


@ -399,17 +399,25 @@ def async_cleanup(
ent_reg: "entity_registry.EntityRegistry",
) -> None:
"""Clean up device registry."""
# Find all devices that are no longer referenced in the entity registry.
referenced = {entry.device_id for entry in ent_reg.entities.values()}
orphan = set(dev_reg.devices) - referenced
# Find all devices that are referenced by a config_entry.
config_entry_ids = {entry.entry_id for entry in hass.config_entries.async_entries()}
references_config_entries = {
device.id
for device in dev_reg.devices.values()
for config_entry_id in device.config_entries
if config_entry_id in config_entry_ids
}
# Find all devices that are referenced in the entity registry.
references_entities = {entry.device_id for entry in ent_reg.entities.values()}
orphan = set(dev_reg.devices) - references_entities - references_config_entries
for dev_id in orphan:
dev_reg.async_remove_device(dev_id)
# Find all referenced config entries that no longer exist
# This shouldn't happen but have not been able to track down the bug :(
config_entry_ids = {entry.entry_id for entry in hass.config_entries.async_entries()}
for device in list(dev_reg.devices.values()):
for config_entry_id in device.config_entries:
if config_entry_id not in config_entry_ids:
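The reworked cleanup keeps any device still referenced by either an entity or an existing config entry; only devices referenced by neither are treated as orphans. A minimal sketch of the set arithmetic with made-up device ids:

devices = {"d1", "d2", "d3", "d4"}
referenced_by_entities = {"d1"}
referenced_by_config_entries = {"d2", "d3"}
orphans = devices - referenced_by_entities - referenced_by_config_entries
print(orphans)  # {'d4'} -> only devices referenced by nothing are removed

This lines up with the updated registry test later in this commit, where the ("hue", "d2") device is now expected to survive cleanup.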


@ -431,7 +431,8 @@ async def entity_service_call(hass, platforms, func, call, required_features=Non
# Skip entities that don't have the required feature.
if required_features is not None and not any(
entity.supported_features & feature_set for feature_set in required_features
entity.supported_features & feature_set == feature_set
for feature_set in required_features
):
continue
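The old expression was truthy whenever any bit of a required feature set overlapped with the entity's supported features; the new comparison requires every bit to be present. A small illustration with toy flags (not the real Home Assistant constants):

SUPPORT_A, SUPPORT_B = 1, 2
supported = SUPPORT_B                    # entity only supports B
required = SUPPORT_A | SUPPORT_B         # service requires A and B together
# & binds tighter than ==, so the new check reads (supported & required) == required
print(bool(supported & required))        # True  -> old check wrongly included the entity
print(supported & required == required)  # False -> new check correctly skips it

The new tests further down (test_call_with_both_required_features and test_call_with_one_of_required_features) exercise exactly this distinction.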


@ -11,8 +11,8 @@ ciso8601==2.1.3
cryptography==2.9.2
defusedxml==0.6.0
distro==1.5.0
hass-nabucasa==0.34.2
home-assistant-frontend==20200519.1
hass-nabucasa==0.34.3
home-assistant-frontend==20200519.4
importlib-metadata==1.6.0
jinja2>=2.11.1
netdisco==2.6.0


@ -178,7 +178,7 @@ aioftp==0.12.0
aioharmony==0.1.13
# homeassistant.components.homekit_controller
aiohomekit[IP]==0.2.37
aiohomekit[IP]==0.2.38
# homeassistant.components.emulated_hue
# homeassistant.components.http
@ -291,7 +291,7 @@ atenpdu==0.3.0
aurorapy==0.2.6
# homeassistant.components.stream
av==7.0.1
av==8.0.1
# homeassistant.components.avea
avea==1.4
@ -701,7 +701,7 @@ habitipy==0.2.0
hangups==0.4.9
# homeassistant.components.cloud
hass-nabucasa==0.34.2
hass-nabucasa==0.34.3
# homeassistant.components.mqtt
hbmqtt==0.9.5
@ -731,7 +731,7 @@ hole==0.5.1
holidays==0.10.2
# homeassistant.components.frontend
home-assistant-frontend==20200519.1
home-assistant-frontend==20200519.4
# homeassistant.components.zwave
homeassistant-pyozw==0.1.10
@ -994,7 +994,7 @@ oemthermostat==1.1
onkyo-eiscp==1.2.7
# homeassistant.components.onvif
onvif-zeep-async==0.3.0
onvif-zeep-async==0.4.0
# homeassistant.components.opengarage
open-garage==0.1.4
@ -1263,7 +1263,7 @@ pycsspeechtts==1.0.3
# pycups==1.9.73
# homeassistant.components.daikin
pydaikin==2.0.2
pydaikin==2.0.4
# homeassistant.components.danfoss_air
pydanfossair==0.1.0
@ -1711,7 +1711,7 @@ python-nest==4.1.0
python-nmap==0.6.1
# homeassistant.components.ozw
python-openzwave-mqtt==1.0.1
python-openzwave-mqtt==1.0.2
# homeassistant.components.qbittorrent
python-qbittorrent==0.4.1
@ -1871,7 +1871,7 @@ rjpl==0.3.5
rocketchat-API==0.6.1
# homeassistant.components.roku
rokuecp==0.4.0
rokuecp==0.4.1
# homeassistant.components.roomba
roombapy==1.6.1
@ -2063,7 +2063,7 @@ tellcore-net==0.4
tellcore-py==1.1.2
# homeassistant.components.tellduslive
tellduslive==0.10.10
tellduslive==0.10.11
# homeassistant.components.lg_soundbar
temescal==0.1


@ -82,7 +82,7 @@ aiofreepybox==0.0.8
aioharmony==0.1.13
# homeassistant.components.homekit_controller
aiohomekit[IP]==0.2.37
aiohomekit[IP]==0.2.38
# homeassistant.components.emulated_hue
# homeassistant.components.http
@ -138,7 +138,7 @@ arcam-fmj==0.4.4
async-upnp-client==0.14.13
# homeassistant.components.stream
av==7.0.1
av==8.0.1
# homeassistant.components.axis
axis==25
@ -294,7 +294,7 @@ ha-ffmpeg==2.0
hangups==0.4.9
# homeassistant.components.cloud
hass-nabucasa==0.34.2
hass-nabucasa==0.34.3
# homeassistant.components.mqtt
hbmqtt==0.9.5
@ -312,7 +312,7 @@ hole==0.5.1
holidays==0.10.2
# homeassistant.components.frontend
home-assistant-frontend==20200519.1
home-assistant-frontend==20200519.4
# homeassistant.components.zwave
homeassistant-pyozw==0.1.10
@ -414,7 +414,7 @@ numpy==1.18.4
oauth2client==4.0.0
# homeassistant.components.onvif
onvif-zeep-async==0.3.0
onvif-zeep-async==0.4.0
# homeassistant.components.openerz
openerz-api==0.1.0
@ -533,7 +533,7 @@ pychromecast==5.1.0
pycoolmasternet==0.0.4
# homeassistant.components.daikin
pydaikin==2.0.2
pydaikin==2.0.4
# homeassistant.components.deconz
pydeconz==70
@ -705,7 +705,7 @@ python-miio==0.5.0.1
python-nest==4.1.0
# homeassistant.components.ozw
python-openzwave-mqtt==1.0.1
python-openzwave-mqtt==1.0.2
# homeassistant.components.songpal
python-songpal==0.12
@ -762,7 +762,7 @@ rflink==0.0.52
ring_doorbell==0.6.0
# homeassistant.components.roku
rokuecp==0.4.0
rokuecp==0.4.1
# homeassistant.components.roomba
roombapy==1.6.1
@ -826,7 +826,7 @@ stringcase==1.2.0
sunwatcher==0.2.1
# homeassistant.components.tellduslive
tellduslive==0.10.10
tellduslive==0.10.11
# homeassistant.components.powerwall
tesla-powerwall==0.2.8


@ -23,25 +23,19 @@ def test_scale_jpeg_camera_image():
camera_image = Image("image/jpeg", EMPTY_16_12_JPEG)
turbo_jpeg = mock_turbo_jpeg(first_width=16, first_height=12)
with patch(
"homeassistant.components.homekit.img_util.TurboJPEG", return_value=False
):
with patch("turbojpeg.TurboJPEG", return_value=False):
TurboJPEGSingleton()
assert scale_jpeg_camera_image(camera_image, 16, 12) == camera_image.content
turbo_jpeg = mock_turbo_jpeg(first_width=16, first_height=12)
with patch(
"homeassistant.components.homekit.img_util.TurboJPEG", return_value=turbo_jpeg
):
with patch("turbojpeg.TurboJPEG", return_value=turbo_jpeg):
TurboJPEGSingleton()
assert scale_jpeg_camera_image(camera_image, 16, 12) == EMPTY_16_12_JPEG
turbo_jpeg = mock_turbo_jpeg(
first_width=16, first_height=12, second_width=8, second_height=6
)
with patch(
"homeassistant.components.homekit.img_util.TurboJPEG", return_value=turbo_jpeg
):
with patch("turbojpeg.TurboJPEG", return_value=turbo_jpeg):
TurboJPEGSingleton()
jpeg_bytes = scale_jpeg_camera_image(camera_image, 8, 6)
@ -51,12 +45,10 @@ def test_scale_jpeg_camera_image():
def test_turbojpeg_load_failure():
"""Handle libjpegturbo not being installed."""
with patch(
"homeassistant.components.homekit.img_util.TurboJPEG", side_effect=Exception
):
with patch("turbojpeg.TurboJPEG", side_effect=Exception):
TurboJPEGSingleton()
assert TurboJPEGSingleton.instance() is False
with patch("homeassistant.components.homekit.img_util.TurboJPEG"):
with patch("turbojpeg.TurboJPEG"):
TurboJPEGSingleton()
assert TurboJPEGSingleton.instance()


@ -193,9 +193,7 @@ async def test_camera_stream_source_configured(hass, run_driver, events):
turbo_jpeg = mock_turbo_jpeg(
first_width=16, first_height=12, second_width=300, second_height=200
)
with patch(
"homeassistant.components.homekit.img_util.TurboJPEG", return_value=turbo_jpeg
):
with patch("turbojpeg.TurboJPEG", return_value=turbo_jpeg):
TurboJPEGSingleton()
assert await hass.async_add_executor_job(
acc.get_snapshot, {"aid": 2, "image-width": 300, "image-height": 200}


@ -264,6 +264,24 @@ async def test_zeroconf_with_uuid_device_exists_abort(
assert result["reason"] == "already_configured"
async def test_zeroconf_empty_unique_id_required_abort(
hass: HomeAssistant, aioclient_mock: AiohttpClientMocker
) -> None:
"""Test we abort zeroconf flow if printer lacks (empty) unique identification."""
mock_connection(aioclient_mock, no_unique_id=True)
discovery_info = {
**MOCK_ZEROCONF_IPP_SERVICE_INFO,
"properties": {**MOCK_ZEROCONF_IPP_SERVICE_INFO["properties"], "UUID": ""},
}
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": SOURCE_ZEROCONF}, data=discovery_info,
)
assert result["type"] == RESULT_TYPE_ABORT
assert result["reason"] == "unique_id_required"
async def test_zeroconf_unique_id_required_abort(
hass: HomeAssistant, aioclient_mock: AiohttpClientMocker
) -> None:


@ -1,5 +1,4 @@
"""Test the Lutron Caseta config flow."""
from asynctest import patch
from pylutron_caseta.smartbridge import Smartbridge
from homeassistant import config_entries, data_entry_flow
@ -14,6 +13,7 @@ from homeassistant.components.lutron_caseta.const import (
)
from homeassistant.const import CONF_HOST
from tests.async_mock import AsyncMock, patch
from tests.common import MockConfigEntry
@ -51,7 +51,11 @@ async def test_bridge_import_flow(hass):
with patch(
"homeassistant.components.lutron_caseta.async_setup_entry", return_value=True,
) as mock_setup_entry, patch.object(Smartbridge, "create_tls") as create_tls:
) as mock_setup_entry, patch(
"homeassistant.components.lutron_caseta.async_setup", return_value=True
), patch.object(
Smartbridge, "create_tls"
) as create_tls:
create_tls.return_value = MockBridge(can_connect=True)
result = await hass.config_entries.flow.async_init(
@ -77,9 +81,7 @@ async def test_bridge_cannot_connect(hass):
CONF_CA_CERTS: "",
}
with patch(
"homeassistant.components.lutron_caseta.async_setup_entry", return_value=True,
) as mock_setup_entry, patch.object(Smartbridge, "create_tls") as create_tls:
with patch.object(Smartbridge, "create_tls") as create_tls:
create_tls.return_value = MockBridge(can_connect=False)
result = await hass.config_entries.flow.async_init(
@ -91,8 +93,41 @@ async def test_bridge_cannot_connect(hass):
assert result["type"] == "form"
assert result["step_id"] == STEP_IMPORT_FAILED
assert result["errors"] == {"base": ERROR_CANNOT_CONNECT}
# validate setup_entry was not called
assert len(mock_setup_entry.mock_calls) == 0
result = await hass.config_entries.flow.async_configure(result["flow_id"], {})
assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT
assert result["reason"] == CasetaConfigFlow.ABORT_REASON_CANNOT_CONNECT
async def test_bridge_cannot_connect_unknown_error(hass):
"""Test checking for connection and encountering an unknown error."""
entry_mock_data = {
CONF_HOST: "",
CONF_KEYFILE: "",
CONF_CERTFILE: "",
CONF_CA_CERTS: "",
}
with patch.object(Smartbridge, "create_tls") as create_tls:
mock_bridge = MockBridge()
mock_bridge.connect = AsyncMock(side_effect=Exception())
create_tls.return_value = mock_bridge
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": config_entries.SOURCE_IMPORT},
data=entry_mock_data,
)
assert result["type"] == "form"
assert result["step_id"] == STEP_IMPORT_FAILED
assert result["errors"] == {"base": ERROR_CANNOT_CONNECT}
result = await hass.config_entries.flow.async_configure(result["flow_id"], {})
assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT
assert result["reason"] == CasetaConfigFlow.ABORT_REASON_CANNOT_CONNECT
async def test_duplicate_bridge_import(hass):


@ -18,6 +18,7 @@ from homeassistant.components.media_player.const import (
SERVICE_PLAY_MEDIA,
SERVICE_SELECT_SOURCE,
SUPPORT_NEXT_TRACK,
SUPPORT_PAUSE,
SUPPORT_PLAY,
SUPPORT_PLAY_MEDIA,
SUPPORT_PREVIOUS_TRACK,
@ -30,6 +31,8 @@ from homeassistant.components.media_player.const import (
from homeassistant.const import (
ATTR_ENTITY_ID,
SERVICE_MEDIA_NEXT_TRACK,
SERVICE_MEDIA_PAUSE,
SERVICE_MEDIA_PLAY,
SERVICE_MEDIA_PLAY_PAUSE,
SERVICE_MEDIA_PREVIOUS_TRACK,
SERVICE_TURN_OFF,
@ -142,6 +145,7 @@ async def test_supported_features(
| SUPPORT_VOLUME_STEP
| SUPPORT_VOLUME_MUTE
| SUPPORT_SELECT_SOURCE
| SUPPORT_PAUSE
| SUPPORT_PLAY
| SUPPORT_PLAY_MEDIA
| SUPPORT_TURN_ON
@ -170,6 +174,7 @@ async def test_tv_supported_features(
| SUPPORT_VOLUME_STEP
| SUPPORT_VOLUME_MUTE
| SUPPORT_SELECT_SOURCE
| SUPPORT_PAUSE
| SUPPORT_PLAY
| SUPPORT_PLAY_MEDIA
| SUPPORT_TURN_ON
@ -267,6 +272,26 @@ async def test_services(
remote_mock.assert_called_once_with("poweron")
with patch("homeassistant.components.roku.Roku.remote") as remote_mock:
await hass.services.async_call(
MP_DOMAIN,
SERVICE_MEDIA_PAUSE,
{ATTR_ENTITY_ID: MAIN_ENTITY_ID},
blocking=True,
)
remote_mock.assert_called_once_with("play")
with patch("homeassistant.components.roku.Roku.remote") as remote_mock:
await hass.services.async_call(
MP_DOMAIN,
SERVICE_MEDIA_PLAY,
{ATTR_ENTITY_ID: MAIN_ENTITY_ID},
blocking=True,
)
remote_mock.assert_called_once_with("play")
with patch("homeassistant.components.roku.Roku.remote") as remote_mock:
await hass.services.async_call(
MP_DOMAIN,


@ -539,7 +539,7 @@ async def test_cleanup_device_registry(hass, registry):
device_registry.async_cleanup(hass, registry, ent_reg)
assert registry.async_get_device({("hue", "d1")}, set()) is not None
assert registry.async_get_device({("hue", "d2")}, set()) is None
assert registry.async_get_device({("hue", "d2")}, set()) is not None
assert registry.async_get_device({("hue", "d3")}, set()) is not None
assert registry.async_get_device({("something", "d4")}, set()) is None


@ -35,6 +35,10 @@ from tests.common import (
mock_service,
)
SUPPORT_A = 1
SUPPORT_B = 2
SUPPORT_C = 4
@pytest.fixture
def mock_handle_entity_call():
@ -52,17 +56,31 @@ def mock_entities(hass):
entity_id="light.kitchen",
available=True,
should_poll=False,
supported_features=1,
supported_features=SUPPORT_A,
)
living_room = MockEntity(
entity_id="light.living_room",
available=True,
should_poll=False,
supported_features=0,
supported_features=SUPPORT_B,
)
bedroom = MockEntity(
entity_id="light.bedroom",
available=True,
should_poll=False,
supported_features=(SUPPORT_A | SUPPORT_B),
)
bathroom = MockEntity(
entity_id="light.bathroom",
available=True,
should_poll=False,
supported_features=(SUPPORT_B | SUPPORT_C),
)
entities = OrderedDict()
entities[kitchen.entity_id] = kitchen
entities[living_room.entity_id] = living_room
entities[bedroom.entity_id] = bedroom
entities[bathroom.entity_id] = bathroom
return entities
@ -307,18 +325,61 @@ async def test_async_get_all_descriptions(hass):
async def test_call_with_required_features(hass, mock_entities):
"""Test service calls invoked only if entity has required feautres."""
"""Test service calls invoked only if entity has required features."""
test_service_mock = AsyncMock(return_value=None)
await service.entity_service_call(
hass,
[Mock(entities=mock_entities)],
test_service_mock,
ha.ServiceCall("test_domain", "test_service", {"entity_id": "all"}),
required_features=[1],
required_features=[SUPPORT_A],
)
assert len(mock_entities) == 2
# Called once because only one of the entities had the required features
assert test_service_mock.call_count == 2
expected = [
mock_entities["light.kitchen"],
mock_entities["light.bedroom"],
]
actual = [call[0][0] for call in test_service_mock.call_args_list]
assert all(entity in actual for entity in expected)
async def test_call_with_both_required_features(hass, mock_entities):
"""Test service calls invoked only if entity has both features."""
test_service_mock = AsyncMock(return_value=None)
await service.entity_service_call(
hass,
[Mock(entities=mock_entities)],
test_service_mock,
ha.ServiceCall("test_domain", "test_service", {"entity_id": "all"}),
required_features=[SUPPORT_A | SUPPORT_B],
)
assert test_service_mock.call_count == 1
assert [call[0][0] for call in test_service_mock.call_args_list] == [
mock_entities["light.bedroom"]
]
async def test_call_with_one_of_required_features(hass, mock_entities):
"""Test service calls invoked with one entity having the required features."""
test_service_mock = AsyncMock(return_value=None)
await service.entity_service_call(
hass,
[Mock(entities=mock_entities)],
test_service_mock,
ha.ServiceCall("test_domain", "test_service", {"entity_id": "all"}),
required_features=[SUPPORT_A, SUPPORT_C],
)
assert test_service_mock.call_count == 3
expected = [
mock_entities["light.kitchen"],
mock_entities["light.bedroom"],
mock_entities["light.bathroom"],
]
actual = [call[0][0] for call in test_service_mock.call_args_list]
assert all(entity in actual for entity in expected)
async def test_call_with_sync_func(hass, mock_entities):
@ -458,7 +519,7 @@ async def test_call_no_context_target_all(hass, mock_handle_entity_call, mock_en
),
)
assert len(mock_handle_entity_call.mock_calls) == 2
assert len(mock_handle_entity_call.mock_calls) == 4
assert [call[1][1] for call in mock_handle_entity_call.mock_calls] == list(
mock_entities.values()
)
@ -494,7 +555,7 @@ async def test_call_with_match_all(
ha.ServiceCall("test_domain", "test_service", {"entity_id": "all"}),
)
assert len(mock_handle_entity_call.mock_calls) == 2
assert len(mock_handle_entity_call.mock_calls) == 4
assert [call[1][1] for call in mock_handle_entity_call.mock_calls] == list(
mock_entities.values()
)