Mirror of https://github.com/home-assistant/core.git (synced 2025-07-29 08:07:45 +00:00)

Merge pull request #36756 from home-assistant/rc

Commit e5f08ee657
@@ -5,6 +5,7 @@ import asyncio
 import async_timeout
 import axis
 from axis.configuration import Configuration
+from axis.errors import Unauthorized
 from axis.event_stream import OPERATION_INITIALIZED
 from axis.mqtt import mqtt_json_to_event
 from axis.streammanager import SIGNAL_PLAYING, STATE_STOPPED
@@ -160,9 +161,13 @@ class AxisNetworkDevice:

     async def use_mqtt(self, hass: HomeAssistant, component: str) -> None:
         """Set up to use MQTT."""
-        status = await hass.async_add_executor_job(
-            self.api.vapix.mqtt.get_client_status
-        )
+        try:
+            status = await hass.async_add_executor_job(
+                self.api.vapix.mqtt.get_client_status
+            )
+        except Unauthorized:
+            # This means the user has too low privileges
+            status = {}

         if status.get("data", {}).get("status", {}).get("state") == "active":
             self.listeners.append(
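
The new try/except above degrades gracefully when the configured Axis account lacks the privileges needed to read the MQTT client status: the Unauthorized error is swallowed and an empty dict is used, so the chained .get() lookups below simply yield None and the MQTT wiring is skipped. A minimal, self-contained sketch of that pattern, with stand-ins for the axis library's Unauthorized error and VAPIX call (names here are illustrative, not the library API):

class Unauthorized(Exception):
    """Stand-in for axis.errors.Unauthorized."""


def get_client_status(privileged):
    """Fake VAPIX call: refuses when the account may not read MQTT status."""
    if not privileged:
        raise Unauthorized("user has too low privileges")
    return {"data": {"status": {"state": "active"}}}


def mqtt_state(privileged):
    try:
        status = get_client_status(privileged)
    except Unauthorized:
        # Too low privileges: behave as if MQTT is simply not active.
        status = {}
    return status.get("data", {}).get("status", {}).get("state")


assert mqtt_state(True) == "active"
assert mqtt_state(False) is None
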
@@ -3,7 +3,7 @@
   "name": "Axis",
   "config_flow": true,
   "documentation": "https://www.home-assistant.io/integrations/axis",
-  "requirements": ["axis==29"],
+  "requirements": ["axis==30"],
   "zeroconf": ["_axis-video._tcp.local."],
   "after_dependencies": ["mqtt"],
   "codeowners": ["@Kane610"]
@@ -72,6 +72,8 @@ async def async_setup_entry(hass: HomeAssistantType, entry: ConfigEntry):
     # For backwards compat, set unique ID
     if entry.unique_id is None:
         hass.config_entries.async_update_entry(entry, unique_id=conf[KEY_MAC])
+    elif ".local" in entry.unique_id:
+        hass.config_entries.async_update_entry(entry, unique_id=conf[KEY_MAC])
     daikin_api = await daikin_api_setup(
         hass,
         conf[CONF_HOST],
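
The added elif extends the backwards-compatibility migration: entries whose unique ID was previously set from a zeroconf hostname (containing ".local") are rewritten to the MAC address, matching how new entries are keyed. A hedged sketch of that decision, with a hypothetical normalize_unique_id helper standing in for the config-entry update:

def normalize_unique_id(current_unique_id, mac):
    """Return the unique ID an entry should end up with (illustrative helper).

    Entries with no unique ID yet and entries keyed by an mDNS hostname
    (ending in ".local") are both migrated to the MAC address.
    """
    if current_unique_id is None or ".local" in current_unique_id:
        return mac
    return current_unique_id


assert normalize_unique_id(None, "AABBCCDDEEFF") == "AABBCCDDEEFF"
assert normalize_unique_id("DaikinAP12345.local", "AABBCCDDEEFF") == "AABBCCDDEEFF"
assert normalize_unique_id("AABBCCDDEEFF", "AABBCCDDEEFF") == "AABBCCDDEEFF"
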
@@ -6,12 +6,13 @@ from uuid import uuid4
 from aiohttp import ClientError, web_exceptions
 from async_timeout import timeout
 from pydaikin.daikin_base import Appliance
+from pydaikin.discovery import Discovery
 import voluptuous as vol

 from homeassistant import config_entries
 from homeassistant.const import CONF_HOST, CONF_PASSWORD

-from .const import CONF_KEY, CONF_UUID, KEY_HOSTNAME, KEY_IP, KEY_MAC, TIMEOUT
+from .const import CONF_KEY, CONF_UUID, KEY_IP, KEY_MAC, TIMEOUT

 _LOGGER = logging.getLogger(__name__)

@@ -128,7 +129,8 @@ class FlowHandler(config_entries.ConfigFlow):
     async def async_step_zeroconf(self, discovery_info):
         """Prepare configuration for a discovered Daikin device."""
         _LOGGER.debug("Zeroconf discovery_info: %s", discovery_info)
-        await self.async_set_unique_id(discovery_info[KEY_HOSTNAME])
+        devices = Discovery.poll(discovery_info[CONF_HOST])
+        await self.async_set_unique_id(next(iter(devices.values()))[KEY_MAC])
         self._abort_if_unique_id_configured()
         self.host = discovery_info[CONF_HOST]
         return await self.async_step_user()
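
The zeroconf step no longer trusts the mDNS hostname as a unique ID; it polls the discovered host with pydaikin's Discovery and keys the config entry on the MAC the device itself reports. A small sketch of pulling the MAC out of a poll-style result (a mapping of address to device info, mirroring the shape the daikin tests mock; KEY_MAC is "mac" per const.py in this diff):

def mac_from_discovery(devices):
    """Pick the MAC of the single discovered device."""
    return next(iter(devices.values()))["mac"]


devices = {"127.0.0.1": {"mac": "AABBCCDDEEFF", "id": "test"}}
assert mac_from_discovery(devices) == "AABBCCDDEEFF"
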
@@ -64,6 +64,5 @@ CONF_UUID = "uuid"

 KEY_MAC = "mac"
 KEY_IP = "ip"
-KEY_HOSTNAME = "hostname"

 TIMEOUT = 60
@@ -3,7 +3,7 @@
   "name": "Daikin AC",
   "config_flow": true,
   "documentation": "https://www.home-assistant.io/integrations/daikin",
-  "requirements": ["pydaikin==2.1.1"],
+  "requirements": ["pydaikin==2.1.2"],
   "codeowners": ["@fredrike"],
   "zeroconf": ["_dkapi._tcp.local."],
   "quality_scale": "platinum"
@@ -2,7 +2,7 @@
   "domain": "frontend",
   "name": "Home Assistant Frontend",
   "documentation": "https://www.home-assistant.io/integrations/frontend",
-  "requirements": ["home-assistant-frontend==20200603.2"],
+  "requirements": ["home-assistant-frontend==20200603.3"],
   "dependencies": [
     "api",
     "auth",
@@ -12,7 +12,11 @@ import voluptuous as vol

 from homeassistant.components import recorder
 from homeassistant.components.http import HomeAssistantView
-from homeassistant.components.recorder.models import DB_TIMEZONE, States
+from homeassistant.components.recorder.models import (
+    DB_TIMEZONE,
+    States,
+    process_timestamp,
+)
 from homeassistant.components.recorder.util import execute, session_scope
 from homeassistant.const import (
     ATTR_HIDDEN,
@@ -304,6 +308,10 @@ def _sorted_states_to_json(
         elapsed = time.perf_counter() - timer_start
         _LOGGER.debug("getting %d first datapoints took %fs", len(result), elapsed)

+    # Called in a tight loop so cache the function
+    # here
+    _process_timestamp = process_timestamp
+
     # Append all changes to it
     for ent_id, group in groupby(states, lambda state: state.entity_id):
         domain = split_entity_id(ent_id)[0]
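
Binding process_timestamp to the local name _process_timestamp before the per-entity loop is a CPython micro-optimization: a local variable lookup is cheaper than resolving a global/imported name on every iteration of a tight loop. A self-contained illustration of the pattern (timings are indicative only):

import math
import timeit


def use_global(values):
    return [math.sqrt(v) for v in values]


def use_local_alias(values):
    _sqrt = math.sqrt  # cache the attribute lookup outside the tight loop
    return [_sqrt(v) for v in values]


values = list(range(10_000))
print("global lookup:", timeit.timeit(lambda: use_global(values), number=200))
print("local alias  :", timeit.timeit(lambda: use_local_alias(values), number=200))
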
@@ -347,7 +355,7 @@ def _sorted_states_to_json(
             ent_results.append(
                 {
                     STATE_KEY: db_state.state,
-                    LAST_CHANGED_KEY: f"{str(db_state.last_changed).replace(' ','T').split('.')[0]}{DB_TIMEZONE}",
+                    LAST_CHANGED_KEY: f"{str(_process_timestamp(db_state.last_changed)).replace(' ','T').split('.')[0]}{DB_TIMEZONE}",
                 }
             )
             prev_state = db_state
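
The formatting change runs each last_changed value through the cached _process_timestamp before building the ISO-like string, so timestamps coming out of the database are normalized before the timezone suffix is appended. The sketch below assumes a process_timestamp-like helper that treats naive database datetimes as UTC and assumes DB_TIMEZONE is the "+00:00" suffix; both are local stand-ins, not the recorder's actual definitions:

from datetime import datetime, timezone

DB_TIMEZONE = "+00:00"  # assumed value, appended exactly as in the hunk


def process_timestamp_stub(ts):
    """Assumed behavior: attach UTC to naive datetimes, convert aware ones."""
    if ts is None:
        return None
    if ts.tzinfo is None:
        return ts.replace(tzinfo=timezone.utc)
    return ts.astimezone(timezone.utc)


naive = datetime(2020, 6, 5, 12, 34, 56, 789000)
formatted = (
    f"{str(process_timestamp_stub(naive)).replace(' ', 'T').split('.')[0]}{DB_TIMEZONE}"
)
assert formatted == "2020-06-05T12:34:56+00:00"
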
@@ -331,7 +331,7 @@ def _get_related_entity_ids(session, entity_filter):

     query = session.query(States).with_entities(States.entity_id).distinct()

-    for tryno in range(0, RETRIES):
+    for tryno in range(RETRIES):
         try:
             result = [row.entity_id for row in query if entity_filter(row.entity_id)]

@@ -410,11 +410,12 @@ def _get_events(hass, config, start_day, end_day, entity_id=None):


 def _keep_event(hass, event, entities_filter):
-    domain, entity_id = None, None
+    domain = event.data.get(ATTR_DOMAIN)
+    entity_id = event.data.get("entity_id")
+    if entity_id:
+        domain = split_entity_id(entity_id)[0]

     if event.event_type == EVENT_STATE_CHANGED:
-        entity_id = event.data.get("entity_id")
-
         if entity_id is None:
             return False

@@ -432,7 +433,6 @@ def _keep_event(hass, event, entities_filter):
         if new_state.get("state") == old_state.get("state"):
             return False

-        domain = split_entity_id(entity_id)[0]
         attributes = new_state.get("attributes", {})

         # Also filter auto groups.
@@ -446,13 +446,13 @@ def _keep_event(hass, event, entities_filter):

     elif event.event_type == EVENT_LOGBOOK_ENTRY:
         domain = event.data.get(ATTR_DOMAIN)
         entity_id = event.data.get(ATTR_ENTITY_ID)

     elif event.event_type == EVENT_SCRIPT_STARTED:
         domain = "script"
         entity_id = event.data.get(ATTR_ENTITY_ID)

-    elif event.event_type in hass.data.get(DOMAIN, {}):
+    elif not entity_id and event.event_type in hass.data.get(DOMAIN, {}):
+        # If the entity_id isn't described, use the domain that describes
+        # the event for filtering.
         domain = hass.data[DOMAIN][event.event_type][0]

     if not entity_id and domain:
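
Taken together, the _keep_event changes read domain and entity_id from the event data up front and only fall back to the describing integration's domain (hass.data[DOMAIN]) when the event carries no entity_id, so entity excludes keep applying to described events. A standalone sketch of that decision order; the final two return lines approximate the tail of the real function (not shown in these hunks), and plain dicts stand in for hass data and the entity filter:

def keep_event(event_data, event_type, described_domains, entities_filter):
    """Simplified filter: prefer the event's entity_id, else the describing domain."""
    domain = event_data.get("domain")
    entity_id = event_data.get("entity_id")
    if entity_id:
        domain = entity_id.split(".")[0]
    elif event_type in described_domains:
        # No entity_id on the event: use the domain that described it.
        domain = described_domains[event_type]

    if not entity_id and domain:
        return entities_filter(f"{domain}.")
    return not entity_id or entities_filter(entity_id)


described = {"some_automation_event": "automation"}


def entities_filter(entity_id):
    return entity_id != "automation.excluded_rule"


assert keep_event({"entity_id": "automation.included_rule"}, "some_automation_event", described, entities_filter)
assert not keep_event({"entity_id": "automation.excluded_rule"}, "some_automation_event", described, entities_filter)
assert keep_event({}, "some_automation_event", described, entities_filter)
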
@@ -154,4 +154,5 @@ class OwnTracksEntity(TrackerEntity, RestoreEntity):
     def update_data(self, data):
         """Mark the device as seen."""
         self._data = data
-        self.async_write_ha_state()
+        if self.hass:
+            self.async_write_ha_state()
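
The OwnTracks fix guards the state write behind `if self.hass:` because update_data can be called before the entity has been added to Home Assistant, and writing state at that point raises. A generic sketch of the guard with a minimal stand-in entity (not the real RestoreEntity):

class FakeEntity:
    """Minimal stand-in: hass stays None until the entity is added."""

    def __init__(self):
        self.hass = None
        self._data = None
        self.writes = 0

    def async_write_ha_state(self):
        if self.hass is None:
            raise RuntimeError("entity has no hass yet")
        self.writes += 1

    def update_data(self, data):
        """Mark the device as seen."""
        self._data = data
        if self.hass:  # skip the write until the entity is registered
            self.async_write_ha_state()


entity = FakeEntity()
entity.update_data({"lat": 0.0})  # before being added: data stored, no write
entity.hass = object()            # simulate async_added_to_hass having run
entity.update_data({"lat": 1.0})  # now the state write goes through
assert entity.writes == 1
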
@@ -9,6 +9,7 @@ from plexwebsocket import PlexWebsocket
 import requests.exceptions
 import voluptuous as vol

+from homeassistant.components.media_player import DOMAIN as MP_DOMAIN
 from homeassistant.components.media_player.const import (
     ATTR_MEDIA_CONTENT_ID,
     ATTR_MEDIA_CONTENT_TYPE,
@@ -65,6 +66,11 @@ async def async_setup_entry(hass, entry):
             entry, unique_id=entry.data[CONF_SERVER_IDENTIFIER]
         )

+    if MP_DOMAIN not in entry.options:
+        options = dict(entry.options)
+        options.setdefault(MP_DOMAIN, {})
+        hass.config_entries.async_update_entry(entry, options=options)
+
     plex_server = PlexServer(
         hass, server_config, entry.data[CONF_SERVER_IDENTIFIER], entry.options
     )
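
The added block seeds a media_player section for Plex config entries created before options existed, so later code can index entry.options[MP_DOMAIN] without a KeyError. Because entry.options is effectively read-only, the code copies it, fills in the default, and writes the copy back; a plain-dict sketch of that copy-modify-persist step:

MP_DOMAIN = "media_player"


def ensure_default_options(current_options):
    """Return options with an (empty) media_player section guaranteed."""
    if MP_DOMAIN in current_options:
        return current_options
    options = dict(current_options)  # copy instead of mutating the stored mapping
    options.setdefault(MP_DOMAIN, {})
    return options  # the caller would persist this via async_update_entry


assert ensure_default_options({}) == {MP_DOMAIN: {}}
assert ensure_default_options({MP_DOMAIN: {"use_episode_art": True}}) == {
    MP_DOMAIN: {"use_episode_art": True}
}
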
@@ -239,11 +239,12 @@ class YeelightDevice:

     @property
     def is_nightlight_supported(self) -> bool:
-        """Return true / false if nightlight is supported."""
-        if self.model:
-            return self.bulb.get_model_specs().get("night_light", False)
+        """
+        Return true / false if nightlight is supported.
+
+        Uses brightness as it appears to be supported in both ceiling and other lights.
+        """

-        # It should support both ceiling and other lights
         return self._nightlight_brightness is not None

     @property
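
The rewritten property stops consulting per-model specs and instead reports nightlight support whenever the device exposes a nightlight brightness value, which covers both ceiling lights and regular bulbs. A small sketch of that inference; the "nl_br" property name is assumed for illustration:

class SketchDevice:
    """Nightlight support inferred from a reported nightlight brightness."""

    def __init__(self, properties):
        self._properties = properties

    @property
    def _nightlight_brightness(self):
        return self._properties.get("nl_br")  # assumed property name

    @property
    def is_nightlight_supported(self):
        # Present (even as "0") means the firmware knows about nightlight mode.
        return self._nightlight_brightness is not None


assert SketchDevice({"nl_br": "0"}).is_nightlight_supported
assert not SketchDevice({}).is_nightlight_supported
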
@@ -333,6 +334,12 @@ class YeelightDevice:
         """Request device capabilities."""
         try:
             self.bulb.get_capabilities()
+            _LOGGER.debug(
+                "Device %s, %s capabilities: %s",
+                self.ipaddr,
+                self.name,
+                self.bulb.capabilities,
+            )
         except BulbException as ex:
             _LOGGER.error(
                 "Unable to get device capabilities %s, %s: %s",
@@ -128,7 +128,7 @@ class Metering(ZigbeeChannel):
             "demand_formatting", 0xF9
         )  # 1 digit to the right, 15 digits to the left

-        r_digits = fmting & 0x07  # digits to the right of decimal point
+        r_digits = int(fmting & 0x07)  # digits to the right of decimal point
         l_digits = (fmting >> 3) & 0x0F  # digits to the left of decimal point
         if l_digits == 0:
             l_digits = 15
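
demand_formatting packs the decimal layout of metering values into one byte: per the masks above, the low three bits are the digits to the right of the decimal point and the next four bits the digits to the left (0 meaning 15). The int() wrapper presumably keeps r_digits a plain Python int even if the attribute arrives as a zigpy integer type, since it is later used to build a format string. A quick worked example with the 0xF9 default from the comment:

fmting = 0xF9  # default: 1 digit to the right, 15 digits to the left

r_digits = int(fmting & 0x07)    # 0b001 -> 1 digit right of the decimal point
l_digits = (fmting >> 3) & 0x0F  # 0b1111 -> 15 digits to the left
if l_digits == 0:
    l_digits = 15

assert (r_digits, l_digits) == (1, 15)
print(f"value format: {{:.{r_digits}f}}")  # prints: value format: {:.1f}
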
@@ -1,7 +1,7 @@
 """Constants used by Home Assistant components."""
 MAJOR_VERSION = 0
 MINOR_VERSION = 111
-PATCH_VERSION = "1"
+PATCH_VERSION = "2"
 __short_version__ = f"{MAJOR_VERSION}.{MINOR_VERSION}"
 __version__ = f"{__short_version__}.{PATCH_VERSION}"
 REQUIRED_PYTHON_VER = (3, 7, 0)
@@ -12,7 +12,7 @@ cryptography==2.9.2
 defusedxml==0.6.0
 distro==1.5.0
 hass-nabucasa==0.34.6
-home-assistant-frontend==20200603.2
+home-assistant-frontend==20200603.3
 importlib-metadata==1.6.0
 jinja2>=2.11.1
 netdisco==2.7.0
@@ -306,7 +306,7 @@ avea==1.4
 avri-api==0.1.7

 # homeassistant.components.axis
-axis==29
+axis==30

 # homeassistant.components.azure_event_hub
 azure-eventhub==5.1.0
@@ -734,7 +734,7 @@ hole==0.5.1
 holidays==0.10.2

 # homeassistant.components.frontend
-home-assistant-frontend==20200603.2
+home-assistant-frontend==20200603.3

 # homeassistant.components.zwave
 homeassistant-pyozw==0.1.10
@@ -1263,7 +1263,7 @@ pycsspeechtts==1.0.3
 # pycups==1.9.73

 # homeassistant.components.daikin
-pydaikin==2.1.1
+pydaikin==2.1.2

 # homeassistant.components.danfoss_air
 pydanfossair==0.1.0
@@ -147,7 +147,7 @@ async-upnp-client==0.14.13
 av==8.0.2

 # homeassistant.components.axis
-axis==29
+axis==30

 # homeassistant.components.homekit
 base36==0.1.1
@@ -321,7 +321,7 @@ hole==0.5.1
 holidays==0.10.2

 # homeassistant.components.frontend
-home-assistant-frontend==20200603.2
+home-assistant-frontend==20200603.3

 # homeassistant.components.zwave
 homeassistant-pyozw==0.1.10
@@ -545,7 +545,7 @@ pychromecast==6.0.0
 pycoolmasternet==0.0.4

 # homeassistant.components.daikin
-pydaikin==2.1.1
+pydaikin==2.1.2

 # homeassistant.components.deconz
 pydeconz==71
@@ -6,7 +6,7 @@ from aiohttp import ClientError
 from aiohttp.web_exceptions import HTTPForbidden
 import pytest

-from homeassistant.components.daikin.const import KEY_HOSTNAME, KEY_IP, KEY_MAC
+from homeassistant.components.daikin.const import KEY_IP, KEY_MAC
 from homeassistant.config_entries import (
     SOURCE_DISCOVERY,
     SOURCE_IMPORT,
@@ -25,7 +25,6 @@ from tests.common import MockConfigEntry

 MAC = "AABBCCDDEEFF"
 HOST = "127.0.0.1"
-HOSTNAME = "DaikinUNIQUE.local"


 @pytest.fixture
@@ -42,6 +41,16 @@ def mock_daikin():
         yield Appliance


+@pytest.fixture
+def mock_daikin_discovery():
+    """Mock pydaikin Discovery."""
+    with patch("homeassistant.components.daikin.config_flow.Discovery") as Discovery:
+        Discovery.poll = PropertyMock(
+            return_value={"127.0.01": {"mac": "AABBCCDDEEFF", "id": "test"}}
+        )
+        yield Discovery
+
+
 async def test_user(hass, mock_daikin):
     """Test user config."""
     result = await hass.config_entries.flow.async_init(
@@ -113,10 +122,12 @@ async def test_device_abort(hass, mock_daikin, s_effect, reason):
     "source, data, unique_id",
     [
         (SOURCE_DISCOVERY, {KEY_IP: HOST, KEY_MAC: MAC}, MAC),
-        (SOURCE_ZEROCONF, {CONF_HOST: HOST, KEY_HOSTNAME: HOSTNAME}, HOSTNAME),
+        (SOURCE_ZEROCONF, {CONF_HOST: HOST}, MAC),
     ],
 )
-async def test_discovery_zeroconf(hass, mock_daikin, source, data, unique_id):
+async def test_discovery_zeroconf(
+    hass, mock_daikin, mock_daikin_discovery, source, data, unique_id
+):
     """Test discovery/zeroconf step."""
     result = await hass.config_entries.flow.async_init(
         "daikin", context={"source": source}, data=data,
@@ -1353,3 +1353,68 @@ async def test_logbook_describe_event(hass, hass_client):
     assert event["name"] == "Test Name"
     assert event["message"] == "tested a message"
     assert event["domain"] == "test_domain"
+
+
+async def test_exclude_described_event(hass, hass_client):
+    """Test exclusions of events that are described by another integration."""
+    name = "My Automation Rule"
+    entity_id = "automation.excluded_rule"
+    entity_id2 = "automation.included_rule"
+    entity_id3 = "sensor.excluded_domain"
+
+    await hass.async_add_executor_job(init_recorder_component, hass)
+    assert await async_setup_component(
+        hass,
+        logbook.DOMAIN,
+        {
+            logbook.DOMAIN: {
+                logbook.CONF_EXCLUDE: {
+                    logbook.CONF_DOMAINS: ["sensor"],
+                    logbook.CONF_ENTITIES: [entity_id],
+                }
+            }
+        },
+    )
+
+    with patch(
+        "homeassistant.util.dt.utcnow",
+        return_value=dt_util.utcnow() - timedelta(seconds=5),
+    ):
+        hass.bus.async_fire(
+            "some_automation_event",
+            {logbook.ATTR_NAME: name, logbook.ATTR_ENTITY_ID: entity_id},
+        )
+        hass.bus.async_fire(
+            "some_automation_event",
+            {logbook.ATTR_NAME: name, logbook.ATTR_ENTITY_ID: entity_id2},
+        )
+        hass.bus.async_fire(
+            "some_event", {logbook.ATTR_NAME: name, logbook.ATTR_ENTITY_ID: entity_id3}
+        )
+        await hass.async_block_till_done()
+        await hass.async_add_executor_job(
+            hass.data[recorder.DATA_INSTANCE].block_till_done
+        )
+
+    def _describe(event):
+        """Describe an event."""
+        return {
+            "name": "Test Name",
+            "message": "tested a message",
+            "entity_id": event.data.get(ATTR_ENTITY_ID),
+        }
+
+    hass.components.logbook.async_describe_event(
+        "automation", "some_automation_event", _describe
+    )
+    hass.components.logbook.async_describe_event("sensor", "some_event", _describe)
+
+    client = await hass_client()
+    response = await client.get("/api/logbook")
+    results = await response.json()
+    assert len(results) == 1
+    event = results[0]
+    assert event["name"] == "Test Name"
+    assert event["message"] == "tested a message"
+    assert event["domain"] == "automation"
+    assert event["entity_id"] == "automation.included_rule"
@@ -405,6 +405,56 @@ async def test_option_flow(hass):
     }


+async def test_missing_option_flow(hass):
+    """Test config options flow selection when no options stored."""
+    mock_plex_server = MockPlexServer()
+
+    entry = MockConfigEntry(
+        domain=DOMAIN,
+        data=DEFAULT_DATA,
+        options=None,
+        unique_id=DEFAULT_DATA["server_id"],
+    )
+
+    with patch("plexapi.server.PlexServer", return_value=mock_plex_server), patch(
+        "homeassistant.components.plex.PlexWebsocket.listen"
+    ) as mock_listen:
+        entry.add_to_hass(hass)
+        assert await hass.config_entries.async_setup(entry.entry_id)
+        await hass.async_block_till_done()
+
+    assert mock_listen.called
+
+    assert len(hass.config_entries.async_entries(DOMAIN)) == 1
+    assert entry.state == ENTRY_STATE_LOADED
+
+    result = await hass.config_entries.options.async_init(
+        entry.entry_id, context={"source": "test"}, data=None
+    )
+    assert result["type"] == "form"
+    assert result["step_id"] == "plex_mp_settings"
+
+    result = await hass.config_entries.options.async_configure(
+        result["flow_id"],
+        user_input={
+            CONF_USE_EPISODE_ART: True,
+            CONF_IGNORE_NEW_SHARED_USERS: True,
+            CONF_MONITORED_USERS: list(mock_plex_server.accounts),
+        },
+    )
+    assert result["type"] == "create_entry"
+    assert result["data"] == {
+        MP_DOMAIN: {
+            CONF_USE_EPISODE_ART: True,
+            CONF_IGNORE_NEW_SHARED_USERS: True,
+            CONF_MONITORED_USERS: {
+                user: {"enabled": True} for user in mock_plex_server.accounts
+            },
+            CONF_IGNORE_PLEX_WEB_CLIENTS: False,
+        }
+    }
+
+
 async def test_option_flow_new_users_available(hass, caplog):
     """Test config options multiselect defaults when new Plex users are seen."""
