Merge pull request #65713 from home-assistant/rc

Paulus Schoutsen 2022-02-04 12:46:28 -08:00 committed by GitHub
commit 0f02ae981d
GPG Key ID: 4AEE18F83AFDEB23
42 changed files with 823 additions and 422 deletions


@@ -124,6 +124,14 @@ class AndroidTVFlowHandler(config_entries.ConfigFlow, domain=DOMAIN):
             return RESULT_CONN_ERROR, None

         dev_prop = aftv.device_properties
+        _LOGGER.info(
+            "Android TV at %s: %s = %r, %s = %r",
+            user_input[CONF_HOST],
+            PROP_ETHMAC,
+            dev_prop.get(PROP_ETHMAC),
+            PROP_WIFIMAC,
+            dev_prop.get(PROP_WIFIMAC),
+        )
         unique_id = format_mac(
             dev_prop.get(PROP_ETHMAC) or dev_prop.get(PROP_WIFIMAC, "")
         )
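
Note: the new log line only surfaces which MAC addresses the device reports; the unique-ID fallback below it is unchanged. A minimal illustration of that fallback in plain Python (hypothetical values, not the integration's real flow):

PROP_ETHMAC = "ethmac"
PROP_WIFIMAC = "wifimac"

# Hypothetical device properties where the Ethernet MAC is missing,
# matching the patched test fixture further down in this commit.
dev_prop = {"ethmac": None, "wifimac": "AB:CD:EF:01:02:03"}

# `or` skips the missing Ethernet MAC and falls back to the Wi-Fi MAC.
raw_mac = dev_prop.get(PROP_ETHMAC) or dev_prop.get(PROP_WIFIMAC, "")
unique_id = raw_mac.lower()  # format_mac() normalizes in a similar way
print(unique_id)  # ab:cd:ef:01:02:03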


@@ -4,7 +4,7 @@
   "documentation": "https://www.home-assistant.io/integrations/androidtv",
   "requirements": [
     "adb-shell[async]==0.4.0",
-    "androidtv[async]==0.0.61",
+    "androidtv[async]==0.0.63",
     "pure-python-adb[async]==0.3.0.dev0"
   ],
   "codeowners": ["@JeffLIrion", "@ollo69"],


@@ -3,7 +3,7 @@
   "name": "Bosch SHC",
   "config_flow": true,
   "documentation": "https://www.home-assistant.io/integrations/bosch_shc",
-  "requirements": ["boschshcpy==0.2.28"],
+  "requirements": ["boschshcpy==0.2.29"],
   "zeroconf": [{ "type": "_http._tcp.local.", "name": "bosch shc*" }],
   "iot_class": "local_push",
   "codeowners": ["@tschamm"],


@@ -113,8 +113,9 @@ class ButtonEntity(RestoreEntity):
         self.async_write_ha_state()
         await self.async_press()

-    async def async_added_to_hass(self) -> None:
+    async def async_internal_added_to_hass(self) -> None:
         """Call when the button is added to hass."""
+        await super().async_internal_added_to_hass()
         state = await self.async_get_last_state()
         if state is not None and state.state is not None:
             self.__last_pressed = dt_util.parse_datetime(state.state)


@@ -232,7 +232,11 @@ class WebDavCalendarData:
                 new_events.append(new_event)
             elif _start_of_tomorrow <= start_dt:
                 break
-        vevents = [event.instance.vevent for event in results + new_events]
+        vevents = [
+            event.instance.vevent
+            for event in results + new_events
+            if hasattr(event.instance, "vevent")
+        ]

         # dtstart can be a date or datetime depending if the event lasts a
         # whole day. Convert everything to datetime to be able to sort it
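
Note: the rewritten comprehension skips CalDAV objects that expose no `vevent` (for example to-do or journal entries), which previously raised AttributeError. A self-contained sketch of the same guard with stand-in objects (not the caldav library):

from types import SimpleNamespace

# Hypothetical query results: two events and one task without a `vevent`.
results = [
    SimpleNamespace(instance=SimpleNamespace(vevent="meeting")),
    SimpleNamespace(instance=SimpleNamespace(vtodo="buy milk")),  # no vevent attribute
    SimpleNamespace(instance=SimpleNamespace(vevent="dentist")),
]

# Same shape as the patched comprehension: only objects with a vevent survive.
vevents = [
    event.instance.vevent
    for event in results
    if hasattr(event.instance, "vevent")
]
print(vevents)  # ['meeting', 'dentist']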


@@ -62,6 +62,9 @@ async def websocket_update_device(hass, connection, msg):
     msg.pop("type")
     msg_id = msg.pop("id")

+    if "disabled_by" in msg:
+        msg["disabled_by"] = DeviceEntryDisabler(msg["disabled_by"])
+
     entry = registry.async_update_device(**msg)

     connection.send_message(websocket_api.result_message(msg_id, _entry_dict(entry)))
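
Note: the websocket payload carries `disabled_by` as a plain string, so it is coerced into the registry's enum before the update call. A rough sketch of the idea with a stand-in enum (not the real homeassistant.helpers.device_registry.DeviceEntryDisabler):

from enum import Enum


class DeviceEntryDisabler(str, Enum):
    """Stand-in for the registry enum; values are illustrative."""

    CONFIG_ENTRY = "config_entry"
    INTEGRATION = "integration"
    USER = "user"


msg = {"device_id": "abc123", "disabled_by": "user"}

# Mirror the patched handler: turn the raw string into an enum member.
if "disabled_by" in msg:
    msg["disabled_by"] = DeviceEntryDisabler(msg["disabled_by"])

assert msg["disabled_by"] is DeviceEntryDisabler.USER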


@@ -7,7 +7,6 @@
     "cloud",
     "counter",
     "dhcp",
-    "diagnostics",
     "energy",
     "frontend",
     "history",


@@ -4,7 +4,7 @@
   "config_flow": true,
   "dependencies": ["network"],
   "documentation": "https://www.home-assistant.io/integrations/flux_led",
-  "requirements": ["flux_led==0.28.17"],
+  "requirements": ["flux_led==0.28.20"],
   "quality_scale": "platinum",
   "codeowners": ["@icemanch", "@bdraco"],
   "iot_class": "local_push",


@@ -10,6 +10,7 @@
     "auth",
     "config",
     "device_automation",
+    "diagnostics",
     "http",
     "lovelace",
     "onboarding",


@@ -10,7 +10,6 @@ from aiogithubapi import (
     GitHubException,
     GitHubLoginDeviceModel,
     GitHubLoginOauthModel,
-    GitHubRepositoryModel,
 )
 from aiogithubapi.const import OAUTH_USER_LOGIN
 import voluptuous as vol
@@ -34,11 +33,12 @@ from .const import (
 )


-async def starred_repositories(hass: HomeAssistant, access_token: str) -> list[str]:
-    """Return a list of repositories that the user has starred."""
+async def get_repositories(hass: HomeAssistant, access_token: str) -> list[str]:
+    """Return a list of repositories that the user owns or has starred."""
     client = GitHubAPI(token=access_token, session=async_get_clientsession(hass))
+    repositories = set()

-    async def _get_starred() -> list[GitHubRepositoryModel] | None:
+    async def _get_starred_repositories() -> None:
         response = await client.user.starred(**{"params": {"per_page": 100}})
         if not response.is_last_page:
             results = await asyncio.gather(
@@ -54,16 +54,44 @@ async def starred_repositories(hass: HomeAssistant, access_token: str) -> list[str]:
             for result in results:
                 response.data.extend(result.data)
-        return response.data
+        repositories.update(response.data)
+
+    async def _get_personal_repositories() -> None:
+        response = await client.user.repos(**{"params": {"per_page": 100}})
+        if not response.is_last_page:
+            results = await asyncio.gather(
+                *(
+                    client.user.repos(
+                        **{"params": {"per_page": 100, "page": page_number}},
+                    )
+                    for page_number in range(
+                        response.next_page_number, response.last_page_number + 1
+                    )
+                )
+            )
+            for result in results:
+                response.data.extend(result.data)
+        repositories.update(response.data)

     try:
-        result = await _get_starred()
+        await asyncio.gather(
+            *(
+                _get_starred_repositories(),
+                _get_personal_repositories(),
+            )
+        )
     except GitHubException:
         return DEFAULT_REPOSITORIES

-    if not result or len(result) == 0:
+    if len(repositories) == 0:
         return DEFAULT_REPOSITORIES

-    return sorted((repo.full_name for repo in result), key=str.casefold)
+    return sorted(
+        (repo.full_name for repo in repositories),
+        key=str.casefold,
+    )


 class ConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):
@@ -153,9 +181,7 @@ class ConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):
         assert self._login is not None

         if not user_input:
-            repositories = await starred_repositories(
-                self.hass, self._login.access_token
-            )
+            repositories = await get_repositories(self.hass, self._login.access_token)
             return self.async_show_form(
                 step_id="repositories",
                 data_schema=vol.Schema(
@@ -205,7 +231,7 @@ class OptionsFlowHandler(config_entries.OptionsFlow):
             configured_repositories: list[str] = self.config_entry.options[
                 CONF_REPOSITORIES
             ]
-            repositories = await starred_repositories(
+            repositories = await get_repositories(
                 self.hass, self.config_entry.data[CONF_ACCESS_TOKEN]
             )
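
Note: both repository helpers follow the same pagination pattern - request page 1, and if it is not the last page, fetch the remaining pages concurrently and merge them into the first response. A generic, self-contained sketch of that pattern (hypothetical fetch_page coroutine, not the aiogithubapi client):

import asyncio

PAGES = {1: ["repo-a", "repo-b"], 2: ["repo-c"], 3: ["repo-d"]}
LAST_PAGE = 3  # a real response reports this via last_page_number


async def fetch_page(page):
    """Hypothetical stand-in for client.user.starred / client.user.repos."""
    await asyncio.sleep(0)  # pretend to do network I/O
    return list(PAGES[page])


async def get_all():
    repositories = set()
    first = await fetch_page(1)
    # Page 1 told us how many pages exist; fetch the rest concurrently.
    rest = await asyncio.gather(*(fetch_page(n) for n in range(2, LAST_PAGE + 1)))
    for result in rest:
        first.extend(result)
    repositories.update(first)
    return repositories


print(sorted(asyncio.run(get_all()), key=str.casefold))
# ['repo-a', 'repo-b', 'repo-c', 'repo-d']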


@@ -3,7 +3,7 @@
   "name": "GitHub",
   "documentation": "https://www.home-assistant.io/integrations/github",
   "requirements": [
-    "aiogithubapi==22.1.0"
+    "aiogithubapi==22.2.0"
   ],
   "codeowners": [
     "@timmo001",


@@ -3,7 +3,7 @@
   "name": "HomematicIP Cloud",
   "config_flow": true,
   "documentation": "https://www.home-assistant.io/integrations/homematicip_cloud",
-  "requirements": ["homematicip==1.0.1"],
+  "requirements": ["homematicip==1.0.2"],
   "codeowners": [],
   "quality_scale": "platinum",
   "iot_class": "cloud_push"


@@ -3,10 +3,11 @@ import logging

 from aiohwenergy import DisabledError

-from homeassistant.config_entries import ConfigEntry
+from homeassistant.config_entries import SOURCE_IMPORT, ConfigEntry
 from homeassistant.const import CONF_IP_ADDRESS
 from homeassistant.core import HomeAssistant
 from homeassistant.exceptions import ConfigEntryNotReady
+from homeassistant.helpers import entity_registry as er
 from homeassistant.helpers.update_coordinator import UpdateFailed

 from .const import DOMAIN, PLATFORMS
@@ -20,6 +21,51 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
     _LOGGER.debug("__init__ async_setup_entry")

+    # Migrate `homewizard_energy` (custom_component) to `homewizard`
+    if entry.source == SOURCE_IMPORT and "old_config_entry_id" in entry.data:
+        # Remove the old config entry ID from the entry data so we don't try this again
+        # on the next setup
+        data = entry.data.copy()
+        old_config_entry_id = data.pop("old_config_entry_id")
+        hass.config_entries.async_update_entry(entry, data=data)
+        _LOGGER.debug(
+            (
+                "Setting up imported homewizard_energy entry %s for the first time as "
+                "homewizard entry %s"
+            ),
+            old_config_entry_id,
+            entry.entry_id,
+        )
+
+        ent_reg = er.async_get(hass)
+        for entity in er.async_entries_for_config_entry(ent_reg, old_config_entry_id):
+            _LOGGER.debug("Removing %s", entity.entity_id)
+            ent_reg.async_remove(entity.entity_id)
+
+            _LOGGER.debug("Re-creating %s for the new config entry", entity.entity_id)
+            # We will precreate the entity so that any customizations can be preserved
+            new_entity = ent_reg.async_get_or_create(
+                entity.domain,
+                DOMAIN,
+                entity.unique_id,
+                suggested_object_id=entity.entity_id.split(".")[1],
+                disabled_by=entity.disabled_by,
+                config_entry=entry,
+                original_name=entity.original_name,
+                original_icon=entity.original_icon,
+            )
+            _LOGGER.debug("Re-created %s", new_entity.entity_id)
+
+            # If there are customizations on the old entity, apply them to the new one
+            if entity.name or entity.icon:
+                ent_reg.async_update_entity(
+                    new_entity.entity_id, name=entity.name, icon=entity.icon
+                )
+
+        # Remove the old config entry and now the entry is fully migrated
+        hass.async_create_task(hass.config_entries.async_remove(old_config_entry_id))
+
     # Create coordinator
     coordinator = Coordinator(hass, entry.data[CONF_IP_ADDRESS])
     try:


@@ -28,6 +28,21 @@ class ConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):
         """Initialize the HomeWizard config flow."""
         self.config: dict[str, str | int] = {}

+    async def async_step_import(self, import_config: dict) -> FlowResult:
+        """Handle a flow initiated by older `homewizard_energy` component."""
+        _LOGGER.debug("config_flow async_step_import")
+
+        self.hass.components.persistent_notification.async_create(
+            (
+                "The custom integration of HomeWizard Energy has been migrated to core. "
+                "You can safely remove the custom integration from the custom_integrations folder."
+            ),
+            "HomeWizard Energy",
+            f"homewizard_energy_to_{DOMAIN}",
+        )
+
+        return await self.async_step_user({CONF_IP_ADDRESS: import_config["host"]})
+
     async def async_step_user(
         self, user_input: dict[str, Any] | None = None
     ) -> FlowResult:
@@ -59,12 +74,17 @@ class ConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):
                 }
             )

+        data: dict[str, str] = {CONF_IP_ADDRESS: user_input[CONF_IP_ADDRESS]}
+        if self.source == config_entries.SOURCE_IMPORT:
+            old_config_entry_id = self.context["old_config_entry_id"]
+            assert self.hass.config_entries.async_get_entry(old_config_entry_id)
+            data["old_config_entry_id"] = old_config_entry_id
+
         # Add entry
         return self.async_create_entry(
             title=f"{device_info[CONF_PRODUCT_NAME]} ({device_info[CONF_SERIAL]})",
-            data={
-                CONF_IP_ADDRESS: user_input[CONF_IP_ADDRESS],
-            },
+            data=data,
         )

     async def async_step_zeroconf(


@@ -6,6 +6,7 @@ import logging
 import sqlalchemy
 from sqlalchemy import ForeignKeyConstraint, MetaData, Table, func, text
 from sqlalchemy.exc import (
+    DatabaseError,
     InternalError,
     OperationalError,
     ProgrammingError,
@@ -68,20 +69,18 @@ def schema_is_current(current_version):

 def migrate_schema(instance, current_version):
     """Check if the schema needs to be upgraded."""
-    with session_scope(session=instance.get_session()) as session:
-        _LOGGER.warning(
-            "Database is about to upgrade. Schema version: %s", current_version
-        )
-        for version in range(current_version, SCHEMA_VERSION):
-            new_version = version + 1
-            _LOGGER.info("Upgrading recorder db schema to version %s", new_version)
-            _apply_update(instance, session, new_version, current_version)
+    _LOGGER.warning("Database is about to upgrade. Schema version: %s", current_version)
+    for version in range(current_version, SCHEMA_VERSION):
+        new_version = version + 1
+        _LOGGER.info("Upgrading recorder db schema to version %s", new_version)
+        _apply_update(instance, new_version, current_version)
+        with session_scope(session=instance.get_session()) as session:
             session.add(SchemaChanges(schema_version=new_version))
-            _LOGGER.info("Upgrade to version %s done", new_version)
+        _LOGGER.info("Upgrade to version %s done", new_version)


-def _create_index(connection, table_name, index_name):
+def _create_index(instance, table_name, index_name):
     """Create an index for the specified table.

     The index name should match the name given for the index

@@ -103,8 +102,10 @@ def _create_index(connection, table_name, index_name):
         index_name,
     )
     try:
-        index.create(connection)
-    except (InternalError, ProgrammingError, OperationalError) as err:
+        with session_scope(session=instance.get_session()) as session:
+            connection = session.connection()
+            index.create(connection)
+    except (InternalError, OperationalError, ProgrammingError) as err:
         raise_if_exception_missing_str(err, ["already exists", "duplicate"])
         _LOGGER.warning(
             "Index %s already exists on %s, continuing", index_name, table_name

@@ -113,7 +114,7 @@ def _create_index(connection, table_name, index_name):
     _LOGGER.debug("Finished creating %s", index_name)


-def _drop_index(connection, table_name, index_name):
+def _drop_index(instance, table_name, index_name):
     """Drop an index from a specified table.

     There is no universal way to do something like `DROP INDEX IF EXISTS`
@@ -129,7 +130,9 @@ def _drop_index(connection, table_name, index_name):
     # Engines like DB2/Oracle
     try:
-        connection.execute(text(f"DROP INDEX {index_name}"))
+        with session_scope(session=instance.get_session()) as session:
+            connection = session.connection()
+            connection.execute(text(f"DROP INDEX {index_name}"))
     except SQLAlchemyError:
         pass
     else:
@@ -138,13 +141,15 @@ def _drop_index(connection, table_name, index_name):
     # Engines like SQLite, SQL Server
     if not success:
         try:
-            connection.execute(
-                text(
-                    "DROP INDEX {table}.{index}".format(
-                        index=index_name, table=table_name
-                    )
-                )
-            )
+            with session_scope(session=instance.get_session()) as session:
+                connection = session.connection()
+                connection.execute(
+                    text(
+                        "DROP INDEX {table}.{index}".format(
+                            index=index_name, table=table_name
+                        )
+                    )
+                )
         except SQLAlchemyError:
             pass
         else:
@@ -153,13 +158,15 @@ def _drop_index(connection, table_name, index_name):
     if not success:
         # Engines like MySQL, MS Access
         try:
-            connection.execute(
-                text(
-                    "DROP INDEX {index} ON {table}".format(
-                        index=index_name, table=table_name
-                    )
-                )
-            )
+            with session_scope(session=instance.get_session()) as session:
+                connection = session.connection()
+                connection.execute(
+                    text(
+                        "DROP INDEX {index} ON {table}".format(
+                            index=index_name, table=table_name
+                        )
+                    )
+                )
         except SQLAlchemyError:
             pass
         else:
@@ -184,7 +191,7 @@ def _drop_index(connection, table_name, index_name):
     )


-def _add_columns(connection, table_name, columns_def):
+def _add_columns(instance, table_name, columns_def):
     """Add columns to a table."""
     _LOGGER.warning(
         "Adding columns %s to table %s. Note: this can take several "
@@ -197,29 +204,33 @@ def _add_columns(connection, table_name, columns_def):
     columns_def = [f"ADD {col_def}" for col_def in columns_def]

     try:
-        connection.execute(
-            text(
-                "ALTER TABLE {table} {columns_def}".format(
-                    table=table_name, columns_def=", ".join(columns_def)
-                )
-            )
-        )
+        with session_scope(session=instance.get_session()) as session:
+            connection = session.connection()
+            connection.execute(
+                text(
+                    "ALTER TABLE {table} {columns_def}".format(
+                        table=table_name, columns_def=", ".join(columns_def)
+                    )
+                )
+            )
         return
-    except (InternalError, OperationalError):
+    except (InternalError, OperationalError, ProgrammingError):
         # Some engines support adding all columns at once,
         # this error is when they don't
         _LOGGER.info("Unable to use quick column add. Adding 1 by 1")

     for column_def in columns_def:
         try:
-            connection.execute(
-                text(
-                    "ALTER TABLE {table} {column_def}".format(
-                        table=table_name, column_def=column_def
-                    )
-                )
-            )
-        except (InternalError, OperationalError) as err:
+            with session_scope(session=instance.get_session()) as session:
+                connection = session.connection()
+                connection.execute(
+                    text(
+                        "ALTER TABLE {table} {column_def}".format(
+                            table=table_name, column_def=column_def
+                        )
+                    )
+                )
+        except (InternalError, OperationalError, ProgrammingError) as err:
             raise_if_exception_missing_str(err, ["already exists", "duplicate"])
             _LOGGER.warning(
                 "Column %s already exists on %s, continuing",
@@ -228,7 +239,7 @@ def _add_columns(connection, table_name, columns_def):
     )


-def _modify_columns(connection, engine, table_name, columns_def):
+def _modify_columns(instance, engine, table_name, columns_def):
     """Modify columns in a table."""
     if engine.dialect.name == "sqlite":
         _LOGGER.debug(
@@ -261,33 +272,37 @@ def _modify_columns(connection, engine, table_name, columns_def):
     columns_def = [f"MODIFY {col_def}" for col_def in columns_def]

     try:
-        connection.execute(
-            text(
-                "ALTER TABLE {table} {columns_def}".format(
-                    table=table_name, columns_def=", ".join(columns_def)
-                )
-            )
-        )
+        with session_scope(session=instance.get_session()) as session:
+            connection = session.connection()
+            connection.execute(
+                text(
+                    "ALTER TABLE {table} {columns_def}".format(
+                        table=table_name, columns_def=", ".join(columns_def)
+                    )
+                )
+            )
         return
     except (InternalError, OperationalError):
         _LOGGER.info("Unable to use quick column modify. Modifying 1 by 1")

     for column_def in columns_def:
         try:
-            connection.execute(
-                text(
-                    "ALTER TABLE {table} {column_def}".format(
-                        table=table_name, column_def=column_def
-                    )
-                )
-            )
+            with session_scope(session=instance.get_session()) as session:
+                connection = session.connection()
+                connection.execute(
+                    text(
+                        "ALTER TABLE {table} {column_def}".format(
+                            table=table_name, column_def=column_def
+                        )
+                    )
+                )
         except (InternalError, OperationalError):
             _LOGGER.exception(
                 "Could not modify column %s in table %s", column_def, table_name
             )


-def _update_states_table_with_foreign_key_options(connection, engine):
+def _update_states_table_with_foreign_key_options(instance, engine):
     """Add the options to foreign key constraints."""
     inspector = sqlalchemy.inspect(engine)
     alters = []
@@ -316,17 +331,19 @@ def _update_states_table_with_foreign_key_options(connection, engine):

     for alter in alters:
         try:
-            connection.execute(DropConstraint(alter["old_fk"]))
-            for fkc in states_key_constraints:
-                if fkc.column_keys == alter["columns"]:
-                    connection.execute(AddConstraint(fkc))
+            with session_scope(session=instance.get_session()) as session:
+                connection = session.connection()
+                connection.execute(DropConstraint(alter["old_fk"]))
+                for fkc in states_key_constraints:
+                    if fkc.column_keys == alter["columns"]:
+                        connection.execute(AddConstraint(fkc))
         except (InternalError, OperationalError):
             _LOGGER.exception(
                 "Could not update foreign options in %s table", TABLE_STATES
             )


-def _drop_foreign_key_constraints(connection, engine, table, columns):
+def _drop_foreign_key_constraints(instance, engine, table, columns):
     """Drop foreign key constraints for a table on specific columns."""
     inspector = sqlalchemy.inspect(engine)
     drops = []
@@ -345,7 +362,9 @@ def _drop_foreign_key_constraints(connection, engine, table, columns):

     for drop in drops:
         try:
-            connection.execute(DropConstraint(drop))
+            with session_scope(session=instance.get_session()) as session:
+                connection = session.connection()
+                connection.execute(DropConstraint(drop))
         except (InternalError, OperationalError):
             _LOGGER.exception(
                 "Could not drop foreign constraints in %s table on %s",
@@ -354,17 +373,16 @@ def _drop_foreign_key_constraints(connection, engine, table, columns):
     )


-def _apply_update(instance, session, new_version, old_version):  # noqa: C901
+def _apply_update(instance, new_version, old_version):  # noqa: C901
     """Perform operations to bring schema up to date."""
     engine = instance.engine
-    connection = session.connection()
     if new_version == 1:
-        _create_index(connection, "events", "ix_events_time_fired")
+        _create_index(instance, "events", "ix_events_time_fired")
     elif new_version == 2:
         # Create compound start/end index for recorder_runs
-        _create_index(connection, "recorder_runs", "ix_recorder_runs_start_end")
+        _create_index(instance, "recorder_runs", "ix_recorder_runs_start_end")
         # Create indexes for states
-        _create_index(connection, "states", "ix_states_last_updated")
+        _create_index(instance, "states", "ix_states_last_updated")
     elif new_version == 3:
         # There used to be a new index here, but it was removed in version 4.
         pass
@@ -374,41 +392,41 @@ def _apply_update(instance, session, new_version, old_version):  # noqa: C901
         if old_version == 3:
             # Remove index that was added in version 3
-            _drop_index(connection, "states", "ix_states_created_domain")
+            _drop_index(instance, "states", "ix_states_created_domain")
         if old_version == 2:
             # Remove index that was added in version 2
-            _drop_index(connection, "states", "ix_states_entity_id_created")
+            _drop_index(instance, "states", "ix_states_entity_id_created")

         # Remove indexes that were added in version 0
-        _drop_index(connection, "states", "states__state_changes")
-        _drop_index(connection, "states", "states__significant_changes")
-        _drop_index(connection, "states", "ix_states_entity_id_created")
-        _create_index(connection, "states", "ix_states_entity_id_last_updated")
+        _drop_index(instance, "states", "states__state_changes")
+        _drop_index(instance, "states", "states__significant_changes")
+        _drop_index(instance, "states", "ix_states_entity_id_created")
+        _create_index(instance, "states", "ix_states_entity_id_last_updated")
     elif new_version == 5:
         # Create supporting index for States.event_id foreign key
-        _create_index(connection, "states", "ix_states_event_id")
+        _create_index(instance, "states", "ix_states_event_id")
     elif new_version == 6:
         _add_columns(
-            session,
+            instance,
             "events",
             ["context_id CHARACTER(36)", "context_user_id CHARACTER(36)"],
         )
-        _create_index(connection, "events", "ix_events_context_id")
-        _create_index(connection, "events", "ix_events_context_user_id")
+        _create_index(instance, "events", "ix_events_context_id")
+        _create_index(instance, "events", "ix_events_context_user_id")
         _add_columns(
-            connection,
+            instance,
             "states",
             ["context_id CHARACTER(36)", "context_user_id CHARACTER(36)"],
         )
-        _create_index(connection, "states", "ix_states_context_id")
-        _create_index(connection, "states", "ix_states_context_user_id")
+        _create_index(instance, "states", "ix_states_context_id")
+        _create_index(instance, "states", "ix_states_context_user_id")
     elif new_version == 7:
-        _create_index(connection, "states", "ix_states_entity_id")
+        _create_index(instance, "states", "ix_states_entity_id")
     elif new_version == 8:
-        _add_columns(connection, "events", ["context_parent_id CHARACTER(36)"])
-        _add_columns(connection, "states", ["old_state_id INTEGER"])
-        _create_index(connection, "events", "ix_events_context_parent_id")
+        _add_columns(instance, "events", ["context_parent_id CHARACTER(36)"])
+        _add_columns(instance, "states", ["old_state_id INTEGER"])
+        _create_index(instance, "events", "ix_events_context_parent_id")
     elif new_version == 9:
         # We now get the context from events with a join
         # since its always there on state_changed events
@@ -418,36 +436,36 @@ def _apply_update(instance, session, new_version, old_version):  # noqa: C901
         # and we would have to move to something like
         # sqlalchemy alembic to make that work
         #
-        _drop_index(connection, "states", "ix_states_context_id")
-        _drop_index(connection, "states", "ix_states_context_user_id")
+        _drop_index(instance, "states", "ix_states_context_id")
+        _drop_index(instance, "states", "ix_states_context_user_id")
         # This index won't be there if they were not running
         # nightly but we don't treat that as a critical issue
-        _drop_index(connection, "states", "ix_states_context_parent_id")
+        _drop_index(instance, "states", "ix_states_context_parent_id")
         # Redundant keys on composite index:
         # We already have ix_states_entity_id_last_updated
-        _drop_index(connection, "states", "ix_states_entity_id")
-        _create_index(connection, "events", "ix_events_event_type_time_fired")
-        _drop_index(connection, "events", "ix_events_event_type")
+        _drop_index(instance, "states", "ix_states_entity_id")
+        _create_index(instance, "events", "ix_events_event_type_time_fired")
+        _drop_index(instance, "events", "ix_events_event_type")
     elif new_version == 10:
         # Now done in step 11
         pass
     elif new_version == 11:
-        _create_index(connection, "states", "ix_states_old_state_id")
-        _update_states_table_with_foreign_key_options(connection, engine)
+        _create_index(instance, "states", "ix_states_old_state_id")
+        _update_states_table_with_foreign_key_options(instance, engine)
     elif new_version == 12:
         if engine.dialect.name == "mysql":
-            _modify_columns(connection, engine, "events", ["event_data LONGTEXT"])
-            _modify_columns(connection, engine, "states", ["attributes LONGTEXT"])
+            _modify_columns(instance, engine, "events", ["event_data LONGTEXT"])
+            _modify_columns(instance, engine, "states", ["attributes LONGTEXT"])
     elif new_version == 13:
         if engine.dialect.name == "mysql":
             _modify_columns(
-                connection,
+                instance,
                 engine,
                 "events",
                 ["time_fired DATETIME(6)", "created DATETIME(6)"],
             )
             _modify_columns(
-                connection,
+                instance,
                 engine,
                 "states",
                 [
@@ -457,14 +475,12 @@ def _apply_update(instance, session, new_version, old_version):  # noqa: C901
             ],
         )
     elif new_version == 14:
-        _modify_columns(connection, engine, "events", ["event_type VARCHAR(64)"])
+        _modify_columns(instance, engine, "events", ["event_type VARCHAR(64)"])
     elif new_version == 15:
         # This dropped the statistics table, done again in version 18.
         pass
     elif new_version == 16:
-        _drop_foreign_key_constraints(
-            connection, engine, TABLE_STATES, ["old_state_id"]
-        )
+        _drop_foreign_key_constraints(instance, engine, TABLE_STATES, ["old_state_id"])
     elif new_version == 17:
         # This dropped the statistics table, done again in version 18.
         pass
@@ -489,12 +505,13 @@ def _apply_update(instance, session, new_version, old_version):  # noqa: C901
     elif new_version == 19:
         # This adds the statistic runs table, insert a fake run to prevent duplicating
         # statistics.
-        session.add(StatisticsRuns(start=get_start_time()))
+        with session_scope(session=instance.get_session()) as session:
+            session.add(StatisticsRuns(start=get_start_time()))
     elif new_version == 20:
         # This changed the precision of statistics from float to double
         if engine.dialect.name in ["mysql", "postgresql"]:
             _modify_columns(
-                connection,
+                instance,
                 engine,
                 "statistics",
                 [
@@ -516,14 +533,16 @@ def _apply_update(instance, session, new_version, old_version):  # noqa: C901
                 table,
             )
             with contextlib.suppress(SQLAlchemyError):
-                connection.execute(
-                    # Using LOCK=EXCLUSIVE to prevent the database from corrupting
-                    # https://github.com/home-assistant/core/issues/56104
-                    text(
-                        f"ALTER TABLE {table} CONVERT TO "
-                        "CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci LOCK=EXCLUSIVE"
-                    )
-                )
+                with session_scope(session=instance.get_session()) as session:
+                    connection = session.connection()
+                    connection.execute(
+                        # Using LOCK=EXCLUSIVE to prevent the database from corrupting
+                        # https://github.com/home-assistant/core/issues/56104
+                        text(
+                            f"ALTER TABLE {table} CONVERT TO "
+                            "CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci LOCK=EXCLUSIVE"
+                        )
+                    )
     elif new_version == 22:
         # Recreate the all statistics tables for Oracle DB with Identity columns
         #
@@ -549,60 +568,76 @@ def _apply_update(instance, session, new_version, old_version):  # noqa: C901
         # Block 5-minute statistics for one hour from the last run, or it will overlap
         # with existing hourly statistics. Don't block on a database with no existing
         # statistics.
-        if session.query(Statistics.id).count() and (
-            last_run_string := session.query(func.max(StatisticsRuns.start)).scalar()
-        ):
-            last_run_start_time = process_timestamp(last_run_string)
-            if last_run_start_time:
-                fake_start_time = last_run_start_time + timedelta(minutes=5)
-                while fake_start_time < last_run_start_time + timedelta(hours=1):
-                    session.add(StatisticsRuns(start=fake_start_time))
-                    fake_start_time += timedelta(minutes=5)
+        with session_scope(session=instance.get_session()) as session:
+            if session.query(Statistics.id).count() and (
+                last_run_string := session.query(
+                    func.max(StatisticsRuns.start)
+                ).scalar()
+            ):
+                last_run_start_time = process_timestamp(last_run_string)
+                if last_run_start_time:
+                    fake_start_time = last_run_start_time + timedelta(minutes=5)
+                    while fake_start_time < last_run_start_time + timedelta(hours=1):
+                        session.add(StatisticsRuns(start=fake_start_time))
+                        fake_start_time += timedelta(minutes=5)

         # When querying the database, be careful to only explicitly query for columns
         # which were present in schema version 21. If querying the table, SQLAlchemy
         # will refer to future columns.
-        for sum_statistic in session.query(StatisticsMeta.id).filter_by(has_sum=true()):
-            last_statistic = (
-                session.query(
-                    Statistics.start,
-                    Statistics.last_reset,
-                    Statistics.state,
-                    Statistics.sum,
-                )
-                .filter_by(metadata_id=sum_statistic.id)
-                .order_by(Statistics.start.desc())
-                .first()
-            )
-            if last_statistic:
-                session.add(
-                    StatisticsShortTerm(
-                        metadata_id=sum_statistic.id,
-                        start=last_statistic.start,
-                        last_reset=last_statistic.last_reset,
-                        state=last_statistic.state,
-                        sum=last_statistic.sum,
-                    )
-                )
+        with session_scope(session=instance.get_session()) as session:
+            for sum_statistic in session.query(StatisticsMeta.id).filter_by(
+                has_sum=true()
+            ):
+                last_statistic = (
+                    session.query(
+                        Statistics.start,
+                        Statistics.last_reset,
+                        Statistics.state,
+                        Statistics.sum,
+                    )
+                    .filter_by(metadata_id=sum_statistic.id)
+                    .order_by(Statistics.start.desc())
+                    .first()
+                )
+                if last_statistic:
+                    session.add(
+                        StatisticsShortTerm(
+                            metadata_id=sum_statistic.id,
+                            start=last_statistic.start,
+                            last_reset=last_statistic.last_reset,
+                            state=last_statistic.state,
+                            sum=last_statistic.sum,
+                        )
+                    )
     elif new_version == 23:
         # Add name column to StatisticsMeta
-        _add_columns(session, "statistics_meta", ["name VARCHAR(255)"])
+        _add_columns(instance, "statistics_meta", ["name VARCHAR(255)"])
     elif new_version == 24:
-        # Delete duplicated statistics
-        delete_duplicates(instance, session)
         # Recreate statistics indices to block duplicated statistics
-        _drop_index(connection, "statistics", "ix_statistics_statistic_id_start")
-        _create_index(connection, "statistics", "ix_statistics_statistic_id_start")
+        _drop_index(instance, "statistics", "ix_statistics_statistic_id_start")
         _drop_index(
-            connection,
+            instance,
             "statistics_short_term",
             "ix_statistics_short_term_statistic_id_start",
         )
-        _create_index(
-            connection,
-            "statistics_short_term",
-            "ix_statistics_short_term_statistic_id_start",
-        )
+        try:
+            _create_index(instance, "statistics", "ix_statistics_statistic_id_start")
+            _create_index(
+                instance,
+                "statistics_short_term",
+                "ix_statistics_short_term_statistic_id_start",
+            )
+        except DatabaseError:
+            # There may be duplicated statistics entries, delete duplicated statistics
+            # and try again
+            with session_scope(session=instance.get_session()) as session:
+                delete_duplicates(instance, session)
+            _create_index(instance, "statistics", "ix_statistics_statistic_id_start")
+            _create_index(
+                instance,
+                "statistics_short_term",
+                "ix_statistics_short_term_statistic_id_start",
+            )
     else:
         raise ValueError(f"No schema migration defined for version {new_version}")
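
Note: the migration helpers above no longer share one long-lived session; each statement now runs inside its own short session_scope. A toy illustration of the commit/rollback/close behaviour such a scope typically provides (stand-in classes, not the recorder's real session_scope):

from contextlib import contextmanager


class FakeSession:
    """Toy stand-in for a SQLAlchemy session."""

    def commit(self):
        print("commit")

    def rollback(self):
        print("rollback")

    def close(self):
        print("close")


@contextmanager
def session_scope(session):
    """Commit on success, roll back on error, always close."""
    try:
        yield session
        session.commit()
    except Exception:
        session.rollback()
        raise
    finally:
        session.close()


# Each migration step gets its own unit of work, as in the patched helpers.
with session_scope(FakeSession()) as session:
    print("apply one schema change")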


@@ -119,8 +119,6 @@ QUERY_STATISTIC_META_ID = [
     StatisticsMeta.statistic_id,
 ]

-MAX_DUPLICATES = 1000000
-
 STATISTICS_BAKERY = "recorder_statistics_bakery"
 STATISTICS_META_BAKERY = "recorder_statistics_meta_bakery"
 STATISTICS_SHORT_TERM_BAKERY = "recorder_statistics_short_term_bakery"

@@ -351,8 +349,6 @@ def _delete_duplicates_from_table(
             .delete(synchronize_session=False)
         )
         total_deleted_rows += deleted_rows
-        if total_deleted_rows >= MAX_DUPLICATES:
-            break

     return (total_deleted_rows, all_non_identical_duplicates)

@@ -389,13 +385,6 @@ def delete_duplicates(instance: Recorder, session: scoped_session) -> None:
             backup_path,
         )

-    if deleted_statistics_rows >= MAX_DUPLICATES:
-        _LOGGER.warning(
-            "Found more than %s duplicated statistic rows, please report at "
-            'https://github.com/home-assistant/core/issues?q=is%%3Aissue+label%%3A"integration%%3A+recorder"+',
-            MAX_DUPLICATES - 1,
-        )
-
     deleted_short_term_statistics_rows, _ = _delete_duplicates_from_table(
         session, StatisticsShortTerm
     )


@@ -4,7 +4,7 @@
   "config_flow": true,
   "documentation": "https://www.home-assistant.io/integrations/renault",
   "requirements": [
-    "renault-api==0.1.7"
+    "renault-api==0.1.8"
   ],
   "codeowners": [
     "@epenet"


@@ -113,8 +113,9 @@ class Scene(RestoreEntity):
         self.async_write_ha_state()
         await self.async_activate(**kwargs)

-    async def async_added_to_hass(self) -> None:
-        """Call when the button is added to hass."""
+    async def async_internal_added_to_hass(self) -> None:
+        """Call when the scene is added to hass."""
+        await super().async_internal_added_to_hass()
         state = await self.async_get_last_state()
         if state is not None and state.state is not None:
             self.__last_activated = state.state


@@ -157,7 +157,7 @@ def _async_device_as_dict(hass: HomeAssistant, device: TuyaDevice) -> dict[str,
         state = hass.states.get(entity_entry.entity_id)
         state_dict = None
         if state:
-            state_dict = state.as_dict()
+            state_dict = dict(state.as_dict())

             # Redact the `entity_picture` attribute as it contains a token.
             if "entity_picture" in state_dict["attributes"]:


@@ -123,7 +123,7 @@ class ZwaveDimmer(ZWaveDeviceEntity, LightEntity):
         self._state = None
         self._color_mode = None
         self._supported_color_modes = set()
-        self._supported_features = None
+        self._supported_features = 0
         self._delay = delay
         self._refresh_value = refresh
         self._zw098 = None


@@ -99,7 +99,16 @@ async def async_validate_condition_config(
     # We return early if the config entry for this device is not ready because we can't
     # validate the value without knowing the state of the device
-    if async_is_device_config_entry_not_loaded(hass, config[CONF_DEVICE_ID]):
+    try:
+        device_config_entry_not_loaded = async_is_device_config_entry_not_loaded(
+            hass, config[CONF_DEVICE_ID]
+        )
+    except ValueError as err:
+        raise InvalidDeviceAutomationConfig(
+            f"Device {config[CONF_DEVICE_ID]} not found"
+        ) from err
+
+    if device_config_entry_not_loaded:
         return config

     if config[CONF_TYPE] == VALUE_TYPE:


@@ -217,7 +217,16 @@ async def async_validate_trigger_config(
     # We return early if the config entry for this device is not ready because we can't
     # validate the value without knowing the state of the device
-    if async_is_device_config_entry_not_loaded(hass, config[CONF_DEVICE_ID]):
+    try:
+        device_config_entry_not_loaded = async_is_device_config_entry_not_loaded(
+            hass, config[CONF_DEVICE_ID]
+        )
+    except ValueError as err:
+        raise InvalidDeviceAutomationConfig(
+            f"Device {config[CONF_DEVICE_ID]} not found"
+        ) from err
+
+    if device_config_entry_not_loaded:
         return config

     trigger_type = config[CONF_TYPE]


@@ -298,7 +298,8 @@ def async_is_device_config_entry_not_loaded(
     """Return whether device's config entries are not loaded."""
     dev_reg = dr.async_get(hass)
     device = dev_reg.async_get(device_id)
-    assert device
+    if device is None:
+        raise ValueError(f"Device {device_id} not found")
     return any(
         (entry := hass.config_entries.async_get_entry(entry_id))
         and entry.state != ConfigEntryState.LOADED


@@ -7,7 +7,7 @@ from .backports.enum import StrEnum

 MAJOR_VERSION: Final = 2022
 MINOR_VERSION: Final = 2
-PATCH_VERSION: Final = "1"
+PATCH_VERSION: Final = "2"
 __short_version__: Final = f"{MAJOR_VERSION}.{MINOR_VERSION}"
 __version__: Final = f"{__short_version__}.{PATCH_VERSION}"
 REQUIRED_PYTHON_VER: Final[tuple[int, int, int]] = (3, 9, 0)


@@ -41,8 +41,11 @@ async def async_get_system_info(hass: HomeAssistant) -> dict[str, Any]:

     # Determine installation type on current data
     if info_object["docker"]:
-        if info_object["user"] == "root":
+        if info_object["user"] == "root" and os.path.isfile("/OFFICIAL_IMAGE"):
             info_object["installation_type"] = "Home Assistant Container"
+        else:
+            info_object["installation_type"] = "Unsupported Third Party Container"
     elif is_virtual_env():
         info_object["installation_type"] = "Home Assistant Core"
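
Note: with this change a Docker install is only reported as "Home Assistant Container" when it runs as root and the /OFFICIAL_IMAGE marker file exists; every other container is flagged as unsupported. A condensed restatement of that branch (plain function, hypothetical inputs; the "Unknown" fallback is illustrative only):

import os


def installation_type(is_docker, user, is_virtualenv):
    """Condensed restatement of the patched branch above."""
    if is_docker:
        # Official images ship the /OFFICIAL_IMAGE marker file.
        if user == "root" and os.path.isfile("/OFFICIAL_IMAGE"):
            return "Home Assistant Container"
        return "Unsupported Third Party Container"
    if is_virtualenv:
        return "Home Assistant Core"
    return "Unknown"  # fallback for illustration only


print(installation_type(is_docker=True, user="root", is_virtualenv=False))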


@@ -531,13 +531,33 @@ def color_temperature_to_rgb(
 def color_temperature_to_rgbww(
     temperature: int, brightness: int, min_mireds: int, max_mireds: int
 ) -> tuple[int, int, int, int, int]:
-    """Convert color temperature to rgbcw."""
+    """Convert color temperature in mireds to rgbcw."""
     mired_range = max_mireds - min_mireds
-    warm = ((max_mireds - temperature) / mired_range) * brightness
-    cold = brightness - warm
+    cold = ((max_mireds - temperature) / mired_range) * brightness
+    warm = brightness - cold
     return (0, 0, 0, round(cold), round(warm))


+def rgbww_to_color_temperature(
+    rgbww: tuple[int, int, int, int, int], min_mireds: int, max_mireds: int
+) -> tuple[int, int]:
+    """Convert rgbcw to color temperature in mireds."""
+    _, _, _, cold, warm = rgbww
+    return while_levels_to_color_temperature(cold, warm, min_mireds, max_mireds)
+
+
+def while_levels_to_color_temperature(
+    cold: int, warm: int, min_mireds: int, max_mireds: int
+) -> tuple[int, int]:
+    """Convert whites to color temperature in mireds."""
+    brightness = warm / 255 + cold / 255
+    if brightness == 0:
+        return (max_mireds, 0)
+    return round(
+        ((cold / 255 / brightness) * (min_mireds - max_mireds)) + max_mireds
+    ), min(255, round(brightness * 255))
+
+
 def _clamp(color_component: float, minimum: float = 0, maximum: float = 255) -> float:
     """
     Clamp the given color component value between the given min and max values.
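
Note: the cold and warm white assignments were swapped in color_temperature_to_rgbww, so the cold channel now dominates at low mireds and the warm channel at high mireds, and the new rgbww_to_color_temperature helper provides the inverse. A quick sanity check of both (assuming the homeassistant package from this release is importable; 153-500 is a common mired range):

from homeassistant.util import color as color_util

MIN_MIREDS, MAX_MIREDS = 153, 500

# Coldest temperature at full brightness -> all cold white, no warm white.
assert color_util.color_temperature_to_rgbww(153, 255, MIN_MIREDS, MAX_MIREDS) == (0, 0, 0, 255, 0)

# Warmest temperature at full brightness -> all warm white.
assert color_util.color_temperature_to_rgbww(500, 255, MIN_MIREDS, MAX_MIREDS) == (0, 0, 0, 0, 255)

# The inverse helper recovers the temperature (mireds) and brightness.
assert color_util.rgbww_to_color_temperature((0, 0, 0, 255, 0), MIN_MIREDS, MAX_MIREDS) == (153, 255)
assert color_util.rgbww_to_color_temperature((0, 0, 0, 0, 255), MIN_MIREDS, MAX_MIREDS) == (500, 255)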


@@ -175,7 +175,7 @@ aioflo==2021.11.0
 aioftp==0.12.0

 # homeassistant.components.github
-aiogithubapi==22.1.0
+aiogithubapi==22.2.0

 # homeassistant.components.guardian
 aioguardian==2021.11.0

@@ -311,7 +311,7 @@ ambiclimate==0.2.1
 amcrest==1.9.3

 # homeassistant.components.androidtv
-androidtv[async]==0.0.61
+androidtv[async]==0.0.63

 # homeassistant.components.anel_pwrctrl
 anel_pwrctrl-homeassistant==0.0.1.dev2

@@ -438,7 +438,7 @@ blockchain==1.4.4
 bond-api==0.1.16

 # homeassistant.components.bosch_shc
-boschshcpy==0.2.28
+boschshcpy==0.2.29

 # homeassistant.components.amazon_polly
 # homeassistant.components.route53

@@ -681,7 +681,7 @@ fjaraskupan==1.0.2
 flipr-api==1.4.1

 # homeassistant.components.flux_led
-flux_led==0.28.17
+flux_led==0.28.20

 # homeassistant.components.homekit
 fnvhash==0.1.0

@@ -851,7 +851,7 @@ homeassistant-pyozw==0.1.10
 homeconnect==0.6.3

 # homeassistant.components.homematicip_cloud
-homematicip==1.0.1
+homematicip==1.0.2

 # homeassistant.components.home_plus_control
 homepluscontrol==0.0.5

@@ -2087,7 +2087,7 @@ raspyrfm-client==1.2.8
 regenmaschine==2022.01.0

 # homeassistant.components.renault
-renault-api==0.1.7
+renault-api==0.1.8

 # homeassistant.components.python_script
 restrictedpython==5.2


@@ -125,7 +125,7 @@ aioesphomeapi==10.8.1
 aioflo==2021.11.0

 # homeassistant.components.github
-aiogithubapi==22.1.0
+aiogithubapi==22.2.0

 # homeassistant.components.guardian
 aioguardian==2021.11.0

@@ -237,7 +237,7 @@ amberelectric==1.0.3
 ambiclimate==0.2.1

 # homeassistant.components.androidtv
-androidtv[async]==0.0.61
+androidtv[async]==0.0.63

 # homeassistant.components.apns
 apns2==0.3.0

@@ -291,7 +291,7 @@ blinkpy==0.18.0
 bond-api==0.1.16

 # homeassistant.components.bosch_shc
-boschshcpy==0.2.28
+boschshcpy==0.2.29

 # homeassistant.components.braviatv
 bravia-tv==1.0.11

@@ -427,7 +427,7 @@ fjaraskupan==1.0.2
 flipr-api==1.4.1

 # homeassistant.components.flux_led
-flux_led==0.28.17
+flux_led==0.28.20

 # homeassistant.components.homekit
 fnvhash==0.1.0

@@ -552,7 +552,7 @@ homeassistant-pyozw==0.1.10
 homeconnect==0.6.3

 # homeassistant.components.homematicip_cloud
-homematicip==1.0.1
+homematicip==1.0.2

 # homeassistant.components.home_plus_control
 homepluscontrol==0.0.5

@@ -1282,7 +1282,7 @@ rachiopy==1.0.3
 regenmaschine==2022.01.0

 # homeassistant.components.renault
-renault-api==0.1.7
+renault-api==0.1.8

 # homeassistant.components.python_script
 restrictedpython==5.2


@@ -1,6 +1,6 @@
 [metadata]
 name = homeassistant
-version = 2022.2.1
+version = 2022.2.2
 author = The Home Assistant Authors
 author_email = hello@home-assistant.io
 license = Apache-2.0


@@ -185,3 +185,15 @@ PATCH_ANDROIDTV_UPDATE_EXCEPTION = patch(
     "androidtv.androidtv.androidtv_async.AndroidTVAsync.update",
     side_effect=ZeroDivisionError,
 )
+
+PATCH_DEVICE_PROPERTIES = patch(
+    "androidtv.basetv.basetv_async.BaseTVAsync.get_device_properties",
+    return_value={
+        "manufacturer": "a",
+        "model": "b",
+        "serialno": "c",
+        "sw_version": "d",
+        "wifimac": "ab:cd:ef:gh:ij:kl",
+        "ethmac": None,
+    },
+)


@@ -157,8 +157,10 @@ async def test_setup_with_properties(hass):
     with patchers.PATCH_ADB_DEVICE_TCP, patchers.patch_connect(True)[
         patch_key
     ], patchers.patch_shell(response)[patch_key]:
-        assert await hass.config_entries.async_setup(config_entry.entry_id)
-        await hass.async_block_till_done()
+        with patchers.PATCH_DEVICE_PROPERTIES:
+            assert await hass.config_entries.async_setup(config_entry.entry_id)
+            await hass.async_block_till_done()

     state = hass.states.get(entity_id)
     assert state is not None

@@ -188,8 +190,9 @@ async def test_reconnect(hass, caplog, config):
     ], patchers.patch_shell(SHELL_RESPONSE_OFF)[
         patch_key
     ], patchers.PATCH_KEYGEN, patchers.PATCH_ANDROIDTV_OPEN, patchers.PATCH_SIGNER:
-        assert await hass.config_entries.async_setup(config_entry.entry_id)
-        await hass.async_block_till_done()
+        with patchers.PATCH_DEVICE_PROPERTIES:
+            assert await hass.config_entries.async_setup(config_entry.entry_id)
+            await hass.async_block_till_done()

     await hass.helpers.entity_component.async_update_entity(entity_id)
     state = hass.states.get(entity_id)

@@ -256,8 +259,10 @@ async def test_adb_shell_returns_none(hass, config):
     ], patchers.patch_shell(SHELL_RESPONSE_OFF)[
         patch_key
     ], patchers.PATCH_KEYGEN, patchers.PATCH_ANDROIDTV_OPEN, patchers.PATCH_SIGNER:
-        assert await hass.config_entries.async_setup(config_entry.entry_id)
-        await hass.async_block_till_done()
+        with patchers.PATCH_DEVICE_PROPERTIES:
+            assert await hass.config_entries.async_setup(config_entry.entry_id)
+            await hass.async_block_till_done()

     await hass.helpers.entity_component.async_update_entity(entity_id)
     state = hass.states.get(entity_id)
     assert state is not None

@@ -284,8 +289,10 @@ async def test_setup_with_adbkey(hass):
     ], patchers.patch_shell(SHELL_RESPONSE_OFF)[
         patch_key
     ], patchers.PATCH_ANDROIDTV_OPEN, patchers.PATCH_SIGNER, PATCH_ISFILE, PATCH_ACCESS:
-        assert await hass.config_entries.async_setup(config_entry.entry_id)
-        await hass.async_block_till_done()
+        with patchers.PATCH_DEVICE_PROPERTIES:
+            assert await hass.config_entries.async_setup(config_entry.entry_id)
+            await hass.async_block_till_done()

     await hass.helpers.entity_component.async_update_entity(entity_id)
     state = hass.states.get(entity_id)
     assert state is not None

@@ -317,8 +324,10 @@ async def test_sources(hass, config0):
     with patchers.PATCH_ADB_DEVICE_TCP, patchers.patch_connect(True)[
         patch_key
     ], patchers.patch_shell(SHELL_RESPONSE_OFF)[patch_key]:
-        assert await hass.config_entries.async_setup(config_entry.entry_id)
-        await hass.async_block_till_done()
+        with patchers.PATCH_DEVICE_PROPERTIES:
+            assert await hass.config_entries.async_setup(config_entry.entry_id)
+            await hass.async_block_till_done()

     await hass.helpers.entity_component.async_update_entity(entity_id)
     state = hass.states.get(entity_id)
     assert state is not None

@@ -395,8 +404,10 @@ async def _test_exclude_sources(hass, config0, expected_sources):
     with patchers.PATCH_ADB_DEVICE_TCP, patchers.patch_connect(True)[
         patch_key
     ], patchers.patch_shell(SHELL_RESPONSE_OFF)[patch_key]:
-        assert await hass.config_entries.async_setup(config_entry.entry_id)
-        await hass.async_block_till_done()
+        with patchers.PATCH_DEVICE_PROPERTIES:
+            assert await hass.config_entries.async_setup(config_entry.entry_id)
+            await hass.async_block_till_done()

     await hass.helpers.entity_component.async_update_entity(entity_id)
     state = hass.states.get(entity_id)
     assert state is not None

@@ -475,8 +486,10 @@ async def _test_select_source(hass, config0, source, expected_arg, method_patch):
     with patchers.PATCH_ADB_DEVICE_TCP, patchers.patch_connect(True)[
         patch_key
     ], patchers.patch_shell(SHELL_RESPONSE_OFF)[patch_key]:
-        assert await hass.config_entries.async_setup(config_entry.entry_id)
-        await hass.async_block_till_done()
+        with patchers.PATCH_DEVICE_PROPERTIES:
+            assert await hass.config_entries.async_setup(config_entry.entry_id)
+            await hass.async_block_till_done()

     await hass.helpers.entity_component.async_update_entity(entity_id)
     state = hass.states.get(entity_id)
     assert state is not None

@@ -701,8 +714,10 @@ async def test_setup_fail(hass, config):
     ], patchers.patch_shell(SHELL_RESPONSE_OFF)[
         patch_key
     ], patchers.PATCH_KEYGEN, patchers.PATCH_ANDROIDTV_OPEN, patchers.PATCH_SIGNER:
-        assert await hass.config_entries.async_setup(config_entry.entry_id) is False
-        await hass.async_block_till_done()
+        with patchers.PATCH_DEVICE_PROPERTIES:
+            assert await hass.config_entries.async_setup(config_entry.entry_id) is False
+            await hass.async_block_till_done()

     await hass.helpers.entity_component.async_update_entity(entity_id)
     state = hass.states.get(entity_id)
     assert state is None

@@ -718,8 +733,9 @@ async def test_adb_command(hass):
     with patchers.PATCH_ADB_DEVICE_TCP, patchers.patch_connect(True)[
         patch_key
     ], patchers.patch_shell(SHELL_RESPONSE_OFF)[patch_key]:
-        assert await hass.config_entries.async_setup(config_entry.entry_id)
-        await hass.async_block_till_done()
+        with patchers.PATCH_DEVICE_PROPERTIES:
+            assert await hass.config_entries.async_setup(config_entry.entry_id)
+            await hass.async_block_till_done()

     with patch(
         "androidtv.basetv.basetv_async.BaseTVAsync.adb_shell", return_value=response

@@ -747,8 +763,9 @@ async def test_adb_command_unicode_decode_error(hass):
     with patchers.PATCH_ADB_DEVICE_TCP, patchers.patch_connect(True)[
         patch_key
     ], patchers.patch_shell(SHELL_RESPONSE_OFF)[patch_key]:
-        assert await hass.config_entries.async_setup(config_entry.entry_id)
-        await hass.async_block_till_done()
+        with patchers.PATCH_DEVICE_PROPERTIES:
+            assert await hass.config_entries.async_setup(config_entry.entry_id)
+            await hass.async_block_till_done()

     with patch(
         "androidtv.basetv.basetv_async.BaseTVAsync.adb_shell",

@@ -776,8 +793,9 @@ async def test_adb_command_key(hass):
     with patchers.PATCH_ADB_DEVICE_TCP, patchers.patch_connect(True)[
         patch_key
     ], patchers.patch_shell(SHELL_RESPONSE_OFF)[patch_key]:
-        assert await hass.config_entries.async_setup(config_entry.entry_id)
-        await hass.async_block_till_done()
+        with patchers.PATCH_DEVICE_PROPERTIES:
+            assert await hass.config_entries.async_setup(config_entry.entry_id)
+            await hass.async_block_till_done()

     with patch(
         "androidtv.basetv.basetv_async.BaseTVAsync.adb_shell", return_value=response
@ -805,8 +823,9 @@ async def test_adb_command_get_properties(hass):
with patchers.PATCH_ADB_DEVICE_TCP, patchers.patch_connect(True)[ with patchers.PATCH_ADB_DEVICE_TCP, patchers.patch_connect(True)[
patch_key patch_key
], patchers.patch_shell(SHELL_RESPONSE_OFF)[patch_key]: ], patchers.patch_shell(SHELL_RESPONSE_OFF)[patch_key]:
assert await hass.config_entries.async_setup(config_entry.entry_id) with patchers.PATCH_DEVICE_PROPERTIES:
await hass.async_block_till_done() assert await hass.config_entries.async_setup(config_entry.entry_id)
await hass.async_block_till_done()
with patch( with patch(
"androidtv.androidtv.androidtv_async.AndroidTVAsync.get_properties_dict", "androidtv.androidtv.androidtv_async.AndroidTVAsync.get_properties_dict",
@ -834,8 +853,9 @@ async def test_learn_sendevent(hass):
with patchers.PATCH_ADB_DEVICE_TCP, patchers.patch_connect(True)[ with patchers.PATCH_ADB_DEVICE_TCP, patchers.patch_connect(True)[
patch_key patch_key
], patchers.patch_shell(SHELL_RESPONSE_OFF)[patch_key]: ], patchers.patch_shell(SHELL_RESPONSE_OFF)[patch_key]:
assert await hass.config_entries.async_setup(config_entry.entry_id) with patchers.PATCH_DEVICE_PROPERTIES:
await hass.async_block_till_done() assert await hass.config_entries.async_setup(config_entry.entry_id)
await hass.async_block_till_done()
with patch( with patch(
"androidtv.basetv.basetv_async.BaseTVAsync.learn_sendevent", "androidtv.basetv.basetv_async.BaseTVAsync.learn_sendevent",
@ -862,8 +882,9 @@ async def test_update_lock_not_acquired(hass):
with patchers.PATCH_ADB_DEVICE_TCP, patchers.patch_connect(True)[ with patchers.PATCH_ADB_DEVICE_TCP, patchers.patch_connect(True)[
patch_key patch_key
], patchers.patch_shell(SHELL_RESPONSE_OFF)[patch_key]: ], patchers.patch_shell(SHELL_RESPONSE_OFF)[patch_key]:
assert await hass.config_entries.async_setup(config_entry.entry_id) with patchers.PATCH_DEVICE_PROPERTIES:
await hass.async_block_till_done() assert await hass.config_entries.async_setup(config_entry.entry_id)
await hass.async_block_till_done()
with patchers.patch_shell(SHELL_RESPONSE_OFF)[patch_key]: with patchers.patch_shell(SHELL_RESPONSE_OFF)[patch_key]:
await hass.helpers.entity_component.async_update_entity(entity_id) await hass.helpers.entity_component.async_update_entity(entity_id)
@ -897,8 +918,9 @@ async def test_download(hass):
with patchers.PATCH_ADB_DEVICE_TCP, patchers.patch_connect(True)[ with patchers.PATCH_ADB_DEVICE_TCP, patchers.patch_connect(True)[
patch_key patch_key
], patchers.patch_shell(SHELL_RESPONSE_OFF)[patch_key]: ], patchers.patch_shell(SHELL_RESPONSE_OFF)[patch_key]:
assert await hass.config_entries.async_setup(config_entry.entry_id) with patchers.PATCH_DEVICE_PROPERTIES:
await hass.async_block_till_done() assert await hass.config_entries.async_setup(config_entry.entry_id)
await hass.async_block_till_done()
# Failed download because path is not whitelisted # Failed download because path is not whitelisted
with patch("androidtv.basetv.basetv_async.BaseTVAsync.adb_pull") as patch_pull: with patch("androidtv.basetv.basetv_async.BaseTVAsync.adb_pull") as patch_pull:
@ -943,8 +965,9 @@ async def test_upload(hass):
with patchers.PATCH_ADB_DEVICE_TCP, patchers.patch_connect(True)[ with patchers.PATCH_ADB_DEVICE_TCP, patchers.patch_connect(True)[
patch_key patch_key
], patchers.patch_shell(SHELL_RESPONSE_OFF)[patch_key]: ], patchers.patch_shell(SHELL_RESPONSE_OFF)[patch_key]:
assert await hass.config_entries.async_setup(config_entry.entry_id) with patchers.PATCH_DEVICE_PROPERTIES:
await hass.async_block_till_done() assert await hass.config_entries.async_setup(config_entry.entry_id)
await hass.async_block_till_done()
# Failed upload because path is not whitelisted # Failed upload because path is not whitelisted
with patch("androidtv.basetv.basetv_async.BaseTVAsync.adb_push") as patch_push: with patch("androidtv.basetv.basetv_async.BaseTVAsync.adb_push") as patch_push:
@ -987,8 +1010,9 @@ async def test_androidtv_volume_set(hass):
with patchers.PATCH_ADB_DEVICE_TCP, patchers.patch_connect(True)[ with patchers.PATCH_ADB_DEVICE_TCP, patchers.patch_connect(True)[
patch_key patch_key
], patchers.patch_shell(SHELL_RESPONSE_OFF)[patch_key]: ], patchers.patch_shell(SHELL_RESPONSE_OFF)[patch_key]:
assert await hass.config_entries.async_setup(config_entry.entry_id) with patchers.PATCH_DEVICE_PROPERTIES:
await hass.async_block_till_done() assert await hass.config_entries.async_setup(config_entry.entry_id)
await hass.async_block_till_done()
with patch( with patch(
"androidtv.basetv.basetv_async.BaseTVAsync.set_volume_level", return_value=0.5 "androidtv.basetv.basetv_async.BaseTVAsync.set_volume_level", return_value=0.5
@ -1014,8 +1038,9 @@ async def test_get_image(hass, hass_ws_client):
with patchers.PATCH_ADB_DEVICE_TCP, patchers.patch_connect(True)[ with patchers.PATCH_ADB_DEVICE_TCP, patchers.patch_connect(True)[
patch_key patch_key
], patchers.patch_shell(SHELL_RESPONSE_OFF)[patch_key]: ], patchers.patch_shell(SHELL_RESPONSE_OFF)[patch_key]:
assert await hass.config_entries.async_setup(config_entry.entry_id) with patchers.PATCH_DEVICE_PROPERTIES:
await hass.async_block_till_done() assert await hass.config_entries.async_setup(config_entry.entry_id)
await hass.async_block_till_done()
with patchers.patch_shell("11")[patch_key]: with patchers.patch_shell("11")[patch_key]:
await hass.helpers.entity_component.async_update_entity(entity_id) await hass.helpers.entity_component.async_update_entity(entity_id)
@ -1090,8 +1115,9 @@ async def test_services_androidtv(hass):
with patchers.PATCH_ADB_DEVICE_TCP, patchers.patch_connect(True)[patch_key]: with patchers.PATCH_ADB_DEVICE_TCP, patchers.patch_connect(True)[patch_key]:
with patchers.patch_shell(SHELL_RESPONSE_OFF)[patch_key]: with patchers.patch_shell(SHELL_RESPONSE_OFF)[patch_key]:
assert await hass.config_entries.async_setup(config_entry.entry_id) with patchers.PATCH_DEVICE_PROPERTIES:
await hass.async_block_till_done() assert await hass.config_entries.async_setup(config_entry.entry_id)
await hass.async_block_till_done()
with patchers.patch_shell(SHELL_RESPONSE_STANDBY)[patch_key]: with patchers.patch_shell(SHELL_RESPONSE_STANDBY)[patch_key]:
await _test_service( await _test_service(
@ -1136,8 +1162,9 @@ async def test_services_firetv(hass):
with patchers.PATCH_ADB_DEVICE_TCP, patchers.patch_connect(True)[patch_key]: with patchers.PATCH_ADB_DEVICE_TCP, patchers.patch_connect(True)[patch_key]:
with patchers.patch_shell(SHELL_RESPONSE_OFF)[patch_key]: with patchers.patch_shell(SHELL_RESPONSE_OFF)[patch_key]:
assert await hass.config_entries.async_setup(config_entry.entry_id) with patchers.PATCH_DEVICE_PROPERTIES:
await hass.async_block_till_done() assert await hass.config_entries.async_setup(config_entry.entry_id)
await hass.async_block_till_done()
with patchers.patch_shell(SHELL_RESPONSE_STANDBY)[patch_key]: with patchers.patch_shell(SHELL_RESPONSE_STANDBY)[patch_key]:
await _test_service(hass, entity_id, SERVICE_MEDIA_STOP, "back") await _test_service(hass, entity_id, SERVICE_MEDIA_STOP, "back")
@ -1152,8 +1179,9 @@ async def test_volume_mute(hass):
with patchers.PATCH_ADB_DEVICE_TCP, patchers.patch_connect(True)[patch_key]: with patchers.PATCH_ADB_DEVICE_TCP, patchers.patch_connect(True)[patch_key]:
with patchers.patch_shell(SHELL_RESPONSE_OFF)[patch_key]: with patchers.patch_shell(SHELL_RESPONSE_OFF)[patch_key]:
assert await hass.config_entries.async_setup(config_entry.entry_id) with patchers.PATCH_DEVICE_PROPERTIES:
await hass.async_block_till_done() assert await hass.config_entries.async_setup(config_entry.entry_id)
await hass.async_block_till_done()
with patchers.patch_shell(SHELL_RESPONSE_STANDBY)[patch_key]: with patchers.patch_shell(SHELL_RESPONSE_STANDBY)[patch_key]:
service_data = {ATTR_ENTITY_ID: entity_id, ATTR_MEDIA_VOLUME_MUTED: True} service_data = {ATTR_ENTITY_ID: entity_id, ATTR_MEDIA_VOLUME_MUTED: True}
@ -1196,8 +1224,9 @@ async def test_connection_closed_on_ha_stop(hass):
with patchers.PATCH_ADB_DEVICE_TCP, patchers.patch_connect(True)[ with patchers.PATCH_ADB_DEVICE_TCP, patchers.patch_connect(True)[
patch_key patch_key
], patchers.patch_shell(SHELL_RESPONSE_OFF)[patch_key]: ], patchers.patch_shell(SHELL_RESPONSE_OFF)[patch_key]:
assert await hass.config_entries.async_setup(config_entry.entry_id) with patchers.PATCH_DEVICE_PROPERTIES:
await hass.async_block_till_done() assert await hass.config_entries.async_setup(config_entry.entry_id)
await hass.async_block_till_done()
with patch( with patch(
"androidtv.androidtv.androidtv_async.AndroidTVAsync.adb_close" "androidtv.androidtv.androidtv_async.AndroidTVAsync.adb_close"
@ -1220,8 +1249,9 @@ async def test_exception(hass):
], patchers.patch_shell(SHELL_RESPONSE_OFF)[ ], patchers.patch_shell(SHELL_RESPONSE_OFF)[
patch_key patch_key
], patchers.PATCH_KEYGEN, patchers.PATCH_ANDROIDTV_OPEN, patchers.PATCH_SIGNER: ], patchers.PATCH_KEYGEN, patchers.PATCH_ANDROIDTV_OPEN, patchers.PATCH_SIGNER:
assert await hass.config_entries.async_setup(config_entry.entry_id) with patchers.PATCH_DEVICE_PROPERTIES:
await hass.async_block_till_done() assert await hass.config_entries.async_setup(config_entry.entry_id)
await hass.async_block_till_done()
await hass.helpers.entity_component.async_update_entity(entity_id) await hass.helpers.entity_component.async_update_entity(entity_id)
state = hass.states.get(entity_id) state = hass.states.get(entity_id)
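Every setup call in the tests above is now wrapped in patchers.PATCH_DEVICE_PROPERTIES, so loading the config entry no longer shells out to a real device for its MAC properties. A minimal sketch of what such a patcher could look like, assuming it lives in the test patchers helper and targets the library's property lookup (the target path and the returned keys are assumptions, not the actual helper):

from unittest.mock import patch

# Hypothetical patcher: return canned device properties instead of running
# "getprop" over ADB while the config entry is being set up.
PATCH_DEVICE_PROPERTIES = patch(
    "androidtv.androidtv.androidtv_async.AndroidTVAsync.get_device_properties",
    return_value={
        "manufacturer": "a manufacturer",
        "model": "a model",
        "serialno": "serialno",
        "sw_version": "a version",
        "ethmac": "a1:b1:c1:d1:e1:f1",
        "wifimac": None,
    },
)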
@ -4,7 +4,7 @@ from unittest.mock import AsyncMock, MagicMock, patch
from aiogithubapi import GitHubException from aiogithubapi import GitHubException
from homeassistant import config_entries from homeassistant import config_entries
from homeassistant.components.github.config_flow import starred_repositories from homeassistant.components.github.config_flow import get_repositories
from homeassistant.components.github.const import ( from homeassistant.components.github.const import (
CONF_ACCESS_TOKEN, CONF_ACCESS_TOKEN,
CONF_REPOSITORIES, CONF_REPOSITORIES,
@ -161,11 +161,19 @@ async def test_starred_pagination_with_paginated_result(hass: HomeAssistant) ->
last_page_number=2, last_page_number=2,
data=[MagicMock(full_name="home-assistant/core")], data=[MagicMock(full_name="home-assistant/core")],
) )
) ),
repos=AsyncMock(
return_value=MagicMock(
is_last_page=False,
next_page_number=2,
last_page_number=2,
data=[MagicMock(full_name="awesome/reposiotry")],
)
),
) )
), ),
): ):
repos = await starred_repositories(hass, MOCK_ACCESS_TOKEN) repos = await get_repositories(hass, MOCK_ACCESS_TOKEN)
assert len(repos) == 2 assert len(repos) == 2
assert repos[-1] == DEFAULT_REPOSITORIES[0] assert repos[-1] == DEFAULT_REPOSITORIES[0]
@ -182,11 +190,17 @@ async def test_starred_pagination_with_no_starred(hass: HomeAssistant) -> None:
is_last_page=True, is_last_page=True,
data=[], data=[],
) )
) ),
repos=AsyncMock(
return_value=MagicMock(
is_last_page=True,
data=[],
)
),
) )
), ),
): ):
repos = await starred_repositories(hass, MOCK_ACCESS_TOKEN) repos = await get_repositories(hass, MOCK_ACCESS_TOKEN)
assert len(repos) == 2 assert len(repos) == 2
assert repos == DEFAULT_REPOSITORIES assert repos == DEFAULT_REPOSITORIES
@ -200,7 +214,7 @@ async def test_starred_pagination_with_exception(hass: HomeAssistant) -> None:
user=MagicMock(starred=AsyncMock(side_effect=GitHubException("Error"))) user=MagicMock(starred=AsyncMock(side_effect=GitHubException("Error")))
), ),
): ):
repos = await starred_repositories(hass, MOCK_ACCESS_TOKEN) repos = await get_repositories(hass, MOCK_ACCESS_TOKEN)
assert len(repos) == 2 assert len(repos) == 2
assert repos == DEFAULT_REPOSITORIES assert repos == DEFAULT_REPOSITORIES
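The helper was renamed from starred_repositories to get_repositories, and the mocks above now stub both user.starred and user.repos, which suggests it merges the user's starred repositories with their own repositories and falls back to DEFAULT_REPOSITORIES on errors or empty results. A rough sketch consistent with those mocks; the pagination keyword and the default list are assumptions, not the integration's actual code:

from __future__ import annotations

from aiogithubapi import GitHubException

# Assumed fallback list; the integration's real defaults may differ.
DEFAULT_REPOSITORIES = ["home-assistant/core", "esphome/esphome"]


async def get_repositories_sketch(client) -> list[str]:
    """Merge starred and owned repositories, following pagination."""
    names: set[str] = set()
    try:
        for endpoint in (client.user.starred, client.user.repos):
            response = await endpoint()
            names.update(repo.full_name for repo in response.data)
            while not response.is_last_page:
                # The keyword used to request the next page is an assumption.
                response = await endpoint(page=response.next_page_number)
                names.update(repo.full_name for repo in response.data)
    except GitHubException:
        return DEFAULT_REPOSITORIES
    return sorted(names) or DEFAULT_REPOSITORIES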
@ -12,6 +12,8 @@ from homeassistant.data_entry_flow import RESULT_TYPE_ABORT, RESULT_TYPE_CREATE_
from .generator import get_mock_device from .generator import get_mock_device
from tests.common import MockConfigEntry
_LOGGER = logging.getLogger(__name__) _LOGGER = logging.getLogger(__name__)
@ -88,6 +90,37 @@ async def test_discovery_flow_works(hass, aioclient_mock):
assert result["result"].unique_id == "HWE-P1_aabbccddeeff" assert result["result"].unique_id == "HWE-P1_aabbccddeeff"
async def test_config_flow_imports_entry(aioclient_mock, hass):
"""Test config flow accepts imported configuration."""
device = get_mock_device()
mock_entry = MockConfigEntry(domain="homewizard_energy", data={"host": "1.2.3.4"})
mock_entry.add_to_hass(hass)
with patch("aiohwenergy.HomeWizardEnergy", return_value=device,), patch(
"homeassistant.components.homewizard.async_setup_entry",
return_value=True,
) as mock_setup_entry:
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={
"source": config_entries.SOURCE_IMPORT,
"old_config_entry_id": mock_entry.entry_id,
},
data=mock_entry.data,
)
assert result["type"] == "create_entry"
assert result["title"] == f"{device.device.product_name} (aabbccddeeff)"
assert result["data"][CONF_IP_ADDRESS] == "1.2.3.4"
assert len(hass.config_entries.async_entries(DOMAIN)) == 1
assert len(device.initialize.mock_calls) == 1
assert len(device.close.mock_calls) == 1
assert len(mock_setup_entry.mock_calls) == 1
async def test_discovery_disabled_api(hass, aioclient_mock): async def test_discovery_disabled_api(hass, aioclient_mock):
"""Test discovery detecting disabled api.""" """Test discovery detecting disabled api."""
@ -4,9 +4,11 @@ from unittest.mock import patch
from aiohwenergy import AiohwenergyException, DisabledError from aiohwenergy import AiohwenergyException, DisabledError
from homeassistant import config_entries
from homeassistant.components.homewizard.const import DOMAIN from homeassistant.components.homewizard.const import DOMAIN
from homeassistant.config_entries import ConfigEntryState from homeassistant.config_entries import ConfigEntryState
from homeassistant.const import CONF_IP_ADDRESS from homeassistant.const import CONF_IP_ADDRESS
from homeassistant.helpers import entity_registry as er
from .generator import get_mock_device from .generator import get_mock_device
@ -68,6 +70,94 @@ async def test_load_failed_host_unavailable(aioclient_mock, hass):
assert entry.state is ConfigEntryState.SETUP_RETRY assert entry.state is ConfigEntryState.SETUP_RETRY
async def test_init_accepts_and_migrates_old_entry(aioclient_mock, hass):
"""Test config flow accepts imported configuration."""
device = get_mock_device()
# Add original entry
original_entry = MockConfigEntry(
domain=DOMAIN,
data={CONF_IP_ADDRESS: "1.2.3.4"},
entry_id="old_id",
)
original_entry.add_to_hass(hass)
# Give it some entities to see if they migrate properly
ent_reg = er.async_get(hass)
old_entity_active_power = ent_reg.async_get_or_create(
"sensor",
"homewizard_energy",
"p1_active_power_unique_id",
config_entry=original_entry,
original_name="Active Power",
suggested_object_id="p1_active_power",
)
old_entity_switch = ent_reg.async_get_or_create(
"switch",
"homewizard_energy",
"socket_switch_unique_id",
config_entry=original_entry,
original_name="Switch",
suggested_object_id="socket_switch",
)
old_entity_disabled_sensor = ent_reg.async_get_or_create(
"sensor",
"homewizard_energy",
"socket_disabled_unique_id",
config_entry=original_entry,
original_name="Switch Disabled",
suggested_object_id="socket_disabled",
disabled_by=er.DISABLED_USER,
)
# Update some user-customs
ent_reg.async_update_entity(old_entity_active_power.entity_id, name="new_name")
ent_reg.async_update_entity(old_entity_switch.entity_id, icon="new_icon")
imported_entry = MockConfigEntry(
domain=DOMAIN,
data={CONF_IP_ADDRESS: "1.2.3.4", "old_config_entry_id": "old_id"},
source=config_entries.SOURCE_IMPORT,
entry_id="new_id",
)
imported_entry.add_to_hass(hass)
# Add the entry_id to trigger migration
with patch(
"aiohwenergy.HomeWizardEnergy",
return_value=device,
):
await hass.config_entries.async_setup(imported_entry.entry_id)
await hass.async_block_till_done()
assert original_entry.state is ConfigEntryState.NOT_LOADED
assert imported_entry.state is ConfigEntryState.LOADED
# Check if new entities are migrated
new_entity_active_power = ent_reg.async_get(old_entity_active_power.entity_id)
assert new_entity_active_power.platform == DOMAIN
assert new_entity_active_power.name == "new_name"
assert new_entity_active_power.icon is None
assert new_entity_active_power.original_name == "Active Power"
assert new_entity_active_power.unique_id == "p1_active_power_unique_id"
assert new_entity_active_power.disabled_by is None
new_entity_switch = ent_reg.async_get(old_entity_switch.entity_id)
assert new_entity_switch.platform == DOMAIN
assert new_entity_switch.name is None
assert new_entity_switch.icon == "new_icon"
assert new_entity_switch.original_name == "Switch"
assert new_entity_switch.unique_id == "socket_switch_unique_id"
assert new_entity_switch.disabled_by is None
new_entity_disabled_sensor = ent_reg.async_get(old_entity_disabled_sensor.entity_id)
assert new_entity_disabled_sensor.platform == DOMAIN
assert new_entity_disabled_sensor.name is None
assert new_entity_disabled_sensor.original_name == "Switch Disabled"
assert new_entity_disabled_sensor.unique_id == "socket_disabled_unique_id"
assert new_entity_disabled_sensor.disabled_by == er.DISABLED_USER
async def test_load_detect_api_disabled(aioclient_mock, hass): async def test_load_detect_api_disabled(aioclient_mock, hass):
"""Test setup detects disabled API.""" """Test setup detects disabled API."""
@ -1899,7 +1899,8 @@ async def test_light_service_call_color_temp_conversion(
_, data = entity0.last_call("turn_on") _, data = entity0.last_call("turn_on")
assert data == {"brightness": 255, "color_temp": 153} assert data == {"brightness": 255, "color_temp": 153}
_, data = entity1.last_call("turn_on") _, data = entity1.last_call("turn_on")
assert data == {"brightness": 255, "rgbww_color": (0, 0, 0, 0, 255)} # Home Assistant uses RGBCW so a mireds of 153 should be maximum cold at 100% brightness so 255
assert data == {"brightness": 255, "rgbww_color": (0, 0, 0, 255, 0)}
await hass.services.async_call( await hass.services.async_call(
"light", "light",
@ -1917,7 +1918,63 @@ async def test_light_service_call_color_temp_conversion(
_, data = entity0.last_call("turn_on") _, data = entity0.last_call("turn_on")
assert data == {"brightness": 128, "color_temp": 500} assert data == {"brightness": 128, "color_temp": 500}
_, data = entity1.last_call("turn_on") _, data = entity1.last_call("turn_on")
assert data == {"brightness": 128, "rgbww_color": (0, 0, 0, 128, 0)} # Home Assistant uses RGBCW so a mireds of 500 should be maximum warm at 50% brightness so 128
assert data == {"brightness": 128, "rgbww_color": (0, 0, 0, 0, 128)}
await hass.services.async_call(
"light",
"turn_on",
{
"entity_id": [
entity0.entity_id,
entity1.entity_id,
],
"brightness_pct": 100,
"color_temp": 327,
},
blocking=True,
)
_, data = entity0.last_call("turn_on")
assert data == {"brightness": 255, "color_temp": 327}
_, data = entity1.last_call("turn_on")
# Home Assistant uses RGBCW; 327 mireds is just past the midpoint, so at 100% brightness the channels round to 127 and 128
assert data == {"brightness": 255, "rgbww_color": (0, 0, 0, 127, 128)}
await hass.services.async_call(
"light",
"turn_on",
{
"entity_id": [
entity0.entity_id,
entity1.entity_id,
],
"brightness_pct": 100,
"color_temp": 240,
},
blocking=True,
)
_, data = entity0.last_call("turn_on")
assert data == {"brightness": 255, "color_temp": 240}
_, data = entity1.last_call("turn_on")
assert data == {"brightness": 255, "rgbww_color": (0, 0, 0, 191, 64)}
await hass.services.async_call(
"light",
"turn_on",
{
"entity_id": [
entity0.entity_id,
entity1.entity_id,
],
"brightness_pct": 100,
"color_temp": 410,
},
blocking=True,
)
_, data = entity0.last_call("turn_on")
assert data == {"brightness": 255, "color_temp": 410}
_, data = entity1.last_call("turn_on")
assert data == {"brightness": 255, "rgbww_color": (0, 0, 0, 66, 189)}
async def test_light_service_call_white_mode(hass, enable_custom_integrations): async def test_light_service_call_white_mode(hass, enable_custom_integrations):
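The expected rgbww_color tuples above follow from mapping the requested color temperature linearly onto the cold-white and warm-white channels and scaling by brightness. A small sketch of that arithmetic (not the helper in homeassistant.util.color, just the relationship these assertions imply):

def color_temperature_to_cw_ww(
    temperature: float, brightness: int, min_mireds: float, max_mireds: float
) -> tuple[int, int]:
    """Return (cold, warm) channel levels for a color temperature in mireds."""
    # 0.0 at the coldest supported temperature, 1.0 at the warmest.
    warm_ratio = (temperature - min_mireds) / (max_mireds - min_mireds)
    cold = round(brightness * (1 - warm_ratio))
    warm = round(brightness * warm_ratio)
    return cold, warm


# 153 mireds is fully cold, 500 mireds fully warm, 327 mireds roughly half and half.
assert color_temperature_to_cw_ww(153, 255, 153, 500) == (255, 0)
assert color_temperature_to_cw_ww(500, 128, 153, 500) == (0, 128)
assert color_temperature_to_cw_ww(327, 255, 153, 500) == (127, 128)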
@ -5,7 +5,7 @@ import importlib
import sqlite3 import sqlite3
import sys import sys
import threading import threading
from unittest.mock import ANY, Mock, PropertyMock, call, patch from unittest.mock import Mock, PropertyMock, call, patch
import pytest import pytest
from sqlalchemy import create_engine, text from sqlalchemy import create_engine, text
@ -57,7 +57,7 @@ async def test_schema_update_calls(hass):
assert recorder.util.async_migration_in_progress(hass) is False assert recorder.util.async_migration_in_progress(hass) is False
update.assert_has_calls( update.assert_has_calls(
[ [
call(hass.data[DATA_INSTANCE], ANY, version + 1, 0) call(hass.data[DATA_INSTANCE], version + 1, 0)
for version in range(0, models.SCHEMA_VERSION) for version in range(0, models.SCHEMA_VERSION)
] ]
) )
@ -309,7 +309,7 @@ async def test_schema_migrate(hass, start_version):
def test_invalid_update(): def test_invalid_update():
"""Test that an invalid new version raises an exception.""" """Test that an invalid new version raises an exception."""
with pytest.raises(ValueError): with pytest.raises(ValueError):
migration._apply_update(Mock(), Mock(), -1, 0) migration._apply_update(Mock(), -1, 0)
@pytest.mark.parametrize( @pytest.mark.parametrize(
@ -324,9 +324,13 @@ def test_invalid_update():
def test_modify_column(engine_type, substr): def test_modify_column(engine_type, substr):
"""Test that modify column generates the expected query.""" """Test that modify column generates the expected query."""
connection = Mock() connection = Mock()
session = Mock()
session.connection = Mock(return_value=connection)
instance = Mock()
instance.get_session = Mock(return_value=session)
engine = Mock() engine = Mock()
engine.dialect.name = engine_type engine.dialect.name = engine_type
migration._modify_columns(connection, engine, "events", ["event_type VARCHAR(64)"]) migration._modify_columns(instance, engine, "events", ["event_type VARCHAR(64)"])
if substr: if substr:
assert substr in connection.execute.call_args[0][0].text assert substr in connection.execute.call_args[0][0].text
else: else:
@ -338,8 +342,10 @@ def test_forgiving_add_column():
engine = create_engine("sqlite://", poolclass=StaticPool) engine = create_engine("sqlite://", poolclass=StaticPool)
with Session(engine) as session: with Session(engine) as session:
session.execute(text("CREATE TABLE hello (id int)")) session.execute(text("CREATE TABLE hello (id int)"))
migration._add_columns(session, "hello", ["context_id CHARACTER(36)"]) instance = Mock()
migration._add_columns(session, "hello", ["context_id CHARACTER(36)"]) instance.get_session = Mock(return_value=session)
migration._add_columns(instance, "hello", ["context_id CHARACTER(36)"])
migration._add_columns(instance, "hello", ["context_id CHARACTER(36)"])
def test_forgiving_add_index(): def test_forgiving_add_index():
@ -347,7 +353,9 @@ def test_forgiving_add_index():
engine = create_engine("sqlite://", poolclass=StaticPool) engine = create_engine("sqlite://", poolclass=StaticPool)
models.Base.metadata.create_all(engine) models.Base.metadata.create_all(engine)
with Session(engine) as session: with Session(engine) as session:
migration._create_index(session, "states", "ix_states_context_id") instance = Mock()
instance.get_session = Mock(return_value=session)
migration._create_index(instance, "states", "ix_states_context_id")
@pytest.mark.parametrize( @pytest.mark.parametrize(
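The migration helpers now receive the recorder instance instead of a raw session or connection and obtain their own session through instance.get_session(), which is why these tests build a mock instance whose get_session returns the session. A minimal sketch of that calling pattern (assumed shape, not the actual _add_columns implementation):

from sqlalchemy import text


def _add_columns_sketch(instance, table_name: str, columns_def: list[str]) -> None:
    """Add columns using a session obtained from the recorder instance."""
    session = instance.get_session()
    connection = session.connection()
    for column_def in columns_def:
        connection.execute(
            text(f"ALTER TABLE {table_name} ADD COLUMN {column_def}")
        )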
@ -852,7 +852,6 @@ def test_delete_duplicates(caplog, tmpdir):
assert "Deleted 2 duplicated statistics rows" in caplog.text assert "Deleted 2 duplicated statistics rows" in caplog.text
assert "Found non identical" not in caplog.text assert "Found non identical" not in caplog.text
assert "Found more than" not in caplog.text
assert "Found duplicated" not in caplog.text assert "Found duplicated" not in caplog.text
@ -989,7 +988,6 @@ def test_delete_duplicates_non_identical(caplog, tmpdir):
assert "Deleted 2 duplicated statistics rows" in caplog.text assert "Deleted 2 duplicated statistics rows" in caplog.text
assert "Deleted 1 non identical" in caplog.text assert "Deleted 1 non identical" in caplog.text
assert "Found more than" not in caplog.text
assert "Found duplicated" not in caplog.text assert "Found duplicated" not in caplog.text
isotime = dt_util.utcnow().isoformat() isotime = dt_util.utcnow().isoformat()
@ -1028,144 +1026,6 @@ def test_delete_duplicates_non_identical(caplog, tmpdir):
] ]
@patch.object(statistics, "MAX_DUPLICATES", 2)
def test_delete_duplicates_too_many(caplog, tmpdir):
"""Test removal of duplicated statistics."""
test_db_file = tmpdir.mkdir("sqlite").join("test_run_info.db")
dburl = f"{SQLITE_URL_PREFIX}//{test_db_file}"
module = "tests.components.recorder.models_schema_23"
importlib.import_module(module)
old_models = sys.modules[module]
period1 = dt_util.as_utc(dt_util.parse_datetime("2021-09-01 00:00:00"))
period2 = dt_util.as_utc(dt_util.parse_datetime("2021-09-30 23:00:00"))
period3 = dt_util.as_utc(dt_util.parse_datetime("2021-10-01 00:00:00"))
period4 = dt_util.as_utc(dt_util.parse_datetime("2021-10-31 23:00:00"))
external_energy_statistics_1 = (
{
"start": period1,
"last_reset": None,
"state": 0,
"sum": 2,
},
{
"start": period2,
"last_reset": None,
"state": 1,
"sum": 3,
},
{
"start": period3,
"last_reset": None,
"state": 2,
"sum": 4,
},
{
"start": period4,
"last_reset": None,
"state": 3,
"sum": 5,
},
{
"start": period4,
"last_reset": None,
"state": 3,
"sum": 5,
},
)
external_energy_metadata_1 = {
"has_mean": False,
"has_sum": True,
"name": "Total imported energy",
"source": "test",
"statistic_id": "test:total_energy_import_tariff_1",
"unit_of_measurement": "kWh",
}
external_energy_statistics_2 = (
{
"start": period1,
"last_reset": None,
"state": 0,
"sum": 20,
},
{
"start": period2,
"last_reset": None,
"state": 1,
"sum": 30,
},
{
"start": period3,
"last_reset": None,
"state": 2,
"sum": 40,
},
{
"start": period4,
"last_reset": None,
"state": 3,
"sum": 50,
},
{
"start": period4,
"last_reset": None,
"state": 3,
"sum": 50,
},
)
external_energy_metadata_2 = {
"has_mean": False,
"has_sum": True,
"name": "Total imported energy",
"source": "test",
"statistic_id": "test:total_energy_import_tariff_2",
"unit_of_measurement": "kWh",
}
# Create some duplicated statistics with schema version 23
with patch.object(recorder, "models", old_models), patch.object(
recorder.migration, "SCHEMA_VERSION", old_models.SCHEMA_VERSION
), patch(
"homeassistant.components.recorder.create_engine", new=_create_engine_test
):
hass = get_test_home_assistant()
setup_component(hass, "recorder", {"recorder": {"db_url": dburl}})
wait_recording_done(hass)
wait_recording_done(hass)
with session_scope(hass=hass) as session:
session.add(
recorder.models.StatisticsMeta.from_meta(external_energy_metadata_1)
)
session.add(
recorder.models.StatisticsMeta.from_meta(external_energy_metadata_2)
)
with session_scope(hass=hass) as session:
for stat in external_energy_statistics_1:
session.add(recorder.models.Statistics.from_stats(1, stat))
for stat in external_energy_statistics_2:
session.add(recorder.models.Statistics.from_stats(2, stat))
hass.stop()
# Test that the duplicates are removed during migration from schema 23
hass = get_test_home_assistant()
hass.config.config_dir = tmpdir
setup_component(hass, "recorder", {"recorder": {"db_url": dburl}})
hass.start()
wait_recording_done(hass)
wait_recording_done(hass)
hass.stop()
assert "Deleted 2 duplicated statistics rows" in caplog.text
assert "Found non identical" not in caplog.text
assert "Found more than 1 duplicated statistic rows" in caplog.text
assert "Found duplicated" not in caplog.text
@patch.object(statistics, "MAX_DUPLICATES", 2)
def test_delete_duplicates_short_term(caplog, tmpdir): def test_delete_duplicates_short_term(caplog, tmpdir):
"""Test removal of duplicated statistics.""" """Test removal of duplicated statistics."""
test_db_file = tmpdir.mkdir("sqlite").join("test_run_info.db") test_db_file = tmpdir.mkdir("sqlite").join("test_run_info.db")
@ -1228,7 +1088,6 @@ def test_delete_duplicates_short_term(caplog, tmpdir):
assert "duplicated statistics rows" not in caplog.text assert "duplicated statistics rows" not in caplog.text
assert "Found non identical" not in caplog.text assert "Found non identical" not in caplog.text
assert "Found more than" not in caplog.text
assert "Deleted duplicated short term statistic" in caplog.text assert "Deleted duplicated short term statistic" in caplog.text
@ -1240,7 +1099,6 @@ def test_delete_duplicates_no_duplicates(hass_recorder, caplog):
delete_duplicates(hass.data[DATA_INSTANCE], session) delete_duplicates(hass.data[DATA_INSTANCE], session)
assert "duplicated statistics rows" not in caplog.text assert "duplicated statistics rows" not in caplog.text
assert "Found non identical" not in caplog.text assert "Found non identical" not in caplog.text
assert "Found more than" not in caplog.text
assert "Found duplicated" not in caplog.text assert "Found duplicated" not in caplog.text
@ -39,7 +39,7 @@ def test_get_device_detects_dimmer(mock_openzwave):
device = light.get_device(node=node, values=values, node_config={}) device = light.get_device(node=node, values=values, node_config={})
assert isinstance(device, light.ZwaveDimmer) assert isinstance(device, light.ZwaveDimmer)
assert device.color_mode == COLOR_MODE_BRIGHTNESS assert device.color_mode == COLOR_MODE_BRIGHTNESS
assert device.supported_features is None assert device.supported_features == 0
assert device.supported_color_modes == {COLOR_MODE_BRIGHTNESS} assert device.supported_color_modes == {COLOR_MODE_BRIGHTNESS}
@ -52,7 +52,7 @@ def test_get_device_detects_colorlight(mock_openzwave):
device = light.get_device(node=node, values=values, node_config={}) device = light.get_device(node=node, values=values, node_config={})
assert isinstance(device, light.ZwaveColorLight) assert isinstance(device, light.ZwaveColorLight)
assert device.color_mode == COLOR_MODE_RGB assert device.color_mode == COLOR_MODE_RGB
assert device.supported_features is None assert device.supported_features == 0
assert device.supported_color_modes == {COLOR_MODE_RGB} assert device.supported_color_modes == {COLOR_MODE_RGB}
@ -68,7 +68,7 @@ def test_get_device_detects_zw098(mock_openzwave):
device = light.get_device(node=node, values=values, node_config={}) device = light.get_device(node=node, values=values, node_config={})
assert isinstance(device, light.ZwaveColorLight) assert isinstance(device, light.ZwaveColorLight)
assert device.color_mode == COLOR_MODE_RGB assert device.color_mode == COLOR_MODE_RGB
assert device.supported_features is None assert device.supported_features == 0
assert device.supported_color_modes == {COLOR_MODE_COLOR_TEMP, COLOR_MODE_RGB} assert device.supported_color_modes == {COLOR_MODE_COLOR_TEMP, COLOR_MODE_RGB}
@ -84,7 +84,7 @@ def test_get_device_detects_rgbw_light(mock_openzwave):
device.value_added() device.value_added()
assert isinstance(device, light.ZwaveColorLight) assert isinstance(device, light.ZwaveColorLight)
assert device.color_mode == COLOR_MODE_RGBW assert device.color_mode == COLOR_MODE_RGBW
assert device.supported_features is None assert device.supported_features == 0
assert device.supported_color_modes == {COLOR_MODE_RGBW} assert device.supported_color_modes == {COLOR_MODE_RGBW}
@ -596,6 +596,23 @@ async def test_failure_scenarios(hass, client, hank_binary_switch, integration):
== INVALID_CONFIG == INVALID_CONFIG
) )
# Test invalid device ID fails validation
with pytest.raises(InvalidDeviceAutomationConfig):
await device_condition.async_validate_condition_config(
hass,
{
"condition": "device",
"domain": DOMAIN,
"type": "value",
"device_id": "invalid_device_id",
"command_class": CommandClass.DOOR_LOCK.value,
"property": 9999,
"property_key": 9999,
"endpoint": 9999,
"value": 9999,
},
)
async def test_get_value_from_config_failure( async def test_get_value_from_config_failure(
hass, client, hank_binary_switch, integration hass, client, hank_binary_switch, integration
@ -1370,3 +1370,19 @@ async def test_failure_scenarios(hass, client, hank_binary_switch, integration):
await device_trigger.async_validate_trigger_config(hass, INVALID_CONFIG) await device_trigger.async_validate_trigger_config(hass, INVALID_CONFIG)
== INVALID_CONFIG == INVALID_CONFIG
) )
# Test invalid device ID fails validation
with pytest.raises(InvalidDeviceAutomationConfig):
await device_trigger.async_validate_trigger_config(
hass,
{
"platform": "device",
"domain": DOMAIN,
"device_id": "invalid_device_id",
"type": "zwave_js.value_updated.value",
"command_class": CommandClass.DOOR_LOCK.value,
"property": 9999,
"property_key": 9999,
"endpoint": 9999,
},
)
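Both additions above assert that validating a condition or trigger with an unknown device_id raises InvalidDeviceAutomationConfig up front rather than failing later. A hedged sketch of the kind of check that produces this behaviour (the helper name and import path are assumptions, not the integration's actual validator):

from homeassistant.components.device_automation.exceptions import (
    InvalidDeviceAutomationConfig,
)
from homeassistant.helpers import device_registry as dr


def assert_device_exists_sketch(hass, config: dict) -> None:
    """Raise if the automation config points at a device that is not registered."""
    dev_reg = dr.async_get(hass)
    if dev_reg.async_get(config["device_id"]) is None:
        raise InvalidDeviceAutomationConfig(
            f"Device {config['device_id']} not found"
        )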
@ -18,15 +18,15 @@ async def test_container_installationtype(hass):
"""Test container installation type.""" """Test container installation type."""
with patch("platform.system", return_value="Linux"), patch( with patch("platform.system", return_value="Linux"), patch(
"os.path.isfile", return_value=True "os.path.isfile", return_value=True
): ), patch("homeassistant.helpers.system_info.getuser", return_value="root"):
info = await hass.helpers.system_info.async_get_system_info() info = await hass.helpers.system_info.async_get_system_info()
assert info["installation_type"] == "Home Assistant Container" assert info["installation_type"] == "Home Assistant Container"
with patch("platform.system", return_value="Linux"), patch( with patch("platform.system", return_value="Linux"), patch(
"os.path.isfile", return_value=True "os.path.isfile", side_effect=lambda file: file == "/.dockerenv"
), patch("homeassistant.helpers.system_info.getuser", return_value="user"): ), patch("homeassistant.helpers.system_info.getuser", return_value="user"):
info = await hass.helpers.system_info.async_get_system_info() info = await hass.helpers.system_info.async_get_system_info()
assert info["installation_type"] == "Unknown" assert info["installation_type"] == "Unsupported Third Party Container"
async def test_getuser_keyerror(hass): async def test_getuser_keyerror(hass):
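The updated assertions imply the detection logic: a Linux host with /.dockerenv running as root is reported as the official container, while any other user inside a container is flagged as an unsupported third-party image. A small sketch of that decision, assuming the same helpers the test patches:

import os
import platform
from getpass import getuser


def container_installation_type_sketch() -> str:
    """Classify the installation based on /.dockerenv and the running user."""
    if platform.system() == "Linux" and os.path.isfile("/.dockerenv"):
        if getuser() == "root":
            return "Home Assistant Container"
        return "Unsupported Third Party Container"
    # Fallback for non-container hosts is assumed here.
    return "Unknown"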
@ -406,46 +406,137 @@ def test_color_rgb_to_rgbww():
def test_color_temperature_to_rgbww(): def test_color_temperature_to_rgbww():
"""Test color temp to warm, cold conversion.""" """Test color temp to warm, cold conversion.
Temperature values must be in mireds
Home Assistant uses rgbcw for rgbww
"""
assert color_util.color_temperature_to_rgbww(153, 255, 153, 500) == ( assert color_util.color_temperature_to_rgbww(153, 255, 153, 500) == (
0, 0,
0, 0,
0, 0,
0,
255, 255,
0,
) )
assert color_util.color_temperature_to_rgbww(153, 128, 153, 500) == ( assert color_util.color_temperature_to_rgbww(153, 128, 153, 500) == (
0, 0,
0, 0,
0, 0,
0,
128, 128,
0,
) )
assert color_util.color_temperature_to_rgbww(500, 255, 153, 500) == ( assert color_util.color_temperature_to_rgbww(500, 255, 153, 500) == (
0, 0,
0, 0,
0, 0,
255,
0, 0,
255,
) )
assert color_util.color_temperature_to_rgbww(500, 128, 153, 500) == ( assert color_util.color_temperature_to_rgbww(500, 128, 153, 500) == (
0, 0,
0, 0,
0, 0,
128,
0, 0,
128,
) )
assert color_util.color_temperature_to_rgbww(347, 255, 153, 500) == ( assert color_util.color_temperature_to_rgbww(347, 255, 153, 500) == (
0, 0,
0, 0,
0, 0,
143,
112, 112,
143,
) )
assert color_util.color_temperature_to_rgbww(347, 128, 153, 500) == ( assert color_util.color_temperature_to_rgbww(347, 128, 153, 500) == (
0, 0,
0, 0,
0, 0,
72,
56, 56,
72,
)
def test_rgbww_to_color_temperature():
"""Test rgbww conversion to color temp.
Temperature values must be in mireds
Home Assistant uses rgbcw for rgbww
"""
assert (
color_util.rgbww_to_color_temperature(
(
0,
0,
0,
255,
0,
),
153,
500,
)
== (153, 255)
)
assert color_util.rgbww_to_color_temperature((0, 0, 0, 128, 0), 153, 500) == (
153,
128,
)
assert color_util.rgbww_to_color_temperature((0, 0, 0, 0, 255), 153, 500) == (
500,
255,
)
assert color_util.rgbww_to_color_temperature((0, 0, 0, 0, 128), 153, 500) == (
500,
128,
)
assert color_util.rgbww_to_color_temperature((0, 0, 0, 112, 143), 153, 500) == (
348,
255,
)
assert color_util.rgbww_to_color_temperature((0, 0, 0, 56, 72), 153, 500) == (
348,
128,
)
assert color_util.rgbww_to_color_temperature((0, 0, 0, 0, 0), 153, 500) == (
500,
0,
)
def test_white_levels_to_color_temperature():
"""Test warm, cold conversion to color temp.
Temperature values must be in mireds
Home Assistant uses rgbcw for rgbww
"""
assert (
color_util.while_levels_to_color_temperature(
255,
0,
153,
500,
)
== (153, 255)
)
assert color_util.while_levels_to_color_temperature(128, 0, 153, 500) == (
153,
128,
)
assert color_util.while_levels_to_color_temperature(0, 255, 153, 500) == (
500,
255,
)
assert color_util.while_levels_to_color_temperature(0, 128, 153, 500) == (
500,
128,
)
assert color_util.while_levels_to_color_temperature(112, 143, 153, 500) == (
348,
255,
)
assert color_util.while_levels_to_color_temperature(56, 72, 153, 500) == (
348,
128,
)
assert color_util.while_levels_to_color_temperature(0, 0, 153, 500) == (
500,
0,
) )
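The expected pairs in these reverse-conversion tests follow from summing the white channels into a brightness and interpolating the warm share back onto the mired range. A short worked sketch of that round trip (the arithmetic the assertions imply, not necessarily the exact implementation in homeassistant.util.color):

def white_levels_to_temperature_sketch(
    cold: int, warm: int, min_mireds: float, max_mireds: float
) -> tuple[int, int]:
    """Map cold/warm channel levels back to (mireds, brightness)."""
    brightness = min(cold + warm, 255)
    if brightness == 0:
        # No white output at all: report the warm end with zero brightness.
        return round(max_mireds), 0
    mireds = min_mireds + (max_mireds - min_mireds) * warm / brightness
    return round(mireds), brightness


# Mirrors the expectations above: fully cold, fully warm, and a mixed level.
assert white_levels_to_temperature_sketch(255, 0, 153, 500) == (153, 255)
assert white_levels_to_temperature_sketch(0, 128, 153, 500) == (500, 128)
assert white_levels_to_temperature_sketch(112, 143, 153, 500) == (348, 255)
assert white_levels_to_temperature_sketch(0, 0, 153, 500) == (500, 0)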