Update ruff to 0.12.0 (#147106)

commit 2c13c70e12
parent 73d0d87705
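Most of the diff below is mechanical: deferred imports that were previously silenced with # pylint: disable=import-outside-toplevel (or disable-next) now carry Ruff's # noqa: PLC0415 instead, presumably because the 0.12.0 upgrade starts flagging import-outside-top-level on this codebase; logger calls that pass an explicit exc_info= argument gain # noqa: LOG014; and a couple of # noqa: RET503 suppressions move from the loop statement up to the enclosing function definition. A few small cleanups ride along: writelines() replaces per-item write() loops, set.discard() replaces guarded remove() calls, tuple/list unpacking replaces concatenation, and the Airly config flow helper test_location is renamed to check_location. A minimal sketch of the dominant pattern, using a hypothetical helper rather than code from this commit:

    # Before: deferred import, silenced for pylint only (hypothetical example, not from this repo)
    def _load_backend():
        """Import the backend lazily to avoid a circular import."""
        # pylint: disable-next=import-outside-toplevel
        from . import backend

        return backend


    # After: the same deferred import, silenced for Ruff's PLC0415 check instead
    def _load_backend():
        """Import the backend lazily to avoid a circular import."""
        from . import backend  # noqa: PLC0415

        return backend

Runtime behavior is identical in both forms; only the lint directive changes, which is why most hunks below are comment churn plus the occasional import reflowed to fit the new trailing comment.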
@@ -1,6 +1,6 @@
 repos:
   - repo: https://github.com/astral-sh/ruff-pre-commit
-    rev: v0.11.12
+    rev: v0.12.0
     hooks:
       - id: ruff-check
         args:
@@ -38,8 +38,7 @@ def validate_python() -> None:

 def ensure_config_path(config_dir: str) -> None:
     """Validate the configuration directory."""
-    # pylint: disable-next=import-outside-toplevel
-    from . import config as config_util
+    from . import config as config_util  # noqa: PLC0415

     lib_dir = os.path.join(config_dir, "deps")

@@ -80,8 +79,7 @@ def ensure_config_path(config_dir: str) -> None:

 def get_arguments() -> argparse.Namespace:
     """Get parsed passed in arguments."""
-    # pylint: disable-next=import-outside-toplevel
-    from . import config as config_util
+    from . import config as config_util  # noqa: PLC0415

     parser = argparse.ArgumentParser(
         description="Home Assistant: Observe, Control, Automate.",
@@ -177,8 +175,7 @@ def main() -> int:
     validate_os()

     if args.script is not None:
-        # pylint: disable-next=import-outside-toplevel
-        from . import scripts
+        from . import scripts  # noqa: PLC0415

         return scripts.run(args.script)

@@ -188,8 +185,7 @@ def main() -> int:

     ensure_config_path(config_dir)

-    # pylint: disable-next=import-outside-toplevel
-    from . import config, runner
+    from . import config, runner  # noqa: PLC0415

     safe_mode = config.safe_mode_enabled(config_dir)

@@ -52,28 +52,28 @@ _LOGGER = logging.getLogger(__name__)

 def _generate_secret() -> str:
     """Generate a secret."""
-    import pyotp  # pylint: disable=import-outside-toplevel
+    import pyotp  # noqa: PLC0415

     return str(pyotp.random_base32())


 def _generate_random() -> int:
     """Generate a 32 digit number."""
-    import pyotp  # pylint: disable=import-outside-toplevel
+    import pyotp  # noqa: PLC0415

     return int(pyotp.random_base32(length=32, chars=list("1234567890")))


 def _generate_otp(secret: str, count: int) -> str:
     """Generate one time password."""
-    import pyotp  # pylint: disable=import-outside-toplevel
+    import pyotp  # noqa: PLC0415

     return str(pyotp.HOTP(secret).at(count))


 def _verify_otp(secret: str, otp: str, count: int) -> bool:
     """Verify one time password."""
-    import pyotp  # pylint: disable=import-outside-toplevel
+    import pyotp  # noqa: PLC0415

     return bool(pyotp.HOTP(secret).verify(otp, count))

@@ -37,7 +37,7 @@ DUMMY_SECRET = "FPPTH34D4E3MI2HG"

 def _generate_qr_code(data: str) -> str:
     """Generate a base64 PNG string represent QR Code image of data."""
-    import pyqrcode  # pylint: disable=import-outside-toplevel
+    import pyqrcode  # noqa: PLC0415

     qr_code = pyqrcode.create(data)

@@ -59,7 +59,7 @@ def _generate_qr_code(data: str) -> str:

 def _generate_secret_and_qr_code(username: str) -> tuple[str, str, str]:
     """Generate a secret, url, and QR code."""
-    import pyotp  # pylint: disable=import-outside-toplevel
+    import pyotp  # noqa: PLC0415

     ota_secret = pyotp.random_base32()
     url = pyotp.totp.TOTP(ota_secret).provisioning_uri(
@@ -107,7 +107,7 @@ class TotpAuthModule(MultiFactorAuthModule):

     def _add_ota_secret(self, user_id: str, secret: str | None = None) -> str:
         """Create a ota_secret for user."""
-        import pyotp  # pylint: disable=import-outside-toplevel
+        import pyotp  # noqa: PLC0415

         ota_secret: str = secret or pyotp.random_base32()

@@ -163,7 +163,7 @@ class TotpAuthModule(MultiFactorAuthModule):

     def _validate_2fa(self, user_id: str, code: str) -> bool:
         """Validate two factor authentication code."""
-        import pyotp  # pylint: disable=import-outside-toplevel
+        import pyotp  # noqa: PLC0415

         if (ota_secret := self._users.get(user_id)) is None:  # type: ignore[union-attr]
             # even we cannot find user, we still do verify
@@ -196,7 +196,7 @@ class TotpSetupFlow(SetupFlow[TotpAuthModule]):
         Return self.async_show_form(step_id='init') if user_input is None.
         Return self.async_create_entry(data={'result': result}) if finish.
         """
-        import pyotp  # pylint: disable=import-outside-toplevel
+        import pyotp  # noqa: PLC0415

         errors: dict[str, str] = {}

@@ -394,7 +394,7 @@ async def async_setup_hass(

 def open_hass_ui(hass: core.HomeAssistant) -> None:
     """Open the UI."""
-    import webbrowser  # pylint: disable=import-outside-toplevel
+    import webbrowser  # noqa: PLC0415

     if hass.config.api is None or "frontend" not in hass.config.components:
         _LOGGER.warning("Cannot launch the UI because frontend not loaded")
@@ -561,8 +561,7 @@ async def async_enable_logging(

     if not log_no_color:
         try:
-            # pylint: disable-next=import-outside-toplevel
-            from colorlog import ColoredFormatter
+            from colorlog import ColoredFormatter  # noqa: PLC0415

             # basicConfig must be called after importing colorlog in order to
             # ensure that the handlers it sets up wraps the correct streams.
@@ -606,7 +605,7 @@ async def async_enable_logging(
     )
     threading.excepthook = lambda args: logging.getLogger().exception(
         "Uncaught thread exception",
-        exc_info=(  # type: ignore[arg-type]
+        exc_info=(  # type: ignore[arg-type]  # noqa: LOG014
            args.exc_type,
            args.exc_value,
            args.exc_traceback,
@@ -1060,5 +1059,5 @@ async def _async_setup_multi_components(
         _LOGGER.error(
             "Error setting up integration %s - received exception",
             domain,
-            exc_info=(type(result), result, result.__traceback__),
+            exc_info=(type(result), result, result.__traceback__),  # noqa: LOG014
         )
@@ -39,14 +39,14 @@ class AirlyFlowHandler(ConfigFlow, domain=DOMAIN):
             )
             self._abort_if_unique_id_configured()
             try:
-                location_point_valid = await test_location(
+                location_point_valid = await check_location(
                     websession,
                     user_input["api_key"],
                     user_input["latitude"],
                     user_input["longitude"],
                 )
                 if not location_point_valid:
-                    location_nearest_valid = await test_location(
+                    location_nearest_valid = await check_location(
                         websession,
                         user_input["api_key"],
                         user_input["latitude"],
@@ -88,7 +88,7 @@ class AirlyFlowHandler(ConfigFlow, domain=DOMAIN):
         )


-async def test_location(
+async def check_location(
     client: ClientSession,
     api_key: str,
     latitude: float,
@@ -12,7 +12,7 @@ DATA_BLUEPRINTS = "automation_blueprints"

 def _blueprint_in_use(hass: HomeAssistant, blueprint_path: str) -> bool:
     """Return True if any automation references the blueprint."""
-    from . import automations_with_blueprint  # pylint: disable=import-outside-toplevel
+    from . import automations_with_blueprint  # noqa: PLC0415

     return len(automations_with_blueprint(hass, blueprint_path)) > 0

@@ -28,8 +28,7 @@ async def _reload_blueprint_automations(
 @callback
 def async_get_blueprints(hass: HomeAssistant) -> blueprint.DomainBlueprints:
     """Get automation blueprints."""
-    # pylint: disable-next=import-outside-toplevel
-    from .config import AUTOMATION_BLUEPRINT_SCHEMA
+    from .config import AUTOMATION_BLUEPRINT_SCHEMA  # noqa: PLC0415

     return blueprint.DomainBlueprints(
         hass,
@@ -94,8 +94,10 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
     if not with_hassio:
         reader_writer = CoreBackupReaderWriter(hass)
     else:
-        # pylint: disable-next=import-outside-toplevel, hass-component-root-import
-        from homeassistant.components.hassio.backup import SupervisorBackupReaderWriter
+        # pylint: disable-next=hass-component-root-import
+        from homeassistant.components.hassio.backup import (  # noqa: PLC0415
+            SupervisorBackupReaderWriter,
+        )

         reader_writer = SupervisorBackupReaderWriter(hass)

@@ -54,10 +54,10 @@ class Control4RuntimeData:
 type Control4ConfigEntry = ConfigEntry[Control4RuntimeData]


-async def call_c4_api_retry(func, *func_args):
+async def call_c4_api_retry(func, *func_args):  # noqa: RET503
     """Call C4 API function and retry on failure."""
     # Ruff doesn't understand this loop - the exception is always raised after the retries
-    for i in range(API_RETRY_TIMES):  # noqa: RET503
+    for i in range(API_RETRY_TIMES):
         try:
             return await func(*func_args)
         except client_exceptions.ClientError as exception:
@@ -271,7 +271,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
     )

     # Temporary migration. We can remove this in 2024.10
-    from homeassistant.components.assist_pipeline import (  # pylint: disable=import-outside-toplevel
+    from homeassistant.components.assist_pipeline import (  # noqa: PLC0415
         async_migrate_engine,
     )

@@ -108,8 +108,7 @@ def download_file(service: ServiceCall) -> None:
             _LOGGER.debug("%s -> %s", url, final_path)

             with open(final_path, "wb") as fil:
-                for chunk in req.iter_content(1024):
-                    fil.write(chunk)
+                fil.writelines(req.iter_content(1024))

             _LOGGER.debug("Downloading of %s done", url)
             service.hass.bus.fire(
@@ -63,9 +63,7 @@ class ESPHomeDashboardManager:
         if not (data := self._data) or not (info := data.get("info")):
             return
         if is_hassio(self._hass):
-            from homeassistant.components.hassio import (  # pylint: disable=import-outside-toplevel
-                get_addons_info,
-            )
+            from homeassistant.components.hassio import get_addons_info  # noqa: PLC0415

             if (addons := get_addons_info(self._hass)) is not None and info[
                 "addon_slug"
@@ -364,8 +364,7 @@ def _frontend_root(dev_repo_path: str | None) -> pathlib.Path:
     if dev_repo_path is not None:
         return pathlib.Path(dev_repo_path) / "hass_frontend"
     # Keep import here so that we can import frontend without installing reqs
-    # pylint: disable-next=import-outside-toplevel
-    import hass_frontend
+    import hass_frontend  # noqa: PLC0415

     return hass_frontend.where()

@@ -212,8 +212,7 @@ class AbstractConfig(ABC):
     def async_enable_report_state(self) -> None:
         """Enable proactive mode."""
         # Circular dep
-        # pylint: disable-next=import-outside-toplevel
-        from .report_state import async_enable_report_state
+        from .report_state import async_enable_report_state  # noqa: PLC0415

         if self._unsub_report_state is None:
             self._unsub_report_state = async_enable_report_state(self.hass, self)
@@ -395,8 +394,7 @@ class AbstractConfig(ABC):
     async def _handle_local_webhook(self, hass, webhook_id, request):
         """Handle an incoming local SDK message."""
         # Circular dep
-        # pylint: disable-next=import-outside-toplevel
-        from . import smart_home
+        from . import smart_home  # noqa: PLC0415

         self._local_last_active = utcnow()

@@ -655,8 +653,9 @@ class GoogleEntity:
         if "matter" in self.hass.config.components and any(
             x for x in device_entry.identifiers if x[0] == "matter"
         ):
-            # pylint: disable-next=import-outside-toplevel
-            from homeassistant.components.matter import get_matter_device_info
+            from homeassistant.components.matter import (  # noqa: PLC0415
+                get_matter_device_info,
+            )

             # Import matter can block the event loop for multiple seconds
             # so we import it here to avoid blocking the event loop during
@@ -29,8 +29,7 @@ async def update_addon(
     client = get_supervisor_client(hass)

     if backup:
-        # pylint: disable-next=import-outside-toplevel
-        from .backup import backup_addon_before_update
+        from .backup import backup_addon_before_update  # noqa: PLC0415

         await backup_addon_before_update(hass, addon, addon_name, installed_version)

@@ -50,8 +49,7 @@ async def update_core(hass: HomeAssistant, version: str | None, backup: bool) ->
     client = get_supervisor_client(hass)

     if backup:
-        # pylint: disable-next=import-outside-toplevel
-        from .backup import backup_core_before_update
+        from .backup import backup_core_before_update  # noqa: PLC0415

         await backup_core_before_update(hass)

@@ -71,8 +69,7 @@ async def update_os(hass: HomeAssistant, version: str | None, backup: bool) -> N
     client = get_supervisor_client(hass)

     if backup:
-        # pylint: disable-next=import-outside-toplevel
-        from .backup import backup_core_before_update
+        from .backup import backup_core_before_update  # noqa: PLC0415

         await backup_core_before_update(hass)

@@ -309,8 +309,7 @@ class OptionsFlowHandler(OptionsFlow, ABC):

     def __init__(self, config_entry: ConfigEntry) -> None:
         """Set up the options flow."""
-        # pylint: disable-next=import-outside-toplevel
-        from homeassistant.components.zha.radio_manager import (
+        from homeassistant.components.zha.radio_manager import (  # noqa: PLC0415
             ZhaMultiPANMigrationHelper,
         )

@@ -451,16 +450,11 @@ class OptionsFlowHandler(OptionsFlow, ABC):
         self, user_input: dict[str, Any] | None = None
     ) -> ConfigFlowResult:
         """Configure the Silicon Labs Multiprotocol add-on."""
-        # pylint: disable-next=import-outside-toplevel
-        from homeassistant.components.zha import DOMAIN as ZHA_DOMAIN
-
-        # pylint: disable-next=import-outside-toplevel
-        from homeassistant.components.zha.radio_manager import (
+        from homeassistant.components.zha import DOMAIN as ZHA_DOMAIN  # noqa: PLC0415
+        from homeassistant.components.zha.radio_manager import (  # noqa: PLC0415
             ZhaMultiPANMigrationHelper,
         )
-
-        # pylint: disable-next=import-outside-toplevel
-        from homeassistant.components.zha.silabs_multiprotocol import (
+        from homeassistant.components.zha.silabs_multiprotocol import (  # noqa: PLC0415
             async_get_channel as async_get_zha_channel,
         )

@@ -747,11 +741,8 @@ class OptionsFlowHandler(OptionsFlow, ABC):
         self, user_input: dict[str, Any] | None = None
     ) -> ConfigFlowResult:
         """Perform initial backup and reconfigure ZHA."""
-        # pylint: disable-next=import-outside-toplevel
-        from homeassistant.components.zha import DOMAIN as ZHA_DOMAIN
-
-        # pylint: disable-next=import-outside-toplevel
-        from homeassistant.components.zha.radio_manager import (
+        from homeassistant.components.zha import DOMAIN as ZHA_DOMAIN  # noqa: PLC0415
+        from homeassistant.components.zha.radio_manager import (  # noqa: PLC0415
             ZhaMultiPANMigrationHelper,
         )

@@ -355,11 +355,10 @@ class HomekitControllerFlowHandler(ConfigFlow, domain=DOMAIN):
             return self.async_abort(reason="ignored_model")

         # Late imports in case BLE is not available
-        # pylint: disable-next=import-outside-toplevel
-        from aiohomekit.controller.ble.discovery import BleDiscovery
-
-        # pylint: disable-next=import-outside-toplevel
-        from aiohomekit.controller.ble.manufacturer_data import HomeKitAdvertisement
+        from aiohomekit.controller.ble.discovery import BleDiscovery  # noqa: PLC0415
+        from aiohomekit.controller.ble.manufacturer_data import (  # noqa: PLC0415
+            HomeKitAdvertisement,
+        )

         mfr_data = discovery_info.manufacturer_data

@@ -278,8 +278,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
         ssl_certificate is not None
         and (hass.config.external_url or hass.config.internal_url) is None
     ):
-        # pylint: disable-next=import-outside-toplevel
-        from homeassistant.components.cloud import (
+        from homeassistant.components.cloud import (  # noqa: PLC0415
             CloudNotAvailable,
             async_remote_ui_url,
         )
@@ -136,8 +136,7 @@ async def process_wrong_login(request: Request) -> None:
     _LOGGER.warning(log_msg)

     # Circular import with websocket_api
-    # pylint: disable=import-outside-toplevel
-    from homeassistant.components import persistent_notification
+    from homeassistant.components import persistent_notification  # noqa: PLC0415

     persistent_notification.async_create(
         hass, notification_msg, "Login attempt failed", NOTIFICATION_ID_LOGIN
@@ -444,8 +444,9 @@ class TimerManager:
         timer.finish()

         if timer.conversation_command:
-            # pylint: disable-next=import-outside-toplevel
-            from homeassistant.components.conversation import async_converse
+            from homeassistant.components.conversation import (  # noqa: PLC0415
+                async_converse,
+            )

             self.hass.async_create_background_task(
                 async_converse(
@@ -354,8 +354,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:

         def write_dump() -> None:
             with open(hass.config.path("mqtt_dump.txt"), "w", encoding="utf8") as fp:
-                for msg in messages:
-                    fp.write(",".join(msg) + "\n")
+                fp.writelines([",".join(msg) + "\n" for msg in messages])

         async def finish_dump(_: datetime) -> None:
             """Write dump to file."""
@@ -608,8 +607,7 @@ async def async_remove_config_entry_device(
     hass: HomeAssistant, config_entry: ConfigEntry, device_entry: DeviceEntry
 ) -> bool:
     """Remove MQTT config entry from a device."""
-    # pylint: disable-next=import-outside-toplevel
-    from . import device_automation
+    from . import device_automation  # noqa: PLC0415

     await device_automation.async_removed_from_device(hass, device_entry.id)
     return True
@@ -293,10 +293,9 @@ class MqttClientSetup:
         """
         # We don't import on the top because some integrations
         # should be able to optionally rely on MQTT.
-        from paho.mqtt import client as mqtt  # pylint: disable=import-outside-toplevel
+        from paho.mqtt import client as mqtt  # noqa: PLC0415

-        # pylint: disable-next=import-outside-toplevel
-        from .async_client import AsyncMQTTClient
+        from .async_client import AsyncMQTTClient  # noqa: PLC0415

         config = self._config
         clean_session: bool | None = None
@@ -524,8 +523,7 @@ class MQTT:
         """Start the misc periodic."""
         assert self._misc_timer is None, "Misc periodic already started"
         _LOGGER.debug("%s: Starting client misc loop", self.config_entry.title)
-        # pylint: disable=import-outside-toplevel
-        import paho.mqtt.client as mqtt
+        import paho.mqtt.client as mqtt  # noqa: PLC0415

         # Inner function to avoid having to check late import
         # each time the function is called.
@@ -665,8 +663,7 @@ class MQTT:

     async def async_connect(self, client_available: asyncio.Future[bool]) -> None:
         """Connect to the host. Does not process messages yet."""
-        # pylint: disable-next=import-outside-toplevel
-        import paho.mqtt.client as mqtt
+        import paho.mqtt.client as mqtt  # noqa: PLC0415

         result: int | None = None
         self._available_future = client_available
@@ -724,8 +721,7 @@ class MQTT:

     async def _reconnect_loop(self) -> None:
         """Reconnect to the MQTT server."""
-        # pylint: disable-next=import-outside-toplevel
-        import paho.mqtt.client as mqtt
+        import paho.mqtt.client as mqtt  # noqa: PLC0415

         while True:
             if not self.connected:
@@ -1228,7 +1224,7 @@ class MQTT:
         """Handle a callback exception."""
         # We don't import on the top because some integrations
         # should be able to optionally rely on MQTT.
-        import paho.mqtt.client as mqtt  # pylint: disable=import-outside-toplevel
+        import paho.mqtt.client as mqtt  # noqa: PLC0415

         _LOGGER.warning(
             "Error returned from MQTT server: %s",
@@ -1273,8 +1269,7 @@ class MQTT:
     ) -> None:
         """Wait for ACK from broker or raise on error."""
         if result_code != 0:
-            # pylint: disable-next=import-outside-toplevel
-            import paho.mqtt.client as mqtt
+            import paho.mqtt.client as mqtt  # noqa: PLC0415

             raise HomeAssistantError(
                 translation_domain=DOMAIN,
@@ -1322,8 +1317,7 @@ class MQTT:


 def _matcher_for_topic(subscription: str) -> Callable[[str], bool]:
-    # pylint: disable-next=import-outside-toplevel
-    from paho.mqtt.matcher import MQTTMatcher
+    from paho.mqtt.matcher import MQTTMatcher  # noqa: PLC0415

     matcher = MQTTMatcher()  # type: ignore[no-untyped-call]
     matcher[subscription] = True
@@ -3493,7 +3493,7 @@ def try_connection(
     """Test if we can connect to an MQTT broker."""
     # We don't import on the top because some integrations
     # should be able to optionally rely on MQTT.
-    import paho.mqtt.client as mqtt  # pylint: disable=import-outside-toplevel
+    import paho.mqtt.client as mqtt  # noqa: PLC0415

     mqtt_client_setup = MqttClientSetup(user_input)
     mqtt_client_setup.setup()
@@ -640,8 +640,7 @@ async def cleanup_device_registry(
     entities, triggers or tags.
     """
     # Local import to avoid circular dependencies
-    # pylint: disable-next=import-outside-toplevel
-    from . import device_trigger, tag
+    from . import device_trigger, tag  # noqa: PLC0415

     device_registry = dr.async_get(hass)
     entity_registry = er.async_get(hass)
@@ -163,16 +163,14 @@ async def async_forward_entry_setup_and_setup_discovery(
     tasks: list[asyncio.Task] = []
     if "device_automation" in new_platforms:
         # Local import to avoid circular dependencies
-        # pylint: disable-next=import-outside-toplevel
-        from . import device_automation
+        from . import device_automation  # noqa: PLC0415

         tasks.append(
             create_eager_task(device_automation.async_setup_entry(hass, config_entry))
         )
     if "tag" in new_platforms:
         # Local import to avoid circular dependencies
-        # pylint: disable-next=import-outside-toplevel
-        from . import tag
+        from . import tag  # noqa: PLC0415

         tasks.append(create_eager_task(tag.async_setup_entry(hass, config_entry)))
     if new_entity_platforms := (new_platforms - {"tag", "device_automation"}):
@@ -175,9 +175,7 @@ async def async_get_announce_addresses(hass: HomeAssistant) -> list[str]:
 async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
     """Set up network for Home Assistant."""
     # Avoid circular issue: http->network->websocket_api->http
-    from .websocket import (  # pylint: disable=import-outside-toplevel
-        async_register_websocket_commands,
-    )
+    from .websocket import async_register_websocket_commands  # noqa: PLC0415

     await async_get_network(hass)

@@ -282,8 +282,7 @@ class BaseNotificationService:

         for name, target in self.targets.items():
             target_name = slugify(f"{self._target_service_name_prefix}_{name}")
-            if target_name in stale_targets:
-                stale_targets.remove(target_name)
+            stale_targets.discard(target_name)
             if (
                 target_name in self.registered_targets
                 and target == self.registered_targets[target_name]
@@ -322,8 +322,9 @@ class OllamaConversationEntity(
             num_keep = 2 * max_messages + 1
             drop_index = len(message_history.messages) - num_keep
             message_history.messages = [
-                message_history.messages[0]
-            ] + message_history.messages[drop_index:]
+                message_history.messages[0],
+                *message_history.messages[drop_index:],
+            ]

     async def _async_entry_update_listener(
         self, hass: HomeAssistant, entry: ConfigEntry
@@ -218,8 +218,7 @@ class UserOnboardingView(_BaseOnboardingStepView):

         # Return authorization code for fetching tokens and connect
         # during onboarding.
-        # pylint: disable-next=import-outside-toplevel
-        from homeassistant.components.auth import create_auth_code
+        from homeassistant.components.auth import create_auth_code  # noqa: PLC0415

         auth_code = create_auth_code(hass, data["client_id"], credentials)
         return self.json({"auth_code": auth_code})
@@ -309,8 +308,7 @@ class IntegrationOnboardingView(_BaseOnboardingStepView):
         )

         # Return authorization code so we can redirect user and log them in
-        # pylint: disable-next=import-outside-toplevel
-        from homeassistant.components.auth import create_auth_code
+        from homeassistant.components.auth import create_auth_code  # noqa: PLC0415

         auth_code = create_auth_code(
             hass, data["client_id"], refresh_token.credential
@@ -166,7 +166,7 @@ async def async_setup_entry(  # noqa: C901
         # Imports deferred to avoid loading modules
         # in memory since usually only one part of this
         # integration is used at a time
-        import objgraph  # pylint: disable=import-outside-toplevel
+        import objgraph  # noqa: PLC0415

         obj_type = call.data[CONF_TYPE]

@@ -192,7 +192,7 @@ async def async_setup_entry(  # noqa: C901
         # Imports deferred to avoid loading modules
         # in memory since usually only one part of this
         # integration is used at a time
-        import objgraph  # pylint: disable=import-outside-toplevel
+        import objgraph  # noqa: PLC0415

         for lru in objgraph.by_type(_LRU_CACHE_WRAPPER_OBJECT):
             lru = cast(_lru_cache_wrapper, lru)
@@ -399,7 +399,7 @@ async def _async_generate_profile(hass: HomeAssistant, call: ServiceCall):
     # Imports deferred to avoid loading modules
     # in memory since usually only one part of this
     # integration is used at a time
-    import cProfile  # pylint: disable=import-outside-toplevel
+    import cProfile  # noqa: PLC0415

     start_time = int(time.time() * 1000000)
     persistent_notification.async_create(
@@ -436,7 +436,7 @@ async def _async_generate_memory_profile(hass: HomeAssistant, call: ServiceCall)
     # Imports deferred to avoid loading modules
     # in memory since usually only one part of this
     # integration is used at a time
-    from guppy import hpy  # pylint: disable=import-outside-toplevel
+    from guppy import hpy  # noqa: PLC0415

     start_time = int(time.time() * 1000000)
     persistent_notification.async_create(
@@ -467,7 +467,7 @@ def _write_profile(profiler, cprofile_path, callgrind_path):
     # Imports deferred to avoid loading modules
     # in memory since usually only one part of this
     # integration is used at a time
-    from pyprof2calltree import convert  # pylint: disable=import-outside-toplevel
+    from pyprof2calltree import convert  # noqa: PLC0415

     profiler.create_stats()
     profiler.dump_stats(cprofile_path)
@@ -482,14 +482,14 @@ def _log_objects(*_):
     # Imports deferred to avoid loading modules
     # in memory since usually only one part of this
     # integration is used at a time
-    import objgraph  # pylint: disable=import-outside-toplevel
+    import objgraph  # noqa: PLC0415

     _LOGGER.critical("Memory Growth: %s", objgraph.growth(limit=1000))


 def _get_function_absfile(func: Any) -> str | None:
     """Get the absolute file path of a function."""
-    import inspect  # pylint: disable=import-outside-toplevel
+    import inspect  # noqa: PLC0415

     abs_file: str | None = None
     with suppress(Exception):
@@ -510,7 +510,7 @@ def _safe_repr(obj: Any) -> str:


 def _find_backrefs_not_to_self(_object: Any) -> list[str]:
-    import objgraph  # pylint: disable=import-outside-toplevel
+    import objgraph  # noqa: PLC0415

     return [
         _safe_repr(backref)
@@ -526,7 +526,7 @@ def _log_object_sources(
     # Imports deferred to avoid loading modules
     # in memory since usually only one part of this
     # integration is used at a time
-    import gc  # pylint: disable=import-outside-toplevel
+    import gc  # noqa: PLC0415

     gc.collect()

@@ -242,7 +242,7 @@ def correct_db_schema_utf8(
         f"{table_name}.4-byte UTF-8" in schema_errors
         or f"{table_name}.utf8mb4_unicode_ci" in schema_errors
     ):
-        from ..migration import (  # pylint: disable=import-outside-toplevel
+        from ..migration import (  # noqa: PLC0415
             _correct_table_character_set_and_collation,
         )

@@ -258,9 +258,7 @@ def correct_db_schema_precision(
     table_name = table_object.__tablename__

     if f"{table_name}.double precision" in schema_errors:
-        from ..migration import (  # pylint: disable=import-outside-toplevel
-            _modify_columns,
-        )
+        from ..migration import _modify_columns  # noqa: PLC0415

         precision_columns = _get_precision_column_types(table_object)
         # Attempt to convert timestamp columns to µs precision
@@ -45,7 +45,7 @@ def get_full_significant_states_with_session(
 ) -> dict[str, list[State]]:
     """Return a dict of significant states during a time period."""
     if not get_instance(hass).states_meta_manager.active:
-        from .legacy import (  # pylint: disable=import-outside-toplevel
+        from .legacy import (  # noqa: PLC0415
             get_full_significant_states_with_session as _legacy_get_full_significant_states_with_session,
         )

@@ -70,7 +70,7 @@ def get_last_state_changes(
 ) -> dict[str, list[State]]:
     """Return the last number_of_states."""
     if not get_instance(hass).states_meta_manager.active:
-        from .legacy import (  # pylint: disable=import-outside-toplevel
+        from .legacy import (  # noqa: PLC0415
             get_last_state_changes as _legacy_get_last_state_changes,
         )

@@ -94,7 +94,7 @@ def get_significant_states(
 ) -> dict[str, list[State | dict[str, Any]]]:
     """Return a dict of significant states during a time period."""
     if not get_instance(hass).states_meta_manager.active:
-        from .legacy import (  # pylint: disable=import-outside-toplevel
+        from .legacy import (  # noqa: PLC0415
             get_significant_states as _legacy_get_significant_states,
         )

@@ -130,7 +130,7 @@ def get_significant_states_with_session(
 ) -> dict[str, list[State | dict[str, Any]]]:
     """Return a dict of significant states during a time period."""
     if not get_instance(hass).states_meta_manager.active:
-        from .legacy import (  # pylint: disable=import-outside-toplevel
+        from .legacy import (  # noqa: PLC0415
             get_significant_states_with_session as _legacy_get_significant_states_with_session,
         )

@@ -164,7 +164,7 @@ def state_changes_during_period(
 ) -> dict[str, list[State]]:
     """Return a list of states that changed during a time period."""
     if not get_instance(hass).states_meta_manager.active:
-        from .legacy import (  # pylint: disable=import-outside-toplevel
+        from .legacy import (  # noqa: PLC0415
             state_changes_during_period as _legacy_state_changes_during_period,
         )

@@ -90,7 +90,7 @@ class RecorderPool(SingletonThreadPool, NullPool):
         if threading.get_ident() in self.recorder_and_worker_thread_ids:
             super().dispose()

-    def _do_get(self) -> ConnectionPoolEntry:  # type: ignore[return]
+    def _do_get(self) -> ConnectionPoolEntry:  # type: ignore[return]  # noqa: RET503
         if threading.get_ident() in self.recorder_and_worker_thread_ids:
             return super()._do_get()
         try:
@@ -100,7 +100,7 @@ class RecorderPool(SingletonThreadPool, NullPool):
             # which is allowed but discouraged since its much slower
             return self._do_get_db_connection_protected()
         # In the event loop, raise an exception
-        raise_for_blocking_call(  # noqa: RET503
+        raise_for_blocking_call(
             self._do_get_db_connection_protected,
             strict=True,
             advise_msg=ADVISE_MSG,
@@ -2855,7 +2855,7 @@ def cleanup_statistics_timestamp_migration(instance: Recorder) -> bool:
         # to indicate we need to run again
         return False

-    from .migration import _drop_index  # pylint: disable=import-outside-toplevel
+    from .migration import _drop_index  # noqa: PLC0415

     for table in STATISTICS_TABLES:
         _drop_index(instance.get_session, table, f"ix_{table}_start")
@@ -258,7 +258,7 @@ def basic_sanity_check(cursor: SQLiteCursor) -> bool:

 def validate_sqlite_database(dbpath: str) -> bool:
     """Run a quick check on an sqlite database to see if it is corrupt."""
-    import sqlite3  # pylint: disable=import-outside-toplevel
+    import sqlite3  # noqa: PLC0415

     try:
         conn = sqlite3.connect(dbpath)
@@ -402,9 +402,8 @@ def _datetime_or_none(value: str) -> datetime | None:
 def build_mysqldb_conv() -> dict:
     """Build a MySQLDB conv dict that uses cisco8601 to parse datetimes."""
     # Late imports since we only call this if they are using mysqldb
-    # pylint: disable=import-outside-toplevel
-    from MySQLdb.constants import FIELD_TYPE
-    from MySQLdb.converters import conversions
+    from MySQLdb.constants import FIELD_TYPE  # noqa: PLC0415
+    from MySQLdb.converters import conversions  # noqa: PLC0415

     return {**conversions, FIELD_TYPE.DATETIME: _datetime_or_none}

@@ -264,8 +264,7 @@ class RMVDepartureData:
             for dest in self._destinations:
                 if dest in journey["stops"]:
                     dest_found = True
-                    if dest in _deps_not_found:
-                        _deps_not_found.remove(dest)
+                    _deps_not_found.discard(dest)
                     _nextdep["destination"] = dest

             if not dest_found:
@@ -12,7 +12,7 @@ DATA_BLUEPRINTS = "script_blueprints"

 def _blueprint_in_use(hass: HomeAssistant, blueprint_path: str) -> bool:
     """Return True if any script references the blueprint."""
-    from . import scripts_with_blueprint  # pylint: disable=import-outside-toplevel
+    from . import scripts_with_blueprint  # noqa: PLC0415

     return len(scripts_with_blueprint(hass, blueprint_path)) > 0

@@ -97,7 +97,7 @@ async def _async_find_next_available_port(source: AddressTupleVXType) -> int:
     test_socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)

     for port in range(UPNP_SERVER_MIN_PORT, UPNP_SERVER_MAX_PORT):
-        addr = (source[0],) + (port,) + source[2:]
+        addr = (source[0], port, *source[2:])
         try:
             test_socket.bind(addr)
         except OSError:
@@ -119,7 +119,7 @@ def _check_stream_client_error(

     Raise StreamOpenClientError if an http client error is encountered.
     """
-    from .worker import try_open_stream  # pylint: disable=import-outside-toplevel
+    from .worker import try_open_stream  # noqa: PLC0415

     pyav_options, _ = _convert_stream_options(hass, source, options or {})
     try:
@@ -234,7 +234,7 @@ CONFIG_SCHEMA = vol.Schema(

 def set_pyav_logging(enable: bool) -> None:
     """Turn PyAV logging on or off."""
-    import av  # pylint: disable=import-outside-toplevel
+    import av  # noqa: PLC0415

     av.logging.set_level(av.logging.VERBOSE if enable else av.logging.FATAL)

@@ -267,8 +267,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
     await hass.async_add_executor_job(set_pyav_logging, debug_enabled)

     # Keep import here so that we can import stream integration without installing reqs
-    # pylint: disable-next=import-outside-toplevel
-    from .recorder import async_setup_recorder
+    from .recorder import async_setup_recorder  # noqa: PLC0415

     hass.data[DOMAIN] = {}
     hass.data[DOMAIN][ATTR_ENDPOINTS] = {}
@@ -460,8 +459,7 @@ class Stream:
     def _run_worker(self) -> None:
         """Handle consuming streams and restart keepalive streams."""
         # Keep import here so that we can import stream integration without installing reqs
-        # pylint: disable-next=import-outside-toplevel
-        from .worker import StreamState, stream_worker
+        from .worker import StreamState, stream_worker  # noqa: PLC0415

         stream_state = StreamState(self.hass, self.outputs, self._diagnostics)
         wait_timeout = 0
@@ -556,8 +554,7 @@ class Stream:
         """Make a .mp4 recording from a provided stream."""

         # Keep import here so that we can import stream integration without installing reqs
-        # pylint: disable-next=import-outside-toplevel
-        from .recorder import RecorderOutput
+        from .recorder import RecorderOutput  # noqa: PLC0415

         # Check for file access
         if not self.hass.config.is_allowed_path(video_path):
@ -439,8 +439,9 @@ class KeyFrameConverter:
|
|||||||
|
|
||||||
# Keep import here so that we can import stream integration
|
# Keep import here so that we can import stream integration
|
||||||
# without installing reqs
|
# without installing reqs
|
||||||
# pylint: disable-next=import-outside-toplevel
|
from homeassistant.components.camera.img_util import ( # noqa: PLC0415
|
||||||
from homeassistant.components.camera.img_util import TurboJPEGSingleton
|
TurboJPEGSingleton,
|
||||||
|
)
|
||||||
|
|
||||||
self._packet: Packet | None = None
|
self._packet: Packet | None = None
|
||||||
self._event: asyncio.Event = asyncio.Event()
|
self._event: asyncio.Event = asyncio.Event()
|
||||||
@ -471,8 +472,7 @@ class KeyFrameConverter:
|
|||||||
|
|
||||||
# Keep import here so that we can import stream integration without
|
# Keep import here so that we can import stream integration without
|
||||||
# installing reqs
|
# installing reqs
|
||||||
# pylint: disable-next=import-outside-toplevel
|
from av import CodecContext # noqa: PLC0415
|
||||||
from av import CodecContext
|
|
||||||
|
|
||||||
self._codec_context = cast(
|
self._codec_context = cast(
|
||||||
"VideoCodecContext", CodecContext.create(codec_context.name, "r")
|
"VideoCodecContext", CodecContext.create(codec_context.name, "r")
|
||||||
|
@@ -146,11 +146,11 @@ def get_codec_string(mp4_bytes: bytes) -> str:
 return ",".join(codecs)
-def find_moov(mp4_io: BufferedIOBase) -> int:
+def find_moov(mp4_io: BufferedIOBase) -> int: # noqa: RET503
 """Find location of moov atom in a BufferedIOBase mp4."""
 index = 0
 # Ruff doesn't understand this loop - the exception is always raised at the end
-while 1: # noqa: RET503
+while 1:
 mp4_io.seek(index)
 box_header = mp4_io.read(8)
 if len(box_header) != 8 or box_header[0:4] == b"\x00\x00\x00\x00":
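Context for the RET503 move above: RET503 is ruff's implicit-return rule, which fires when a function that returns a value appears able to fall off the end without an explicit return. Because ruff's flow analysis does not see that the `while 1:` loop always returns or raises, the suppression now sits on the `def` line instead of the loop. A hedged, stand-alone sketch of the same shape (hypothetical first_even, not from the commit):

def first_even(values: list[int]) -> int:  # noqa: RET503
    """Return the first even value; the trailing raise ends every other path."""
    index = 0
    # Mirrors the pattern in the hunk above: the loop either returns or raises,
    # but a linter may still report a possible implicit return at the end.
    while 1:
        if index >= len(values):
            raise ValueError("no even value found")
        if values[index] % 2 == 0:
            return values[index]
        index += 1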
@@ -231,7 +231,7 @@ async def handle_info(
 "Error fetching system info for %s - %s",
 domain,
 key,
-exc_info=(type(exception), exception, exception.__traceback__),
+exc_info=(type(exception), exception, exception.__traceback__), # noqa: LOG014
 )
 event_msg["success"] = False
 event_msg["error"] = {"type": "failed", "error": "unknown"}
@@ -54,8 +54,7 @@ async def _reload_blueprint_templates(hass: HomeAssistant, blueprint_path: str)
 @callback
 def async_get_blueprints(hass: HomeAssistant) -> blueprint.DomainBlueprints:
 """Get template blueprints."""
-# pylint: disable-next=import-outside-toplevel
-from .config import TEMPLATE_BLUEPRINT_SCHEMA
+from .config import TEMPLATE_BLUEPRINT_SCHEMA # noqa: PLC0415
 return blueprint.DomainBlueprints(
 hass,
@@ -536,7 +536,6 @@ class AbstractTemplateLight(AbstractTemplateEntity, LightEntity):
 effect,
 self.entity_id,
 self._effect_list,
-exc_info=True,
 )
 common_params["effect"] = effect
@@ -156,9 +156,8 @@ def setup_platform(
 # These imports shouldn't be moved to the top, because they depend on code from the model_dir.
 # (The model_dir is created during the manual setup process. See integration docs.)
-# pylint: disable=import-outside-toplevel
-from object_detection.builders import model_builder
-from object_detection.utils import config_util, label_map_util
+from object_detection.builders import model_builder # noqa: PLC0415
+from object_detection.utils import config_util, label_map_util # noqa: PLC0415
 except ImportError:
 _LOGGER.error(
 "No TensorFlow Object Detection library found! Install or compile "
@@ -169,7 +168,7 @@ def setup_platform(
 try:
 # Display warning that PIL will be used if no OpenCV is found.
-import cv2 # noqa: F401 pylint: disable=import-outside-toplevel
+import cv2 # noqa: F401, PLC0415
 except ImportError:
 _LOGGER.warning(
 "No OpenCV library found. TensorFlow will process image with "
@@ -354,7 +353,7 @@ class TensorFlowImageProcessor(ImageProcessingEntity):
 start = time.perf_counter()
 try:
-import cv2 # pylint: disable=import-outside-toplevel
+import cv2 # noqa: PLC0415
 img = cv2.imdecode(np.asarray(bytearray(image)), cv2.IMREAD_UNCHANGED)
 inp = img[:, :, [2, 1, 0]] # BGR->RGB
@@ -117,9 +117,7 @@ def _get_neighbours(ndb: NDB) -> dict[str, Neighbour]:
 def _get_routes_and_neighbors():
 """Get the routes and neighbours from pyroute2."""
 # Import in the executor since import NDB can take a while
-from pyroute2 import ( # pylint: disable=no-name-in-module, import-outside-toplevel
-NDB,
-)
+from pyroute2 import NDB # pylint: disable=no-name-in-module # noqa: PLC0415
 with NDB() as ndb:
 routes, reverse_routes = _get_possible_thread_routes(ndb)
@@ -317,8 +317,7 @@ class TPLinkSensorEntity(CoordinatedTPLinkFeatureEntity, SensorEntity):
 value = self.entity_description.convert_fn(value)
 if TYPE_CHECKING:
-# pylint: disable-next=import-outside-toplevel
-from datetime import date, datetime
+from datetime import date, datetime # noqa: PLC0415
 assert isinstance(value, str | int | float | date | datetime | None)
@@ -40,7 +40,7 @@ def generate_media_source_id(
 cache: bool | None = None,
 ) -> str:
 """Generate a media source ID for text-to-speech."""
-from . import async_resolve_engine # pylint: disable=import-outside-toplevel
+from . import async_resolve_engine # noqa: PLC0415
 if (engine := async_resolve_engine(hass, engine)) is None:
 raise HomeAssistantError("Invalid TTS provider selected")
@@ -193,7 +193,7 @@ class TTSMediaSource(MediaSource):
 @callback
 def _engine_item(self, engine: str, params: str | None = None) -> BrowseMediaSource:
 """Return provider item."""
-from . import TextToSpeechEntity # pylint: disable=import-outside-toplevel
+from . import TextToSpeechEntity # noqa: PLC0415
 if (engine_instance := get_engine_instance(self.hass, engine)) is None:
 raise BrowseError("Unknown provider")
@@ -94,7 +94,7 @@ class SharingMQCompat(SharingMQ):
 """Start the MQTT client."""
 # We don't import on the top because some integrations
 # should be able to optionally rely on MQTT.
-import paho.mqtt.client as mqtt # pylint: disable=import-outside-toplevel
+import paho.mqtt.client as mqtt # noqa: PLC0415
 mqttc = mqtt.Client(client_id=mq_config.client_id)
 mqttc.username_pw_set(mq_config.username, mq_config.password)
@@ -735,8 +735,7 @@ async def handle_subscribe_trigger(
 ) -> None:
 """Handle subscribe trigger command."""
 # Circular dep
-# pylint: disable-next=import-outside-toplevel
-from homeassistant.helpers import trigger
+from homeassistant.helpers import trigger # noqa: PLC0415
 trigger_config = await trigger.async_validate_trigger_config(hass, msg["trigger"])
@@ -786,8 +785,7 @@ async def handle_test_condition(
 ) -> None:
 """Handle test condition command."""
 # Circular dep
-# pylint: disable-next=import-outside-toplevel
-from homeassistant.helpers import condition
+from homeassistant.helpers import condition # noqa: PLC0415
 # Do static + dynamic validation of the condition
 config = await condition.async_validate_condition_config(hass, msg["condition"])
@@ -812,8 +810,10 @@ async def handle_execute_script(
 ) -> None:
 """Handle execute script command."""
 # Circular dep
-# pylint: disable-next=import-outside-toplevel
-from homeassistant.helpers.script import Script, async_validate_actions_config
+from homeassistant.helpers.script import ( # noqa: PLC0415
+Script,
+async_validate_actions_config,
+)
 script_config = await async_validate_actions_config(hass, msg["sequence"])
@@ -877,8 +877,7 @@ async def handle_validate_config(
 ) -> None:
 """Handle validate config command."""
 # Circular dep
-# pylint: disable-next=import-outside-toplevel
-from homeassistant.helpers import condition, script, trigger
+from homeassistant.helpers import condition, script, trigger # noqa: PLC0415
 result = {}
@@ -772,7 +772,7 @@ async def websocket_device_cluster_commands(
 hass: HomeAssistant, connection: ActiveConnection, msg: dict[str, Any]
 ) -> None:
 """Return a list of cluster commands."""
-import voluptuous_serialize # pylint: disable=import-outside-toplevel
+import voluptuous_serialize # noqa: PLC0415
 zha_gateway = get_zha_gateway(hass)
 ieee: EUI64 = msg[ATTR_IEEE]
@@ -1080,7 +1080,7 @@ async def websocket_get_configuration(
 ) -> None:
 """Get ZHA configuration."""
 config_entry: ConfigEntry = get_config_entry(hass)
-import voluptuous_serialize # pylint: disable=import-outside-toplevel
+import voluptuous_serialize # noqa: PLC0415
 def custom_serializer(schema: Any) -> Any:
 """Serialize additional types for voluptuous_serialize."""
@@ -166,9 +166,9 @@ async def async_attach_trigger(
 if (
 config[ATTR_PARTIAL_DICT_MATCH]
 and isinstance(event_data[key], dict)
-and isinstance(event_data_filter[key], dict)
+and isinstance(val, dict)
 ):
-for key2, val2 in event_data_filter[key].items():
+for key2, val2 in val.items():
 if key2 not in event_data[key] or event_data[key][key2] != val2:
 return
 continue
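The hunk above reuses the `val` already yielded by the enclosing `for key, val in event_data_filter.items():` loop instead of re-indexing `event_data_filter[key]` (the enclosing loop is implied by the new code, not shown in the hunk). A minimal stand-alone illustration with hypothetical names:

def partial_dict_match(
    event_data: dict[str, dict[str, int]],
    data_filter: dict[str, dict[str, int]],
) -> bool:
    """Check that every nested key/value in the filter appears in the event data."""
    for key, val in data_filter.items():
        if key not in event_data or not isinstance(val, dict):
            return False
        for key2, val2 in val.items():  # reuse val instead of data_filter[key]
            if key2 not in event_data[key] or event_data[key][key2] != val2:
                return False
    return True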
@@ -1321,8 +1321,7 @@ async def async_check_ha_config_file(hass: HomeAssistant) -> str | None:
 This method is a coroutine.
 """
-# pylint: disable-next=import-outside-toplevel
-from .helpers import check_config
+from .helpers import check_config # noqa: PLC0415
 res = await check_config.async_check_ha_config_file(hass)
@@ -179,8 +179,7 @@ class EventStateReportedData(EventStateEventData):
 def _deprecated_core_config() -> Any:
-# pylint: disable-next=import-outside-toplevel
-from . import core_config
+from . import core_config # noqa: PLC0415
 return core_config.Config
@@ -428,8 +427,7 @@ class HomeAssistant:
 def __init__(self, config_dir: str) -> None:
 """Initialize new Home Assistant object."""
-# pylint: disable-next=import-outside-toplevel
-from .core_config import Config
+from .core_config import Config # noqa: PLC0415
 # This is a dictionary that any component can store any data on.
 self.data = HassDict()
@@ -458,7 +456,7 @@ class HomeAssistant:
 """Report and raise if we are not running in the event loop thread."""
 if self.loop_thread_id != threading.get_ident():
 # frame is a circular import, so we import it here
-from .helpers import frame # pylint: disable=import-outside-toplevel
+from .helpers import frame # noqa: PLC0415
 frame.report_non_thread_safe_operation(what)
@@ -522,8 +520,7 @@ class HomeAssistant:
 await self.async_start()
 if attach_signals:
-# pylint: disable-next=import-outside-toplevel
-from .helpers.signal import async_register_signal_handling
+from .helpers.signal import async_register_signal_handling # noqa: PLC0415
 async_register_signal_handling(self)
@@ -643,7 +640,7 @@ class HomeAssistant:
 args: parameters for method to call.
 """
 # late import to avoid circular imports
-from .helpers import frame # pylint: disable=import-outside-toplevel
+from .helpers import frame # noqa: PLC0415
 frame.report_usage(
 "calls `async_add_job`, which should be reviewed against "
@@ -699,7 +696,7 @@ class HomeAssistant:
 args: parameters for method to call.
 """
 # late import to avoid circular imports
-from .helpers import frame # pylint: disable=import-outside-toplevel
+from .helpers import frame # noqa: PLC0415
 frame.report_usage(
 "calls `async_add_hass_job`, which should be reviewed against "
@@ -802,7 +799,7 @@ class HomeAssistant:
 target: target to call.
 """
 if self.loop_thread_id != threading.get_ident():
-from .helpers import frame # pylint: disable=import-outside-toplevel
+from .helpers import frame # noqa: PLC0415
 frame.report_non_thread_safe_operation("hass.async_create_task")
 return self.async_create_task_internal(target, name, eager_start)
@@ -973,7 +970,7 @@ class HomeAssistant:
 args: parameters for method to call.
 """
 # late import to avoid circular imports
-from .helpers import frame # pylint: disable=import-outside-toplevel
+from .helpers import frame # noqa: PLC0415
 frame.report_usage(
 "calls `async_run_job`, which should be reviewed against "
@@ -1517,7 +1514,7 @@ class EventBus:
 """
 _verify_event_type_length_or_raise(event_type)
 if self._hass.loop_thread_id != threading.get_ident():
-from .helpers import frame # pylint: disable=import-outside-toplevel
+from .helpers import frame # noqa: PLC0415
 frame.report_non_thread_safe_operation("hass.bus.async_fire")
 return self.async_fire_internal(
@@ -1622,7 +1619,7 @@ class EventBus:
 """
 if run_immediately in (True, False):
 # late import to avoid circular imports
-from .helpers import frame # pylint: disable=import-outside-toplevel
+from .helpers import frame # noqa: PLC0415
 frame.report_usage(
 "calls `async_listen` with run_immediately",
@@ -1692,7 +1689,7 @@ class EventBus:
 """
 if run_immediately in (True, False):
 # late import to avoid circular imports
-from .helpers import frame # pylint: disable=import-outside-toplevel
+from .helpers import frame # noqa: PLC0415
 frame.report_usage(
 "calls `async_listen_once` with run_immediately",
|
@ -538,8 +538,7 @@ class Config:
|
|||||||
|
|
||||||
def __init__(self, hass: HomeAssistant, config_dir: str) -> None:
|
def __init__(self, hass: HomeAssistant, config_dir: str) -> None:
|
||||||
"""Initialize a new config object."""
|
"""Initialize a new config object."""
|
||||||
# pylint: disable-next=import-outside-toplevel
|
from .components.zone import DEFAULT_RADIUS # noqa: PLC0415
|
||||||
from .components.zone import DEFAULT_RADIUS
|
|
||||||
|
|
||||||
self.hass = hass
|
self.hass = hass
|
||||||
|
|
||||||
@ -845,8 +844,7 @@ class Config:
|
|||||||
) -> dict[str, Any]:
|
) -> dict[str, Any]:
|
||||||
"""Migrate to the new version."""
|
"""Migrate to the new version."""
|
||||||
|
|
||||||
# pylint: disable-next=import-outside-toplevel
|
from .components.zone import DEFAULT_RADIUS # noqa: PLC0415
|
||||||
from .components.zone import DEFAULT_RADIUS
|
|
||||||
|
|
||||||
data = old_data
|
data = old_data
|
||||||
if old_major_version == 1 and old_minor_version < 2:
|
if old_major_version == 1 and old_minor_version < 2:
|
||||||
@ -863,8 +861,9 @@ class Config:
|
|||||||
try:
|
try:
|
||||||
owner = await self.hass.auth.async_get_owner()
|
owner = await self.hass.auth.async_get_owner()
|
||||||
if owner is not None:
|
if owner is not None:
|
||||||
# pylint: disable-next=import-outside-toplevel
|
from .components.frontend import ( # noqa: PLC0415
|
||||||
from .components.frontend import storage as frontend_store
|
storage as frontend_store,
|
||||||
|
)
|
||||||
|
|
||||||
owner_store = await frontend_store.async_user_store(
|
owner_store = await frontend_store.async_user_store(
|
||||||
self.hass, owner.id
|
self.hass, owner.id
|
||||||
|
@@ -23,8 +23,7 @@ def import_async_get_exception_message() -> Callable[
 Defaults to English, requires translations to already be cached.
 """
-# pylint: disable-next=import-outside-toplevel
-from .helpers.translation import (
+from .helpers.translation import ( # noqa: PLC0415
 async_get_exception_message as async_get_exception_message_import,
 )
@@ -475,8 +475,7 @@ class AreaRegistry(BaseRegistry[AreasRegistryStoreData]):
 @callback
 def _async_setup_cleanup(self) -> None:
 """Set up the area registry cleanup."""
-# pylint: disable-next=import-outside-toplevel
-from . import ( # Circular dependencies
+from . import ( # Circular dependencies # noqa: PLC0415
 floor_registry as fr,
 label_registry as lr,
 )
@@ -543,8 +542,7 @@ def async_entries_for_label(registry: AreaRegistry, label_id: str) -> list[AreaE
 def _validate_temperature_entity(hass: HomeAssistant, entity_id: str) -> None:
 """Validate temperature entity."""
-# pylint: disable=import-outside-toplevel
-from homeassistant.components.sensor import SensorDeviceClass
+from homeassistant.components.sensor import SensorDeviceClass # noqa: PLC0415
 if not (state := hass.states.get(entity_id)):
 raise ValueError(f"Entity {entity_id} does not exist")
@@ -558,8 +556,7 @@ def _validate_temperature_entity(hass: HomeAssistant, entity_id: str) -> None:
 def _validate_humidity_entity(hass: HomeAssistant, entity_id: str) -> None:
 """Validate humidity entity."""
-# pylint: disable=import-outside-toplevel
-from homeassistant.components.sensor import SensorDeviceClass
+from homeassistant.components.sensor import SensorDeviceClass # noqa: PLC0415
 if not (state := hass.states.get(entity_id)):
 raise ValueError(f"Entity {entity_id} does not exist")
@@ -43,8 +43,7 @@ def async_initialize_backup(hass: HomeAssistant) -> None:
 registers the basic backup websocket API which is used by frontend to subscribe
 to backup events.
 """
-# pylint: disable-next=import-outside-toplevel
-from homeassistant.components.backup import basic_websocket
+from homeassistant.components.backup import basic_websocket # noqa: PLC0415
 hass.data[DATA_BACKUP] = BackupData()
 basic_websocket.async_register_websocket_handlers(hass)
@@ -222,16 +222,14 @@ class WebhookFlowHandler(config_entries.ConfigFlow):
 return self.async_show_form(step_id="user")
 # Local import to be sure cloud is loaded and setup
-# pylint: disable-next=import-outside-toplevel
-from homeassistant.components.cloud import (
+from homeassistant.components.cloud import ( # noqa: PLC0415
 async_active_subscription,
 async_create_cloudhook,
 async_is_connected,
 )
 # Local import to be sure webhook is loaded and setup
-# pylint: disable-next=import-outside-toplevel
-from homeassistant.components.webhook import (
+from homeassistant.components.webhook import ( # noqa: PLC0415
 async_generate_id,
 async_generate_url,
 )
@@ -281,7 +279,6 @@ async def webhook_async_remove_entry(
 return
 # Local import to be sure cloud is loaded and setup
-# pylint: disable-next=import-outside-toplevel
-from homeassistant.components.cloud import async_delete_cloudhook
+from homeassistant.components.cloud import async_delete_cloudhook # noqa: PLC0415
 await async_delete_cloudhook(hass, entry.data["webhook_id"])
|
@ -721,8 +721,7 @@ def template(value: Any | None) -> template_helper.Template:
|
|||||||
if isinstance(value, (list, dict, template_helper.Template)):
|
if isinstance(value, (list, dict, template_helper.Template)):
|
||||||
raise vol.Invalid("template value should be a string")
|
raise vol.Invalid("template value should be a string")
|
||||||
if not (hass := _async_get_hass_or_none()):
|
if not (hass := _async_get_hass_or_none()):
|
||||||
# pylint: disable-next=import-outside-toplevel
|
from .frame import ReportBehavior, report_usage # noqa: PLC0415
|
||||||
from .frame import ReportBehavior, report_usage
|
|
||||||
|
|
||||||
report_usage(
|
report_usage(
|
||||||
(
|
(
|
||||||
@ -750,8 +749,7 @@ def dynamic_template(value: Any | None) -> template_helper.Template:
|
|||||||
if not template_helper.is_template_string(str(value)):
|
if not template_helper.is_template_string(str(value)):
|
||||||
raise vol.Invalid("template value does not contain a dynamic template")
|
raise vol.Invalid("template value does not contain a dynamic template")
|
||||||
if not (hass := _async_get_hass_or_none()):
|
if not (hass := _async_get_hass_or_none()):
|
||||||
# pylint: disable-next=import-outside-toplevel
|
from .frame import ReportBehavior, report_usage # noqa: PLC0415
|
||||||
from .frame import ReportBehavior, report_usage
|
|
||||||
|
|
||||||
report_usage(
|
report_usage(
|
||||||
(
|
(
|
||||||
@ -1151,9 +1149,9 @@ def custom_serializer(schema: Any) -> Any:
|
|||||||
|
|
||||||
def _custom_serializer(schema: Any, *, allow_section: bool) -> Any:
|
def _custom_serializer(schema: Any, *, allow_section: bool) -> Any:
|
||||||
"""Serialize additional types for voluptuous_serialize."""
|
"""Serialize additional types for voluptuous_serialize."""
|
||||||
from homeassistant import data_entry_flow # pylint: disable=import-outside-toplevel
|
from homeassistant import data_entry_flow # noqa: PLC0415
|
||||||
|
|
||||||
from . import selector # pylint: disable=import-outside-toplevel
|
from . import selector # noqa: PLC0415
|
||||||
|
|
||||||
if schema is positive_time_period_dict:
|
if schema is positive_time_period_dict:
|
||||||
return {"type": "positive_time_period_dict"}
|
return {"type": "positive_time_period_dict"}
|
||||||
@ -1216,8 +1214,7 @@ def _no_yaml_config_schema(
|
|||||||
"""Return a config schema which logs if attempted to setup from YAML."""
|
"""Return a config schema which logs if attempted to setup from YAML."""
|
||||||
|
|
||||||
def raise_issue() -> None:
|
def raise_issue() -> None:
|
||||||
# pylint: disable-next=import-outside-toplevel
|
from .issue_registry import IssueSeverity, async_create_issue # noqa: PLC0415
|
||||||
from .issue_registry import IssueSeverity, async_create_issue
|
|
||||||
|
|
||||||
# HomeAssistantError is raised if called from the wrong thread
|
# HomeAssistantError is raised if called from the wrong thread
|
||||||
with contextlib.suppress(HomeAssistantError):
|
with contextlib.suppress(HomeAssistantError):
|
||||||
|
@@ -190,11 +190,10 @@ def _print_deprecation_warning_internal_impl(
 *,
 log_when_no_integration_is_found: bool,
 ) -> None:
-# pylint: disable=import-outside-toplevel
-from homeassistant.core import async_get_hass_or_none
-from homeassistant.loader import async_suggest_report_issue
+from homeassistant.core import async_get_hass_or_none # noqa: PLC0415
+from homeassistant.loader import async_suggest_report_issue # noqa: PLC0415
-from .frame import MissingIntegrationFrame, get_integration_frame
+from .frame import MissingIntegrationFrame, get_integration_frame # noqa: PLC0415
 logger = logging.getLogger(module_name)
 if breaks_in_ha_version:
@@ -1018,8 +1018,7 @@ class DeviceRegistry(BaseRegistry[dict[str, list[dict[str, Any]]]]):
 and old.area_id is None
 ):
 # Circular dep
-# pylint: disable-next=import-outside-toplevel
-from . import area_registry as ar
+from . import area_registry as ar # noqa: PLC0415
 area = ar.async_get(self.hass).async_get_or_create(suggested_area)
 area_id = area.id
@@ -1622,8 +1621,7 @@ def async_cleanup(
 @callback
 def async_setup_cleanup(hass: HomeAssistant, dev_reg: DeviceRegistry) -> None:
 """Clean up device registry when entities removed."""
-# pylint: disable-next=import-outside-toplevel
-from . import entity_registry, label_registry as lr
+from . import entity_registry, label_registry as lr # noqa: PLC0415
 @callback
 def _label_removed_from_registry_filter(
@@ -1745,8 +1745,7 @@ def async_config_entry_disabled_by_changed(
 @callback
 def _async_setup_cleanup(hass: HomeAssistant, registry: EntityRegistry) -> None:
 """Clean up device registry when entities removed."""
-# pylint: disable-next=import-outside-toplevel
-from . import category_registry as cr, event, label_registry as lr
+from . import category_registry as cr, event, label_registry as lr # noqa: PLC0415
 @callback
 def _removed_from_registry_filter(
@@ -235,10 +235,7 @@ def find_paths_unserializable_data(
 This method is slow! Only use for error handling.
 """
-from homeassistant.core import ( # pylint: disable=import-outside-toplevel
-Event,
-State,
-)
+from homeassistant.core import Event, State # noqa: PLC0415
 to_process = deque([(bad_data, "$")])
 invalid = {}
@@ -216,8 +216,7 @@ class APIInstance:
 async def async_call_tool(self, tool_input: ToolInput) -> JsonObjectType:
 """Call a LLM tool, validate args and return the response."""
-# pylint: disable=import-outside-toplevel
-from homeassistant.components.conversation import (
+from homeassistant.components.conversation import ( # noqa: PLC0415
 ConversationTraceEventType,
 async_conversation_trace_append,
 )
@@ -186,8 +186,7 @@ def get_url(
 known_hostnames = ["localhost"]
 if is_hassio(hass):
 # Local import to avoid circular dependencies
-# pylint: disable-next=import-outside-toplevel
-from homeassistant.components.hassio import get_host_info
+from homeassistant.components.hassio import get_host_info # noqa: PLC0415
 if host_info := get_host_info(hass):
 known_hostnames.extend(
@@ -318,8 +317,7 @@ def _get_cloud_url(hass: HomeAssistant, require_current_request: bool = False) -
 """Get external Home Assistant Cloud URL of this instance."""
 if "cloud" in hass.config.components:
 # Local import to avoid circular dependencies
-# pylint: disable-next=import-outside-toplevel
-from homeassistant.components.cloud import (
+from homeassistant.components.cloud import ( # noqa: PLC0415
 CloudNotAvailable,
 async_remote_ui_url,
 )
@@ -35,8 +35,7 @@ class RecorderData:
 @callback
 def async_migration_in_progress(hass: HomeAssistant) -> bool:
 """Check to see if a recorder migration is in progress."""
-# pylint: disable-next=import-outside-toplevel
-from homeassistant.components import recorder
+from homeassistant.components import recorder # noqa: PLC0415
 return recorder.util.async_migration_in_progress(hass)
@@ -44,8 +43,7 @@ def async_migration_in_progress(hass: HomeAssistant) -> bool:
 @callback
 def async_migration_is_live(hass: HomeAssistant) -> bool:
 """Check to see if a recorder migration is live."""
-# pylint: disable-next=import-outside-toplevel
-from homeassistant.components import recorder
+from homeassistant.components import recorder # noqa: PLC0415
 return recorder.util.async_migration_is_live(hass)
@@ -58,8 +56,9 @@ def async_initialize_recorder(hass: HomeAssistant) -> None:
 registers the basic recorder websocket API which is used by frontend to determine
 if the recorder is migrating the database.
 """
-# pylint: disable-next=import-outside-toplevel
-from homeassistant.components.recorder.basic_websocket_api import async_setup
+from homeassistant.components.recorder.basic_websocket_api import ( # noqa: PLC0415
+async_setup,
+)
 hass.data[DATA_RECORDER] = RecorderData()
 async_setup(hass)
@@ -85,8 +85,7 @@ ALL_SERVICE_DESCRIPTIONS_CACHE: HassKey[
 @cache
 def _base_components() -> dict[str, ModuleType]:
 """Return a cached lookup of base components."""
-# pylint: disable-next=import-outside-toplevel
-from homeassistant.components import (
+from homeassistant.components import ( # noqa: PLC0415
 alarm_control_panel,
 assist_satellite,
 calendar,
@@ -1296,8 +1295,7 @@ def async_register_entity_service(
 if schema is None or isinstance(schema, dict):
 schema = cv.make_entity_service_schema(schema)
 elif not cv.is_entity_service_schema(schema):
-# pylint: disable-next=import-outside-toplevel
-from .frame import ReportBehavior, report_usage
+from .frame import ReportBehavior, report_usage # noqa: PLC0415
 report_usage(
 "registers an entity service with a non entity service schema",
@@ -354,7 +354,7 @@ class Store[_T: Mapping[str, Any] | Sequence[Any]]:
 corrupt_path,
 err,
 )
-from .issue_registry import ( # pylint: disable=import-outside-toplevel
+from .issue_registry import ( # noqa: PLC0415
 IssueSeverity,
 async_create_issue,
 )
@@ -31,8 +31,8 @@ def get_astral_location(
 hass: HomeAssistant,
 ) -> tuple[astral.location.Location, astral.Elevation]:
 """Get an astral location for the current Home Assistant configuration."""
-from astral import LocationInfo # pylint: disable=import-outside-toplevel
-from astral.location import Location # pylint: disable=import-outside-toplevel
+from astral import LocationInfo # noqa: PLC0415
+from astral.location import Location # noqa: PLC0415
 latitude = hass.config.latitude
 longitude = hass.config.longitude
@@ -42,8 +42,7 @@ async def async_get_system_info(hass: HomeAssistant) -> dict[str, Any]:
 # may not be loaded yet and we don't want to
 # do blocking I/O in the event loop to import it.
 if TYPE_CHECKING:
-# pylint: disable-next=import-outside-toplevel
-from homeassistant.components import hassio
+from homeassistant.components import hassio # noqa: PLC0415
 else:
 hassio = await async_import_module(hass, "homeassistant.components.hassio")
@@ -210,9 +210,7 @@ def async_setup(hass: HomeAssistant) -> bool:
 if new_size > current_size:
 lru.set_size(new_size)
-from .event import ( # pylint: disable=import-outside-toplevel
-async_track_time_interval,
-)
+from .event import async_track_time_interval # noqa: PLC0415
 cancel = async_track_time_interval(
 hass, _async_adjust_lru_sizes, timedelta(minutes=10)
@@ -527,8 +525,7 @@ class Template:
 Note: A valid hass instance should always be passed in. The hass parameter
 will be non optional in Home Assistant Core 2025.10.
 """
-# pylint: disable-next=import-outside-toplevel
-from .frame import ReportBehavior, report_usage
+from .frame import ReportBehavior, report_usage # noqa: PLC0415
 if not isinstance(template, str):
 raise TypeError("Expected template to be a string")
@@ -1141,8 +1138,7 @@ class TemplateStateBase(State):
 def format_state(self, rounded: bool, with_unit: bool) -> str:
 """Return a formatted version of the state."""
 # Import here, not at top-level, to avoid circular import
-# pylint: disable-next=import-outside-toplevel
-from homeassistant.components.sensor import (
+from homeassistant.components.sensor import ( # noqa: PLC0415
 DOMAIN as SENSOR_DOMAIN,
 async_rounded_state,
 )
@@ -1278,7 +1274,7 @@ def forgiving_boolean[_T](
 """Try to convert value to a boolean."""
 try:
 # Import here, not at top-level to avoid circular import
-from . import config_validation as cv # pylint: disable=import-outside-toplevel
+from . import config_validation as cv # noqa: PLC0415
 return cv.boolean(value)
 except vol.Invalid:
@@ -1303,7 +1299,7 @@ def result_as_boolean(template_result: Any | None) -> bool:
 def expand(hass: HomeAssistant, *args: Any) -> Iterable[State]:
 """Expand out any groups and zones into entity states."""
 # circular import.
-from . import entity as entity_helper # pylint: disable=import-outside-toplevel
+from . import entity as entity_helper # noqa: PLC0415
 search = list(args)
 found = {}
@@ -1376,8 +1372,7 @@ def integration_entities(hass: HomeAssistant, entry_name: str) -> Iterable[str]:
 return entities
 # fallback to just returning all entities for a domain
-# pylint: disable-next=import-outside-toplevel
-from .entity import entity_sources
+from .entity import entity_sources # noqa: PLC0415
 return [
 entity_id
@@ -1421,7 +1416,7 @@ def device_name(hass: HomeAssistant, lookup_value: str) -> str | None:
 ent_reg = entity_registry.async_get(hass)
 # Import here, not at top-level to avoid circular import
-from . import config_validation as cv # pylint: disable=import-outside-toplevel
+from . import config_validation as cv # noqa: PLC0415
 try:
 cv.entity_id(lookup_value)
@@ -1579,7 +1574,7 @@ def area_id(hass: HomeAssistant, lookup_value: str) -> str | None:
 ent_reg = entity_registry.async_get(hass)
 dev_reg = device_registry.async_get(hass)
 # Import here, not at top-level to avoid circular import
-from . import config_validation as cv # pylint: disable=import-outside-toplevel
+from . import config_validation as cv # noqa: PLC0415
 try:
 cv.entity_id(lookup_value)
@@ -1617,7 +1612,7 @@ def area_name(hass: HomeAssistant, lookup_value: str) -> str | None:
 dev_reg = device_registry.async_get(hass)
 ent_reg = entity_registry.async_get(hass)
 # Import here, not at top-level to avoid circular import
-from . import config_validation as cv # pylint: disable=import-outside-toplevel
+from . import config_validation as cv # noqa: PLC0415
 try:
 cv.entity_id(lookup_value)
@@ -1698,7 +1693,7 @@ def labels(hass: HomeAssistant, lookup_value: Any = None) -> Iterable[str | None
 ent_reg = entity_registry.async_get(hass)
 # Import here, not at top-level to avoid circular import
-from . import config_validation as cv # pylint: disable=import-outside-toplevel
+from . import config_validation as cv # noqa: PLC0415
 lookup_value = str(lookup_value)
@@ -41,8 +41,7 @@ def _deprecated_typing_helper(attr: str) -> DeferredDeprecatedAlias:
 """Help to make a DeferredDeprecatedAlias."""
 def value_fn() -> Any:
-# pylint: disable-next=import-outside-toplevel
-import homeassistant.core
+import homeassistant.core # noqa: PLC0415
 return getattr(homeassistant.core, attr)
@@ -291,7 +291,7 @@ def _get_custom_components(hass: HomeAssistant) -> dict[str, Integration]:
 return {}
 try:
-import custom_components # pylint: disable=import-outside-toplevel
+import custom_components # noqa: PLC0415
 except ImportError:
 return {}
@@ -1392,7 +1392,7 @@ async def async_get_integrations(
 # Now the rest use resolve_from_root
 if needed:
-from . import components # pylint: disable=import-outside-toplevel
+from . import components # noqa: PLC0415
 integrations = await hass.async_add_executor_job(
 _resolve_integrations_from_root, hass, components, needed
@@ -1728,7 +1728,7 @@ def _async_mount_config_dir(hass: HomeAssistant) -> None:
 sys.path.insert(0, hass.config.config_dir)
 with suppress(ImportError):
-import custom_components # pylint: disable=import-outside-toplevel # noqa: F401
+import custom_components # noqa: F401, PLC0415
 sys.path.remove(hass.config.config_dir)
 sys.path_importer_cache.pop(hass.config.config_dir, None)
@@ -47,8 +47,7 @@ WARNING_STR = "General Warnings"
 def color(the_color, *args, reset=None):
 """Color helper."""
-# pylint: disable-next=import-outside-toplevel
-from colorlog.escape_codes import escape_codes, parse_colors
+from colorlog.escape_codes import escape_codes, parse_colors # noqa: PLC0415
 try:
 if not args:
@@ -101,8 +101,7 @@ def async_notify_setup_error(
 This method must be run in the event loop.
 """
-# pylint: disable-next=import-outside-toplevel
-from .components import persistent_notification
+from .components import persistent_notification # noqa: PLC0415
 if (errors := hass.data.get(_DATA_PERSISTENT_ERRORS)) is None:
 errors = hass.data[_DATA_PERSISTENT_ERRORS] = {}
@@ -36,8 +36,7 @@ def create_eager_task[_T](
 # If there is no running loop, create_eager_task is being called from
 # the wrong thread.
 # Late import to avoid circular dependencies
-# pylint: disable-next=import-outside-toplevel
-from homeassistant.helpers import frame
+from homeassistant.helpers import frame # noqa: PLC0415
 frame.report_usage("attempted to create an asyncio task from a thread")
 raise
@@ -31,9 +31,8 @@ def _test_signal_type_typing() -> None: # noqa: PYI048
 This is tested during the mypy run. Do not move it to 'tests'!
 """
-# pylint: disable=import-outside-toplevel
-from homeassistant.core import HomeAssistant
-from homeassistant.helpers.dispatcher import (
+from homeassistant.core import HomeAssistant # noqa: PLC0415
+from homeassistant.helpers.dispatcher import ( # noqa: PLC0415
 async_dispatcher_connect,
 async_dispatcher_send,
 )
@@ -287,6 +287,7 @@ disable = [
 # "global-statement", # PLW0603, ruff catches new occurrences, needs more work
 "global-variable-not-assigned", # PLW0602
 "implicit-str-concat", # ISC001
+"import-outside-toplevel", # PLC0415
 "import-self", # PLW0406
 "inconsistent-quotes", # Q000
 "invalid-envvar-default", # PLW1508
@@ -812,6 +813,7 @@ ignore = [
 "PLR0913", # Too many arguments to function call ({c_args} > {max_args})
 "PLR0915", # Too many statements ({statements} > {max_statements})
 "PLR2004", # Magic value used in comparison, consider replacing {value} with a constant variable
+"PLW1641", # __eq__ without __hash__
 "PLW2901", # Outer {outer_kind} variable {name} overwritten by inner {inner_kind} target
 "PT011", # pytest.raises({exception}) is too broad, set the `match` parameter or use a more specific exception
 "PT018", # Assertion should be broken down into multiple parts
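PLW1641, newly ignored above, is ruff's eq-without-hash rule: defining __eq__ without __hash__ implicitly sets __hash__ to None and makes instances unhashable. A minimal illustration of the pattern the rule flags (hypothetical class, not from the codebase):

class GridPoint:
    """Defines __eq__ but not __hash__, so instances can no longer be dict keys."""

    def __init__(self, x: int, y: int) -> None:
        self.x = x
        self.y = y

    def __eq__(self, other: object) -> bool:
        return isinstance(other, GridPoint) and (self.x, self.y) == (other.x, other.y)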
@@ -835,6 +837,9 @@ ignore = [
 "TRY400", # Use `logging.exception` instead of `logging.error`
 # Ignored due to performance: https://github.com/charliermarsh/ruff/issues/2923
 "UP038", # Use `X | Y` in `isinstance` call instead of `(X, Y)`
+"UP046", # Non PEP 695 generic class
+"UP047", # Non PEP 696 generic function
+"UP049", # Avoid private type parameter names
 # May conflict with the formatter, https://docs.astral.sh/ruff/formatter/#conflicting-lint-rules
 "W191",
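UP046 and UP047, newly ignored above, promote the PEP 695 generic syntax that parts of this codebase already use (for example `def create_eager_task[_T](` and `class Store[_T: ...]` in earlier hunks). A hedged sketch of the two spellings (hypothetical first helpers, illustration only; the PEP 695 form needs Python 3.12+):

from typing import TypeVar

_T = TypeVar("_T")


def first_legacy(items: list[_T]) -> _T:
    """Pre-PEP 695 spelling: a module-level TypeVar, which UP047 would rewrite."""
    return items[0]


def first_pep695[_T](items: list[_T]) -> _T:
    """PEP 695 spelling: the type parameter is declared inline on the function."""
    return items[0]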
requirements_test_pre_commit.txt (generated, 2 changed lines)
@@ -1,5 +1,5 @@
 # Automatically generated from .pre-commit-config.yaml by gen_requirements_all.py, do not edit
 codespell==2.4.1
-ruff==0.11.12
+ruff==0.12.0
 yamllint==1.37.1
script/hassfest/docker/Dockerfile (generated, 2 changed lines)
@@ -27,7 +27,7 @@ RUN --mount=from=ghcr.io/astral-sh/uv:0.7.1,source=/uv,target=/bin/uv \
 stdlib-list==0.10.0 \
 pipdeptree==2.26.1 \
 tqdm==4.67.1 \
-ruff==0.11.12 \
+ruff==0.12.0 \
 PyTurboJPEG==1.8.0 \
 go2rtc-client==0.2.1 \
 ha-ffmpeg==3.2.2 \
@@ -42,8 +42,7 @@ def printc(the_color, *args):
 def validate_requirements_ok():
 """Validate requirements, returns True of ok."""
-# pylint: disable-next=import-outside-toplevel
-from gen_requirements_all import main as req_main
+from gen_requirements_all import main as req_main # noqa: PLC0415
 return req_main(True) == 0
@@ -198,7 +198,7 @@ def main() -> None:
 def test_bump_version() -> None:
 """Make sure it all works."""
-import pytest
+import pytest # noqa: PLC0415
 assert bump_version(Version("0.56.0"), "beta") == Version("0.56.1b0")
 assert bump_version(Version("0.56.0b3"), "beta") == Version("0.56.0b4")
@@ -452,11 +452,9 @@ def async_fire_mqtt_message(
 # Local import to avoid processing MQTT modules when running a testcase
 # which does not use MQTT.
-# pylint: disable-next=import-outside-toplevel
-from paho.mqtt.client import MQTTMessage
+from paho.mqtt.client import MQTTMessage # noqa: PLC0415
-# pylint: disable-next=import-outside-toplevel
-from homeassistant.components.mqtt import MqttData
+from homeassistant.components.mqtt import MqttData # noqa: PLC0415
 if isinstance(payload, str):
 payload = payload.encode("utf-8")
@@ -1736,8 +1734,7 @@ def async_get_persistent_notifications(
 def async_mock_cloud_connection_status(hass: HomeAssistant, connected: bool) -> None:
 """Mock a signal the cloud disconnected."""
-# pylint: disable-next=import-outside-toplevel
-from homeassistant.components.cloud import (
+from homeassistant.components.cloud import ( # noqa: PLC0415
 SIGNAL_CLOUD_CONNECTION_STATE,
 CloudConnectionState,
 )
@@ -166,8 +166,7 @@ def mock_backup_generation_fixture(
 @pytest.fixture
 def mock_backups() -> Generator[None]:
     """Fixture to setup test backups."""
-    # pylint: disable-next=import-outside-toplevel
-    from homeassistant.components.backup import backup as core_backup
+    from homeassistant.components.backup import backup as core_backup  # noqa: PLC0415

     class CoreLocalBackupAgent(core_backup.CoreLocalBackupAgent):
         def __init__(self, hass: HomeAssistant) -> None:
@@ -98,8 +98,9 @@ def entity_registry_enabled_by_default() -> Generator[None]:
 @pytest.fixture(name="stub_blueprint_populate")
 def stub_blueprint_populate_fixture() -> Generator[None]:
     """Stub copying the blueprints to the config folder."""
-    # pylint: disable-next=import-outside-toplevel
-    from .blueprint.common import stub_blueprint_populate_fixture_helper
+    from .blueprint.common import (  # noqa: PLC0415
+        stub_blueprint_populate_fixture_helper,
+    )

     yield from stub_blueprint_populate_fixture_helper()

@@ -108,8 +109,7 @@ def stub_blueprint_populate_fixture() -> Generator[None]:
 @pytest.fixture(name="mock_tts_get_cache_files")
 def mock_tts_get_cache_files_fixture() -> Generator[MagicMock]:
     """Mock the list TTS cache function."""
-    # pylint: disable-next=import-outside-toplevel
-    from .tts.common import mock_tts_get_cache_files_fixture_helper
+    from .tts.common import mock_tts_get_cache_files_fixture_helper  # noqa: PLC0415

     yield from mock_tts_get_cache_files_fixture_helper()

|
|||||||
init_tts_cache_dir_side_effect: Any,
|
init_tts_cache_dir_side_effect: Any,
|
||||||
) -> Generator[MagicMock]:
|
) -> Generator[MagicMock]:
|
||||||
"""Mock the TTS cache dir in memory."""
|
"""Mock the TTS cache dir in memory."""
|
||||||
# pylint: disable-next=import-outside-toplevel
|
from .tts.common import mock_tts_init_cache_dir_fixture_helper # noqa: PLC0415
|
||||||
from .tts.common import mock_tts_init_cache_dir_fixture_helper
|
|
||||||
|
|
||||||
yield from mock_tts_init_cache_dir_fixture_helper(init_tts_cache_dir_side_effect)
|
yield from mock_tts_init_cache_dir_fixture_helper(init_tts_cache_dir_side_effect)
|
||||||
|
|
||||||
@@ -128,8 +127,9 @@ def mock_tts_init_cache_dir_fixture(
 @pytest.fixture(name="init_tts_cache_dir_side_effect")
 def init_tts_cache_dir_side_effect_fixture() -> Any:
     """Return the cache dir."""
-    # pylint: disable-next=import-outside-toplevel
-    from .tts.common import init_tts_cache_dir_side_effect_fixture_helper
+    from .tts.common import (  # noqa: PLC0415
+        init_tts_cache_dir_side_effect_fixture_helper,
+    )

     return init_tts_cache_dir_side_effect_fixture_helper()

@@ -142,8 +142,7 @@ def mock_tts_cache_dir_fixture(
     request: pytest.FixtureRequest,
 ) -> Generator[Path]:
     """Mock the TTS cache dir with empty dir."""
-    # pylint: disable-next=import-outside-toplevel
-    from .tts.common import mock_tts_cache_dir_fixture_helper
+    from .tts.common import mock_tts_cache_dir_fixture_helper  # noqa: PLC0415

     yield from mock_tts_cache_dir_fixture_helper(
         tmp_path, mock_tts_init_cache_dir, mock_tts_get_cache_files, request
@@ -153,8 +152,7 @@ def mock_tts_cache_dir_fixture(
 @pytest.fixture(name="tts_mutagen_mock")
 def tts_mutagen_mock_fixture() -> Generator[MagicMock]:
     """Mock writing tags."""
-    # pylint: disable-next=import-outside-toplevel
-    from .tts.common import tts_mutagen_mock_fixture_helper
+    from .tts.common import tts_mutagen_mock_fixture_helper  # noqa: PLC0415

     yield from tts_mutagen_mock_fixture_helper()

@@ -162,8 +160,9 @@ def tts_mutagen_mock_fixture() -> Generator[MagicMock]:
 @pytest.fixture(name="mock_conversation_agent")
 def mock_conversation_agent_fixture(hass: HomeAssistant) -> MockAgent:
     """Mock a conversation agent."""
-    # pylint: disable-next=import-outside-toplevel
-    from .conversation.common import mock_conversation_agent_fixture_helper
+    from .conversation.common import (  # noqa: PLC0415
+        mock_conversation_agent_fixture_helper,
+    )

     return mock_conversation_agent_fixture_helper(hass)

@@ -180,8 +179,7 @@ def prevent_ffmpeg_subprocess() -> Generator[None]:
 @pytest.fixture
 def mock_light_entities() -> list[MockLight]:
     """Return mocked light entities."""
-    # pylint: disable-next=import-outside-toplevel
-    from .light.common import MockLight
+    from .light.common import MockLight  # noqa: PLC0415

     return [
         MockLight("Ceiling", STATE_ON),
@@ -193,8 +191,7 @@ def mock_light_entities() -> list[MockLight]:
 @pytest.fixture
 def mock_sensor_entities() -> dict[str, MockSensor]:
     """Return mocked sensor entities."""
-    # pylint: disable-next=import-outside-toplevel
-    from .sensor.common import get_mock_sensor_entities
+    from .sensor.common import get_mock_sensor_entities  # noqa: PLC0415

     return get_mock_sensor_entities()

@@ -202,8 +199,7 @@ def mock_sensor_entities() -> dict[str, MockSensor]:
 @pytest.fixture
 def mock_switch_entities() -> list[MockSwitch]:
     """Return mocked toggle entities."""
-    # pylint: disable-next=import-outside-toplevel
-    from .switch.common import get_mock_switch_entities
+    from .switch.common import get_mock_switch_entities  # noqa: PLC0415

     return get_mock_switch_entities()

@@ -211,8 +207,7 @@ def mock_switch_entities() -> list[MockSwitch]:
 @pytest.fixture
 def mock_legacy_device_scanner() -> MockScanner:
     """Return mocked legacy device scanner entity."""
-    # pylint: disable-next=import-outside-toplevel
-    from .device_tracker.common import MockScanner
+    from .device_tracker.common import MockScanner  # noqa: PLC0415

     return MockScanner()

@@ -220,8 +215,7 @@ def mock_legacy_device_scanner() -> MockScanner:
 @pytest.fixture
 def mock_legacy_device_tracker_setup() -> Callable[[HomeAssistant, MockScanner], None]:
     """Return setup callable for legacy device tracker setup."""
-    # pylint: disable-next=import-outside-toplevel
-    from .device_tracker.common import mock_legacy_device_tracker_setup
+    from .device_tracker.common import mock_legacy_device_tracker_setup  # noqa: PLC0415

     return mock_legacy_device_tracker_setup

@@ -231,8 +225,7 @@ def addon_manager_fixture(
     hass: HomeAssistant, supervisor_client: AsyncMock
 ) -> AddonManager:
     """Return an AddonManager instance."""
-    # pylint: disable-next=import-outside-toplevel
-    from .hassio.common import mock_addon_manager
+    from .hassio.common import mock_addon_manager  # noqa: PLC0415

     return mock_addon_manager(hass)

@@ -288,8 +281,7 @@ def addon_store_info_fixture(
     addon_store_info_side_effect: Any | None,
 ) -> AsyncMock:
     """Mock Supervisor add-on store info."""
-    # pylint: disable-next=import-outside-toplevel
-    from .hassio.common import mock_addon_store_info
+    from .hassio.common import mock_addon_store_info  # noqa: PLC0415

     return mock_addon_store_info(supervisor_client, addon_store_info_side_effect)

@@ -305,8 +297,7 @@ def addon_info_fixture(
     supervisor_client: AsyncMock, addon_info_side_effect: Any | None
 ) -> AsyncMock:
     """Mock Supervisor add-on info."""
-    # pylint: disable-next=import-outside-toplevel
-    from .hassio.common import mock_addon_info
+    from .hassio.common import mock_addon_info  # noqa: PLC0415

     return mock_addon_info(supervisor_client, addon_info_side_effect)

@@ -316,8 +307,7 @@ def addon_not_installed_fixture(
     addon_store_info: AsyncMock, addon_info: AsyncMock
 ) -> AsyncMock:
     """Mock add-on not installed."""
-    # pylint: disable-next=import-outside-toplevel
-    from .hassio.common import mock_addon_not_installed
+    from .hassio.common import mock_addon_not_installed  # noqa: PLC0415

     return mock_addon_not_installed(addon_store_info, addon_info)

@@ -327,8 +317,7 @@ def addon_installed_fixture(
     addon_store_info: AsyncMock, addon_info: AsyncMock
 ) -> AsyncMock:
     """Mock add-on already installed but not running."""
-    # pylint: disable-next=import-outside-toplevel
-    from .hassio.common import mock_addon_installed
+    from .hassio.common import mock_addon_installed  # noqa: PLC0415

     return mock_addon_installed(addon_store_info, addon_info)

@@ -338,8 +327,7 @@ def addon_running_fixture(
     addon_store_info: AsyncMock, addon_info: AsyncMock
 ) -> AsyncMock:
     """Mock add-on already running."""
-    # pylint: disable-next=import-outside-toplevel
-    from .hassio.common import mock_addon_running
+    from .hassio.common import mock_addon_running  # noqa: PLC0415

     return mock_addon_running(addon_store_info, addon_info)

@@ -350,8 +338,7 @@ def install_addon_side_effect_fixture(
 ) -> Any | None:
     """Return the install add-on side effect."""

-    # pylint: disable-next=import-outside-toplevel
-    from .hassio.common import mock_install_addon_side_effect
+    from .hassio.common import mock_install_addon_side_effect  # noqa: PLC0415

     return mock_install_addon_side_effect(addon_store_info, addon_info)

@@ -371,8 +358,7 @@ def start_addon_side_effect_fixture(
     addon_store_info: AsyncMock, addon_info: AsyncMock
 ) -> Any | None:
     """Return the start add-on options side effect."""
-    # pylint: disable-next=import-outside-toplevel
-    from .hassio.common import mock_start_addon_side_effect
+    from .hassio.common import mock_start_addon_side_effect  # noqa: PLC0415

     return mock_start_addon_side_effect(addon_store_info, addon_info)

@@ -419,8 +405,7 @@ def set_addon_options_side_effect_fixture(
     addon_options: dict[str, Any],
 ) -> Any | None:
     """Return the set add-on options side effect."""
-    # pylint: disable-next=import-outside-toplevel
-    from .hassio.common import mock_set_addon_options_side_effect
+    from .hassio.common import mock_set_addon_options_side_effect  # noqa: PLC0415

     return mock_set_addon_options_side_effect(addon_options)

@@ -446,8 +431,7 @@ def uninstall_addon_fixture(supervisor_client: AsyncMock) -> AsyncMock:
 @pytest.fixture(name="create_backup")
 def create_backup_fixture() -> Generator[AsyncMock]:
     """Mock create backup."""
-    # pylint: disable-next=import-outside-toplevel
-    from .hassio.common import mock_create_backup
+    from .hassio.common import mock_create_backup  # noqa: PLC0415

     yield from mock_create_backup()

@@ -486,8 +470,7 @@ def store_info_fixture(
 @pytest.fixture(name="addon_stats")
 def addon_stats_fixture(supervisor_client: AsyncMock) -> AsyncMock:
     """Mock addon stats info."""
-    # pylint: disable-next=import-outside-toplevel
-    from .hassio.common import mock_addon_stats
+    from .hassio.common import mock_addon_stats  # noqa: PLC0415

     return mock_addon_stats(supervisor_client)

@@ -275,7 +275,7 @@ async def test_resolve_media_path(hass: HomeAssistant, dms_device_mock: Mock) ->
             requested_count=1,
         )
         for parent_id, title in zip(
-            ["0"] + object_ids[:-1], path.split("/"), strict=False
+            ["0", *object_ids[:-1]], path.split("/"), strict=False
         )
     ]
     assert result.url == res_abs_url
@@ -293,7 +293,7 @@ async def test_resolve_media_path(hass: HomeAssistant, dms_device_mock: Mock) ->
             requested_count=1,
         )
         for parent_id, title in zip(
-            ["0"] + object_ids[:-1], path.split("/"), strict=False
+            ["0", *object_ids[:-1]], path.split("/"), strict=False
         )
     ]
     assert result.url == res_abs_url
@@ -351,7 +351,7 @@ async def test_resolve_path_browsed(hass: HomeAssistant, dms_device_mock: Mock)
             requested_count=1,
         )
         for parent_id, title in zip(
-            ["0"] + object_ids[:-1], path.split("/"), strict=False
+            ["0", *object_ids[:-1]], path.split("/"), strict=False
         )
     ]
     assert result.didl_metadata.id == object_ids[-1]
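Besides the lint-suppression churn, the three hunks above switch from list concatenation to unpacking inside the list literal. Both forms build the same list; the literal form is the style ruff's concatenation check (a RUF005-style rule, named here as an assumption) prefers. A small self-contained check, with `object_ids` as made-up sample data:

object_ids = ["obj-a", "obj-b", "obj-c"]  # made-up sample data

# Old style: concatenate two lists.
parents_concat = ["0"] + object_ids[:-1]
# New style: unpack the slice into the literal.
parents_unpacked = ["0", *object_ids[:-1]]

assert parents_concat == parents_unpacked == ["0", "obj-a", "obj-b"]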
@@ -13,9 +13,7 @@ async def test_repair_issue_is_created(
     issue_registry: ir.IssueRegistry,
 ) -> None:
     """Test repair issue is created."""
-    from homeassistant.components.keyboard import (  # pylint:disable=import-outside-toplevel
-        DOMAIN,
-    )
+    from homeassistant.components.keyboard import DOMAIN  # noqa: PLC0415

     assert await async_setup_component(
         hass,
@@ -13,9 +13,7 @@ async def test_repair_issue_is_created(
     issue_registry: ir.IssueRegistry,
 ) -> None:
     """Test repair issue is created."""
-    from homeassistant.components.lirc import (  # pylint: disable=import-outside-toplevel
-        DOMAIN,
-    )
+    from homeassistant.components.lirc import DOMAIN  # noqa: PLC0415

     assert await async_setup_component(
         hass,
@@ -683,11 +683,9 @@ async def test_receiving_message_with_non_utf8_topic_gets_logged(
     # Local import to avoid processing MQTT modules when running a testcase
     # which does not use MQTT.

-    # pylint: disable-next=import-outside-toplevel
-    from paho.mqtt.client import MQTTMessage
+    from paho.mqtt.client import MQTTMessage  # noqa: PLC0415

-    # pylint: disable-next=import-outside-toplevel
-    from homeassistant.components.mqtt.models import MqttData
+    from homeassistant.components.mqtt.models import MqttData  # noqa: PLC0415

     msg = MQTTMessage(topic=b"tasmota/discovery/18FE34E0B760\xcc\x02")
     msg.payload = b"Payload"
@@ -1001,10 +999,9 @@ async def test_dump_service(
     async_fire_time_changed(hass, utcnow() + timedelta(seconds=3))
     await hass.async_block_till_done()

-    writes = mopen.return_value.write.mock_calls
-    assert len(writes) == 2
-    assert writes[0][1][0] == "bla/1,test1\n"
-    assert writes[1][1][0] == "bla/2,test2\n"
+    writes = mopen.return_value.writelines.mock_calls
+    assert len(writes) == 1
+    assert writes[0][1][0] == ["bla/1,test1\n", "bla/2,test2\n"]


 async def test_mqtt_ws_remove_discovered_device(
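Judging from the updated assertions, the code under test now emits all rows through a single writelines() call rather than one write() per row, so the mocked file handle records one call whose first positional argument is the whole list. A standalone sketch of how mock_open captures that call (the file name and rows here are illustrative, not taken from the test):

from unittest.mock import mock_open

mopen = mock_open()
with mopen("mqtt_dump.txt", "w") as handle:
    # One call carrying every line at once.
    handle.writelines(["bla/1,test1\n", "bla/2,test2\n"])

writes = mopen.return_value.writelines.mock_calls
assert len(writes) == 1
assert writes[0][1][0] == ["bla/1,test1\n", "bla/2,test2\n"]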
@@ -55,8 +55,7 @@ async def fixture_mock_connection(mock_connection_construct):
 @pytest.fixture(name="coils")
 async def fixture_coils(mock_connection: MockConnection):
     """Return a dict with coil data."""
-    # pylint: disable-next=import-outside-toplevel
-    from homeassistant.components.nibe_heatpump import HeatPump
+    from homeassistant.components.nibe_heatpump import HeatPump  # noqa: PLC0415

     get_coils_original = HeatPump.get_coils
     get_coil_by_address_original = HeatPump.get_coil_by_address
@@ -22,7 +22,7 @@ async def test_repair_issue_is_created(
     issue_registry: ir.IssueRegistry,
 ) -> None:
     """Test repair issue is created."""
-    from homeassistant.components.sms import (  # pylint: disable=import-outside-toplevel
+    from homeassistant.components.sms import (  # noqa: PLC0415
         DEPRECATED_ISSUE_ID,
         DOMAIN,
     )
@@ -201,8 +201,7 @@ def pytest_runtest_setup() -> None:

     # Setup HAFakeDatetime converter for pymysql
     try:
-        # pylint: disable-next=import-outside-toplevel
-        import MySQLdb.converters as MySQLdb_converters
+        import MySQLdb.converters as MySQLdb_converters  # noqa: PLC0415
     except ImportError:
         pass
     else:
@@ -1036,7 +1035,7 @@ async def _mqtt_mock_entry(
     """Fixture to mock a delayed setup of the MQTT config entry."""
     # Local import to avoid processing MQTT modules when running a testcase
     # which does not use MQTT.
-    from homeassistant.components import mqtt  # pylint: disable=import-outside-toplevel
+    from homeassistant.components import mqtt  # noqa: PLC0415

     if mqtt_config_entry_data is None:
         mqtt_config_entry_data = {mqtt.CONF_BROKER: "mock-broker"}
@@ -1317,7 +1316,7 @@ def disable_mock_zeroconf_resolver(
 @pytest.fixture
 def mock_zeroconf() -> Generator[MagicMock]:
     """Mock zeroconf."""
-    from zeroconf import DNSCache  # pylint: disable=import-outside-toplevel
+    from zeroconf import DNSCache  # noqa: PLC0415

     with (
         patch("homeassistant.components.zeroconf.HaZeroconf") as mock_zc,
|
|||||||
@pytest.fixture
|
@pytest.fixture
|
||||||
def mock_async_zeroconf(mock_zeroconf: MagicMock) -> Generator[MagicMock]:
|
def mock_async_zeroconf(mock_zeroconf: MagicMock) -> Generator[MagicMock]:
|
||||||
"""Mock AsyncZeroconf."""
|
"""Mock AsyncZeroconf."""
|
||||||
from zeroconf import DNSCache, Zeroconf # pylint: disable=import-outside-toplevel
|
from zeroconf import DNSCache, Zeroconf # noqa: PLC0415
|
||||||
from zeroconf.asyncio import ( # pylint: disable=import-outside-toplevel
|
from zeroconf.asyncio import AsyncZeroconf # noqa: PLC0415
|
||||||
AsyncZeroconf,
|
|
||||||
)
|
|
||||||
|
|
||||||
with patch(
|
with patch(
|
||||||
"homeassistant.components.zeroconf.HaAsyncZeroconf", spec=AsyncZeroconf
|
"homeassistant.components.zeroconf.HaAsyncZeroconf", spec=AsyncZeroconf
|
||||||
@@ -1496,15 +1493,13 @@ def recorder_db_url(
         tmp_path = tmp_path_factory.mktemp("recorder")
         db_url = "sqlite:///" + str(tmp_path / "pytest.db")
     elif db_url.startswith("mysql://"):
-        # pylint: disable-next=import-outside-toplevel
-        import sqlalchemy_utils
+        import sqlalchemy_utils  # noqa: PLC0415

         charset = "utf8mb4' COLLATE = 'utf8mb4_unicode_ci"
         assert not sqlalchemy_utils.database_exists(db_url)
         sqlalchemy_utils.create_database(db_url, encoding=charset)
     elif db_url.startswith("postgresql://"):
-        # pylint: disable-next=import-outside-toplevel
-        import sqlalchemy_utils
+        import sqlalchemy_utils  # noqa: PLC0415

         assert not sqlalchemy_utils.database_exists(db_url)
         sqlalchemy_utils.create_database(db_url, encoding="utf8")
@@ -1512,8 +1507,7 @@ def recorder_db_url(
     if db_url == "sqlite://" and persistent_database:
         rmtree(tmp_path, ignore_errors=True)
     elif db_url.startswith("mysql://"):
-        # pylint: disable-next=import-outside-toplevel
-        import sqlalchemy as sa
+        import sqlalchemy as sa  # noqa: PLC0415

         made_url = sa.make_url(db_url)
         db = made_url.database
@@ -1544,8 +1538,7 @@ async def _async_init_recorder_component(
     wait_setup: bool,
 ) -> None:
     """Initialize the recorder asynchronously."""
-    # pylint: disable-next=import-outside-toplevel
-    from homeassistant.components import recorder
+    from homeassistant.components import recorder  # noqa: PLC0415

     config = dict(add_config) if add_config else {}
     if recorder.CONF_DB_URL not in config:
@@ -1596,21 +1589,16 @@ async def async_test_recorder(
     enable_migrate_event_ids: bool,
 ) -> AsyncGenerator[RecorderInstanceContextManager]:
     """Yield context manager to setup recorder instance."""
-    # pylint: disable-next=import-outside-toplevel
-    from homeassistant.components import recorder
-
-    # pylint: disable-next=import-outside-toplevel
-    from homeassistant.components.recorder import migration
-
-    # pylint: disable-next=import-outside-toplevel
-    from .components.recorder.common import async_recorder_block_till_done
-
-    # pylint: disable-next=import-outside-toplevel
-    from .patch_recorder import real_session_scope
+    from homeassistant.components import recorder  # noqa: PLC0415
+    from homeassistant.components.recorder import migration  # noqa: PLC0415
+
+    from .components.recorder.common import (  # noqa: PLC0415
+        async_recorder_block_till_done,
+    )
+    from .patch_recorder import real_session_scope  # noqa: PLC0415

     if TYPE_CHECKING:
-        # pylint: disable-next=import-outside-toplevel
-        from sqlalchemy.orm.session import Session
+        from sqlalchemy.orm.session import Session  # noqa: PLC0415

     @contextmanager
     def debug_session_scope(
@@ -1857,8 +1845,7 @@ def mock_bleak_scanner_start() -> Generator[MagicMock]:

     # Late imports to avoid loading bleak unless we need it

-    # pylint: disable-next=import-outside-toplevel
-    from habluetooth import scanner as bluetooth_scanner
+    from habluetooth import scanner as bluetooth_scanner  # noqa: PLC0415

     # We need to drop the stop method from the object since we patched
     # out start and this fixture will expire before the stop method is called
@@ -1878,13 +1865,9 @@ def mock_bleak_scanner_start() -> Generator[MagicMock]:
 @pytest.fixture
 def hassio_env(supervisor_is_connected: AsyncMock) -> Generator[None]:
     """Fixture to inject hassio env."""
-    from homeassistant.components.hassio import (  # pylint: disable=import-outside-toplevel
-        HassioAPIError,
-    )
-
-    from .components.hassio import (  # pylint: disable=import-outside-toplevel
-        SUPERVISOR_TOKEN,
-    )
+    from homeassistant.components.hassio import HassioAPIError  # noqa: PLC0415
+
+    from .components.hassio import SUPERVISOR_TOKEN  # noqa: PLC0415

     with (
         patch.dict(os.environ, {"SUPERVISOR": "127.0.0.1"}),
@@ -1906,9 +1889,7 @@ async def hassio_stubs(
     supervisor_client: AsyncMock,
 ) -> RefreshToken:
     """Create mock hassio http client."""
-    from homeassistant.components.hassio import (  # pylint: disable=import-outside-toplevel
-        HassioAPIError,
-    )
+    from homeassistant.components.hassio import HassioAPIError  # noqa: PLC0415

     with (
         patch(
@@ -39,8 +39,9 @@ async def test_get_integration_logger(
 @pytest.mark.usefixtures("enable_custom_integrations", "hass")
 async def test_extract_frame_resolve_module() -> None:
     """Test extracting the current frame from integration context."""
-    # pylint: disable-next=import-outside-toplevel
-    from custom_components.test_integration_frame import call_get_integration_frame
+    from custom_components.test_integration_frame import (  # noqa: PLC0415
+        call_get_integration_frame,
+    )

     integration_frame = call_get_integration_frame()

@@ -56,8 +57,9 @@ async def test_extract_frame_resolve_module() -> None:
 @pytest.mark.usefixtures("enable_custom_integrations", "hass")
 async def test_get_integration_logger_resolve_module() -> None:
     """Test getting the logger from integration context."""
-    # pylint: disable-next=import-outside-toplevel
-    from custom_components.test_integration_frame import call_get_integration_logger
+    from custom_components.test_integration_frame import (  # noqa: PLC0415
+        call_get_integration_logger,
+    )

     logger = call_get_integration_logger(__name__)

@@ -134,8 +134,7 @@ async def test_custom_component_name(hass: HomeAssistant) -> None:
     assert platform.__package__ == "custom_components.test"

     # Test custom components is mounted
-    # pylint: disable-next=import-outside-toplevel
-    from custom_components.test_package import TEST
+    from custom_components.test_package import TEST  # noqa: PLC0415

     assert TEST == 5

@@ -1295,12 +1294,11 @@ async def test_config_folder_not_in_path() -> None:

     # Verify that we are unable to import this file from top level
     with pytest.raises(ImportError):
-        # pylint: disable-next=import-outside-toplevel
-        import check_config_not_in_path  # noqa: F401
+        import check_config_not_in_path  # noqa: F401, PLC0415

     # Verify that we are able to load the file with absolute path
-    # pylint: disable-next=import-outside-toplevel,hass-relative-import
-    import tests.testing_config.check_config_not_in_path  # noqa: F401
+    # pylint: disable-next=hass-relative-import
+    import tests.testing_config.check_config_not_in_path  # noqa: F401, PLC0415


 async def test_async_get_component_preloads_config_and_config_flow(
@@ -36,7 +36,7 @@ def test_validate_python(mock_exit) -> None:
     with patch(
         "sys.version_info",
         new_callable=PropertyMock(
-            return_value=(REQUIRED_PYTHON_VER[0] - 1,) + REQUIRED_PYTHON_VER[1:]
+            return_value=(REQUIRED_PYTHON_VER[0] - 1, *REQUIRED_PYTHON_VER[1:])
        ),
     ):
         main.validate_python()
@@ -55,7 +55,7 @@ def test_validate_python(mock_exit) -> None:
     with patch(
         "sys.version_info",
         new_callable=PropertyMock(
-            return_value=(REQUIRED_PYTHON_VER[:2]) + (REQUIRED_PYTHON_VER[2] + 1,)
+            return_value=(*REQUIRED_PYTHON_VER[:2], REQUIRED_PYTHON_VER[2] + 1)
        ),
     ):
         main.validate_python()
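The last two hunks apply the same unpacking idiom to tuples when building the patched sys.version_info value. The old concatenation and the new in-literal unpacking produce identical tuples, as a quick check shows (the REQUIRED_PYTHON_VER value below is a placeholder, not the real constant):

REQUIRED_PYTHON_VER = (3, 13, 2)  # placeholder for illustration

# One major version too low, both spellings agree.
assert (REQUIRED_PYTHON_VER[0] - 1, *REQUIRED_PYTHON_VER[1:]) == (
    (REQUIRED_PYTHON_VER[0] - 1,) + REQUIRED_PYTHON_VER[1:]
)

# Patch version bumped by one, both spellings agree.
assert (*REQUIRED_PYTHON_VER[:2], REQUIRED_PYTHON_VER[2] + 1) == (
    REQUIRED_PYTHON_VER[:2] + (REQUIRED_PYTHON_VER[2] + 1,)
)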