Add first batch of Ruff PYI rules (#115100)

Co-authored-by: Jan Bouwhuis <jbouwh@users.noreply.github.com>
Sid 2024-04-07 23:30:50 +02:00 committed by GitHub
parent 569f54d8e3
commit 8e98ba7312
GPG Key ID: B5690EEEBB952194
27 changed files with 44 additions and 55 deletions

View File

@@ -3,7 +3,6 @@
 from __future__ import annotations

 from collections.abc import Callable
-from typing import Any

 import voluptuous as vol
@@ -64,7 +63,7 @@ class PolicyPermissions(AbstractPermissions):
         """Return a function that can test entity access."""
         return compile_entities(self._policy.get(CAT_ENTITIES), self._perm_lookup)

-    def __eq__(self, other: Any) -> bool:
+    def __eq__(self, other: object) -> bool:
         """Equals check."""
         return isinstance(other, PolicyPermissions) and other._policy == self._policy
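
The change from Any to object in __eq__ follows flake8-pyi's PYI032, which asks that __eq__ and __ne__ accept object so comparisons against arbitrary values still type-check; the body then narrows with isinstance. A minimal sketch of the idiom (the Token class is illustrative, not part of this commit):

    class Token:
        """Illustrative value object using the PYI032-style signature."""

        def __init__(self, value: str) -> None:
            self.value = value

        def __eq__(self, other: object) -> bool:
            # Accept any object, then narrow before touching attributes.
            return isinstance(other, Token) and self.value == other.value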

View File

@@ -587,7 +587,7 @@ class PipelineRun:
             self.audio_settings.noise_suppression_level,
         )

-    def __eq__(self, other: Any) -> bool:
+    def __eq__(self, other: object) -> bool:
         """Compare pipeline runs by id."""
         if isinstance(other, PipelineRun):
             return self.id == other.id

View File

@@ -1,7 +1,6 @@
 """Provides data updates from the Control4 controller for platforms."""

 from collections import defaultdict
-from collections.abc import Set
 import logging
 from typing import Any
@@ -20,7 +19,7 @@ _LOGGER = logging.getLogger(__name__)

 async def _update_variables_for_config_entry(
-    hass: HomeAssistant, entry: ConfigEntry, variable_names: Set[str]
+    hass: HomeAssistant, entry: ConfigEntry, variable_names: set[str]
 ) -> dict[int, dict[str, Any]]:
     """Retrieve data from the Control4 director."""
     director: C4Director = hass.data[DOMAIN][entry.entry_id][CONF_DIRECTOR]
@@ -32,7 +31,7 @@ async def _update_variables_for_config_entry(

 async def update_variables_for_config_entry(
-    hass: HomeAssistant, entry: ConfigEntry, variable_names: Set[str]
+    hass: HomeAssistant, entry: ConfigEntry, variable_names: set[str]
 ) -> dict[int, dict[str, Any]]:
     """Try to Retrieve data from the Control4 director for update_coordinator."""
     try:
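
Annotating with the bare collections.abc.Set is what flake8-pyi's PYI025 flags, since the name is easily confused with the builtin set; the rule's own suggestion is to import it as AbstractSet, while this commit instead narrows the annotation to the builtin set[str] that callers pass. A hedged sketch of both options (the function names are placeholders):

    from collections.abc import Set as AbstractSet

    # Option taken in this commit: the concrete builtin type.
    def update_names(variable_names: set[str]) -> None: ...

    # Alternative that also satisfies PYI025: keep the read-only abstract
    # set interface, imported under an unambiguous alias.
    def update_names_readonly(variable_names: AbstractSet[str]) -> None: ...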

View File

@@ -33,8 +33,6 @@ from .const import ATTR_DARK, ATTR_ON
 from .deconz_device import DeconzDevice
 from .hub import DeconzHub

-_SensorDeviceT = TypeVar("_SensorDeviceT", bound=PydeconzSensorBase)
-
 ATTR_ORIENTATION = "orientation"
 ATTR_TILTANGLE = "tiltangle"
 ATTR_VIBRATIONSTRENGTH = "vibrationstrength"

View File

@@ -50,7 +50,7 @@ async def async_setup_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> b
         departure=departure,
     )

-    cls: type[HERETransitDataUpdateCoordinator] | type[HERERoutingDataUpdateCoordinator]
+    cls: type[HERETransitDataUpdateCoordinator | HERERoutingDataUpdateCoordinator]
     if config_entry.data[CONF_MODE] in {TRAVEL_MODE_PUBLIC, "publicTransportTimeTable"}:
         cls = HERETransitDataUpdateCoordinator
     else:
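
Several files in this commit collapse type[A] | type[B] into type[A | B] (flake8-pyi's PYI055); both spellings accept the same class objects, the second is simply shorter. A quick illustration with stand-in classes:

    class RoutingCoordinator: ...
    class TransitCoordinator: ...

    # Equivalent annotations; PYI055 prefers the single type[...] form.
    cls: type[RoutingCoordinator | TransitCoordinator]

    cls = TransitCoordinator  # the variable holds a class, not an instance
    coordinator = cls()       # instantiate whichever class was selected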

View File

@@ -192,7 +192,7 @@ class DeclarativeCharacteristicSwitch(CharacteristicEntity, SwitchEntity):
 )

-ENTITY_TYPES: dict[str, type[HomeKitSwitch] | type[HomeKitValve]] = {
+ENTITY_TYPES: dict[str, type[HomeKitSwitch | HomeKitValve]] = {
     ServicesTypes.SWITCH: HomeKitSwitch,
     ServicesTypes.OUTLET: HomeKitSwitch,
     ServicesTypes.VALVE: HomeKitValve,

View File

@@ -38,7 +38,7 @@ _LOGGER = logging.getLogger(__name__)

 def setup_decrypt(
-    key_encoder: type[RawEncoder] | type[HexEncoder],
+    key_encoder: type[RawEncoder | HexEncoder],
 ) -> Callable[[bytes, bytes], bytes]:
     """Return decryption function and length of key.
@@ -55,7 +55,7 @@ def setup_decrypt(

 def setup_encrypt(
-    key_encoder: type[RawEncoder] | type[HexEncoder],
+    key_encoder: type[RawEncoder | HexEncoder],
 ) -> Callable[[bytes, bytes], bytes]:
     """Return encryption function and length of key.
@@ -75,7 +75,7 @@ def _decrypt_payload_helper(
     key: str | bytes,
     ciphertext: bytes,
     key_bytes: bytes,
-    key_encoder: type[RawEncoder] | type[HexEncoder],
+    key_encoder: type[RawEncoder | HexEncoder],
 ) -> JsonValueType | None:
     """Decrypt encrypted payload."""
     try:

View File

@@ -6,7 +6,7 @@ import asyncio
 from collections.abc import Callable
 from datetime import datetime
 import logging
-from typing import TYPE_CHECKING, Any, TypeVar, cast
+from typing import TYPE_CHECKING, Any, cast

 import voluptuous as vol
@@ -143,8 +143,6 @@ CONFIG_ENTRY_CONFIG_KEYS = [
     CONF_WILL_MESSAGE,
 ]

-_T = TypeVar("_T")
-
 REMOVED_OPTIONS = vol.All(
     cv.removed(CONF_BIRTH_MESSAGE),  # Removed in HA Core 2023.4
     cv.removed(CONF_BROKER),  # Removed in HA Core 2023.4
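
Here and in several later files the commit deletes module-private TypeVars (and one ParamSpec) that were declared but never used in an annotation, the pattern flake8-pyi's PYI018 targets. A type variable only earns its keep when it links one annotation to another, for example:

    from typing import TypeVar

    _T = TypeVar("_T")

    # _T ties the element type of the argument to the return type, so it is
    # genuinely used; an unreferenced type variable is just dead code.
    def first(items: list[_T]) -> _T:
        return items[0]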

View File

@@ -85,7 +85,7 @@ class NOAATidesData(TypedDict):
     """Representation of a single tide."""

     time_stamp: list[Timestamp]
-    hi_lo: list[Literal["L"] | Literal["H"]]
+    hi_lo: list[Literal["L", "H"]]
     predicted_wl: list[float]
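
A union of single-value Literals can be folded into one Literal with several members (flake8-pyi's PYI030); the recorder hunks below apply the same rewrite to Literal["mean"] | Literal["sum"]. The two forms are equivalent to a type checker:

    from typing import Literal

    HiLoOld = Literal["L"] | Literal["H"]  # union of single-value Literals
    HiLo = Literal["L", "H"]               # same thing, the PYI030-preferred form

    def describe(marker: HiLo) -> str:
        return "high tide" if marker == "H" else "low tide"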

View File

@@ -7,7 +7,6 @@ from dataclasses import dataclass
 import datetime
 from functools import cached_property
 import logging
-from typing import TypeVar

 import aiohttp
 from pyrainbird.async_client import (
@@ -39,8 +38,6 @@ CONECTION_LIMIT = 1

 _LOGGER = logging.getLogger(__name__)

-_T = TypeVar("_T")
-
 @dataclass
 class RainbirdDeviceState:

View File

@@ -684,7 +684,7 @@ def get_metadata_with_session(
     session: Session,
     *,
     statistic_ids: set[str] | None = None,
-    statistic_type: Literal["mean"] | Literal["sum"] | None = None,
+    statistic_type: Literal["mean", "sum"] | None = None,
     statistic_source: str | None = None,
 ) -> dict[str, tuple[int, StatisticMetaData]]:
     """Fetch meta data.
@@ -705,7 +705,7 @@ def get_metadata(
     hass: HomeAssistant,
     *,
     statistic_ids: set[str] | None = None,
-    statistic_type: Literal["mean"] | Literal["sum"] | None = None,
+    statistic_type: Literal["mean", "sum"] | None = None,
     statistic_source: str | None = None,
 ) -> dict[str, tuple[int, StatisticMetaData]]:
     """Return metadata for statistic_ids."""
@@ -753,7 +753,7 @@ def update_statistics_metadata(

 async def async_list_statistic_ids(
     hass: HomeAssistant,
     statistic_ids: set[str] | None = None,
-    statistic_type: Literal["mean"] | Literal["sum"] | None = None,
+    statistic_type: Literal["mean", "sum"] | None = None,
 ) -> list[dict]:
     """Return all statistic_ids (or filtered one) and unit of measurement.
@@ -823,7 +823,7 @@ def _flatten_list_statistic_ids_metadata_result(

 def list_statistic_ids(
     hass: HomeAssistant,
     statistic_ids: set[str] | None = None,
-    statistic_type: Literal["mean"] | Literal["sum"] | None = None,
+    statistic_type: Literal["mean", "sum"] | None = None,
 ) -> list[dict]:
     """Return all statistic_ids (or filtered one) and unit of measurement.

View File

@@ -36,7 +36,7 @@ QUERY_STATISTIC_META = (

 def _generate_get_metadata_stmt(
     statistic_ids: set[str] | None = None,
-    statistic_type: Literal["mean"] | Literal["sum"] | None = None,
+    statistic_type: Literal["mean", "sum"] | None = None,
     statistic_source: str | None = None,
 ) -> StatementLambdaElement:
     """Generate a statement to fetch metadata."""
@@ -88,7 +88,7 @@ class StatisticsMetaManager:
         self,
         session: Session,
         statistic_ids: set[str] | None = None,
-        statistic_type: Literal["mean"] | Literal["sum"] | None = None,
+        statistic_type: Literal["mean", "sum"] | None = None,
         statistic_source: str | None = None,
     ) -> dict[str, tuple[int, StatisticMetaData]]:
         """Fetch meta data and process it into results and/or cache."""
@@ -202,7 +202,7 @@ class StatisticsMetaManager:
         self,
         session: Session,
         statistic_ids: set[str] | None = None,
-        statistic_type: Literal["mean"] | Literal["sum"] | None = None,
+        statistic_type: Literal["mean", "sum"] | None = None,
         statistic_source: str | None = None,
     ) -> dict[str, tuple[int, StatisticMetaData]]:
         """Fetch meta data.

View File

@@ -235,7 +235,7 @@ async def ws_get_statistics_during_period(

 def _ws_get_list_statistic_ids(
     hass: HomeAssistant,
     msg_id: int,
-    statistic_type: Literal["mean"] | Literal["sum"] | None = None,
+    statistic_type: Literal["mean", "sum"] | None = None,
 ) -> bytes:
     """Fetch a list of available statistic_id and convert them to JSON.

View File

@@ -14,7 +14,7 @@ from dataclasses import dataclass
 import enum
 import logging
 import re
-from typing import TYPE_CHECKING, Any, ParamSpec, TypeVar, overload
+from typing import TYPE_CHECKING, Any, TypeVar, overload

 import voluptuous as vol
 import zigpy.exceptions
@@ -59,14 +59,10 @@ from .const import CLUSTER_TYPE_IN, CLUSTER_TYPE_OUT, CUSTOM_CONFIGURATION, DATA
 from .registries import BINDABLE_CLUSTERS

 if TYPE_CHECKING:
-    from .cluster_handlers import ClusterHandler
     from .device import ZHADevice
     from .gateway import ZHAGateway

-_ClusterHandlerT = TypeVar("_ClusterHandlerT", bound="ClusterHandler")
 _T = TypeVar("_T")
-_R = TypeVar("_R")
-_P = ParamSpec("_P")

 _LOGGER = logging.getLogger(__name__)
@@ -508,9 +504,9 @@ def validate_device_class(

 def validate_device_class(
-    device_class_enum: type[BinarySensorDeviceClass]
-    | type[SensorDeviceClass]
-    | type[NumberDeviceClass],
+    device_class_enum: type[
+        BinarySensorDeviceClass | SensorDeviceClass | NumberDeviceClass
+    ],
     metadata_value: enum.Enum,
     platform: str,
     logger: logging.Logger,

View File

@@ -381,7 +381,7 @@ class HomeAssistant:
     http: HomeAssistantHTTP = None  # type: ignore[assignment]
     config_entries: ConfigEntries = None  # type: ignore[assignment]

-    def __new__(cls, config_dir: str) -> HomeAssistant:
+    def __new__(cls, config_dir: str) -> Self:
         """Set the _hass thread local data."""
         hass = super().__new__(cls)
         _hass.hass = hass
@@ -1168,9 +1168,9 @@ class Context:
         self.parent_id = parent_id
         self.origin_event: Event[Any] | None = None

-    def __eq__(self, other: Any) -> bool:
+    def __eq__(self, other: object) -> bool:
         """Compare contexts."""
-        return bool(self.__class__ == other.__class__ and self.id == other.id)
+        return isinstance(other, Context) and self.id == other.id

     @cached_property
     def _as_dict(self) -> dict[str, str | None]:
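
Returning Self from __new__ is what flake8-pyi's PYI034 asks for: the declared return type then stays accurate for subclasses instead of hard-coding the defining class (the same change is applied to TupleWrapper.__new__ further down). A small sketch with an illustrative class:

    from typing import Self  # Python 3.11+

    class Registry:
        def __new__(cls) -> Self:
            # Self resolves to whichever class is being constructed,
            # whereas "-> Registry" would always claim the base class.
            return super().__new__(cls)

    class SubRegistry(Registry): ...

    sub = SubRegistry()  # type checkers infer SubRegistry, not Registry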

View File

@@ -57,7 +57,7 @@ class HassHttpXAsyncClient(httpx.AsyncClient):
         """Prevent an integration from reopen of the client via context manager."""
         return self

-    async def __aexit__(self, *args: Any) -> None:
+    async def __aexit__(self, *args: object) -> None:
         """Prevent an integration from close of the client via context manager."""

View File

@@ -10,7 +10,7 @@ from dataclasses import dataclass
 from enum import Enum
 from functools import cached_property
 import logging
-from typing import Any, TypeVar
+from typing import Any

 import voluptuous as vol
@@ -34,7 +34,6 @@ from . import (

 _LOGGER = logging.getLogger(__name__)
 _SlotsType = dict[str, Any]
-_T = TypeVar("_T")

 INTENT_TURN_OFF = "HassTurnOff"
 INTENT_TURN_ON = "HassTurnOn"

View File

@@ -156,7 +156,7 @@ async def async_integration_yaml_config(
     hass: HomeAssistant,
     integration_name: str,
     *,
-    raise_on_failure: Literal[False] | bool,
+    raise_on_failure: Literal[False],
 ) -> ConfigType | None: ...

View File

@@ -28,6 +28,7 @@ from typing import (
     Literal,
     NoReturn,
     ParamSpec,
+    Self,
     TypeVar,
     cast,
     overload,
@@ -310,7 +311,7 @@ class TupleWrapper(tuple, ResultWrapper):

     # This is all magic to be allowed to subclass a tuple.

-    def __new__(cls, value: tuple, *, render_result: str | None = None) -> TupleWrapper:
+    def __new__(cls, value: tuple, *, render_result: str | None = None) -> Self:
         """Create a new tuple class."""
         return super().__new__(cls, tuple(value))
@@ -1102,7 +1103,7 @@ class TemplateStateBase(State):
             return f"{state} {unit}"
         return state

-    def __eq__(self, other: Any) -> bool:
+    def __eq__(self, other: object) -> bool:
         """Ensure we collect on equality check."""
         self._collect_state()
         return self._state.__eq__(other)

View File

@@ -3,7 +3,7 @@
 from collections.abc import Mapping
 from enum import Enum
 from functools import partial
-from typing import Any, TypeVar
+from typing import Any

 import homeassistant.core
@@ -14,8 +14,6 @@ from .deprecation import (
     dir_with_deprecated_constants,
 )

-_DataT = TypeVar("_DataT")
-
 GPSType = tuple[float, float]
 ConfigType = dict[str, Any]
 DiscoveryInfoType = dict[str, Any]

View File

@@ -188,7 +188,7 @@ def parse_datetime(dt_str: str, *, raise_on_error: Literal[True]) -> dt.datetime

 @overload
 def parse_datetime(
-    dt_str: str, *, raise_on_error: Literal[False] | bool
+    dt_str: str, *, raise_on_error: Literal[False]
 ) -> dt.datetime | None: ...
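
In this overload and the async_integration_yaml_config one above, Literal[False] | bool pairs a literal with a type that already contains it, the kind of redundant union flake8-pyi reports (PYI051 covers it); the fix keeps the narrow Literal[False] this overload is meant to match, leaving the Literal[True] overload to handle the other case. A sketch of the overall overload pattern, using a hypothetical parse function:

    from datetime import datetime
    from typing import Literal, overload

    @overload
    def parse(value: str, *, strict: Literal[True]) -> datetime: ...
    @overload
    def parse(value: str, *, strict: Literal[False] = False) -> datetime | None: ...

    def parse(value: str, *, strict: bool = False) -> datetime | None:
        try:
            return datetime.fromisoformat(value)
        except ValueError:
            if strict:
                raise
            return None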

View File

@@ -19,7 +19,7 @@ class _SignalTypeBase(Generic[*_Ts]):
         return hash(self.name)

-    def __eq__(self, other: Any) -> bool:
+    def __eq__(self, other: object) -> bool:
         """Check equality for dict keys to be compatible with str."""
         if isinstance(other, str):

View File

@@ -276,7 +276,7 @@ def _parse_yaml_python(

 def _parse_yaml(
-    loader: type[FastSafeLoader] | type[PythonSafeLoader],
+    loader: type[FastSafeLoader | PythonSafeLoader],
     content: str | TextIO,
     secrets: Secrets | None = None,
 ) -> JSON_TYPE:

View File

@@ -638,6 +638,7 @@ select = [
     "PIE", # flake8-pie
     "PL", # pylint
     "PT", # flake8-pytest-style
+    "PYI", # flake8-pyi
     "RET", # flake8-return
     "RSE", # flake8-raise
     "RUF005", # Consider iterable unpacking instead of concatenation
@@ -719,6 +720,9 @@ ignore = [
     # temporarily disabled
     "PT019",
+    "PYI024", # Use typing.NamedTuple instead of collections.namedtuple
+    "PYI036",
+    "PYI041",
     "RET503",
     "RET502",
     "RET501",

View File

@@ -1527,12 +1527,12 @@ class _HA_ANY:

     _other = _SENTINEL

-    def __eq__(self, other: Any) -> bool:
+    def __eq__(self, other: object) -> bool:
         """Test equal."""
         self._other = other
         return True

-    def __ne__(self, other: Any) -> bool:
+    def __ne__(self, other: object) -> bool:
         """Test not equal."""
         self._other = other
         return False

View File

@@ -653,7 +653,7 @@ class LazyState(State):
             "last_updated": last_updated_isoformat,
         }

-    def __eq__(self, other: Any) -> bool:
+    def __eq__(self, other: object) -> bool:
         """Return the comparison."""
         return (
             other.__class__ in [self.__class__, State]

View File

@@ -818,7 +818,7 @@ class LazyState(State):
             "last_updated": last_updated_isoformat,
         }

-    def __eq__(self, other: Any) -> bool:
+    def __eq__(self, other: object) -> bool:
         """Return the comparison."""
         return (
             other.__class__ in [self.__class__, State]