Mirror of https://github.com/home-assistant/core.git
Pylint cleanups (#35409)
* Avoid some outer name redefinitions
* Remove unneeded directives
* Narrow directive scope
* Don't disable redefined-variable-type
commit b4404b071f (parent c37100299a)
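A recurring pattern in this commit is narrowing pylint directive scope: a standalone # pylint: disable comment suppresses a check from that line to the end of the enclosing block, while an end-of-line directive suppresses it only for the statement it is attached to. A minimal sketch of the two forms follows; the _Host class and function names are invented for illustration and are not code from this commit.

class _Host:
    """Hypothetical helper with a "private" attribute, used only to trigger the check."""

    def __init__(self) -> None:
        self._throttle: dict = {}


def broad(host: _Host) -> dict:
    """Standalone directive: exempts this line and everything after it in the function."""
    # pylint: disable=protected-access
    return host._throttle


def narrow(host: _Host) -> dict:
    """End-of-line directive: exempts only this one statement."""
    return host._throttle  # pylint: disable=protected-access


if __name__ == "__main__":
    print(broad(_Host()), narrow(_Host()))  # {} {}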
@@ -61,8 +61,7 @@ class CommandLineAuthProvider(AuthProvider):
         """Validate a username and password."""
         env = {"username": username, "password": password}
         try:
-            # pylint: disable=no-member
-            process = await asyncio.subprocess.create_subprocess_exec(
+            process = await asyncio.subprocess.create_subprocess_exec(  # pylint: disable=no-member
                 self.config[CONF_COMMAND],
                 *self.config[CONF_ARGS],
                 env=env,

@@ -138,8 +138,9 @@ class Data:
         if not bcrypt.checkpw(password.encode(), user_hash):
             raise InvalidAuth

-    # pylint: disable=no-self-use
-    def hash_password(self, password: str, for_storage: bool = False) -> bytes:
+    def hash_password(  # pylint: disable=no-self-use
+        self, password: str, for_storage: bool = False
+    ) -> bytes:
         """Encode a password."""
         hashed: bytes = bcrypt.hashpw(password.encode(), bcrypt.gensalt(rounds=12))

@@ -1,5 +1,4 @@
 """Module to help with parsing and generating configuration files."""
-# pylint: disable=no-name-in-module
 from collections import OrderedDict
 from distutils.version import LooseVersion  # pylint: disable=import-error
 import logging

@@ -183,9 +182,9 @@ CORE_CONFIG_SCHEMA = CUSTOMIZE_CONFIG_SCHEMA.extend(
         CONF_TIME_ZONE: cv.time_zone,
         vol.Optional(CONF_INTERNAL_URL): cv.url,
         vol.Optional(CONF_EXTERNAL_URL): cv.url,
-        vol.Optional(CONF_WHITELIST_EXTERNAL_DIRS):
-        # pylint: disable=no-value-for-parameter
-        vol.All(cv.ensure_list, [vol.IsDir()]),
+        vol.Optional(CONF_WHITELIST_EXTERNAL_DIRS): vol.All(
+            cv.ensure_list, [vol.IsDir()]  # pylint: disable=no-value-for-parameter
+        ),
         vol.Optional(CONF_PACKAGES, default={}): PACKAGES_CONFIG_SCHEMA,
         vol.Optional(CONF_AUTH_PROVIDERS): vol.All(
             cv.ensure_list,

@@ -851,8 +851,7 @@ class ConfigFlow(data_entry_flow.FlowHandler):
                 if progress["context"].get("unique_id") == unique_id:
                     raise data_entry_flow.AbortFlow("already_in_progress")

-        # pylint: disable=no-member
-        self.context["unique_id"] = unique_id
+        self.context["unique_id"] = unique_id  # pylint: disable=no-member

         for entry in self._async_current_entries():
             if entry.unique_id == unique_id:

@@ -26,7 +26,6 @@ from typing import (
 )

 # Typing imports that create a circular dependency
-# pylint: disable=unused-import
 if TYPE_CHECKING:
     from homeassistant.core import HomeAssistant

@@ -24,11 +24,9 @@ import slugify as unicode_slug

 from .dt import as_local, utcnow

-# pylint: disable=invalid-name
 T = TypeVar("T")
-U = TypeVar("U")
-ENUM_T = TypeVar("ENUM_T", bound=enum.Enum)
-# pylint: enable=invalid-name
+U = TypeVar("U")  # pylint: disable=invalid-name
+ENUM_T = TypeVar("ENUM_T", bound=enum.Enum)  # pylint: disable=invalid-name

 RE_SANITIZE_FILENAME = re.compile(r"(~|\.\.|/|\\)")
 RE_SANITIZE_PATH = re.compile(r"(~|\.(\.)+)")

@@ -214,7 +212,6 @@ class Throttle:

             If we cannot acquire the lock, it is running so return None.
             """
-            # pylint: disable=protected-access
             if hasattr(method, "__self__"):
                 host = getattr(method, "__self__")
             elif is_func:

@@ -222,12 +219,14 @@ class Throttle:
             else:
                 host = args[0] if args else wrapper

+            # pylint: disable=protected-access  # to _throttle
             if not hasattr(host, "_throttle"):
                 host._throttle = {}

             if id(self) not in host._throttle:
                 host._throttle[id(self)] = [threading.Lock(), None]
             throttle = host._throttle[id(self)]
+            # pylint: enable=protected-access

             if not throttle[0].acquire(False):
                 return throttled_value()

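The Throttle hunk above uses a third form: a disable/enable pair that fences only the region touching the protected _throttle attribute. A toy sketch of that fencing, with invented names, assuming the access happens on another object's protected member (which is what protected-access reports):

class _Registry:
    """Hypothetical container with a protected attribute."""

    def __init__(self) -> None:
        self._entries: dict = {}


def register(reg: _Registry, key: str) -> int:
    """Touch the protected attribute only inside an explicitly fenced region."""
    # pylint: disable=protected-access
    reg._entries.setdefault(key, []).append(True)
    count = len(reg._entries)
    # pylint: enable=protected-access
    return count  # from here on the check applies again


print(register(_Registry(), "light.kitchen"))  # 1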
@@ -244,9 +244,11 @@ def parse_time_expression(parameter: Any, min_value: int, max_value: int) -> Lis
     return res


-# pylint: disable=redefined-outer-name
 def find_next_time_expression_time(
-    now: dt.datetime, seconds: List[int], minutes: List[int], hours: List[int]
+    now: dt.datetime,  # pylint: disable=redefined-outer-name
+    seconds: List[int],
+    minutes: List[int],
+    hours: List[int],
 ) -> dt.datetime:
     """Find the next datetime from now for which the time expression matches.

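For context on the directive kept in the hunk above: redefined-outer-name fires when a parameter or local variable shadows a name that is already bound in an outer (typically module) scope. A small illustrative sketch with invented names, not taken from this diff:

import datetime as dt

now = dt.datetime.now()  # module-level binding of "now"


def seconds_until(now: dt.datetime, target: dt.datetime) -> float:
    # The parameter "now" shadows the module-level "now" above, which is what
    # redefined-outer-name reports; the diff silences it for the one parameter
    # line instead of for the whole function.
    return (target - now).total_seconds()


print(seconds_until(dt.datetime(2020, 5, 1), dt.datetime(2020, 5, 2)))  # 86400.0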
@@ -56,7 +56,6 @@ def save_json(
     try:
         json_data = json.dumps(data, sort_keys=True, indent=4, cls=encoder)
     except TypeError:
-        # pylint: disable=no-member
         msg = f"Failed to serialize to JSON: {filename}. Bad data at {format_unserializable_data(find_paths_unserializable_data(data))}"
         _LOGGER.error(msg)
         raise SerializationError(msg)

@@ -80,7 +80,6 @@ def distance(
 # Author: https://github.com/maurycyp
 # Source: https://github.com/maurycyp/vincenty
 # License: https://github.com/maurycyp/vincenty/blob/master/LICENSE
-# pylint: disable=invalid-name
 def vincenty(
     point1: Tuple[float, float], point2: Tuple[float, float], miles: bool = False
 ) -> Optional[float]:

@@ -96,6 +95,7 @@ def vincenty(
     if point1[0] == point2[0] and point1[1] == point2[1]:
         return 0.0

+    # pylint: disable=invalid-name
     U1 = math.atan((1 - FLATTENING) * math.tan(math.radians(point1[0])))
     U2 = math.atan((1 - FLATTENING) * math.tan(math.radians(point2[0])))
     L = math.radians(point2[1] - point1[1])

@@ -24,7 +24,6 @@ class HideSensitiveDataFilter(logging.Filter):
         return True


-# pylint: disable=invalid-name
 class AsyncHandler:
     """Logging handler wrapper to add an async layer."""

@@ -36,6 +35,7 @@ class AsyncHandler:
         self._thread = threading.Thread(target=self._process)

         # Delegate from handler
+        # pylint: disable=invalid-name
         self.setLevel = handler.setLevel
         self.setFormatter = handler.setFormatter
         self.addFilter = handler.addFilter

@@ -94,7 +94,7 @@ class AsyncHandler:
         except asyncio.CancelledError:
             self.handler.close()

-    def createLock(self) -> None:
+    def createLock(self) -> None:  # pylint: disable=invalid-name
         """Ignore lock stuff."""

     def acquire(self) -> None:

@@ -24,29 +24,28 @@ def save_yaml(path: str, data: dict) -> None:


 # From: https://gist.github.com/miracle2k/3184458
-# pylint: disable=redefined-outer-name
 def represent_odict(  # type: ignore
-    dump, tag, mapping, flow_style=None
+    dumper, tag, mapping, flow_style=None
 ) -> yaml.MappingNode:
     """Like BaseRepresenter.represent_mapping but does not issue the sort()."""
     value: list = []
     node = yaml.MappingNode(tag, value, flow_style=flow_style)
-    if dump.alias_key is not None:
-        dump.represented_objects[dump.alias_key] = node
+    if dumper.alias_key is not None:
+        dumper.represented_objects[dumper.alias_key] = node
     best_style = True
     if hasattr(mapping, "items"):
         mapping = mapping.items()
     for item_key, item_value in mapping:
-        node_key = dump.represent_data(item_key)
-        node_value = dump.represent_data(item_value)
+        node_key = dumper.represent_data(item_key)
+        node_value = dumper.represent_data(item_value)
         if not (isinstance(node_key, yaml.ScalarNode) and not node_key.style):
             best_style = False
         if not (isinstance(node_value, yaml.ScalarNode) and not node_value.style):
             best_style = False
         value.append((node_key, node_value))
     if flow_style is None:
-        if dump.default_flow_style is not None:
-            node.flow_style = dump.default_flow_style
+        if dumper.default_flow_style is not None:
+            node.flow_style = dumper.default_flow_style
     else:
         node.flow_style = best_style
     return node

@@ -88,9 +88,6 @@ def _add_reference(
     ...


-# pylint: enable=pointless-statement
-
-
 def _add_reference(  # type: ignore
     obj, loader: SafeLineLoader, node: yaml.nodes.Node
 ):

pylintrc
@@ -18,7 +18,6 @@ good-names=id,i,j,k,ex,Run,_,fp,T
 # cyclic-import - doesn't test if both import on load
 # abstract-class-little-used - prevents from setting right foundation
 # unused-argument - generic callbacks and setup methods create a lot of warnings
-# redefined-variable-type - this is Python, we're duck typing!
 # too-many-* - are not enforced for the sake of readability
 # too-few-* - same as too-many-*
 # abstract-method - with intro of async there are always methods missing

@@ -34,7 +33,6 @@ disable=
   inconsistent-return-statements,
   locally-disabled,
   not-context-manager,
-  redefined-variable-type,
   too-few-public-methods,
   too-many-ancestors,
   too-many-arguments,
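The pylintrc hunks stop disabling redefined-variable-type globally; in current pylint releases that check is provided by the optional pylint.extensions.redefined_variable_type plugin rather than being on by default. A hedged sketch of the kind of rebinding it flags, with invented function names:

def parse_port(raw: str) -> int:
    """Rebinds one name to two different types; redefined-variable-type reports this."""
    port = raw.strip()  # port is bound to a str here
    port = int(port)  # ...and rebound to an int here, which the check flags
    return port


def parse_port_clean(raw: str) -> int:
    """Uses one type per name, which keeps the check quiet."""
    return int(raw.strip())


print(parse_port(" 8123 "), parse_port_clean(" 8123 "))  # 8123 8123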