Update mypy to 1.7.0 (#103800)
commit a70ec64408 (parent 66d1a7f1dd)
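Bumping mypy from 1.6.1 to 1.7.0 changes both what the checker reports and what it accepts: 1.7 narrows tuple types from len() checks, splits overlapping overloads out of the catch-all misc error code into the new overload-overlap code, and resolves several long-standing inference issues. Because unused ignores are themselves treated as errors in this codebase, every `# type: ignore` that 1.7 no longer needs must be deleted or renamed, and a few new ones are required where 1.7 is stricter. The hunks below do exactly that.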
@@ -398,7 +398,7 @@ def async_enable_logging(
     logging.getLogger("httpx").setLevel(logging.WARNING)
     sys.excepthook = lambda *args: logging.getLogger(None).exception(
-        "Uncaught exception", exc_info=args  # type: ignore[arg-type]
+        "Uncaught exception", exc_info=args
     )
     threading.excepthook = lambda args: logging.getLogger(None).exception(
         "Uncaught thread exception",
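mypy 1.7 accepts the hook's argument tuple for logging's exc_info= parameter, so the arg-type ignore would now itself be flagged as unused. A minimal standalone sketch of the same pattern:

    import logging
    import sys

    logging.basicConfig(level=logging.ERROR)

    # Route uncaught exceptions through the root logger; the hook's
    # (type, value, traceback) tuple is exactly what exc_info= accepts.
    sys.excepthook = lambda *args: logging.getLogger(None).exception(
        "Uncaught exception", exc_info=args
    )

    # raise RuntimeError("boom")  # would now be logged, not dumped raw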
@@ -93,9 +93,7 @@ class AssistPipelineSelect(SelectEntity, restore_state.RestoreEntity):
         if self.registry_entry and (device_id := self.registry_entry.device_id):
             pipeline_data.pipeline_devices.add(device_id)
             self.async_on_remove(
-                lambda: pipeline_data.pipeline_devices.discard(
-                    device_id  # type: ignore[arg-type]
-                )
+                lambda: pipeline_data.pipeline_devices.discard(device_id)
             )

     async def async_select_option(self, option: str) -> None:
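Dropping the ignore lets the lambda collapse back to one line: device_id is bound with a walrus in the enclosing if, and the commit shows mypy 1.7 now accepting the captured name as a plain str inside the cleanup closure. A self-contained sketch of the register/cleanup shape (names are illustrative):

    registered: set[str] = set()

    def register(entry_device_id: str | None) -> None:
        # Bind and null-check in one step, as the hunk above does.
        if entry_device_id and (device_id := entry_device_id):
            registered.add(device_id)
            # The cleanup closure captures the narrowed str.
            cleanup = lambda: registered.discard(device_id)
            cleanup()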
@@ -126,10 +126,10 @@ async def async_remove_config_entry_device(
     for identifier in device_entry.identifiers:
         if identifier[0] != DOMAIN or len(identifier) != 3:
             continue
-        bond_id: str = identifier[1]
+        bond_id: str = identifier[1]  # type: ignore[unreachable]
         # Bond still uses the 3 arg tuple before
         # the identifiers were typed
-        device_id: str = identifier[2]  # type: ignore[misc]
+        device_id: str = identifier[2]
         # If device_id is no longer present on
         # the hub, we allow removal.
         if hub.bond_id != bond_id or not any(
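The ignore moves because mypy 1.7 narrows tuple types using len() checks: identifiers are declared as 2-tuples, so after `len(identifier) != 3` hits continue, mypy considers the rest of the loop body unreachable, even though Bond's legacy 3-tuples still occur at runtime. Hence the new [unreachable] ignore, while the old [misc] one on the index becomes redundant. A hedged sketch of the new narrowing, with illustrative names:

    # Requires mypy >= 1.7 for the len() narrowing; runs on any Python 3.
    def describe(ident: tuple[str, str] | tuple[str, str, str]) -> str:
        if len(ident) != 3:
            # mypy 1.7 narrows ident to tuple[str, str] here
            return f"two-part identifier: {ident[0]}/{ident[1]}"
        # ...and to tuple[str, str, str] here
        return f"three-part identifier ending in {ident[2]}"

    print(describe(("bond", "ZZBL12345")))
    print(describe(("bond", "ZZBL12345", "device-1")))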
@@ -12,7 +12,7 @@ _T = TypeVar("_T")


 @overload
-def async_redact_data(data: Mapping, to_redact: Iterable[Any]) -> dict:  # type: ignore[misc]
+def async_redact_data(data: Mapping, to_redact: Iterable[Any]) -> dict:  # type: ignore[overload-overlap]
     ...

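mypy 1.7 gives unsafely overlapping overloads their own error code, overload-overlap, instead of the catch-all misc, so targeted ignores must be renamed. A minimal sketch that trips the new code (an illustrative function, not from the codebase):

    from typing import overload

    @overload
    def describe(value: int) -> str:  # type: ignore[overload-overlap]
        ...

    @overload
    def describe(value: object) -> int:
        ...

    def describe(value: object) -> str | int:
        # An int matches both overloads but the return types disagree,
        # which is exactly what [overload-overlap] reports.
        return str(value) if isinstance(value, int) else len(repr(value))

    print(describe(3), describe("x"))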
@@ -28,18 +28,13 @@ def all_stmt(
     )
     if context_id_bin is not None:
         stmt += lambda s: s.where(Events.context_id_bin == context_id_bin).union_all(
-            _states_query_for_context_id(
-                start_day,
-                end_day,
-                # https://github.com/python/mypy/issues/2608
-                context_id_bin,  # type:ignore[arg-type]
-            ),
+            _states_query_for_context_id(start_day, end_day, context_id_bin),
         )
     elif filters and filters.has_config:
         stmt = stmt.add_criteria(
-            lambda q: q.filter(filters.events_entity_filter()).union_all(  # type: ignore[union-attr]
+            lambda q: q.filter(filters.events_entity_filter()).union_all(
                 _states_query_for_all(start_day, end_day).where(
-                    filters.states_metadata_entity_filter()  # type: ignore[union-attr]
+                    filters.states_metadata_entity_filter()
                 )
             ),
             track_on=[filters],
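With mypy 1.7 the captured variables type-check inside the cached-lambda statements, so the python/mypy#2608 workaround comments and their arg-type/union-attr ignores disappear and the calls fold back onto single lines. For context, a self-contained sketch of the SQLAlchemy lambda_stmt pattern this code uses (a toy model; requires SQLAlchemy 1.4+):

    from sqlalchemy import Column, Integer, lambda_stmt, select
    from sqlalchemy.orm import declarative_base

    Base = declarative_base()

    class Events(Base):  # toy stand-in for the recorder's Events model
        __tablename__ = "events"
        event_id = Column(Integer, primary_key=True)

    def events_stmt(min_id: int | None):
        # lambda_stmt caches the generated SQL; += appends more criteria.
        stmt = lambda_stmt(lambda: select(Events.event_id))
        if min_id is not None:
            stmt += lambda s: s.where(Events.event_id >= min_id)
        return stmt

    print(events_stmt(10))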
@@ -124,7 +124,7 @@ class LTEData:
     """Shared state."""

     websession = attr.ib()
-    modem_data = attr.ib(init=False, factory=dict)
+    modem_data: dict[str, ModemData] = attr.ib(init=False, factory=dict)

     def get_modem_data(self, config):
         """Get modem_data for the host in config."""
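Annotating the attr.ib() gives mypy a concrete value type for the dict instead of Any; the same change appears in a second LTEData class further down. A hedged sketch of the attrs pattern, with a stand-in ModemData:

    import attr

    @attr.s
    class ModemData:  # stand-in for the integration's real modem wrapper
        host: str = attr.ib()

    @attr.s
    class LTEData:
        """Shared state."""

        websession = attr.ib()
        modem_data: dict[str, ModemData] = attr.ib(init=False, factory=dict)

    data = LTEData(websession=None)
    data.modem_data["192.0.2.1"] = ModemData(host="192.0.2.1")
    print(data.modem_data)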
@@ -192,7 +192,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
         if isinstance(result, BaseException):
             raise result from None

-        data.update_data_from_response(result)
+        data.update_data_from_response(result)  # type: ignore[arg-type]

     return data

@@ -4,6 +4,7 @@ from __future__ import annotations
 import asyncio
 from collections import defaultdict
 from dataclasses import dataclass
+from typing import cast

 from aiohttp import ClientError
 from pyoverkiz.client import OverkizClient
@@ -15,7 +16,7 @@ from pyoverkiz.exceptions import (
     NotSuchTokenException,
     TooManyRequestsException,
 )
-from pyoverkiz.models import Device, Scenario
+from pyoverkiz.models import Device, Scenario, Setup

 from homeassistant.config_entries import ConfigEntry
 from homeassistant.const import CONF_PASSWORD, CONF_USERNAME, Platform
@@ -77,6 +78,9 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
     except MaintenanceException as exception:
         raise ConfigEntryNotReady("Server is down for maintenance") from exception

+    setup = cast(Setup, setup)
+    scenarios = cast(list[Scenario], scenarios)
+
     coordinator = OverkizDataUpdateCoordinator(
         hass,
         LOGGER,
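The two new cast() calls pin the fetched results to their concrete types, which is why Setup joins the model imports above. A hedged, self-contained sketch of the idiom (toy coroutines, not the Overkiz API):

    import asyncio
    from typing import Any, cast

    async def get_label() -> str:
        return "overkiz"

    async def main() -> None:
        coros: list[Any] = [get_label(), get_label()]
        results = await asyncio.gather(*coros)  # statically just list[Any]
        labels = cast(list[str], results)  # we know each coroutine yields str
        print(", ".join(labels))

    asyncio.run(main())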
@@ -43,7 +43,7 @@ class OverkizDataUpdateCoordinator(DataUpdateCoordinator[dict[str, Device]]):
         name: str,
         client: OverkizClient,
         devices: list[Device],
-        places: Place,
+        places: Place | None,
         update_interval: timedelta | None = None,
         config_entry_id: str,
@@ -1088,10 +1088,7 @@ def _generate_statistics_during_period_stmt(
     end_time_ts = end_time.timestamp()
     stmt += lambda q: q.filter(table.start_ts < end_time_ts)
     if metadata_ids:
-        stmt += lambda q: q.filter(
-            # https://github.com/python/mypy/issues/2608
-            table.metadata_id.in_(metadata_ids)  # type:ignore[arg-type]
-        )
+        stmt += lambda q: q.filter(table.metadata_id.in_(metadata_ids))
     stmt += lambda q: q.order_by(table.metadata_id, table.start_ts)
     return stmt

@@ -41,10 +41,7 @@ def _generate_get_metadata_stmt(
    """Generate a statement to fetch metadata."""
    stmt = lambda_stmt(lambda: select(*QUERY_STATISTIC_META))
    if statistic_ids:
-        stmt += lambda q: q.where(
-            # https://github.com/python/mypy/issues/2608
-            StatisticsMeta.statistic_id.in_(statistic_ids)  # type:ignore[arg-type]
-        )
+        stmt += lambda q: q.where(StatisticsMeta.statistic_id.in_(statistic_ids))
    if statistic_source is not None:
        stmt += lambda q: q.where(StatisticsMeta.source == statistic_source)
    if statistic_type == "mean":
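These two recorder hunks are the same cleanup as the logbook hunk above: under mypy 1.7 the captured collections type-check inside the lambda criteria, so the python/mypy#2608 comments and their arg-type ignores go away and each filter folds back onto a single line.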
@@ -497,19 +497,9 @@ def compile_statistics(  # noqa: C901
             # Make calculations
             stat: StatisticData = {"start": start}
             if "max" in wanted_statistics[entity_id]:
-                stat["max"] = max(
-                    *itertools.islice(
-                        zip(*valid_float_states),  # type: ignore[typeddict-item]
-                        1,
-                    )
-                )
+                stat["max"] = max(*itertools.islice(zip(*valid_float_states), 1))
             if "min" in wanted_statistics[entity_id]:
-                stat["min"] = min(
-                    *itertools.islice(
-                        zip(*valid_float_states),  # type: ignore[typeddict-item]
-                        1,
-                    )
-                )
+                stat["min"] = min(*itertools.islice(zip(*valid_float_states), 1))

             if "mean" in wanted_statistics[entity_id]:
                 stat["mean"] = _time_weighted_average(valid_float_states, start, end)
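The collapsed one-liners keep the same logic: zip(*rows) transposes the states so the first tuple is the column of values, islice(..., 1) takes only that column, and the * unpacks it into min()/max(). A standalone sketch with made-up readings standing in for valid_float_states:

    import itertools

    # Made-up (value, timestamp) pairs.
    valid_float_states = [(21.5, 1.0), (23.0, 2.0), (22.1, 3.0)]

    columns = itertools.islice(zip(*valid_float_states), 1)  # first column only
    print(max(*columns))  # 23.0 — the largest value; timestamps untouched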
@@ -70,7 +70,7 @@ class LTEData:
     """Shared state."""

     websession = attr.ib()
-    modem_data = attr.ib(init=False, factory=dict)
+    modem_data: dict[str, ModemData] = attr.ib(init=False, factory=dict)

     def get_modem_data(self, config):
         """Get the requested or the only modem_data value."""
@@ -823,15 +823,8 @@ class DeviceRegistry:
         for device in data["deleted_devices"]:
             deleted_devices[device["id"]] = DeletedDeviceEntry(
                 config_entries=set(device["config_entries"]),
-                # type ignores (if tuple arg was cast): likely https://github.com/python/mypy/issues/8625
-                connections={
-                    tuple(conn)  # type: ignore[misc]
-                    for conn in device["connections"]
-                },
-                identifiers={
-                    tuple(iden)  # type: ignore[misc]
-                    for iden in device["identifiers"]
-                },
+                connections={tuple(conn) for conn in device["connections"]},
+                identifiers={tuple(iden) for iden in device["identifiers"]},
                 id=device["id"],
                 orphaned_timestamp=device["orphaned_timestamp"],
             )
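The per-element [misc] ignores and the pointer to python/mypy#8625 are gone: the commit shows mypy 1.7 accepting the tuple() comprehensions against the declared set-of-tuples targets. Runtime behaviour is unchanged, as in this toy rehydration:

    stored = {"connections": [["mac", "aa:bb:cc:dd:ee:ff"], ["ip", "10.0.0.2"]]}
    connections = {tuple(conn) for conn in stored["connections"]}
    print(connections)  # both pairs as tuples; set order not guaranteed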
@@ -2567,7 +2567,7 @@ class TemplateEnvironment(ImmutableSandboxedEnvironment):
        self.globals["expand"] = hassfunction(expand)
        self.filters["expand"] = self.globals["expand"]
        self.globals["closest"] = hassfunction(closest)
-        self.filters["closest"] = hassfunction(closest_filter)  # type: ignore[arg-type]
+        self.filters["closest"] = hassfunction(closest_filter)
        self.globals["distance"] = hassfunction(distance)
        self.globals["is_hidden_entity"] = hassfunction(is_hidden_entity)
        self.tests["is_hidden_entity"] = hassfunction(
@@ -2608,7 +2608,7 @@ class TemplateEnvironment(ImmutableSandboxedEnvironment):
        return super().is_safe_attribute(obj, attr, value)

    @overload
-    def compile(  # type: ignore[misc]
+    def compile(  # type: ignore[overload-overlap]
        self,
        source: str | jinja2.nodes.Template,
        name: str | None = None,
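Same rename as the diagnostics overload above: overriding Jinja's compile() with overlapping overloads now needs the specific overload-overlap code rather than the old misc.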
@@ -341,7 +341,7 @@ async def async_initialize_triggers(
        elif isinstance(result, BaseException):
            raise result from None
        elif result is None:
-            log_cb(
+            log_cb(  # type: ignore[unreachable]
                logging.ERROR, "Unknown error while setting up trigger (empty result)"
            )
        else:
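The flip side of stricter narrowing: mypy 1.7 decides the `result is None` arm cannot be reached given the declared types, so keeping the runtime guard requires a new [unreachable] ignore (reported only under warn_unreachable, which the new ignore implies is enabled here). A toy illustration:

    def handle(result: bool) -> None:
        if result is True:
            print("trigger attached")
        elif result is False:
            print("trigger failed")
        elif result is None:
            # Impossible per the bool annotation, so mypy (with
            # warn_unreachable) flags this line; runtime could still get
            # here if an untyped caller passes None.
            print("empty result")  # type: ignore[unreachable]

    handle(True)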
@@ -11,7 +11,7 @@ astroid==3.0.1
 coverage==7.3.2
 freezegun==1.2.2
 mock-open==1.4.0
-mypy==1.6.1
+mypy==1.7.0
 pre-commit==3.5.0
 pydantic==1.10.12
 pylint==3.0.2
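The pin bump itself is the whole trigger: once the test requirements move to mypy 1.7.0, the checker's new error codes and narrowing rules make every ignore adjustment above mandatory, since stale ignores are themselves reported as errors in this codebase.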