Mirror of https://github.com/home-assistant/core.git (synced 2025-07-22 20:57:21 +00:00)

Commit 30e2411761: Add type ignore error codes [last ones] (#66816)
Parent commit: fcf774ecfc
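
The change is mechanical but worth spelling out: each remaining bare "# type: ignore" is narrowed to "# type: ignore[error-code]", so the comment suppresses only the specific mypy error it was added for and everything else on the line stays checked. mypy prints these codes when show_error_codes is enabled, and its optional ignore-without-code check can flag the bare form outright. A minimal standalone sketch of the difference, using made-up names rather than code from this commit:

def double(value: int) -> int:
    """Toy function used only to provoke an [arg-type] error below."""
    return value * 2


# Bare ignore: silences every error mypy would report on this line,
# including unrelated ones introduced by later edits.
result = double("ha")  # type: ignore

# Scoped ignore: only the named code is suppressed; a typo or a new,
# different error on the same line would still be reported.
result = double("ha")  # type: ignore[arg-type]

print(result)  # at runtime "ha" * 2 is simply "haha"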
@@ -8,11 +8,11 @@ from .const import DOMAIN


 @callback
-def async_describe_events(hass: HomeAssistant, async_describe_event):  # type: ignore
+def async_describe_events(hass: HomeAssistant, async_describe_event):  # type: ignore[no-untyped-def]
     """Describe logbook events."""

     @callback
-    def async_describe_logbook_event(event: LazyEventPartialState):  # type: ignore
+    def async_describe_logbook_event(event: LazyEventPartialState):  # type: ignore[no-untyped-def]
         """Describe a logbook event."""
         data = event.data
         message = "has been triggered"
@@ -397,7 +397,10 @@ class SensorEntity(Entity):
         # Received a date value
         if value is not None and device_class == DEVICE_CLASS_DATE:
             try:
-                return value.isoformat()  # type: ignore
+                # We cast the value, to avoid using isinstance, but satisfy
+                # typechecking. The errors are guarded in this try.
+                value = cast(date, value)
+                return value.isoformat()
             except (AttributeError, TypeError) as err:
                 raise ValueError(
                     f"Invalid date: {self.entity_id} has a date device class "
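
The hunk above replaces a blanket ignore with typing.cast(): when the device class says the value is a date, cast() records that assumption for the type checker at zero runtime cost, and the surrounding try/except already turns a wrong assumption into a clear error. A rough standalone sketch of the same guarded-cast pattern, with illustrative names that are not Home Assistant APIs:

from datetime import date
from typing import Any, cast


def format_date(value: Any) -> str:
    """Return an ISO date string for a value we expect to be a date."""
    try:
        # cast() has no runtime effect; it only tells the checker what we
        # expect. If the expectation is wrong, .isoformat() raises and the
        # except clause below converts that into a clearer error.
        typed_value = cast(date, value)
        return typed_value.isoformat()
    except (AttributeError, TypeError) as err:
        raise ValueError(f"{value!r} is not a date") from err


print(format_date(date(2022, 2, 18)))  # 2022-02-18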
@@ -434,7 +437,7 @@ class SensorEntity(Entity):
             prec = len(value_s) - value_s.index(".") - 1 if "." in value_s else 0
             # Suppress ValueError (Could not convert sensor_value to float)
             with suppress(ValueError):
-                temp = units.temperature(float(value), unit_of_measurement)  # type: ignore
+                temp = units.temperature(float(value), unit_of_measurement)  # type: ignore[arg-type]
                 value = round(temp) if prec == 0 else round(temp, prec)

         return value
@@ -330,7 +330,7 @@ def sync_entity_lifecycle(
     create_entity: Callable[[dict], Entity],
 ) -> None:
     """Map a collection to an entity component."""
-    entities = {}
+    entities: dict[str, Entity] = {}
     ent_reg = entity_registry.async_get(hass)

     async def _add_entity(change_set: CollectionChangeSet) -> Entity:
@@ -348,7 +348,7 @@ def sync_entity_lifecycle(
         entities.pop(change_set.item_id)

     async def _update_entity(change_set: CollectionChangeSet) -> None:
-        await entities[change_set.item_id].async_update_config(change_set.item)  # type: ignore
+        await entities[change_set.item_id].async_update_config(change_set.item)  # type: ignore[attr-defined]

     _func_map: dict[
         str, Callable[[CollectionChangeSet], Coroutine[Any, Any, Entity | None]]
@@ -707,10 +707,11 @@ class Entity(ABC):
             await self.parallel_updates.acquire()

         try:
+            task: asyncio.Future[None]
             if hasattr(self, "async_update"):
-                task = self.hass.async_create_task(self.async_update())  # type: ignore
+                task = self.hass.async_create_task(self.async_update())  # type: ignore[attr-defined]
             elif hasattr(self, "update"):
-                task = self.hass.async_add_executor_job(self.update)  # type: ignore
+                task = self.hass.async_add_executor_job(self.update)  # type: ignore[attr-defined]
             else:
                 return

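
The added task: asyncio.Future[None] line is the interesting part of the hunk above: async_create_task returns an asyncio.Task while async_add_executor_job returns a plain future, so declaring the variable once with the common supertype lets both branches assign to it, and the remaining attr-defined ignores cover only the hasattr-guarded attributes the base class does not declare. A generic sketch of that idea with toy functions (not the Home Assistant helpers):

import asyncio


def blocking_work() -> None:
    """Stand-in for a synchronous update() method."""


async def coro_work() -> None:
    """Stand-in for an asynchronous async_update() method."""
    await asyncio.sleep(0)


async def main(use_coroutine: bool) -> None:
    loop = asyncio.get_running_loop()

    # Declared once with the common supertype: create_task() returns
    # asyncio.Task[None], run_in_executor() returns asyncio.Future[None],
    # and Task is a subclass of Future, so both assignments type-check.
    task: asyncio.Future[None]
    if use_coroutine:
        task = asyncio.create_task(coro_work())
    else:
        task = loop.run_in_executor(None, blocking_work)
    await task


asyncio.run(main(True))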
@@ -172,7 +172,7 @@ class EntityPlatform:
         def async_create_setup_task() -> Coroutine:
             """Get task to set up platform."""
             if getattr(platform, "async_setup_platform", None):
-                return platform.async_setup_platform(  # type: ignore
+                return platform.async_setup_platform(  # type: ignore[no-any-return,union-attr]
                     hass,
                     platform_config,
                     self._async_schedule_add_entities,
@@ -183,7 +183,7 @@ class EntityPlatform:
             # we don't want to track this task in case it blocks startup.
             return hass.loop.run_in_executor(  # type: ignore[return-value]
                 None,
-                platform.setup_platform,  # type: ignore
+                platform.setup_platform,  # type: ignore[union-attr]
                 hass,
                 platform_config,
                 self._schedule_add_entities,
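
One detail in the EntityPlatform hunks: several codes can share a single bracket, comma-separated, when one line trips more than one check. A contrived standalone sketch of a line that needs two codes at once, assuming mypy runs with warn_return_any enabled (that option is what produces no-any-return); the class and names here are invented for illustration:

from __future__ import annotations

from typing import Any


class Platform:
    # Annotated as Any to mimic dynamically loaded platform attributes.
    config: Any = {"demo": True}


def get_config(platform: Platform | None) -> dict:
    # Two complaints land on one line: platform may be None ([union-attr])
    # and a value typed Any is returned from a function declared to return
    # dict ([no-any-return]); both codes go into one bracket.
    return platform.config  # type: ignore[no-any-return,union-attr]


print(get_config(Platform()))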
@@ -1328,8 +1328,8 @@ def async_track_time_interval(
     interval: timedelta,
 ) -> CALLBACK_TYPE:
     """Add a listener that fires repetitively at every timedelta interval."""
-    remove = None
-    interval_listener_job = None
+    remove: CALLBACK_TYPE
+    interval_listener_job: HassJob[None]

     job = HassJob(action)

@@ -1344,7 +1344,7 @@ def async_track_time_interval(
         nonlocal interval_listener_job

         remove = async_track_point_in_utc_time(
-            hass, interval_listener_job, next_interval()  # type: ignore
+            hass, interval_listener_job, next_interval()
         )
         hass.async_run_hass_job(job, now)

@@ -1353,7 +1353,7 @@ def async_track_time_interval(

     def remove_listener() -> None:
         """Remove interval listener."""
-        remove()  # type: ignore
+        remove()

     return remove_listener

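
The three hunks above work as a unit: once remove and interval_listener_job are declared with their real types instead of being initialised to None, neither the async_track_point_in_utc_time(...) argument nor the later remove() call needs an ignore, because neither value is Optional any more. A minimal standalone sketch of the declare-without-initialising pattern, with generic names rather than the helpers above:

from typing import Callable

CALLBACK_TYPE = Callable[[], None]


def track_something(action: Callable[[], None]) -> CALLBACK_TYPE:
    """Return a remover that closes over state assigned later."""
    # Declared but not assigned: the checker knows the eventual type without
    # forcing an Optional[...] that every later call site would have to
    # narrow or ignore.
    remove: CALLBACK_TYPE

    def schedule() -> None:
        nonlocal remove

        def cancel() -> None:
            """Stand-in for the real cancel callback."""

        remove = cancel
        action()

    schedule()

    def remove_listener() -> None:
        """Remove the listener; no ignore needed on the call."""
        remove()

    return remove_listener


track_something(lambda: print("fired"))()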
@@ -102,12 +102,12 @@ async def async_reproduce_state(
             return

         try:
-            platform: ModuleType | None = integration.get_platform("reproduce_state")
+            platform: ModuleType = integration.get_platform("reproduce_state")
         except ImportError:
             _LOGGER.warning("Integration %s does not support reproduce state", domain)
             return

-        await platform.async_reproduce_states(  # type: ignore
+        await platform.async_reproduce_states(
             hass, states_by_domain, context=context, reproduce_options=reproduce_options
         )

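
Here the fix is to tighten an annotation rather than to scope the ignore: get_platform() either returns a module or raises ImportError, so the | None in the old annotation was over-broad, and dropping it means the attribute access in the await line no longer looks like it might be on None. A toy sketch of the same idea, using importlib instead of the Home Assistant integration loader:

from __future__ import annotations

import importlib
from types import ModuleType


def get_platform(name: str) -> ModuleType:
    """Toy loader: returns a module or raises ImportError, never None."""
    return importlib.import_module(name)


def use_platform(name: str) -> None:
    try:
        # Annotating this as ModuleType | None would force either a None
        # check or an ignore on the attribute access below; the loader
        # raises instead of returning None, so the narrower annotation is
        # both accurate and quieter.
        platform: ModuleType = get_platform(name)
    except ImportError:
        return

    print(platform.__name__)


use_platform("json")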
@@ -116,7 +116,7 @@ def get_astral_event_date(
         kwargs["observer_elevation"] = elevation

     try:
-        return getattr(location, event)(date, **kwargs)  # type: ignore
+        return getattr(location, event)(date, **kwargs)  # type: ignore[no-any-return]
     except ValueError:
         # Event never occurs for specified date.
         return None
@@ -83,7 +83,7 @@ async def async_initialize_triggers(
         triggers.append(platform.async_attach_trigger(hass, conf, action, info))

     attach_results = await asyncio.gather(*triggers, return_exceptions=True)
-    removes = []
+    removes: list[Callable[[], None]] = []

     for result in attach_results:
         if isinstance(result, HomeAssistantError):
@@ -103,7 +103,7 @@ async def async_initialize_triggers(
     log_cb(logging.INFO, "Initialized trigger")

     @callback
-    def remove_triggers():  # type: ignore
+    def remove_triggers() -> None:
         """Remove triggers."""
         for remove in removes:
             remove()
@@ -66,10 +66,10 @@ async def async_setup_component(
     if domain in hass.config.components:
         return True

-    setup_tasks = hass.data.setdefault(DATA_SETUP, {})
+    setup_tasks: dict[str, asyncio.Task[bool]] = hass.data.setdefault(DATA_SETUP, {})

     if domain in setup_tasks:
-        return await setup_tasks[domain]  # type: ignore
+        return await setup_tasks[domain]

     task = setup_tasks[domain] = hass.async_create_task(
         _async_setup_component(hass, domain, config)
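
hass.data is a loosely typed mapping, so anything pulled out of it is Any; annotating the value returned by setdefault() restores typing for everything downstream, which is why the await on the cached task can drop its ignore and still be known to produce a bool. A self-contained sketch of the pattern with a plain dict standing in for hass.data:

from __future__ import annotations

import asyncio
from typing import Any

# Stand-in for hass.data: a shared, loosely typed bag of per-component state.
data: dict[str, Any] = {}


async def setup_component(domain: str) -> bool:
    # Without the annotation setdefault() returns Any and the awaits below
    # would need ignores; with it, the checker knows each task yields a bool.
    setup_tasks: dict[str, asyncio.Task[bool]] = data.setdefault("setup_tasks", {})

    if domain in setup_tasks:
        return await setup_tasks[domain]

    async def _do_setup() -> bool:
        return True

    task = setup_tasks[domain] = asyncio.create_task(_do_setup())
    return await task


print(asyncio.run(setup_component("light")))  # True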
@@ -69,11 +69,11 @@ def async_activate_log_queue_handler(hass: HomeAssistant) -> None:
     This allows us to avoid blocking I/O and formatting messages
     in the event loop as log messages are written in another thread.
     """
-    simple_queue = queue.SimpleQueue()  # type: ignore
+    simple_queue: queue.SimpleQueue[logging.Handler] = queue.SimpleQueue()
     queue_handler = HomeAssistantQueueHandler(simple_queue)
     logging.root.addHandler(queue_handler)

-    migrated_handlers = []
+    migrated_handlers: list[logging.Handler] = []
     for handler in logging.root.handlers[:]:
         if handler is queue_handler:
             continue
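
The last hunk is the recurring empty-container case: a bare SimpleQueue() or [] gives the checker nothing to infer an element type from, so the type is spelled out where the container is created and every later put, append or read is checked against it. A short standalone sketch (the element types are arbitrary examples, not a claim about what this queue holds):

from __future__ import annotations

import logging
import queue

# Empty containers carry no element type of their own, so it is declared at
# the point of creation. The __future__ import keeps the subscripted
# annotations from being evaluated at runtime.
pending: queue.SimpleQueue[int] = queue.SimpleQueue()
migrated_handlers: list[logging.Handler] = []

pending.put(42)
migrated_handlers.append(logging.StreamHandler())

print(pending.get(), len(migrated_handlers))  # 42 1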