Use percent formatting in logging per guidelines (#135550)

Ville Skyttä 2025-01-14 09:15:38 -01:00 committed by GitHub
parent 58df5f2394
commit 1426c421f3
7 changed files with 8 additions and 8 deletions
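
Background for reviewers: Home Assistant's logging guidelines prefer %-style placeholders because the standard library logging module defers argument interpolation until a record is actually emitted, whereas an f-string is evaluated at the call site even when the level is disabled. A minimal standalone sketch of the difference (illustrative only, not part of this commit; the Expensive class is a made-up stand-in for a value that is costly to format):

import logging

logging.basicConfig(level=logging.INFO)  # DEBUG records are discarded
LOGGER = logging.getLogger(__name__)


class Expensive:
    """Stand-in for a value whose string conversion is costly."""

    def __str__(self) -> str:
        print("__str__ ran")
        return "expensive value"


# f-string: __str__ runs at the call site even though the DEBUG record is dropped.
LOGGER.debug(f"value is {Expensive()}")

# Percent formatting: the logging framework interpolates arguments only when the
# record is emitted, so __str__ never runs while DEBUG is disabled.
LOGGER.debug("value is %s", Expensive())

A further benefit is that records sharing a template stay identical apart from their arguments, which makes them easier to group in log aggregation tools.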

View File

@@ -70,7 +70,7 @@ class PulseHub:
     async def async_notify_update(self, update_type: aiopulse.UpdateType) -> None:
         """Evaluate entities when hub reports that update has occurred."""
-        LOGGER.debug("Hub {update_type.name} updated")
+        LOGGER.debug("Hub %s updated", update_type.name)
         if update_type == aiopulse.UpdateType.rollers:
             await update_devices(self.hass, self.config_entry, self.api.rollers)

View File

@@ -26,7 +26,7 @@ def _migrate_unique_id(
     for entity in entry_entities:
         if entity.unique_id.split("-")[0] == entry.entry_id:
             feed_id = entity.unique_id.split("-")[-1]
-            LOGGER.debug(f"moving feed {feed_id} to hardware uuid")
+            LOGGER.debug("moving feed %s to hardware uuid", feed_id)
             ent_reg.async_update_entity(
                 entity.entity_id, new_unique_id=f"{emoncms_unique_id}-{feed_id}"
             )

View File

@@ -149,7 +149,7 @@ async def _async_wait_for_initialization(
     while (
         fireplace.data.ipv4_address == "127.0.0.1" and fireplace.data.serial == "unset"
     ):
-        LOGGER.debug(f"Waiting for fireplace to initialize [{fireplace.read_mode}]")
+        LOGGER.debug("Waiting for fireplace to initialize [%s]", fireplace.read_mode)
         await asyncio.sleep(INIT_WAIT_TIME_SECONDS)

View File

@@ -145,13 +145,13 @@ class IntelliFireConfigFlow(ConfigFlow, domain=DOMAIN):
         """
         errors: dict[str, str] = {}
         LOGGER.debug(
-            f"STEP: pick_cloud_device: {user_input} - DHCP_MODE[{self._dhcp_mode}"
+            "STEP: pick_cloud_device: %s - DHCP_MODE[%s]", user_input, self._dhcp_mode
         )
         if self._dhcp_mode or user_input is not None:
             if self._dhcp_mode:
                 serial = self._dhcp_discovered_serial
-                LOGGER.debug(f"DHCP Mode detected for serial [{serial}]")
+                LOGGER.debug("DHCP Mode detected for serial [%s]", serial)
             if user_input is not None:
                 serial = user_input[CONF_SERIAL]

View File

@@ -115,7 +115,7 @@ class MastodonNotificationService(BaseNotificationService):
         try:
             mediadata = self.client.media_post(media_path, mime_type=media_type)
         except MastodonAPIError:
-            LOGGER.error(f"Unable to upload image {media_path}")
+            LOGGER.error("Unable to upload image %s", media_path)
         return mediadata

View File

@@ -87,7 +87,7 @@ async def validate_login(
     except LoginFailedException as err:
         raise InvalidAuth from err
-    LOGGER.debug(f"Connection successful - saving session to file {SESSION_FILE}")
+    LOGGER.debug("Connection successful - saving session to file %s", SESSION_FILE)
     LOGGER.debug("Obtaining subscription id")
     subs: MonarchSubscription = await monarch_client.get_subscription_details()
     assert subs is not None

View File

@@ -45,7 +45,7 @@ async def validate_user_input(
             raise SchemaFlowError("invalid_auth") from err
         raise SchemaFlowError("cannot_connect") from err
     except Fault as fault:
-        LOGGER.exception(f"Fault {fault.faultCode}: {fault.faultString}")
+        LOGGER.exception("Fault %s: %s", fault.faultCode, fault.faultString)
         raise SchemaFlowError("unknown") from fault
     except ClientConnectionError as err:
         raise SchemaFlowError("cannot_connect") from err
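
For anyone auditing the remaining integrations for the same pattern: assuming pylint is installed, its logging checker can flag f-strings passed to logging calls (this invocation is an illustration, not part of the commit):

pylint --disable=all --enable=logging-fstring-interpolation homeassistant/components

Ruff's flake8-logging-format (G) rules cover similar ground if that is the preferred linter.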