Combine short strings in components (#135265)

epenet 2025-01-10 11:07:51 +01:00 committed by GitHub
parent 024b9ae414
commit aa741a9207
GPG Key ID: B5690EEEBB952194
7 changed files with 7 additions and 23 deletions
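
The pattern applied across these files: Python joins adjacent string literals at parse time, and each of the strings touched here is short enough to fit on one line, so the implicitly concatenated fragments are folded into a single literal. An illustrative before/after sketch (not taken from any of the changed files):

    # Before: two adjacent literals, joined implicitly by the parser.
    message = (
        "Cannot do the thing, "
        "make sure it is set up correctly"
    )

    # After: one literal, producing the same string.
    message = "Cannot do the thing, make sure it is set up correctly"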


@@ -220,8 +220,7 @@ def async_subscribe_internal(
         mqtt_data = hass.data[DATA_MQTT]
     except KeyError as exc:
         raise HomeAssistantError(
-            f"Cannot subscribe to topic '{topic}', "
-            "make sure MQTT is set up correctly",
+            f"Cannot subscribe to topic '{topic}', make sure MQTT is set up correctly",
             translation_key="mqtt_not_setup_cannot_subscribe",
             translation_domain=DOMAIN,
             translation_placeholders={"topic": topic},


@@ -151,7 +151,7 @@ class MqttEvent(MqttEntity, EventEntity):
             )
         except KeyError:
             _LOGGER.warning(
-                ("`event_type` missing in JSON event payload, " " '%s' on topic %s"),
+                "`event_type` missing in JSON event payload, '%s' on topic %s",
                 payload,
                 msg.topic,
             )
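
For context, _LOGGER.warning uses lazy %-style formatting: the format string and its arguments are passed separately and only interpolated if the record is handled, so collapsing the adjacent literals leaves the emitted message unchanged. A minimal, self-contained sketch (logger name, payload, and topic are illustrative):

    import logging

    logging.basicConfig(level=logging.WARNING)
    _LOGGER = logging.getLogger("mqtt.event.example")  # illustrative name

    payload = '{"some_key": 1}'   # illustrative payload missing `event_type`
    topic = "home/sensor/event"   # illustrative topic

    # The single literal is interpolated by the logging machinery only when
    # the WARNING record is actually emitted.
    _LOGGER.warning(
        "`event_type` missing in JSON event payload, '%s' on topic %s",
        payload,
        topic,
    )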


@@ -217,10 +217,7 @@ class MqttSiren(MqttEntity, SirenEntity):
         try:
             json_payload = json_loads_object(payload)
             _LOGGER.debug(
-                (
-                    "JSON payload detected after processing payload '%s' on"
-                    " topic %s"
-                ),
+                "JSON payload detected after processing payload '%s' on topic %s",
                 json_payload,
                 msg.topic,
             )


@@ -151,10 +151,7 @@ class MqttUpdate(MqttEntity, UpdateEntity, RestoreEntity):
         rendered_json_payload = json_loads(payload)
         if isinstance(rendered_json_payload, dict):
             _LOGGER.debug(
-                (
-                    "JSON payload detected after processing payload '%s' on"
-                    " topic %s"
-                ),
+                "JSON payload detected after processing payload '%s' on topic %s",
                 rendered_json_payload,
                 msg.topic,
             )


@@ -2073,10 +2073,7 @@ def _wipe_old_string_time_columns(
         session.execute(text("UPDATE events set time_fired=NULL LIMIT 100000;"))
         session.commit()
         session.execute(
-            text(
-                "UPDATE states set last_updated=NULL, last_changed=NULL "
-                " LIMIT 100000;"
-            )
+            text("UPDATE states set last_updated=NULL, last_changed=NULL LIMIT 100000;")
         )
         session.commit()
     elif engine.dialect.name == SupportedDialect.POSTGRESQL:
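
One subtlety in this hunk: the original pair of literals produced a double space before LIMIT (a trailing space on the first fragment plus a leading space on the second), which SQL tolerates, while the combined literal uses a single space. A quick check of the parse-time concatenation, assuming plain Python:

    # Adjacent string literals are concatenated at parse time.
    old = (
        "UPDATE states set last_updated=NULL, last_changed=NULL "
        " LIMIT 100000;"
    )
    new = "UPDATE states set last_updated=NULL, last_changed=NULL LIMIT 100000;"

    assert "NULL  LIMIT" in old  # two spaces in the old concatenation
    assert "NULL LIMIT" in new   # single space in the combined literal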


@@ -927,10 +927,7 @@ def filter_unique_constraint_integrity_error(
         if ignore:
             _LOGGER.warning(
-                (
-                    "Blocked attempt to insert duplicated %s rows, please report"
-                    " at %s"
-                ),
+                "Blocked attempt to insert duplicated %s rows, please report at %s",
                 row_type,
                 "https://github.com/home-assistant/core/issues?q=is%3Aopen+is%3Aissue+label%3A%22integration%3A+recorder%22",
                 exc_info=err,


@@ -488,10 +488,7 @@ class ZWaveServices:
             )
             if nodes_without_endpoints and _LOGGER.isEnabledFor(logging.WARNING):
                 _LOGGER.warning(
-                    (
-                        "The following nodes do not have endpoint %x and will be "
-                        "skipped: %s"
-                    ),
+                    "The following nodes do not have endpoint %x and will be skipped: %s",
                     endpoint,
                     nodes_without_endpoints,
                 )