String formatting and max line length - Part 4 (#84445)

Co-authored-by: jjlawren <jjlawren@users.noreply.github.com>
Franck Nijhof 2022-12-23 13:27:27 +01:00 committed by GitHub
parent a8f09b4063
commit 94755a5773
44 changed files with 267 additions and 136 deletions
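
Every hunk in this commit applies the same transformation: an over-long string literal is split into adjacent fragments that Python joins at compile time (implicit string concatenation). Dropping the old trailing-backslash continuations is a correctness fix, not just style: a backslash continuation inside a string literal keeps the next line's leading indentation in the message itself. A minimal standalone sketch, reusing the NMBS message from one of the hunks below:

    # A backslash continuation inside a string literal leaks the next
    # line's indentation into the message:
    broken = "Skipping update of NMBSSensor \
            because this connection is a via"
    assert "  because" in broken  # unwanted run of spaces in the text

    # Adjacent literals concatenate at compile time; the leading space on
    # the continuation fragment preserves normal word spacing.
    fixed = (
        "Skipping update of NMBSSensor"
        " because this connection is a via"
    )
    assert fixed == "Skipping update of NMBSSensor because this connection is a via"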


@@ -78,9 +78,10 @@ async def async_setup(hass: HomeAssistant, yaml_config: ConfigType) -> bool:
     if not yaml_config[DOMAIN]:
         return True
     _LOGGER.warning(
-        "Loading Azure Event Hub completely via yaml config is deprecated; Only the \
-        Filter can be set in yaml, the rest is done through a config flow and has \
-        been imported, all other keys but filter can be deleted from configuration.yaml"
+        "Loading Azure Event Hub completely via yaml config is deprecated; Only the"
+        " Filter can be set in yaml, the rest is done through a config flow and has"
+        " been imported, all other keys but filter can be deleted from"
+        " configuration.yaml"
     )
     hass.async_create_task(
         hass.config_entries.flow.async_init(


@@ -146,9 +146,13 @@ class CrownstoneEntryManager:
         # Show notification to ensure the user knows the cloud is now used
         persistent_notification.async_create(
             self.hass,
-            f"Setup of Crownstone USB dongle was unsuccessful on port {serial_port}.\n \
-            Crownstone Cloud will be used to switch Crownstones.\n \
-            Please check if your port is correct and set up the USB again from integration options.",
+            (
+                "Setup of Crownstone USB dongle was unsuccessful on port"
+                f" {serial_port}.\n Crownstone Cloud will be used"
+                " to switch Crownstones.\n Please check if your"
+                " port is correct and set up the USB again from integration"
+                " options."
+            ),
             "Crownstone",
             "crownstone_usb_dongle_setup",
         )
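
Two details of the new style show up in this hunk: only fragments containing a substitution keep the f prefix (plain fragments drop it, as the recorder __repr__ hunks further down also show), and f-string fragments concatenate freely with plain ones. A standalone sketch; the serial_port value is a made-up placeholder:

    serial_port = "/dev/ttyUSB0"  # hypothetical placeholder value
    message = (
        "Setup of Crownstone USB dongle was unsuccessful on port"
        f" {serial_port}."  # only this fragment needs the f prefix
    )
    assert message.endswith("port /dev/ttyUSB0.")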


@@ -296,8 +296,7 @@ class NMBSSensor(SensorEntity):
         if self._excl_vias and self.is_via_connection:
             _LOGGER.debug(
-                "Skipping update of NMBSSensor \
-                because this connection is a via"
+                "Skipping update of NMBSSensor because this connection is a via"
             )
             return


@@ -88,7 +88,10 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
     if coordinator.is_stateless:
         LOGGER.debug(
-            "All devices have an assumed state. Update interval has been reduced to: %s",
+            (
+                "All devices have an assumed state. Update interval has been reduced"
+                " to: %s"
+            ),
             UPDATE_INTERVAL_ALL_ASSUMED_STATE,
         )
         coordinator.update_interval = UPDATE_INTERVAL_ALL_ASSUMED_STATE

@@ -102,7 +105,10 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
     # Map Overkiz entities to Home Assistant platform
     for device in coordinator.data.values():
         LOGGER.debug(
-            "The following device has been retrieved. Report an issue if not supported correctly (%s)",
+            (
+                "The following device has been retrieved. Report an issue if not"
+                " supported correctly (%s)"
+            ),
             device,
         )
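
When the split string is the format argument of a lazy %-style logging call, as here, the fragments are additionally wrapped in parentheses. The parentheses are not syntactically required, because adjacent literals concatenate even inside an argument list, but they make it obvious where the format string ends and the log arguments begin. A standalone sketch of both equivalent spellings, with a made-up interval value:

    import logging

    _LOGGER = logging.getLogger(__name__)
    UPDATE_INTERVAL_ALL_ASSUMED_STATE = 30  # hypothetical stand-in value

    # Without parentheses: still one format string plus one lazy argument.
    _LOGGER.debug(
        "All devices have an assumed state. Update interval has been reduced"
        " to: %s",
        UPDATE_INTERVAL_ALL_ASSUMED_STATE,
    )

    # With parentheses, as in this commit: the identical call with the
    # grouping made explicit.
    _LOGGER.debug(
        (
            "All devices have an assumed state. Update interval has been reduced"
            " to: %s"
        ),
        UPDATE_INTERVAL_ALL_ASSUMED_STATE,
    )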


@@ -187,7 +187,9 @@ ALARM_DESCRIPTIONS: list[OverkizAlarmDescription] = [
         alarm_arm_night=OverkizCommand.ALARM_ZONE_ON,
         alarm_arm_night_args=f"{OverkizCommandParam.A}, {OverkizCommandParam.B}",
         alarm_arm_away=OverkizCommand.ALARM_ZONE_ON,
-        alarm_arm_away_args=f"{OverkizCommandParam.A},{OverkizCommandParam.B},{OverkizCommandParam.C}",
+        alarm_arm_away_args=(
+            f"{OverkizCommandParam.A},{OverkizCommandParam.B},{OverkizCommandParam.C}"
+        ),
     ),
     # MyFoxAlarmController
     OverkizAlarmDescription(


@@ -33,7 +33,10 @@ class OwnTracksFlow(config_entries.ConfigFlow, domain=DOMAIN):
         secret = secrets.token_hex(16)
         if supports_encryption():
-            secret_desc = f"The encryption key is {secret} (on Android under preferences -> advanced)"
+            secret_desc = (
+                f"The encryption key is {secret} (on Android under preferences ->"
+                " advanced)"
+            )
         else:
             secret_desc = "Encryption is not supported because nacl is not installed."


@@ -138,7 +138,10 @@ def _decrypt_payload(secret, topic, ciphertext):
         return message
     except ValueError:
         _LOGGER.warning(
-            "Ignoring encrypted payload because unable to decrypt using key for topic %s",
+            (
+                "Ignoring encrypted payload because unable to decrypt using key for"
+                " topic %s"
+            ),
             topic,
         )
         return None


@@ -99,7 +99,8 @@ async def async_setup_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> bool:
     unique_id = config_entry.unique_id
     if device_info is None:
         _LOGGER.error(
-            "Couldn't gather device info; Please restart Home Assistant with your TV turned on and connected to your network"
+            "Couldn't gather device info; Please restart Home Assistant with your"
+            " TV turned on and connected to your network"
         )
     else:
         unique_id = device_info[ATTR_UDN]


@@ -148,8 +148,10 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
         if entity_id not in notifications:
             _LOGGER.error(
-                "Marking persistent_notification read failed: "
-                "Notification ID %s not found",
+                (
+                    "Marking persistent_notification read failed: "
+                    "Notification ID %s not found"
+                ),
                 notification_id,
             )
             return


@@ -303,7 +303,8 @@ async def filter_yaml_data(hass: HomeAssistant, persons: list[dict]) -> list[dict]:
             person_conf[CONF_ID],
         )
         person_invalid_user.append(
-            f"- Person {person_conf[CONF_NAME]} (id: {person_conf[CONF_ID]}) points at invalid user {user_id}"
+            f"- Person {person_conf[CONF_NAME]} (id: {person_conf[CONF_ID]}) points"
+            f" at invalid user {user_id}"
         )
         continue


@@ -32,7 +32,8 @@ def _can_use_icmp_lib_with_privilege() -> None | bool:
         icmp_ping("127.0.0.1", count=0, timeout=0, privileged=False)
     except SocketPermissionError:
         _LOGGER.debug(
-            "Cannot use icmplib because privileges are insufficient to create the socket"
+            "Cannot use icmplib because privileges are insufficient to create the"
+            " socket"
         )
         return None
     else:


@@ -142,7 +142,8 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
         raise ConfigEntryNotReady from error
     except plexapi.exceptions.Unauthorized as ex:
         raise ConfigEntryAuthFailed(
-            f"Token not accepted, please reauthenticate Plex server '{entry.data[CONF_SERVER]}'"
+            "Token not accepted, please reauthenticate Plex server"
+            f" '{entry.data[CONF_SERVER]}'"
         ) from ex
     except (
         plexapi.exceptions.BadRequest,


@@ -89,7 +89,8 @@ def search_media(
         if len(exact_matches) == 1:
             return exact_matches[0]
         raise MediaNotFound(
-            f"Multiple matches, make content_id more specific or use `allow_multiple`: {results}"
+            "Multiple matches, make content_id more specific or use `allow_multiple`:"
+            f" {results}"
         )
     return results[0]


@@ -77,7 +77,10 @@ class PlexSession:
         elif media.librarySectionID and media.librarySectionID < 1:
             self.media_library_title = UNKNOWN_SECTION
             _LOGGER.warning(
-                "Unknown library section ID (%s) for title '%s', please create an issue",
+                (
+                    "Unknown library section ID (%s) for title '%s',"
+                    " please create an issue"
+                ),
                 media.librarySectionID,
                 media.title,
             )

@@ -92,7 +95,11 @@ class PlexSession:
             self.media_series_title = media.grandparentTitle
             if media.index is not None:
                 self.media_episode = media.index
-            self.sensor_title = f"{self.media_series_title} - {media.seasonEpisode} - {self.media_title}"
+            self.sensor_title = (
+                f"{self.media_series_title} -"
+                f" {media.seasonEpisode} -"
+                f" {self.media_title}"
+            )
         elif media.type == "movie":
             self.media_content_type = MediaType.MOVIE
             if media.year is not None and media.title is not None:


@@ -198,7 +198,8 @@ class PlexServer:
                     config_entry_update_needed = True
                 else:
                     raise Unauthorized(  # pylint: disable=raise-missing-from
-                        "New certificate cannot be validated with provided token"
+                        "New certificate cannot be validated"
+                        " with provided token"
                     )
             else:
                 raise

@@ -212,7 +213,8 @@ class PlexServer:
             shared_users = self.account.users() if self.account else []
         except Unauthorized:
             _LOGGER.warning(
-                "Plex account has limited permissions, shared account filtering will not be available"
+                "Plex account has limited permissions,"
+                " shared account filtering will not be available"
             )
         else:
             self._accounts = []


@@ -36,8 +36,9 @@ async def async_setup_services(hass):
     async def async_scan_clients_service(_: ServiceCall) -> None:
         _LOGGER.warning(
-            "This service is deprecated in favor of the scan_clients button entity. "
-            "Service calls will still work for now but the service will be removed in a future release"
+            "This service is deprecated in favor of the scan_clients button entity."
+            " Service calls will still work for now but the service will be removed in"
+            " a future release"
         )
         for server_id in hass.data[DOMAIN][SERVERS]:
             async_dispatcher_send(hass, PLEX_UPDATE_PLATFORMS_SIGNAL.format(server_id))

@@ -103,7 +104,8 @@ def get_plex_server(hass, plex_server_name=None, plex_server_id=None):
         friendly_names = [x.friendly_name for x in plex_servers]
         raise HomeAssistantError(
-            f"Multiple Plex servers configured, choose with 'plex_server' key: {friendly_names}"
+            "Multiple Plex servers configured, choose with 'plex_server' key:"
+            f" {friendly_names}"
         )


@@ -157,7 +157,9 @@ class PlugwiseConfigFlow(ConfigFlow, domain=DOMAIN):
                     CONF_PORT: discovery_info.port,
                     CONF_USERNAME: self._username,
                 },
-                "configuration_url": f"http://{discovery_info.host}:{discovery_info.port}",
+                "configuration_url": (
+                    f"http://{discovery_info.host}:{discovery_info.port}"
+                ),
                 "product": _product,
             }
         )


@@ -80,7 +80,8 @@ class PlugwiseDataUpdateCoordinator(DataUpdateCoordinator[PlugwiseData]):
             raise ConfigEntryError("Invalid username or Smile ID") from err
         except (InvalidXMLError, ResponseError) as err:
             raise UpdateFailed(
-                "Invalid XML data, or error indication received for the Plugwise Adam/Smile/Stretch"
+                "Invalid XML data, or error indication received for the Plugwise"
+                " Adam/Smile/Stretch"
             ) from err
         except UnsupportedDeviceError as err:
             raise ConfigEntryError("Device with unsupported firmware") from err


@@ -68,7 +68,10 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
         persistent_notification.async_create(
             hass,
-            "Object growth logging has started. See [the logs](/config/logs) to track the growth of new objects.",
+            (
+                "Object growth logging has started. See [the logs](/config/logs) to"
+                " track the growth of new objects."
+            ),
             title="Object growth logging started",
             notification_id="profile_object_logging",
         )

@@ -111,7 +114,10 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
         persistent_notification.create(
             hass,
-            f"Objects with type {obj_type} have been dumped to the log. See [the logs](/config/logs) to review the repr of the objects.",
+            (
+                f"Objects with type {obj_type} have been dumped to the log. See [the"
+                " logs](/config/logs) to review the repr of the objects."
+            ),
             title="Object dump completed",
             notification_id="profile_object_dump",
         )

@@ -231,7 +237,10 @@ async def _async_generate_profile(hass: HomeAssistant, call: ServiceCall):
     start_time = int(time.time() * 1000000)
     persistent_notification.async_create(
         hass,
-        "The profile has started. This notification will be updated when it is complete.",
+        (
+            "The profile has started. This notification will be updated when it is"
+            " complete."
+        ),
         title="Profile Started",
         notification_id=f"profiler_{start_time}",
     )

@@ -247,7 +256,10 @@ async def _async_generate_profile(hass: HomeAssistant, call: ServiceCall):
     )
     persistent_notification.async_create(
         hass,
-        f"Wrote cProfile data to {cprofile_path} and callgrind data to {callgrind_path}",
+        (
+            f"Wrote cProfile data to {cprofile_path} and callgrind data to"
+            f" {callgrind_path}"
+        ),
         title="Profile Complete",
         notification_id=f"profiler_{start_time}",
     )

@@ -262,7 +274,10 @@ async def _async_generate_memory_profile(hass: HomeAssistant, call: ServiceCall):
     start_time = int(time.time() * 1000000)
    persistent_notification.async_create(
         hass,
-        "The memory profile has started. This notification will be updated when it is complete.",
+        (
+            "The memory profile has started. This notification will be updated when it"
+            " is complete."
+        ),
         title="Profile Started",
         notification_id=f"memory_profiler_{start_time}",
     )


@@ -120,8 +120,10 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
             continue
         except SSLError:
             _LOGGER.error(
-                "Unable to verify proxmox server SSL. "
-                'Try using "verify_ssl: false" for proxmox instance %s:%d',
+                (
+                    "Unable to verify proxmox server SSL. "
+                    'Try using "verify_ssl: false" for proxmox instance %s:%d'
+                ),
                 host,
                 port,
             )


@@ -118,8 +118,10 @@ def _resize_image(image, opts):
     newimage = imgbuf.getvalue()
     if not opts.force_resize and len(newimage) >= old_size:
         _LOGGER.debug(
-            "Using original image (%d bytes) "
-            "because resized image (%d bytes) is not smaller",
+            (
+                "Using original image (%d bytes) "
+                "because resized image (%d bytes) is not smaller"
+            ),
             old_size,
             len(newimage),
         )


@@ -62,7 +62,9 @@ SENSORS: tuple[PVOutputSensorEntityDescription, ...] = (
     PVOutputSensorEntityDescription(
         key="normalized_output",
         name="Efficiency",
-        native_unit_of_measurement=f"{UnitOfEnergy.KILO_WATT_HOUR}/{UnitOfPower.KILO_WATT}",
+        native_unit_of_measurement=(
+            f"{UnitOfEnergy.KILO_WATT_HOUR}/{UnitOfPower.KILO_WATT}"
+        ),
         state_class=SensorStateClass.MEASUREMENT,
         value_fn=lambda status: status.normalized_output,
     ),


@@ -80,10 +80,12 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
     try:
         await async_migrate_entries(hass, entry.entry_id, update_unique_id)
         _LOGGER.warning(
-            "Migrating PVPC sensor from old tariff '%s' to new '%s'. "
-            "Configure the integration to set your contracted power, "
-            "and select prices for Ceuta/Melilla, "
-            "if that is your case",
+            (
+                "Migrating PVPC sensor from old tariff '%s' to new '%s'. "
+                "Configure the integration to set your contracted power, "
+                "and select prices for Ceuta/Melilla, "
+                "if that is your case"
+            ),
             entry.data[ATTR_TARIFF],
             _DEFAULT_TARIFF,
         )

@@ -95,8 +97,10 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
             if reg_entry.config_entry_id == entry.entry_id:
                 ent_reg.async_remove(entity_id)
                 _LOGGER.warning(
-                    "Old PVPC Sensor %s is removed "
-                    "(another one already exists, using the same tariff)",
+                    (
+                        "Old PVPC Sensor %s is removed "
+                        "(another one already exists, using the same tariff)"
+                    ),
                     entity_id,
                 )
                 break


@@ -462,7 +462,10 @@ class QNAPDriveSensor(QNAPSensor):
         """Return the name of the sensor, if any."""
         server_name = self._api.data["system_stats"]["system"]["name"]
-        return f"{server_name} {self.entity_description.name} (Drive {self.monitor_device})"
+        return (
+            f"{server_name} {self.entity_description.name} (Drive"
+            f" {self.monitor_device})"
+        )

     @property
     def extra_state_attributes(self):

@@ -506,5 +509,7 @@ class QNAPVolumeSensor(QNAPSensor):
             total_gb = int(data["total_size"]) / 1024 / 1024 / 1024
             return {
-                ATTR_VOLUME_SIZE: f"{round_nicely(total_gb)} {UnitOfInformation.GIBIBYTES}"
+                ATTR_VOLUME_SIZE: (
+                    f"{round_nicely(total_gb)} {UnitOfInformation.GIBIBYTES}"
+                )
             }


@@ -86,7 +86,10 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
         _LOGGER.error("No Rachio devices found in account %s", person.username)
         return False
     _LOGGER.info(
-        "%d Rachio device(s) found; The url %s must be accessible from the internet in order to receive updates",
+        (
+            "%d Rachio device(s) found; The url %s must be accessible from the internet"
+            " in order to receive updates"
+        ),
         len(person.controllers),
         webhook_url,
     )


@@ -154,7 +154,10 @@ class RachioPerson:
         if isinstance(webhooks, dict):
             if webhooks.get("code") == PERMISSION_ERROR:
                 _LOGGER.info(
-                    "Not adding controller '%s', only controllers owned by '%s' may be added",
+                    (
+                        "Not adding controller '%s', only controllers owned by '%s'"
+                        " may be added"
+                    ),
                     controller[KEY_NAME],
                     self.username,
                 )


@@ -137,10 +137,10 @@ async def async_setup_platform(
         translation_key="deprecated_yaml",
     )
     _LOGGER.warning(
-        "Configuration of the Radio Thermostat climate platform in YAML is deprecated and "
-        "will be removed in Home Assistant 2022.9; Your existing configuration "
-        "has been imported into the UI automatically and can be safely removed "
-        "from your configuration.yaml file"
+        "Configuration of the Radio Thermostat climate platform in YAML is deprecated"
+        " and will be removed in Home Assistant 2022.9; Your existing configuration has"
+        " been imported into the UI automatically and can be safely removed from your"
+        " configuration.yaml file"
     )
     hosts: list[str] = []


@@ -107,7 +107,7 @@ def setup(hass: HomeAssistant, config: ConfigType) -> bool:
         _LOGGER.error("Unable to connect to Rain Cloud service: %s", str(ex))
         persistent_notification.create(
             hass,
-            f"Error: {ex}<br />" "You will need to restart hass after fixing.",
+            f"Error: {ex}<br />You will need to restart hass after fixing.",
             title=NOTIFICATION_TITLE,
             notification_id=NOTIFICATION_ID,
         )


@@ -42,7 +42,9 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
     coordinator = DataUpdateCoordinator(
         hass,
         LOGGER,
-        name=f"Place {entry.data[CONF_PLACE_ID]}, Service {entry.data[CONF_SERVICE_ID]}",
+        name=(
+            f"Place {entry.data[CONF_PLACE_ID]}, Service {entry.data[CONF_SERVICE_ID]}"
+        ),
         update_interval=DEFAULT_UPDATE_INTERVAL,
         update_method=async_get_pickup_events,
     )


@@ -319,10 +319,12 @@ class Recorder(threading.Thread):
         if size <= MAX_QUEUE_BACKLOG:
             return
         _LOGGER.error(
-            "The recorder backlog queue reached the maximum size of %s events; "
-            "usually, the system is CPU bound, I/O bound, or the database "
-            "is corrupt due to a disk problem; The recorder will stop "
-            "recording events to avoid running out of memory",
+            (
+                "The recorder backlog queue reached the maximum size of %s events; "
+                "usually, the system is CPU bound, I/O bound, or the database "
+                "is corrupt due to a disk problem; The recorder will stop "
+                "recording events to avoid running out of memory"
+            ),
             MAX_QUEUE_BACKLOG,
         )
         self._async_stop_queue_watcher_and_event_listener()

@@ -635,8 +637,10 @@ class Recorder(threading.Thread):
         else:
             persistent_notification.create(
                 self.hass,
-                "The database migration failed, check [the logs](/config/logs)."
-                "Database Migration Failed",
+                (
+                    "The database migration failed, check [the logs](/config/logs)."
+                    "Database Migration Failed"
+                ),
                 "recorder_database_migration",
             )
         self.hass.add_job(self.async_set_db_ready)

@@ -722,7 +726,12 @@ class Recorder(threading.Thread):
         """Migrate schema to the latest version."""
         persistent_notification.create(
             self.hass,
-            "System performance will temporarily degrade during the database upgrade. Do not power down or restart the system until the upgrade completes. Integrations that read the database, such as logbook and history, may return inconsistent results until the upgrade completes.",
+            (
+                "System performance will temporarily degrade during the database"
+                " upgrade. Do not power down or restart the system until the upgrade"
+                " completes. Integrations that read the database, such as logbook and"
+                " history, may return inconsistent results until the upgrade completes."
+            ),
             "Database upgrade in progress",
             "recorder_database_migration",
         )


@@ -165,7 +165,7 @@ class Events(Base):  # type: ignore[misc,valid-type]
     def __repr__(self) -> str:
         """Return string representation of instance for debugging."""
         return (
-            f"<recorder.Events("
+            "<recorder.Events("
             f"id={self.event_id}, type='{self.event_type}', "
             f"origin_idx='{self.origin_idx}', time_fired='{self.time_fired}'"
             f", data_id={self.data_id})>"

@@ -222,9 +222,9 @@ class EventData(Base):  # type: ignore[misc,valid-type]
     def __repr__(self) -> str:
         """Return string representation of instance for debugging."""
         return (
-            f"<recorder.EventData("
+            "<recorder.EventData("
             f"id={self.data_id}, hash='{self.hash}', data='{self.shared_data}'"
-            f")>"
+            ")>"
         )

     @staticmethod

@@ -290,12 +290,10 @@ class States(Base):  # type: ignore[misc,valid-type]
     def __repr__(self) -> str:
         """Return string representation of instance for debugging."""
         return (
-            f"<recorder.States("
-            f"id={self.state_id}, entity_id='{self.entity_id}', "
-            f"state='{self.state}', event_id='{self.event_id}', "
-            f"last_updated='{self.last_updated.isoformat(sep=' ', timespec='seconds')}', "
-            f"old_state_id={self.old_state_id}, attributes_id={self.attributes_id}"
-            f")>"
+            f"<recorder.States(id={self.state_id}, entity_id='{self.entity_id}',"
+            f" state='{self.state}', event_id='{self.event_id}',"
+            f" last_updated='{self.last_updated.isoformat(sep=' ', timespec='seconds')}',"
+            f" old_state_id={self.old_state_id}, attributes_id={self.attributes_id})>"
         )

@@ -374,9 +372,8 @@ class StateAttributes(Base):  # type: ignore[misc,valid-type]
     def __repr__(self) -> str:
         """Return string representation of instance for debugging."""
         return (
-            f"<recorder.StateAttributes("
-            f"id={self.attributes_id}, hash='{self.hash}', attributes='{self.shared_attrs}'"
-            f")>"
+            f"<recorder.StateAttributes(id={self.attributes_id}, hash='{self.hash}',"
+            f" attributes='{self.shared_attrs}')>"
         )

     @staticmethod

@@ -522,11 +519,10 @@ class RecorderRuns(Base):  # type: ignore[misc,valid-type]
             f"'{self.end.isoformat(sep=' ', timespec='seconds')}'" if self.end else None
         )
         return (
-            f"<recorder.RecorderRuns("
-            f"id={self.run_id}, start='{self.start.isoformat(sep=' ', timespec='seconds')}', "
-            f"end={end}, closed_incorrect={self.closed_incorrect}, "
-            f"created='{self.created.isoformat(sep=' ', timespec='seconds')}'"
-            f")>"
+            f"<recorder.RecorderRuns(id={self.run_id},"
+            f" start='{self.start.isoformat(sep=' ', timespec='seconds')}', end={end},"
+            f" closed_incorrect={self.closed_incorrect},"
+            f" created='{self.created.isoformat(sep=' ', timespec='seconds')}')>"
         )

     def entity_ids(self, point_in_time: datetime | None = None) -> list[str]:

@@ -566,10 +562,10 @@ class SchemaChanges(Base):  # type: ignore[misc,valid-type]
     def __repr__(self) -> str:
         """Return string representation of instance for debugging."""
         return (
-            f"<recorder.SchemaChanges("
+            "<recorder.SchemaChanges("
             f"id={self.change_id}, schema_version={self.schema_version}, "
             f"changed='{self.changed.isoformat(sep=' ', timespec='seconds')}'"
-            f")>"
+            ")>"
         )

@@ -583,9 +579,8 @@ class StatisticsRuns(Base):  # type: ignore[misc,valid-type]
     def __repr__(self) -> str:
         """Return string representation of instance for debugging."""
         return (
-            f"<recorder.StatisticsRuns("
-            f"id={self.run_id}, start='{self.start.isoformat(sep=' ', timespec='seconds')}', "
-            f")>"
+            f"<recorder.StatisticsRuns(id={self.run_id},"
+            f" start='{self.start.isoformat(sep=' ', timespec='seconds')}', )>"
         )


@@ -92,7 +92,8 @@ class Filters:
     def __repr__(self) -> str:
         """Return human readable excludes/includes."""
         return (
-            f"<Filters excluded_entities={self.excluded_entities} excluded_domains={self.excluded_domains} "
+            "<Filters"
+            f" excluded_entities={self.excluded_entities} excluded_domains={self.excluded_domains} "
             f"excluded_entity_globs={self.excluded_entity_globs} "
             f"included_entities={self.included_entities} included_domains={self.included_domains} "
             f"included_entity_globs={self.included_entity_globs}>"


@@ -182,9 +182,11 @@ def _create_index(
     index = index_list[0]
     _LOGGER.debug("Creating %s index", index_name)
     _LOGGER.warning(
-        "Adding index `%s` to database. Note: this can take several "
-        "minutes on large databases and slow computers. Please "
-        "be patient!",
+        (
+            "Adding index `%s` to database. Note: this can take several "
+            "minutes on large databases and slow computers. Please "
+            "be patient!"
+        ),
         index_name,
     )
     with session_scope(session=session_maker()) as session:

@@ -271,9 +273,11 @@ def _drop_index(
         return
     _LOGGER.warning(
-        "Failed to drop index %s from table %s. Schema "
-        "Migration will continue; this is not a "
-        "critical operation",
+        (
+            "Failed to drop index %s from table %s. Schema "
+            "Migration will continue; this is not a "
+            "critical operation"
+        ),
         index_name,
         table_name,
     )

@@ -284,9 +288,11 @@ def _add_columns(
 ) -> None:
     """Add columns to a table."""
     _LOGGER.warning(
-        "Adding columns %s to table %s. Note: this can take several "
-        "minutes on large databases and slow computers. Please "
-        "be patient!",
+        (
+            "Adding columns %s to table %s. Note: this can take several "
+            "minutes on large databases and slow computers. Please "
+            "be patient!"
+        ),
         ", ".join(column.split(" ")[0] for column in columns_def),
         table_name,
     )

@@ -338,18 +344,22 @@ def _modify_columns(
     """Modify columns in a table."""
     if engine.dialect.name == SupportedDialect.SQLITE:
         _LOGGER.debug(
-            "Skipping to modify columns %s in table %s; "
-            "Modifying column length in SQLite is unnecessary, "
-            "it does not impose any length restrictions",
+            (
+                "Skipping to modify columns %s in table %s; "
+                "Modifying column length in SQLite is unnecessary, "
+                "it does not impose any length restrictions"
+            ),
             ", ".join(column.split(" ")[0] for column in columns_def),
             table_name,
         )
         return

     _LOGGER.warning(
-        "Modifying columns %s in table %s. Note: this can take several "
-        "minutes on large databases and slow computers. Please "
-        "be patient!",
+        (
+            "Modifying columns %s in table %s. Note: this can take several "
+            "minutes on large databases and slow computers. Please "
+            "be patient!"
+        ),
         ", ".join(column.split(" ")[0] for column in columns_def),
         table_name,
     )

@@ -636,9 +646,11 @@ def _apply_update(  # noqa: C901
         if engine.dialect.name == SupportedDialect.MYSQL:
             for table in ("events", "states", "statistics_meta"):
                 _LOGGER.warning(
-                    "Updating character set and collation of table %s to utf8mb4. "
-                    "Note: this can take several minutes on large databases and slow "
-                    "computers. Please be patient!",
+                    (
+                        "Updating character set and collation of table %s to utf8mb4."
+                        " Note: this can take several minutes on large databases and"
+                        " slow computers. Please be patient!"
+                    ),
                     table,
                 )
                 with contextlib.suppress(SQLAlchemyError):


@@ -89,14 +89,16 @@ def purge_old_data(
     has_more_to_purge = False
     if _purging_legacy_format(session):
         _LOGGER.debug(
-            "Purge running in legacy format as there are states with event_id remaining"
+            "Purge running in legacy format as there are states with event_id"
+            " remaining"
         )
         has_more_to_purge |= _purge_legacy_format(
             instance, session, purge_before, using_sqlite
         )
     else:
         _LOGGER.debug(
-            "Purge running in new format as there are NO states with event_id remaining"
+            "Purge running in new format as there are NO states with event_id"
+            " remaining"
         )
         # Once we are done purging legacy rows, we use the new method
         has_more_to_purge |= _purge_states_and_attributes_ids(


@@ -508,8 +508,10 @@ def delete_statistics_duplicates(hass: HomeAssistant, session: Session) -> None:
                 cls=JSONEncoder,
             )
         _LOGGER.warning(
-            "Deleted %s non identical duplicated %s rows, a backup of the deleted rows "
-            "has been saved to %s",
+            (
+                "Deleted %s non identical duplicated %s rows, a backup of the deleted"
+                " rows has been saved to %s"
+            ),
             len(non_identical_duplicates),
             Statistics.__tablename__,
             backup_path,

@@ -2070,7 +2072,10 @@ def _filter_unique_constraint_integrity_error(
     if ignore:
         _LOGGER.warning(
-            "Blocked attempt to insert duplicated statistic rows, please report at %s",
+            (
+                "Blocked attempt to insert duplicated statistic rows, please report"
+                " at %s"
+            ),
             "https://github.com/home-assistant/core/issues?q=is%3Aopen+is%3Aissue+label%3A%22integration%3A+recorder%22",
             exc_info=err,
         )

@@ -2415,9 +2420,11 @@ def correct_db_schema(
     if "statistics_meta.4-byte UTF-8" in schema_errors:
         # Attempt to convert the table to utf8mb4
         _LOGGER.warning(
-            "Updating character set and collation of table %s to utf8mb4. "
-            "Note: this can take several minutes on large databases and slow "
-            "computers. Please be patient!",
+            (
+                "Updating character set and collation of table %s to utf8mb4. "
+                "Note: this can take several minutes on large databases and slow "
+                "computers. Please be patient!"
+            ),
             "statistics_meta",
         )
         with contextlib.suppress(SQLAlchemyError):


@@ -295,7 +295,10 @@ def run_checks_on_open_db(dbpath: str, cursor: CursorFetchStrategy) -> None:
     if not last_run_was_clean:
         _LOGGER.warning(
-            "The system could not validate that the sqlite3 database at %s was shutdown cleanly",
+            (
+                "The system could not validate that the sqlite3 database at %s was"
+                " shutdown cleanly"
+            ),
             dbpath,
         )

@@ -307,7 +310,10 @@ def move_away_broken_database(dbfile: str) -> None:
     corrupt_postfix = f".corrupt.{isotime}"
     _LOGGER.error(
-        "The system will rename the corrupt database file %s to %s in order to allow startup to proceed",
+        (
+            "The system will rename the corrupt database file %s to %s in order to"
+            " allow startup to proceed"
+        ),
         dbfile,
         f"{dbfile}{corrupt_postfix}",
     )

@@ -338,9 +344,11 @@ def query_on_connection(dbapi_connection: Any, statement: str) -> Any:
 def _fail_unsupported_dialect(dialect_name: str) -> None:
     """Warn about unsupported database version."""
     _LOGGER.error(
-        "Database %s is not supported; Home Assistant supports %s. "
-        "Starting with Home Assistant 2022.6 this prevents the recorder from "
-        "starting. Please migrate your database to a supported software",
+        (
+            "Database %s is not supported; Home Assistant supports %s. "
+            "Starting with Home Assistant 2022.6 this prevents the recorder from "
+            "starting. Please migrate your database to a supported software"
+        ),
         dialect_name,
         "MariaDB ≥ 10.3, MySQL ≥ 8.0, PostgreSQL ≥ 12, SQLite ≥ 3.31.0",
     )

@@ -352,9 +360,11 @@ def _fail_unsupported_version(
 ) -> None:
     """Warn about unsupported database version."""
     _LOGGER.error(
-        "Version %s of %s is not supported; minimum supported version is %s. "
-        "Starting with Home Assistant 2022.6 this prevents the recorder from "
-        "starting. Please upgrade your database software",
+        (
+            "Version %s of %s is not supported; minimum supported version is %s. "
+            "Starting with Home Assistant 2022.6 this prevents the recorder from "
+            "starting. Please upgrade your database software"
+        ),
         server_version,
         dialect_name,
         minimum_version,


@@ -328,8 +328,10 @@ class RememberTheMilk(Entity):
         rtm_id = self._rtm_config.get_rtm_id(self._name, hass_id)
         if rtm_id is None:
             _LOGGER.error(
-                "Could not find task with ID %s in account %s. "
-                "So task could not be closed",
+                (
+                    "Could not find task with ID %s in account %s. "
+                    "So task could not be closed"
+                ),
                 hass_id,
                 self._name,
             )


@@ -235,9 +235,11 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
         keepalive_idle_timer = None
     elif keepalive_idle_timer <= 30:
         _LOGGER.warning(
-            "A very short TCP Keepalive IDLE timer was provided (%d secs) "
-            "and may produce unexpected disconnections from RFlink device."
-            " Recommended values: 60-3600 (seconds)",
+            (
+                "A very short TCP Keepalive IDLE timer was provided (%d secs) "
+                "and may produce unexpected disconnections from RFlink device."
+                " Recommended values: 60-3600 (seconds)"
+            ),
             keepalive_idle_timer,
         )


@@ -135,9 +135,11 @@ def devices_from_config(domain_config):
         repetitions_enabled = device_config[CONF_SIGNAL_REPETITIONS] != 1
         if is_hybrid and repetitions_enabled:
             _LOGGER.warning(
-                "Hybrid type for %s not compatible with signal "
-                "repetitions. Please set 'dimmable' or 'switchable' "
-                "type explicitly in configuration",
+                (
+                    "Hybrid type for %s not compatible with signal "
+                    "repetitions. Please set 'dimmable' or 'switchable' "
+                    "type explicitly in configuration"
+                ),
                 device_id,
             )


@@ -405,11 +405,13 @@ def find_possible_pt2262_device(device_ids: list[str], device_id: str) -> str | None:
         if size is not None:
             size = len(dev_id) - size - 1
             _LOGGER.info(
-                "Found possible device %s for %s "
-                "with the following configuration:\n"
-                "data_bits=%d\n"
-                "command_on=0x%s\n"
-                "command_off=0x%s\n",
+                (
+                    "Found possible device %s for %s "
+                    "with the following configuration:\n"
+                    "data_bits=%d\n"
+                    "command_on=0x%s\n"
+                    "command_off=0x%s\n"
+                ),
                 device_id,
                 dev_id,
                 size * 4,


@@ -56,6 +56,7 @@ class DiffuserPerfumeAmount(DiffuserEntity, NumberEntity):
         """Set the perfume amount."""
         if not value.is_integer():
             raise ValueError(
-                f"Can't set the perfume amount to {value}. Perfume amount must be an integer."
+                f"Can't set the perfume amount to {value}. Perfume amount must be an"
+                " integer."
             )
         await self._diffuser.set_perfume_amount(int(value))


@@ -415,7 +415,10 @@ class RoonDevice(MediaPlayerEntity):
             return
         if name not in sync_available:
             _LOGGER.error(
-                "Can't join player %s with %s because it's not in the join available list %s",
+                (
+                    "Can't join player %s with %s because it's not in the join"
+                    " available list %s"
+                ),
                 name,
                 self.name,
                 list(sync_available),


@@ -19,7 +19,10 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback
 _LOGGER = logging.getLogger(__name__)

 DESCRIPTION_NORMALIZED = "Voltage normalized. Everything is working as intended."
-DESCRIPTION_UNDER_VOLTAGE = "Under-voltage was detected. Consider getting a uninterruptible power supply for your Raspberry Pi."
+DESCRIPTION_UNDER_VOLTAGE = (
+    "Under-voltage was detected. Consider getting a uninterruptible power supply for"
+    " your Raspberry Pi."
+)


 async def async_setup_entry(


@@ -10,7 +10,10 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
     """Set up the Safe Mode component."""
     persistent_notification.async_create(
         hass,
-        "Home Assistant is running in safe mode. Check [the error log](/config/logs) to see what went wrong.",
+        (
+            "Home Assistant is running in safe mode. Check [the error"
+            " log](/config/logs) to see what went wrong."
+        ),
         "Safe Mode",
     )
     return True
return True