Mirror of https://github.com/home-assistant/core.git (synced 2025-04-24 01:08:12 +00:00)
Enable Ruff RUF010 (#115371)
Co-authored-by: J. Nick Koston <nick@koston.org>
Commit ac54cdcdb4 (parent 589104f63d)
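
Ruff's RUF010 rule ("explicit-f-string-type-conversion") flags `str(...)`, `repr(...)`, and `ascii(...)` calls inside f-string replacement fields and rewrites them to the equivalent conversion flags `!s`, `!r`, and `!a`. The flag is applied by the interpreter as part of formatting, so the output is identical while skipping a global name lookup and a function call. A minimal sketch of the equivalence (the variable `cmd` is illustrative, not taken from any one file below):

    cmd = "power_on"

    # Before: repr() is looked up and called inside the replacement field.
    before = f"Command not found: {repr(cmd)}"

    # After: the !r conversion flag applies repr() during formatting.
    after = f"Command not found: {cmd!r}"

    assert before == after  # both render: Command not found: 'power_on'

The hunks below apply that rewrite across the tree.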
@@ -149,7 +149,7 @@ class BroadlinkRemote(BroadlinkEntity, RemoteEntity, RestoreEntity):
         try:
             codes = self._codes[device][cmd]
         except KeyError as err:
-            raise ValueError(f"Command not found: {repr(cmd)}") from err
+            raise ValueError(f"Command not found: {cmd!r}") from err

         if isinstance(codes, list):
             codes = codes[:]
@@ -160,7 +160,7 @@ class BroadlinkRemote(BroadlinkEntity, RemoteEntity, RestoreEntity):
                 try:
                     codes[idx] = data_packet(code)
                 except ValueError as err:
-                    raise ValueError(f"Invalid code: {repr(code)}") from err
+                    raise ValueError(f"Invalid code: {code!r}") from err

             code_list.append(codes)
         return code_list
@@ -448,7 +448,7 @@ class BroadlinkRemote(BroadlinkEntity, RemoteEntity, RestoreEntity):
         try:
             codes = self._codes[subdevice]
         except KeyError as err:
-            err_msg = f"Device not found: {repr(subdevice)}"
+            err_msg = f"Device not found: {subdevice!r}"
             _LOGGER.error("Failed to call %s. %s", service, err_msg)
             raise ValueError(err_msg) from err

@@ -461,9 +461,9 @@ class BroadlinkRemote(BroadlinkEntity, RemoteEntity, RestoreEntity):

         if cmds_not_found:
             if len(cmds_not_found) == 1:
-                err_msg = f"Command not found: {repr(cmds_not_found[0])}"
+                err_msg = f"Command not found: {cmds_not_found[0]!r}"
             else:
-                err_msg = f"Commands not found: {repr(cmds_not_found)}"
+                err_msg = f"Commands not found: {cmds_not_found!r}"

             if len(cmds_not_found) == len(commands):
                 _LOGGER.error("Failed to call %s. %s", service, err_msg)
@@ -56,7 +56,7 @@ class DwdWeatherWarningsCoordinator(DataUpdateCoordinator[None]):
         try:
             position = get_position_data(self.hass, self._device_tracker)
         except (EntityNotFoundError, AttributeError) as err:
-            raise UpdateFailed(f"Error fetching position: {repr(err)}") from err
+            raise UpdateFailed(f"Error fetching position: {err!r}") from err

         distance = None
         if self._previous_position is not None:
@@ -519,7 +519,7 @@ class GoogleCalendarEntity(
                 CalendarSyncUpdateCoordinator, self.coordinator
             ).sync.store_service.async_add_event(event)
         except ApiException as err:
-            raise HomeAssistantError(f"Error while creating event: {str(err)}") from err
+            raise HomeAssistantError(f"Error while creating event: {err!s}") from err
         await self.coordinator.async_refresh()

     async def async_delete_event(
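Note that RUF010 rewrites `{str(err)}` to `{err!s}` rather than to a bare `{err}`. The two are not always interchangeable: `!s` forces `str()`, while bare interpolation routes through the object's `__format__`, which a type may override. A contrived sketch (the `Level` class is illustrative, not from the code above):

    class Level:
        def __str__(self) -> str:
            return "WARN"

        def __format__(self, spec: str) -> str:
            return "custom"

    lvl = Level()
    assert f"{lvl!s}" == "WARN"    # !s forces str()
    assert f"{lvl}" == "custom"    # bare interpolation calls __format__

Keeping the explicit flag makes the fix behavior-preserving for every type.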
@@ -404,7 +404,7 @@ def print_aldb_to_log(aldb):
         hwm = "Y" if rec.is_high_water_mark else "N"
         log_msg = (
             f" {rec.mem_addr:04x} {in_use:s} {mode:s} {hwm:s} "
-            f"{rec.group:3d} {str(rec.target):s} {rec.data1:3d} "
+            f"{rec.group:3d} {rec.target!s:s} {rec.data1:3d} "
             f"{rec.data2:3d} {rec.data3:3d}"
         )
         logger.info(log_msg)
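The INSTEON hunk above is the one place where the conversion flag is combined with a format spec: `{rec.target!s:s}`. The order matters — the conversion runs first, then the spec is applied to its result. Dropping the conversion to get `{rec.target:s}` would not be safe, because the default `object.__format__` rejects any non-empty spec. A small sketch with a hypothetical stand-in class that only defines `__str__`:

    class Address:
        # Stand-in for the ALDB record target; only __str__ is defined.
        def __str__(self) -> str:
            return "1A.2B.3C"

    target = Address()
    assert f"{target!s:s}" == "1A.2B.3C"  # str() runs first, then :s applies
    # f"{target:s}" would raise TypeError: object.__format__ rejects "s"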
@@ -337,7 +337,7 @@ class ModbusHub:
         try:
             await self._client.connect()  # type: ignore[union-attr]
         except ModbusException as exception_error:
-            err = f"{self.name} connect failed, retry in pymodbus ({str(exception_error)})"
+            err = f"{self.name} connect failed, retry in pymodbus ({exception_error!s})"
             self._log_error(err, error_state=False)
             return
         message = f"modbus {self.name} communication open"
@@ -404,9 +404,7 @@ class ModbusHub:
         try:
             result: ModbusResponse = await entry.func(address, value, **kwargs)
         except ModbusException as exception_error:
-            error = (
-                f"Error: device: {slave} address: {address} -> {str(exception_error)}"
-            )
+            error = f"Error: device: {slave} address: {address} -> {exception_error!s}"
             self._log_error(error)
             return None
         if not result:
@@ -416,7 +414,7 @@ class ModbusHub:
             self._log_error(error)
             return None
         if not hasattr(result, entry.attr):
-            error = f"Error: device: {slave} address: {address} -> {str(result)}"
+            error = f"Error: device: {slave} address: {address} -> {result!s}"
             self._log_error(error)
             return None
         if result.isError():
@@ -183,9 +183,7 @@ def struct_validator(config: dict[str, Any]) -> dict[str, Any]:
     try:
         size = struct.calcsize(structure)
     except struct.error as err:
-        raise vol.Invalid(
-            f"{name}: error in structure format --> {str(err)}"
-        ) from err
+        raise vol.Invalid(f"{name}: error in structure format --> {err!s}") from err
     bytecount = count * 2
     if bytecount != size:
         raise vol.Invalid(
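A side effect visible in the `-404,9 +404,7` hunk above: `{exception_error!s}` is short enough that the previously wrapped assignment now fits within the line-length limit, so the autofix plus the formatter collapse the three-line parenthesized string back to a single line. The same collapse happens in the `struct_validator` hunk (`-183,9 +183,7`).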
@@ -216,8 +216,8 @@ class MqttBinarySensor(MqttEntity, BinarySensorEntity, RestoreEntity):
             template_info = ""
             if self._config.get(CONF_VALUE_TEMPLATE) is not None:
                 template_info = (
-                    f", template output: '{str(payload)}', with value template"
-                    f" '{str(self._config.get(CONF_VALUE_TEMPLATE))}'"
+                    f", template output: '{payload!s}', with value template"
+                    f" '{self._config.get(CONF_VALUE_TEMPLATE)!s}'"
                 )
             _LOGGER.info(
                 (
@@ -202,7 +202,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
         await subscriber.start_async()
     except AuthException as err:
         raise ConfigEntryAuthFailed(
-            f"Subscriber authentication error: {str(err)}"
+            f"Subscriber authentication error: {err!s}"
         ) from err
     except ConfigurationException as err:
         _LOGGER.error("Configuration error: %s", err)
@@ -210,13 +210,13 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
         return False
     except SubscriberException as err:
         subscriber.stop_async()
-        raise ConfigEntryNotReady(f"Subscriber error: {str(err)}") from err
+        raise ConfigEntryNotReady(f"Subscriber error: {err!s}") from err

     try:
         device_manager = await subscriber.async_get_device_manager()
     except ApiException as err:
         subscriber.stop_async()
-        raise ConfigEntryNotReady(f"Device manager error: {str(err)}") from err
+        raise ConfigEntryNotReady(f"Device manager error: {err!s}") from err

     hass.data[DOMAIN][entry.entry_id] = {
         DATA_SUBSCRIBER: subscriber,
@@ -51,7 +51,7 @@ class ProgettiHWSWConfigFlow(ConfigFlow, domain=DOMAIN):

         relay_modes_schema = {}
         for i in range(1, int(self.s1_in["relay_count"]) + 1):
-            relay_modes_schema[vol.Required(f"relay_{str(i)}", default="bistable")] = (
+            relay_modes_schema[vol.Required(f"relay_{i!s}", default="bistable")] = (
                 vol.In(
                     {
                         "bistable": "Bistable (ON/OFF Mode)",
@@ -49,7 +49,7 @@ async def async_setup_entry(
             ProgettihwswSwitch(
                 coordinator,
                 f"Relay #{i}",
-                setup_switch(board_api, i, config_entry.data[f"relay_{str(i)}"]),
+                setup_switch(board_api, i, config_entry.data[f"relay_{i!s}"]),
             )
             for i in range(1, int(relay_count) + 1)
         )
@@ -350,7 +350,7 @@ class ProximityDataUpdateCoordinator(DataUpdateCoordinator[ProximityData]):
                 if cast(int, nearest_distance_to) == int(distance_to):
                     _LOGGER.debug("set equally close entity_data: %s", entity_data)
                     proximity_data[ATTR_NEAREST] = (
-                        f"{proximity_data[ATTR_NEAREST]}, {str(entity_data[ATTR_NAME])}"
+                        f"{proximity_data[ATTR_NEAREST]}, {entity_data[ATTR_NAME]!s}"
                     )

         return ProximityData(proximity_data, entities_data)
@@ -365,7 +365,7 @@ class RachioZone(RachioSwitch):

     def __str__(self):
         """Display the zone as a string."""
-        return f'Rachio Zone "{self.name}" on {str(self._controller)}'
+        return f'Rachio Zone "{self.name}" on {self._controller!s}'

     @property
     def zone_id(self) -> str:
@@ -120,12 +120,12 @@ class RainbirdConfigFlowHandler(ConfigFlow, domain=DOMAIN):
             )
         except TimeoutError as err:
             raise ConfigFlowError(
-                f"Timeout connecting to Rain Bird controller: {str(err)}",
+                f"Timeout connecting to Rain Bird controller: {err!s}",
                 "timeout_connect",
             ) from err
         except RainbirdApiException as err:
             raise ConfigFlowError(
-                f"Error connecting to Rain Bird controller: {str(err)}",
+                f"Error connecting to Rain Bird controller: {err!s}",
                 "cannot_connect",
             ) from err
         finally:
@@ -107,7 +107,7 @@ def _validate_unit(options: dict[str, Any]) -> None:
         and (unit := options.get(CONF_UNIT_OF_MEASUREMENT)) not in units
     ):
         sorted_units = sorted(
-            [f"'{str(unit)}'" if unit else "no unit of measurement" for unit in units],
+            [f"'{unit!s}'" if unit else "no unit of measurement" for unit in units],
             key=str.casefold,
         )
         if len(sorted_units) == 1:
@@ -67,7 +67,7 @@ async def async_setup_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> bool:
     ) as err:
         await host.stop()
         raise ConfigEntryNotReady(
-            f"Error while trying to setup {host.api.host}:{host.api.port}: {str(err)}"
+            f"Error while trying to setup {host.api.host}:{host.api.port}: {err!s}"
         ) from err
     except Exception:
         await host.stop()
@@ -319,7 +319,7 @@ class BlockSleepingClimate(
             self.coordinator.last_update_success = False
             raise HomeAssistantError(
                 f"Setting state for entity {self.name} failed, state: {kwargs}, error:"
-                f" {repr(err)}"
+                f" {err!r}"
             ) from err
         except InvalidAuthError:
             await self.coordinator.async_shutdown_device_and_start_reauth()
@@ -353,7 +353,7 @@ class ShellyBlockCoordinator(ShellyCoordinatorBase[BlockDevice]):
         try:
             await self.device.update()
         except DeviceConnectionError as err:
-            raise UpdateFailed(f"Error fetching data: {repr(err)}") from err
+            raise UpdateFailed(f"Error fetching data: {err!r}") from err
         except InvalidAuthError:
             await self.async_shutdown_device_and_start_reauth()

@@ -444,7 +444,7 @@ class ShellyRestCoordinator(ShellyCoordinatorBase[BlockDevice]):
                 return
             await self.device.update_shelly()
         except DeviceConnectionError as err:
-            raise UpdateFailed(f"Error fetching data: {repr(err)}") from err
+            raise UpdateFailed(f"Error fetching data: {err!r}") from err
         except InvalidAuthError:
             await self.async_shutdown_device_and_start_reauth()
         else:
@@ -732,7 +732,7 @@ class ShellyRpcPollingCoordinator(ShellyCoordinatorBase[RpcDevice]):
         try:
             await self.device.update_status()
         except (DeviceConnectionError, RpcCallError) as err:
-            raise UpdateFailed(f"Device disconnected: {repr(err)}") from err
+            raise UpdateFailed(f"Device disconnected: {err!r}") from err
         except InvalidAuthError:
             await self.async_shutdown_device_and_start_reauth()
@@ -340,7 +340,7 @@ class ShellyBlockEntity(CoordinatorEntity[ShellyBlockCoordinator]):
             self.coordinator.last_update_success = False
             raise HomeAssistantError(
                 f"Setting state for entity {self.name} failed, state: {kwargs}, error:"
-                f" {repr(err)}"
+                f" {err!r}"
             ) from err
         except InvalidAuthError:
             await self.coordinator.async_shutdown_device_and_start_reauth()
@@ -388,12 +388,12 @@ class ShellyRpcEntity(CoordinatorEntity[ShellyRpcCoordinator]):
             self.coordinator.last_update_success = False
             raise HomeAssistantError(
                 f"Call RPC for {self.name} connection error, method: {method}, params:"
-                f" {params}, error: {repr(err)}"
+                f" {params}, error: {err!r}"
             ) from err
         except RpcCallError as err:
             raise HomeAssistantError(
                 f"Call RPC for {self.name} request error, method: {method}, params:"
-                f" {params}, error: {repr(err)}"
+                f" {params}, error: {err!r}"
             ) from err
         except InvalidAuthError:
             await self.coordinator.async_shutdown_device_and_start_reauth()
@@ -122,7 +122,7 @@ class BlockSleepingNumber(ShellySleepingBlockAttributeEntity, RestoreNumber):
             self.coordinator.last_update_success = False
             raise HomeAssistantError(
                 f"Setting state for entity {self.name} failed, state: {params}, error:"
-                f" {repr(err)}"
+                f" {err!r}"
             ) from err
         except InvalidAuthError:
             await self.coordinator.async_shutdown_device_and_start_reauth()
@@ -197,7 +197,7 @@ class RestUpdateEntity(ShellyRestAttributeEntity, UpdateEntity):
         try:
             result = await self.coordinator.device.trigger_ota_update(beta=beta)
         except DeviceConnectionError as err:
-            raise HomeAssistantError(f"Error starting OTA update: {repr(err)}") from err
+            raise HomeAssistantError(f"Error starting OTA update: {err!r}") from err
         except InvalidAuthError:
             await self.coordinator.async_shutdown_device_and_start_reauth()
         else:
@@ -286,11 +286,9 @@ class RpcUpdateEntity(ShellyRpcAttributeEntity, UpdateEntity):
         try:
             await self.coordinator.device.trigger_ota_update(beta=beta)
         except DeviceConnectionError as err:
-            raise HomeAssistantError(
-                f"OTA update connection error: {repr(err)}"
-            ) from err
+            raise HomeAssistantError(f"OTA update connection error: {err!r}") from err
         except RpcCallError as err:
-            raise HomeAssistantError(f"OTA update request error: {repr(err)}") from err
+            raise HomeAssistantError(f"OTA update request error: {err!r}") from err
         except InvalidAuthError:
             await self.coordinator.async_shutdown_device_and_start_reauth()
         else:
@@ -592,7 +592,7 @@ def stream_worker(
     except av.AVError as ex:
         container.close()
         raise StreamWorkerError(
-            f"Error demuxing stream while finding first packet: {str(ex)}"
+            f"Error demuxing stream while finding first packet: {ex!s}"
         ) from ex

     muxer = StreamMuxer(
@@ -617,7 +617,7 @@ def stream_worker(
         except StopIteration as ex:
             raise StreamEndedError("Stream ended; no additional packets") from ex
         except av.AVError as ex:
-            raise StreamWorkerError(f"Error demuxing stream: {str(ex)}") from ex
+            raise StreamWorkerError(f"Error demuxing stream: {ex!s}") from ex

         muxer.mux_packet(packet)
@@ -35,5 +35,5 @@ class SwitchBeeButton(SwitchBeeEntity, ButtonEntity):
             await self.coordinator.api.set_state(self._device.id, ApiStateCommand.ON)
         except SwitchBeeError as exp:
             raise HomeAssistantError(
-                f"Failed to fire scenario {self.name}, {str(exp)}"
+                f"Failed to fire scenario {self.name}, {exp!s}"
             ) from exp
@@ -181,7 +181,7 @@ class SwitchBeeClimateEntity(SwitchBeeDeviceEntity[SwitchBeeThermostat], ClimateEntity):
             await self.coordinator.api.set_state(self._device.id, state)
         except (SwitchBeeError, SwitchBeeDeviceOfflineError) as exp:
             raise HomeAssistantError(
-                f"Failed to set {self.name} state {state}, error: {str(exp)}"
+                f"Failed to set {self.name} state {state}, error: {exp!s}"
             ) from exp

         await self.coordinator.async_refresh()
@@ -55,7 +55,7 @@ class SwitchBeeSomfyEntity(SwitchBeeDeviceEntity[SwitchBeeSomfy], CoverEntity):
             await self.coordinator.api.set_state(self._device.id, command)
         except (SwitchBeeError, SwitchBeeTokenError) as exp:
             raise HomeAssistantError(
-                f"Failed to fire {command} for {self.name}, {str(exp)}"
+                f"Failed to fire {command} for {self.name}, {exp!s}"
             ) from exp

     async def async_open_cover(self, **kwargs: Any) -> None:
@@ -145,7 +145,7 @@ class SwitchBeeCoverEntity(SwitchBeeDeviceEntity[SwitchBeeShutter], CoverEntity):
         except (SwitchBeeError, SwitchBeeTokenError) as exp:
             raise HomeAssistantError(
                 f"Failed to set {self.name} position to {kwargs[ATTR_POSITION]}, error:"
-                f" {str(exp)}"
+                f" {exp!s}"
             ) from exp

         self._get_coordinator_device().position = kwargs[ATTR_POSITION]
@@ -100,7 +100,7 @@ class SwitchBeeLightEntity(SwitchBeeDeviceEntity[SwitchBeeDimmer], LightEntity):
             await self.coordinator.api.set_state(self._device.id, state)
         except (SwitchBeeError, SwitchBeeDeviceOfflineError) as exp:
             raise HomeAssistantError(
-                f"Failed to set {self.name} state {state}, {str(exp)}"
+                f"Failed to set {self.name} state {state}, {exp!s}"
             ) from exp

         if not isinstance(state, int):
@@ -120,7 +120,7 @@ class SwitchBeeLightEntity(SwitchBeeDeviceEntity[SwitchBeeDimmer], LightEntity):
             await self.coordinator.api.set_state(self._device.id, ApiStateCommand.OFF)
         except (SwitchBeeError, SwitchBeeDeviceOfflineError) as exp:
             raise HomeAssistantError(
-                f"Failed to turn off {self._attr_name}, {str(exp)}"
+                f"Failed to turn off {self._attr_name}, {exp!s}"
             ) from exp

         # update the coordinator manually
@@ -102,7 +102,7 @@ class SwitchBeeSwitchEntity(SwitchBeeDeviceEntity[_DeviceTypeT], SwitchEntity):
         except (SwitchBeeError, SwitchBeeDeviceOfflineError) as exp:
             await self.coordinator.async_refresh()
             raise HomeAssistantError(
-                f"Failed to set {self._attr_name} state {state}, {str(exp)}"
+                f"Failed to set {self._attr_name} state {state}, {exp!s}"
             ) from exp

         await self.coordinator.async_refresh()
@@ -100,9 +100,9 @@ class TedeeApiCoordinator(DataUpdateCoordinator[dict[int, TedeeLock]]):

         except TedeeDataUpdateException as ex:
             _LOGGER.debug("Error while updating data: %s", str(ex))
-            raise UpdateFailed(f"Error while updating data: {str(ex)}") from ex
+            raise UpdateFailed(f"Error while updating data: {ex!s}") from ex
         except (TedeeClientException, TimeoutError) as ex:
-            raise UpdateFailed(f"Querying API failed. Error: {str(ex)}") from ex
+            raise UpdateFailed(f"Querying API failed. Error: {ex!s}") from ex

     def _async_add_remove_locks(self) -> None:
         """Add new locks, remove non-existing locks."""
@@ -130,7 +130,7 @@ def _validate_unit(options: dict[str, Any]) -> None:
         and (unit := options.get(CONF_UNIT_OF_MEASUREMENT)) not in units
     ):
         sorted_units = sorted(
-            [f"'{str(unit)}'" if unit else "no unit of measurement" for unit in units],
+            [f"'{unit!s}'" if unit else "no unit of measurement" for unit in units],
             key=str.casefold,
         )
         if len(sorted_units) == 1:
@@ -153,7 +153,7 @@ def _validate_state_class(options: dict[str, Any]) -> None:
         and state_class not in state_classes
     ):
         sorted_state_classes = sorted(
-            [f"'{str(state_class)}'" for state_class in state_classes],
+            [f"'{state_class!s}'" for state_class in state_classes],
             key=str.casefold,
         )
         if len(sorted_state_classes) == 0:
@@ -121,7 +121,7 @@ class TVFerryConfigFlow(ConfigFlow, domain=DOMAIN):
         if ferry_to:
             name = name + f" to {ferry_to}"
         if ferry_time != "00:00:00":
-            name = name + f" at {str(ferry_time)}"
+            name = name + f" at {ferry_time!s}"

         try:
             await self.validate_input(api_key, ferry_from, ferry_to)
@@ -11,5 +11,5 @@ def create_unique_id(
     """Create unique id."""
     return (
         f"{ferry_from.casefold().replace(' ', '')}-{ferry_to.casefold().replace(' ', '')}"
-        f"-{str(ferry_time)}-{str(weekdays)}"
+        f"-{ferry_time!s}-{weekdays!s}"
     )
@@ -14,7 +14,7 @@ def create_unique_id(
     timestr = str(depart_time) if depart_time else ""
     return (
         f"{from_station.casefold().replace(' ', '')}-{to_station.casefold().replace(' ', '')}"
-        f"-{timestr.casefold().replace(' ', '')}-{str(weekdays)}"
+        f"-{timestr.casefold().replace(' ', '')}-{weekdays!s}"
     )
@@ -67,7 +67,7 @@ class VeSyncBaseEntity(Entity):
         # sensors. Maintaining base_unique_id allows us to group related
         # entities under a single device.
         if isinstance(self.device.sub_device_no, int):
-            return f"{self.device.cid}{str(self.device.sub_device_no)}"
+            return f"{self.device.cid}{self.device.sub_device_no!s}"
         return self.device.cid

     @property
@@ -108,7 +108,7 @@ class VodafoneStationRouter(DataUpdateCoordinator[UpdateCoordinatorDataType]):
                 exceptions.AlreadyLogged,
                 exceptions.GenericLoginError,
             ) as err:
-                raise UpdateFailed(f"Error fetching data: {repr(err)}") from err
+                raise UpdateFailed(f"Error fetching data: {err!r}") from err
             except (ConfigEntryAuthFailed, UpdateFailed):
                 await self.api.close()
                 raise
@@ -583,7 +583,7 @@ class ZDOClusterHandler(LogMixin):
         self._cluster = device.device.endpoints[0]
         self._zha_device = device
         self._status = ClusterHandlerStatus.CREATED
-        self._unique_id = f"{str(device.ieee)}:{device.name}_ZDO"
+        self._unique_id = f"{device.ieee!s}:{device.name}_ZDO"
         self._cluster.add_listener(self)

     @property
@@ -44,7 +44,7 @@ class Endpoint:
         self._all_cluster_handlers: dict[str, ClusterHandler] = {}
         self._claimed_cluster_handlers: dict[str, ClusterHandler] = {}
         self._client_cluster_handlers: dict[str, ClientClusterHandler] = {}
-        self._unique_id: str = f"{str(device.ieee)}-{zigpy_endpoint.endpoint_id}"
+        self._unique_id: str = f"{device.ieee!s}-{zigpy_endpoint.endpoint_id}"

     @property
     def device(self) -> ZHADevice:
@@ -269,7 +269,7 @@ class ZHAGateway:
             delta_msg = "not known"
             if zha_device.last_seen is not None:
                 delta = round(time.time() - zha_device.last_seen)
-                delta_msg = f"{str(timedelta(seconds=delta))} ago"
+                delta_msg = f"{timedelta(seconds=delta)!s} ago"
             _LOGGER.debug(
                 (
                     "[%s](%s) restored as '%s', last seen: %s,"
@@ -470,7 +470,7 @@ class ZHAGateway:
         if zha_device is not None:
             device_info = zha_device.zha_device_info
             zha_device.async_cleanup_handles()
-            async_dispatcher_send(self.hass, f"{SIGNAL_REMOVE}_{str(zha_device.ieee)}")
+            async_dispatcher_send(self.hass, f"{SIGNAL_REMOVE}_{zha_device.ieee!s}")
             self.hass.async_create_task(
                 self._async_remove_device(zha_device, entity_refs),
                 "ZHAGateway._async_remove_device",
@@ -1314,7 +1314,7 @@ def async_load_api(hass: HomeAssistant) -> None:
                 manufacturer=manufacturer,
             )
         else:
-            raise ValueError(f"Device with IEEE {str(ieee)} not found")
+            raise ValueError(f"Device with IEEE {ieee!s} not found")

         _LOGGER.debug(
             (
@@ -1394,7 +1394,7 @@ def async_load_api(hass: HomeAssistant) -> None:
             manufacturer,
         )
     else:
-        raise ValueError(f"Device with IEEE {str(ieee)} not found")
+        raise ValueError(f"Device with IEEE {ieee!s} not found")

     async_register_admin_service(
         hass,
@@ -214,5 +214,5 @@ class ZWaveLock(ZWaveBaseEntity, LockEntity):
             return
         msg = f"Result status is {result.status}"
         if result.remaining_duration is not None:
-            msg += f" and remaining duration is {str(result.remaining_duration)}"
+            msg += f" and remaining duration is {result.remaining_duration!s}"
         LOGGER.info("%s after setting lock configuration for %s", msg, self.entity_id)
@@ -67,7 +67,7 @@ class ZWaveMeCover(ZWaveMeEntity, CoverEntity):
         """Update the current value."""
         value = kwargs[ATTR_POSITION]
         self.controller.zwave_api.send_command(
-            self.device.id, f"exact?level={str(min(value, 99))}"
+            self.device.id, f"exact?level={min(value, 99)!s}"
         )

     def stop_cover(self, **kwargs: Any) -> None:
@@ -50,5 +50,5 @@ class ZWaveMeNumber(ZWaveMeEntity, NumberEntity):
     def set_native_value(self, value: float) -> None:
         """Update the current value."""
         self.controller.zwave_api.send_command(
-            self.device.id, f"exact?level={str(round(value))}"
+            self.device.id, f"exact?level={round(value)!s}"
         )
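For the Z-Wave-Me cases the operand is already a plain `int` (`min(value, 99)`, `round(value)`), so the `!s` flag is redundant with bare interpolation: `f"{87!s}"` and `f"{87}"` render identically. RUF010 rewrites the `str()` call mechanically rather than deciding whether the conversion can be dropped altogether; a later cleanup could remove the flag here with no behavior change.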
@@ -1079,7 +1079,7 @@ async def merge_packages_config(
                 pack_name,
                 None,
                 config,
-                f"Invalid package definition '{pack_name}': {str(exc)}. Package "
+                f"Invalid package definition '{pack_name}': {exc!s}. Package "
                 f"will not be initialized",
             )
             invalid_packages.append(pack_name)
@@ -1107,7 +1107,7 @@ async def merge_packages_config(
                 pack_name,
                 comp_name,
                 config,
-                f"Integration {comp_name} caused error: {str(exc)}",
+                f"Integration {comp_name} caused error: {exc!s}",
             )
             continue
         except INTEGRATION_LOAD_EXCEPTIONS as exc:
@@ -2899,7 +2899,7 @@ class Config:

     def is_allowed_external_url(self, url: str) -> bool:
         """Check if an external URL is allowed."""
-        parsed_url = f"{str(yarl.URL(url))}/"
+        parsed_url = f"{yarl.URL(url)!s}/"

         return any(
             allowed
@@ -909,7 +909,7 @@ class _ScriptRun:
         count = len(items)
         for iteration, item in enumerate(items, 1):
             set_repeat_var(iteration, count, item)
-            extra_msg = f" of {count} with item: {repr(item)}"
+            extra_msg = f" of {count} with item: {item!r}"
             if self._stop.done():
                 break
             await async_run_sequence(iteration, extra_msg)
@@ -705,6 +705,7 @@ select = [
     "RSE", # flake8-raise
     "RUF005", # Consider iterable unpacking instead of concatenation
     "RUF006", # Store a reference to the return value of asyncio.create_task
+    "RUF010", # Use explicit conversion flag
     "RUF013", # PEP 484 prohibits implicit Optional
     "RUF018", # Avoid assignment expressions in assert statements
     "RUF019", # Unnecessary key check before dictionary access
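With the rule added to the `select` list in pyproject.toml, `ruff check --fix` applies the rewrite automatically; the code changes in this commit are essentially that autofix output (a one-off run such as `ruff check --select RUF010 --fix .` produces the same edits). Each RUF rule is opted into individually here, which is why `RUF010` lands between `RUF006` and `RUF013`.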
@@ -92,9 +92,7 @@ async def test_binary_sensor_setup_no_notify(
     caplog.set_level(logging.INFO)

     def raise_notification_error(self, port, callback, direction):
-        raise NumatoGpioError(
-            f"{repr(self)} Mockup device doesn't support notifications."
-        )
+        raise NumatoGpioError(f"{self!r} Mockup device doesn't support notifications.")

     with patch.object(
         NumatoModuleMock.NumatoDeviceMock,
@@ -350,7 +350,7 @@ async def test_schema_migrate(

     This simulates an existing db with the old schema.
     """
-    module = f"tests.components.recorder.db_schema_{str(start_version)}"
+    module = f"tests.components.recorder.db_schema_{start_version!s}"
     importlib.import_module(module)
     old_models = sys.modules[module]
     engine = create_engine(*args, **kwargs)
@@ -1342,7 +1342,7 @@ async def test_state_characteristics(hass: HomeAssistant) -> None:
             "value mismatch for characteristic "
             f"'{characteristic['source_sensor_domain']}/{characteristic['name']}' "
             "(buffer filled) - "
-            f"assert {state.state} == {str(characteristic['value_9'])}"
+            f"assert {state.state} == {characteristic['value_9']!s}"
         )
         assert (
             state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == characteristic["unit"]
@@ -1368,7 +1368,7 @@ async def test_state_characteristics(hass: HomeAssistant) -> None:
             "value mismatch for characteristic "
             f"'{characteristic['source_sensor_domain']}/{characteristic['name']}' "
             "(one stored value) - "
-            f"assert {state.state} == {str(characteristic['value_1'])}"
+            f"assert {state.state} == {characteristic['value_1']!s}"
         )

         # With empty buffer
@@ -1391,7 +1391,7 @@ async def test_state_characteristics(hass: HomeAssistant) -> None:
             "value mismatch for characteristic "
             f"'{characteristic['source_sensor_domain']}/{characteristic['name']}' "
             "(buffer empty) - "
-            f"assert {state.state} == {str(characteristic['value_0'])}"
+            f"assert {state.state} == {characteristic['value_0']!s}"
         )
@@ -789,7 +789,7 @@ async def test_quirks_v2_entity_no_metadata(
     setattr(zigpy_device, "_exposes_metadata", {})
     zha_device = await zha_device_joined(zigpy_device)
     assert (
-        f"Device: {str(zigpy_device.ieee)}-{zha_device.name} does not expose any quirks v2 entities"
+        f"Device: {zigpy_device.ieee!s}-{zha_device.name} does not expose any quirks v2 entities"
         in caplog.text
     )

@@ -807,14 +807,14 @@ async def test_quirks_v2_entity_discovery_errors(
     )
     zha_device = await zha_device_joined(zigpy_device)

-    m1 = f"Device: {str(zigpy_device.ieee)}-{zha_device.name} does not have an"
+    m1 = f"Device: {zigpy_device.ieee!s}-{zha_device.name} does not have an"
     m2 = " endpoint with id: 3 - unable to create entity with cluster"
     m3 = " details: (3, 6, <ClusterType.Server: 0>)"
     assert f"{m1}{m2}{m3}" in caplog.text

     time_cluster_id = zigpy.zcl.clusters.general.Time.cluster_id

-    m1 = f"Device: {str(zigpy_device.ieee)}-{zha_device.name} does not have a"
+    m1 = f"Device: {zigpy_device.ieee!s}-{zha_device.name} does not have a"
     m2 = f" cluster with id: {time_cluster_id} - unable to create entity with "
     m3 = f"cluster details: (1, {time_cluster_id}, <ClusterType.Server: 0>)"
     assert f"{m1}{m2}{m3}" in caplog.text
@@ -831,7 +831,7 @@ async def test_quirks_v2_entity_discovery_errors(
     )
     # fmt: on

-    m1 = f"Device: {str(zigpy_device.ieee)}-{zha_device.name} has an entity with "
+    m1 = f"Device: {zigpy_device.ieee!s}-{zha_device.name} has an entity with "
     m2 = f"details: {entity_details} that does not have an entity class mapping - "
     m3 = "unable to create entity"
     assert f"{m1}{m2}{m3}" in caplog.text
@@ -96,7 +96,7 @@ async def test_zha_logbook_event_device_with_triggers(
             CONF_DEVICE_ID: reg_device.id,
             COMMAND: COMMAND_SHAKE,
             "device_ieee": str(ieee_address),
-            CONF_UNIQUE_ID: f"{str(ieee_address)}:1:0x0006",
+            CONF_UNIQUE_ID: f"{ieee_address!s}:1:0x0006",
             "endpoint_id": 1,
             "cluster_id": 6,
             "params": {
@@ -110,7 +110,7 @@ async def test_zha_logbook_event_device_with_triggers(
             CONF_DEVICE_ID: reg_device.id,
             COMMAND: COMMAND_DOUBLE,
             "device_ieee": str(ieee_address),
-            CONF_UNIQUE_ID: f"{str(ieee_address)}:1:0x0006",
+            CONF_UNIQUE_ID: f"{ieee_address!s}:1:0x0006",
             "endpoint_id": 1,
             "cluster_id": 6,
             "params": {
@@ -124,7 +124,7 @@ async def test_zha_logbook_event_device_with_triggers(
             CONF_DEVICE_ID: reg_device.id,
             COMMAND: COMMAND_DOUBLE,
             "device_ieee": str(ieee_address),
-            CONF_UNIQUE_ID: f"{str(ieee_address)}:1:0x0006",
+            CONF_UNIQUE_ID: f"{ieee_address!s}:1:0x0006",
             "endpoint_id": 2,
             "cluster_id": 6,
             "params": {
@@ -175,7 +175,7 @@ async def test_zha_logbook_event_device_no_triggers(
             CONF_DEVICE_ID: reg_device.id,
             COMMAND: COMMAND_SHAKE,
             "device_ieee": str(ieee_address),
-            CONF_UNIQUE_ID: f"{str(ieee_address)}:1:0x0006",
+            CONF_UNIQUE_ID: f"{ieee_address!s}:1:0x0006",
             "endpoint_id": 1,
             "cluster_id": 6,
             "params": {
@@ -188,7 +188,7 @@ async def test_zha_logbook_event_device_no_triggers(
         {
             CONF_DEVICE_ID: reg_device.id,
             "device_ieee": str(ieee_address),
-            CONF_UNIQUE_ID: f"{str(ieee_address)}:1:0x0006",
+            CONF_UNIQUE_ID: f"{ieee_address!s}:1:0x0006",
             "endpoint_id": 1,
             "cluster_id": 6,
             "params": {
@@ -201,7 +201,7 @@ async def test_zha_logbook_event_device_no_triggers(
         {
             CONF_DEVICE_ID: reg_device.id,
             "device_ieee": str(ieee_address),
-            CONF_UNIQUE_ID: f"{str(ieee_address)}:1:0x0006",
+            CONF_UNIQUE_ID: f"{ieee_address!s}:1:0x0006",
             "endpoint_id": 1,
             "cluster_id": 6,
             "params": {},
@@ -212,7 +212,7 @@ async def test_zha_logbook_event_device_no_triggers(
         {
             CONF_DEVICE_ID: reg_device.id,
             "device_ieee": str(ieee_address),
-            CONF_UNIQUE_ID: f"{str(ieee_address)}:1:0x0006",
+            CONF_UNIQUE_ID: f"{ieee_address!s}:1:0x0006",
             "endpoint_id": 1,
             "cluster_id": 6,
         },
@@ -355,7 +355,7 @@ def verify_cleanup(
             if expected_lingering_tasks:
                 _LOGGER.warning("Lingering task after test %r", task)
             else:
-                pytest.fail(f"Lingering task after test {repr(task)}")
+                pytest.fail(f"Lingering task after test {task!r}")
             task.cancel()
     if tasks:
         event_loop.run_until_complete(asyncio.wait(tasks))
@@ -368,9 +368,9 @@ def verify_cleanup(
         elif handle._args and isinstance(job := handle._args[-1], HassJob):
             if job.cancel_on_shutdown:
                 continue
-            pytest.fail(f"Lingering timer after job {repr(job)}")
+            pytest.fail(f"Lingering timer after job {job!r}")
         else:
-            pytest.fail(f"Lingering timer after test {repr(handle)}")
+            pytest.fail(f"Lingering timer after test {handle!r}")
         handle.cancel()

     # Verify no threads where left behind.
@@ -4685,7 +4685,7 @@ async def test_unhashable_unique_id(
     entries[entry.entry_id] = entry
     assert (
         "Config entry 'title' from integration test has an invalid unique_id "
-        f"'{str(unique_id)}'"
+        f"'{unique_id!s}'"
     ) in caplog.text

     assert entry.entry_id in entries