mirror of https://github.com/home-assistant/core.git (synced 2025-04-24 09:17:53 +00:00)

Enable ruff RUF005 and fix occurrences (#113589)

commit ccd2e989c3 (parent 2a5c85a020)
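RUF005 is ruff's collection-literal-concatenation rule: it flags building a list or tuple by concatenating a literal with `+` and suggests a single literal that uses iterable unpacking instead, which is the rewrite every hunk below applies. A minimal sketch of the pattern, with illustrative names that are not taken from the codebase:

    base = ["a", "b"]

    # Before: `+` concatenation builds an intermediate list on the right-hand side
    combined = base + ["c"]

    # After: one list literal with unpacking, as RUF005 suggests
    combined = [*base, "c"]

    # The same spelling works for tuples, e.g. when prepending context to *args
    extras = ("x", "y")
    args = (1, 2, *extras)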
@@ -58,7 +58,7 @@ HVAC_MODES = [
     HVACMode.FAN_ONLY,
     HVACMode.DRY,
 ]
-HVAC_MODES_MYAUTO = HVAC_MODES + [HVACMode.HEAT_COOL]
+HVAC_MODES_MYAUTO = [*HVAC_MODES, HVACMode.HEAT_COOL]
 SUPPORTED_FEATURES = (
     ClimateEntityFeature.FAN_MODE
     | ClimateEntityFeature.TURN_OFF
@@ -132,7 +132,7 @@ async def async_setup_entry(
         sensor
         for sensor in SENSOR_TYPES
         if sensor.key
-        in coordinator.cam.enabled_sensors + ["audio_connections", "video_connections"]
+        in [*coordinator.cam.enabled_sensors, "audio_connections", "video_connections"]
     ]
     async_add_entities(
         IPWebcamSensor(coordinator, description) for description in sensor_types
@@ -235,7 +235,7 @@ class OptionsFlowHandler(OptionsFlowWithConfigEntry):
         apps = [SelectOptionDict(value=APPS_NEW_ID, label="Add new")] + [
             SelectOptionDict(value=k, label=v) for k, v in apps_list.items()
         ]
-        rules = [RULES_NEW_ID] + list(self._state_det_rules)
+        rules = [RULES_NEW_ID, *self._state_det_rules]
         options = self.options
 
         data_schema = vol.Schema(
@@ -137,7 +137,7 @@ class APIEventStream(HomeAssistantView):
 
         restrict: list[str] | None = None
         if restrict_str := request.query.get("restrict"):
-            restrict = restrict_str.split(",") + [EVENT_HOMEASSISTANT_STOP]
+            restrict = [*restrict_str.split(","), EVENT_HOMEASSISTANT_STOP]
 
         async def forward_events(event: Event) -> None:
            """Forward events to the open request."""
@@ -33,7 +33,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
     )
 
     # Ignore services that don't support usage data
-    ignore_types = FETCH_TYPES + ["Hardware"]
+    ignore_types = [*FETCH_TYPES, "Hardware"]
 
     try:
         await client.login()
@@ -323,9 +323,7 @@ class DemoMusicPlayer(AbstractDemoPlayer):
 
     def join_players(self, group_members: list[str]) -> None:
         """Join `group_members` as a player group with the current player."""
-        self._attr_group_members = [
-            self.entity_id,
-        ] + group_members
+        self._attr_group_members = [self.entity_id, *group_members]
         self.schedule_update_ha_state()
 
     def unjoin_player(self) -> None:
@@ -236,7 +236,7 @@ class GenericThermostat(ClimateEntity, RestoreEntity):
         )
         if len(presets):
             self._attr_supported_features |= ClimateEntityFeature.PRESET_MODE
-            self._attr_preset_modes = [PRESET_NONE] + list(presets.keys())
+            self._attr_preset_modes = [PRESET_NONE, *presets.keys()]
         else:
             self._attr_preset_modes = [PRESET_NONE]
         self._presets = presets
@@ -625,7 +625,7 @@ class GoogleEntity:
         if (config_aliases := entity_config.get(CONF_ALIASES, [])) or (
             entity_entry and entity_entry.aliases
         ):
-            device["name"]["nicknames"] = [name] + config_aliases
+            device["name"]["nicknames"] = [name, *config_aliases]
             if entity_entry:
                 device["name"]["nicknames"].extend(entity_entry.aliases)
 
@@ -47,7 +47,7 @@ class HarmonyActivitySelect(HarmonyEntity, SelectEntity):
     @property
     def options(self) -> list[str]:
         """Return a set of selectable options."""
-        return [TRANSLATABLE_POWER_OFF] + sorted(self._data.activity_names)
+        return [TRANSLATABLE_POWER_OFF, *sorted(self._data.activity_names)]
 
     @property
     def current_option(self) -> str | None:
@@ -172,7 +172,7 @@ def parse_mapping(mapping, parents=None):
         if isinstance(addr, (str,)) and isinstance(val, (str,)):
             yield (addr, PhysicalAddress(val))
         else:
-            cur = parents + [addr]
+            cur = [*parents, addr]
             if isinstance(val, dict):
                 yield from parse_mapping(val, cur)
             elif isinstance(val, str):
@@ -201,7 +201,8 @@ class HomeKitHeaterCoolerEntity(HomeKitBaseClimateEntity):
 
     def get_characteristic_types(self) -> list[str]:
         """Define the homekit characteristics the entity cares about."""
-        return super().get_characteristic_types() + [
+        return [
+            *super().get_characteristic_types(),
             CharacteristicsTypes.ACTIVE,
             CharacteristicsTypes.CURRENT_HEATER_COOLER_STATE,
             CharacteristicsTypes.TARGET_HEATER_COOLER_STATE,
@@ -479,7 +480,8 @@ class HomeKitClimateEntity(HomeKitBaseClimateEntity):
 
     def get_characteristic_types(self) -> list[str]:
         """Define the homekit characteristics the entity cares about."""
-        return super().get_characteristic_types() + [
+        return [
+            *super().get_characteristic_types(),
             CharacteristicsTypes.HEATING_COOLING_CURRENT,
             CharacteristicsTypes.HEATING_COOLING_TARGET,
             CharacteristicsTypes.TEMPERATURE_COOLING_THRESHOLD,
@@ -155,8 +155,9 @@ class HomeKitDehumidifier(HomeKitBaseHumidifier):
 
     def get_characteristic_types(self) -> list[str]:
         """Define the homekit characteristics the entity cares about."""
-        return super().get_characteristic_types() + [
-            CharacteristicsTypes.RELATIVE_HUMIDITY_DEHUMIDIFIER_THRESHOLD
+        return [
+            *super().get_characteristic_types(),
+            CharacteristicsTypes.RELATIVE_HUMIDITY_DEHUMIDIFIER_THRESHOLD,
         ]
 
     @property
@@ -214,19 +214,21 @@ NODE_FILTERS: dict[Platform, dict[str, list[str]]] = {
             "7.13.",
             TYPE_CATEGORY_SAFETY,
         ],  # Does a startswith() match; include the dot
-        FILTER_ZWAVE_CAT: (["104", "112", "138"] + list(map(str, range(148, 180)))),
+        FILTER_ZWAVE_CAT: (["104", "112", "138", *map(str, range(148, 180))]),
     },
     Platform.SENSOR: {
         # This is just a more-readable way of including MOST uoms between 1-100
        # (Remember that range() is non-inclusive of the stop value)
         FILTER_UOM: (
-            ["1"]
-            + list(map(str, range(3, 11)))
-            + list(map(str, range(12, 51)))
-            + list(map(str, range(52, 66)))
-            + list(map(str, range(69, 78)))
-            + ["79"]
-            + list(map(str, range(82, 97)))
+            [
+                "1",
+                *map(str, range(3, 11)),
+                *map(str, range(12, 51)),
+                *map(str, range(52, 66)),
+                *map(str, range(69, 78)),
+                "79",
+                *map(str, range(82, 97)),
+            ]
         ),
         FILTER_STATES: [],
         FILTER_NODE_DEF_ID: [
@@ -238,7 +240,7 @@ NODE_FILTERS: dict[Platform, dict[str, list[str]]] = {
             "RemoteLinc2_ADV",
         ],
         FILTER_INSTEON_TYPE: ["0.16.", "0.17.", "0.18.", "9.0.", "9.7."],
-        FILTER_ZWAVE_CAT: (["118", "143"] + list(map(str, range(180, 186)))),
+        FILTER_ZWAVE_CAT: (["118", "143", *map(str, range(180, 186))]),
     },
     Platform.LOCK: {
         FILTER_UOM: ["11"],
@@ -570,9 +570,10 @@ class OptionsFlowHandler(OptionsFlow):
         if user_input is not None:
             zone = {"zone": self.active_cfg}
             zone.update(user_input)
-            self.new_opt[CONF_BINARY_SENSORS] = self.new_opt.get(
-                CONF_BINARY_SENSORS, []
-            ) + [zone]
+            self.new_opt[CONF_BINARY_SENSORS] = [
+                *self.new_opt.get(CONF_BINARY_SENSORS, []),
+                zone,
+            ]
             self.io_cfg.pop(self.active_cfg)
             self.active_cfg = None
 
@@ -645,7 +646,7 @@ class OptionsFlowHandler(OptionsFlow):
         if user_input is not None:
             zone = {"zone": self.active_cfg}
             zone.update(user_input)
-            self.new_opt[CONF_SENSORS] = self.new_opt.get(CONF_SENSORS, []) + [zone]
+            self.new_opt[CONF_SENSORS] = [*self.new_opt.get(CONF_SENSORS, []), zone]
             self.io_cfg.pop(self.active_cfg)
             self.active_cfg = None
 
@@ -714,7 +715,7 @@ class OptionsFlowHandler(OptionsFlow):
             zone = {"zone": self.active_cfg}
             zone.update(user_input)
             del zone[CONF_MORE_STATES]
-            self.new_opt[CONF_SWITCHES] = self.new_opt.get(CONF_SWITCHES, []) + [zone]
+            self.new_opt[CONF_SWITCHES] = [*self.new_opt.get(CONF_SWITCHES, []), zone]
 
             # iterate through multiple switch states
             if self.current_states:
@@ -106,19 +106,17 @@ STATIC_ENERGY_GATES = [
     for i in range(0, 9)
 ]
 
-SENSOR_DESCRIPTIONS = (
-    [
-        MOVING_TARGET_DISTANCE_DESCRIPTION,
-        STATIC_TARGET_DISTANCE_DESCRIPTION,
-        MOVING_TARGET_ENERGY_DESCRIPTION,
-        STATIC_TARGET_ENERGY_DESCRIPTION,
-        DETECTION_DISTANCE_DESCRIPTION,
-        MAX_MOTION_GATES_DESCRIPTION,
-        MAX_STATIC_GATES_DESCRIPTION,
-    ]
-    + MOTION_ENERGY_GATES
-    + STATIC_ENERGY_GATES
-)
+SENSOR_DESCRIPTIONS = [
+    MOVING_TARGET_DISTANCE_DESCRIPTION,
+    STATIC_TARGET_DISTANCE_DESCRIPTION,
+    MOVING_TARGET_ENERGY_DESCRIPTION,
+    STATIC_TARGET_ENERGY_DESCRIPTION,
+    DETECTION_DISTANCE_DESCRIPTION,
+    MAX_MOTION_GATES_DESCRIPTION,
+    MAX_STATIC_GATES_DESCRIPTION,
+    *MOTION_ENERGY_GATES,
+    *STATIC_ENERGY_GATES,
+]
 
 
 async def async_setup_entry(
@@ -47,8 +47,12 @@ _ACTION_SCHEMA = cv.DEVICE_ACTION_BASE_SCHEMA.extend(
         vol.Required(ATTR_ENTITY_ID): cv.entity_id_or_uuid,
         vol.Required(CONF_DOMAIN): DOMAIN,
         vol.Required(CONF_TYPE): vol.In(
-            toggle_entity.DEVICE_ACTION_TYPES
-            + [TYPE_BRIGHTNESS_INCREASE, TYPE_BRIGHTNESS_DECREASE, TYPE_FLASH]
+            [
+                *toggle_entity.DEVICE_ACTION_TYPES,
+                TYPE_BRIGHTNESS_INCREASE,
+                TYPE_BRIGHTNESS_DECREASE,
+                TYPE_FLASH,
+            ]
         ),
         vol.Optional(ATTR_BRIGHTNESS_PCT): VALID_BRIGHTNESS_PCT,
         vol.Optional(ATTR_FLASH): VALID_FLASH,
@@ -572,7 +572,7 @@ async def async_get_broker_settings(
             )
             schema = vol.Schema({cv.string: cv.template})
             schema(validated_user_input[CONF_WS_HEADERS])
-        except JSON_DECODE_EXCEPTIONS + (vol.MultipleInvalid,):
+        except (*JSON_DECODE_EXCEPTIONS, vol.MultipleInvalid):
             errors["base"] = "bad_ws_headers"
             return False
         return True
@@ -140,7 +140,7 @@ async def async_get_announce_addresses(hass: HomeAssistant) -> list[str]:
     if default_ip := await async_get_source_ip(hass, target_ip=MDNS_TARGET_IP):
         if default_ip in addresses:
             addresses.remove(default_ip)
-        return [default_ip] + list(addresses)
+        return [default_ip, *addresses]
     return list(addresses)
 
 
@@ -60,7 +60,7 @@ class OmniLogicUpdateCoordinator(DataUpdateCoordinator[dict[tuple, dict[str, Any
 
             if "systemId" in item:
                 system_id = item["systemId"]
-                current_id = current_id + (item_kind, system_id)
+                current_id = (*current_id, item_kind, system_id)
             data[current_id] = item
 
             for kind in ALL_ITEM_KINDS:
@@ -134,7 +134,7 @@ async def async_add_user_device_tracker(
 
         await coll.async_update_item(
             person[CONF_ID],
-            {CONF_DEVICE_TRACKERS: device_trackers + [device_tracker_entity_id]},
+            {CONF_DEVICE_TRACKERS: [*device_trackers, device_tracker_entity_id]},
         )
         break
 
@@ -124,8 +124,10 @@ class ProximityDataUpdateCoordinator(DataUpdateCoordinator[ProximityData]):
                     **self.config_entry.data,
                     CONF_TRACKED_ENTITIES: [
                         tracked_entity
-                        for tracked_entity in self.tracked_entities
-                        + [new_tracked_entity_id]
+                        for tracked_entity in (
+                            *self.tracked_entities,
+                            new_tracked_entity_id,
+                        )
                         if tracked_entity != old_tracked_entity_id
                     ],
                 },
@@ -255,9 +255,12 @@ class RokuMediaPlayer(RokuEntity, MediaPlayerEntity):
     @property
     def source_list(self) -> list[str]:
         """List of available input sources."""
-        return ["Home"] + sorted(
-            app.name for app in self.coordinator.data.apps if app.name is not None
-        )
+        return [
+            "Home",
+            *sorted(
+                app.name for app in self.coordinator.data.apps if app.name is not None
+            ),
+        ]
 
     @roku_exception_handler()
     async def search(self, keyword: str) -> None:
@@ -30,7 +30,7 @@ def _get_application_name(device: RokuDevice) -> str | None:
 
 
 def _get_applications(device: RokuDevice) -> list[str]:
-    return ["Home"] + sorted(app.name for app in device.apps if app.name is not None)
+    return ["Home", *sorted(app.name for app in device.apps if app.name is not None)]
 
 
 def _get_channel_name(device: RokuDevice) -> str | None:
@@ -70,7 +70,8 @@ TRENDS_SENSOR_TYPES = {
 SENSOR_VARIANTS = [(PRODUCTION_ID, PRODUCTION_NAME), (CONSUMPTION_ID, CONSUMPTION_NAME)]
 
 # Trend production/consumption variants
-TREND_SENSOR_VARIANTS = SENSOR_VARIANTS + [
+TREND_SENSOR_VARIANTS = [
+    *SENSOR_VARIANTS,
     (PRODUCTION_PCT_ID, PRODUCTION_PCT_NAME),
     (NET_PRODUCTION_ID, NET_PRODUCTION_NAME),
     (FROM_GRID_ID, FROM_GRID_NAME),
@@ -96,14 +96,14 @@ class ImageProcessingSsocr(ImageProcessingEntity):
         threshold = ["-t", str(config[CONF_THRESHOLD])]
         extra_arguments = config[CONF_EXTRA_ARGUMENTS].split(" ")
 
-        self._command = (
-            [config[CONF_SSOCR_BIN]]
-            + crop
-            + digits
-            + threshold
-            + rotate
-            + extra_arguments
-        )
+        self._command = [
+            config[CONF_SSOCR_BIN],
+            *crop,
+            *digits,
+            *threshold,
+            *rotate,
+            *extra_arguments,
+        ]
         self._command.append(self.filepath)
 
     @property
@@ -77,7 +77,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
         else:
            # Template used. Break into list and use create_subprocess_exec
            # (which uses shell=False) for security
-            shlexed_cmd = [prog] + shlex.split(rendered_args)
+            shlexed_cmd = [prog, *shlex.split(rendered_args)]
 
             create_process = asyncio.create_subprocess_exec(
                 *shlexed_cmd,
@@ -63,7 +63,8 @@ def get_capabilities(capabilities: Sequence[str]) -> Sequence[str] | None:
     # Must have one of the min_required
     if any(capability in capabilities for capability in min_required):
         # Return all capabilities supported/consumed
-        return min_required + [
+        return [
+            *min_required,
             Capability.battery,
             Capability.switch_level,
             Capability.window_shade_level,
@@ -837,7 +837,7 @@ class SonosSpeaker:
                 if p.uid != coordinator_uid and p.is_visible
             ]
 
-            return [coordinator_uid] + joined_uids
+            return [coordinator_uid, *joined_uids]
 
         async def _async_extract_group(event: SonosEvent | None) -> list[str]:
             """Extract group layout from a topology event."""
@@ -92,7 +92,8 @@ VERSION_SOURCE_MAP: Final[dict[str, str]] = {
     VERSION_SOURCE_PYPI: "pypi",
 }
 
-VALID_SOURCES: Final[list[str]] = HA_VERSION_SOURCES + [
+VALID_SOURCES: Final[list[str]] = [
+    *HA_VERSION_SOURCES,
     "hassio",  # Kept to not break existing configurations
     "docker",  # Kept to not break existing configurations
 ]
@@ -8,7 +8,7 @@ from homeassistant.const import WEEKDAYS, Platform
 
 LOGGER = logging.getLogger(__package__)
 
-ALLOWED_DAYS = WEEKDAYS + ["holiday"]
+ALLOWED_DAYS = [*WEEKDAYS, "holiday"]
 
 DOMAIN = "workday"
 PLATFORMS = [Platform.BINARY_SENSOR]
@@ -34,7 +34,7 @@ PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
 
 def x10_command(command):
     """Execute X10 command and check output."""
-    return check_output(["heyu"] + command.split(" "), stderr=STDOUT)
+    return check_output(["heyu", *command.split(" ")], stderr=STDOUT)
 
 
 def get_unit_status(code):
@@ -106,7 +106,7 @@ class XiaomiGatewayLight(XiaomiDevice, LightEntity):
             self._brightness = int(100 * kwargs[ATTR_BRIGHTNESS] / 255)
 
         rgb = color_util.color_hs_to_RGB(*self._hs)
-        rgba = (self._brightness,) + rgb
+        rgba = (self._brightness, *rgb)
         rgbhex = binascii.hexlify(struct.pack("BBBB", *rgba)).decode("ASCII")
         rgbhex = int(rgbhex, 16)
 
@@ -676,7 +676,7 @@ class MusicCastMediaPlayer(MusicCastDeviceEntity, MediaPlayerEntity):
             return [self]
         entities = self.get_all_mc_entities()
         clients = [entity for entity in entities if entity.is_part_of_group(self)]
-        return [self] + clients
+        return [self, *clients]
 
     @property
     def musiccast_zone_entity(self) -> MusicCastMediaPlayer:
@@ -556,7 +556,7 @@ class ClusterHandler(LogMixin):
     def log(self, level, msg, *args, **kwargs):
         """Log a message."""
         msg = f"[%s:%s]: {msg}"
-        args = (self._endpoint.device.nwk, self._id) + args
+        args = (self._endpoint.device.nwk, self._id, *args)
         _LOGGER.log(level, msg, *args, **kwargs)
 
     def __getattr__(self, name):
@@ -620,7 +620,7 @@ class ZDOClusterHandler(LogMixin):
     def log(self, level, msg, *args, **kwargs):
         """Log a message."""
         msg = f"[%s:ZDO](%s): {msg}"
-        args = (self._zha_device.nwk, self._zha_device.model) + args
+        args = (self._zha_device.nwk, self._zha_device.model, *args)
         _LOGGER.log(level, msg, *args, **kwargs)
 
 
@@ -1005,5 +1005,5 @@ class ZHADevice(LogMixin):
     def log(self, level: int, msg: str, *args: Any, **kwargs: Any) -> None:
         """Log a message."""
         msg = f"[%s](%s): {msg}"
-        args = (self.nwk, self.model) + args
+        args = (self.nwk, self.model, *args)
         _LOGGER.log(level, msg, *args, **kwargs)
@@ -131,7 +131,7 @@ class ZHAGroupMember(LogMixin):
     def log(self, level: int, msg: str, *args: Any, **kwargs) -> None:
         """Log a message."""
         msg = f"[%s](%s): {msg}"
-        args = (f"0x{self._zha_group.group_id:04x}", self.endpoint_id) + args
+        args = (f"0x{self._zha_group.group_id:04x}", self.endpoint_id, *args)
         _LOGGER.log(level, msg, *args, **kwargs)
 
 
@@ -242,5 +242,5 @@ class ZHAGroup(LogMixin):
     def log(self, level: int, msg: str, *args: Any, **kwargs) -> None:
         """Log a message."""
         msg = f"[%s](%s): {msg}"
-        args = (self.name, self.group_id) + args
+        args = (self.name, self.group_id, *args)
         _LOGGER.log(level, msg, *args, **kwargs)
@@ -140,7 +140,7 @@ class BaseZhaEntity(LogMixin, entity.Entity):
     def log(self, level: int, msg: str, *args, **kwargs):
         """Log a message."""
         msg = f"%s: {msg}"
-        args = (self.entity_id,) + args
+        args = (self.entity_id, *args)
         _LOGGER.log(level, msg, *args, **kwargs)
 
 
@@ -506,7 +506,7 @@ async def async_hass_config_yaml(hass: HomeAssistant) -> dict:
         await merge_packages_config(hass, config, core_config.get(CONF_PACKAGES, {}))
     except vol.Invalid as exc:
         suffix = ""
-        if annotation := find_annotation(config, [CONF_CORE, CONF_PACKAGES] + exc.path):
+        if annotation := find_annotation(config, [CONF_CORE, CONF_PACKAGES, *exc.path]):
             suffix = f" at {_relpath(hass, annotation[0])}, line {annotation[1]}"
         _LOGGER.error(
             "Invalid package configuration '%s'%s: %s", CONF_PACKAGES, suffix, exc
@@ -166,7 +166,7 @@ class HomeAssistantView:
     ) -> None:
         """Register the view with a router."""
         assert self.url is not None, "No url set for view"
-        urls = [self.url] + self.extra_urls
+        urls = [self.url, *self.extra_urls]
         routes: list[AbstractRoute] = []
 
         for method in ("get", "post", "delete", "put", "patch", "head", "options"):
@@ -130,7 +130,7 @@ CONF_MAX = "max"
 DEFAULT_MAX = 10
 
 CONF_MAX_EXCEEDED = "max_exceeded"
-_MAX_EXCEEDED_CHOICES = list(LOGSEVERITY) + ["SILENT"]
+_MAX_EXCEEDED_CHOICES = [*LOGSEVERITY, "SILENT"]
 DEFAULT_MAX_EXCEEDED = "WARNING"
 
 ATTR_CUR = "current"
@@ -610,6 +610,7 @@ select = [
     "PLR", # pylint
     "PLW", # pylint
     "Q000", # Double quotes found but single quotes preferred
+    "RUF005", # Consider iterable unpacking instead of concatenation
     "RUF006", # Store a reference to the return value of asyncio.create_task
     "S102", # Use of exec detected
     "S103", # bad-file-permissions
@@ -433,15 +433,17 @@ def gather_constraints() -> str:
     return (
         GENERATED_MESSAGE
         + "\n".join(
-            sorted(
-                {
-                    *core_requirements(),
-                    *gather_recursive_requirements("default_config"),
-                    *gather_recursive_requirements("mqtt"),
-                },
-                key=str.lower,
-            )
-            + [""]
+            [
+                *sorted(
+                    {
+                        *core_requirements(),
+                        *gather_recursive_requirements("default_config"),
+                        *gather_recursive_requirements("mqtt"),
+                    },
+                    key=str.lower,
+                ),
+                "",
+            ]
         )
         + CONSTRAINT_BASE
     )
@@ -398,8 +398,15 @@ def validate(integrations: dict[str, Integration], config: Config) -> None:
             manifests_resorted.append(integration.manifest_path)
     if config.action == "generate" and manifests_resorted:
         subprocess.run(
-            ["pre-commit", "run", "--hook-stage", "manual", "prettier", "--files"]
-            + manifests_resorted,
+            [
+                "pre-commit",
+                "run",
+                "--hook-stage",
+                "manual",
+                "prettier",
+                "--files",
+                *manifests_resorted,
+            ],
             stdout=subprocess.DEVNULL,
             check=True,
         )
@@ -25,7 +25,7 @@ async def anova_api(
     async def get_devices_side_effect():
         if not api_mock.existing_devices:
             api_mock.existing_devices = []
-        api_mock.existing_devices = api_mock.existing_devices + [new_device]
+        api_mock.existing_devices = [*api_mock.existing_devices, new_device]
         return [new_device]
 
     api_mock.authenticate.side_effect = authenticate_side_effect
@@ -25,13 +25,12 @@ from homeassistant.setup import async_setup_component
 
 FULL_FAN_ENTITY_IDS = ["fan.living_room_fan", "fan.percentage_full_fan"]
 FANS_WITH_PRESET_MODE_ONLY = ["fan.preset_only_limited_fan"]
-LIMITED_AND_FULL_FAN_ENTITY_IDS = FULL_FAN_ENTITY_IDS + [
+LIMITED_AND_FULL_FAN_ENTITY_IDS = [
+    *FULL_FAN_ENTITY_IDS,
     "fan.ceiling_fan",
     "fan.percentage_limited_fan",
 ]
-FANS_WITH_PRESET_MODES = FULL_FAN_ENTITY_IDS + [
-    "fan.percentage_limited_fan",
-]
+FANS_WITH_PRESET_MODES = [*FULL_FAN_ENTITY_IDS, "fan.percentage_limited_fan"]
 PERCENTAGE_MODEL_FANS = ["fan.percentage_full_fan", "fan.percentage_limited_fan"]
 
 
@@ -238,11 +238,7 @@ async def test_double_signal_after_delay(hass: HomeAssistant) -> None:
     # The old algorithm would produce extreme values if, after a delay longer than the time window
     # there would be two signals, a large spike would be produced. Check explicitly for this situation
     time_window = 60
-    times = [*range(time_window * 10)]
-    times = times + [
-        time_window * 20,
-        time_window * 20 + 0.01,
-    ]
+    times = [*range(time_window * 10), time_window * 20, time_window * 20 + 0.01]
 
     # just apply sine as some sort of temperature change and make sure the change after the delay is very small
     temperature_values = [sin(x) for x in times]
@@ -1276,11 +1276,9 @@ BAD_AUTOMATIONS = [
     ),
 ]
 
-BAD_TRIGGERS = BAD_CONDITIONS = BAD_AUTOMATIONS + [
-    (
-        {"domain": "light"},
-        "required key not provided @ data{path}['device_id']",
-    )
+BAD_TRIGGERS = BAD_CONDITIONS = [
+    *BAD_AUTOMATIONS,
+    ({"domain": "light"}, "required key not provided @ data{path}['device_id']"),
 ]
 
 
@@ -384,7 +384,7 @@ def test_initial_outlier(values: list[State]) -> None:
     """Test issue #13363."""
     filt = OutlierFilter(window_size=3, precision=2, entity=None, radius=4.0)
     out = State("sensor.test_monitored", "4000")
-    for state in [out] + values:
+    for state in [out, *values]:
         filtered = filt.filter_state(state)
     assert filtered.state == 21
 
@@ -393,7 +393,7 @@ def test_unknown_state_outlier(values: list[State]) -> None:
     """Test issue #32395."""
     filt = OutlierFilter(window_size=3, precision=2, entity=None, radius=4.0)
     out = State("sensor.test_monitored", "unknown")
-    for state in [out] + values + [out]:
+    for state in [out, *values, out]:
         try:
             filtered = filt.filter_state(state)
         except ValueError:
@@ -413,7 +413,7 @@ def test_lowpass(values: list[State]) -> None:
     """Test if lowpass filter works."""
     filt = LowPassFilter(window_size=10, precision=2, entity=None, time_constant=10)
     out = State("sensor.test_monitored", "unknown")
-    for state in [out] + values + [out]:
+    for state in [out, *values, out]:
         try:
             filtered = filt.filter_state(state)
         except ValueError:
@@ -30,7 +30,7 @@ from tests.typing import ClientSessionGenerator
 
 SUPERVISOR_IP = "1.2.3.4"
 BANNED_IPS = ["200.201.202.203", "100.64.0.2"]
-BANNED_IPS_WITH_SUPERVISOR = BANNED_IPS + [SUPERVISOR_IP]
+BANNED_IPS_WITH_SUPERVISOR = [*BANNED_IPS, SUPERVISOR_IP]
 
 
 @pytest.fixture(name="hassio_env")
@@ -32,7 +32,7 @@ def setup_owproxy_mock_devices(
     )
 
     # Ensure enough read side effect
-    dir_side_effect = [main_dir_return_value] + sub_dir_side_effect
+    dir_side_effect = [main_dir_return_value, *sub_dir_side_effect]
     read_side_effect = (
         main_read_side_effect
         + sub_read_side_effect
@@ -109,7 +109,7 @@ async def test_record_path_not_allowed(hass: HomeAssistant, h264_video) -> None:
 
 def add_parts_to_segment(segment, source):
     """Add relevant part data to segment for testing recorder."""
-    moof_locs = list(find_box(source.getbuffer(), b"moof")) + [len(source.getbuffer())]
+    moof_locs = [*find_box(source.getbuffer(), b"moof"), len(source.getbuffer())]
     segment.init = source.getbuffer()[: moof_locs[0]].tobytes()
     segment.parts = [
         Part(
@@ -84,7 +84,7 @@ APP_LIST = [
     },
 ]
 APP_NAME_LIST = [app["name"] for app in APP_LIST]
-INPUT_LIST_WITH_APPS = INPUT_LIST + ["CAST"]
+INPUT_LIST_WITH_APPS = [*INPUT_LIST, "CAST"]
 CUSTOM_CONFIG = {CONF_APP_ID: "test", CONF_MESSAGE: None, CONF_NAME_SPACE: 10}
 ADDITIONAL_APP_CONFIG = {
     "name": CURRENT_APP,
@@ -564,7 +564,7 @@ async def test_setup_with_apps_include(
         hass, MOCK_TV_WITH_INCLUDE_CONFIG, CURRENT_APP_CONFIG
     ):
         attr = hass.states.get(ENTITY_ID).attributes
-        _assert_source_list_with_apps(list(INPUT_LIST_WITH_APPS + [CURRENT_APP]), attr)
+        _assert_source_list_with_apps([*INPUT_LIST_WITH_APPS, CURRENT_APP], attr)
         assert CURRENT_APP in attr[ATTR_INPUT_SOURCE_LIST]
         assert attr[ATTR_INPUT_SOURCE] == CURRENT_APP
         assert attr["app_name"] == CURRENT_APP
@@ -582,7 +582,7 @@ async def test_setup_with_apps_exclude(
         hass, MOCK_TV_WITH_EXCLUDE_CONFIG, CURRENT_APP_CONFIG
    ):
         attr = hass.states.get(ENTITY_ID).attributes
-        _assert_source_list_with_apps(list(INPUT_LIST_WITH_APPS + [CURRENT_APP]), attr)
+        _assert_source_list_with_apps([*INPUT_LIST_WITH_APPS, CURRENT_APP], attr)
         assert CURRENT_APP in attr[ATTR_INPUT_SOURCE_LIST]
         assert attr[ATTR_INPUT_SOURCE] == CURRENT_APP
         assert attr["app_name"] == CURRENT_APP
@@ -88,8 +88,10 @@ async def test_config_flow_auth_success_with_multiple_students(
     mock_account.return_value = fake_account
     mock_student.return_value = [
         Student.load(student)
-        for student in [load_fixture("fake_student_1.json", "vulcan")]
-        + [load_fixture("fake_student_2.json", "vulcan")]
+        for student in [
+            load_fixture("fake_student_1.json", "vulcan"),
+            load_fixture("fake_student_2.json", "vulcan"),
+        ]
     ]
     result = await hass.config_entries.flow.async_init(
         const.DOMAIN, context={"source": config_entries.SOURCE_USER}
@@ -379,8 +381,9 @@ async def test_multiple_config_entries_using_saved_credentials_2(
 ) -> None:
     """Test a successful config flow for multiple config entries using saved credentials (different situation)."""
     mock_student.return_value = [
-        Student.load(load_fixture("fake_student_1.json", "vulcan"))
-    ] + [Student.load(load_fixture("fake_student_2.json", "vulcan"))]
+        Student.load(load_fixture("fake_student_1.json", "vulcan")),
+        Student.load(load_fixture("fake_student_2.json", "vulcan")),
+    ]
     MockConfigEntry(
         domain=const.DOMAIN,
         unique_id="123456",
@@ -477,8 +480,9 @@ async def test_multiple_config_entries_using_saved_credentials_4(
 ) -> None:
     """Test a successful config flow for multiple config entries using saved credentials (different situation)."""
     mock_student.return_value = [
-        Student.load(load_fixture("fake_student_1.json", "vulcan"))
-    ] + [Student.load(load_fixture("fake_student_2.json", "vulcan"))]
+        Student.load(load_fixture("fake_student_1.json", "vulcan")),
+        Student.load(load_fixture("fake_student_2.json", "vulcan")),
+    ]
     MockConfigEntry(
         entry_id="456",
         domain=const.DOMAIN,
@@ -169,7 +169,7 @@ class SatelliteAsyncTcpClient(MockAsyncTcpClient):
 
     def inject_event(self, event: Event) -> None:
         """Put an event in as the next response."""
-        self.responses = [event] + self.responses
+        self.responses = [event, *self.responses]
 
 
 async def test_satellite_pipeline(hass: HomeAssistant) -> None:
@@ -1418,9 +1418,10 @@ async def test_effects(hass: HomeAssistant) -> None:
     assert await hass.config_entries.async_setup(config_entry.entry_id)
     await hass.async_block_till_done()
 
-    assert hass.states.get(ENTITY_LIGHT).attributes.get(
-        "effect_list"
-    ) == YEELIGHT_COLOR_EFFECT_LIST + ["mock_effect"]
+    assert hass.states.get(ENTITY_LIGHT).attributes.get("effect_list") == [
+        *YEELIGHT_COLOR_EFFECT_LIST,
+        "mock_effect",
+    ]
 
     async def _async_test_effect(name, target=None, called=True):
         async_mocked_start_flow = AsyncMock()
@@ -993,7 +993,7 @@ async def _mqtt_mock_entry(
         nonlocal mock_mqtt_instance
         nonlocal real_mqtt_instance
         real_mqtt_instance = real_mqtt(*args, **kwargs)
-        spec = dir(real_mqtt_instance) + ["_mqttc"]
+        spec = [*dir(real_mqtt_instance), "_mqttc"]
         mock_mqtt_instance = MqttMockHAClient(
             return_value=real_mqtt_instance,
             spec_set=spec,
@@ -69,7 +69,7 @@ def test_skip_pip_mutually_exclusive(mock_exit) -> None:
     """Test --skip-pip and --skip-pip-package are mutually exclusive."""
 
     def parse_args(*args):
-        with patch("sys.argv", ["python"] + list(args)):
+        with patch("sys.argv", ["python", *args]):
             return main.get_arguments()
 
     args = parse_args("--skip-pip")