[ruff] Enable PERF rules and fix all violations (#9874)

J. Nick Koston 2025-07-25 08:15:54 -10:00 committed by GitHub
parent 88ccde4ba1
commit f808c38f10
18 changed files with 90 additions and 96 deletions
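
The rewrites in this commit are mechanical applications of ruff's PERF group (performance lints derived from perflint). The most common shape is PERF401, which flags lists built by calling append inside a loop and suggests a comprehension. A minimal sketch of the before/after, using invented names rather than code from this commit:

    # Illustrative only: hypothetical serial-port data, not from the ESPHome codebase.
    ports = [("ttyUSB0", "CP2102 USB-UART"), ("ttyACM0", "ESP32-S3")]

    # Before: the list grows one append call at a time.
    options = []
    for path, description in ports:
        options.append(f"{path} ({description})")

    # After: a list comprehension builds the same list in a single expression.
    options = [f"{path} ({description})" for path, description in ports]

    print(options)  # ['ttyUSB0 (CP2102 USB-UART)', 'ttyACM0 (ESP32-S3)']

Where the elements land in an already-existing list, or the loop carries a filter, the same rule family points at list.extend with a generator expression instead; both shapes appear in the hunks below.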


@@ -89,9 +89,9 @@ def choose_prompt(options, purpose: str = None):
 def choose_upload_log_host(
     default, check_default, show_ota, show_mqtt, show_api, purpose: str = None
 ):
-    options = []
-    for port in get_serial_ports():
-        options.append((f"{port.path} ({port.description})", port.path))
+    options = [
+        (f"{port.path} ({port.description})", port.path) for port in get_serial_ports()
+    ]
     if default == "SERIAL":
         return choose_prompt(options, purpose=purpose)
     if (show_ota and "ota" in CORE.config) or (show_api and "api" in CORE.config):


@@ -266,8 +266,10 @@ async def delayed_off_filter_to_code(config, filter_id):
 async def autorepeat_filter_to_code(config, filter_id):
     timings = []
     if len(config) > 0:
-        for conf in config:
-            timings.append((conf[CONF_DELAY], conf[CONF_TIME_OFF], conf[CONF_TIME_ON]))
+        timings.extend(
+            (conf[CONF_DELAY], conf[CONF_TIME_OFF], conf[CONF_TIME_ON])
+            for conf in config
+        )
     else:
         timings.append(
             (
@@ -573,16 +575,15 @@ async def setup_binary_sensor_core_(var, config):
         await automation.build_automation(trigger, [], conf)
     for conf in config.get(CONF_ON_MULTI_CLICK, []):
-        timings = []
-        for tim in conf[CONF_TIMING]:
-            timings.append(
-                cg.StructInitializer(
-                    MultiClickTriggerEvent,
-                    ("state", tim[CONF_STATE]),
-                    ("min_length", tim[CONF_MIN_LENGTH]),
-                    ("max_length", tim.get(CONF_MAX_LENGTH, 4294967294)),
-                )
-            )
+        timings = [
+            cg.StructInitializer(
+                MultiClickTriggerEvent,
+                ("state", tim[CONF_STATE]),
+                ("min_length", tim[CONF_MIN_LENGTH]),
+                ("max_length", tim.get(CONF_MAX_LENGTH, 4294967294)),
+            )
+            for tim in conf[CONF_TIMING]
+        ]
         trigger = cg.new_Pvariable(conf[CONF_TRIGGER_ID], var, timings)
         if CONF_INVALID_COOLDOWN in conf:
             cg.add(trigger.set_invalid_cooldown(conf[CONF_INVALID_COOLDOWN]))


@@ -74,8 +74,7 @@ def range_segment_list(input):
     if isinstance(input, list):
         for list_item in input:
             if isinstance(list_item, list):
-                for item in list_item:
-                    flat_list.append(item)
+                flat_list.extend(list_item)
             else:
                 flat_list.append(list_item)
     else:


@@ -982,7 +982,7 @@ def copy_files():
         __version__,
     )
-    for _, file in CORE.data[KEY_ESP32][KEY_EXTRA_BUILD_FILES].items():
+    for file in CORE.data[KEY_ESP32][KEY_EXTRA_BUILD_FILES].values():
         if file[KEY_PATH].startswith("http"):
             import requests
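
This hunk, and the pipsolar and websocket_class ones further down, fix what ruff reports as PERF102 (incorrect dict iterator): iterating .items() while discarding either the key or the value. A small sketch with a made-up mapping standing in for CORE.data:

    # Hypothetical stand-in for the extra-build-files mapping (illustrative only).
    extra_files = {"partitions.csv": {"path": "partitions.csv"}, "sdkconfig": {"path": "sdkconfig"}}

    # Before: .items() produces (key, value) pairs even though the key is unused.
    for _, file in extra_files.items():
        print(file["path"])

    # After: .values() (or plain iteration when only keys are needed) skips the unused half.
    for file in extra_files.values():
        print(file["path"])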


@@ -310,9 +310,7 @@ async def to_code(config):
     for conf in config.get(CONF_ON_BLE_ADVERTISE, []):
         trigger = cg.new_Pvariable(conf[CONF_TRIGGER_ID], var)
         if CONF_MAC_ADDRESS in conf:
-            addr_list = []
-            for it in conf[CONF_MAC_ADDRESS]:
-                addr_list.append(it.as_hex)
+            addr_list = [it.as_hex for it in conf[CONF_MAC_ADDRESS]]
             cg.add(trigger.set_addresses(addr_list))
         await automation.build_automation(trigger, [(ESPBTDeviceConstRef, "x")], conf)
     for conf in config.get(CONF_ON_BLE_SERVICE_DATA_ADVERTISE, []):


@@ -73,8 +73,7 @@ def ota_esphome_final_validate(config):
         else:
             new_ota_conf.append(ota_conf)
-    for port_conf in merged_ota_esphome_configs_by_port.values():
-        new_ota_conf.append(port_conf)
+    new_ota_conf.extend(merged_ota_esphome_configs_by_port.values())
     full_conf[CONF_OTA] = new_ota_conf
     fv.full_config.set(full_conf)
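
Appending every element of another iterable, as the removed loop here did, is the related PERF402 pattern (manual list copy); a single list.extend call expresses the copy directly. A short sketch with invented OTA config stand-ins:

    # Hypothetical stand-ins for the OTA config structures (illustrative only).
    new_ota_conf = [{"platform": "esphome", "port": 3232}]
    merged_by_port = {3233: {"platform": "esphome", "port": 3233}}

    # Before (removed above): element-by-element append of another iterable.
    #     for port_conf in merged_by_port.values():
    #         new_ota_conf.append(port_conf)

    # After (added above): one extend call copies the whole iterable into the list.
    new_ota_conf.extend(merged_by_port.values())
    print(len(new_ota_conf))  # 2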


@@ -41,9 +41,7 @@ CONFIG_SCHEMA = lcd_base.LCD_SCHEMA.extend(
 async def to_code(config):
     var = cg.new_Pvariable(config[CONF_ID])
     await lcd_base.setup_lcd_display(var, config)
-    pins_ = []
-    for conf in config[CONF_DATA_PINS]:
-        pins_.append(await cg.gpio_pin_expression(conf))
+    pins_ = [await cg.gpio_pin_expression(conf) for conf in config[CONF_DATA_PINS]]
     cg.add(var.set_data_pins(*pins_))
     enable = await cg.gpio_pin_expression(config[CONF_ENABLE_PIN])
     cg.add(var.set_enable_pin(enable))


@@ -291,31 +291,30 @@ async def random_effect_to_code(config, effect_id):
 )
 async def strobe_effect_to_code(config, effect_id):
     var = cg.new_Pvariable(effect_id, config[CONF_NAME])
-    colors = []
-    for color in config.get(CONF_COLORS, []):
-        colors.append(
-            cg.StructInitializer(
-                StrobeLightEffectColor,
-                (
-                    "color",
-                    LightColorValues(
-                        color.get(CONF_COLOR_MODE, ColorMode.UNKNOWN),
-                        color[CONF_STATE],
-                        color[CONF_BRIGHTNESS],
-                        color[CONF_COLOR_BRIGHTNESS],
-                        color[CONF_RED],
-                        color[CONF_GREEN],
-                        color[CONF_BLUE],
-                        color[CONF_WHITE],
-                        color.get(CONF_COLOR_TEMPERATURE, 0.0),
-                        color[CONF_COLD_WHITE],
-                        color[CONF_WARM_WHITE],
-                    ),
-                ),
-                ("duration", color[CONF_DURATION]),
-                ("transition_length", color[CONF_TRANSITION_LENGTH]),
-            )
-        )
+    colors = [
+        cg.StructInitializer(
+            StrobeLightEffectColor,
+            (
+                "color",
+                LightColorValues(
+                    color.get(CONF_COLOR_MODE, ColorMode.UNKNOWN),
+                    color[CONF_STATE],
+                    color[CONF_BRIGHTNESS],
+                    color[CONF_COLOR_BRIGHTNESS],
+                    color[CONF_RED],
+                    color[CONF_GREEN],
+                    color[CONF_BLUE],
+                    color[CONF_WHITE],
+                    color.get(CONF_COLOR_TEMPERATURE, 0.0),
+                    color[CONF_COLD_WHITE],
+                    color[CONF_WARM_WHITE],
+                ),
+            ),
+            ("duration", color[CONF_DURATION]),
+            ("transition_length", color[CONF_TRANSITION_LENGTH]),
+        )
+        for color in config.get(CONF_COLORS, [])
+    ]
     cg.add(var.set_colors(colors))
     return var
@@ -404,20 +403,19 @@ async def addressable_color_wipe_effect_to_code(config, effect_id):
     var = cg.new_Pvariable(effect_id, config[CONF_NAME])
     cg.add(var.set_add_led_interval(config[CONF_ADD_LED_INTERVAL]))
     cg.add(var.set_reverse(config[CONF_REVERSE]))
-    colors = []
-    for color in config.get(CONF_COLORS, []):
-        colors.append(
-            cg.StructInitializer(
-                AddressableColorWipeEffectColor,
-                ("r", int(round(color[CONF_RED] * 255))),
-                ("g", int(round(color[CONF_GREEN] * 255))),
-                ("b", int(round(color[CONF_BLUE] * 255))),
-                ("w", int(round(color[CONF_WHITE] * 255))),
-                ("random", color[CONF_RANDOM]),
-                ("num_leds", color[CONF_NUM_LEDS]),
-                ("gradient", color[CONF_GRADIENT]),
-            )
-        )
+    colors = [
+        cg.StructInitializer(
+            AddressableColorWipeEffectColor,
+            ("r", int(round(color[CONF_RED] * 255))),
+            ("g", int(round(color[CONF_GREEN] * 255))),
+            ("b", int(round(color[CONF_BLUE] * 255))),
+            ("w", int(round(color[CONF_WHITE] * 255))),
+            ("random", color[CONF_RANDOM]),
+            ("num_leds", color[CONF_NUM_LEDS]),
+            ("gradient", color[CONF_GRADIENT]),
+        )
+        for color in config.get(CONF_COLORS, [])
+    ]
     cg.add(var.set_colors(colors))
     return var


@@ -273,7 +273,7 @@ CONFIG_SCHEMA = PIPSOLAR_COMPONENT_SCHEMA.extend(
 async def to_code(config):
     paren = await cg.get_variable(config[CONF_PIPSOLAR_ID])
-    for type, _ in TYPES.items():
+    for type in TYPES:
         if type in config:
             conf = config[type]
             sens = await sensor.new_sensor(conf)


@@ -317,7 +317,7 @@ def preload_core_config(config, result) -> str:
     target_platforms = []
-    for domain, _ in config.items():
+    for domain in config:
         if domain.startswith("."):
             continue
         if _is_target_platform(domain):


@@ -144,7 +144,7 @@ def websocket_class(cls):
     if not hasattr(cls, "_message_handlers"):
         cls._message_handlers = {}
-    for _, method in cls.__dict__.items():
+    for method in cls.__dict__.values():
         if hasattr(method, "_message_handler"):
             cls._message_handlers[method._message_handler] = method


@@ -111,12 +111,13 @@ exclude = ['generated']
 [tool.ruff.lint]
 select = [
-    "E",  # pycodestyle
-    "F",  # pyflakes/autoflake
-    "I",  # isort
-    "PL",  # pylint
-    "SIM",  # flake8-simplify
-    "UP",  # pyupgrade
+    "E",    # pycodestyle
+    "F",    # pyflakes/autoflake
+    "I",    # isort
+    "PERF", # performance
+    "PL",   # pylint
+    "SIM",  # flake8-simplify
+    "UP",   # pyupgrade
 ]
 ignore = [


@@ -66,9 +66,10 @@ def main():
     )
     args = parser.parse_args()
-    files = []
-    for path in git_ls_files(["*.cpp", "*.h", "*.tcc"]):
-        files.append(os.path.relpath(path, os.getcwd()))
+    cwd = os.getcwd()
+    files = [
+        os.path.relpath(path, cwd) for path in git_ls_files(["*.cpp", "*.h", "*.tcc"])
+    ]
     if args.files:
         # Match against files specified on command-line


@@ -219,9 +219,8 @@ def main():
     )
     args = parser.parse_args()
-    files = []
-    for path in git_ls_files(["*.cpp"]):
-        files.append(os.path.relpath(path, os.getcwd()))
+    cwd = os.getcwd()
+    files = [os.path.relpath(path, cwd) for path in git_ls_files(["*.cpp"])]
     # Print initial file count if it's large
     if len(files) > 50:


@@ -41,11 +41,12 @@ CONFIG_NEWLIB_LIBC=y
     return include_paths
 def extract_defines(command):
-    defines = []
     define_pattern = re.compile(r"-D\s*([^\s]+)")
-    for match in define_pattern.findall(command):
-        if match not in ("_ASMLANGUAGE"):
-            defines.append(match)
+    defines = [
+        match
+        for match in define_pattern.findall(command)
+        if match not in ("_ASMLANGUAGE")
+    ]
     return defines
 def find_cxx_path(commands):
@@ -78,13 +79,14 @@ CONFIG_NEWLIB_LIBC=y
     return include_paths
 def extract_cxx_flags(command):
-    flags = []
     # Extracts CXXFLAGS from the command string, excluding includes and defines.
     flag_pattern = re.compile(
         r"(-O[0-3s]|-g|-std=[^\s]+|-Wall|-Wextra|-Werror|--[^\s]+|-f[^\s]+|-m[^\s]+|-imacros\s*[^\s]+)"
     )
-    for match in flag_pattern.findall(command):
-        flags.append(match.replace("-imacros ", "-imacros"))
+    flags = [
+        match.replace("-imacros ", "-imacros")
+        for match in flag_pattern.findall(command)
+    ]
     return flags
 def transform_to_idedata_format(compile_commands):


@@ -38,8 +38,7 @@ async def test_duplicate_entities_on_different_devices(
     # Get entity list
     entities = await client.list_entities_services()
     all_entities: list[EntityInfo] = []
-    for entity_list in entities[0]:
-        all_entities.append(entity_list)
+    all_entities.extend(entities[0])
     # Group entities by type for easier testing
     sensors = [e for e in all_entities if e.__class__.__name__ == "SensorInfo"]


@@ -23,9 +23,7 @@ async def test_host_mode_fan_preset(
     entities = await client.list_entities_services()
     fans: list[FanInfo] = []
     for entity_list in entities:
-        for entity in entity_list:
-            if isinstance(entity, FanInfo):
-                fans.append(entity)
+        fans.extend(entity for entity in entity_list if isinstance(entity, FanInfo))
     # Create a map of fan names to entity info
     fan_map = {fan.name: fan for fan in fans}


@@ -31,10 +31,8 @@ def dict_diff(a, b, path=""):
     if isinstance(a, dict) and isinstance(b, dict):
         a_keys = set(a)
         b_keys = set(b)
-        for key in a_keys - b_keys:
-            diffs.append(f"{path}/{key} only in actual")
-        for key in b_keys - a_keys:
-            diffs.append(f"{path}/{key} only in expected")
+        diffs.extend(f"{path}/{key} only in actual" for key in a_keys - b_keys)
+        diffs.extend(f"{path}/{key} only in expected" for key in b_keys - a_keys)
         for key in a_keys & b_keys:
             diffs.extend(dict_diff(a[key], b[key], f"{path}/{key}"))
     elif isinstance(a, list) and isinstance(b, list):
@@ -42,11 +40,14 @@ def dict_diff(a, b, path=""):
         for i in range(min_len):
             diffs.extend(dict_diff(a[i], b[i], f"{path}[{i}]"))
         if len(a) > len(b):
-            for i in range(min_len, len(a)):
-                diffs.append(f"{path}[{i}] only in actual: {a[i]!r}")
+            diffs.extend(
+                f"{path}[{i}] only in actual: {a[i]!r}" for i in range(min_len, len(a))
+            )
         elif len(b) > len(a):
-            for i in range(min_len, len(b)):
-                diffs.append(f"{path}[{i}] only in expected: {b[i]!r}")
+            diffs.extend(
+                f"{path}[{i}] only in expected: {b[i]!r}"
+                for i in range(min_len, len(b))
+            )
     elif a != b:
         diffs.append(f"\t{path}: actual={a!r} expected={b!r}")
     return diffs