[ruff] Enable PERF rules and fix all violations (#9874)

Author: J. Nick Koston
Date: 2025-07-25 08:15:54 -10:00
Committer: GitHub
parent 88ccde4ba1
commit f808c38f10
GPG Key ID: B5690EEEBB952194
18 changed files with 90 additions and 96 deletions
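
Ruff's PERF rules are derived from Perflint and flag loop patterns that have a faster, more idiomatic equivalent. Nearly every hunk below is one of two fixes: PERF401 (building a list by calling append in a loop instead of using a comprehension or extend) and PERF102 (iterating dict.items() when only the keys or only the values are used). As a standalone illustration of the first pattern, with invented data rather than code from this diff:

    ports = ["/dev/ttyUSB0", "/dev/ttyACM0"]  # hypothetical input

    # PERF401: manual list building ...
    labels = []
    for path in ports:
        labels.append(f"{path} (serial)")

    # ... becomes a list comprehension, which Ruff can rewrite automatically.
    labels = [f"{path} (serial)" for path in ports]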


@@ -89,9 +89,9 @@ def choose_prompt(options, purpose: str = None):
 def choose_upload_log_host(
     default, check_default, show_ota, show_mqtt, show_api, purpose: str = None
 ):
-    options = []
-    for port in get_serial_ports():
-        options.append((f"{port.path} ({port.description})", port.path))
+    options = [
+        (f"{port.path} ({port.description})", port.path) for port in get_serial_ports()
+    ]
     if default == "SERIAL":
         return choose_prompt(options, purpose=purpose)
     if (show_ota and "ota" in CORE.config) or (show_api and "api" in CORE.config):


@@ -266,8 +266,10 @@ async def delayed_off_filter_to_code(config, filter_id):
 async def autorepeat_filter_to_code(config, filter_id):
     timings = []
     if len(config) > 0:
-        for conf in config:
-            timings.append((conf[CONF_DELAY], conf[CONF_TIME_OFF], conf[CONF_TIME_ON]))
+        timings.extend(
+            (conf[CONF_DELAY], conf[CONF_TIME_OFF], conf[CONF_TIME_ON])
+            for conf in config
+        )
     else:
         timings.append(
             (
@@ -573,16 +575,15 @@ async def setup_binary_sensor_core_(var, config):
         await automation.build_automation(trigger, [], conf)
     for conf in config.get(CONF_ON_MULTI_CLICK, []):
-        timings = []
-        for tim in conf[CONF_TIMING]:
-            timings.append(
-                cg.StructInitializer(
-                    MultiClickTriggerEvent,
-                    ("state", tim[CONF_STATE]),
-                    ("min_length", tim[CONF_MIN_LENGTH]),
-                    ("max_length", tim.get(CONF_MAX_LENGTH, 4294967294)),
-                )
-            )
+        timings = [
+            cg.StructInitializer(
+                MultiClickTriggerEvent,
+                ("state", tim[CONF_STATE]),
+                ("min_length", tim[CONF_MIN_LENGTH]),
+                ("max_length", tim.get(CONF_MAX_LENGTH, 4294967294)),
+            )
+            for tim in conf[CONF_TIMING]
+        ]
         trigger = cg.new_Pvariable(conf[CONF_TRIGGER_ID], var, timings)
         if CONF_INVALID_COOLDOWN in conf:
             cg.add(trigger.set_invalid_cooldown(conf[CONF_INVALID_COOLDOWN]))


@@ -74,8 +74,7 @@ def range_segment_list(input):
     if isinstance(input, list):
        for list_item in input:
            if isinstance(list_item, list):
-                for item in list_item:
-                    flat_list.append(item)
+                flat_list.extend(list_item)
            else:
                flat_list.append(list_item)
    else:
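
The inner loop in range_segment_list copies elements without transforming them, which is the copy-only variant of the same family (likely PERF402, manual-list-copy; the exact rule code is not recorded in the hunk). list.extend(iterable) performs the same flattening in a single call. A self-contained sketch:

    def flatten(segments):
        # Mirrors the range_segment_list shape: scalars and lists mixed together.
        flat = []
        for item in segments:
            if isinstance(item, list):
                flat.extend(item)  # one call instead of one append per element
            else:
                flat.append(item)
        return flat

    assert flatten([[1, 2], 3, [4]]) == [1, 2, 3, 4]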


@@ -982,7 +982,7 @@ def copy_files():
         __version__,
     )
-    for _, file in CORE.data[KEY_ESP32][KEY_EXTRA_BUILD_FILES].items():
+    for file in CORE.data[KEY_ESP32][KEY_EXTRA_BUILD_FILES].values():
         if file[KEY_PATH].startswith("http"):
             import requests
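
This copy_files hunk and several later ones are the other recurring fix, PERF102 (incorrect-dict-iterator): unpacking .items() only to discard one half builds a key/value tuple per entry for nothing. Iterating .values() (or the dict itself, for keys) is equivalent and cheaper. A sketch with a hypothetical stand-in for the extra-build-files mapping:

    extra_build_files = {  # invented data
        "partitions": {"path": "partitions.csv"},
        "bootloader": {"path": "https://example.invalid/bootloader.bin"},
    }

    # Flagged: the key is bound to _ and never used.
    for _, file in extra_build_files.items():
        print(file["path"])

    # Fix: iterate the values directly.
    for file in extra_build_files.values():
        print(file["path"])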


@@ -310,9 +310,7 @@ async def to_code(config):
     for conf in config.get(CONF_ON_BLE_ADVERTISE, []):
         trigger = cg.new_Pvariable(conf[CONF_TRIGGER_ID], var)
         if CONF_MAC_ADDRESS in conf:
-            addr_list = []
-            for it in conf[CONF_MAC_ADDRESS]:
-                addr_list.append(it.as_hex)
+            addr_list = [it.as_hex for it in conf[CONF_MAC_ADDRESS]]
             cg.add(trigger.set_addresses(addr_list))
         await automation.build_automation(trigger, [(ESPBTDeviceConstRef, "x")], conf)
     for conf in config.get(CONF_ON_BLE_SERVICE_DATA_ADVERTISE, []):


@@ -73,8 +73,7 @@ def ota_esphome_final_validate(config):
         else:
             new_ota_conf.append(ota_conf)
-    for port_conf in merged_ota_esphome_configs_by_port.values():
-        new_ota_conf.append(port_conf)
+    new_ota_conf.extend(merged_ota_esphome_configs_by_port.values())
     full_conf[CONF_OTA] = new_ota_conf
     fv.full_config.set(full_conf)


@@ -41,9 +41,7 @@ CONFIG_SCHEMA = lcd_base.LCD_SCHEMA.extend(
 async def to_code(config):
     var = cg.new_Pvariable(config[CONF_ID])
     await lcd_base.setup_lcd_display(var, config)
-    pins_ = []
-    for conf in config[CONF_DATA_PINS]:
-        pins_.append(await cg.gpio_pin_expression(conf))
+    pins_ = [await cg.gpio_pin_expression(conf) for conf in config[CONF_DATA_PINS]]
     cg.add(var.set_data_pins(*pins_))
     enable = await cg.gpio_pin_expression(config[CONF_ENABLE_PIN])
     cg.add(var.set_enable_pin(enable))
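
One detail worth calling out in the LCD data-pins hunk above: the comprehension contains an await. That is legal because PEP 530 allows await expressions inside comprehensions within an async def, and the awaits still execute sequentially, one per data pin, exactly as the original loop did. A runnable toy version (gpio_pin_expression here is a stand-in, not the real cg helper):

    import asyncio

    async def gpio_pin_expression(conf):  # stand-in for cg.gpio_pin_expression
        await asyncio.sleep(0)
        return f"GPIOPin({conf['number']})"

    async def to_code(config):
        # Sequential awaits inside a list comprehension (PEP 530).
        return [await gpio_pin_expression(conf) for conf in config["data_pins"]]

    print(asyncio.run(to_code({"data_pins": [{"number": 4}, {"number": 5}]})))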


@@ -291,31 +291,30 @@ async def random_effect_to_code(config, effect_id):
 )
 async def strobe_effect_to_code(config, effect_id):
     var = cg.new_Pvariable(effect_id, config[CONF_NAME])
-    colors = []
-    for color in config.get(CONF_COLORS, []):
-        colors.append(
-            cg.StructInitializer(
-                StrobeLightEffectColor,
-                (
-                    "color",
-                    LightColorValues(
-                        color.get(CONF_COLOR_MODE, ColorMode.UNKNOWN),
-                        color[CONF_STATE],
-                        color[CONF_BRIGHTNESS],
-                        color[CONF_COLOR_BRIGHTNESS],
-                        color[CONF_RED],
-                        color[CONF_GREEN],
-                        color[CONF_BLUE],
-                        color[CONF_WHITE],
-                        color.get(CONF_COLOR_TEMPERATURE, 0.0),
-                        color[CONF_COLD_WHITE],
-                        color[CONF_WARM_WHITE],
-                    ),
-                ),
-                ("duration", color[CONF_DURATION]),
-                ("transition_length", color[CONF_TRANSITION_LENGTH]),
-            )
-        )
+    colors = [
+        cg.StructInitializer(
+            StrobeLightEffectColor,
+            (
+                "color",
+                LightColorValues(
+                    color.get(CONF_COLOR_MODE, ColorMode.UNKNOWN),
+                    color[CONF_STATE],
+                    color[CONF_BRIGHTNESS],
+                    color[CONF_COLOR_BRIGHTNESS],
+                    color[CONF_RED],
+                    color[CONF_GREEN],
+                    color[CONF_BLUE],
+                    color[CONF_WHITE],
+                    color.get(CONF_COLOR_TEMPERATURE, 0.0),
+                    color[CONF_COLD_WHITE],
+                    color[CONF_WARM_WHITE],
+                ),
+            ),
+            ("duration", color[CONF_DURATION]),
+            ("transition_length", color[CONF_TRANSITION_LENGTH]),
+        )
+        for color in config.get(CONF_COLORS, [])
+    ]
     cg.add(var.set_colors(colors))
     return var
@@ -404,20 +403,19 @@ async def addressable_color_wipe_effect_to_code(config, effect_id):
     var = cg.new_Pvariable(effect_id, config[CONF_NAME])
     cg.add(var.set_add_led_interval(config[CONF_ADD_LED_INTERVAL]))
     cg.add(var.set_reverse(config[CONF_REVERSE]))
-    colors = []
-    for color in config.get(CONF_COLORS, []):
-        colors.append(
-            cg.StructInitializer(
-                AddressableColorWipeEffectColor,
-                ("r", int(round(color[CONF_RED] * 255))),
-                ("g", int(round(color[CONF_GREEN] * 255))),
-                ("b", int(round(color[CONF_BLUE] * 255))),
-                ("w", int(round(color[CONF_WHITE] * 255))),
-                ("random", color[CONF_RANDOM]),
-                ("num_leds", color[CONF_NUM_LEDS]),
-                ("gradient", color[CONF_GRADIENT]),
-            )
-        )
+    colors = [
+        cg.StructInitializer(
+            AddressableColorWipeEffectColor,
+            ("r", int(round(color[CONF_RED] * 255))),
+            ("g", int(round(color[CONF_GREEN] * 255))),
+            ("b", int(round(color[CONF_BLUE] * 255))),
+            ("w", int(round(color[CONF_WHITE] * 255))),
+            ("random", color[CONF_RANDOM]),
+            ("num_leds", color[CONF_NUM_LEDS]),
+            ("gradient", color[CONF_GRADIENT]),
+        )
+        for color in config.get(CONF_COLORS, [])
+    ]
     cg.add(var.set_colors(colors))
     return var


@@ -273,7 +273,7 @@ CONFIG_SCHEMA = PIPSOLAR_COMPONENT_SCHEMA.extend(
 async def to_code(config):
     paren = await cg.get_variable(config[CONF_PIPSOLAR_ID])
-    for type, _ in TYPES.items():
+    for type in TYPES:
         if type in config:
             conf = config[type]
             sens = await sensor.new_sensor(conf)


@@ -317,7 +317,7 @@ def preload_core_config(config, result) -> str:
     target_platforms = []
-    for domain, _ in config.items():
+    for domain in config:
         if domain.startswith("."):
             continue
         if _is_target_platform(domain):
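
The pipsolar and preload_core_config hunks are the keys-side counterpart of PERF102: iterating a dict yields its keys, so for key, _ in mapping.items() collapses to iteration over the mapping itself. A tiny sketch with placeholder data:

    TYPES = {"grid_voltage": object(), "grid_frequency": object()}  # hypothetical

    for type_, _ in TYPES.items():  # builds tuples only to discard the values
        print(type_)

    for type_ in TYPES:  # a dict iterates over its keys directly
        print(type_)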


@@ -144,7 +144,7 @@ def websocket_class(cls):
     if not hasattr(cls, "_message_handlers"):
         cls._message_handlers = {}
-    for _, method in cls.__dict__.items():
+    for method in cls.__dict__.values():
         if hasattr(method, "_message_handler"):
             cls._message_handlers[method._message_handler] = method


@@ -111,12 +111,13 @@ exclude = ['generated']
 [tool.ruff.lint]
 select = [
     "E",  # pycodestyle
     "F",  # pyflakes/autoflake
     "I",  # isort
+    "PERF",  # performance
     "PL",  # pylint
     "SIM",  # flake8-simplify
     "UP",  # pyupgrade
 ]
 ignore = [
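
Adding "PERF" to select turns on the whole Perflint-derived family, not just the two codes fixed above. A plain ruff check now enforces it (the configuration is read from this pyproject.toml), and ruff check --fix applies the rewrites Ruff considers fixable (some of them may require --unsafe-fixes, depending on the Ruff version). Another member of the family, shown here with invented data rather than code from this diff, is PERF403, the dict-comprehension counterpart of PERF401:

    ports = [("/dev/ttyUSB0", "CP2102"), ("/dev/ttyACM0", "ESP32-S3")]  # hypothetical

    # PERF403: building a dict by assigning inside a loop ...
    by_path = {}
    for path, desc in ports:
        by_path[path] = desc

    # ... becomes a dict comprehension.
    by_path = {path: desc for path, desc in ports}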


@@ -66,9 +66,10 @@ def main():
     )
     args = parser.parse_args()
-    files = []
-    for path in git_ls_files(["*.cpp", "*.h", "*.tcc"]):
-        files.append(os.path.relpath(path, os.getcwd()))
+    cwd = os.getcwd()
+    files = [
+        os.path.relpath(path, cwd) for path in git_ls_files(["*.cpp", "*.h", "*.tcc"])
+    ]
     if args.files:
         # Match against files specified on command-line


@@ -219,9 +219,8 @@ def main():
     )
     args = parser.parse_args()
-    files = []
-    for path in git_ls_files(["*.cpp"]):
-        files.append(os.path.relpath(path, os.getcwd()))
+    cwd = os.getcwd()
+    files = [os.path.relpath(path, cwd) for path in git_ls_files(["*.cpp"])]
     # Print initial file count if it's large
     if len(files) > 50:
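
Beyond the comprehension, both script hunks hoist os.getcwd() into a cwd variable so the working directory is resolved once rather than once per path; behavior is unchanged as long as nothing calls os.chdir() mid-loop. A minimal sketch:

    import os

    paths = ["/repo/esphome/core.cpp", "/repo/esphome/core.h"]  # hypothetical

    cwd = os.getcwd()  # looked up once, outside the comprehension
    files = [os.path.relpath(path, cwd) for path in paths]
    print(files)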


@@ -41,11 +41,12 @@ CONFIG_NEWLIB_LIBC=y
     return include_paths
 def extract_defines(command):
-    defines = []
     define_pattern = re.compile(r"-D\s*([^\s]+)")
-    for match in define_pattern.findall(command):
-        if match not in ("_ASMLANGUAGE"):
-            defines.append(match)
+    defines = [
+        match
+        for match in define_pattern.findall(command)
+        if match not in ("_ASMLANGUAGE")
+    ]
     return defines
 def find_cxx_path(commands):
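
One pre-existing quirk carries over into the new comprehension untouched: ("_ASMLANGUAGE") is a parenthesized string, not a one-element tuple, so match not in ("_ASMLANGUAGE") is a substring test. It excludes the exact name as intended, but it would also exclude any define that happens to be a substring of it; a trailing comma would make it a true membership test. A quick demonstration:

    defines = ["_ASMLANGUAGE", "ASM", "CONFIG_NEWLIB_LIBC"]

    # String containment: "ASM" is dropped too, because it is a substring.
    kept_str = [d for d in defines if d not in ("_ASMLANGUAGE")]

    # Tuple membership: only the exact name is dropped.
    kept_tuple = [d for d in defines if d not in ("_ASMLANGUAGE",)]

    print(kept_str)    # ['CONFIG_NEWLIB_LIBC']
    print(kept_tuple)  # ['ASM', 'CONFIG_NEWLIB_LIBC']
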
@@ -78,13 +79,14 @@ CONFIG_NEWLIB_LIBC=y
     return include_paths
 def extract_cxx_flags(command):
-    flags = []
     # Extracts CXXFLAGS from the command string, excluding includes and defines.
     flag_pattern = re.compile(
         r"(-O[0-3s]|-g|-std=[^\s]+|-Wall|-Wextra|-Werror|--[^\s]+|-f[^\s]+|-m[^\s]+|-imacros\s*[^\s]+)"
     )
-    for match in flag_pattern.findall(command):
-        flags.append(match.replace("-imacros ", "-imacros"))
+    flags = [
+        match.replace("-imacros ", "-imacros")
+        for match in flag_pattern.findall(command)
+    ]
     return flags
 def transform_to_idedata_format(compile_commands):


@@ -38,8 +38,7 @@ async def test_duplicate_entities_on_different_devices(
     # Get entity list
     entities = await client.list_entities_services()
     all_entities: list[EntityInfo] = []
-    for entity_list in entities[0]:
-        all_entities.append(entity_list)
+    all_entities.extend(entities[0])
     # Group entities by type for easier testing
     sensors = [e for e in all_entities if e.__class__.__name__ == "SensorInfo"]


@@ -23,9 +23,7 @@ async def test_host_mode_fan_preset(
     entities = await client.list_entities_services()
     fans: list[FanInfo] = []
     for entity_list in entities:
-        for entity in entity_list:
-            if isinstance(entity, FanInfo):
-                fans.append(entity)
+        fans.extend(entity for entity in entity_list if isinstance(entity, FanInfo))
     # Create a map of fan names to entity info
     fan_map = {fan.name: fan for fan in fans}


@@ -31,10 +31,8 @@ def dict_diff(a, b, path=""):
     if isinstance(a, dict) and isinstance(b, dict):
         a_keys = set(a)
         b_keys = set(b)
-        for key in a_keys - b_keys:
-            diffs.append(f"{path}/{key} only in actual")
-        for key in b_keys - a_keys:
-            diffs.append(f"{path}/{key} only in expected")
+        diffs.extend(f"{path}/{key} only in actual" for key in a_keys - b_keys)
+        diffs.extend(f"{path}/{key} only in expected" for key in b_keys - a_keys)
         for key in a_keys & b_keys:
             diffs.extend(dict_diff(a[key], b[key], f"{path}/{key}"))
     elif isinstance(a, list) and isinstance(b, list):
@@ -42,11 +40,14 @@ def dict_diff(a, b, path=""):
         for i in range(min_len):
             diffs.extend(dict_diff(a[i], b[i], f"{path}[{i}]"))
         if len(a) > len(b):
-            for i in range(min_len, len(a)):
-                diffs.append(f"{path}[{i}] only in actual: {a[i]!r}")
+            diffs.extend(
+                f"{path}[{i}] only in actual: {a[i]!r}" for i in range(min_len, len(a))
+            )
         elif len(b) > len(a):
-            for i in range(min_len, len(b)):
-                diffs.append(f"{path}[{i}] only in expected: {b[i]!r}")
+            diffs.extend(
+                f"{path}[{i}] only in expected: {b[i]!r}"
+                for i in range(min_len, len(b))
+            )
     elif a != b:
         diffs.append(f"\t{path}: actual={a!r} expected={b!r}")
     return diffs