Fix unnecessary-dict-comprehension-for-iterable (C420) (#140555)

Authored by Marc Mueller on 2025-03-13 23:32:00 +01:00, committed by GitHub
parent b48ab77a38
commit 5cf3bea8fe
19 changed files with 46 additions and 54 deletions
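
A note on the rule: Ruff's C420 (unnecessary-dict-comprehension-for-iterable) flags dict comprehensions that map every key of an iterable to one constant value, because dict.fromkeys expresses the same intent directly. A minimal sketch of the equivalence (the names are illustrative, not from this codebase):

    keys = ["a", "b", "c"]
    # {k: VALUE for k in keys} builds the same dict as dict.fromkeys(keys, VALUE)
    assert {k: 0 for k in keys} == dict.fromkeys(keys, 0)
    # With the value argument omitted, every key maps to None
    assert {k: None for k in keys} == dict.fromkeys(keys)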

View File

@@ -97,9 +97,9 @@ ISY_CONTROL_TO_DEVICE_CLASS = {
     "WEIGHT": SensorDeviceClass.WEIGHT,
     "WINDCH": SensorDeviceClass.TEMPERATURE,
 }
-ISY_CONTROL_TO_STATE_CLASS = {
-    control: SensorStateClass.MEASUREMENT for control in ISY_CONTROL_TO_DEVICE_CLASS
-}
+ISY_CONTROL_TO_STATE_CLASS = dict.fromkeys(
+    ISY_CONTROL_TO_DEVICE_CLASS, SensorStateClass.MEASUREMENT
+)
 ISY_CONTROL_TO_ENTITY_CATEGORY = {
     PROP_RAMP_RATE: EntityCategory.DIAGNOSTIC,
     PROP_ON_LEVEL: EntityCategory.DIAGNOSTIC,

View File

@@ -486,7 +486,7 @@ class KNXModule:
                 transcoder := DPTBase.parse_transcoder(dpt)
             ):
                 self._address_filter_transcoder.update(
-                    {_filter: transcoder for _filter in _filters}
+                    dict.fromkeys(_filters, transcoder)
                 )

         return self.xknx.telegram_queue.register_telegram_received_cb(

View File

@@ -126,7 +126,7 @@ async def service_event_register_modify(call: ServiceCall) -> None:
         transcoder := DPTBase.parse_transcoder(dpt)
     ):
         knx_module.group_address_transcoder.update(
-            {_address: transcoder for _address in group_addresses}
+            dict.fromkeys(group_addresses, transcoder)
         )
     for group_address in group_addresses:
         if group_address in knx_module.knx_event_callback.group_addresses:

View File

@@ -203,7 +203,7 @@ class LoggerSettings:
         else:
             loggers = {domain}

-        combined_logs = {logger: LOGSEVERITY[settings.level] for logger in loggers}
+        combined_logs = dict.fromkeys(loggers, LOGSEVERITY[settings.level])

         # Don't override the log levels with the ones from YAML
         # since we want whatever the user is asking for to be honored.

View File

@@ -135,7 +135,7 @@ class NetatmoOptionsFlowHandler(OptionsFlow):
                     vol.Optional(
                         CONF_WEATHER_AREAS,
                         default=weather_areas,
-                    ): cv.multi_select({wa: None for wa in weather_areas}),
+                    ): cv.multi_select(dict.fromkeys(weather_areas)),
                     vol.Optional(CONF_NEW_AREA): str,
                 }
             )

View File

@@ -234,12 +234,7 @@ class OnewireOptionsFlowHandler(OptionsFlow):
                         INPUT_ENTRY_DEVICE_SELECTION,
                         default=self._get_current_configured_sensors(),
                         description="Multiselect with list of devices to choose from",
-                    ): cv.multi_select(
-                        {
-                            friendly_name: False
-                            for friendly_name in self.configurable_devices
-                        }
-                    ),
+                    ): cv.multi_select(dict.fromkeys(self.configurable_devices, False)),
                 }
             ),
             errors=errors,

View File

@@ -175,7 +175,7 @@ def _validate_db_schema_precision(
     # Mark the session as read_only to ensure that the test data is not committed
     # to the database and we always rollback when the scope is exited
     with session_scope(session=instance.get_session(), read_only=True) as session:
-        db_object = table_object(**{column: PRECISE_NUMBER for column in columns})
+        db_object = table_object(**dict.fromkeys(columns, PRECISE_NUMBER))
         table = table_object.__tablename__
         try:
             session.add(db_object)
@@ -184,7 +184,7 @@
             _check_columns(
                 schema_errors=schema_errors,
                 stored={column: getattr(db_object, column) for column in columns},
-                expected={column: PRECISE_NUMBER for column in columns},
+                expected=dict.fromkeys(columns, PRECISE_NUMBER),
                 columns=columns,
                 table_name=table,
                 supports="double precision",

View File

@@ -136,31 +136,28 @@ QUERY_STATISTICS_SUMMARY_SUM = (
 STATISTIC_UNIT_TO_UNIT_CONVERTER: dict[str | None, type[BaseUnitConverter]] = {
-    **{unit: AreaConverter for unit in AreaConverter.VALID_UNITS},
-    **{
-        unit: BloodGlucoseConcentrationConverter
-        for unit in BloodGlucoseConcentrationConverter.VALID_UNITS
-    },
-    **{unit: ConductivityConverter for unit in ConductivityConverter.VALID_UNITS},
-    **{unit: DataRateConverter for unit in DataRateConverter.VALID_UNITS},
-    **{unit: DistanceConverter for unit in DistanceConverter.VALID_UNITS},
-    **{unit: DurationConverter for unit in DurationConverter.VALID_UNITS},
-    **{unit: ElectricCurrentConverter for unit in ElectricCurrentConverter.VALID_UNITS},
-    **{
-        unit: ElectricPotentialConverter
-        for unit in ElectricPotentialConverter.VALID_UNITS
-    },
-    **{unit: EnergyConverter for unit in EnergyConverter.VALID_UNITS},
-    **{unit: EnergyDistanceConverter for unit in EnergyDistanceConverter.VALID_UNITS},
-    **{unit: InformationConverter for unit in InformationConverter.VALID_UNITS},
-    **{unit: MassConverter for unit in MassConverter.VALID_UNITS},
-    **{unit: PowerConverter for unit in PowerConverter.VALID_UNITS},
-    **{unit: PressureConverter for unit in PressureConverter.VALID_UNITS},
-    **{unit: SpeedConverter for unit in SpeedConverter.VALID_UNITS},
-    **{unit: TemperatureConverter for unit in TemperatureConverter.VALID_UNITS},
-    **{unit: UnitlessRatioConverter for unit in UnitlessRatioConverter.VALID_UNITS},
-    **{unit: VolumeConverter for unit in VolumeConverter.VALID_UNITS},
-    **{unit: VolumeFlowRateConverter for unit in VolumeFlowRateConverter.VALID_UNITS},
+    **dict.fromkeys(AreaConverter.VALID_UNITS, AreaConverter),
+    **dict.fromkeys(
+        BloodGlucoseConcentrationConverter.VALID_UNITS,
+        BloodGlucoseConcentrationConverter,
+    ),
+    **dict.fromkeys(ConductivityConverter.VALID_UNITS, ConductivityConverter),
+    **dict.fromkeys(DataRateConverter.VALID_UNITS, DataRateConverter),
+    **dict.fromkeys(DistanceConverter.VALID_UNITS, DistanceConverter),
+    **dict.fromkeys(DurationConverter.VALID_UNITS, DurationConverter),
+    **dict.fromkeys(ElectricCurrentConverter.VALID_UNITS, ElectricCurrentConverter),
+    **dict.fromkeys(ElectricPotentialConverter.VALID_UNITS, ElectricPotentialConverter),
+    **dict.fromkeys(EnergyConverter.VALID_UNITS, EnergyConverter),
+    **dict.fromkeys(EnergyDistanceConverter.VALID_UNITS, EnergyDistanceConverter),
+    **dict.fromkeys(InformationConverter.VALID_UNITS, InformationConverter),
+    **dict.fromkeys(MassConverter.VALID_UNITS, MassConverter),
+    **dict.fromkeys(PowerConverter.VALID_UNITS, PowerConverter),
+    **dict.fromkeys(PressureConverter.VALID_UNITS, PressureConverter),
+    **dict.fromkeys(SpeedConverter.VALID_UNITS, SpeedConverter),
+    **dict.fromkeys(TemperatureConverter.VALID_UNITS, TemperatureConverter),
+    **dict.fromkeys(UnitlessRatioConverter.VALID_UNITS, UnitlessRatioConverter),
+    **dict.fromkeys(VolumeConverter.VALID_UNITS, VolumeConverter),
+    **dict.fromkeys(VolumeFlowRateConverter.VALID_UNITS, VolumeFlowRateConverter),
 }
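
Each dict.fromkeys(...) above expands via ** into a single flat unit-to-converter map, so a unit claimed by two converters would resolve to whichever entry comes later, exactly as with the original nested comprehensions. A small sketch of the merge behavior, with made-up units and labels:

    merged = {
        **dict.fromkeys(("m", "km"), "distance"),
        **dict.fromkeys(("s", "min"), "duration"),
    }
    assert merged == {"m": "distance", "km": "distance", "s": "duration", "min": "duration"}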

View File

@@ -30,9 +30,9 @@ RISCO_ARM = "arm"
 RISCO_PARTIAL_ARM = "partial_arm"
 RISCO_STATES = [RISCO_ARM, RISCO_PARTIAL_ARM, *RISCO_GROUPS]

-DEFAULT_RISCO_GROUPS_TO_HA = {
-    group: AlarmControlPanelState.ARMED_HOME for group in RISCO_GROUPS
-}
+DEFAULT_RISCO_GROUPS_TO_HA = dict.fromkeys(
+    RISCO_GROUPS, AlarmControlPanelState.ARMED_HOME
+)
 DEFAULT_RISCO_STATES_TO_HA = {
     RISCO_ARM: AlarmControlPanelState.ARMED_AWAY,
     RISCO_PARTIAL_ARM: AlarmControlPanelState.ARMED_HOME,

View File

@@ -75,7 +75,7 @@ class SolarLogCoordinator(DataUpdateCoordinator[SolarlogData]):
                 await self.solarlog.test_extended_data_available()
             if logged_in or await self.solarlog.test_extended_data_available():
                 device_list = await self.solarlog.update_device_list()
-                self.solarlog.set_enabled_devices({key: True for key in device_list})
+                self.solarlog.set_enabled_devices(dict.fromkeys(device_list, True))

     async def _async_update_data(self) -> SolarlogData:
         """Update the data from the SolarLog device."""

View File

@@ -548,7 +548,7 @@ class TelegramNotificationService:
         """Initialize the service."""
         self.allowed_chat_ids = allowed_chat_ids
         self._default_user = self.allowed_chat_ids[0]
-        self._last_message_id = {user: None for user in self.allowed_chat_ids}
+        self._last_message_id = dict.fromkeys(self.allowed_chat_ids)
         self._parsers = {
             PARSER_HTML: ParseMode.HTML,
             PARSER_MD: ParseMode.MARKDOWN,

View File

@@ -248,7 +248,7 @@ class TeslaFleetEnergySiteHistoryCoordinator(DataUpdateCoordinator[dict[str, Any
         self.updated_once = True

         # Add all time periods together
-        output = {key: 0 for key in ENERGY_HISTORY_FIELDS}
+        output = dict.fromkeys(ENERGY_HISTORY_FIELDS, 0)
         for period in data.get("time_series", []):
             for key in ENERGY_HISTORY_FIELDS:
                 output[key] += period.get(key, 0)
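
A caveat that makes this rewrite safe here but not everywhere: dict.fromkeys stores the same value object under every key, while a comprehension evaluates its value expression once per key. The two are interchangeable only for immutable (or deliberately shared) values, which covers every replacement in this commit: None, False, 0, enum members, converter classes, and tuples. A short sketch of the pitfall, with illustrative names:

    totals = dict.fromkeys(("solar", "grid"), 0)
    totals["solar"] += 1  # fine: rebinds the key, 0 itself is immutable

    shared = dict.fromkeys(("a", "b"), [])
    shared["a"].append(1)
    assert shared["b"] == [1]  # pitfall: both keys point at one shared list

    separate = {k: [] for k in ("a", "b")}
    separate["a"].append(1)
    assert separate["b"] == []  # the comprehension makes a fresh list per key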

View File

@@ -192,7 +192,7 @@ class TeslemetryEnergyHistoryCoordinator(DataUpdateCoordinator[dict[str, Any]]):
             raise UpdateFailed(e.message) from e

         # Add all time periods together
-        output = {key: 0 for key in ENERGY_HISTORY_FIELDS}
+        output = dict.fromkeys(ENERGY_HISTORY_FIELDS, 0)
         for period in data.get("time_series", []):
             for key in ENERGY_HISTORY_FIELDS:
                 output[key] += period.get(key, 0)

View File

@@ -52,7 +52,7 @@ class UkraineAlarmDataUpdateCoordinator(DataUpdateCoordinator[dict[str, Any]]):
         except aiohttp.ClientError as error:
             raise UpdateFailed(f"Error fetching alerts from API: {error}") from error

-        current = {alert_type: False for alert_type in ALERT_TYPES}
+        current = dict.fromkeys(ALERT_TYPES, False)
         for alert in res[0]["activeAlerts"]:
             current[alert["type"]] = True

View File

@@ -117,7 +117,7 @@ MODEL_TO_BUTTON_MAP: dict[str, tuple[str, ...]] = {
         ATTR_RESET_DUST_FILTER,
         ATTR_RESET_UPPER_FILTER,
     ),
-    **{model: BUTTONS_FOR_VACUUM for model in MODELS_VACUUM},
+    **dict.fromkeys(MODELS_VACUUM, BUTTONS_FOR_VACUUM),
 }

View File

@@ -929,7 +929,7 @@ async def check_translations(
     ignored_domains = set(ignore_translations_for_mock_domains)

     # Set all ignored translation keys to "unused"
-    translation_errors = {k: "unused" for k in ignore_missing_translations}
+    translation_errors = dict.fromkeys(ignore_missing_translations, "unused")

     translation_coros = set()

View File

@@ -38,7 +38,7 @@ async def test_empty_callbacks(hass: HomeAssistant) -> None:
     """Ensure we handle a missing callback in a subscription."""
     subscriber = HarmonySubscriberMixin(hass)
-    callbacks = {k: None for k in _ALL_CALLBACK_NAMES}
+    callbacks = dict.fromkeys(_ALL_CALLBACK_NAMES)
     subscriber.async_subscribe(HarmonyCallback(**callbacks))

     _call_all_callbacks(subscriber)
     await hass.async_block_till_done()

View File

@@ -176,7 +176,7 @@ WEATHER_EXPECTED_OBSERVATION_METRIC = {
     ATTR_WEATHER_HUMIDITY: 10,
 }

-NONE_OBSERVATION = {key: None for key in DEFAULT_OBSERVATION}
+NONE_OBSERVATION = dict.fromkeys(DEFAULT_OBSERVATION)

 DEFAULT_FORECAST = [
     {
@@ -235,4 +235,4 @@ EXPECTED_FORECAST_METRIC = {
     ATTR_FORECAST_HUMIDITY: 75,
 }

-NONE_FORECAST = [{key: None for key in DEFAULT_FORECAST[0]}]
+NONE_FORECAST = [dict.fromkeys(DEFAULT_FORECAST[0])]

View File

@@ -202,7 +202,7 @@ async def test_ll_hls_stream(
     datetime_re = re.compile(r"#EXT-X-PROGRAM-DATE-TIME:(?P<datetime>.+)")
     inf_re = re.compile(r"#EXTINF:(?P<segment_duration>[0-9]{1,}.[0-9]{3,}),")
     # keep track of which tests were done (indexed by re)
-    tested = {regex: False for regex in (part_re, datetime_re, inf_re)}
+    tested = dict.fromkeys((part_re, datetime_re, inf_re), False)
     # keep track of times and durations along playlist for checking consistency
     part_durations = []
     segment_duration = 0
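
As the last hunk shows, dict.fromkeys accepts any iterable of hashable keys, not just strings; here the tested map is keyed by compiled regex objects. A small sketch in the same spirit (the patterns and input are illustrative):

    import re

    patterns = (re.compile(r"\d+"), re.compile(r"[a-z]+"))
    seen = dict.fromkeys(patterns, False)  # compiled patterns are hashable
    for regex in patterns:
        if regex.search("abc123"):
            seen[regex] = True
    assert all(seen.values())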