Mirror of https://github.com/home-assistant/core.git (synced 2025-04-23 16:57:53 +00:00)

Add pylint CodeStyle extension (#53147)

commit f6b162bc39 (parent c35b5a1c64)

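The changes below apply the tuple-over-list suggestion emitted by the newly enabled CodeStyle extension (presumably its consider-using-tuple check): in-place list literals that are only iterated over or used for membership tests become tuple literals. A minimal sketch of the pattern, using hypothetical constant names rather than code from this diff:

# Sketch of the pattern applied throughout this commit (hypothetical names).
CONF_HOST = "host"
CONF_PORT = "port"
config = {"host": "192.0.2.1"}

# Before: a throwaway list literal is built just to loop over it.
for key in [CONF_HOST, CONF_PORT]:
    if key in config:
        print(key, config[key])

# After: a tuple literal expresses the same fixed collection and avoids
# allocating a new list object every time the loop runs.
for key in (CONF_HOST, CONF_PORT):
    if key in config:
        print(key, config[key])
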
@@ -299,7 +299,7 @@ def _validate_zone_input(zone_input):
         errors["base"] = "relay_inclusive"

     # The following keys must be int
-    for key in [CONF_ZONE_NUMBER, CONF_ZONE_LOOP, CONF_RELAY_ADDR, CONF_RELAY_CHAN]:
+    for key in (CONF_ZONE_NUMBER, CONF_ZONE_LOOP, CONF_RELAY_ADDR, CONF_RELAY_CHAN):
         if key in zone_input:
             try:
                 int(zone_input[key])
@@ -328,7 +328,7 @@ def _fix_input_types(zone_input):
     strings and then convert them to ints.
     """

-    for key in [CONF_ZONE_LOOP, CONF_RELAY_ADDR, CONF_RELAY_CHAN]:
+    for key in (CONF_ZONE_LOOP, CONF_RELAY_ADDR, CONF_RELAY_CHAN):
         if key in zone_input:
             zone_input[key] = int(zone_input[key])

@@ -56,7 +56,7 @@ async def device_scan(identifier, loop, cache=None):
         if matches:
             return cache, matches[0]

-    for hosts in [_host_filter(), None]:
+    for hosts in (_host_filter(), None):
         scan_result = await scan(loop, timeout=3, hosts=hosts)
         matches = [atv for atv in scan_result if _filter_device(atv)]

@@ -178,7 +178,7 @@ class AprsListenerThread(threading.Thread):
                 _LOGGER.warning(
                     "APRS message contained invalid posambiguity: %s", str(pos_amb)
                 )
-            for attr in [ATTR_ALTITUDE, ATTR_COMMENT, ATTR_COURSE, ATTR_SPEED]:
+            for attr in (ATTR_ALTITUDE, ATTR_COMMENT, ATTR_COURSE, ATTR_SPEED):
                 if attr in msg:
                     attrs[attr] = msg[attr]

@@ -52,7 +52,7 @@ async def async_setup_entry(
                 State(client, zone),
                 config_entry.unique_id or config_entry.entry_id,
             )
-            for zone in [1, 2]
+            for zone in (1, 2)
         ],
         True,
     )
@@ -451,12 +451,12 @@ async def async_setup_entry(hass, config_entry, async_add_entities):
                 "chargecycle_range",
                 "total_electric_distance",
             ):
-                for attr in [
+                for attr in (
                     "community_average",
                     "community_high",
                     "community_low",
                     "user_average",
-                ]:
+                ):
                     device = BMWConnectedDriveSensor(
                         account,
                         vehicle,
@@ -466,7 +466,7 @@ async def async_setup_entry(hass, config_entry, async_add_entities):
                     )
                     entities.append(device)
             if attribute_name == "chargecycle_range":
-                for attr in ["user_current_charge_cycle", "user_high"]:
+                for attr in ("user_current_charge_cycle", "user_high"):
                     device = BMWConnectedDriveSensor(
                         account,
                         vehicle,
@@ -476,7 +476,7 @@ async def async_setup_entry(hass, config_entry, async_add_entities):
                     )
                     entities.append(device)
             if attribute_name == "total_electric_distance":
-                for attr in ["user_total"]:
+                for attr in ("user_total",):
                     device = BMWConnectedDriveSensor(
                         account,
                         vehicle,
@@ -593,13 +593,13 @@ class BMWConnectedDriveSensor(BMWConnectedDriveBaseEntity, SensorEntity):
             self._state = getattr(vehicle_last_trip, self._attribute)
         elif self._service == SERVICE_ALL_TRIPS:
             vehicle_all_trips = self._vehicle.state.all_trips
-            for attribute in [
+            for attribute in (
                 "average_combined_consumption",
                 "average_electric_consumption",
                 "average_recuperation",
                 "chargecycle_range",
                 "total_electric_distance",
-            ]:
+            ):
                 if self._attribute.startswith(f"{attribute}_"):
                     attr = getattr(vehicle_all_trips, attribute)
                     sub_attr = self._attribute.replace(f"{attribute}_", "")
@@ -109,7 +109,7 @@ async def async_setup_entry(
     api_class = ClimaCellV3WeatherEntity if api_version == 3 else ClimaCellWeatherEntity
     entities = [
         api_class(config_entry, coordinator, api_version, forecast_type)
-        for forecast_type in [DAILY, HOURLY, NOWCAST]
+        for forecast_type in (DAILY, HOURLY, NOWCAST)
     ]
     async_add_entities(entities)

@@ -166,10 +166,10 @@ class Configurator:
         data.update(
             {
                 key: value
-                for key, value in [
+                for key, value in (
                     (ATTR_DESCRIPTION, description),
                     (ATTR_SUBMIT_CAPTION, submit_caption),
-                ]
+                )
                 if value is not None
             }
         )
@@ -135,7 +135,7 @@ class DaikinClimate(ClimateEntity):
         """Set device settings using API."""
         values = {}

-        for attr in [ATTR_TEMPERATURE, ATTR_FAN_MODE, ATTR_SWING_MODE, ATTR_HVAC_MODE]:
+        for attr in (ATTR_TEMPERATURE, ATTR_FAN_MODE, ATTR_SWING_MODE, ATTR_HVAC_MODE):
            value = settings.get(attr)
            if value is None:
                continue
@@ -153,7 +153,7 @@ async def async_refresh_devices_service(gateway):
     await gateway.api.refresh_state()
     gateway.ignore_state_updates = False

-    for new_device_type in [NEW_GROUP, NEW_LIGHT, NEW_SCENE, NEW_SENSOR]:
+    for new_device_type in (NEW_GROUP, NEW_LIGHT, NEW_SCENE, NEW_SENSOR):
         gateway.async_add_device_callback(new_device_type, force=True)


@@ -52,7 +52,7 @@ async def async_setup_entry(
     for device in gateway.devices.values():
         if hasattr(device, "consumption_property"):
             for consumption in device.consumption_property:
-                for consumption_type in ["current", "total"]:
+                for consumption_type in ("current", "total"):
                     entities.append(
                         DevoloConsumptionEntity(
                             homecontrol=gateway,
@@ -195,7 +195,7 @@ async def _update_listener(hass: HomeAssistant, entry: ConfigEntry):
 def _async_import_options_from_data_if_missing(hass: HomeAssistant, entry: ConfigEntry):
     options = dict(entry.options)
     modified = False
-    for importable_option in [CONF_EVENTS]:
+    for importable_option in (CONF_EVENTS,):
         if importable_option not in entry.options and importable_option in entry.data:
             options[importable_option] = entry.data[importable_option]
             modified = True
@@ -653,10 +653,10 @@ class EvoChild(EvoDevice):
         this_sp_day = -1 if sp_idx == -1 else 0
         next_sp_day = 1 if sp_idx + 1 == len(day["Switchpoints"]) else 0

-        for key, offset, idx in [
+        for key, offset, idx in (
             ("this", this_sp_day, sp_idx),
             ("next", next_sp_day, (sp_idx + 1) * (1 - next_sp_day)),
-        ]:
+        ):
             sp_date = (day_time + timedelta(days=offset)).strftime("%Y-%m-%d")
             day = self._schedule["DailySchedules"][(day_of_week + offset) % 7]
             switchpoint = day["Switchpoints"][idx]
@@ -48,7 +48,7 @@ async def async_setup_entry(hass, entry):
             raise UpdateFailed(err) from err

     data_init_tasks = []
-    for api_category in [CATEGORY_CDC_REPORT, CATEGORY_USER_REPORT]:
+    for api_category in (CATEGORY_CDC_REPORT, CATEGORY_USER_REPORT):
         coordinator = hass.data[DOMAIN][DATA_COORDINATOR][entry.entry_id][
             api_category
         ] = DataUpdateCoordinator(
@@ -13,7 +13,7 @@ _LOGGER = logging.getLogger(__name__)
 async def async_setup_services(hass: HomeAssistant) -> None:
     """Set up services for Fritz integration."""

-    for service in [SERVICE_REBOOT, SERVICE_RECONNECT]:
+    for service in (SERVICE_REBOOT, SERVICE_RECONNECT):
         if hass.services.has_service(DOMAIN, service):
             return

@@ -34,7 +34,7 @@ async def async_setup_services(hass: HomeAssistant) -> None:
         fritz_tools = hass.data[DOMAIN][entry]
         await fritz_tools.service_fritzbox(service_call.service)

-    for service in [SERVICE_REBOOT, SERVICE_RECONNECT]:
+    for service in (SERVICE_REBOOT, SERVICE_RECONNECT):
         hass.services.async_register(DOMAIN, service, async_call_fritz_service)


@@ -120,7 +120,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:

     async_track_time_interval(hass, broker.async_update, SCAN_INTERVAL)

-    for platform in ["climate", "water_heater", "sensor", "binary_sensor", "switch"]:
+    for platform in ("climate", "water_heater", "sensor", "binary_sensor", "switch"):
         hass.async_create_task(async_load_platform(hass, platform, DOMAIN, {}, config))

     setup_service_functions(hass, broker)
@@ -60,13 +60,13 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:

     # Set up DataUpdateCoordinators for the valve controller:
     init_valve_controller_tasks = []
-    for api, api_coro in [
+    for api, api_coro in (
         (API_SENSOR_PAIR_DUMP, client.sensor.pair_dump),
         (API_SYSTEM_DIAGNOSTICS, client.system.diagnostics),
         (API_SYSTEM_ONBOARD_SENSOR_STATUS, client.system.onboard_sensor_status),
         (API_VALVE_STATUS, client.valve.status),
         (API_WIFI_STATUS, client.wifi.status),
-    ]:
+    ):
         coordinator = hass.data[DOMAIN][DATA_COORDINATOR][entry.entry_id][
             api
         ] = GuardianDataUpdateCoordinator(
@@ -44,7 +44,7 @@ async def async_setup_entry(
     """Set up Guardian switches based on a config entry."""
     platform = entity_platform.async_get_current_platform()

-    for service_name, schema, method in [
+    for service_name, schema, method in (
         (SERVICE_DISABLE_AP, {}, "async_disable_ap"),
         (SERVICE_ENABLE_AP, {}, "async_enable_ap"),
         (SERVICE_PAIR_SENSOR, {vol.Required(CONF_UID): cv.string}, "async_pair_sensor"),
@@ -64,7 +64,7 @@ async def async_setup_entry(
             {vol.Required(CONF_UID): cv.string},
             "async_unpair_sensor",
         ),
-    ]:
+    ):
         platform.async_register_entity_service(service_name, schema, method)

     async_add_entities(
@@ -94,7 +94,7 @@ async def _migrate_old_unique_ids(
 def _async_import_options_from_data_if_missing(hass: HomeAssistant, entry: ConfigEntry):
     options = dict(entry.options)
     modified = 0
-    for importable_option in [ATTR_ACTIVITY, ATTR_DELAY_SECS]:
+    for importable_option in (ATTR_ACTIVITY, ATTR_DELAY_SECS):
         if importable_option not in entry.options and importable_option in entry.data:
             options[importable_option] = entry.data[importable_option]
             modified = 1
@@ -86,7 +86,7 @@ async def async_setup_platform(hass, config, async_add_entities, discovery_info=
     sensor_type = config.get(CONF_TYPE)
     name = config.get(CONF_NAME)

-    for template in [start, end]:
+    for template in (start, end):
         if template is not None:
             template.hass = hass

@@ -451,7 +451,7 @@ class Camera(HomeAccessory, PyhapCamera):
             _LOGGER.info("[%s] Stream already stopped", session_id)
             return True

-        for shutdown_method in ["close", "kill"]:
+        for shutdown_method in ("close", "kill"):
             _LOGGER.info("[%s] %s stream", session_id, shutdown_method)
             try:
                 await getattr(stream, shutdown_method)()
@@ -527,10 +527,10 @@ class HyperionLight(HyperionBaseLight):
         # color, effect), but this is not possible due to:
         # https://github.com/hyperion-project/hyperion.ng/issues/967
         if not bool(self._client.is_on()):
-            for component in [
+            for component in (
                 const.KEY_COMPONENTID_ALL,
                 const.KEY_COMPONENTID_LEDDEVICE,
-            ]:
+            ):
                 if not await self._client.async_send_set_component(
                     **{
                         const.KEY_COMPONENTSTATE: {
@@ -164,7 +164,7 @@ class ImageProcessingFaceEntity(ImageProcessingEntity):
                 f_co = face[ATTR_CONFIDENCE]
                 if f_co > confidence:
                     confidence = f_co
-                    for attr in [ATTR_NAME, ATTR_MOTION]:
+                    for attr in (ATTR_NAME, ATTR_MOTION):
                         if attr in face:
                             state = face[attr]
                             break
@@ -53,7 +53,7 @@ async def async_setup(hass, hass_config):
     for heater in heaters:
         await heater.update()

-    for platform in ["water_heater", "binary_sensor", "sensor", "climate"]:
+    for platform in ("water_heater", "binary_sensor", "sensor", "climate"):
         hass.async_create_task(
             async_load_platform(hass, platform, DOMAIN, {}, hass_config)
         )
@@ -224,7 +224,7 @@ class InsteonClimateEntity(InsteonEntity, ClimateEntity):
         """Register INSTEON update events."""
         await super().async_added_to_hass()
         await self._insteon_device.async_read_op_flags()
-        for group in [
+        for group in (
             COOLING,
             HEATING,
             DEHUMIDIFYING,
@@ -236,5 +236,5 @@ class InsteonClimateEntity(InsteonEntity, ClimateEntity):
             HUMIDITY,
             HUMIDITY_HIGH,
             HUMIDITY_LOW,
-        ]:
+        ):
             self._insteon_device.groups[group].subscribe(self.async_entity_update)
@@ -59,7 +59,7 @@ async def async_setup_entry(hass, entry):
             raise UpdateFailed from err

     init_data_update_tasks = []
-    for sensor_type, api_coro in [
+    for sensor_type, api_coro in (
         (TYPE_ALLERGY_FORECAST, client.allergens.extended),
         (TYPE_ALLERGY_INDEX, client.allergens.current),
         (TYPE_ALLERGY_OUTLOOK, client.allergens.outlook),
@@ -67,7 +67,7 @@ async def async_setup_entry(hass, entry):
         (TYPE_ASTHMA_INDEX, client.asthma.current),
         (TYPE_DISEASE_FORECAST, client.disease.extended),
         (TYPE_DISEASE_INDEX, client.disease.current),
-    ]:
+    ):
         coordinator = coordinators[sensor_type] = DataUpdateCoordinator(
             hass,
             LOGGER,
@@ -244,11 +244,11 @@ def _async_import_options_from_data_if_missing(
 ):
     options = dict(entry.options)
     modified = False
-    for importable_option in [
+    for importable_option in (
         CONF_IGNORE_STRING,
         CONF_SENSOR_STRING,
         CONF_RESTORE_LIGHT_STATE,
-    ]:
+    ):
         if importable_option not in entry.options and importable_option in entry.data:
             options[importable_option] = entry.data[importable_option]
             modified = True
@@ -368,7 +368,7 @@ class KonnectedView(HomeAssistantView):

         zone_data["device_id"] = device_id

-        for attr in ["state", "temp", "humi", "addr"]:
+        for attr in ("state", "temp", "humi", "addr"):
             value = payload.get(attr)
             handler = HANDLERS.get(attr)
             if value is not None and handler:
@@ -275,12 +275,12 @@ class LIFXManager:
         for discovery in self.discoveries:
             discovery.cleanup()

-        for service in [
+        for service in (
             SERVICE_LIFX_SET_STATE,
             SERVICE_EFFECT_STOP,
             SERVICE_EFFECT_PULSE,
             SERVICE_EFFECT_COLORLOOP,
-        ]:
+        ):
             self.hass.services.async_remove(LIFX_DOMAIN, service)

     def register_set_state(self):
@@ -360,7 +360,7 @@ class MqttFan(MqttEntity, FanEntity):
         if self._feature_preset_mode:
             self._supported_features |= SUPPORT_PRESET_MODE

-        for tpl_dict in [self._command_templates, self._value_templates]:
+        for tpl_dict in (self._command_templates, self._value_templates):
             for key, tpl in tpl_dict.items():
                 if tpl is None:
                     tpl_dict[key] = lambda value: value
@@ -237,7 +237,7 @@ class MqttHumidifier(MqttEntity, HumidifierEntity):
         )
         self._optimistic_mode = optimistic or self._topic[CONF_MODE_STATE_TOPIC] is None

-        for tpl_dict in [self._command_templates, self._value_templates]:
+        for tpl_dict in (self._command_templates, self._value_templates):
             for key, tpl in tpl_dict.items():
                 if tpl is None:
                     tpl_dict[key] = lambda value: value
@@ -202,7 +202,7 @@ class MVGLiveData:

             # now select the relevant data
             _nextdep = {ATTR_ATTRIBUTION: ATTRIBUTION}
-            for k in ["destination", "linename", "time", "direction", "product"]:
+            for k in ("destination", "linename", "time", "direction", "product"):
                 _nextdep[k] = _departure.get(k, "")
             _nextdep["time"] = int(_nextdep["time"])
             self.departures.append(_nextdep)
@@ -54,7 +54,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:

     # Remove air_quality entities from registry if they exist
     ent_reg = entity_registry.async_get(hass)
-    for sensor_type in ["sds", ATTR_SDS011, ATTR_SPS30]:
+    for sensor_type in ("sds", ATTR_SDS011, ATTR_SPS30):
         unique_id = f"{coordinator.unique_id}-{sensor_type}"
         if entity_id := ent_reg.async_get_entity_id(
             AIR_QUALITY_PLATFORM, DOMAIN, unique_id
@@ -191,7 +191,7 @@ class NetatmoOptionsFlowHandler(config_entries.OptionsFlow):
 def fix_coordinates(user_input):
     """Fix coordinates if they don't comply with the Netatmo API."""
     # Ensure coordinates have acceptable length for the Netatmo API
-    for coordinate in [CONF_LAT_NE, CONF_LAT_SW, CONF_LON_NE, CONF_LON_SW]:
+    for coordinate in (CONF_LAT_NE, CONF_LAT_SW, CONF_LON_NE, CONF_LON_SW):
         if len(str(user_input[coordinate]).split(".")[1]) < 7:
             user_input[coordinate] = user_input[coordinate] + 0.0000001

@@ -247,10 +247,10 @@ async def async_setup_entry(hass, entry, async_add_entities):
         _LOGGER.debug("Adding weather sensors %s", entities)
         return entities

-    for data_class_name in [
+    for data_class_name in (
         WEATHERSTATION_DATA_CLASS_NAME,
         HOMECOACH_DATA_CLASS_NAME,
-    ]:
+    ):
         await data_handler.register_data_class(data_class_name, data_class_name, None)
         data_class = data_handler.data.get(data_class_name)

@@ -92,11 +92,11 @@ async def async_setup_entry(hass, config_entry):
         await openuv.async_update_protection_data()
         async_dispatcher_send(hass, TOPIC_UPDATE)

-    for service, method in [
+    for service, method in (
         ("update_data", update_data),
         ("update_uv_index_data", update_uv_index_data),
         ("update_protection_data", update_protection_data),
-    ]:
+    ):
         hass.services.async_register(DOMAIN, service, method)

     return True
@@ -80,11 +80,11 @@ def async_create(
     """Generate a notification."""
     data = {
         key: value
-        for key, value in [
+        for key, value in (
             (ATTR_TITLE, title),
             (ATTR_MESSAGE, message),
             (ATTR_NOTIFICATION_ID, notification_id),
-        ]
+        )
         if value is not None
     }

@@ -509,13 +509,13 @@ class PlexMediaPlayer(MediaPlayerEntity):
     def extra_state_attributes(self):
         """Return the scene state attributes."""
         attributes = {}
-        for attr in [
+        for attr in (
             "media_content_rating",
             "media_library_title",
             "player_source",
             "media_summary",
             "username",
-        ]:
+        ):
             value = getattr(self, attr, None)
             if value:
                 attributes[attr] = value
@@ -129,13 +129,13 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
             raise UpdateFailed(err) from err

     controller_init_tasks = []
-    for api_category in [
+    for api_category in (
         DATA_PROGRAMS,
         DATA_PROVISION_SETTINGS,
         DATA_RESTRICTIONS_CURRENT,
         DATA_RESTRICTIONS_UNIVERSAL,
         DATA_ZONES,
-    ]:
+    ):
         coordinator = hass.data[DOMAIN][DATA_COORDINATOR][entry.entry_id][
             api_category
         ] = DataUpdateCoordinator(
@@ -123,7 +123,7 @@ async def async_setup_entry(
     alter_program_schema = {vol.Required(CONF_PROGRAM_ID): cv.positive_int}
     alter_zone_schema = {vol.Required(CONF_ZONE_ID): cv.positive_int}

-    for service_name, schema, method in [
+    for service_name, schema, method in (
         ("disable_program", alter_program_schema, "async_disable_program"),
         ("disable_zone", alter_zone_schema, "async_disable_zone"),
         ("enable_program", alter_program_schema, "async_enable_program"),
@@ -156,7 +156,7 @@ async def async_setup_entry(
         ),
         ("stop_zone", {vol.Required(CONF_ZONE_ID): cv.positive_int}, "async_stop_zone"),
         ("unpause_watering", {}, "async_unpause_watering"),
-    ]:
+    ):
         platform.async_register_entity_service(service_name, schema, method)

     controller = hass.data[DOMAIN][DATA_CONTROLLER][entry.entry_id]
@@ -262,7 +262,7 @@ class RMVDepartureData:
             elif journey["minutes"] < self._time_offset:
                 continue

-            for attr in ["direction", "departure_time", "product", "minutes"]:
+            for attr in ("direction", "departure_time", "product", "minutes"):
                 _nextdep[attr] = journey.get(attr, "")

             _nextdep["line"] = journey.get("number", "")
@@ -212,7 +212,7 @@ class IRobotVacuum(IRobotEntity, StateVacuumEntity):
         pos_x = pos_state.get("point", {}).get("x")
         pos_y = pos_state.get("point", {}).get("y")
         theta = pos_state.get("theta")
-        if all(item is not None for item in [pos_x, pos_y, theta]):
+        if all(item is not None for item in (pos_x, pos_y, theta)):
             position = f"({pos_x}, {pos_y}, {theta})"
         state_attrs[ATTR_POSITION] = position

@@ -261,7 +261,7 @@ async def async_setup_entry(hass, config_entry): # noqa: C901
             LOGGER.error("Error during service call: %s", err)
             return

-    for service, method, schema in [
+    for service, method, schema in (
         ("clear_notifications", clear_notifications, None),
         ("remove_pin", remove_pin, SERVICE_REMOVE_PIN_SCHEMA),
         ("set_pin", set_pin, SERVICE_SET_PIN_SCHEMA),
@@ -270,7 +270,7 @@ async def async_setup_entry(hass, config_entry): # noqa: C901
             set_system_properties,
             SERVICE_SET_SYSTEM_PROPERTIES_SCHEMA,
         ),
-    ]:
+    ):
         async_register_admin_service(hass, DOMAIN, service, method, schema=schema)

     config_entry.async_on_unload(config_entry.add_update_listener(async_reload_entry))
@@ -72,31 +72,31 @@ class SolarEdgeSensorFactory:
             ],
         ] = {"site_details": (SolarEdgeDetailsSensor, details)}

-        for key in [
+        for key in (
             "lifetime_energy",
             "energy_this_year",
             "energy_this_month",
             "energy_today",
             "current_power",
-        ]:
+        ):
             self.services[key] = (SolarEdgeOverviewSensor, overview)

-        for key in ["meters", "sensors", "gateways", "batteries", "inverters"]:
+        for key in ("meters", "sensors", "gateways", "batteries", "inverters"):
             self.services[key] = (SolarEdgeInventorySensor, inventory)

-        for key in ["power_consumption", "solar_power", "grid_power", "storage_power"]:
+        for key in ("power_consumption", "solar_power", "grid_power", "storage_power"):
             self.services[key] = (SolarEdgePowerFlowSensor, flow)

-        for key in ["storage_level"]:
+        for key in ("storage_level",):
             self.services[key] = (SolarEdgeStorageLevelSensor, flow)

-        for key in [
+        for key in (
             "purchased_power",
             "production_power",
             "feedin_power",
             "consumption_power",
             "selfconsumption_power",
-        ]:
+        ):
             self.services[key] = (SolarEdgeEnergyDetailsSensor, energy)

     def create_sensor(self, sensor_type: SolarEdgeSensor) -> SolarEdgeSensor:
@@ -190,10 +190,10 @@ class SonosDiscoveryManager:
             _LOGGER.debug("Adding new speaker: %s", speaker_info)
             speaker = SonosSpeaker(self.hass, soco, speaker_info)
             self.data.discovered[soco.uid] = speaker
-            for coordinator, coord_dict in [
+            for coordinator, coord_dict in (
                 (SonosAlarms, self.data.alarms),
                 (SonosFavorites, self.data.favorites),
-            ]:
+            ):
                 if soco.household_id not in coord_dict:
                     new_coordinator = coordinator(self.hass, soco.household_id)
                     new_coordinator.setup(soco)
@@ -332,7 +332,7 @@ async def async_setup(hass, config):
         attribute_templ = data.get(attribute)
         if attribute_templ:
             if any(
-                isinstance(attribute_templ, vtype) for vtype in [float, int, str]
+                isinstance(attribute_templ, vtype) for vtype in (float, int, str)
             ):
                 data[attribute] = attribute_templ
             else:
@@ -352,7 +352,7 @@ async def async_setup(hass, config):

         msgtype = service.service
         kwargs = dict(service.data)
-        for attribute in [
+        for attribute in (
             ATTR_MESSAGE,
             ATTR_TITLE,
             ATTR_URL,
@@ -360,7 +360,7 @@ async def async_setup(hass, config):
             ATTR_CAPTION,
             ATTR_LONGITUDE,
             ATTR_LATITUDE,
-        ]:
+        ):
             _render_template_attr(kwargs, attribute)
         _LOGGER.debug("New telegram message %s: %s", msgtype, kwargs)

@@ -42,14 +42,14 @@ async def async_setup_entry(
     sensors.extend(
         [
             ToonBoilerBinarySensor(coordinator, key=key)
-            for key in [
+            for key in (
                 "thermostat_info_ot_communication_error_0",
                 "thermostat_info_error_found_255",
                 "thermostat_info_burner_info_None",
                 "thermostat_info_burner_info_1",
                 "thermostat_info_burner_info_2",
                 "thermostat_info_burner_info_3",
-            ]
+            )
         ]
     )

@@ -89,7 +89,7 @@ async def async_setup_entry(
     sensors.extend(
         [
             ToonSolarDeviceSensor(coordinator, key=key)
-            for key in [
+            for key in (
                 "solar_value",
                 "solar_maximum",
                 "solar_produced",
@@ -98,7 +98,7 @@ async def async_setup_entry(
                 "power_usage_day_from_grid_usage",
                 "power_usage_day_to_grid_usage",
                 "power_usage_current_covered_by_solar",
-            ]
+            )
         ]
     )

@@ -136,7 +136,7 @@ def get_static_devices(config_data) -> SmartDevices:
     lights = []
     switches = []

-    for type_ in [CONF_LIGHT, CONF_SWITCH, CONF_STRIP, CONF_DIMMER]:
+    for type_ in (CONF_LIGHT, CONF_SWITCH, CONF_STRIP, CONF_DIMMER):
         for entry in config_data[type_]:
             host = entry["host"]
             try:
@@ -183,12 +183,12 @@ class UkTransportLiveBusTimeSensor(UkTransportSensor):
         """Return other details about the sensor state."""
         attrs = {}
         if self._data is not None:
-            for key in [
+            for key in (
                 ATTR_ATCOCODE,
                 ATTR_LOCALITY,
                 ATTR_STOP_NAME,
                 ATTR_REQUEST_TIME,
-            ]:
+            ):
                 attrs[key] = self._data.get(key)
             attrs[ATTR_NEXT_BUSES] = self._next_buses
         return attrs
@@ -472,7 +472,7 @@ class UniversalMediaPlayer(MediaPlayerEntity):
         if SERVICE_MEDIA_PREVIOUS_TRACK in self._cmds:
             flags |= SUPPORT_PREVIOUS_TRACK

-        if any(cmd in self._cmds for cmd in [SERVICE_VOLUME_UP, SERVICE_VOLUME_DOWN]):
+        if any(cmd in self._cmds for cmd in (SERVICE_VOLUME_UP, SERVICE_VOLUME_DOWN)):
             flags |= SUPPORT_VOLUME_STEP
         if SERVICE_VOLUME_SET in self._cmds:
             flags |= SUPPORT_VOLUME_SET
@@ -127,7 +127,7 @@ async def async_setup_entry(
     if not config_entry.options:
         new_data = config_entry.data.copy()
         options = {}
-        for key in [
+        for key in (
            CONF_INCL_FILTER,
            CONF_EXCL_FILTER,
            CONF_REALTIME,
@@ -136,7 +136,7 @@ async def async_setup_entry(
            CONF_AVOID_SUBSCRIPTION_ROADS,
            CONF_AVOID_FERRIES,
            CONF_UNITS,
-        ]:
+        ):
            if key in new_data:
                options[key] = new_data.pop(key)
            elif key in defaults:
@@ -122,12 +122,12 @@ class WashingtonStateTravelTimeSensor(WashingtonStateTransportSensor):
         """Return other details about the sensor state."""
         if self._data is not None:
             attrs = {ATTR_ATTRIBUTION: ATTRIBUTION}
-            for key in [
+            for key in (
                 ATTR_AVG_TIME,
                 ATTR_NAME,
                 ATTR_DESCRIPTION,
                 ATTR_TRAVEL_TIME_ID,
-            ]:
+            ):
                 attrs[key] = self._data.get(key)
             attrs[ATTR_TIME_UPDATED] = _parse_wsdot_timestamp(
                 self._data.get(ATTR_TIME_UPDATED)
@@ -194,7 +194,7 @@ class XiaomiGenericSwitch(XiaomiDevice, SwitchEntity):
             if not self._in_use:
                 self._load_power = 0

-        for key in [POWER_CONSUMED, ENERGY_CONSUMED]:
+        for key in (POWER_CONSUMED, ENERGY_CONSUMED):
             if key in data:
                 self._power_consumed = round(float(data[key]), 2)
                 break
@@ -181,7 +181,7 @@ async def async_setup_entry(hass, config_entry, async_add_entities):

         # The device has two switchable channels (mains and a USB port).
         # A switch device per channel will be created.
-        for channel_usb in [True, False]:
+        for channel_usb in (True, False):
             if channel_usb:
                 unique_id_ch = f"{unique_id}-USB"
             else:
@@ -283,7 +283,7 @@ def _get_zamg_stations():
         try:
             stations[row["synnr"]] = tuple(
                 float(row[coord].replace(",", "."))
-                for coord in ["breite_dezi", "länge_dezi"]
+                for coord in ("breite_dezi", "länge_dezi")
             )
         except KeyError:
             _LOGGER.error("ZAMG schema changed again, cannot autodetect station")
@@ -511,7 +511,7 @@ async def async_process_ha_core_config(hass: HomeAssistant, config: dict) -> Non

     if any(
         k in config
-        for k in [
+        for k in (
             CONF_LATITUDE,
             CONF_LONGITUDE,
             CONF_NAME,
@@ -520,7 +520,7 @@ async def async_process_ha_core_config(hass: HomeAssistant, config: dict) -> Non
             CONF_UNIT_SYSTEM,
             CONF_EXTERNAL_URL,
             CONF_INTERNAL_URL,
-        ]
+        )
     ):
         hac.config_source = SOURCE_YAML

@@ -599,7 +599,7 @@ class _ScriptRun:
         """Fire an event."""
         self._step_log(self._action.get(CONF_ALIAS, self._action[CONF_EVENT]))
         event_data = {}
-        for conf in [CONF_EVENT_DATA, CONF_EVENT_DATA_TEMPLATE]:
+        for conf in (CONF_EVENT_DATA, CONF_EVENT_DATA_TEMPLATE):
             if conf not in self._action:
                 continue

@@ -227,7 +227,7 @@ def async_prepare_call_from_config(

     service_data = {}

-    for conf in [CONF_SERVICE_DATA, CONF_SERVICE_DATA_TEMPLATE]:
+    for conf in (CONF_SERVICE_DATA, CONF_SERVICE_DATA_TEMPLATE):
         if conf not in config:
             continue
         try:
@@ -91,13 +91,13 @@ class UnitSystem:
         """Initialize the unit system object."""
         errors: str = ", ".join(
             UNIT_NOT_RECOGNIZED_TEMPLATE.format(unit, unit_type)
-            for unit, unit_type in [
+            for unit, unit_type in (
                 (temperature, TEMPERATURE),
                 (length, LENGTH),
                 (volume, VOLUME),
                 (mass, MASS),
                 (pressure, PRESSURE),
-            ]
+            )
             if not is_valid_unit(unit, unit_type)
         )

@@ -25,6 +25,7 @@ ignore = [
 jobs = 2
 init-hook='from pylint.config.find_default_config_files import find_default_config_files; from pathlib import Path; import sys; sys.path.append(str(Path(Path(list(find_default_config_files())[0]).parent, "pylint/plugins")))'
 load-plugins = [
+    "pylint.extensions.code_style",
     "pylint.extensions.typing",
     "pylint_strict_informational",
     "hass_constructor",
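
For readability, the single-line init-hook above unrolls roughly as follows (a sketch, not code from the commit itself): it appends the repository's pylint/plugins directory to sys.path so the in-repo plugins listed in load-plugins (such as hass_constructor) can be imported alongside the stock pylint extensions.

# Unrolled sketch of the init-hook above (not part of the commit itself).
import sys
from pathlib import Path

from pylint.config.find_default_config_files import find_default_config_files

# find_default_config_files() yields the config file(s) pylint would use;
# the first hit marks the repository root in this setup.
config_file = Path(list(find_default_config_files())[0])

# Make <repo root>/pylint/plugins importable so the custom plugins can be
# loaded by name via load-plugins.
sys.path.append(str(Path(config_file.parent, "pylint/plugins")))
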
@@ -68,6 +69,9 @@ good-names = [
 # inconsistent-return-statements - doesn't handle raise
 # too-many-ancestors - it's too strict.
 # wrong-import-order - isort guards this
+# ---
+# Enable once current issues are fixed:
+# consider-using-namedtuple-or-dataclass (Pylint CodeStyle extension)
 disable = [
     "format",
     "abstract-class-little-used",
@@ -90,6 +94,7 @@ disable = [
     "too-many-boolean-expressions",
     "unused-argument",
     "wrong-import-order",
+    "consider-using-namedtuple-or-dataclass",
 ]
 enable = [
     #"useless-suppression", # temporarily every now and then to clean them up
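
The commit also pre-emptively disables consider-using-namedtuple-or-dataclass, the other suggestion the CodeStyle extension ships, until existing offenders are cleaned up (see the comment added above). Roughly, that check flags ad-hoc groupings of structurally identical tuples that could become a NamedTuple or dataclass; an illustrative sketch, not code from this repository:

# Illustrative sketch of what consider-using-namedtuple-or-dataclass targets
# (hypothetical sensor table, not taken from Home Assistant).
from typing import NamedTuple

# Flagged style: every value is a positional tuple with the same shape.
SENSOR_TYPES = {
    "temperature": ("Temperature", "°C"),
    "humidity": ("Humidity", "%"),
}

# Suggested style: a small NamedTuple (or dataclass) gives the fields names.
class SensorDescription(NamedTuple):
    name: str
    unit: str

SENSOR_DESCRIPTIONS = {
    "temperature": SensorDescription("Temperature", "°C"),
    "humidity": SensorDescription("Humidity", "%"),
}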