Mirror of https://github.com/home-assistant/core.git, synced 2025-04-23 16:57:53 +00:00
Pylint 2.9.3 (#52972)
Co-authored-by: Marc Mueller <30130371+cdce8p@users.noreply.github.com>
This commit is contained in:
  parent db8db18b54
  commit fbad453c89
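Most of the hunks below exist to satisfy consider-using-dict-items, a check added in pylint 2.9: loops that iterate a dict and then index back into it are rewritten to unpack the key/value pair from .items(). A minimal sketch of the pattern, using made-up data rather than any of the touched modules:

# Hypothetical mapping, for illustration only.
sensors = {"temperature": "°C", "humidity": "%"}

# Before: flagged as consider-using-dict-items by pylint >= 2.9.
for sensor_type in sensors:
    print(sensor_type, sensors[sensor_type])

# After: unpack the key and the value directly.
for sensor_type, unit in sensors.items():
    print(sensor_type, unit)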
@@ -299,9 +299,9 @@ class AsusWrtRouter:
         )

         track_unknown = self._options.get(CONF_TRACK_UNKNOWN, DEFAULT_TRACK_UNKNOWN)

-        for device_mac in self._devices:
+        for device_mac, device in self._devices.items():
             dev_info = wrt_devices.get(device_mac)
-            self._devices[device_mac].update(dev_info, consider_home)
+            device.update(dev_info, consider_home)

         for device_mac, dev_info in wrt_devices.items():
             if device_mac in self._devices:

@@ -61,9 +61,9 @@ class ActivityStream(AugustSubscriberMixin):
         """Cleanup any debounces."""
         for debouncer in self._update_debounce.values():
             debouncer.async_cancel()
-        for house_id in self._schedule_updates:
-            if self._schedule_updates[house_id] is not None:
-                self._schedule_updates[house_id]()
+        for house_id, updater in self._schedule_updates.items():
+            if updater is not None:
+                updater()
                 self._schedule_updates[house_id] = None

     def get_latest_device_activity(self, device_id, activity_types):

@@ -112,10 +112,10 @@ async def async_setup_entry(hass, config_entry, async_add_entities):
         entities.append(AugustDoorBinarySensor(data, "door_open", door))

     for doorbell in data.doorbells:
-        for sensor_type in SENSOR_TYPES_DOORBELL:
+        for sensor_type, sensor in SENSOR_TYPES_DOORBELL.items():
             _LOGGER.debug(
                 "Adding doorbell sensor class %s for %s",
-                SENSOR_TYPES_DOORBELL[sensor_type][SENSOR_DEVICE_CLASS],
+                sensor[SENSOR_DEVICE_CLASS],
                 doorbell.device_name,
             )
             entities.append(AugustDoorbellBinarySensor(data, sensor_type, doorbell))

@@ -193,8 +193,8 @@ async def async_setup_platform(hass, config, async_add_entities, discovery_info=
         for player in target_players:
             await getattr(player, method["method"])(**params)

-    for service in SERVICE_TO_METHOD:
-        schema = SERVICE_TO_METHOD[service]["schema"]
+    for service, method in SERVICE_TO_METHOD.items():
+        schema = method["schema"]
         hass.services.async_register(
             DOMAIN, service, async_service_handler, schema=schema
         )
@@ -8,6 +8,7 @@ from bluepy.btle import BTLEException  # pylint: disable=import-error
 import decora  # pylint: disable=import-error
 import voluptuous as vol

+from homeassistant import util
 from homeassistant.components.light import (
     ATTR_BRIGHTNESS,
     PLATFORM_SCHEMA,

@@ -16,7 +17,6 @@ from homeassistant.components.light import (
 )
 from homeassistant.const import CONF_API_KEY, CONF_DEVICES, CONF_NAME
 import homeassistant.helpers.config_validation as cv
-import homeassistant.util as util

 _LOGGER = logging.getLogger(__name__)


@@ -108,8 +108,8 @@ TEMPLATE_SCHEMA = vol.Schema({str: TEMPLATE_DATA_SCHEMA})
 def validate_area(config: dict[str, Any]) -> dict[str, Any]:
     """Validate that template parameters are only used if area is using the relevant template."""
     conf_set = set()
-    for template in DEFAULT_TEMPLATES:
-        for conf in DEFAULT_TEMPLATES[template]:
+    for configs in DEFAULT_TEMPLATES.values():
+        for conf in configs:
             conf_set.add(conf)
     if config.get(CONF_TEMPLATE):
         for conf in DEFAULT_TEMPLATES[config[CONF_TEMPLATE]]:

@@ -148,8 +148,7 @@ async def async_setup(hass, config):
     sensors = []
     binary_sensors = []
     if eight.users:
-        for user in eight.users:
-            obj = eight.users[user]
+        for obj in eight.users.values():
             for sensor in SENSORS:
                 sensors.append(f"{obj.side}_{sensor}")
             binary_sensors.append(f"{obj.side}_presence")

@@ -86,7 +86,7 @@ async def async_setup_platform(hass, config, async_add_entities, discovery_info=
         """Handle devices which are added to Emby."""
         new_devices = []
         active_devices = []
-        for dev_id in emby.devices:
+        for dev_id, dev in emby.devices.items():
             active_devices.append(dev_id)
             if (
                 dev_id not in active_emby_devices

@@ -96,9 +96,7 @@ async def async_setup_platform(hass, config, async_add_entities, discovery_info=
                 active_emby_devices[dev_id] = new
                 new_devices.append(new)

-            elif (
-                dev_id in inactive_emby_devices and emby.devices[dev_id].state != "Off"
-            ):
+            elif dev_id in inactive_emby_devices and dev.state != "Off":
                 add = inactive_emby_devices.pop(dev_id)
                 active_emby_devices[dev_id] = add
                 _LOGGER.debug("Showing %s, item: %s", dev_id, add)

@@ -56,14 +56,13 @@ async def async_setup_entry(hass, config_entry, async_add_entities):
     name = data[NAME]

     entities = []
-    for condition in SENSORS:
-        entity_name = ""
+    for condition, sensor in SENSORS.items():
         if (
             condition == "inverters"
             and coordinator.data.get("inverters_production") is not None
         ):
             for inverter in coordinator.data["inverters_production"]:
-                entity_name = f"{name} {SENSORS[condition][0]} {inverter}"
+                entity_name = f"{name} {sensor[0]} {inverter}"
                 split_name = entity_name.split(" ")
                 serial_number = split_name[-1]
                 entities.append(

@@ -73,8 +72,8 @@ async def async_setup_entry(hass, config_entry, async_add_entities):
                         name,
                         config_entry.unique_id,
                         serial_number,
-                        SENSORS[condition][1],
-                        SENSORS[condition][2],
+                        sensor[1],
+                        sensor[2],
                         coordinator,
                     )
                 )

@@ -83,7 +82,7 @@ async def async_setup_entry(hass, config_entry, async_add_entities):
             if isinstance(data, str) and "not available" in data:
                 continue

-            entity_name = f"{name} {SENSORS[condition][0]}"
+            entity_name = f"{name} {sensor[0]}"
             entities.append(
                 Envoy(
                     condition,

@@ -91,8 +90,8 @@ async def async_setup_entry(hass, config_entry, async_add_entities):
                     name,
                     config_entry.unique_id,
                     None,
-                    SENSORS[condition][1],
-                    SENSORS[condition][2],
+                    sensor[1],
+                    sensor[2],
                     coordinator,
                 )
             )

@@ -28,7 +28,7 @@ from homeassistant.components.weather import (
 )
 from homeassistant.const import CONF_LATITUDE, CONF_LONGITUDE, CONF_NAME, TEMP_CELSIUS
 import homeassistant.helpers.config_validation as cv
-import homeassistant.util.dt as dt
+from homeassistant.util import dt

 CONF_FORECAST = "forecast"
 CONF_ATTRIBUTION = "Data provided by Environment Canada"
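Several import hunks in this commit (homeassistant.util.dt above, the zigpy.zcl.clusters modules further down) answer the consider-using-from-import check, also new in pylint 2.9: `import package.module as module` is spelled as a from-import instead. A tiny stdlib sketch of the two equivalent spellings:

# Old spelling, now flagged as consider-using-from-import.
import os.path as path

# Preferred spelling; both bind the same module object.
from os import path

print(path.join("config", "configuration.yaml"))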
@@ -26,10 +26,6 @@ async def async_setup_entry(
     )


-# Pylint gets confused with the EsphomeEntity generics -> let mypy handle member checking
-# pylint: disable=no-member
-
-
 class EsphomeBinarySensor(
     EsphomeEntity[BinarySensorInfo, BinarySensorState], BinarySensorEntity
 ):

@@ -32,10 +32,6 @@ async def async_setup_entry(
     )


-# Pylint gets confused with the EsphomeEntity generics -> let mypy handle member checking
-# pylint: disable=no-member
-
-
 class EsphomeCamera(Camera, EsphomeBaseEntity[CameraInfo, CameraState]):
     """A camera implementation for ESPHome."""


@@ -149,8 +149,7 @@ _PRESETS: EsphomeEnumMapper[ClimatePreset, str] = EsphomeEnumMapper(


 # https://github.com/PyCQA/pylint/issues/3150 for all @esphome_state_property
-# Pylint gets confused with the EsphomeEntity generics -> let mypy handle member checking
-# pylint: disable=invalid-overridden-method,no-member
+# pylint: disable=invalid-overridden-method


 class EsphomeClimateEntity(EsphomeEntity[ClimateInfo, ClimateState], ClimateEntity):

@@ -40,8 +40,7 @@ async def async_setup_entry(


 # https://github.com/PyCQA/pylint/issues/3150 for all @esphome_state_property
-# Pylint gets confused with the EsphomeEntity generics -> let mypy handle member checking
-# pylint: disable=invalid-overridden-method,no-member
+# pylint: disable=invalid-overridden-method


 class EsphomeCover(EsphomeEntity[CoverInfo, CoverState], CoverEntity):

@@ -58,8 +58,7 @@ _FAN_DIRECTIONS: EsphomeEnumMapper[FanDirection, str] = EsphomeEnumMapper(


 # https://github.com/PyCQA/pylint/issues/3150 for all @esphome_state_property
-# Pylint gets confused with the EsphomeEntity generics -> let mypy handle member checking
-# pylint: disable=invalid-overridden-method,no-member
+# pylint: disable=invalid-overridden-method


 class EsphomeFan(EsphomeEntity[FanInfo, FanState], FanEntity):

@@ -50,8 +50,7 @@ async def async_setup_entry(


 # https://github.com/PyCQA/pylint/issues/3150 for all @esphome_state_property
-# Pylint gets confused with the EsphomeEntity generics -> let mypy handle member checking
-# pylint: disable=invalid-overridden-method,no-member
+# pylint: disable=invalid-overridden-method


 class EsphomeLight(EsphomeEntity[LightInfo, LightState], LightEntity):

@@ -36,8 +36,7 @@ async def async_setup_entry(


 # https://github.com/PyCQA/pylint/issues/3150 for all @esphome_state_property
-# Pylint gets confused with the EsphomeEntity generics -> let mypy handle member checking
-# pylint: disable=invalid-overridden-method,no-member
+# pylint: disable=invalid-overridden-method


 class EsphomeNumber(EsphomeEntity[NumberInfo, NumberState], NumberEntity):

@@ -60,8 +60,7 @@ async def async_setup_entry(


 # https://github.com/PyCQA/pylint/issues/3150 for all @esphome_state_property
-# Pylint gets confused with the EsphomeEntity generics -> let mypy handle member checking
-# pylint: disable=invalid-overridden-method,no-member
+# pylint: disable=invalid-overridden-method


 _STATE_CLASSES: EsphomeEnumMapper[SensorStateClass, str | None] = EsphomeEnumMapper(

@@ -29,8 +29,7 @@ async def async_setup_entry(


 # https://github.com/PyCQA/pylint/issues/3150 for all @esphome_state_property
-# Pylint gets confused with the EsphomeEntity generics -> let mypy handle member checking
-# pylint: disable=invalid-overridden-method,no-member
+# pylint: disable=invalid-overridden-method


 class EsphomeSwitch(EsphomeEntity[SwitchInfo, SwitchState], SwitchEntity):
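The esphome hunks above delete module-level `# pylint: disable=no-member` pragmas that are no longer needed after the upgrade, while keeping the invalid-overridden-method suppression. When a suppression is still required, it can be scoped to a single line rather than a whole module; a small illustrative sketch, not taken from any of the touched files:

import logging

_LOGGER = logging.getLogger(__name__)


def refresh(update):
    """Run an update callback and log any failure instead of raising."""
    try:
        update()
    # A line-scoped pragma only silences broad-except for this handler.
    except Exception:  # pylint: disable=broad-except
        _LOGGER.exception("Update failed")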
@@ -47,7 +47,7 @@ def _setup_entry(hass: HomeAssistant, entry: ConfigEntry):
         flume_devices = FlumeDeviceList(flume_auth, http_session=http_session)
     except RequestException as ex:
         raise ConfigEntryNotReady from ex
-    except Exception as ex:  # pylint: disable=broad-except
+    except Exception as ex:
         raise ConfigEntryAuthFailed from ex

     return flume_auth, flume_devices, http_session

@@ -50,25 +50,23 @@ async def async_setup_entry(
         )
     )

-    for sensor_key in CONNECTION_SENSORS:
-        entities.append(
-            FreeboxSensor(router, sensor_key, CONNECTION_SENSORS[sensor_key])
-        )
+    for sensor_key, sensor in CONNECTION_SENSORS.items():
+        entities.append(FreeboxSensor(router, sensor_key, sensor))

-    for sensor_key in CALL_SENSORS:
-        entities.append(FreeboxCallSensor(router, sensor_key, CALL_SENSORS[sensor_key]))
+    for sensor_key, sensor in CALL_SENSORS.items():
+        entities.append(FreeboxCallSensor(router, sensor_key, sensor))

     _LOGGER.debug("%s - %s - %s disk(s)", router.name, router.mac, len(router.disks))
     for disk in router.disks.values():
         for partition in disk["partitions"]:
-            for sensor_key in DISK_PARTITION_SENSORS:
+            for sensor_key, sensor in DISK_PARTITION_SENSORS.items():
                 entities.append(
                     FreeboxDiskSensor(
                         router,
                         disk,
                         partition,
                         sensor_key,
-                        DISK_PARTITION_SENSORS[sensor_key],
+                        sensor,
                     )
                 )


@@ -262,8 +262,8 @@ def wifi_entities_list(
         networks[i] = ssid

     return [
-        FritzBoxWifiSwitch(fritzbox_tools, device_friendly_name, net, networks[net])
-        for net in networks
+        FritzBoxWifiSwitch(fritzbox_tools, device_friendly_name, net, network_name)
+        for net, network_name in networks.items()
     ]


@@ -428,8 +428,8 @@ class FritzBoxPortSwitch(FritzBoxBaseSwitch, SwitchEntity):
             "NewPortMappingDescription": "description",
         }

-        for key in attributes_dict:
-            self._attributes[attributes_dict[key]] = self.port_mapping[key]
+        for key, attr in attributes_dict.items():
+            self._attributes[attr] = self.port_mapping[key]

     async def _async_handle_port_switch_on_off(self, turn_on: bool) -> bool:


@@ -25,9 +25,9 @@ async def async_setup_entry(hass, config_entry, async_add_entities):
                         client,
                         name,
                         disk["mnt_point"],
-                        SENSOR_TYPES[sensor_type][1],
+                        sensor_details[1],
                         sensor_type,
-                        SENSOR_TYPES[sensor_type],
+                        sensor_details,
                     )
                 )
         elif sensor_details[0] == "sensors":

@@ -39,9 +39,9 @@ async def async_setup_entry(hass, config_entry, async_add_entities):
                         client,
                         name,
                         sensor["label"],
-                        SENSOR_TYPES[sensor_type][1],
+                        sensor_details[1],
                         sensor_type,
-                        SENSOR_TYPES[sensor_type],
+                        sensor_details,
                     )
                 )
         elif client.api.data[sensor_details[0]]:

@@ -50,9 +50,9 @@ async def async_setup_entry(hass, config_entry, async_add_entities):
                     client,
                     name,
                     "",
-                    SENSOR_TYPES[sensor_type][1],
+                    sensor_details[1],
                     sensor_type,
-                    SENSOR_TYPES[sensor_type],
+                    sensor_details,
                 )
             )


@@ -223,10 +223,10 @@ def setup(hass, config):

     def check_correct_scopes(token_file):
         """Check for the correct scopes in file."""
-        tokenfile = open(token_file).read()
-        if "readonly" in tokenfile:
-            _LOGGER.warning("Please re-authenticate with Google")
-            return False
+        with open(token_file) as tokenfile:
+            if "readonly" in tokenfile.read():
+                _LOGGER.warning("Please re-authenticate with Google")
+                return False
         return True

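The token-scope hunk above moves the open() call into a with block, which closes the file deterministically and satisfies the consider-using-with check (added in pylint 2.8). A stand-alone sketch of the same shape, with a hypothetical helper name:

def has_readonly_scope(token_file):
    """Return True if the stored token only grants read-only access."""
    # The context manager closes the handle even if read() raises.
    with open(token_file) as handle:
        return "readonly" in handle.read()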
@@ -214,8 +214,8 @@ async def handle_devices_execute(hass, data, payload):

     execute_results = await asyncio.gather(
         *[
-            _entity_execute(entities[entity_id], data, executions[entity_id])
-            for entity_id in executions
+            _entity_execute(entities[entity_id], data, execution)
+            for entity_id, execution in executions.items()
         ]
     )


@@ -59,7 +59,9 @@ def setup(hass: HomeAssistant, yaml_config: dict[str, Any]):
         service_principal_path
     )

-    topic_path = publisher.topic_path(project_id, topic_name)
+    topic_path = publisher.topic_path(  # pylint: disable=no-member
+        project_id, topic_name
+    )

     encoder = DateTimeJSONEncoder()


@@ -27,7 +27,7 @@ from homeassistant.helpers import location
 import homeassistant.helpers.config_validation as cv
 from homeassistant.helpers.entity_platform import AddEntitiesCallback
 from homeassistant.helpers.typing import DiscoveryInfoType
-import homeassistant.util.dt as dt
+from homeassistant.util import dt

 _LOGGER = logging.getLogger(__name__)


@@ -141,9 +141,9 @@ class Camera(HomeAccessory, PyhapCamera):
     def __init__(self, hass, driver, name, entity_id, aid, config):
         """Initialize a Camera accessory object."""
         self._ffmpeg = hass.data[DATA_FFMPEG]
-        for config_key in CONFIG_DEFAULTS:
+        for config_key, conf in CONFIG_DEFAULTS.items():
             if config_key not in config:
-                config[config_key] = CONFIG_DEFAULTS[config_key]
+                config[config_key] = conf

         max_fps = config[CONF_MAX_FPS]
         max_width = config[CONF_MAX_WIDTH]

@@ -160,8 +160,8 @@ class SensorManager:
                 )
             )

-        for platform in to_add:
-            self._component_add_entities[platform](to_add[platform])
+        for platform, value in to_add.items():
+            self._component_add_entities[platform](value)


 class GenericHueSensor(GenericHueDevice, entity.Entity):

@@ -38,7 +38,7 @@ def log_rate_limits(hass, target, resp, level=20):
         rate_limits["successful"],
         rate_limits["maximum"],
         rate_limits["errors"],
-        str(resetsAtTime).split(".")[0],
+        str(resetsAtTime).split(".", maxsplit=1)[0],
     )


@@ -384,7 +384,7 @@ class ItunesDevice(MediaPlayerEntity):

     def media_next_track(self):
         """Send media_next command to media player."""
-        response = self.client.next()  # pylint: disable=not-callable
+        response = self.client.next()
         self.update_state(response)

     def media_previous_track(self):

@@ -185,7 +185,7 @@ class KNXClimate(KnxEntity, ClimateEntity):
             f"{self._device.temperature.group_address_state}_"
             f"{self._device.target_temperature.group_address_state}_"
             f"{self._device.target_temperature.group_address}_"
-            f"{self._device._setpoint_shift.group_address}"  # pylint: disable=protected-access
+            f"{self._device._setpoint_shift.group_address}"
         )

     async def async_update(self) -> None:

@@ -142,7 +142,6 @@ class KulerskyLight(LightEntity):
         try:
             if not self._available:
                 await self._light.connect()
-            # pylint: disable=invalid-name
             rgbw = await self._light.get_color()
         except pykulersky.PykulerskyException as exc:
             if self._available:

@@ -139,7 +139,8 @@ class LgTVDevice(MediaPlayerEntity):
                 self._sources = dict(zip(channel_names, channel_list))
                 # sort source names by the major channel number
                 source_tuples = [
-                    (k, self._sources[k].find("major").text) for k in self._sources
+                    (k, source.find("major").text)
+                    for k, source in self._sources.items()
                 ]
                 sorted_sources = sorted(
                     source_tuples, key=lambda channel: int(channel[1])

@@ -189,7 +189,7 @@ class Life360Scanner:
                 {
                     ATTR_ENTITY_ID: f"{DEVICE_TRACKER_DOMAIN}.{dev_id}",
                     ATTR_WAIT: str(last_seen - (prev_seen or self._started)).split(
-                        "."
+                        ".", maxsplit=1
                     )[0],
                 },
             )

@@ -81,7 +81,7 @@ class LuftDatenFlowHandler(config_entries.ConfigFlow, domain=DOMAIN):
             return self._show_form({CONF_SENSOR_ID: "invalid_sensor"})

         available_sensors = [
-            x for x in luftdaten.values if luftdaten.values[x] is not None
+            x for x, x_values in luftdaten.values.items() if x_values is not None
         ]

         if available_sensors:
@@ -79,7 +79,7 @@ def log_rate_limits(hass, device_name, resp, level=logging.INFO):
         rate_limits[ATTR_PUSH_RATE_LIMITS_SUCCESSFUL],
         rate_limits[ATTR_PUSH_RATE_LIMITS_MAXIMUM],
         rate_limits[ATTR_PUSH_RATE_LIMITS_ERRORS],
-        str(resetsAtTime).split(".")[0],
+        str(resetsAtTime).split(".", maxsplit=1)[0],
     )


@@ -8,7 +8,7 @@ import voluptuous as vol

 from homeassistant.components.fan import SUPPORT_DIRECTION, SUPPORT_SET_SPEED, FanEntity
 from homeassistant.config_entries import ConfigEntry
-import homeassistant.helpers.entity_platform as entity_platform
+from homeassistant.helpers import entity_platform
 from homeassistant.helpers.entity_platform import AddEntitiesCallback
 from homeassistant.helpers.typing import HomeAssistantType
 from homeassistant.util.percentage import (

@@ -12,7 +12,7 @@ from homeassistant.components.light import (
     LightEntity,
 )
 from homeassistant.config_entries import ConfigEntry
-import homeassistant.helpers.entity_platform as entity_platform
+from homeassistant.helpers import entity_platform
 from homeassistant.helpers.entity_platform import AddEntitiesCallback
 from homeassistant.helpers.typing import HomeAssistantType
 from homeassistant.util.percentage import (

@@ -163,6 +163,8 @@ class MySensorsLight(mysensors.device.MySensorsEntity, LightEntity):

         if self.assumed_state:
             # optimistically assume that light has changed state
+            # pylint: disable=no-value-for-parameter
+            # https://github.com/PyCQA/pylint/issues/4546
             self._hs = color_util.color_RGB_to_hs(*rgb)  # type: ignore[assignment]
             self._white = white
             self._values[self.value_type] = hex_color

@@ -87,11 +87,9 @@ class NetatmoScheduleSelect(NetatmoBase, SelectEntity):

         self._attr_unique_id = f"{self._home_id}-schedule-select"

-        self._attr_current_option = (
-            self._data._get_selected_schedule(  # pylint: disable=protected-access
-                home_id=self._home_id
-            ).get("name")
-        )
+        self._attr_current_option = self._data._get_selected_schedule(
+            home_id=self._home_id
+        ).get("name")
         self._attr_options = options

     async def async_added_to_hass(self) -> None:

@@ -22,10 +22,10 @@ def setup_platform(hass, config, add_entities, discovery_info=None):

     ombi = hass.data[DOMAIN]["instance"]

-    for sensor in SENSOR_TYPES:
+    for sensor, sensor_val in SENSOR_TYPES.items():
         sensor_label = sensor
-        sensor_type = SENSOR_TYPES[sensor]["type"]
-        sensor_icon = SENSOR_TYPES[sensor]["icon"]
+        sensor_type = sensor_val["type"]
+        sensor_icon = sensor_val["icon"]
         sensors.append(OmbiSensor(sensor_label, sensor_type, ombi, sensor_icon))

     add_entities(sensors, True)

@@ -88,8 +88,7 @@ class OpenHardwareMonitorDevice(SensorEntity):
         array = self._data.data[OHM_CHILDREN]
         _attributes = {}

-        for path_index in range(0, len(self.path)):
-            path_number = self.path[path_index]
+        for path_index, path_number in enumerate(self.path):
             values = array[path_number]

             if path_index == len(self.path) - 1:

@@ -83,9 +83,9 @@ class ZWaveDeviceEntityValues:
             return

         # Go through the possible values for this entity defined by the schema.
-        for name in self._values:
+        for name, name_value in self._values.items():
             # Skip if it's already been added.
-            if self._values[name] is not None:
+            if name_value is not None:
                 continue
             # Skip if the value doesn't match the schema.
             if not check_value_schema(value, self._schema[const.DISC_VALUES][name]):

@@ -256,14 +256,18 @@ class PingDataSubProcess(PingData):
             )

             if sys.platform == "win32":
-                match = WIN32_PING_MATCHER.search(str(out_data).split("\n")[-1])
+                match = WIN32_PING_MATCHER.search(
+                    str(out_data).rsplit("\n", maxsplit=1)[-1]
+                )
                 rtt_min, rtt_avg, rtt_max = match.groups()
                 return {"min": rtt_min, "avg": rtt_avg, "max": rtt_max, "mdev": ""}
             if "max/" not in str(out_data):
-                match = PING_MATCHER_BUSYBOX.search(str(out_data).split("\n")[-1])
+                match = PING_MATCHER_BUSYBOX.search(
+                    str(out_data).rsplit("\n", maxsplit=1)[-1]
+                )
                 rtt_min, rtt_avg, rtt_max = match.groups()
                 return {"min": rtt_min, "avg": rtt_avg, "max": rtt_max, "mdev": ""}
-            match = PING_MATCHER.search(str(out_data).split("\n")[-1])
+            match = PING_MATCHER.search(str(out_data).rsplit("\n", maxsplit=1)[-1])
             rtt_min, rtt_avg, rtt_max, rtt_mdev = match.groups()
             return {"min": rtt_min, "avg": rtt_avg, "max": rtt_max, "mdev": rtt_mdev}
         except asyncio.TimeoutError:
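The ping hunk above, like the two log_rate_limits changes earlier, switches to split()/rsplit() with maxsplit=1 for the use-maxsplit-arg check new in pylint 2.9: when only the first or last field is needed there is no reason to split the whole string. A short sketch with made-up sample strings:

# Illustrative data only; not real ping output.
out_data = "64 bytes from 10.0.0.1\nround-trip min/avg/max = 1.2/3.4/5.6 ms"

# Only the first field is wanted, so stop after one split.
timestamp = "2021-07-06 12:00:00.123456".split(".", maxsplit=1)[0]

# Only the last line is wanted, so split once from the right.
summary = out_data.rsplit("\n", maxsplit=1)[-1]

print(timestamp, "|", summary)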
@@ -230,10 +230,10 @@ class Proximity(Entity):
         closest_device: str = None
         dist_to_zone: float = None

-        for device in distances_to_zone:
-            if not dist_to_zone or distances_to_zone[device] < dist_to_zone:
+        for device, zone in distances_to_zone.items():
+            if not dist_to_zone or zone < dist_to_zone:
                 closest_device = device
-                dist_to_zone = distances_to_zone[device]
+                dist_to_zone = zone

         # If the closest device is one of the other devices.
         if closest_device != entity:

@@ -30,8 +30,8 @@ async def async_setup_entry(hass, config_entry, async_add_entities):
     sensors = []

     for device_type in ("doorbots", "authorized_doorbots", "stickup_cams"):
-        for sensor_type in SENSOR_TYPES:
-            if device_type not in SENSOR_TYPES[sensor_type][1]:
+        for sensor_type, sensor in SENSOR_TYPES.items():
+            if device_type not in sensor[1]:
                 continue

             for device in devices[device_type]:

@@ -19,19 +19,15 @@ async def async_setup_entry(hass, config_entry, async_add_entities):
     sensors = []

     for device_type in ("chimes", "doorbots", "authorized_doorbots", "stickup_cams"):
-        for sensor_type in SENSOR_TYPES:
-            if device_type not in SENSOR_TYPES[sensor_type][1]:
+        for sensor_type, sensor in SENSOR_TYPES.items():
+            if device_type not in sensor[1]:
                 continue

             for device in devices[device_type]:
                 if device_type == "battery" and device.battery_life is None:
                     continue

-                sensors.append(
-                    SENSOR_TYPES[sensor_type][6](
-                        config_entry.entry_id, device, sensor_type
-                    )
-                )
+                sensors.append(sensor[6](config_entry.entry_id, device, sensor_type))

     async_add_entities(sensors)


@@ -26,9 +26,10 @@ _LOGGER = logging.getLogger(__name__)

 def kill_raspistill(*args):
     """Kill any previously running raspistill process.."""
-    subprocess.Popen(
+    with subprocess.Popen(
         ["killall", "raspistill"], stdout=subprocess.DEVNULL, stderr=subprocess.STDOUT
-    )
+    ):
+        pass


 def setup_platform(hass, config, add_entities, discovery_info=None):

@@ -116,7 +117,10 @@ class RaspberryCamera(Camera):
             cmd_args.append("-a")
             cmd_args.append(str(device_info[CONF_OVERLAY_TIMESTAMP]))

-        subprocess.Popen(cmd_args, stdout=subprocess.DEVNULL, stderr=subprocess.STDOUT)
+        with subprocess.Popen(
+            cmd_args, stdout=subprocess.DEVNULL, stderr=subprocess.STDOUT
+        ):
+            pass

     def camera_image(self):
         """Return raspistill image response."""
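Both raspistill hunks above wrap subprocess.Popen in a with block so the process object is cleaned up deterministically, which is what consider-using-with asks for. A hedged, self-contained sketch of the pattern; the command is only an example:

import subprocess


def run_silenced(cmd):
    """Run a command, discard its output, and let the context manager clean up."""
    # Popen's context manager closes the streams and waits for the process on exit.
    with subprocess.Popen(
        cmd, stdout=subprocess.DEVNULL, stderr=subprocess.STDOUT
    ):
        pass


run_silenced(["echo", "hello"])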
@@ -96,8 +96,7 @@ async def async_setup_entry(hass, config_entry, async_add_entities):
     for i in range(len(data.active_voltage)):
         devices.append(SenseVoltageSensor(data, i, sense_monitor_id))

-    for type_id in TRENDS_SENSOR_TYPES:
-        typ = TRENDS_SENSOR_TYPES[type_id]
+    for type_id, typ in TRENDS_SENSOR_TYPES.items():
         for var in SENSOR_VARIANTS:
             name = typ.name
             sensor_type = typ.sensor_type

@@ -78,7 +78,6 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
         ),
     }

-    # pylint: disable=abstract-class-instantiated
     sentry_sdk.init(
         dsn=entry.data[CONF_DSN],
         environment=entry.options.get(CONF_ENVIRONMENT),

@@ -22,8 +22,7 @@ from homeassistant.components.notify import (
 )
 from homeassistant.const import ATTR_ICON, CONF_API_KEY, CONF_ICON, CONF_USERNAME
 from homeassistant.core import HomeAssistant, callback
-from homeassistant.helpers import aiohttp_client, config_validation as cv
-import homeassistant.helpers.template as template
+from homeassistant.helpers import aiohttp_client, config_validation as cv, template
 from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType

 _LOGGER = logging.getLogger(__name__)

@@ -149,38 +149,38 @@ async def async_setup_entry(hass, config_entry, async_add_entities):
     for service_location in smappee_base.smappee.service_locations.values():
         # Add all basic sensors (realtime values and aggregators)
         # Some are available in local only env
-        for sensor in TREND_SENSORS:
-            if not service_location.local_polling or TREND_SENSORS[sensor][5]:
+        for sensor, attributes in TREND_SENSORS.items():
+            if not service_location.local_polling or attributes[5]:
                 entities.append(
                     SmappeeSensor(
                         smappee_base=smappee_base,
                         service_location=service_location,
                         sensor=sensor,
-                        attributes=TREND_SENSORS[sensor],
+                        attributes=attributes,
                     )
                 )

         if service_location.has_reactive_value:
-            for reactive_sensor in REACTIVE_SENSORS:
+            for reactive_sensor, attributes in REACTIVE_SENSORS.items():
                 entities.append(
                     SmappeeSensor(
                         smappee_base=smappee_base,
                         service_location=service_location,
                         sensor=reactive_sensor,
-                        attributes=REACTIVE_SENSORS[reactive_sensor],
+                        attributes=attributes,
                     )
                 )

         # Add solar sensors (some are available in local only env)
         if service_location.has_solar_production:
-            for sensor in SOLAR_SENSORS:
-                if not service_location.local_polling or SOLAR_SENSORS[sensor][5]:
+            for sensor, attributes in SOLAR_SENSORS.items():
+                if not service_location.local_polling or attributes[5]:
                     entities.append(
                         SmappeeSensor(
                             smappee_base=smappee_base,
                             service_location=service_location,
                             sensor=sensor,
-                            attributes=SOLAR_SENSORS[sensor],
+                            attributes=attributes,
                         )
                     )


@@ -240,7 +240,7 @@ class SnmpSwitch(SwitchEntity):
             await self._set(command)
         # User set vartype Null, command must be an empty string
         elif self._vartype == "Null":
-            await self._set(Null)("")
+            await self._set("")
         # user did not set vartype but command is digit: defaulting to Integer
         # or user did set vartype
         else:

@@ -78,8 +78,8 @@ async def async_setup_platform(
     device_data = hass.data[DOMAIN][DATA_DEVICE]

     async_add_entities(
-        SwitcherSensorEntity(device_data, attribute, SENSORS[attribute])
-        for attribute in SENSORS
+        SwitcherSensorEntity(device_data, attribute, sensor)
+        for attribute, sensor in SENSORS.items()
     )

@@ -32,17 +32,13 @@ async def async_setup_entry(
         | SynoDSMUpgradeBinarySensor
         | SynoDSMStorageBinarySensor
     ] = [
-        SynoDSMSecurityBinarySensor(
-            api, sensor_type, SECURITY_BINARY_SENSORS[sensor_type], coordinator
-        )
-        for sensor_type in SECURITY_BINARY_SENSORS
+        SynoDSMSecurityBinarySensor(api, sensor_type, sensor, coordinator)
+        for sensor_type, sensor in SECURITY_BINARY_SENSORS.items()
     ]

     entities += [
-        SynoDSMUpgradeBinarySensor(
-            api, sensor_type, UPGRADE_BINARY_SENSORS[sensor_type], coordinator
-        )
-        for sensor_type in UPGRADE_BINARY_SENSORS
+        SynoDSMUpgradeBinarySensor(api, sensor_type, sensor, coordinator)
+        for sensor_type, sensor in UPGRADE_BINARY_SENSORS.items()
     ]

     # Handle all disks

@@ -52,11 +48,11 @@ async def async_setup_entry(
             SynoDSMStorageBinarySensor(
                 api,
                 sensor_type,
-                STORAGE_DISK_BINARY_SENSORS[sensor_type],
+                sensor,
                 coordinator,
                 disk,
             )
-            for sensor_type in STORAGE_DISK_BINARY_SENSORS
+            for sensor_type, sensor in STORAGE_DISK_BINARY_SENSORS.items()
         ]

     async_add_entities(entities)

@@ -46,10 +46,8 @@ async def async_setup_entry(
     coordinator = data[COORDINATOR_CENTRAL]

     entities: list[SynoDSMUtilSensor | SynoDSMStorageSensor | SynoDSMInfoSensor] = [
-        SynoDSMUtilSensor(
-            api, sensor_type, UTILISATION_SENSORS[sensor_type], coordinator
-        )
-        for sensor_type in UTILISATION_SENSORS
+        SynoDSMUtilSensor(api, sensor_type, sensor, coordinator)
+        for sensor_type, sensor in UTILISATION_SENSORS.items()
     ]

     # Handle all volumes

@@ -59,11 +57,11 @@ async def async_setup_entry(
             SynoDSMStorageSensor(
                 api,
                 sensor_type,
-                STORAGE_VOL_SENSORS[sensor_type],
+                sensor,
                 coordinator,
                 volume,
             )
-            for sensor_type in STORAGE_VOL_SENSORS
+            for sensor_type, sensor in STORAGE_VOL_SENSORS.items()
         ]

     # Handle all disks

@@ -73,18 +71,16 @@ async def async_setup_entry(
             SynoDSMStorageSensor(
                 api,
                 sensor_type,
-                STORAGE_DISK_SENSORS[sensor_type],
+                sensor,
                 coordinator,
                 disk,
             )
-            for sensor_type in STORAGE_DISK_SENSORS
+            for sensor_type, sensor in STORAGE_DISK_SENSORS.items()
         ]

     entities += [
-        SynoDSMInfoSensor(
-            api, sensor_type, INFORMATION_SENSORS[sensor_type], coordinator
-        )
-        for sensor_type in INFORMATION_SENSORS
+        SynoDSMInfoSensor(api, sensor_type, sensor, coordinator)
+        for sensor_type, sensor in INFORMATION_SENSORS.items()
     ]

     async_add_entities(entities)
@@ -44,9 +44,9 @@ async def async_setup_entry(
         await coordinator.async_refresh()
     entities += [
         SynoDSMSurveillanceHomeModeToggle(
-            api, sensor_type, SURVEILLANCE_SWITCH[sensor_type], version, coordinator
+            api, sensor_type, switch, version, coordinator
         )
-        for sensor_type in SURVEILLANCE_SWITCH
+        for sensor_type, switch in SURVEILLANCE_SWITCH.items()
     ]

     async_add_entities(entities, True)

@@ -18,7 +18,7 @@ from hatasmota.models import DiscoveryHashType, TasmotaDeviceConfig
 from hatasmota.mqtt import TasmotaMQTTClient
 from hatasmota.sensor import TasmotaBaseSensorConfig

-import homeassistant.components.sensor as sensor
+from homeassistant.components import sensor
 from homeassistant.config_entries import ConfigEntry
 from homeassistant.core import HomeAssistant
 from homeassistant.helpers import device_registry as dev_reg

@@ -127,9 +127,8 @@ def setup_platform(hass, config, add_entities, discovery_info=None):
         else:
             continue

-        for datatype in sensor_value_descriptions:
+        for datatype, sensor_info in sensor_value_descriptions.items():
             if datatype & datatype_mask and tellcore_sensor.has_value(datatype):
-                sensor_info = sensor_value_descriptions[datatype]
                 sensors.append(
                     TellstickSensor(sensor_name, tellcore_sensor, datatype, sensor_info)
                 )

@@ -95,10 +95,10 @@ class TorqueReceiveDataView(HomeAssistantView):
             if pid in self.sensors:
                 self.sensors[pid].async_on_update(data[key])

-        for pid in names:
+        for pid, name in names.items():
             if pid not in self.sensors:
                 self.sensors[pid] = TorqueSensor(
-                    ENTITY_NAME_FORMAT.format(self.vehicle, names[pid]), units.get(pid)
+                    ENTITY_NAME_FORMAT.format(self.vehicle, name), units.get(pid)
                 )
                 hass.async_add_job(self.add_entities, [self.sensors[pid]])


@@ -197,7 +197,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:

     @callback
     def _async_cancel_tuya_tracker(event):
-        domain_data[TUYA_TRACKER]()
+        domain_data[TUYA_TRACKER]()  # pylint: disable=not-callable

     domain_data[STOP_CANCEL] = hass.bus.async_listen_once(
         EVENT_HOMEASSISTANT_STOP, _async_cancel_tuya_tracker

@@ -112,8 +112,8 @@ async def async_setup(hass, config):

     hass.data[DOMAIN] = {"client": client, "state_proxy": state_proxy, "name": name}

-    for vallox_service in SERVICE_TO_METHOD:
-        schema = SERVICE_TO_METHOD[vallox_service]["schema"]
+    for vallox_service, method in SERVICE_TO_METHOD.items():
+        schema = method["schema"]
         hass.services.async_register(
             DOMAIN, vallox_service, service_handler.async_handle, schema=schema
         )

@@ -92,8 +92,8 @@ async def async_setup(hass, config):
         data["method"] = method["method"]
         async_dispatcher_send(hass, DOMAIN, data)

-    for service in SERVICE_TO_METHOD:
-        schema = SERVICE_TO_METHOD[service]["schema"]
+    for service, method in SERVICE_TO_METHOD.items():
+        schema = method["schema"]
         hass.services.async_register(
             DOMAIN, service, async_service_handler, schema=schema
         )
@@ -1,7 +1,7 @@
 """Constants used by the Withings component."""
 from enum import Enum

-import homeassistant.const as const
+from homeassistant import const

 CONF_PROFILES = "profiles"
 CONF_USE_WEBHOOK = "use_webhook"

@@ -9,7 +9,7 @@ import voluptuous as vol

 from homeassistant.components.binary_sensor import PLATFORM_SCHEMA, BinarySensorEntity
 from homeassistant.const import CONF_NAME, WEEKDAYS
 import homeassistant.helpers.config_validation as cv
-import homeassistant.util.dt as dt
+from homeassistant.util import dt

 _LOGGER = logging.getLogger(__name__)

@@ -634,8 +634,8 @@ async def async_setup_entry(hass, config_entry, async_add_entities):
     if update_tasks:
         await asyncio.wait(update_tasks)

-    for air_purifier_service in SERVICE_TO_METHOD:
-        schema = SERVICE_TO_METHOD[air_purifier_service].get(
+    for air_purifier_service, method in SERVICE_TO_METHOD.items():
+        schema = method[air_purifier_service].get(
             "schema", AIRPURIFIER_SERVICE_SCHEMA
         )
         hass.services.async_register(

@@ -241,10 +241,8 @@ async def async_setup_entry(hass, config_entry, async_add_entities):
     if update_tasks:
         await asyncio.wait(update_tasks)

-    for xiaomi_miio_service in SERVICE_TO_METHOD:
-        schema = SERVICE_TO_METHOD[xiaomi_miio_service].get(
-            "schema", XIAOMI_MIIO_SERVICE_SCHEMA
-        )
+    for xiaomi_miio_service, method in SERVICE_TO_METHOD.items():
+        schema = method.get("schema", XIAOMI_MIIO_SERVICE_SCHEMA)
         hass.services.async_register(
             DOMAIN, xiaomi_miio_service, async_service_handler, schema=schema
         )

@@ -250,8 +250,8 @@ async def async_setup_entry(hass, config_entry, async_add_entities):
     if update_tasks:
         await asyncio.wait(update_tasks)

-    for plug_service in SERVICE_TO_METHOD:
-        schema = SERVICE_TO_METHOD[plug_service].get("schema", SERVICE_SCHEMA)
+    for plug_service, method in SERVICE_TO_METHOD.items():
+        schema = method[plug_service].get("schema", SERVICE_SCHEMA)
         hass.services.async_register(
             DOMAIN, plug_service, async_service_handler, schema=schema
         )

@@ -190,7 +190,9 @@ async def async_send_message(  # noqa: C901
             _LOGGER.info("Sending file to %s", recipient)
             message = self.Message(sto=recipient, stype="chat")
             message["body"] = url
-            message["oob"]["url"] = url
+            message["oob"][  # pylint: disable=invalid-sequence-index
+                "url"
+            ] = url
             try:
                 message.send()
             except (IqError, IqTimeout, XMPPError) as ex:
@@ -1,5 +1,5 @@
 """Closures channels module for Zigbee Home Automation."""
-import zigpy.zcl.clusters.closures as closures
+from zigpy.zcl.clusters import closures

 from homeassistant.core import callback


@@ -6,7 +6,7 @@ from collections.abc import Coroutine
 from typing import Any

 import zigpy.exceptions
-import zigpy.zcl.clusters.general as general
+from zigpy.zcl.clusters import general
 from zigpy.zcl.foundation import Status

 from homeassistant.core import callback

@@ -3,7 +3,7 @@ from __future__ import annotations

 from collections.abc import Coroutine

-import zigpy.zcl.clusters.homeautomation as homeautomation
+from zigpy.zcl.clusters import homeautomation

 from .. import registries
 from ..const import (

@@ -11,7 +11,7 @@ from collections import namedtuple
 from typing import Any

 from zigpy.exceptions import ZigbeeException
-import zigpy.zcl.clusters.hvac as hvac
+from zigpy.zcl.clusters import hvac
 from zigpy.zcl.foundation import Status

 from homeassistant.core import callback

@@ -4,7 +4,7 @@ from __future__ import annotations
 from collections.abc import Coroutine
 from contextlib import suppress

-import zigpy.zcl.clusters.lighting as lighting
+from zigpy.zcl.clusters import lighting

 from .. import registries
 from ..const import REPORT_CONFIG_DEFAULT

@@ -2,7 +2,7 @@
 import asyncio

 import zigpy.exceptions
-import zigpy.zcl.clusters.lightlink as lightlink
+from zigpy.zcl.clusters import lightlink

 from .. import registries
 from .base import ChannelStatus, ZigbeeChannel

@@ -1,5 +1,5 @@
 """Measurement channels module for Zigbee Home Automation."""
-import zigpy.zcl.clusters.measurement as measurement
+from zigpy.zcl.clusters import measurement

 from .. import registries
 from ..const import (

@@ -1,5 +1,5 @@
 """Protocol channels module for Zigbee Home Automation."""
-import zigpy.zcl.clusters.protocol as protocol
+from zigpy.zcl.clusters import protocol

 from .. import registries
 from .base import ZigbeeChannel
@@ -11,7 +11,7 @@ from collections.abc import Coroutine
 import logging

 from zigpy.exceptions import ZigbeeException
-import zigpy.zcl.clusters.security as security
+from zigpy.zcl.clusters import security
 from zigpy.zcl.clusters.security import IasAce as AceCluster

 from homeassistant.core import CALLABLE_T, callback

@@ -3,7 +3,7 @@ from __future__ import annotations

 from collections.abc import Coroutine

-import zigpy.zcl.clusters.smartenergy as smartenergy
+from zigpy.zcl.clusters import smartenergy

 from homeassistant.const import (
     POWER_WATT,

@@ -5,9 +5,9 @@ import collections
 from typing import Callable, Dict

 import attr
+from zigpy import zcl
 import zigpy.profiles.zha
 import zigpy.profiles.zll
-import zigpy.zcl as zcl

 from homeassistant.components.alarm_control_panel import DOMAIN as ALARM
 from homeassistant.components.binary_sensor import DOMAIN as BINARY_SENSOR

@@ -26,8 +26,8 @@ ZigpyGroupType = zigpy.group.Group
 ZigpyZdoType = zigpy.zdo.ZDO

 if TYPE_CHECKING:
+    from homeassistant.components.zha.core import channels
     import homeassistant.components.zha.core.channels
-    import homeassistant.components.zha.core.channels as channels
     import homeassistant.components.zha.core.channels.base as base_channels
     import homeassistant.components.zha.core.device
     import homeassistant.components.zha.core.gateway

@@ -67,8 +67,8 @@ async def async_get_actions(hass: HomeAssistant, device_id: str) -> list[dict]:
     ]
     actions = [
         action
-        for channel in DEVICE_ACTIONS
-        for action in DEVICE_ACTIONS[channel]
+        for channel, channel_actions in DEVICE_ACTIONS.items()
+        for action in channel_actions
         if channel in cluster_channels
     ]
     for action in actions:
@@ -6,7 +6,7 @@ import functools
 import math

 from zigpy.exceptions import ZigbeeException
-import zigpy.zcl.clusters.hvac as hvac
+from zigpy.zcl.clusters import hvac

 from homeassistant.components.fan import (
     ATTR_PERCENTAGE,

@@ -1114,8 +1114,8 @@ class ZWaveDeviceEntityValues:
         """
         if not check_node_schema(value.node, self._schema):
             return
-        for name in self._values:
-            if self._values[name] is not None:
+        for name, name_value in self._values.items():
+            if name_value is not None:
                 continue
             if not check_value_schema(value, self._schema[const.DISC_VALUES][name]):
                 continue

@@ -899,7 +899,7 @@ async def async_check_ha_config_file(hass: HomeAssistant) -> str | None:
     This method is a coroutine.
     """
     # pylint: disable=import-outside-toplevel
-    import homeassistant.helpers.check_config as check_config
+    from homeassistant.helpers import check_config

     res = await check_config.async_check_ha_config_file(hass)


@@ -201,7 +201,7 @@ class CoreState(enum.Enum):
     final_write = "FINAL_WRITE"
     stopped = "STOPPED"

-    def __str__(self) -> str:  # pylint: disable=invalid-str-returned
+    def __str__(self) -> str:
         """Return the event."""
         return self.value


@@ -593,7 +593,7 @@ class EventOrigin(enum.Enum):
     local = "LOCAL"
     remote = "REMOTE"

-    def __str__(self) -> str:  # pylint: disable=invalid-str-returned
+    def __str__(self) -> str:
         """Return the event."""
         return self.value


@@ -669,7 +669,7 @@ class EventBus:

         This method must be run in the event loop.
         """
-        return {key: len(self._listeners[key]) for key in self._listeners}
+        return {key: len(listeners) for key, listeners in self._listeners.items()}

     @property
     def listeners(self) -> dict[str, int]:

@@ -1298,7 +1298,7 @@ class ServiceRegistry:

         This method must be run in the event loop.
         """
-        return {domain: self._services[domain].copy() for domain in self._services}
+        return {domain: service.copy() for domain, service in self._services.items()}

     def has_service(self, domain: str, service: str) -> bool:
         """Test if specified service exists.

@@ -977,10 +977,10 @@ class _TrackTemplateResultInfo:
         self._track_state_changes.async_update_listeners(
             _render_infos_to_track_states(
                 [
-                    _suppress_domain_all_in_render_info(self._info[template])
+                    _suppress_domain_all_in_render_info(info)
                     if self._rate_limit.async_has_timer(template)
-                    else self._info[template]
-                    for template in self._info
+                    else info
+                    for template, info in self._info.items()
                 ]
             )
         )
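The EventBus, ServiceRegistry and _TrackTemplateResultInfo hunks above apply the same .items() rewrite inside dict comprehensions and generator expressions. A tiny sketch with made-up data:

# Hypothetical listener table, for illustration only.
listeners = {"state_changed": [print], "call_service": [print, repr]}

# Before: indexes back into the dict being iterated.
counts_old = {key: len(listeners[key]) for key in listeners}

# After: unpack the pair inside the comprehension.
counts_new = {key: len(callbacks) for key, callbacks in listeners.items()}

assert counts_old == counts_new == {"state_changed": 1, "call_service": 2}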
@@ -534,7 +534,7 @@ async def async_get_integration(hass: HomeAssistant, domain: str) -> Integration

     try:
         integration = await _async_get_integration(hass, domain)
-    except Exception:  # pylint: disable=broad-except
+    except Exception:
         # Remove event from cache.
         cache.pop(domain)
         event.set()

@@ -161,9 +161,6 @@ def get_random_string(length: int = 10) -> str:
 class OrderedEnum(enum.Enum):
     """Taken from Python 3.4.0 docs."""

-    # https://github.com/PyCQA/pylint/issues/2306
-    # pylint: disable=comparison-with-callable
-
     def __ge__(self, other: ENUM_T) -> bool:
         """Return the greater than element."""
         if self.__class__ is other.__class__:

@@ -13,9 +13,9 @@ import ciso8601

 from homeassistant.const import MATCH_ALL
 if sys.version_info[:2] >= (3, 9):
-    import zoneinfo  # pylint: disable=import-error
+    import zoneinfo
 else:
-    from backports import zoneinfo  # pylint: disable=import-error
+    from backports import zoneinfo

 DATE_STR_FORMAT = "%Y-%m-%d"
 UTC = dt.timezone.utc

@@ -10,7 +10,7 @@ jsonpickle==1.4.1
 mock-open==1.4.0
 mypy==0.902
 pre-commit==2.13.0
-pylint==2.8.3
+pylint==2.9.3
 pipdeptree==1.0.0
 pylint-strict-informational==0.1
 pytest-aiohttp==0.3.0
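The final hunk bumps the test requirement from pylint 2.8.3 to 2.9.3, the release that introduced consider-using-dict-items, use-maxsplit-arg and consider-using-from-import. A quick hedged sanity check for a local environment, assuming the updated requirements file has been installed:

import pylint

# The pin above expects a 2.9.x release to be installed.
assert pylint.__version__.startswith("2.9"), pylint.__version__
print("pylint", pylint.__version__)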
|
Loading…
x
Reference in New Issue
Block a user