Mirror of https://github.com/home-assistant/core.git (synced 2025-07-23 21:27:38 +00:00)

Log lines do not end with a full stop (#37527)

Commit: 53545c984b
Parent: 01fd33f173
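
The diff that follows applies a single style rule across many integrations: messages passed to `_LOGGER` should not end with a full stop. Only the trailing period inside the format string changes; the lazy %-style arguments are left untouched. As a rough, self-contained sketch of the convention (standard-library logging only; the function name `report_auth_failure` is illustrative, not code taken from any of the files below):

    import logging

    _LOGGER = logging.getLogger(__name__)

    def report_auth_failure(username: str, returncode: int) -> None:
        # Preferred: no trailing full stop, and lazy %-formatting so the
        # string is only interpolated when the record is actually emitted.
        _LOGGER.error(
            "User %r failed to authenticate, command exited with code %d",
            username,
            returncode,
        )
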
@@ -75,7 +75,7 @@ class CommandLineAuthProvider(AuthProvider):

 if process.returncode != 0:
 _LOGGER.error(
-"User %r failed to authenticate, command exited with code %d.",
+"User %r failed to authenticate, command exited with code %d",
 username,
 process.returncode,
 )
@@ -37,7 +37,7 @@ def is_on(hass, entity_id=None):
 continue

 if not hasattr(component, "is_on"):
-_LOGGER.warning("Integration %s has no is_on method.", domain)
+_LOGGER.warning("Integration %s has no is_on method", domain)
 continue

 if component.is_on(ent_id):
@@ -183,7 +183,7 @@ class AdGuardHomeEntity(Entity):
 except AdGuardHomeError:
 if self._available:
 _LOGGER.debug(
-"An error occurred while updating AdGuard Home sensor.",
+"An error occurred while updating AdGuard Home sensor",
 exc_info=True,
 )
 self._available = False
@@ -73,7 +73,7 @@ class AdGuardHomeSwitch(AdGuardHomeDeviceEntity, SwitchEntity):
 try:
 await self._adguard_turn_off()
 except AdGuardHomeError:
-_LOGGER.error("An error occurred while turning off AdGuard Home switch.")
+_LOGGER.error("An error occurred while turning off AdGuard Home switch")
 self._available = False

 async def _adguard_turn_off(self) -> None:
@@ -85,7 +85,7 @@ class AdGuardHomeSwitch(AdGuardHomeDeviceEntity, SwitchEntity):
 try:
 await self._adguard_turn_on()
 except AdGuardHomeError:
-_LOGGER.error("An error occurred while turning on AdGuard Home switch.")
+_LOGGER.error("An error occurred while turning on AdGuard Home switch")
 self._available = False

 async def _adguard_turn_on(self) -> None:
@@ -162,7 +162,7 @@ def setup(hass, config):
 if not restart:
 return
 restart = False
-_LOGGER.warning("AlarmDecoder unexpectedly lost connection.")
+_LOGGER.warning("AlarmDecoder unexpectedly lost connection")
 hass.add_job(open_connection)

 def handle_message(sender, message):
@@ -70,11 +70,11 @@ class Auth:
 await self.async_load_preferences()

 if self.is_token_valid():
-_LOGGER.debug("Token still valid, using it.")
+_LOGGER.debug("Token still valid, using it")
 return self._prefs[STORAGE_ACCESS_TOKEN]

 if self._prefs[STORAGE_REFRESH_TOKEN] is None:
-_LOGGER.debug("Token invalid and no refresh token available.")
+_LOGGER.debug("Token invalid and no refresh token available")
 return None

 lwa_params = {
@@ -84,7 +84,7 @@ class Auth:
 CONF_CLIENT_SECRET: self.client_secret,
 }

-_LOGGER.debug("Calling LWA to refresh the access token.")
+_LOGGER.debug("Calling LWA to refresh the access token")
 return await self._async_request_new_token(lwa_params)

 @callback
@@ -113,14 +113,14 @@ class Auth:
 )

 except (asyncio.TimeoutError, aiohttp.ClientError):
-_LOGGER.error("Timeout calling LWA to get auth token.")
+_LOGGER.error("Timeout calling LWA to get auth token")
 return None

 _LOGGER.debug("LWA response header: %s", response.headers)
 _LOGGER.debug("LWA response status: %s", response.status)

 if response.status != HTTP_OK:
-_LOGGER.error("Error calling LWA to get auth token.")
+_LOGGER.error("Error calling LWA to get auth token")
 return None

 response_json = await response.json()
@@ -101,7 +101,7 @@ async def async_send_changereport_message(
 )

 except (asyncio.TimeoutError, aiohttp.ClientError):
-_LOGGER.error("Timeout sending report to Alexa.")
+_LOGGER.error("Timeout sending report to Alexa")
 return

 response_text = await response.text()
@@ -233,7 +233,7 @@ async def async_send_doorbell_event_message(hass, config, alexa_entity):
 )

 except (asyncio.TimeoutError, aiohttp.ClientError):
-_LOGGER.error("Timeout sending report to Alexa.")
+_LOGGER.error("Timeout sending report to Alexa")
 return

 response_text = await response.text()
@@ -48,7 +48,7 @@ def setup(hass, config):
 try:
 apcups_data.update(no_throttle=True)
 except Exception: # pylint: disable=broad-except
-_LOGGER.exception("Failure while testing APCUPSd status retrieval.")
+_LOGGER.exception("Failure while testing APCUPSd status retrieval")
 return False
 return True

@@ -97,7 +97,7 @@ def setup_scanner(hass, config, see, discovery_info=None):
 hass.bus.listen_once(EVENT_HOMEASSISTANT_STOP, aprs_disconnect)

 if not aprs_listener.start_event.wait(timeout):
-_LOGGER.error("Timeout waiting for APRS to connect.")
+_LOGGER.error("Timeout waiting for APRS to connect")
 return

 if not aprs_listener.start_success:
@@ -141,7 +141,7 @@ class AprsListenerThread(threading.Thread):

 try:
 _LOGGER.info(
-"Opening connection to %s with callsign %s.", self.host, self.callsign
+"Opening connection to %s with callsign %s", self.host, self.callsign
 )
 self.ais.connect()
 self.start_complete(
@@ -152,7 +152,7 @@ class AprsListenerThread(threading.Thread):
 self.start_complete(False, str(err))
 except OSError:
 _LOGGER.info(
-"Closing connection to %s with callsign %s.", self.host, self.callsign
+"Closing connection to %s with callsign %s", self.host, self.callsign
 )

 def stop(self):
@@ -59,7 +59,7 @@ def setup(hass, config):
 if arlo_base_station is not None:
 arlo_base_station.refresh_rate = scan_interval.total_seconds()
 elif not arlo.cameras:
-_LOGGER.error("No Arlo camera or base station available.")
+_LOGGER.error("No Arlo camera or base station available")
 return False

 hass.data[DATA_ARLO] = arlo
@@ -103,7 +103,7 @@ async def async_setup(hass, config, retry_delay=FIRST_RETRY_TIME):
 return True

 if not api.is_connected:
-_LOGGER.error("Error connecting %s to %s.", DOMAIN, conf[CONF_HOST])
+_LOGGER.error("Error connecting %s to %s", DOMAIN, conf[CONF_HOST])
 return False

 hass.data[DATA_ASUSWRT] = api
@@ -143,7 +143,7 @@ class AtomeData:
 values = self.atome_client.get_consumption(DAILY_TYPE)
 self._day_usage = values["total"] / 1000
 self._day_price = values["price"]
-_LOGGER.debug("Updating Atome daily data. Got: %d.", self._day_usage)
+_LOGGER.debug("Updating Atome daily data. Got: %d", self._day_usage)

 except KeyError as error:
 _LOGGER.error("Missing last value in values: %s: %s", values, error)
@@ -165,7 +165,7 @@ class AtomeData:
 values = self.atome_client.get_consumption(WEEKLY_TYPE)
 self._week_usage = values["total"] / 1000
 self._week_price = values["price"]
-_LOGGER.debug("Updating Atome weekly data. Got: %d.", self._week_usage)
+_LOGGER.debug("Updating Atome weekly data. Got: %d", self._week_usage)

 except KeyError as error:
 _LOGGER.error("Missing last value in values: %s: %s", values, error)
@@ -187,7 +187,7 @@ class AtomeData:
 values = self.atome_client.get_consumption(MONTHLY_TYPE)
 self._month_usage = values["total"] / 1000
 self._month_price = values["price"]
-_LOGGER.debug("Updating Atome monthly data. Got: %d.", self._month_usage)
+_LOGGER.debug("Updating Atome monthly data. Got: %d", self._month_usage)

 except KeyError as error:
 _LOGGER.error("Missing last value in values: %s: %s", values, error)
@@ -209,7 +209,7 @@ class AtomeData:
 values = self.atome_client.get_consumption(YEARLY_TYPE)
 self._year_usage = values["total"] / 1000
 self._year_price = values["price"]
-_LOGGER.debug("Updating Atome yearly data. Got: %d.", self._year_usage)
+_LOGGER.debug("Updating Atome yearly data. Got: %d", self._year_usage)

 except KeyError as error:
 _LOGGER.error("Missing last value in values: %s: %s", values, error)
@@ -60,7 +60,7 @@ async def async_request_validation(hass, config_entry, august_gateway):
 # In the future this should start a new config flow
 # instead of using the legacy configurator
 #
-_LOGGER.error("Access token is no longer valid.")
+_LOGGER.error("Access token is no longer valid")
 configurator = hass.components.configurator
 entry_id = config_entry.entry_id

@@ -351,7 +351,7 @@ class AugustData(AugustSubscriberMixin):
 doorbell_detail = self._device_detail_by_id.get(device_id)
 if doorbell_detail is None:
 _LOGGER.info(
-"The doorbell %s could not be setup because the system could not fetch details about the doorbell.",
+"The doorbell %s could not be setup because the system could not fetch details about the doorbell",
 doorbell.device_name,
 )
 else:
@@ -373,17 +373,17 @@ class AugustData(AugustSubscriberMixin):
 lock_detail = self._device_detail_by_id.get(device_id)
 if lock_detail is None:
 _LOGGER.info(
-"The lock %s could not be setup because the system could not fetch details about the lock.",
+"The lock %s could not be setup because the system could not fetch details about the lock",
 lock.device_name,
 )
 elif lock_detail.bridge is None:
 _LOGGER.info(
-"The lock %s could not be setup because it does not have a bridge (Connect).",
+"The lock %s could not be setup because it does not have a bridge (Connect)",
 lock.device_name,
 )
 elif not lock_detail.bridge.operative:
 _LOGGER.info(
-"The lock %s could not be setup because the bridge (Connect) is not operative.",
+"The lock %s could not be setup because the bridge (Connect) is not operative",
 lock.device_name,
 )
 else:
@@ -88,7 +88,7 @@ async def async_setup_entry(hass, config_entry, async_add_entities):
 detail = data.get_device_detail(door.device_id)
 if not detail.doorsense:
 _LOGGER.debug(
-"Not adding sensor class door for lock %s because it does not have doorsense.",
+"Not adding sensor class door for lock %s because it does not have doorsense",
 door.device_name,
 )
 continue
@@ -338,7 +338,7 @@ class AutomationEntity(ToggleEntity, RestoreEntity):
 else:
 enable_automation = DEFAULT_INITIAL_STATE
 _LOGGER.debug(
-"Automation %s not in state storage, state %s from default is used.",
+"Automation %s not in state storage, state %s from default is used",
 self.entity_id,
 enable_automation,
 )
@@ -145,7 +145,7 @@ class BMWConnectedDriveAccount:
 except OSError as exception:
 _LOGGER.error(
 "Could not connect to the BMW Connected Drive portal. "
-"The vehicle state could not be updated."
+"The vehicle state could not be updated"
 )
 _LOGGER.exception(exception)

@@ -44,7 +44,7 @@ def setup_platform(hass, config, add_entities, discovery_info=None):
 try:
 things = bapi.getThings()["things"]
 if not things:
-_LOGGER.error("No things present in account.")
+_LOGGER.error("No things present in account")
 else:
 add_entities(
 [
@@ -76,7 +76,7 @@ class BrData:

 async def schedule_update(self, minute=1):
 """Schedule an update after minute minutes."""
-_LOGGER.debug("Scheduling next update in %s minutes.", minute)
+_LOGGER.debug("Scheduling next update in %s minutes", minute)
 nxt = dt_util.utcnow() + timedelta(minutes=minute)
 async_track_point_in_utc_time(self.hass, self.async_update, nxt)

@@ -115,8 +115,7 @@ class BrData:
 self.load_error_count += 1
 threshold_log(
 self.load_error_count,
-"Unable to retrieve json data from Buienradar."
-"(Msg: %s, status: %s,)",
+"Unable to retrieve json data from Buienradar" "(Msg: %s, status: %s,)",
 content.get(MESSAGE),
 content.get(STATUS_CODE),
 )
@@ -136,7 +135,7 @@ class BrData:
 # unable to get the data
 threshold_log(
 self.rain_error_count,
-"Unable to retrieve rain data from Buienradar." "(Msg: %s, status: %s)",
+"Unable to retrieve rain data from Buienradar" "(Msg: %s, status: %s)",
 raincontent.get(MESSAGE),
 raincontent.get(STATUS_CODE),
 )
@@ -493,7 +493,7 @@ class CameraView(HomeAssistantView):
 raise web.HTTPUnauthorized()

 if not camera.is_on:
-_LOGGER.debug("Camera is off.")
+_LOGGER.debug("Camera is off")
 raise web.HTTPServiceUnavailable()

 return await self.handle(request, camera)
@@ -549,7 +549,7 @@ async def websocket_camera_thumbnail(hass, connection, msg):

 Async friendly.
 """
-_LOGGER.warning("The websocket command 'camera_thumbnail' has been deprecated.")
+_LOGGER.warning("The websocket command 'camera_thumbnail' has been deprecated")
 try:
 image = await async_get_image(hass, msg["entity_id"])
 await connection.send_big_result(
@@ -104,7 +104,7 @@ def setup_internal_discovery(hass: HomeAssistant) -> None:
 ),
 )

-_LOGGER.debug("Starting internal pychromecast discovery.")
+_LOGGER.debug("Starting internal pychromecast discovery")
 listener = pychromecast.CastListener(
 internal_add_update_callback,
 internal_remove_callback,
@@ -114,7 +114,7 @@ def setup_internal_discovery(hass: HomeAssistant) -> None:

 def stop_discovery(event):
 """Stop discovery of new chromecasts."""
-_LOGGER.debug("Stopping internal pychromecast discovery.")
+_LOGGER.debug("Stopping internal pychromecast discovery")
 pychromecast.discovery.stop_discovery(browser)
 hass.data[INTERNAL_DISCOVERY_RUNNING_KEY].release()

@@ -133,7 +133,7 @@ async def async_setup_platform(
 _LOGGER.warning(
 "Setting configuration for Cast via platform is deprecated. "
 "Configure via Cast integration instead."
-"This option will become invalid in version 0.116."
+"This option will become invalid in version 0.116"
 )
 await _async_setup_platform(hass, config, async_add_entities, discovery_info)

@@ -306,7 +306,7 @@ class CastDevice(MediaPlayerEntity):
 # Can't disconnect if not connected.
 return
 _LOGGER.debug(
-"[%s %s] Disconnecting from chromecast socket.",
+"[%s %s] Disconnecting from chromecast socket",
 self.entity_id,
 self._cast_info.friendly_name,
 )
@@ -479,7 +479,7 @@ class CastDevice(MediaPlayerEntity):
 self._chromecast.start_app(app_id)
 if app_data:
 _LOGGER.warning(
-"Extra keys %s were ignored. Please use app_name to cast media.",
+"Extra keys %s were ignored. Please use app_name to cast media",
 app_data.keys(),
 )
 return
@@ -64,7 +64,7 @@ def setup_platform(hass, config, add_entities, discovery_info=None):
 _LOGGER.warning(
 "Currency ID %s or display currency %s "
 "is not available. Using 1 (bitcoin) "
-"and USD.",
+"and USD",
 currency_id,
 display_currency,
 )
@@ -83,9 +83,9 @@ class ZWaveConfigWriteView(HomeAssistantView):
 network = hass.data.get(const.DATA_NETWORK)
 if network is None:
 return self.json_message("No Z-Wave network data found", HTTP_NOT_FOUND)
-_LOGGER.info("Z-Wave configuration written to file.")
+_LOGGER.info("Z-Wave configuration written to file")
 network.write_config()
-return self.json_message("Z-Wave configuration saved to file.", HTTP_OK)
+return self.json_message("Z-Wave configuration saved to file", HTTP_OK)


 class ZWaveNodeValueView(HomeAssistantView):
@@ -66,7 +66,7 @@ def setup_platform(hass, config, add_entities, discovery_info=None):

 add_entities(DecoraWifiLight(sw) for sw in all_switches)
 except ValueError:
-_LOGGER.error("Failed to communicate with myLeviton Service.")
+_LOGGER.error("Failed to communicate with myLeviton Service")

 # Listen for the stop event and log out.
 def logout(event):
@@ -75,7 +75,7 @@ def setup_platform(hass, config, add_entities, discovery_info=None):
 if session is not None:
 Person.logout(session)
 except ValueError:
-_LOGGER.error("Failed to log out of myLeviton Service.")
+_LOGGER.error("Failed to log out of myLeviton Service")

 hass.bus.listen(EVENT_HOMEASSISTANT_STOP, logout)

@@ -127,7 +127,7 @@ class DecoraWifiLight(LightEntity):
 try:
 self._switch.update_attributes(attribs)
 except ValueError:
-_LOGGER.error("Failed to turn on myLeviton switch.")
+_LOGGER.error("Failed to turn on myLeviton switch")

 def turn_off(self, **kwargs):
 """Instruct the switch to turn off."""
@@ -135,11 +135,11 @@ class DecoraWifiLight(LightEntity):
 try:
 self._switch.update_attributes(attribs)
 except ValueError:
-_LOGGER.error("Failed to turn off myLeviton switch.")
+_LOGGER.error("Failed to turn off myLeviton switch")

 def update(self):
 """Fetch new state data for this switch."""
 try:
 self._switch.refresh()
 except ValueError:
-_LOGGER.error("Failed to update myLeviton switch data.")
+_LOGGER.error("Failed to update myLeviton switch data")
@@ -94,7 +94,7 @@ async def async_setup(hass: HomeAssistant, config: dict):
 doorstation = get_doorstation_by_token(hass, token)

 if doorstation is None:
-_LOGGER.error("Device not found for provided token.")
+_LOGGER.error("Device not found for provided token")
 return

 # Clear webhooks
@@ -107,7 +107,7 @@ async def async_setup(hass, config):
 partner = conf.get(CONF_PARTNER)

 if hass.config.time_zone is None:
-_LOGGER.error("Timezone is not set in Home Assistant.")
+_LOGGER.error("Timezone is not set in Home Assistant")
 return False

 timezone = str(hass.config.time_zone)
@@ -168,6 +168,6 @@ USN: {unique_service_name}

 def clean_socket_close(sock):
 """Close a socket connection and logs its closure."""
-_LOGGER.info("UPNP responder shutting down.")
+_LOGGER.info("UPNP responder shutting down")

 sock.close()
@@ -53,7 +53,7 @@ def setup_platform(hass, config, add_entities, discovery_info=None):
 try:
 envirophat = importlib.import_module("envirophat")
 except OSError:
-_LOGGER.error("No Enviro pHAT was found.")
+_LOGGER.error("No Enviro pHAT was found")
 return False

 data = EnvirophatData(envirophat, config.get(CONF_USE_LEDS))
@@ -194,7 +194,7 @@ async def async_setup(hass, config):
 controller.callback_login_timeout = connection_fail_callback
 controller.callback_login_success = connection_success_callback

-_LOGGER.info("Start envisalink.")
+_LOGGER.info("Start envisalink")
 controller.start()

 result = await sync_connect
@@ -160,9 +160,9 @@ class EverLightsLight(LightEntity):
 self._status = await self._api.get_status()
 except pyeverlights.ConnectionError:
 if self._available:
-_LOGGER.warning("EverLights control box connection lost.")
+_LOGGER.warning("EverLights control box connection lost")
 self._available = False
 else:
 if not self._available:
-_LOGGER.warning("EverLights control box connection restored.")
+_LOGGER.warning("EverLights control box connection restored")
 self._available = True
@@ -161,14 +161,14 @@ def _handle_exception(err) -> bool:
 if err.status == HTTP_SERVICE_UNAVAILABLE:
 _LOGGER.warning(
 "The vendor says their server is currently unavailable. "
-"Check the vendor's service status page."
+"Check the vendor's service status page"
 )
 return False

 if err.status == HTTP_TOO_MANY_REQUESTS:
 _LOGGER.warning(
 "The vendor's API rate limit has been exceeded. "
-"If this message persists, consider increasing the %s.",
+"If this message persists, consider increasing the %s",
 CONF_SCAN_INTERVAL,
 )
 return False
@@ -221,7 +221,7 @@ async def async_setup(hass: HomeAssistantType, config: ConfigType) -> bool:
 except IndexError:
 _LOGGER.error(
 "Config error: '%s' = %s, but the valid range is 0-%s. "
-"Unable to continue. Fix any configuration errors and restart HA.",
+"Unable to continue. Fix any configuration errors and restart HA",
 CONF_LOCATION_IDX,
 loc_idx,
 len(client_v2.installation_info) - 1,
@@ -134,7 +134,7 @@ class FibaroController:
 info = self._client.info.get()
 self.hub_serial = slugify(info.serialNumber)
 except AssertionError:
-_LOGGER.error("Can't connect to Fibaro HC. Please check URL.")
+_LOGGER.error("Can't connect to Fibaro HC. Please check URL")
 return False
 if login is None or login.status is False:
 _LOGGER.error(
@@ -87,10 +87,10 @@ async def async_setup_platform(hass, config, async_add_entities, discovery_info=
 FoobotClient.TooManyRequests,
 FoobotClient.InternalError,
 ):
-_LOGGER.exception("Failed to connect to foobot servers.")
+_LOGGER.exception("Failed to connect to foobot servers")
 raise PlatformNotReady
 except FoobotClient.ClientError:
-_LOGGER.error("Failed to fetch data from foobot servers.")
+_LOGGER.error("Failed to fetch data from foobot servers")
 return
 async_add_entities(dev, True)

@@ -59,7 +59,7 @@ class FreeboxWifiSwitch(SwitchEntity):
 await self._router.wifi.set_global_config(wifi_config)
 except InsufficientPermissionsError:
 _LOGGER.warning(
-"Home Assistant does not have permissions to modify the Freebox settings. Please refer to documentation."
+"Home Assistant does not have permissions to modify the Freebox settings. Please refer to documentation"
 )

 async def async_turn_on(self, **kwargs):
@@ -349,7 +349,7 @@ def _async_setup_themes(hass, themes):
 hass.data[DATA_DEFAULT_THEME] = name
 update_theme_and_fire_event()
 else:
-_LOGGER.warning("Theme %s is not defined.", name)
+_LOGGER.warning("Theme %s is not defined", name)

 async def reload_themes(_):
 """Reload themes."""
@@ -34,7 +34,7 @@ async def async_setup_entry(
 ) as err:
 _LOGGER.error("Error occurred during Garmin Connect Client update: %s", err)
 except Exception: # pylint: disable=broad-except
-_LOGGER.exception("Unknown error occurred during Garmin Connect Client update.")
+_LOGGER.exception("Unknown error occurred during Garmin Connect Client update")

 entities = []
 for (
@@ -222,7 +222,7 @@ def check_correct_scopes(token_file):
 """Check for the correct scopes in file."""
 tokenfile = open(token_file).read()
 if "readonly" in tokenfile:
-_LOGGER.warning("Please re-authenticate with Google.")
+_LOGGER.warning("Please re-authenticate with Google")
 return False
 return True

@@ -106,7 +106,7 @@ async def async_setup(hass: HomeAssistant, yaml_config: Dict[str, Any]):

 if agent_user_id is None:
 _LOGGER.warning(
-"No agent_user_id supplied for request_sync. Call as a user or pass in user id as agent_user_id."
+"No agent_user_id supplied for request_sync. Call as a user or pass in user id as agent_user_id"
 )
 return

@@ -66,7 +66,7 @@ class GoogleMapsScanner:

 except InvalidCookies:
 _LOGGER.error(
-"The cookie file provided does not provide a valid session. Please create another one and try again."
+"The cookie file provided does not provide a valid session. Please create another one and try again"
 )
 self.success_init = False

@@ -258,7 +258,7 @@ class HarmonyRemote(remote.RemoteEntity):
 _LOGGER.debug("%s: Connecting", self._name)
 try:
 if not await self._client.connect():
-_LOGGER.warning("%s: Unable to connect to HUB.", self._name)
+_LOGGER.warning("%s: Unable to connect to HUB", self._name)
 await self._client.close()
 return False
 except aioexc.TimeOut:
@@ -283,14 +283,14 @@ class HarmonyRemote(remote.RemoteEntity):

 async def got_connected(self, _=None):
 """Notification that we're connected to the HUB."""
-_LOGGER.debug("%s: connected to the HUB.", self._name)
+_LOGGER.debug("%s: connected to the HUB", self._name)
 if not self._available:
 # We were disconnected before.
 await self.new_config()

 async def got_disconnected(self, _=None):
 """Notification that we're disconnected from the HUB."""
-_LOGGER.debug("%s: disconnected from the HUB.", self._name)
+_LOGGER.debug("%s: disconnected from the HUB", self._name)
 self._available = False
 # We're going to wait for 10 seconds before announcing we're
 # unavailable, this to allow a reconnection to happen.
@@ -196,7 +196,7 @@ async def async_setup(hass, config):
 for env in ("HASSIO", "HASSIO_TOKEN"):
 if os.environ.get(env):
 continue
-_LOGGER.error("Missing %s environment variable.", env)
+_LOGGER.error("Missing %s environment variable", env)
 return False

 host = os.environ["HASSIO"]
@@ -78,7 +78,7 @@ class HassIOBaseAuth(HomeAssistantView):
 if prv is not None:
 return prv

-_LOGGER.error("Can't find Home Assistant auth.")
+_LOGGER.error("Can't find Home Assistant auth")
 raise HTTPNotFound()


@@ -176,7 +176,7 @@ class HassIO:
 )

 if request.status not in (HTTP_OK, HTTP_BAD_REQUEST):
-_LOGGER.error("%s return code %d.", command, request.status)
+_LOGGER.error("%s return code %d", command, request.status)
 raise HassioAPIError()

 answer = await request.json()
@@ -156,7 +156,7 @@ async def async_setup_platform(
 _are_valid_client_credentials, here_client
 ):
 _LOGGER.error(
-"Invalid credentials. This error is returned if the specified token was invalid or no contract could be found for this token."
+"Invalid credentials. This error is returned if the specified token was invalid or no contract could be found for this token"
 )
 return

@@ -69,7 +69,7 @@ class ConfigEntryAuth(homeconnect.HomeConnectAPI):
 elif app.type == "Hob":
 device = Hob(self.hass, app)
 else:
-_LOGGER.warning("Appliance type %s not implemented.", app.type)
+_LOGGER.warning("Appliance type %s not implemented", app.type)
 continue
 devices.append({"device": device, "entities": device.get_entity_info()})
 self.devices = devices
@@ -93,15 +93,15 @@ class HomeConnectDevice:
 try:
 self.appliance.get_status()
 except (HomeConnectError, ValueError):
-_LOGGER.debug("Unable to fetch appliance status. Probably offline.")
+_LOGGER.debug("Unable to fetch appliance status. Probably offline")
 try:
 self.appliance.get_settings()
 except (HomeConnectError, ValueError):
-_LOGGER.debug("Unable to fetch settings. Probably offline.")
+_LOGGER.debug("Unable to fetch settings. Probably offline")
 try:
 program_active = self.appliance.get_programs_active()
 except (HomeConnectError, ValueError):
-_LOGGER.debug("Unable to fetch active programs. Probably offline.")
+_LOGGER.debug("Unable to fetch active programs. Probably offline")
 program_active = None
 if program_active and "key" in program_active:
 self.appliance.status[BSH_ACTIVE_PROGRAM] = {"value": program_active["key"]}
@@ -191,7 +191,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry):
 # If the previous instance hasn't cleaned up yet
 # we need to wait a bit
 if not await hass.async_add_executor_job(port_is_available, port):
-_LOGGER.warning("The local port %s is in use.", port)
+_LOGGER.warning("The local port %s is in use", port)
 raise ConfigEntryNotReady

 if CONF_ENTRY_INDEX in conf and conf[CONF_ENTRY_INDEX] == 0:
@@ -266,7 +266,7 @@ async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry):
 if not await hass.async_add_executor_job(
 port_is_available, entry.data[CONF_PORT]
 ):
-_LOGGER.info("Waiting for the HomeKit server to shutdown.")
+_LOGGER.info("Waiting for the HomeKit server to shutdown")
 await asyncio.sleep(1)

 hass.data[DOMAIN].pop(entry.entry_id)
@@ -310,7 +310,7 @@ def _async_register_events_and_services(hass: HomeAssistant):
 if homekit.status != STATUS_RUNNING:
 _LOGGER.warning(
 "HomeKit is not running. Either it is waiting to be "
-"started or has been stopped."
+"started or has been stopped"
 )
 continue

@@ -336,7 +336,7 @@ def _async_register_events_and_services(hass: HomeAssistant):
 if homekit.status != STATUS_READY:
 _LOGGER.warning(
 "HomeKit is not ready. Either it is already starting up or has "
-"been stopped."
+"been stopped"
 )
 continue
 await homekit.async_start()
@@ -436,7 +436,7 @@ class HomeKit:
 # The bridge itself counts as an accessory
 if len(self.bridge.accessories) + 1 >= MAX_DEVICES:
 _LOGGER.warning(
-"Cannot add %s as this would exceeded the %d device limit. Consider using the filter option.",
+"Cannot add %s as this would exceeded the %d device limit. Consider using the filter option",
 state.entity_id,
 MAX_DEVICES,
 )
@@ -129,7 +129,7 @@ def get_accessory(hass, driver, state, aid, config):
 if not aid:
 _LOGGER.warning(
 'The entity "%s" is not supported, since it '
-"generates an invalid aid, please change it.",
+"generates an invalid aid, please change it",
 state.entity_id,
 )
 return None
@@ -61,6 +61,6 @@ class TurboJPEGSingleton:
 TurboJPEGSingleton.__instance = TurboJPEG()
 except Exception: # pylint: disable=broad-except
 _LOGGER.exception(
-"libturbojpeg is not installed, cameras may impact HomeKit performance."
+"libturbojpeg is not installed, cameras may impact HomeKit performance"
 )
 TurboJPEGSingleton.__instance = False
@@ -357,17 +357,17 @@ class Camera(HomeAccessory, PyhapCamera):
 self._async_stop_ffmpeg_watch()

 if not pid_is_alive(stream.process.pid):
-_LOGGER.info("[%s] Stream already stopped.", session_id)
+_LOGGER.info("[%s] Stream already stopped", session_id)
 return True

 for shutdown_method in ["close", "kill"]:
-_LOGGER.info("[%s] %s stream.", session_id, shutdown_method)
+_LOGGER.info("[%s] %s stream", session_id, shutdown_method)
 try:
 await getattr(stream, shutdown_method)()
 return
 except Exception: # pylint: disable=broad-except
 _LOGGER.exception(
-"[%s] Failed to %s stream.", session_id, shutdown_method
+"[%s] Failed to %s stream", session_id, shutdown_method
 )

 async def reconfigure_stream(self, session_info, stream_config):
@@ -336,7 +336,7 @@ class TelevisionMediaPlayer(HomeAccessory):
 input_type = 3 if "hdmi" in source.lower() else 0
 serv_input.configure_char(CHAR_INPUT_SOURCE_TYPE, value=input_type)
 serv_input.configure_char(CHAR_CURRENT_VISIBILITY_STATE, value=False)
-_LOGGER.debug("%s: Added source %s.", self.entity_id, source)
+_LOGGER.debug("%s: Added source %s", self.entity_id, source)

 self.async_update_state(state)

@@ -334,7 +334,7 @@ class Thermostat(HomeAccessory):
 if not hc_modes:
 # This cannot be none OR an empty list
 _LOGGER.error(
-"%s: HVAC modes not yet available. Please disable auto start for homekit.",
+"%s: HVAC modes not yet available. Please disable auto start for homekit",
 self.entity_id,
 )
 hc_modes = (
@@ -306,7 +306,7 @@ class HomeKitSpeedMapping:
 _LOGGER.warning(
 "%s does not contain the speed setting "
 "%s as its first element. "
-"Assuming that %s is equivalent to 'off'.",
+"Assuming that %s is equivalent to 'off'",
 speed_list,
 fan.SPEED_OFF,
 speed_list[0],
@@ -275,7 +275,7 @@ class HKDevice:
 async def async_update(self, now=None):
 """Poll state of all entities attached to this bridge/accessory."""
 if not self.pollable_characteristics:
-_LOGGER.debug("HomeKit connection not polling any characteristics.")
+_LOGGER.debug("HomeKit connection not polling any characteristics")
 return

 if self._polling_lock.locked():
@@ -105,7 +105,7 @@ class HomematicipAlarmControlPanelEntity(AlarmControlPanelEntity):
 self.async_write_ha_state()
 else:
 _LOGGER.debug(
-"Device Changed Event for %s (Alarm Control Panel) not fired. Entity is disabled.",
+"Device Changed Event for %s (Alarm Control Panel) not fired. Entity is disabled",
 self.name,
 )

@@ -111,7 +111,7 @@ class HomematicipGenericDevice(Entity):
 self.async_write_ha_state()
 else:
 _LOGGER.debug(
-"Device Changed Event for %s (%s) not fired. Entity is disabled.",
+"Device Changed Event for %s (%s) not fired. Entity is disabled",
 self.name,
 self._device.modelType,
 )
@@ -45,7 +45,7 @@ class RequestDataValidator:
 data = await request.json()
 except ValueError:
 if not self._allow_empty or (await request.content.read()) != b"":
-_LOGGER.error("Invalid JSON received.")
+_LOGGER.error("Invalid JSON received")
 return view.json_message("Invalid JSON.", HTTP_BAD_REQUEST)
 data = {}

@@ -153,7 +153,7 @@ class HueBridge:
 client_exceptions.ServerDisconnectedError,
 ) as err:
 if tries == 3:
-_LOGGER.error("Request failed %s times, giving up.", tries)
+_LOGGER.error("Request failed %s times, giving up", tries)
 raise

 # We only retry if it's a server error. So raise on all 4XX errors.
@@ -76,7 +76,7 @@ async def async_setup(hass, config):
 for target, key in target_keys.items():
 res = pyfttt.send_event(key, event, value1, value2, value3)
 if res.status_code != HTTP_OK:
-_LOGGER.error("IFTTT reported error sending event to %s.", target)
+_LOGGER.error("IFTTT reported error sending event to %s", target)
 except requests.exceptions.RequestException:
 _LOGGER.exception("Error communicating with IFTTT")

@@ -155,7 +155,7 @@ class IslamicPrayerClient:
 self.available = True
 except (exceptions.InvalidResponseError, ConnError):
 self.available = False
-_LOGGER.debug("Error retrieving prayer times.")
+_LOGGER.debug("Error retrieving prayer times")
 async_call_later(self.hass, 60, self.async_update)
 return

@@ -165,7 +165,7 @@ class IslamicPrayerClient:
 )
 await self.async_schedule_future_update()

-_LOGGER.debug("New prayer times retrieved. Updating sensors.")
+_LOGGER.debug("New prayer times retrieved. Updating sensors")
 async_dispatcher_send(self.hass, DATA_UPDATED)

 async def async_setup(self):
@@ -181,7 +181,7 @@ async def async_setup_entry(

 def _start_auto_update() -> None:
 """Start isy auto update."""
-_LOGGER.debug("ISY Starting Event Stream and automatic updates.")
+_LOGGER.debug("ISY Starting Event Stream and automatic updates")
 isy.auto_update = True

 await hass.async_add_executor_job(_start_auto_update)
@@ -257,7 +257,7 @@ async def async_unload_entry(

 def _stop_auto_update() -> None:
 """Start isy auto update."""
-_LOGGER.debug("ISY Stopping Event Stream and automatic updates.")
+_LOGGER.debug("ISY Stopping Event Stream and automatic updates")
 isy.auto_update = False

 await hass.async_add_executor_job(_stop_auto_update)
@@ -107,7 +107,7 @@ async def async_setup_entry(
 if not parent_device:
 _LOGGER.error(
 "Node %s has a parent node %s, but no device "
-"was created for the parent. Skipping.",
+"was created for the parent. Skipping",
 node.address,
 node.parent_node,
 )
@@ -157,7 +157,7 @@ class ISYNodeEntity(ISYEntity):
 """Respond to an entity service command call."""
 if not hasattr(self._node, command):
 _LOGGER.error(
-"Invalid Service Call %s for device %s.", command, self.entity_id
+"Invalid Service Call %s for device %s", command, self.entity_id
 )
 return
 getattr(self._node, command)()
@@ -168,7 +168,7 @@ class ISYNodeEntity(ISYEntity):
 """Respond to an entity service raw command call."""
 if not hasattr(self._node, "send_cmd"):
 _LOGGER.error(
-"Invalid Service Call %s for device %s.", command, self.entity_id
+"Invalid Service Call %s for device %s", command, self.entity_id
 )
 return
 self._node.send_cmd(command, value, unit_of_measurement, parameters)
@@ -330,7 +330,7 @@ def _categorize_programs(hass_isy_data: dict, programs: Programs) -> None:
 status = entity_folder.get_by_name(KEY_STATUS)
 if not status or not status.protocol == PROTO_PROGRAM:
 _LOGGER.warning(
-"Program %s entity '%s' not loaded, invalid/missing status program.",
+"Program %s entity '%s' not loaded, invalid/missing status program",
 platform,
 entity_folder.name,
 )
@@ -340,7 +340,7 @@ def _categorize_programs(hass_isy_data: dict, programs: Programs) -> None:
 actions = entity_folder.get_by_name(KEY_ACTIONS)
 if not actions or not actions.protocol == PROTO_PROGRAM:
 _LOGGER.warning(
-"Program %s entity '%s' not loaded, invalid/missing actions program.",
+"Program %s entity '%s' not loaded, invalid/missing actions program",
 platform,
 entity_folder.name,
 )
@@ -211,7 +211,7 @@ def async_setup_services(hass: HomeAssistantType):
 await hass.async_add_executor_job(command.run)
 return
 _LOGGER.error(
-"Could not run network resource command. Not found or enabled on the ISY."
+"Could not run network resource command. Not found or enabled on the ISY"
 )

 async def async_send_program_command_service_handler(service):
@@ -233,9 +233,7 @@ def async_setup_services(hass: HomeAssistantType):
 if program is not None:
 await hass.async_add_executor_job(getattr(program, command))
 return
-_LOGGER.error(
-"Could not send program command. Not found or enabled on the ISY."
-)
+_LOGGER.error("Could not send program command. Not found or enabled on the ISY")

 async def async_set_variable_service_handler(service):
 """Handle a set variable service call."""
@@ -258,7 +256,7 @@ def async_setup_services(hass: HomeAssistantType):
 if variable is not None:
 await hass.async_add_executor_job(variable.set_value, value, init)
 return
-_LOGGER.error("Could not set variable value. Not found or enabled on the ISY.")
+_LOGGER.error("Could not set variable value. Not found or enabled on the ISY")

 async def async_cleanup_registry_entries(service) -> None:
 """Remove extra entities that are no longer part of the integration."""
@@ -369,7 +367,7 @@ def async_unload_services(hass: HomeAssistantType):
 ):
 return

-_LOGGER.info("Unloading ISY994 Services.")
+_LOGGER.info("Unloading ISY994 Services")
 hass.services.async_remove(domain=DOMAIN, service=SERVICE_SYSTEM_QUERY)
 hass.services.async_remove(domain=DOMAIN, service=SERVICE_RUN_NETWORK_RESOURCE)
 hass.services.async_remove(domain=DOMAIN, service=SERVICE_SEND_PROGRAM_COMMAND)
@@ -45,12 +45,12 @@ class ISYSwitchEntity(ISYNodeEntity, SwitchEntity):
 def turn_off(self, **kwargs) -> None:
 """Send the turn off command to the ISY994 switch."""
 if not self._node.turn_off():
-_LOGGER.debug("Unable to turn off switch.")
+_LOGGER.debug("Unable to turn off switch")

 def turn_on(self, **kwargs) -> None:
 """Send the turn on command to the ISY994 switch."""
 if not self._node.turn_on():
-_LOGGER.debug("Unable to turn on switch.")
+_LOGGER.debug("Unable to turn on switch")

 @property
 def icon(self) -> str:
@@ -42,7 +42,7 @@ def setup_platform(hass, config, add_entities, discovery_info=None):
 available_locks = kiwi.get_locks()
 if not available_locks:
 # No locks found; abort setup routine.
-_LOGGER.info("No KIWI locks found in your account.")
+_LOGGER.info("No KIWI locks found in your account")
 return
 add_entities([KiwiLock(lock, kiwi) for lock in available_locks], True)

@ -346,7 +346,7 @@ class KonnectedView(HomeAssistantView):
|
|||||||
_LOGGER.error(
|
_LOGGER.error(
|
||||||
"Your Konnected device software may be out of "
|
"Your Konnected device software may be out of "
|
||||||
"date. Visit https://help.konnected.io for "
|
"date. Visit https://help.konnected.io for "
|
||||||
"updating instructions."
|
"updating instructions"
|
||||||
)
|
)
|
||||||
|
|
||||||
device = data[CONF_DEVICES].get(device_id)
|
device = data[CONF_DEVICES].get(device_id)
|
||||||
|
@ -248,7 +248,7 @@ async def async_setup(hass, config):
|
|||||||
connections.append(connection)
|
connections.append(connection)
|
||||||
_LOGGER.info('LCN connected to "%s"', connection_name)
|
_LOGGER.info('LCN connected to "%s"', connection_name)
|
||||||
except TimeoutError:
|
except TimeoutError:
|
||||||
_LOGGER.error('Connection to PCHK server "%s" failed.', connection_name)
|
_LOGGER.error('Connection to PCHK server "%s" failed', connection_name)
|
||||||
return False
|
return False
|
||||||
|
|
||||||
hass.data[DATA_LCN][CONF_CONNECTIONS] = connections
|
hass.data[DATA_LCN][CONF_CONNECTIONS] = connections
|
||||||
|
@ -163,7 +163,7 @@ def aiolifx_effects():
|
|||||||
|
|
||||||
async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
|
async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
|
||||||
"""Set up the LIFX light platform. Obsolete."""
|
"""Set up the LIFX light platform. Obsolete."""
|
||||||
_LOGGER.warning("LIFX no longer works with light platform configuration.")
|
_LOGGER.warning("LIFX no longer works with light platform configuration")
|
||||||
|
|
||||||
|
|
||||||
async def async_setup_entry(hass, config_entry, async_add_entities):
|
async def async_setup_entry(hass, config_entry, async_add_entities):
|
||||||
|
@ -225,7 +225,7 @@ async def create_yaml_resource_col(hass, yaml_resources):
|
|||||||
else:
|
else:
|
||||||
if CONF_RESOURCES in ll_conf:
|
if CONF_RESOURCES in ll_conf:
|
||||||
_LOGGER.warning(
|
_LOGGER.warning(
|
||||||
"Resources need to be specified in your configuration.yaml. Please see the docs."
|
"Resources need to be specified in your configuration.yaml. Please see the docs"
|
||||||
)
|
)
|
||||||
yaml_resources = ll_conf[CONF_RESOURCES]
|
yaml_resources = ll_conf[CONF_RESOURCES]
|
||||||
|
|
||||||
|
@ -289,7 +289,7 @@ class MatrixBot:
|
|||||||
if self._mx_id in self._auth_tokens:
|
if self._mx_id in self._auth_tokens:
|
||||||
try:
|
try:
|
||||||
client = self._login_by_token()
|
client = self._login_by_token()
|
||||||
_LOGGER.debug("Logged in using stored token.")
|
_LOGGER.debug("Logged in using stored token")
|
||||||
|
|
||||||
except MatrixRequestError as ex:
|
except MatrixRequestError as ex:
|
||||||
_LOGGER.warning(
|
_LOGGER.warning(
|
||||||
@ -302,7 +302,7 @@ class MatrixBot:
|
|||||||
if not client:
|
if not client:
|
||||||
try:
|
try:
|
||||||
client = self._login_by_password()
|
client = self._login_by_password()
|
||||||
_LOGGER.debug("Logged in using password.")
|
_LOGGER.debug("Logged in using password")
|
||||||
|
|
||||||
except MatrixRequestError as ex:
|
except MatrixRequestError as ex:
|
||||||
_LOGGER.error(
|
_LOGGER.error(
|
||||||
|
@ -906,7 +906,7 @@ async def websocket_handle_thumbnail(hass, connection, msg):
|
|||||||
return
|
return
|
||||||
|
|
||||||
_LOGGER.warning(
|
_LOGGER.warning(
|
||||||
"The websocket command media_player_thumbnail is deprecated. Use /api/media_player_proxy instead."
|
"The websocket command media_player_thumbnail is deprecated. Use /api/media_player_proxy instead"
|
||||||
)
|
)
|
||||||
|
|
||||||
data, content_type = await player.async_get_media_image()
|
data, content_type = await player.async_get_media_image()
|
||||||
|
@ -79,7 +79,7 @@ PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
|
|||||||
async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
|
async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
|
||||||
"""Set up the MiFlora sensor."""
|
"""Set up the MiFlora sensor."""
|
||||||
backend = BACKEND
|
backend = BACKEND
|
||||||
_LOGGER.debug("Miflora is using %s backend.", backend.__name__)
|
_LOGGER.debug("Miflora is using %s backend", backend.__name__)
|
||||||
|
|
||||||
cache = config.get(CONF_SCAN_INTERVAL, SCAN_INTERVAL).total_seconds()
|
cache = config.get(CONF_SCAN_INTERVAL, SCAN_INTERVAL).total_seconds()
|
||||||
poller = miflora_poller.MiFloraPoller(
|
poller = miflora_poller.MiFloraPoller(
|
||||||
|
@ -72,7 +72,7 @@ PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
|
|||||||
def setup_platform(hass, config, add_entities, discovery_info=None):
|
def setup_platform(hass, config, add_entities, discovery_info=None):
|
||||||
"""Set up the MiTempBt sensor."""
|
"""Set up the MiTempBt sensor."""
|
||||||
backend = BACKEND
|
backend = BACKEND
|
||||||
_LOGGER.debug("MiTempBt is using %s backend.", backend.__name__)
|
_LOGGER.debug("MiTempBt is using %s backend", backend.__name__)
|
||||||
|
|
||||||
cache = config.get(CONF_CACHE)
|
cache = config.get(CONF_CACHE)
|
||||||
poller = mitemp_bt_poller.MiTempBtPoller(
|
poller = mitemp_bt_poller.MiTempBtPoller(
|
||||||
|
@ -38,7 +38,7 @@ def setup_platform(hass, config, add_entities, discovery_info=None):
|
|||||||
|
|
||||||
modem = bm(port)
|
modem = bm(port)
|
||||||
if modem.state == modem.STATE_FAILED:
|
if modem.state == modem.STATE_FAILED:
|
||||||
_LOGGER.error("Unable to initialize modem.")
|
_LOGGER.error("Unable to initialize modem")
|
||||||
return
|
return
|
||||||
|
|
||||||
add_entities([ModemCalleridSensor(hass, name, port, modem)])
|
add_entities([ModemCalleridSensor(hass, name, port, modem)])
|
||||||
|
@ -89,7 +89,7 @@ def valid_stations(stations, given_stations):
|
|||||||
if station is None:
|
if station is None:
|
||||||
continue
|
continue
|
||||||
if not any(s.code == station.upper() for s in stations):
|
if not any(s.code == station.upper() for s in stations):
|
||||||
_LOGGER.warning("Station '%s' is not a valid station.", station)
|
_LOGGER.warning("Station '%s' is not a valid station", station)
|
||||||
return False
|
return False
|
||||||
return True
|
return True
|
||||||
|
|
||||||
|
@ -347,7 +347,7 @@ class NestDevice:
|
|||||||
_LOGGER.warning(
|
_LOGGER.warning(
|
||||||
"Cannot retrieve device name for [%s]"
|
"Cannot retrieve device name for [%s]"
|
||||||
", please check your Nest developer "
|
", please check your Nest developer "
|
||||||
"account permission settings.",
|
"account permission settings",
|
||||||
device.serial,
|
device.serial,
|
||||||
)
|
)
|
||||||
continue
|
continue
|
||||||
|
@ -103,7 +103,7 @@ class NestCamera(Camera):
|
|||||||
def turn_on(self):
|
def turn_on(self):
|
||||||
"""Turn on camera."""
|
"""Turn on camera."""
|
||||||
if not self._online:
|
if not self._online:
|
||||||
_LOGGER.error("Camera %s is offline.", self._name)
|
_LOGGER.error("Camera %s is offline", self._name)
|
||||||
return
|
return
|
||||||
|
|
||||||
_LOGGER.debug("Turn on camera %s", self._name)
|
_LOGGER.debug("Turn on camera %s", self._name)
|
||||||
|
@ -532,7 +532,7 @@ class NetatmoPublicData:
|
|||||||
return
|
return
|
||||||
|
|
||||||
if data.CountStationInArea() == 0:
|
if data.CountStationInArea() == 0:
|
||||||
_LOGGER.warning("No Stations available in this area.")
|
_LOGGER.warning("No Stations available in this area")
|
||||||
return
|
return
|
||||||
|
|
||||||
self.data = data
|
self.data = data
|
||||||
|
@ -165,7 +165,7 @@ def setup(hass, config):
|
|||||||
"WARNING: This may poll your Leaf too often, and drain the 12V"
|
"WARNING: This may poll your Leaf too often, and drain the 12V"
|
||||||
" battery. If you drain your cars 12V battery it WILL NOT START"
|
" battery. If you drain your cars 12V battery it WILL NOT START"
|
||||||
" as the drive train battery won't connect."
|
" as the drive train battery won't connect."
|
||||||
" Don't set the intervals too low."
|
" Don't set the intervals too low"
|
||||||
)
|
)
|
||||||
|
|
||||||
data_store = LeafDataStore(hass, leaf, car_config)
|
data_store = LeafDataStore(hass, leaf, car_config)
|
||||||
|
@ -122,7 +122,7 @@ class OpenAlprCloudEntity(ImageProcessingAlprEntity):
|
|||||||
data = await request.json()
|
data = await request.json()
|
||||||
|
|
||||||
if request.status != HTTP_OK:
|
if request.status != HTTP_OK:
|
||||||
_LOGGER.error("Error %d -> %s.", request.status, data.get("error"))
|
_LOGGER.error("Error %d -> %s", request.status, data.get("error"))
|
||||||
return
|
return
|
||||||
|
|
||||||
except (asyncio.TimeoutError, aiohttp.ClientError):
|
except (asyncio.TimeoutError, aiohttp.ClientError):
|
||||||
|
@ -52,7 +52,7 @@ def setup(hass, config):
|
|||||||
try:
|
try:
|
||||||
interfaces_client.get_arp()
|
interfaces_client.get_arp()
|
||||||
except APIException:
|
except APIException:
|
||||||
_LOGGER.exception("Failure while connecting to OPNsense API endpoint.")
|
_LOGGER.exception("Failure while connecting to OPNsense API endpoint")
|
||||||
return False
|
return False
|
||||||
|
|
||||||
if tracker_interfaces:
|
if tracker_interfaces:
|
||||||
|
@ -377,7 +377,7 @@ async def async_handle_not_impl_msg(hass, context, message):
|
|||||||
|
|
||||||
async def async_handle_unsupported_msg(hass, context, message):
|
async def async_handle_unsupported_msg(hass, context, message):
|
||||||
"""Handle an unsupported or invalid message type."""
|
"""Handle an unsupported or invalid message type."""
|
||||||
_LOGGER.warning("Received unsupported message type: %s.", message.get("_type"))
|
_LOGGER.warning("Received unsupported message type: %s", message.get("_type"))
|
||||||
|
|
||||||
|
|
||||||
async def async_handle_message(hass, context, message):
|
async def async_handle_message(hass, context, message):
|
||||||
|
@ -172,7 +172,7 @@ async def async_setup(hass: HomeAssistant, config: dict) -> bool:
|
|||||||
if entity_id not in persistent_notifications:
|
if entity_id not in persistent_notifications:
|
||||||
_LOGGER.error(
|
_LOGGER.error(
|
||||||
"Marking persistent_notification read failed: "
|
"Marking persistent_notification read failed: "
|
||||||
"Notification ID %s not found.",
|
"Notification ID %s not found",
|
||||||
notification_id,
|
notification_id,
|
||||||
)
|
)
|
||||||
return
|
return
|
||||||
|
@ -119,7 +119,7 @@ class PlaatoSensor(Entity):
|
|||||||
"""Return the state of the sensor."""
|
"""Return the state of the sensor."""
|
||||||
sensors = self.get_sensors()
|
sensors = self.get_sensors()
|
||||||
if sensors is False:
|
if sensors is False:
|
||||||
_LOGGER.debug("Device with name %s has no sensors.", self.name)
|
_LOGGER.debug("Device with name %s has no sensors", self.name)
|
||||||
return 0
|
return 0
|
||||||
|
|
||||||
if self._type == ATTR_ABV:
|
if self._type == ATTR_ABV:
|
||||||
|
@ -180,7 +180,7 @@ class PlexServer:
|
|||||||
f"hostname '{domain}' doesn't match"
|
f"hostname '{domain}' doesn't match"
|
||||||
):
|
):
|
||||||
_LOGGER.warning(
|
_LOGGER.warning(
|
||||||
"Plex SSL certificate's hostname changed, updating."
|
"Plex SSL certificate's hostname changed, updating"
|
||||||
)
|
)
|
||||||
if _update_plexdirect_hostname():
|
if _update_plexdirect_hostname():
|
||||||
config_entry_update_needed = True
|
config_entry_update_needed = True
|
||||||
@ -199,7 +199,7 @@ class PlexServer:
|
|||||||
system_accounts = self._plex_server.systemAccounts()
|
system_accounts = self._plex_server.systemAccounts()
|
||||||
except Unauthorized:
|
except Unauthorized:
|
||||||
_LOGGER.warning(
|
_LOGGER.warning(
|
||||||
"Plex account has limited permissions, shared account filtering will not be available."
|
"Plex account has limited permissions, shared account filtering will not be available"
|
||||||
)
|
)
|
||||||
else:
|
else:
|
||||||
self._accounts = [
|
self._accounts = [
|
||||||
|
@ -37,7 +37,7 @@ class PoolSenseConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):
|
|||||||
|
|
||||||
self._email = user_input[CONF_EMAIL]
|
self._email = user_input[CONF_EMAIL]
|
||||||
self._password = user_input[CONF_PASSWORD]
|
self._password = user_input[CONF_PASSWORD]
|
||||||
_LOGGER.debug("Configuring user: %s - Password hidden.", self._email)
|
_LOGGER.debug("Configuring user: %s - Password hidden", self._email)
|
||||||
|
|
||||||
poolsense = PoolSense()
|
poolsense = PoolSense()
|
||||||
api_key_valid = await poolsense.test_poolsense_credentials(
|
api_key_valid = await poolsense.test_poolsense_credentials(
|
||||||
|
@ -51,10 +51,10 @@ class QuantumGatewayDeviceScanner(DeviceScanner):
|
|||||||
self.success_init = self.quantum.success_init
|
self.success_init = self.quantum.success_init
|
||||||
except RequestException:
|
except RequestException:
|
||||||
self.success_init = False
|
self.success_init = False
|
||||||
_LOGGER.error("Unable to connect to gateway. Check host.")
|
_LOGGER.error("Unable to connect to gateway. Check host")
|
||||||
|
|
||||||
if not self.success_init:
|
if not self.success_init:
|
||||||
_LOGGER.error("Unable to login to gateway. Check password and host.")
|
_LOGGER.error("Unable to login to gateway. Check password and host")
|
||||||
|
|
||||||
def scan_devices(self):
|
def scan_devices(self):
|
||||||
"""Scan for new devices and return a list of found MACs."""
|
"""Scan for new devices and return a list of found MACs."""
|
||||||
|
@ -421,7 +421,7 @@ class Recorder(threading.Thread):
|
|||||||
except Exception as err: # pylint: disable=broad-except
|
except Exception as err: # pylint: disable=broad-except
|
||||||
# Must catch the exception to prevent the loop from collapsing
|
# Must catch the exception to prevent the loop from collapsing
|
||||||
_LOGGER.error(
|
_LOGGER.error(
|
||||||
"Error in database connectivity during keepalive: %s.", err,
|
"Error in database connectivity during keepalive: %s", err,
|
||||||
)
|
)
|
||||||
self._reopen_event_session()
|
self._reopen_event_session()
|
||||||
|
|
||||||
|
@ -165,7 +165,7 @@ def _drop_index(engine, table_name, index_name):
|
|||||||
_LOGGER.warning(
|
_LOGGER.warning(
|
||||||
"Failed to drop index %s from table %s. Schema "
|
"Failed to drop index %s from table %s. Schema "
|
||||||
"Migration will continue; this is not a "
|
"Migration will continue; this is not a "
|
||||||
"critical operation.",
|
"critical operation",
|
||||||
index_name,
|
index_name,
|
||||||
table_name,
|
table_name,
|
||||||
)
|
)
|
||||||
@ -195,7 +195,7 @@ def _add_columns(engine, table_name, columns_def):
|
|||||||
except (InternalError, OperationalError):
|
except (InternalError, OperationalError):
|
||||||
# Some engines support adding all columns at once,
|
# Some engines support adding all columns at once,
|
||||||
# this error is when they don't
|
# this error is when they don't
|
||||||
_LOGGER.info("Unable to use quick column add. Adding 1 by 1.")
|
_LOGGER.info("Unable to use quick column add. Adding 1 by 1")
|
||||||
|
|
||||||
for column_def in columns_def:
|
for column_def in columns_def:
|
||||||
try:
|
try:
|
||||||
|
@ -59,7 +59,7 @@ def purge_old_data(instance, purge_days: int, repack: bool) -> bool:
|
|||||||
# If states or events purging isn't processing the purge_before yet,
|
# If states or events purging isn't processing the purge_before yet,
|
||||||
# return false, as we are not done yet.
|
# return false, as we are not done yet.
|
||||||
if batch_purge_before != purge_before:
|
if batch_purge_before != purge_before:
|
||||||
_LOGGER.debug("Purging hasn't fully completed yet.")
|
_LOGGER.debug("Purging hasn't fully completed yet")
|
||||||
return False
|
return False
|
||||||
|
|
||||||
# Recorder runs is small, no need to batch run it
|
# Recorder runs is small, no need to batch run it
|
||||||
@ -94,7 +94,7 @@ def purge_old_data(instance, purge_days: int, repack: bool) -> bool:
|
|||||||
time.sleep(instance.db_retry_wait)
|
time.sleep(instance.db_retry_wait)
|
||||||
return False
|
return False
|
||||||
|
|
||||||
_LOGGER.warning("Error purging history: %s.", err)
|
_LOGGER.warning("Error purging history: %s", err)
|
||||||
except SQLAlchemyError as err:
|
except SQLAlchemyError as err:
|
||||||
_LOGGER.warning("Error purging history: %s.", err)
|
_LOGGER.warning("Error purging history: %s", err)
|
||||||
return True
|
return True
|
||||||
|
@ -216,7 +216,7 @@ class RestSensor(Entity):
|
|||||||
_LOGGER.debug("JSON converted from XML: %s", value)
|
_LOGGER.debug("JSON converted from XML: %s", value)
|
||||||
except ExpatError:
|
except ExpatError:
|
||||||
_LOGGER.warning(
|
_LOGGER.warning(
|
||||||
"REST xml result could not be parsed and converted to JSON."
|
"REST xml result could not be parsed and converted to JSON"
|
||||||
)
|
)
|
||||||
_LOGGER.debug("Erroneous XML: %s", value)
|
_LOGGER.debug("Erroneous XML: %s", value)
|
||||||
|
|
||||||
|
@ -122,22 +122,22 @@ async def async_setup(hass, config):
|
|||||||
|
|
||||||
if response.status < HTTP_BAD_REQUEST:
|
if response.status < HTTP_BAD_REQUEST:
|
||||||
_LOGGER.debug(
|
_LOGGER.debug(
|
||||||
"Success. Url: %s. Status code: %d.",
|
"Success. Url: %s. Status code: %d",
|
||||||
response.url,
|
response.url,
|
||||||
response.status,
|
response.status,
|
||||||
)
|
)
|
||||||
else:
|
else:
|
||||||
_LOGGER.warning(
|
_LOGGER.warning(
|
||||||
"Error. Url: %s. Status code %d.",
|
"Error. Url: %s. Status code %d",
|
||||||
response.url,
|
response.url,
|
||||||
response.status,
|
response.status,
|
||||||
)
|
)
|
||||||
|
|
||||||
except asyncio.TimeoutError:
|
except asyncio.TimeoutError:
|
||||||
_LOGGER.warning("Timeout call %s.", response.url, exc_info=1)
|
_LOGGER.warning("Timeout call %s", response.url, exc_info=1)
|
||||||
|
|
||||||
except aiohttp.ClientError:
|
except aiohttp.ClientError:
|
||||||
_LOGGER.error("Client error %s.", request_url, exc_info=1)
|
_LOGGER.error("Client error %s", request_url, exc_info=1)
|
||||||
|
|
||||||
# register services
|
# register services
|
||||||
hass.services.async_register(DOMAIN, name, async_service_handler)
|
hass.services.async_register(DOMAIN, name, async_service_handler)
|
||||||
|
@ -231,7 +231,7 @@ async def _async_process_config(hass, config, component):
|
|||||||
entity_id = ENTITY_ID_FORMAT.format(service.service)
|
entity_id = ENTITY_ID_FORMAT.format(service.service)
|
||||||
script_entity = component.get_entity(entity_id)
|
script_entity = component.get_entity(entity_id)
|
||||||
if script_entity.script.is_legacy and script_entity.is_on:
|
if script_entity.script.is_legacy and script_entity.is_on:
|
||||||
_LOGGER.warning("Script %s already running.", entity_id)
|
_LOGGER.warning("Script %s already running", entity_id)
|
||||||
return
|
return
|
||||||
await script_entity.async_turn_on(
|
await script_entity.async_turn_on(
|
||||||
variables=service.data, context=service.context
|
variables=service.data, context=service.context
|
||||||
|
@ -101,7 +101,7 @@ async def async_setup_platform(hass, config, async_add_entities, discovery_info=
|
|||||||
asyncio.TimeoutError,
|
asyncio.TimeoutError,
|
||||||
pysensibo.SensiboError,
|
pysensibo.SensiboError,
|
||||||
):
|
):
|
||||||
_LOGGER.exception("Failed to connect to Sensibo servers.")
|
_LOGGER.exception("Failed to connect to Sensibo servers")
|
||||||
raise PlatformNotReady
|
raise PlatformNotReady
|
||||||
|
|
||||||
if not devices:
|
if not devices:
|
||||||
@ -398,5 +398,5 @@ class SensiboClimate(ClimateEntity):
|
|||||||
data = await self._client.async_get_device(self._id, _FETCH_FIELDS)
|
data = await self._client.async_get_device(self._id, _FETCH_FIELDS)
|
||||||
self._do_update(data)
|
self._do_update(data)
|
||||||
except (aiohttp.client_exceptions.ClientError, pysensibo.SensiboError):
|
except (aiohttp.client_exceptions.ClientError, pysensibo.SensiboError):
|
||||||
_LOGGER.warning("Failed to connect to Sensibo servers.")
|
_LOGGER.warning("Failed to connect to Sensibo servers")
|
||||||
self._available = False
|
self._available = False
|
||||||
|
@ -65,7 +65,7 @@ class SignalNotificationService(BaseNotificationService):
|
|||||||
filenames = data[ATTR_FILENAMES]
|
filenames = data[ATTR_FILENAMES]
|
||||||
if ATTR_FILENAME in data:
|
if ATTR_FILENAME in data:
|
||||||
_LOGGER.warning(
|
_LOGGER.warning(
|
||||||
"The 'attachment' option is deprecated, please replace it with 'attachments'. This option will become invalid in version 0.108."
|
"The 'attachment' option is deprecated, please replace it with 'attachments'. This option will become invalid in version 0.108"
|
||||||
)
|
)
|
||||||
if filenames is None:
|
if filenames is None:
|
||||||
filenames = [data[ATTR_FILENAME]]
|
filenames = [data[ATTR_FILENAME]]
|
||||||
|
@ -524,7 +524,7 @@ class SimpliSafe:
|
|||||||
if isinstance(result, InvalidCredentialsError):
|
if isinstance(result, InvalidCredentialsError):
|
||||||
if self._emergency_refresh_token_used:
|
if self._emergency_refresh_token_used:
|
||||||
_LOGGER.error(
|
_LOGGER.error(
|
||||||
"SimpliSafe authentication disconnected. Please restart HASS."
|
"SimpliSafe authentication disconnected. Please restart HASS"
|
||||||
)
|
)
|
||||||
remove_listener = self._hass.data[DOMAIN][DATA_LISTENER].pop(
|
remove_listener = self._hass.data[DOMAIN][DATA_LISTENER].pop(
|
||||||
self._config_entry.entry_id
|
self._config_entry.entry_id
|
||||||
|
@ -138,7 +138,7 @@ class MailNotificationService(BaseNotificationService):
|
|||||||
except (smtplib.socket.gaierror, ConnectionRefusedError):
|
except (smtplib.socket.gaierror, ConnectionRefusedError):
|
||||||
_LOGGER.exception(
|
_LOGGER.exception(
|
||||||
"SMTP server not found or refused connection (%s:%s). "
|
"SMTP server not found or refused connection (%s:%s). "
|
||||||
"Please check the IP address, hostname, and availability of your SMTP server.",
|
"Please check the IP address, hostname, and availability of your SMTP server",
|
||||||
self._server,
|
self._server,
|
||||||
self._port,
|
self._port,
|
||||||
)
|
)
|
||||||
|
@ -161,7 +161,7 @@ async def async_setup(hass, config):
|
|||||||
"Received unknown intent %s", request["intent"]["intentName"]
|
"Received unknown intent %s", request["intent"]["intentName"]
|
||||||
)
|
)
|
||||||
except intent.IntentError:
|
except intent.IntentError:
|
||||||
_LOGGER.exception("Error while handling intent: %s.", intent_type)
|
_LOGGER.exception("Error while handling intent: %s", intent_type)
|
||||||
|
|
||||||
await hass.components.mqtt.async_subscribe(INTENT_TOPIC, message_received)
|
await hass.components.mqtt.async_subscribe(INTENT_TOPIC, message_received)
|
||||||
|
|
||||||
|
@ -57,7 +57,7 @@ async def async_setup_platform(
|
|||||||
) -> None:
|
) -> None:
|
||||||
"""Set up from legacy configuration file. Obsolete."""
|
"""Set up from legacy configuration file. Obsolete."""
|
||||||
_LOGGER.error(
|
_LOGGER.error(
|
||||||
"Configuring Songpal through media_player platform is no longer supported. Convert to songpal platform or UI configuration."
|
"Configuring Songpal through media_player platform is no longer supported. Convert to songpal platform or UI configuration"
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
@ -75,7 +75,7 @@ async def async_setup_entry(
|
|||||||
): # set timeout to avoid blocking the setup process
|
): # set timeout to avoid blocking the setup process
|
||||||
await device.get_supported_methods()
|
await device.get_supported_methods()
|
||||||
except (SongpalException, asyncio.TimeoutError) as ex:
|
except (SongpalException, asyncio.TimeoutError) as ex:
|
||||||
_LOGGER.warning("[%s(%s)] Unable to connect.", name, endpoint)
|
_LOGGER.warning("[%s(%s)] Unable to connect", name, endpoint)
|
||||||
_LOGGER.debug("Unable to get methods from songpal: %s", ex)
|
_LOGGER.debug("Unable to get methods from songpal: %s", ex)
|
||||||
raise PlatformNotReady
|
raise PlatformNotReady
|
||||||
|
|
||||||
@ -128,7 +128,7 @@ class SongpalEntity(MediaPlayerEntity):
|
|||||||
|
|
||||||
async def async_activate_websocket(self):
|
async def async_activate_websocket(self):
|
||||||
"""Activate websocket for listening if wanted."""
|
"""Activate websocket for listening if wanted."""
|
||||||
_LOGGER.info("Activating websocket connection..")
|
_LOGGER.info("Activating websocket connection")
|
||||||
|
|
||||||
async def _volume_changed(volume: VolumeChange):
|
async def _volume_changed(volume: VolumeChange):
|
||||||
_LOGGER.debug("Volume changed: %s", volume)
|
_LOGGER.debug("Volume changed: %s", volume)
|
||||||
@ -152,7 +152,7 @@ class SongpalEntity(MediaPlayerEntity):
|
|||||||
|
|
||||||
async def _try_reconnect(connect: ConnectChange):
|
async def _try_reconnect(connect: ConnectChange):
|
||||||
_LOGGER.warning(
|
_LOGGER.warning(
|
||||||
"[%s(%s)] Got disconnected, trying to reconnect.",
|
"[%s(%s)] Got disconnected, trying to reconnect",
|
||||||
self.name,
|
self.name,
|
||||||
self._dev.endpoint,
|
self._dev.endpoint,
|
||||||
)
|
)
|
||||||
@ -179,7 +179,7 @@ class SongpalEntity(MediaPlayerEntity):
|
|||||||
|
|
||||||
self.hass.loop.create_task(self._dev.listen_notifications())
|
self.hass.loop.create_task(self._dev.listen_notifications())
|
||||||
_LOGGER.warning(
|
_LOGGER.warning(
|
||||||
"[%s(%s)] Connection reestablished.", self.name, self._dev.endpoint
|
"[%s(%s)] Connection reestablished", self.name, self._dev.endpoint
|
||||||
)
|
)
|
||||||
|
|
||||||
self._dev.on_notification(VolumeChange, _volume_changed)
|
self._dev.on_notification(VolumeChange, _volume_changed)
|
||||||
|
Some files were not shown because too many files have changed in this diff.