Mirror of https://github.com/home-assistant/core.git (synced 2025-07-25 06:07:17 +00:00)
Commit 07ce284acd
@@ -104,7 +104,7 @@ async def async_get_api(hass):
 
 async def async_get_location(hass, api, latitude, longitude):
     """Retrieve pyipma location, location name to be used as the entity name."""
-    with async_timeout.timeout(10):
+    with async_timeout.timeout(30):
         location = await Location.get(api, float(latitude), float(longitude))
 
     _LOGGER.debug(
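Note: the only change in this hunk is raising the timeout around the pyipma location lookup from 10 to 30 seconds. A minimal sketch of the same idea, using asyncio.wait_for instead of async_timeout and a stand-in fetch_location() coroutine (both are assumptions for illustration, not the real pyipma API):

# Illustrative sketch only: bounding a slow lookup with a timeout.
# fetch_location() is a stand-in, not the real pyipma Location.get().
import asyncio


async def fetch_location(latitude, longitude):
    await asyncio.sleep(0.1)  # pretend network I/O
    return {"lat": latitude, "lon": longitude}


async def get_location(latitude, longitude, timeout=30):
    # Raises asyncio.TimeoutError if the lookup takes longer than `timeout` seconds.
    return await asyncio.wait_for(fetch_location(latitude, longitude), timeout)


print(asyncio.run(get_location(38.7, -9.1)))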
@@ -342,7 +342,6 @@ class Recorder(threading.Thread):
         # has changed. This reduces the disk io.
         while True:
             event = self.queue.get()
-
             if event is None:
                 self._close_run()
                 self._close_connection()
@@ -356,7 +355,7 @@ class Recorder(threading.Thread):
                 self.queue.task_done()
                 if self.commit_interval:
                     self._timechanges_seen += 1
-                    if self.commit_interval >= self._timechanges_seen:
+                    if self._timechanges_seen >= self.commit_interval:
                         self._timechanges_seen = 0
                         self._commit_event_session_or_retry()
                 continue
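Note: the comparison in this hunk was inverted. With the old check, commit_interval >= _timechanges_seen is already true on the first time-change event, so the session was committed on essentially every EVENT_TIME_CHANGED regardless of the configured interval; the new check commits only once the counter reaches commit_interval. A standalone sketch of the corrected counting logic (the CommitCounter class is illustrative, not the Recorder itself):

# Commit only every `commit_interval` time-change events, then reset the counter.
class CommitCounter:
    def __init__(self, commit_interval):
        self.commit_interval = commit_interval
        self.timechanges_seen = 0

    def tick(self):
        """Return True when a commit should happen on this time-change event."""
        self.timechanges_seen += 1
        if self.timechanges_seen >= self.commit_interval:
            self.timechanges_seen = 0
            return True
        return False


counter = CommitCounter(commit_interval=5)
# Commits fire on the 5th and 10th tick only.
assert [counter.tick() for _ in range(10)] == [False] * 4 + [True] + [False] * 4 + [True]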
@@ -376,6 +375,9 @@ class Recorder(threading.Thread):
                 self.event_session.flush()
             except (TypeError, ValueError):
                 _LOGGER.warning("Event is not JSON serializable: %s", event)
+            except Exception as err:  # pylint: disable=broad-except
+                # Must catch the exception to prevent the loop from collapsing
+                _LOGGER.exception("Error adding event: %s", err)
 
             if dbevent and event.event_type == EVENT_STATE_CHANGED:
                 try:
@@ -387,6 +389,9 @@ class Recorder(threading.Thread):
                         "State is not JSON serializable: %s",
                         event.data.get("new_state"),
                     )
+                except Exception as err:  # pylint: disable=broad-except
+                    # Must catch the exception to prevent the loop from collapsing
+                    _LOGGER.exception("Error adding state change: %s", err)
 
             # If they do not have a commit interval
             # than we commit right away
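Note: the two "except Exception" blocks added above keep the recorder thread alive when an event or state row cannot be added for an unexpected reason; without them a single bad event would end the "while True" loop. The same log-and-continue pattern in isolation (process_queue and handle are illustrative names, not Home Assistant APIs):

# Generic log-and-continue worker: one malformed item must not kill the consumer loop.
import logging
import queue

_LOGGER = logging.getLogger(__name__)


def process_queue(work_queue: queue.Queue, handle) -> None:
    while True:
        item = work_queue.get()
        if item is None:
            work_queue.task_done()
            return
        try:
            handle(item)
        except Exception as err:  # pylint: disable=broad-except
            # Must catch the exception to prevent the loop from collapsing
            _LOGGER.exception("Error handling %s: %s", item, err)
        finally:
            work_queue.task_done()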
@@ -404,17 +409,26 @@ class Recorder(threading.Thread):
             try:
                 self._commit_event_session()
                 return
 
-            except exc.OperationalError as err:
-                _LOGGER.error(
-                    "Error in database connectivity: %s. " "(retrying in %s seconds)",
-                    err,
-                    self.db_retry_wait,
-                )
+            except (exc.InternalError, exc.OperationalError) as err:
+                if err.connection_invalidated:
+                    _LOGGER.error(
+                        "Database connection invalidated: %s. "
+                        "(retrying in %s seconds)",
+                        err,
+                        self.db_retry_wait,
+                    )
+                else:
+                    _LOGGER.error(
+                        "Error in database connectivity: %s. "
+                        "(retrying in %s seconds)",
+                        err,
+                        self.db_retry_wait,
+                    )
                 tries += 1
 
-            except exc.SQLAlchemyError:
-                _LOGGER.exception("Error saving events")
+            except Exception as err:  # pylint: disable=broad-except
+                # Must catch the exception to prevent the loop from collapsing
+                _LOGGER.exception("Error saving events: %s", err)
                 return
 
         _LOGGER.error(
@@ -423,10 +437,15 @@ class Recorder(threading.Thread):
         )
         try:
             self.event_session.close()
-        except exc.SQLAlchemyError:
-            _LOGGER.exception("Failed to close event session.")
+        except Exception as err:  # pylint: disable=broad-except
+            # Must catch the exception to prevent the loop from collapsing
+            _LOGGER.exception("Error while closing event session: %s", err)
 
-        self.event_session = self.get_session()
+        try:
+            self.event_session = self.get_session()
+        except Exception as err:  # pylint: disable=broad-except
+            # Must catch the exception to prevent the loop from collapsing
+            _LOGGER.exception("Error while creating new event session: %s", err)
 
     def _commit_event_session(self):
         try:
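Note: the retry path now catches exc.InternalError as well as exc.OperationalError, logs separately when SQLAlchemy has marked the connection as invalidated (err.connection_invalidated), falls back to a broad except so the thread survives unexpected errors, and wraps both closing and recreating the session. A compact sketch of that retry shape, assuming SQLAlchemy is installed (commit_with_retry and its parameters are illustrative, not the actual Recorder method):

# Retry-with-wait sketch around a commit() callable that may raise SQLAlchemy errors.
import logging
import time

from sqlalchemy import exc

_LOGGER = logging.getLogger(__name__)


def commit_with_retry(commit, max_retries=10, retry_wait=3):
    tries = 1
    while tries <= max_retries:
        if tries != 1:
            time.sleep(retry_wait)
        try:
            commit()
            return True
        except (exc.InternalError, exc.OperationalError) as err:
            # connection_invalidated means the driver reported the connection is unusable.
            kind = "invalidated" if err.connection_invalidated else "connectivity"
            _LOGGER.error("Database %s error: %s (retrying in %s seconds)", kind, err, retry_wait)
            tries += 1
        except Exception:  # pylint: disable=broad-except
            # Unexpected errors are not retried; log and give up on this batch.
            _LOGGER.exception("Unexpected error saving events")
            return False
    _LOGGER.error("Could not save after %d tries. Giving up", max_retries)
    return False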
@@ -83,6 +83,8 @@ async def async_setup_entry(hass, config_entry):
     controller_id = get_controller_id_from_config_entry(config_entry)
     hass.data[DOMAIN][controller_id] = controller
 
+    hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, controller.shutdown)
+
     if controller.mac is None:
         return True
 
@@ -96,8 +98,6 @@ async def async_setup_entry(hass, config_entry):
         # sw_version=config.raw['swversion'],
     )
 
-    hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, controller.shutdown)
-
     return True
 
 
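Note: judging by get_controller_id_from_config_entry this appears to be the UniFi controller setup. The EVENT_HOMEASSISTANT_STOP listener is now registered before the early "return True" taken when the controller has no MAC address, and the later, now-redundant registration is removed, so the shutdown hook is in place on every path. A toy sketch of the ordering concern (all names hypothetical):

# Register the cleanup callback before any early return, so it is always in place.
def setup_controller(register_stop_listener, shutdown, mac=None):
    register_stop_listener(shutdown)  # moved above the early return, as in the hunk
    if mac is None:
        # Nothing to record in the device registry, but shutdown is already hooked up.
        return True
    # ... device registry bookkeeping that needs the MAC would go here ...
    return True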
@@ -64,7 +64,7 @@ class VelbusLight(VelbusEntity, Light):
     @property
     def brightness(self):
         """Return the brightness of the light."""
-        return self._module.get_dimmer_state(self._channel)
+        return int((self._module.get_dimmer_state(self._channel) * 255) / 100)
 
     def turn_on(self, **kwargs):
         """Instruct the Velbus light to turn on."""
@@ -80,10 +80,15 @@ class VelbusLight(VelbusEntity, Light):
             attr, *args = "set_led_state", self._channel, "on"
         else:
             if ATTR_BRIGHTNESS in kwargs:
+                # Make sure a low but non-zero value is not rounded down to zero
+                if kwargs[ATTR_BRIGHTNESS] == 0:
+                    brightness = 0
+                else:
+                    brightness = max(int((kwargs[ATTR_BRIGHTNESS] * 100) / 255), 1)
                 attr, *args = (
                     "set_dimmer_state",
                     self._channel,
-                    kwargs[ATTR_BRIGHTNESS],
+                    brightness,
                     kwargs.get(ATTR_TRANSITION, 0),
                 )
             else:
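Note: Home Assistant expresses brightness on a 0-255 scale while the Velbus dimmer works with 0-100. The property now scales the module's 0-100 level up to 0-255, and turn_on scales the requested 0-255 value down to 0-100, clamping any non-zero request to at least 1 so a low but non-zero value is not rounded down to zero. The arithmetic in isolation (helper names are illustrative):

# Round-trip of the scaling used above (0-255 Home Assistant <-> 0-100 Velbus).
def ha_to_velbus(brightness_255):
    if brightness_255 == 0:
        return 0
    return max(int((brightness_255 * 100) / 255), 1)


def velbus_to_ha(level_100):
    return int((level_100 * 255) / 100)


assert ha_to_velbus(255) == 100
assert ha_to_velbus(1) == 1      # clamped up: int(100 / 255) alone would be 0
assert velbus_to_ha(100) == 255
assert velbus_to_ha(50) == 127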
@@ -93,14 +93,10 @@ async def async_setup_entry(hass, config_entry):
     """
 
     zha_data = hass.data.setdefault(DATA_ZHA, {})
-    zha_data[DATA_ZHA_PLATFORM_LOADED] = {}
     config = zha_data.get(DATA_ZHA_CONFIG, {})
 
-    zha_data[DATA_ZHA_DISPATCHERS] = []
     for component in COMPONENTS:
-        zha_data[component] = []
-        coro = hass.config_entries.async_forward_entry_setup(config_entry, component)
-        zha_data[DATA_ZHA_PLATFORM_LOADED][component] = hass.async_create_task(coro)
+        zha_data.setdefault(component, [])
 
     if config.get(CONF_ENABLE_QUIRKS, True):
         # needs to be done here so that the ZHA module is finished loading
@@ -110,6 +106,12 @@ async def async_setup_entry(hass, config_entry):
     zha_gateway = ZHAGateway(hass, config, config_entry)
     await zha_gateway.async_initialize()
 
+    zha_data[DATA_ZHA_DISPATCHERS] = []
+    zha_data[DATA_ZHA_PLATFORM_LOADED] = []
+    for component in COMPONENTS:
+        coro = hass.config_entries.async_forward_entry_setup(config_entry, component)
+        zha_data[DATA_ZHA_PLATFORM_LOADED].append(hass.async_create_task(coro))
+
     device_registry = await hass.helpers.device_registry.async_get_registry()
     device_registry.async_get_or_create(
         config_entry_id=config_entry.entry_id,
@@ -128,7 +130,7 @@ async def async_setup_entry(hass, config_entry):
         await zha_data[DATA_ZHA_GATEWAY].async_update_device_storage()
 
     hass.bus.async_listen_once(ha_const.EVENT_HOMEASSISTANT_STOP, async_zha_shutdown)
-    hass.async_create_task(async_load_entities(hass, config_entry))
+    asyncio.create_task(async_load_entities(hass, config_entry))
     return True
 
 
@@ -153,11 +155,7 @@ async def async_load_entities(
 ) -> None:
     """Load entities after integration was setup."""
     await hass.data[DATA_ZHA][DATA_ZHA_GATEWAY].async_prepare_entities()
-    to_setup = [
-        hass.data[DATA_ZHA][DATA_ZHA_PLATFORM_LOADED][comp]
-        for comp in COMPONENTS
-        if hass.data[DATA_ZHA][comp]
-    ]
+    to_setup = hass.data[DATA_ZHA][DATA_ZHA_PLATFORM_LOADED]
     results = await asyncio.gather(*to_setup, return_exceptions=True)
     for res in results:
         if isinstance(res, Exception):
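Note: the ZHA setup now collects the platform-forwarding tasks in a flat list, and async_load_entities awaits them with asyncio.gather(..., return_exceptions=True), so one failing platform neither cancels the others nor raises out of the loader; failures surface as exception objects in the results. The pattern in isolation (load_platforms and the demo coroutines are illustrative names, not Home Assistant APIs):

# Gather setup tasks without letting one failure cancel the rest.
import asyncio
import logging

_LOGGER = logging.getLogger(__name__)


async def load_platforms(coros):
    tasks = [asyncio.ensure_future(coro) for coro in coros]
    results = await asyncio.gather(*tasks, return_exceptions=True)
    for result in results:
        if isinstance(result, Exception):
            _LOGGER.warning("Platform setup failed: %s", result)
    return results


async def demo():
    async def ok():
        return "loaded"

    async def broken():
        raise RuntimeError("boom")

    # Returns ["loaded", RuntimeError("boom")] instead of raising.
    return await load_platforms([ok(), broken()])


asyncio.run(demo())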
@@ -1,7 +1,7 @@
 """Constants used by Home Assistant components."""
 MAJOR_VERSION = 0
 MINOR_VERSION = 107
-PATCH_VERSION = "6"
+PATCH_VERSION = "7"
 __short_version__ = f"{MAJOR_VERSION}.{MINOR_VERSION}"
 __version__ = f"{__short_version__}.{PATCH_VERSION}"
 REQUIRED_PYTHON_VER = (3, 7, 0)