Mirror of https://github.com/home-assistant/core.git (synced 2025-09-19 18:09:48 +00:00)

Compare commits: cloud_enab ... 2024.11.3
126 Commits
Commits (SHA1):

0644d782cd
4ef50ffd88
bfcd4194f3
2f05240e4c
44ad8081a3
780eaa8379
75dcdfb087
c88ff2ca44
402c668f05
93b4570c04
50a610914b
8db18181d0
335124acc6
24ccb9b894
a75ce850b8
4753510ace
fc607ea7e5
477141c22a
aaa36adbcc
9447180c04
6853234f9d
6944ba0333
04bc041174
a024acf096
5b1aca53ac
a588ced2e3
876112ff54
a48f88033d
5deba1766e
4863243f5a
847afabed1
ac270e19be
ca40b96a89
045e285bfe
8d6f2e78f5
9e4d26137e
f74bfdc974
1cabcdf257
c6931d656e
942830505a
880f28e28a
f406ffa75a
0d695c843f
5f09eb97e1
6d561ca373
663ebe199d
8b9c4db2b3
e478b9b599
5acdf58976
6d861e7f47
281a8eda31
1bc005d0d4
95d60987ab
53e38454b2
876b86cd3d
cb104935ea
4c24e26926
4b13d8bc47
433e3718f8
1e3c2c0631
3a2f996c13
e4cb3c67d9
8a22433168
0976476d16
28f46a0f88
8b173656e7
08f6f2759b
f4798d27c7
103a84b4bd
4d3502e061
79329e16cf
929164251a
300724443a
70ef3a355c
83162c1461
a12c76dbdd
9292b6da3d
8d05183de2
a86ff41bbc
ce92f3de44
465d8b2ee2
218eedfd93
afec354b84
282f92e5f3
f6cd74e2d7
f821ddeab8
d408b7ac62
83baa1a788
07a8cf14cd
9f447af468
c399d8f571
4ea9574229
592b8ed0a0
6b91c0810a
9579e4a9c1
7f4f90f06d
701a901fe4
f914642e31
32dc9fc238
b27e0f9fe7
f040060b3c
cc45793896
ab0556227c
c16fb9c93d
da8fc7a2fc
864b4d86f2
1bb0ced7c0
2fe4fc908b
aa2c3b046f
22822cb8aa
b71383c997
b0b163df48
35539dbf60
09d03e8edf
46e37f3bdd
0206c149cf
29620ef977
9012b113ad
5f5f6cc3d5
7ff501f3ec
b0f110b9ab
2692bc23a5
1beac5f0f8
ec7ba1b7fd
5bd1b0dd9c
a2ad4c9cfd
@@ -90,7 +90,7 @@ repos:
pass_filenames: false
language: script
types: [text]
files: ^(script/hassfest/metadata\.py|homeassistant/const\.py$|pyproject\.toml)$
files: ^(script/hassfest/metadata\.py|homeassistant/const\.py$|pyproject\.toml|homeassistant/components/go2rtc/const\.py)$
- id: hassfest-mypy-config
name: hassfest-mypy-config
entry: script/run-in-env.sh python3 -m script.hassfest -p mypy_config

@@ -55,7 +55,7 @@ RUN \
"armv7") go2rtc_suffix='arm' ;; \
*) go2rtc_suffix=${BUILD_ARCH} ;; \
esac \
&& curl -L https://github.com/AlexxIT/go2rtc/releases/download/v1.9.6/go2rtc_linux_${go2rtc_suffix} --output /bin/go2rtc \
&& curl -L https://github.com/AlexxIT/go2rtc/releases/download/v1.9.7/go2rtc_linux_${go2rtc_suffix} --output /bin/go2rtc \
&& chmod +x /bin/go2rtc \
# Verify go2rtc can be executed
&& go2rtc --version

@@ -6,5 +6,5 @@
"documentation": "https://www.home-assistant.io/integrations/agent_dvr",
"iot_class": "local_polling",
"loggers": ["agent"],
"requirements": ["agent-py==0.0.23"]
"requirements": ["agent-py==0.0.24"]
}

@@ -7,5 +7,5 @@
"integration_type": "hub",
"iot_class": "local_polling",
"loggers": ["aioairq"],
"requirements": ["aioairq==0.3.2"]
"requirements": ["aioairq==0.4.3"]
}

@@ -11,5 +11,5 @@
"documentation": "https://www.home-assistant.io/integrations/airzone",
"iot_class": "local_polling",
"loggers": ["aioairzone"],
"requirements": ["aioairzone==0.9.5"]
"requirements": ["aioairzone==0.9.7"]
}

@@ -6,7 +6,7 @@ import asyncio
from datetime import timedelta
from functools import partial
import logging
from typing import Any, Final, final
from typing import TYPE_CHECKING, Any, Final, final
from propcache import cached_property
import voluptuous as vol

@@ -221,9 +221,15 @@ class AlarmControlPanelEntity(Entity, cached_properties=CACHED_PROPERTIES_WITH_A
@property
def state(self) -> str | None:
"""Return the current state."""
if (alarm_state := self.alarm_state) is None:
return None
return alarm_state
if (alarm_state := self.alarm_state) is not None:
return alarm_state
if self._attr_state is not None:
# Backwards compatibility for integrations that set state directly
# Should be removed in 2025.11
if TYPE_CHECKING:
assert isinstance(self._attr_state, str)
return self._attr_state
return None
@cached_property
def alarm_state(self) -> AlarmControlPanelState | None:

@@ -38,6 +38,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ApSystemsConfigEntry) ->
ip_address=entry.data[CONF_IP_ADDRESS],
port=entry.data.get(CONF_PORT, DEFAULT_PORT),
timeout=8,
enable_debounce=True,
)
coordinator = ApSystemsDataCoordinator(hass, api)
await coordinator.async_config_entry_first_refresh()

@@ -6,5 +6,5 @@
"documentation": "https://www.home-assistant.io/integrations/apsystems",
"integration_type": "device",
"iot_class": "local_polling",
"requirements": ["apsystems-ez1==2.2.1"]
"requirements": ["apsystems-ez1==2.4.0"]
}

@@ -5,6 +5,7 @@ from __future__ import annotations
from typing import Any
from aiohttp.client_exceptions import ClientConnectionError
from APsystemsEZ1 import InverterReturnedError
from homeassistant.components.switch import SwitchDeviceClass, SwitchEntity
from homeassistant.core import HomeAssistant

@@ -40,7 +41,7 @@ class ApSystemsInverterSwitch(ApSystemsEntity, SwitchEntity):
"""Update switch status and availability."""
try:
status = await self._api.get_device_power_status()
except (TimeoutError, ClientConnectionError):
except (TimeoutError, ClientConnectionError, InverterReturnedError):
self._attr_available = False
else:
self._attr_available = True

@@ -770,7 +770,7 @@ class BluesoundPlayer(MediaPlayerEntity):
async def async_set_volume_level(self, volume: float) -> None:
"""Send volume_up command to media player."""
volume = int(volume * 100)
volume = int(round(volume * 100))
volume = min(100, volume)
volume = max(0, volume)
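The Bluesound change above swaps plain truncation for rounding before the value is clamped. A minimal standalone illustration (plain Python, independent of the integration) of why `int()` alone can drop a volume step on some float levels:

```python
# int() truncates toward zero, so binary floating-point error can lose a step;
# rounding first snaps to the nearest integer.
volume = 0.57  # volume_level is passed as a float between 0 and 1

print(volume * 100)              # 56.99999999999999
print(int(volume * 100))         # 56  <- old behaviour, off by one
print(int(round(volume * 100)))  # 57  <- new behaviour

# Clamping afterwards is unchanged:
level = max(0, min(100, int(round(volume * 100))))
```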
@@ -16,7 +16,7 @@
"requirements": [
"bleak==0.22.3",
"bleak-retry-connector==3.6.0",
"bluetooth-adapters==0.20.0",
"bluetooth-adapters==0.20.2",
"bluetooth-auto-recovery==1.4.2",
"bluetooth-data-tools==1.20.0",
"dbus-fast==2.24.3",

@@ -7,6 +7,6 @@
"integration_type": "device",
"iot_class": "local_push",
"loggers": ["aiostreammagic"],
"requirements": ["aiostreammagic==2.8.4"],
"requirements": ["aiostreammagic==2.8.5"],
"zeroconf": ["_stream-magic._tcp.local.", "_smoip._tcp.local."]
}

@@ -51,8 +51,13 @@ CONTROL_ENTITIES: tuple[CambridgeAudioSelectEntityDescription, ...] = (
CambridgeAudioSelectEntityDescription(
key="display_brightness",
translation_key="display_brightness",
options=[x.value for x in DisplayBrightness],
options=[
DisplayBrightness.BRIGHT.value,
DisplayBrightness.DIM.value,
DisplayBrightness.OFF.value,
],
entity_category=EntityCategory.CONFIG,
load_fn=lambda client: client.display.brightness != DisplayBrightness.NONE,
value_fn=lambda client: client.display.brightness,
set_value_fn=lambda client, value: client.set_display_brightness(
DisplayBrightness(value)

@@ -53,7 +53,7 @@
},
"view_path": {
"name": "View path",
"description": "The path of the dashboard view to show."
"description": "The URL path of the dashboard view to show."
}
}
}

@@ -168,7 +168,7 @@ class ElectricityMapsConfigFlow(ConfigFlow, domain=DOMAIN):
)
return self.async_create_entry(
title=get_extra_name(data) or "CO2 Signal",
title=get_extra_name(data) or "Electricity Maps",
data=data,
)

@@ -294,7 +294,7 @@ class DefaultAgent(ConversationEntity):
self.hass, language, DOMAIN, [DOMAIN]
)
response_text = translations.get(
f"component.{DOMAIN}.agent.done", "Done"
f"component.{DOMAIN}.conversation.agent.done", "Done"
)
response.async_set_speech(response_text)

@@ -6,5 +6,5 @@
"documentation": "https://www.home-assistant.io/integrations/conversation",
"integration_type": "system",
"quality_scale": "internal",
"requirements": ["hassil==1.7.4", "home-assistant-intents==2024.11.4"]
"requirements": ["hassil==1.7.4", "home-assistant-intents==2024.11.6"]
}

@@ -68,7 +68,7 @@
}
},
"alarm_arm_home_instant": {
"name": "Alarm are home instant",
"name": "Alarm arm home instant",
"description": "Arms the ElkM1 in home instant mode.",
"fields": {
"code": {

@@ -6,7 +6,7 @@
"documentation": "https://www.home-assistant.io/integrations/elmax",
"iot_class": "cloud_polling",
"loggers": ["elmax_api"],
"requirements": ["elmax-api==0.0.5"],
"requirements": ["elmax-api==0.0.6.1"],
"zeroconf": [
{
"type": "_elmax-ssl._tcp.local."

@@ -6,5 +6,5 @@
"iot_class": "local_push",
"loggers": ["sense_energy"],
"quality_scale": "internal",
"requirements": ["sense-energy==0.13.2"]
"requirements": ["sense-energy==0.13.4"]
}

@@ -179,6 +179,9 @@ class FFmpegConvertResponse(web.StreamResponse):
# Remove metadata and cover art
command_args.extend(["-map_metadata", "-1", "-vn"])
# disable progress stats on stderr
command_args.append("-nostats")
# Output to stdout
command_args.append("pipe:")

@@ -118,7 +118,7 @@
},
"service_calls_not_allowed": {
"title": "{name} is not permitted to perform Home Assistant actions",
"description": "The ESPHome device attempted to perform a Home Assistant action, but this functionality is not enabled.\n\nIf you trust this device and want to allow it to perfom Home Assistant action, you can enable this functionality in the options flow."
"description": "The ESPHome device attempted to perform a Home Assistant action, but this functionality is not enabled.\n\nIf you trust this device and want to allow it to perform Home Assistant action, you can enable this functionality in the options flow."
}
}
}

@@ -2,6 +2,7 @@
from __future__ import annotations
import html
import logging
from typing import Any
import urllib.error

@@ -106,7 +107,7 @@ class FeedReaderConfigFlow(ConfigFlow, domain=DOMAIN):
return self.abort_on_import_error(user_input[CONF_URL], "url_error")
return self.show_user_form(user_input, {"base": "url_error"})
feed_title = feed["feed"]["title"]
feed_title = html.unescape(feed["feed"]["title"])
return self.async_create_entry(
title=feed_title,

@@ -4,6 +4,7 @@ from __future__ import annotations
from calendar import timegm
from datetime import datetime
import html
from logging import getLogger
from time import gmtime, struct_time
from typing import TYPE_CHECKING

@@ -102,7 +103,8 @@ class FeedReaderCoordinator(
"""Set up the feed manager."""
feed = await self._async_fetch_feed()
self.logger.debug("Feed data fetched from %s : %s", self.url, feed["feed"])
self.feed_author = feed["feed"].get("author")
if feed_author := feed["feed"].get("author"):
self.feed_author = html.unescape(feed_author)
self.feed_version = feedparser.api.SUPPORTED_VERSIONS.get(feed["version"])
self._feed = feed

@@ -2,6 +2,7 @@
from __future__ import annotations
import html
import logging
from feedparser import FeedParserDict

@@ -76,15 +77,22 @@ class FeedReaderEvent(CoordinatorEntity[FeedReaderCoordinator], EventEntity):
# so we always take the first entry in list, since we only care about the latest entry
feed_data: FeedParserDict = data[0]
if description := feed_data.get("description"):
description = html.unescape(description)
if title := feed_data.get("title"):
title = html.unescape(title)
if content := feed_data.get("content"):
if isinstance(content, list) and isinstance(content[0], dict):
content = content[0].get("value")
content = html.unescape(content)
self._trigger_event(
EVENT_FEEDREADER,
{
ATTR_DESCRIPTION: feed_data.get("description"),
ATTR_TITLE: feed_data.get("title"),
ATTR_DESCRIPTION: description,
ATTR_TITLE: title,
ATTR_LINK: feed_data.get("link"),
ATTR_CONTENT: content,
},
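The feedreader changes above route feed titles, authors, descriptions and content through `html.unescape` before they are stored or attached to events. A quick standalone illustration of what that standard-library call does:

```python
import html

# Feeds often deliver HTML entities inside plain-text fields.
print(html.unescape("Ben &amp; Jerry&#39;s"))   # Ben & Jerry's
print(html.unescape("5 &lt; 10 &gt; 2"))        # 5 < 10 > 2

# Already-clean text passes through untouched.
print(html.unescape("No entities here"))        # No entities here
```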
@@ -4,5 +4,5 @@
"codeowners": [],
"documentation": "https://www.home-assistant.io/integrations/ffmpeg",
"integration_type": "system",
"requirements": ["ha-ffmpeg==3.2.1"]
"requirements": ["ha-ffmpeg==3.2.2"]
}

@@ -69,37 +69,29 @@ class FibaroCover(FibaroEntity, CoverEntity):
# so if it is missing we have a device which supports open / close only
return not self.fibaro_device.value.has_value
@property
def current_cover_position(self) -> int | None:
"""Return current position of cover. 0 is closed, 100 is open."""
return self.bound(self.level)
def update(self) -> None:
"""Update the state."""
super().update()
@property
def current_cover_tilt_position(self) -> int | None:
"""Return the current tilt position for venetian blinds."""
return self.bound(self.level2)
self._attr_current_cover_position = self.bound(self.level)
self._attr_current_cover_tilt_position = self.bound(self.level2)
@property
def is_opening(self) -> bool | None:
"""Return if the cover is opening or not.
device_state = self.fibaro_device.state
Be aware that this property is only available for some modern devices.
For example the Fibaro Roller Shutter 4 reports this correctly.
"""
if self.fibaro_device.state.has_value:
return self.fibaro_device.state.str_value().lower() == "opening"
return None
# Be aware that opening and closing is only available for some modern
# devices.
# For example the Fibaro Roller Shutter 4 reports this correctly.
if device_state.has_value:
self._attr_is_opening = device_state.str_value().lower() == "opening"
self._attr_is_closing = device_state.str_value().lower() == "closing"
@property
def is_closing(self) -> bool | None:
"""Return if the cover is closing or not.
Be aware that this property is only available for some modern devices.
For example the Fibaro Roller Shutter 4 reports this correctly.
"""
if self.fibaro_device.state.has_value:
return self.fibaro_device.state.str_value().lower() == "closing"
return None
closed: bool | None = None
if self._is_open_close_only():
if device_state.has_value and device_state.str_value().lower() != "unknown":
closed = device_state.str_value().lower() == "closed"
elif self.current_cover_position is not None:
closed = self.current_cover_position == 0
self._attr_is_closed = closed
def set_cover_position(self, **kwargs: Any) -> None:
"""Move the cover to a specific position."""

@@ -109,19 +101,6 @@ class FibaroCover(FibaroEntity, CoverEntity):
"""Move the cover to a specific position."""
self.set_level2(cast(int, kwargs.get(ATTR_TILT_POSITION)))
@property
def is_closed(self) -> bool | None:
"""Return if the cover is closed."""
if self._is_open_close_only():
state = self.fibaro_device.state
if not state.has_value or state.str_value().lower() == "unknown":
return None
return state.str_value().lower() == "closed"
if self.current_cover_position is None:
return None
return self.current_cover_position == 0
def open_cover(self, **kwargs: Any) -> None:
"""Open the cover."""
self.action("open")

@@ -18,7 +18,7 @@
},
"data_description": {
"file_path": "The local file path to retrieve the sensor value from",
"value_template": "A template to render the the sensors value based on the file content",
"value_template": "A template to render the sensors value based on the file content",
"unit_of_measurement": "Unit of measurement for the sensor"
}
},

@@ -20,5 +20,5 @@
"documentation": "https://www.home-assistant.io/integrations/frontend",
"integration_type": "system",
"quality_scale": "internal",
"requirements": ["home-assistant-frontend==20241106.0"]
"requirements": ["home-assistant-frontend==20241106.2"]
}

@@ -282,7 +282,7 @@ async def async_test_stream(
return {CONF_STREAM_SOURCE: "timeout"}
await stream.stop()
except StreamWorkerError as err:
return {CONF_STREAM_SOURCE: str(err)}
return {CONF_STREAM_SOURCE: "unknown_with_details", "error_details": str(err)}
except PermissionError:
return {CONF_STREAM_SOURCE: "stream_not_permitted"}
except OSError as err:

@@ -339,6 +339,7 @@ class GenericIPCamConfigFlow(ConfigFlow, domain=DOMAIN):
) -> ConfigFlowResult:
"""Handle the start of the config flow."""
errors = {}
description_placeholders = {}
hass = self.hass
if user_input:
# Secondary validation because serialised vol can't seem to handle this complexity:

@@ -372,6 +373,8 @@ class GenericIPCamConfigFlow(ConfigFlow, domain=DOMAIN):
# temporary preview for user to check the image
self.preview_cam = user_input
return await self.async_step_user_confirm_still()
if "error_details" in errors:
description_placeholders["error"] = errors.pop("error_details")
elif self.user_input:
user_input = self.user_input
else:

@@ -379,6 +382,7 @@ class GenericIPCamConfigFlow(ConfigFlow, domain=DOMAIN):
return self.async_show_form(
step_id="user",
data_schema=build_schema(user_input),
description_placeholders=description_placeholders,
errors=errors,
)

@@ -3,6 +3,7 @@
"config": {
"error": {
"unknown": "[%key:common::config_flow::error::unknown%]",
"unknown_with_details": "An unknown error occurred: {error}",
"already_exists": "A camera with these URL settings already exists.",
"unable_still_load": "Unable to load valid image from still image URL (e.g. invalid host, URL or authentication failure). Review log for more info.",
"unable_still_load_auth": "Unable to load valid image from still image URL: The camera may require a user name and password, or they are not correct.",

@@ -3,7 +3,7 @@
"config": {
"step": {
"user": {
"title": "Add generic thermostat helper",
"title": "Add generic thermostat",
"description": "Create a climate entity that controls the temperature via a switch and sensor.",
"data": {
"ac_mode": "Cooling mode",

@@ -17,8 +17,8 @@
"data_description": {
"ac_mode": "Set the actuator specified to be treated as a cooling device instead of a heating device.",
"heater": "Switch entity used to cool or heat depending on A/C mode.",
"target_sensor": "Temperature sensor that reflect the current temperature.",
"min_cycle_duration": "Set a minimum amount of time that the switch specified must be in its current state prior to being switched either off or on. This option will be ignored if the keep alive option is set.",
"target_sensor": "Temperature sensor that reflects the current temperature.",
"min_cycle_duration": "Set a minimum amount of time that the switch specified must be in its current state prior to being switched either off or on.",
"cold_tolerance": "Minimum amount of difference between the temperature read by the temperature sensor the target temperature that must change prior to being switched on. For example, if the target temperature is 25 and the tolerance is 0.5 the heater will start when the sensor equals or goes below 24.5.",
"hot_tolerance": "Minimum amount of difference between the temperature read by the temperature sensor the target temperature that must change prior to being switched off. For example, if the target temperature is 25 and the tolerance is 0.5 the heater will stop when the sensor equals or goes above 25.5."
}

@@ -4,6 +4,7 @@ import logging
import shutil
from aiohttp.client_exceptions import ClientConnectionError, ServerConnectionError
from awesomeversion import AwesomeVersion
from go2rtc_client import Go2RtcRestClient
from go2rtc_client.exceptions import Go2RtcClientError, Go2RtcVersionError
from go2rtc_client.ws import (

@@ -32,13 +33,23 @@ from homeassistant.config_entries import SOURCE_SYSTEM, ConfigEntry
from homeassistant.const import CONF_URL, EVENT_HOMEASSISTANT_STOP
from homeassistant.core import Event, HomeAssistant, callback
from homeassistant.exceptions import ConfigEntryNotReady
from homeassistant.helpers import config_validation as cv, discovery_flow
from homeassistant.helpers import (
config_validation as cv,
discovery_flow,
issue_registry as ir,
)
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.typing import ConfigType
from homeassistant.util.hass_dict import HassKey
from homeassistant.util.package import is_docker_env
from .const import CONF_DEBUG_UI, DEBUG_UI_URL_MESSAGE, DOMAIN, HA_MANAGED_URL
from .const import (
CONF_DEBUG_UI,
DEBUG_UI_URL_MESSAGE,
DOMAIN,
HA_MANAGED_URL,
RECOMMENDED_VERSION,
)
from .server import Server
_LOGGER = logging.getLogger(__name__)

@@ -147,7 +158,21 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
# Validate the server URL
try:
client = Go2RtcRestClient(async_get_clientsession(hass), url)
await client.validate_server_version()
version = await client.validate_server_version()
if version < AwesomeVersion(RECOMMENDED_VERSION):
ir.async_create_issue(
hass,
DOMAIN,
"recommended_version",
is_fixable=False,
is_persistent=False,
severity=ir.IssueSeverity.WARNING,
translation_key="recommended_version",
translation_placeholders={
"recommended_version": RECOMMENDED_VERSION,
"current_version": str(version),
},
)
except Go2RtcClientError as err:
if isinstance(err.__cause__, _RETRYABLE_ERRORS):
raise ConfigEntryNotReady(

@@ -224,7 +249,13 @@ class WebRTCProvider(CameraWebRTCProvider):
):
await self._rest_client.streams.add(
camera.entity_id,
[stream_source, f"ffmpeg:{camera.entity_id}#audio=opus"],
[
stream_source,
# We are setting any ffmpeg rtsp related logs to debug
# Connection problems to the camera will be logged by the first stream
# Therefore setting it to debug will not hide any important logs
f"ffmpeg:{camera.entity_id}#audio=opus#query=log_level=debug",
],
)
@callback
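The go2rtc setup above compares the reported server version against `RECOMMENDED_VERSION` with awesomeversion, which compares version segments numerically rather than as plain strings. A small sketch of that behaviour (assuming only the `awesomeversion` package):

```python
from awesomeversion import AwesomeVersion

RECOMMENDED_VERSION = "1.9.7"

# Segment-wise comparison: 1.9.6 is older than 1.9.7 ...
assert AwesomeVersion("1.9.6") < AwesomeVersion(RECOMMENDED_VERSION)

# ... and 1.10.0 is newer, even though "1.10.0" < "1.9.7" as plain strings.
assert AwesomeVersion("1.10.0") > AwesomeVersion(RECOMMENDED_VERSION)
assert not ("1.10.0" > "1.9.7")  # lexicographic comparison gets this wrong
```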
@@ -6,3 +6,4 @@ CONF_DEBUG_UI = "debug_ui"
DEBUG_UI_URL_MESSAGE = "Url and debug_ui cannot be set at the same time."
HA_MANAGED_API_PORT = 11984
HA_MANAGED_URL = f"http://localhost:{HA_MANAGED_API_PORT}/"
RECOMMENDED_VERSION = "1.9.7"

@@ -7,6 +7,6 @@
"documentation": "https://www.home-assistant.io/integrations/go2rtc",
"integration_type": "system",
"iot_class": "local_polling",
"requirements": ["go2rtc-client==0.1.0"],
"requirements": ["go2rtc-client==0.1.1"],
"single_config_entry": true
}

@@ -24,14 +24,15 @@ _RESPAWN_COOLDOWN = 1
# Default configuration for HA
# - Api is listening only on localhost
# - Disable rtsp listener
# - Enable rtsp for localhost only as ffmpeg needs it
# - Clear default ice servers
_GO2RTC_CONFIG_FORMAT = r"""
_GO2RTC_CONFIG_FORMAT = r"""# This file is managed by Home Assistant
# Do not edit it manually
api:
listen: "{api_ip}:{api_port}"
rtsp:
# ffmpeg needs rtsp for opus audio transcoding
listen: "127.0.0.1:18554"
webrtc:

homeassistant/components/go2rtc/strings.json (new file)
@@ -0,0 +1,8 @@
{
"issues": {
"recommended_version": {
"title": "Outdated go2rtc server detected",
"description": "We detected that you are using an outdated go2rtc server version. For the best experience, we recommend updating the go2rtc server to version `{recommended_version}`.\nCurrently you are using version `{current_version}`."
}
}
}

@@ -87,8 +87,8 @@
}
},
"create_event": {
"name": "Creates event",
"description": "Add a new calendar event.",
"name": "Create event",
"description": "Adds a new calendar event.",
"fields": {
"summary": {
"name": "Summary",

@@ -274,7 +274,7 @@
"fields": {
"addon": {
"name": "Add-on",
"description": "The add-on slug."
"description": "The add-on to start."
}
}
},

@@ -284,17 +284,17 @@
"fields": {
"addon": {
"name": "[%key:component::hassio::services::addon_start::fields::addon::name%]",
"description": "[%key:component::hassio::services::addon_start::fields::addon::description%]"
"description": "The add-on to restart."
}
}
},
"addon_stdin": {
"name": "Write data to add-on stdin.",
"description": "Writes data to add-on stdin.",
"description": "Writes data to the add-on's standard input.",
"fields": {
"addon": {
"name": "[%key:component::hassio::services::addon_start::fields::addon::name%]",
"description": "[%key:component::hassio::services::addon_start::fields::addon::description%]"
"description": "The add-on to write to."
}
}
},

@@ -304,7 +304,7 @@
"fields": {
"addon": {
"name": "[%key:component::hassio::services::addon_start::fields::addon::name%]",
"description": "[%key:component::hassio::services::addon_start::fields::addon::description%]"
"description": "The add-on to stop."
}
}
},

@@ -314,7 +314,7 @@
"fields": {
"addon": {
"name": "[%key:component::hassio::services::addon_start::fields::addon::name%]",
"description": "[%key:component::hassio::services::addon_start::fields::addon::description%]"
"description": "The add-on to update."
}
}
},

@@ -5,5 +5,5 @@
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/holiday",
"iot_class": "local_polling",
"requirements": ["holidays==0.60", "babel==2.15.0"]
"requirements": ["holidays==0.61", "babel==2.15.0"]
}

@@ -18,6 +18,8 @@ from homeassistant.const import (
SERVICE_ALARM_ARM_HOME,
SERVICE_ALARM_ARM_NIGHT,
SERVICE_ALARM_DISARM,
STATE_UNAVAILABLE,
STATE_UNKNOWN,
)
from homeassistant.core import State, callback

@@ -152,12 +154,12 @@ class SecuritySystem(HomeAccessory):
@callback
def async_update_state(self, new_state: State) -> None:
"""Update security state after state changed."""
hass_state = None
if new_state and new_state.state == "None":
# Bail out early for no state
hass_state: str | AlarmControlPanelState = new_state.state
if hass_state in {"None", STATE_UNKNOWN, STATE_UNAVAILABLE}:
# Bail out early for no state, unknown or unavailable
return
if new_state and new_state.state is not None:
hass_state = AlarmControlPanelState(new_state.state)
if hass_state is not None:
hass_state = AlarmControlPanelState(hass_state)
if (
hass_state
and (current_state := HASS_TO_HOMEKIT_CURRENT.get(hass_state)) is not None

@@ -7,5 +7,5 @@
"iot_class": "cloud_push",
"loggers": ["homematicip"],
"quality_scale": "silver",
"requirements": ["homematicip==1.1.2"]
"requirements": ["homematicip==1.1.3"]
}

@@ -130,10 +130,15 @@ class HueSceneEntity(HueSceneEntityBase):
@property
def is_dynamic(self) -> bool:
"""Return if this scene has a dynamic color palette."""
if self.resource.palette.color and len(self.resource.palette.color) > 1:
if (
self.resource.palette
and self.resource.palette.color
and len(self.resource.palette.color) > 1
):
return True
if (
self.resource.palette.color_temperature
self.resource.palette
and self.resource.palette.color_temperature
and len(self.resource.palette.color_temperature) > 1
):
return True

@@ -95,7 +95,7 @@ class PowerViewNumber(ShadeEntity, RestoreNumber):
self.entity_description = description
self._attr_unique_id = f"{self._attr_unique_id}_{description.key}"
def set_native_value(self, value: float) -> None:
async def async_set_native_value(self, value: float) -> None:
"""Update the current value."""
self._attr_native_value = value
self.entity_description.store_value_fn(self.coordinator, self._shade.id, value)

@@ -8,6 +8,7 @@ from aioautomower.exceptions import (
ApiException,
AuthException,
HusqvarnaWSServerHandshakeError,
TimeoutException,
)
from aioautomower.model import MowerAttributes
from aioautomower.session import AutomowerSession

@@ -22,6 +23,7 @@ from .const import DOMAIN
_LOGGER = logging.getLogger(__name__)
MAX_WS_RECONNECT_TIME = 600
SCAN_INTERVAL = timedelta(minutes=8)
DEFAULT_RECONNECT_TIME = 2 # Define a default reconnect time
class AutomowerDataUpdateCoordinator(DataUpdateCoordinator[dict[str, MowerAttributes]]):

@@ -40,8 +42,8 @@ class AutomowerDataUpdateCoordinator(DataUpdateCoordinator[dict[str, MowerAttrib
update_interval=SCAN_INTERVAL,
)
self.api = api
self.ws_connected: bool = False
self.reconnect_time = DEFAULT_RECONNECT_TIME
async def _async_update_data(self) -> dict[str, MowerAttributes]:
"""Subscribe for websocket and poll data from the API."""

@@ -66,24 +68,28 @@ class AutomowerDataUpdateCoordinator(DataUpdateCoordinator[dict[str, MowerAttrib
hass: HomeAssistant,
entry: ConfigEntry,
automower_client: AutomowerSession,
reconnect_time: int = 2,
) -> None:
"""Listen with the client."""
try:
await automower_client.auth.websocket_connect()
reconnect_time = 2
# Reset reconnect time after successful connection
self.reconnect_time = DEFAULT_RECONNECT_TIME
await automower_client.start_listening()
except HusqvarnaWSServerHandshakeError as err:
_LOGGER.debug(
"Failed to connect to websocket. Trying to reconnect: %s", err
"Failed to connect to websocket. Trying to reconnect: %s",
err,
)
except TimeoutException as err:
_LOGGER.debug(
"Failed to listen to websocket. Trying to reconnect: %s",
err,
)
if not hass.is_stopping:
await asyncio.sleep(reconnect_time)
reconnect_time = min(reconnect_time * 2, MAX_WS_RECONNECT_TIME)
await self.client_listen(
hass=hass,
entry=entry,
automower_client=automower_client,
reconnect_time=reconnect_time,
await asyncio.sleep(self.reconnect_time)
self.reconnect_time = min(self.reconnect_time * 2, MAX_WS_RECONNECT_TIME)
entry.async_create_background_task(
hass,
self.client_listen(hass, entry, automower_client),
"reconnect_task",
)
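The Husqvarna Automower change above moves the reconnect delay onto the coordinator, doubles it after every failed websocket attempt, caps it at `MAX_WS_RECONNECT_TIME`, and resets it to `DEFAULT_RECONNECT_TIME` once a connection succeeds. A standalone sketch of the resulting wait schedule, using only the constants shown above:

```python
MAX_WS_RECONNECT_TIME = 600  # seconds
DEFAULT_RECONNECT_TIME = 2

delays = []
delay = DEFAULT_RECONNECT_TIME
for _ in range(12):  # twelve consecutive failures
    delays.append(delay)                           # sleep this long ...
    delay = min(delay * 2, MAX_WS_RECONNECT_TIME)  # ... then double, capped at 600 s

print(delays)
# [2, 4, 8, 16, 32, 64, 128, 256, 512, 600, 600, 600]
```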
@@ -32,6 +32,10 @@
}
},
"options": {
"error": {
"cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
"invalid_auth": "[%key:common::config_flow::error::invalid_auth%]"
},
"step": {
"init": {
"title": "Options",

@@ -7,8 +7,12 @@ from homeassistant.const import CONF_PASSWORD, CONF_USERNAME, Platform
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryAuthFailed
from .const import DOMAIN, SCAN_INTERVAL
from .coordinator import HydrawiseDataUpdateCoordinator
from .const import DOMAIN
from .coordinator import (
HydrawiseMainDataUpdateCoordinator,
HydrawiseUpdateCoordinators,
HydrawiseWaterUseDataUpdateCoordinator,
)
PLATFORMS: list[Platform] = [
Platform.BINARY_SENSOR,

@@ -29,9 +33,18 @@ async def async_setup_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> b
auth.Auth(config_entry.data[CONF_USERNAME], config_entry.data[CONF_PASSWORD])
)
coordinator = HydrawiseDataUpdateCoordinator(hass, hydrawise, SCAN_INTERVAL)
await coordinator.async_config_entry_first_refresh()
hass.data.setdefault(DOMAIN, {})[config_entry.entry_id] = coordinator
main_coordinator = HydrawiseMainDataUpdateCoordinator(hass, hydrawise)
await main_coordinator.async_config_entry_first_refresh()
water_use_coordinator = HydrawiseWaterUseDataUpdateCoordinator(
hass, hydrawise, main_coordinator
)
await water_use_coordinator.async_config_entry_first_refresh()
hass.data.setdefault(DOMAIN, {})[config_entry.entry_id] = (
HydrawiseUpdateCoordinators(
main=main_coordinator,
water_use=water_use_coordinator,
)
)
await hass.config_entries.async_forward_entry_setups(config_entry, PLATFORMS)
return True

@@ -21,7 +21,7 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback
from homeassistant.helpers.typing import VolDictType
from .const import DOMAIN, SERVICE_RESUME, SERVICE_START_WATERING, SERVICE_SUSPEND
from .coordinator import HydrawiseDataUpdateCoordinator
from .coordinator import HydrawiseUpdateCoordinators
from .entity import HydrawiseEntity

@@ -81,18 +81,16 @@ async def async_setup_entry(
async_add_entities: AddEntitiesCallback,
) -> None:
"""Set up the Hydrawise binary_sensor platform."""
coordinator: HydrawiseDataUpdateCoordinator = hass.data[DOMAIN][
config_entry.entry_id
]
coordinators: HydrawiseUpdateCoordinators = hass.data[DOMAIN][config_entry.entry_id]
entities: list[HydrawiseBinarySensor] = []
for controller in coordinator.data.controllers.values():
for controller in coordinators.main.data.controllers.values():
entities.extend(
HydrawiseBinarySensor(coordinator, description, controller)
HydrawiseBinarySensor(coordinators.main, description, controller)
for description in CONTROLLER_BINARY_SENSORS
)
entities.extend(
HydrawiseBinarySensor(
coordinator,
coordinators.main,
description,
controller,
sensor_id=sensor.id,

@@ -103,7 +101,7 @@ async def async_setup_entry(
)
entities.extend(
HydrawiseZoneBinarySensor(
coordinator, description, controller, zone_id=zone.id
coordinators.main, description, controller, zone_id=zone.id
)
for zone in controller.zones
for description in ZONE_BINARY_SENSORS

@@ -10,7 +10,8 @@ DEFAULT_WATERING_TIME = timedelta(minutes=15)
MANUFACTURER = "Hydrawise"
SCAN_INTERVAL = timedelta(seconds=60)
MAIN_SCAN_INTERVAL = timedelta(seconds=60)
WATER_USE_SCAN_INTERVAL = timedelta(minutes=60)
SIGNAL_UPDATE_HYDRAWISE = "hydrawise_update"

@@ -2,8 +2,7 @@
from __future__ import annotations
from dataclasses import dataclass
from datetime import timedelta
from dataclasses import dataclass, field
from pydrawise import Hydrawise
from pydrawise.schema import Controller, ControllerWaterUseSummary, Sensor, User, Zone

@@ -12,7 +11,7 @@ from homeassistant.core import HomeAssistant
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator
from homeassistant.util.dt import now
from .const import DOMAIN, LOGGER
from .const import DOMAIN, LOGGER, MAIN_SCAN_INTERVAL, WATER_USE_SCAN_INTERVAL
@dataclass

@@ -20,22 +19,39 @@ class HydrawiseData:
"""Container for data fetched from the Hydrawise API."""
user: User
controllers: dict[int, Controller]
zones: dict[int, Zone]
sensors: dict[int, Sensor]
daily_water_summary: dict[int, ControllerWaterUseSummary]
controllers: dict[int, Controller] = field(default_factory=dict)
zones: dict[int, Zone] = field(default_factory=dict)
sensors: dict[int, Sensor] = field(default_factory=dict)
daily_water_summary: dict[int, ControllerWaterUseSummary] = field(
default_factory=dict
)
@dataclass
class HydrawiseUpdateCoordinators:
"""Container for all Hydrawise DataUpdateCoordinator instances."""
main: HydrawiseMainDataUpdateCoordinator
water_use: HydrawiseWaterUseDataUpdateCoordinator
class HydrawiseDataUpdateCoordinator(DataUpdateCoordinator[HydrawiseData]):
"""The Hydrawise Data Update Coordinator."""
"""Base class for Hydrawise Data Update Coordinators."""
api: Hydrawise
def __init__(
self, hass: HomeAssistant, api: Hydrawise, scan_interval: timedelta
) -> None:
class HydrawiseMainDataUpdateCoordinator(HydrawiseDataUpdateCoordinator):
"""The main Hydrawise Data Update Coordinator.
This fetches the primary state data for Hydrawise controllers and zones
at a relatively frequent interval so that the primary functions of the
integration are updated in a timely manner.
"""
def __init__(self, hass: HomeAssistant, api: Hydrawise) -> None:
"""Initialize HydrawiseDataUpdateCoordinator."""
super().__init__(hass, LOGGER, name=DOMAIN, update_interval=scan_interval)
super().__init__(hass, LOGGER, name=DOMAIN, update_interval=MAIN_SCAN_INTERVAL)
self.api = api
async def _async_update_data(self) -> HydrawiseData:

@@ -43,28 +59,56 @@ class HydrawiseDataUpdateCoordinator(DataUpdateCoordinator[HydrawiseData]):
# Don't fetch zones. We'll fetch them for each controller later.
# This is to prevent 502 errors in some cases.
# See: https://github.com/home-assistant/core/issues/120128
user = await self.api.get_user(fetch_zones=False)
controllers = {}
zones = {}
sensors = {}
daily_water_summary: dict[int, ControllerWaterUseSummary] = {}
for controller in user.controllers:
controllers[controller.id] = controller
data = HydrawiseData(user=await self.api.get_user(fetch_zones=False))
for controller in data.user.controllers:
data.controllers[controller.id] = controller
controller.zones = await self.api.get_zones(controller)
for zone in controller.zones:
zones[zone.id] = zone
data.zones[zone.id] = zone
for sensor in controller.sensors:
sensors[sensor.id] = sensor
data.sensors[sensor.id] = sensor
return data
class HydrawiseWaterUseDataUpdateCoordinator(HydrawiseDataUpdateCoordinator):
"""Data Update Coordinator for Hydrawise Water Use.
This fetches data that is more expensive for the Hydrawise API to compute
at a less frequent interval as to not overload the Hydrawise servers.
"""
_main_coordinator: HydrawiseMainDataUpdateCoordinator
def __init__(
self,
hass: HomeAssistant,
api: Hydrawise,
main_coordinator: HydrawiseMainDataUpdateCoordinator,
) -> None:
"""Initialize HydrawiseWaterUseDataUpdateCoordinator."""
super().__init__(
hass,
LOGGER,
name=f"{DOMAIN} water use",
update_interval=WATER_USE_SCAN_INTERVAL,
)
self.api = api
self._main_coordinator = main_coordinator
async def _async_update_data(self) -> HydrawiseData:
"""Fetch the latest data from Hydrawise."""
daily_water_summary: dict[int, ControllerWaterUseSummary] = {}
for controller in self._main_coordinator.data.controllers.values():
daily_water_summary[controller.id] = await self.api.get_water_use_summary(
controller,
now().replace(hour=0, minute=0, second=0, microsecond=0),
now(),
)
main_data = self._main_coordinator.data
return HydrawiseData(
user=user,
controllers=controllers,
zones=zones,
sensors=sensors,
user=main_data.user,
controllers=main_data.controllers,
zones=main_data.zones,
sensors=main_data.sensors,
daily_water_summary=daily_water_summary,
)
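The `HydrawiseData` change above gives every mapping field a `field(default_factory=dict)` default so the container can be created with only `user` and filled in incrementally; dataclasses reject plain mutable defaults such as `= {}`. A minimal standalone sketch of the pattern (the `Inventory` class here is illustrative, not part of the integration):

```python
from dataclasses import dataclass, field


@dataclass
class Inventory:
    name: str
    # items: dict = {}  would raise ValueError: mutable default is not allowed
    items: dict[int, str] = field(default_factory=dict)


inv = Inventory(name="main")  # starts with an empty, per-instance dict
inv.items[1] = "controller"
print(inv.items)              # {1: 'controller'}
```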
@@ -19,7 +19,7 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback
|
||||
from homeassistant.util import dt as dt_util
|
||||
|
||||
from .const import DOMAIN
|
||||
from .coordinator import HydrawiseDataUpdateCoordinator
|
||||
from .coordinator import HydrawiseUpdateCoordinators
|
||||
from .entity import HydrawiseEntity
|
||||
|
||||
|
||||
@@ -92,7 +92,7 @@ def _get_controller_daily_total_water_use(sensor: HydrawiseSensor) -> float | No
|
||||
return daily_water_summary.total_use
|
||||
|
||||
|
||||
CONTROLLER_SENSORS: tuple[HydrawiseSensorEntityDescription, ...] = (
|
||||
WATER_USE_CONTROLLER_SENSORS: tuple[HydrawiseSensorEntityDescription, ...] = (
|
||||
HydrawiseSensorEntityDescription(
|
||||
key="daily_active_water_time",
|
||||
translation_key="daily_active_water_time",
|
||||
@@ -103,6 +103,16 @@ CONTROLLER_SENSORS: tuple[HydrawiseSensorEntityDescription, ...] = (
|
||||
)
|
||||
|
||||
|
||||
WATER_USE_ZONE_SENSORS: tuple[HydrawiseSensorEntityDescription, ...] = (
|
||||
HydrawiseSensorEntityDescription(
|
||||
key="daily_active_water_time",
|
||||
translation_key="daily_active_water_time",
|
||||
device_class=SensorDeviceClass.DURATION,
|
||||
native_unit_of_measurement=UnitOfTime.SECONDS,
|
||||
value_fn=_get_zone_daily_active_water_time,
|
||||
),
|
||||
)
|
||||
|
||||
FLOW_CONTROLLER_SENSORS: tuple[HydrawiseSensorEntityDescription, ...] = (
|
||||
HydrawiseSensorEntityDescription(
|
||||
key="daily_total_water_use",
|
||||
@@ -150,13 +160,6 @@ ZONE_SENSORS: tuple[HydrawiseSensorEntityDescription, ...] = (
|
||||
native_unit_of_measurement=UnitOfTime.MINUTES,
|
||||
value_fn=_get_zone_watering_time,
|
||||
),
|
||||
HydrawiseSensorEntityDescription(
|
||||
key="daily_active_water_time",
|
||||
translation_key="daily_active_water_time",
|
||||
device_class=SensorDeviceClass.DURATION,
|
||||
native_unit_of_measurement=UnitOfTime.SECONDS,
|
||||
value_fn=_get_zone_daily_active_water_time,
|
||||
),
|
||||
)
|
||||
|
||||
FLOW_MEASUREMENT_KEYS = [x.key for x in FLOW_CONTROLLER_SENSORS]
|
||||
@@ -168,29 +171,37 @@ async def async_setup_entry(
|
||||
async_add_entities: AddEntitiesCallback,
|
||||
) -> None:
|
||||
"""Set up the Hydrawise sensor platform."""
|
||||
coordinator: HydrawiseDataUpdateCoordinator = hass.data[DOMAIN][
|
||||
config_entry.entry_id
|
||||
]
|
||||
coordinators: HydrawiseUpdateCoordinators = hass.data[DOMAIN][config_entry.entry_id]
|
||||
entities: list[HydrawiseSensor] = []
|
||||
for controller in coordinator.data.controllers.values():
|
||||
for controller in coordinators.main.data.controllers.values():
|
||||
entities.extend(
|
||||
HydrawiseSensor(coordinator, description, controller)
|
||||
for description in CONTROLLER_SENSORS
|
||||
HydrawiseSensor(coordinators.water_use, description, controller)
|
||||
for description in WATER_USE_CONTROLLER_SENSORS
|
||||
)
|
||||
entities.extend(
|
||||
HydrawiseSensor(coordinator, description, controller, zone_id=zone.id)
|
||||
HydrawiseSensor(
|
||||
coordinators.water_use, description, controller, zone_id=zone.id
|
||||
)
|
||||
for zone in controller.zones
|
||||
for description in WATER_USE_ZONE_SENSORS
|
||||
)
|
||||
entities.extend(
|
||||
HydrawiseSensor(coordinators.main, description, controller, zone_id=zone.id)
|
||||
for zone in controller.zones
|
||||
for description in ZONE_SENSORS
|
||||
)
|
||||
if coordinator.data.daily_water_summary[controller.id].total_use is not None:
|
||||
if (
|
||||
coordinators.water_use.data.daily_water_summary[controller.id].total_use
|
||||
is not None
|
||||
):
|
||||
# we have a flow sensor for this controller
|
||||
entities.extend(
|
||||
HydrawiseSensor(coordinator, description, controller)
|
||||
HydrawiseSensor(coordinators.water_use, description, controller)
|
||||
for description in FLOW_CONTROLLER_SENSORS
|
||||
)
|
||||
entities.extend(
|
||||
HydrawiseSensor(
|
||||
coordinator,
|
||||
coordinators.water_use,
|
||||
description,
|
||||
controller,
|
||||
zone_id=zone.id,
|
||||
|
@@ -20,7 +20,7 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback
|
||||
from homeassistant.util import dt as dt_util
|
||||
|
||||
from .const import DEFAULT_WATERING_TIME, DOMAIN
|
||||
from .coordinator import HydrawiseDataUpdateCoordinator
|
||||
from .coordinator import HydrawiseUpdateCoordinators
|
||||
from .entity import HydrawiseEntity
|
||||
|
||||
|
||||
@@ -66,12 +66,10 @@ async def async_setup_entry(
|
||||
async_add_entities: AddEntitiesCallback,
|
||||
) -> None:
|
||||
"""Set up the Hydrawise switch platform."""
|
||||
coordinator: HydrawiseDataUpdateCoordinator = hass.data[DOMAIN][
|
||||
config_entry.entry_id
|
||||
]
|
||||
coordinators: HydrawiseUpdateCoordinators = hass.data[DOMAIN][config_entry.entry_id]
|
||||
async_add_entities(
|
||||
HydrawiseSwitch(coordinator, description, controller, zone_id=zone.id)
|
||||
for controller in coordinator.data.controllers.values()
|
||||
HydrawiseSwitch(coordinators.main, description, controller, zone_id=zone.id)
|
||||
for controller in coordinators.main.data.controllers.values()
|
||||
for zone in controller.zones
|
||||
for description in SWITCH_TYPES
|
||||
)
|
||||
|
@@ -17,7 +17,7 @@ from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.entity_platform import AddEntitiesCallback
|
||||
|
||||
from .const import DOMAIN
|
||||
from .coordinator import HydrawiseDataUpdateCoordinator
|
||||
from .coordinator import HydrawiseUpdateCoordinators
|
||||
from .entity import HydrawiseEntity
|
||||
|
||||
VALVE_TYPES: tuple[ValveEntityDescription, ...] = (
|
||||
@@ -34,12 +34,10 @@ async def async_setup_entry(
|
||||
async_add_entities: AddEntitiesCallback,
|
||||
) -> None:
|
||||
"""Set up the Hydrawise valve platform."""
|
||||
coordinator: HydrawiseDataUpdateCoordinator = hass.data[DOMAIN][
|
||||
config_entry.entry_id
|
||||
]
|
||||
coordinators: HydrawiseUpdateCoordinators = hass.data[DOMAIN][config_entry.entry_id]
|
||||
async_add_entities(
|
||||
HydrawiseValve(coordinator, description, controller, zone_id=zone.id)
|
||||
for controller in coordinator.data.controllers.values()
|
||||
HydrawiseValve(coordinators.main, description, controller, zone_id=zone.id)
|
||||
for controller in coordinators.main.data.controllers.values()
|
||||
for zone in controller.zones
|
||||
for description in VALVE_TYPES
|
||||
)
|
||||
|
@@ -104,7 +104,7 @@
|
||||
"services": {
|
||||
"fetch": {
|
||||
"name": "Fetch message",
|
||||
"description": "Fetch the email message from the server.",
|
||||
"description": "Fetch an email message from the server.",
|
||||
"fields": {
|
||||
"entry": {
|
||||
"name": "Entry",
|
||||
|
@@ -112,7 +112,7 @@
|
||||
"services": {
|
||||
"add_all_link": {
|
||||
"name": "Add all link",
|
||||
"description": "Tells the Insteom Modem (IM) start All-Linking mode. Once the IM is in All-Linking mode, press the link button on the device to complete All-Linking.",
|
||||
"description": "Tells the Insteon Modem (IM) start All-Linking mode. Once the IM is in All-Linking mode, press the link button on the device to complete All-Linking.",
|
||||
"fields": {
|
||||
"group": {
|
||||
"name": "Group",
|
||||
|
@@ -25,7 +25,8 @@
|
||||
},
|
||||
"abort": {
|
||||
"already_configured": "[%key:common::config_flow::abort::already_configured_device%]",
|
||||
"cannot_connect": "[%key:common::config_flow::error::cannot_connect%]"
|
||||
"cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
|
||||
"invalid_host": "[%key:common::config_flow::error::invalid_host%]"
|
||||
}
|
||||
},
|
||||
"device_automation": {
|
||||
|
@@ -72,8 +72,11 @@ class ThinQFanEntity(ThinQEntity, FanEntity):
|
||||
super().__init__(coordinator, entity_description, property_id)
|
||||
|
||||
self._ordered_named_fan_speeds = []
|
||||
self._attr_supported_features |= FanEntityFeature.SET_SPEED
|
||||
|
||||
self._attr_supported_features = (
|
||||
FanEntityFeature.SET_SPEED
|
||||
| FanEntityFeature.TURN_ON
|
||||
| FanEntityFeature.TURN_OFF
|
||||
)
|
||||
if (fan_modes := self.data.fan_modes) is not None:
|
||||
self._attr_speed_count = len(fan_modes)
|
||||
if self.speed_count == 4:
|
||||
@@ -98,7 +101,7 @@ class ThinQFanEntity(ThinQEntity, FanEntity):
|
||||
self._attr_percentage = 0
|
||||
|
||||
_LOGGER.debug(
|
||||
"[%s:%s] update status: %s -> %s (percntage=%s)",
|
||||
"[%s:%s] update status: %s -> %s (percentage=%s)",
|
||||
self.coordinator.device_name,
|
||||
self.property_id,
|
||||
self.data.is_on,
|
||||
@@ -120,7 +123,7 @@ class ThinQFanEntity(ThinQEntity, FanEntity):
|
||||
return
|
||||
|
||||
_LOGGER.debug(
|
||||
"[%s:%s] async_set_percentage. percntage=%s, value=%s",
|
||||
"[%s:%s] async_set_percentage. percentage=%s, value=%s",
|
||||
self.coordinator.device_name,
|
||||
self.property_id,
|
||||
percentage,
|
||||
|
@@ -7,6 +7,6 @@
|
||||
"integration_type": "hub",
|
||||
"iot_class": "local_polling",
|
||||
"loggers": ["linkplay"],
|
||||
"requirements": ["python-linkplay==0.0.17"],
|
||||
"requirements": ["python-linkplay==0.0.20"],
|
||||
"zeroconf": ["_linkplay._tcp.local."]
|
||||
}
|
||||
|
@@ -9,7 +9,7 @@ from typing import Any, Concatenate
|
||||
from linkplay.bridge import LinkPlayBridge
|
||||
from linkplay.consts import EqualizerMode, LoopMode, PlayingMode, PlayingStatus
|
||||
from linkplay.controller import LinkPlayController, LinkPlayMultiroom
|
||||
from linkplay.exceptions import LinkPlayException, LinkPlayRequestException
|
||||
from linkplay.exceptions import LinkPlayRequestException
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.components import media_source
|
||||
@@ -69,6 +69,8 @@ SOURCE_MAP: dict[PlayingMode, str] = {
|
||||
PlayingMode.FM: "FM Radio",
|
||||
PlayingMode.RCA: "RCA",
|
||||
PlayingMode.UDISK: "USB",
|
||||
PlayingMode.SPOTIFY: "Spotify",
|
||||
PlayingMode.TIDAL: "Tidal",
|
||||
PlayingMode.FOLLOWER: "Follower",
|
||||
}
|
||||
|
||||
@@ -201,9 +203,8 @@ class LinkPlayMediaPlayerEntity(MediaPlayerEntity):
|
||||
try:
|
||||
await self._bridge.player.update_status()
|
||||
self._update_properties()
|
||||
except LinkPlayException:
|
||||
except LinkPlayRequestException:
|
||||
self._attr_available = False
|
||||
raise
|
||||
|
||||
@exception_wrap
|
||||
async def async_select_source(self, source: str) -> None:
|
||||
@@ -292,7 +293,15 @@ class LinkPlayMediaPlayerEntity(MediaPlayerEntity):
|
||||
@exception_wrap
|
||||
async def async_play_preset(self, preset_number: int) -> None:
|
||||
"""Play preset number."""
|
||||
await self._bridge.player.play_preset(preset_number)
|
||||
try:
|
||||
await self._bridge.player.play_preset(preset_number)
|
||||
except ValueError as err:
|
||||
raise HomeAssistantError(err) from err
|
||||
|
||||
@exception_wrap
|
||||
async def async_media_seek(self, position: float) -> None:
|
||||
"""Seek to a position."""
|
||||
await self._bridge.player.seek(round(position))
|
||||
|
||||
@exception_wrap
|
||||
async def async_join_players(self, group_members: list[str]) -> None:
|
||||
@@ -379,9 +388,9 @@ class LinkPlayMediaPlayerEntity(MediaPlayerEntity):
|
||||
)
|
||||
|
||||
self._attr_source = SOURCE_MAP.get(self._bridge.player.play_mode, "other")
|
||||
self._attr_media_position = self._bridge.player.current_position / 1000
|
||||
self._attr_media_position = self._bridge.player.current_position_in_seconds
|
||||
self._attr_media_position_updated_at = utcnow()
|
||||
self._attr_media_duration = self._bridge.player.total_length / 1000
|
||||
self._attr_media_duration = self._bridge.player.total_length_in_seconds
|
||||
self._attr_media_artist = self._bridge.player.artist
|
||||
self._attr_media_title = self._bridge.player.title
|
||||
self._attr_media_album_name = self._bridge.player.album
|
||||
|
@@ -11,5 +11,4 @@ play_preset:
|
||||
selector:
|
||||
number:
|
||||
min: 1
|
||||
max: 10
|
||||
mode: box
|
||||
|
@@ -28,12 +28,12 @@
},
"abort": {
"already_configured": "[%key:common::config_flow::abort::already_configured_device%]",
"reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]"
"reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]",
"set_up_new_device": "A new device was detected. Please set it up as a new entity instead of reconfiguring."
},
"error": {
"cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
"no_mac": "A MAC address was not found. It is required to identify the device. Please ensure your device is connectable.",
"set_up_new_device": "A new device was detected. Please set it up as a new entity instead of reconfiguring."
"no_mac": "A MAC address was not found. It is required to identify the device. Please ensure your device is connectable."
}
},
"entity": {
@@ -33,7 +33,7 @@ from homeassistant.const import (
CONF_PROTOCOL,
CONF_USERNAME,
)
from homeassistant.core import callback
from homeassistant.core import HomeAssistant, callback
from homeassistant.data_entry_flow import AbortFlow
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.hassio import is_hassio
@@ -737,6 +737,16 @@ class MQTTOptionsFlowHandler(OptionsFlow):
)


async def _get_uploaded_file(hass: HomeAssistant, id: str) -> str:
"""Get file content from uploaded file."""

def _proces_uploaded_file() -> str:
with process_uploaded_file(hass, id) as file_path:
return file_path.read_text(encoding=DEFAULT_ENCODING)

return await hass.async_add_executor_job(_proces_uploaded_file)


async def async_get_broker_settings(
flow: ConfigFlow | OptionsFlow,
fields: OrderedDict[Any, Any],
@@ -795,8 +805,7 @@ async def async_get_broker_settings(
return False
certificate_id: str | None = user_input.get(CONF_CERTIFICATE)
if certificate_id:
with process_uploaded_file(hass, certificate_id) as certificate_file:
certificate = certificate_file.read_text(encoding=DEFAULT_ENCODING)
certificate = await _get_uploaded_file(hass, certificate_id)

# Return to form for file upload CA cert or client cert and key
if (
@@ -812,15 +821,9 @@ async def async_get_broker_settings(
return False

if client_certificate_id:
with process_uploaded_file(
hass, client_certificate_id
) as client_certificate_file:
client_certificate = client_certificate_file.read_text(
encoding=DEFAULT_ENCODING
)
client_certificate = await _get_uploaded_file(hass, client_certificate_id)
if client_key_id:
with process_uploaded_file(hass, client_key_id) as key_file:
client_key = key_file.read_text(encoding=DEFAULT_ENCODING)
client_key = await _get_uploaded_file(hass, client_key_id)

certificate_data: dict[str, Any] = {}
if certificate:
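The new _get_uploaded_file helper above moves a blocking certificate read off the event loop. Below is a minimal, self-contained sketch of that offload pattern written with plain asyncio instead of Home Assistant's hass.async_add_executor_job; the function name, file path, and encoding are illustrative only.

import asyncio
from pathlib import Path

async def read_uploaded_file(path: Path, encoding: str = "utf-8") -> str:
    """Read a file without blocking the event loop."""

    def _read() -> str:
        # Blocking disk I/O runs in the default thread pool executor.
        return path.read_text(encoding=encoding)

    loop = asyncio.get_running_loop()
    return await loop.run_in_executor(None, _read)

# Example usage: asyncio.run(read_uploaded_file(Path("client.crt")))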
@@ -12,11 +12,12 @@ from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddEntitiesCallback

from . import MyUplinkConfigEntry, MyUplinkDataCoordinator
from .const import F_SERIES
from .entity import MyUplinkEntity, MyUplinkSystemEntity
from .helpers import find_matching_platform
from .helpers import find_matching_platform, transform_model_series

CATEGORY_BASED_DESCRIPTIONS: dict[str, dict[str, BinarySensorEntityDescription]] = {
"F730": {
F_SERIES: {
"43161": BinarySensorEntityDescription(
key="elect_add",
translation_key="elect_add",
@@ -50,6 +51,7 @@ def get_description(device_point: DevicePoint) -> BinarySensorEntityDescription
2. Default to None
"""
prefix, _, _ = device_point.category.partition(" ")
prefix = transform_model_series(prefix)
return CATEGORY_BASED_DESCRIPTIONS.get(prefix, {}).get(device_point.parameter_id)
@@ -6,3 +6,5 @@ API_ENDPOINT = "https://api.myuplink.com"
OAUTH2_AUTHORIZE = "https://api.myuplink.com/oauth/authorize"
OAUTH2_TOKEN = "https://api.myuplink.com/oauth/token"
OAUTH2_SCOPES = ["WRITESYSTEM", "READSYSTEM", "offline_access"]

F_SERIES = "f-series"
@@ -6,6 +6,8 @@ from homeassistant.components.number import NumberEntityDescription
from homeassistant.components.sensor import SensorEntityDescription
from homeassistant.const import Platform

from .const import F_SERIES


def find_matching_platform(
device_point: DevicePoint,
@@ -86,8 +88,9 @@ PARAMETER_ID_TO_EXCLUDE_F730 = (
"47941",
"47975",
"48009",
"48042",
"48072",
"48442",
"49909",
"50113",
)

@@ -110,7 +113,7 @@ def skip_entity(model: str, device_point: DevicePoint) -> bool:
):
return False
return True
if "F730" in model:
if model.lower().startswith("f"):
# Entity names containing weekdays are used for advanced scheduling in the
# heat pump and should not be exposed in the integration
if any(d in device_point.parameter_name.lower() for d in WEEKDAYS):
@@ -118,3 +121,10 @@ def skip_entity(model: str, device_point: DevicePoint) -> bool:
if device_point.parameter_id in PARAMETER_ID_TO_EXCLUDE_F730:
return True
return False


def transform_model_series(prefix: str) -> str:
"""Remap all F-series models."""
if prefix.lower().startswith("f"):
return F_SERIES
return prefix
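A small runnable sketch of how the transform_model_series helper added above collapses every F-series model prefix onto the shared F_SERIES key before the description lookup; the lookup table contents here are illustrative, not the integration's real descriptions.

F_SERIES = "f-series"

def transform_model_series(prefix: str) -> str:
    """Remap all F-series model prefixes (F730, F750, ...) onto one lookup key."""
    if prefix.lower().startswith("f"):
        return F_SERIES
    return prefix

# Illustrative lookup table keyed by the transformed prefix.
CATEGORY_BASED_DESCRIPTIONS = {F_SERIES: {"43161": "elect_add"}}

prefix, _, _ = "F750 heat pump".partition(" ")
prefix = transform_model_series(prefix)
assert CATEGORY_BASED_DESCRIPTIONS.get(prefix, {}).get("43161") == "elect_add"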
@@ -10,8 +10,9 @@ from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers.entity_platform import AddEntitiesCallback

from . import MyUplinkConfigEntry, MyUplinkDataCoordinator
from .const import F_SERIES
from .entity import MyUplinkEntity
from .helpers import find_matching_platform, skip_entity
from .helpers import find_matching_platform, skip_entity, transform_model_series

DEVICE_POINT_UNIT_DESCRIPTIONS: dict[str, NumberEntityDescription] = {
"DM": NumberEntityDescription(
@@ -22,7 +23,7 @@ DEVICE_POINT_UNIT_DESCRIPTIONS: dict[str, NumberEntityDescription] = {
}

CATEGORY_BASED_DESCRIPTIONS: dict[str, dict[str, NumberEntityDescription]] = {
"F730": {
F_SERIES: {
"40940": NumberEntityDescription(
key="degree_minutes",
translation_key="degree_minutes",
@@ -48,6 +49,7 @@ def get_description(device_point: DevicePoint) -> NumberEntityDescription | None
3. Default to None
"""
prefix, _, _ = device_point.category.partition(" ")
prefix = transform_model_series(prefix)
description = CATEGORY_BASED_DESCRIPTIONS.get(prefix, {}).get(
device_point.parameter_id
)
@@ -25,8 +25,9 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback
from homeassistant.helpers.typing import StateType

from . import MyUplinkConfigEntry, MyUplinkDataCoordinator
from .const import F_SERIES
from .entity import MyUplinkEntity
from .helpers import find_matching_platform, skip_entity
from .helpers import find_matching_platform, skip_entity, transform_model_series

DEVICE_POINT_UNIT_DESCRIPTIONS: dict[str, SensorEntityDescription] = {
"°C": SensorEntityDescription(
@@ -139,7 +140,7 @@ DEVICE_POINT_UNIT_DESCRIPTIONS: dict[str, SensorEntityDescription] = {
MARKER_FOR_UNKNOWN_VALUE = -32768

CATEGORY_BASED_DESCRIPTIONS: dict[str, dict[str, SensorEntityDescription]] = {
"F730": {
F_SERIES: {
"43108": SensorEntityDescription(
key="fan_mode",
translation_key="fan_mode",
@@ -200,6 +201,7 @@ def get_description(device_point: DevicePoint) -> SensorEntityDescription | None
"""
description = None
prefix, _, _ = device_point.category.partition(" ")
prefix = transform_model_series(prefix)
description = CATEGORY_BASED_DESCRIPTIONS.get(prefix, {}).get(
device_point.parameter_id
)
@@ -1,6 +1,6 @@
{
"application_credentials": {
"description": "Follow the [instructions]({more_info_url}) to give Home Assistant access to your myUplink account. You also need to create application credentials linked to your account:\n1. Go to [Applications at myUplink developer site]({create_creds_url}) and get credentials from an existing application or select **Create New Application**.\n1. Set appropriate Application name and Description\n2. Enter `{callback_url}` as Callback Url"
"description": "Follow the [instructions]({more_info_url}) to give Home Assistant access to your myUplink account. You also need to create application credentials linked to your account:\n1. Go to [Applications at myUplink developer site]({create_creds_url}) and get credentials from an existing application or select **Create New Application**.\n1. Set appropriate Application name and Description\n1. Enter `{callback_url}` as Callback URL"
},
"config": {
"step": {
@@ -12,11 +12,12 @@ from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers.entity_platform import AddEntitiesCallback

from . import MyUplinkConfigEntry, MyUplinkDataCoordinator
from .const import F_SERIES
from .entity import MyUplinkEntity
from .helpers import find_matching_platform, skip_entity
from .helpers import find_matching_platform, skip_entity, transform_model_series

CATEGORY_BASED_DESCRIPTIONS: dict[str, dict[str, SwitchEntityDescription]] = {
"F730": {
F_SERIES: {
"50004": SwitchEntityDescription(
key="temporary_lux",
translation_key="temporary_lux",
@@ -47,6 +48,7 @@ def get_description(device_point: DevicePoint) -> SwitchEntityDescription | None
2. Default to None
"""
prefix, _, _ = device_point.category.partition(" ")
prefix = transform_model_series(prefix)
return CATEGORY_BASED_DESCRIPTIONS.get(prefix, {}).get(device_point.parameter_id)
@@ -114,9 +114,8 @@ async def new_subscriber(
implementation, config_entry_oauth2_flow.LocalOAuth2Implementation
):
raise TypeError(f"Unexpected auth implementation {implementation}")
subscription_name = entry.data.get(
CONF_SUBSCRIPTION_NAME, entry.data[CONF_SUBSCRIBER_ID]
)
if (subscription_name := entry.data.get(CONF_SUBSCRIPTION_NAME)) is None:
subscription_name = entry.data[CONF_SUBSCRIBER_ID]
auth = AsyncConfigEntryAuth(
aiohttp_client.async_get_clientsession(hass),
config_entry_oauth2_flow.OAuth2Session(hass, entry, implementation),
@@ -2,9 +2,9 @@

from __future__ import annotations

from abc import ABC, abstractmethod
from abc import ABC
import asyncio
from collections.abc import Callable
from collections.abc import Awaitable, Callable
import datetime
import functools
import logging
@@ -19,6 +19,7 @@ from google_nest_sdm.camera_traits import (
from google_nest_sdm.device import Device
from google_nest_sdm.device_manager import DeviceManager
from google_nest_sdm.exceptions import ApiException
from webrtc_models import RTCIceCandidate

from homeassistant.components.camera import (
Camera,
@@ -46,6 +47,11 @@ PLACEHOLDER = Path(__file__).parent / "placeholder.png"
# Used to schedule an alarm to refresh the stream before expiration
STREAM_EXPIRATION_BUFFER = datetime.timedelta(seconds=30)

# Refresh streams with a bounded interval and backoff on failure
MIN_REFRESH_BACKOFF_INTERVAL = datetime.timedelta(minutes=1)
MAX_REFRESH_BACKOFF_INTERVAL = datetime.timedelta(minutes=10)
BACKOFF_MULTIPLIER = 1.5


async def async_setup_entry(
hass: HomeAssistant, entry: ConfigEntry, async_add_entities: AddEntitiesCallback
@@ -67,6 +73,68 @@ async def async_setup_entry(
async_add_entities(entities)


class StreamRefresh:
"""Class that will refresh an expiring stream.

This class will schedule an alarm for the next expiration time of a stream.
When the alarm fires, it runs the provided `refresh_cb` to extend the
lifetime of the stream and return a new expiration time.

A simple backoff will be applied when the refresh callback fails.
"""

def __init__(
self,
hass: HomeAssistant,
expires_at: datetime.datetime,
refresh_cb: Callable[[], Awaitable[datetime.datetime | None]],
) -> None:
"""Initialize StreamRefresh."""
self._hass = hass
self._unsub: Callable[[], None] | None = None
self._min_refresh_interval = MIN_REFRESH_BACKOFF_INTERVAL
self._refresh_cb = refresh_cb
self._schedule_stream_refresh(expires_at - STREAM_EXPIRATION_BUFFER)

def unsub(self) -> None:
"""Invalidates the stream."""
if self._unsub:
self._unsub()

async def _handle_refresh(self, _: datetime.datetime) -> None:
"""Alarm that fires to check if the stream should be refreshed."""
self._unsub = None
try:
expires_at = await self._refresh_cb()
except ApiException as err:
_LOGGER.debug("Failed to refresh stream: %s", err)
# Increase backoff until the max backoff interval is reached
self._min_refresh_interval = min(
self._min_refresh_interval * BACKOFF_MULTIPLIER,
MAX_REFRESH_BACKOFF_INTERVAL,
)
refresh_time = utcnow() + self._min_refresh_interval
else:
if expires_at is None:
return
self._min_refresh_interval = MIN_REFRESH_BACKOFF_INTERVAL # Reset backoff
# Defend against invalid stream expiration time in the past
refresh_time = max(
expires_at - STREAM_EXPIRATION_BUFFER,
utcnow() + self._min_refresh_interval,
)
self._schedule_stream_refresh(refresh_time)

def _schedule_stream_refresh(self, refresh_time: datetime.datetime) -> None:
"""Schedules an alarm to refresh any streams before expiration."""
_LOGGER.debug("Scheduling stream refresh for %s", refresh_time)
self._unsub = async_track_point_in_utc_time(
self._hass,
self._handle_refresh,
refresh_time,
)


class NestCameraBaseEntity(Camera, ABC):
"""Devices that support cameras."""
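A short, runnable sketch of the bounded backoff that StreamRefresh applies between failed refresh attempts; the constants mirror the ones added above, while the loop and print are purely illustrative.

import datetime

MIN_REFRESH_BACKOFF_INTERVAL = datetime.timedelta(minutes=1)
MAX_REFRESH_BACKOFF_INTERVAL = datetime.timedelta(minutes=10)
BACKOFF_MULTIPLIER = 1.5

interval = MIN_REFRESH_BACKOFF_INTERVAL
for attempt in range(6):
    # Each failed refresh grows the wait, capped at the maximum interval.
    print(attempt, interval)
    interval = min(interval * BACKOFF_MULTIPLIER, MAX_REFRESH_BACKOFF_INTERVAL)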
@@ -86,41 +154,6 @@ class NestCameraBaseEntity(Camera, ABC):
self.stream_options[CONF_EXTRA_PART_WAIT_TIME] = 3
# The API "name" field is a unique device identifier.
self._attr_unique_id = f"{self._device.name}-camera"
self._stream_refresh_unsub: Callable[[], None] | None = None

@abstractmethod
def _stream_expires_at(self) -> datetime.datetime | None:
"""Next time when a stream expires."""

@abstractmethod
async def _async_refresh_stream(self) -> None:
"""Refresh any stream to extend expiration time."""

def _schedule_stream_refresh(self) -> None:
"""Schedules an alarm to refresh any streams before expiration."""
if self._stream_refresh_unsub is not None:
self._stream_refresh_unsub()

expiration_time = self._stream_expires_at()
if not expiration_time:
return
refresh_time = expiration_time - STREAM_EXPIRATION_BUFFER
_LOGGER.debug("Scheduled next stream refresh for %s", refresh_time)

self._stream_refresh_unsub = async_track_point_in_utc_time(
self.hass,
self._handle_stream_refresh,
refresh_time,
)

async def _handle_stream_refresh(self, _: datetime.datetime) -> None:
"""Alarm that fires to check if the stream should be refreshed."""
_LOGGER.debug("Examining streams to refresh")
self._stream_refresh_unsub = None
try:
await self._async_refresh_stream()
finally:
self._schedule_stream_refresh()

async def async_added_to_hass(self) -> None:
"""Run when entity is added to register update signal handler."""
@@ -128,12 +161,6 @@ class NestCameraBaseEntity(Camera, ABC):
self._device.add_update_listener(self.async_write_ha_state)
)

async def async_will_remove_from_hass(self) -> None:
"""Invalidates the RTSP token when unloaded."""
await super().async_will_remove_from_hass()
if self._stream_refresh_unsub:
self._stream_refresh_unsub()


class NestRTSPEntity(NestCameraBaseEntity):
"""Nest cameras that use RTSP."""
@@ -146,6 +173,7 @@ class NestRTSPEntity(NestCameraBaseEntity):
super().__init__(device)
self._create_stream_url_lock = asyncio.Lock()
self._rtsp_live_stream_trait = device.traits[CameraLiveStreamTrait.NAME]
self._refresh_unsub: Callable[[], None] | None = None

@property
def use_stream_for_stills(self) -> bool:
@@ -173,20 +201,21 @@ class NestRTSPEntity(NestCameraBaseEntity):
)
except ApiException as err:
raise HomeAssistantError(f"Nest API error: {err}") from err
self._schedule_stream_refresh()
refresh = StreamRefresh(
self.hass,
self._rtsp_stream.expires_at,
self._async_refresh_stream,
)
self._refresh_unsub = refresh.unsub
assert self._rtsp_stream
if self._rtsp_stream.expires_at < utcnow():
_LOGGER.warning("Stream already expired")
return self._rtsp_stream.rtsp_stream_url

def _stream_expires_at(self) -> datetime.datetime | None:
"""Next time when a stream expires."""
return self._rtsp_stream.expires_at if self._rtsp_stream else None

async def _async_refresh_stream(self) -> None:
async def _async_refresh_stream(self) -> datetime.datetime | None:
"""Refresh stream to extend expiration time."""
if not self._rtsp_stream:
return
return None
_LOGGER.debug("Extending RTSP stream")
try:
self._rtsp_stream = await self._rtsp_stream.extend_rtsp_stream()
@@ -197,14 +226,17 @@ class NestRTSPEntity(NestCameraBaseEntity):
if self.stream:
await self.stream.stop()
self.stream = None
return
return None
# Update the stream worker with the latest valid url
if self.stream:
self.stream.update_source(self._rtsp_stream.rtsp_stream_url)
return self._rtsp_stream.expires_at

async def async_will_remove_from_hass(self) -> None:
"""Invalidates the RTSP token when unloaded."""
await super().async_will_remove_from_hass()
if self._refresh_unsub is not None:
self._refresh_unsub()
if self._rtsp_stream:
try:
await self._rtsp_stream.stop_stream()
@@ -220,34 +252,23 @@ class NestWebRTCEntity(NestCameraBaseEntity):
"""Initialize the camera."""
super().__init__(device)
self._webrtc_sessions: dict[str, WebRtcStream] = {}
self._refresh_unsub: dict[str, Callable[[], None]] = {}

@property
def frontend_stream_type(self) -> StreamType | None:
"""Return the type of stream supported by this camera."""
return StreamType.WEB_RTC

def _stream_expires_at(self) -> datetime.datetime | None:
"""Next time when a stream expires."""
if not self._webrtc_sessions:
return None
return min(stream.expires_at for stream in self._webrtc_sessions.values())

async def _async_refresh_stream(self) -> None:
async def _async_refresh_stream(self, session_id: str) -> datetime.datetime | None:
"""Refresh stream to extend expiration time."""
now = utcnow()
for webrtc_stream in list(self._webrtc_sessions.values()):
if now < (webrtc_stream.expires_at - STREAM_EXPIRATION_BUFFER):
_LOGGER.debug(
"Stream does not yet expire: %s", webrtc_stream.expires_at
)
continue
_LOGGER.debug("Extending WebRTC stream %s", webrtc_stream.media_session_id)
try:
webrtc_stream = await webrtc_stream.extend_stream()
except ApiException as err:
_LOGGER.debug("Failed to extend stream: %s", err)
else:
self._webrtc_sessions[webrtc_stream.media_session_id] = webrtc_stream
if not (webrtc_stream := self._webrtc_sessions.get(session_id)):
return None
_LOGGER.debug("Extending WebRTC stream %s", webrtc_stream.media_session_id)
webrtc_stream = await webrtc_stream.extend_stream()
if session_id in self._webrtc_sessions:
self._webrtc_sessions[session_id] = webrtc_stream
return webrtc_stream.expires_at
return None

async def async_camera_image(
self, width: int | None = None, height: int | None = None
@@ -275,7 +296,18 @@ class NestWebRTCEntity(NestCameraBaseEntity):
)
self._webrtc_sessions[session_id] = stream
send_message(WebRTCAnswer(stream.answer_sdp))
self._schedule_stream_refresh()
refresh = StreamRefresh(
self.hass,
stream.expires_at,
functools.partial(self._async_refresh_stream, session_id),
)
self._refresh_unsub[session_id] = refresh.unsub

async def async_on_webrtc_candidate(
self, session_id: str, candidate: RTCIceCandidate
) -> None:
"""Ignore WebRTC candidates for Nest cloud based cameras."""
return

@callback
def close_webrtc_session(self, session_id: str) -> None:
@@ -284,6 +316,8 @@ class NestWebRTCEntity(NestCameraBaseEntity):
_LOGGER.debug(
"Closing WebRTC session %s, %s", session_id, stream.media_session_id
)
unsub = self._refresh_unsub.pop(session_id)
unsub()

async def stop_stream() -> None:
try:
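A tiny sketch of the functools.partial pattern used above so that each WebRTC session gets its own zero-argument refresh callback; the session id and coroutine body are stand-ins, not the Nest API.

import asyncio
import functools

async def refresh_stream(session_id: str) -> str:
    # Stand-in for the per-session extend call.
    return f"extended {session_id}"

refresh_cb = functools.partial(refresh_stream, "session-123")
print(asyncio.run(refresh_cb()))  # -> extended session-123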
@@ -20,5 +20,5 @@
"iot_class": "cloud_push",
"loggers": ["google_nest_sdm"],
"quality_scale": "platinum",
"requirements": ["google-nest-sdm==6.1.3"]
"requirements": ["google-nest-sdm==6.1.5"]
}

@@ -7,5 +7,5 @@
"integration_type": "hub",
"iot_class": "cloud_push",
"loggers": ["nice_go"],
"requirements": ["nice-go==0.3.9"]
"requirements": ["nice-go==0.3.10"]
}
@@ -11,9 +11,11 @@
"title": "Downloading model"
}
},
"abort": {
"download_failed": "Model downloading failed"
},
"error": {
"cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
"download_failed": "Model downloading failed",
"unknown": "[%key:common::config_flow::error::unknown%]"
},
"progress": {
@@ -137,7 +137,7 @@ class OnewireOptionsFlowHandler(OptionsFlowWithConfigEntry):
}

if not self.configurable_devices:
return self.async_abort(reason="No configurable devices found.")
return self.async_abort(reason="no_configurable_devices")

return await self.async_step_device_selection(user_input=None)

@@ -94,6 +94,9 @@
}
},
"options": {
"abort": {
"no_configurable_devices": "No configurable devices found"
},
"error": {
"device_not_selected": "Select devices to configure"
},
@@ -57,10 +57,13 @@ class P1MonitorFlowHandler(ConfigFlow, domain=DOMAIN):
data_schema=vol.Schema(
{
vol.Required(CONF_HOST): TextSelector(),
vol.Required(CONF_PORT, default=80): NumberSelector(
NumberSelectorConfig(
mode=NumberSelectorMode.BOX,
)
vol.Required(CONF_PORT, default=80): vol.All(
NumberSelector(
NumberSelectorConfig(
min=1, max=65535, mode=NumberSelectorMode.BOX
),
),
vol.Coerce(int),
),
}
),
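For context, a voluptuous-only sketch of the same idea (a bounded port value coerced to int); it substitutes vol.Range for Home Assistant's NumberSelector, so it is an illustration under that assumption rather than the integration's actual schema.

import voluptuous as vol

PORT_SCHEMA = vol.Schema(
    {
        vol.Required("host"): str,
        # Accept "8080" or 8080, coerce to int, and keep it in the valid TCP range.
        vol.Required("port", default=80): vol.All(
            vol.Coerce(int), vol.Range(min=1, max=65535)
        ),
    }
)

print(PORT_SCHEMA({"host": "192.168.1.2", "port": "8080"}))  # port becomes 8080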
@@ -18,11 +18,11 @@
"error": {
"cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
"unknown": "[%key:common::config_flow::error::unknown%]",
"pairing_failure": "Unable to pair: {error_id}",
"invalid_pin": "Invalid PIN"
},
"abort": {
"already_configured": "[%key:common::config_flow::abort::already_configured_device%]",
"pairing_failure": "Unable to pair: {error_id}",
"reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]"
}
},
@@ -27,6 +27,12 @@ from .const import CONF_PING_COUNT, DEFAULT_PING_COUNT, DOMAIN
_LOGGER = logging.getLogger(__name__)


def _clean_user_input(user_input: dict[str, Any]) -> dict[str, Any]:
"""Clean up the user input."""
user_input[CONF_HOST] = user_input[CONF_HOST].strip()
return user_input


class PingConfigFlow(ConfigFlow, domain=DOMAIN):
"""Handle a config flow for Ping."""

@@ -46,6 +52,7 @@ class PingConfigFlow(ConfigFlow, domain=DOMAIN):
),
)

user_input = _clean_user_input(user_input)
if not is_ip_address(user_input[CONF_HOST]):
self.async_abort(reason="invalid_ip_address")

@@ -81,7 +88,7 @@ class OptionsFlowHandler(OptionsFlow):
) -> ConfigFlowResult:
"""Manage the options."""
if user_input is not None:
return self.async_create_entry(title="", data=user_input)
return self.async_create_entry(title="", data=_clean_user_input(user_input))

return self.async_show_form(
step_id="init",
@@ -64,10 +64,10 @@ WEBHOOK_SCHEMA = vol.Schema(
vol.Required(ATTR_DEVICE_NAME): cv.string,
vol.Required(ATTR_DEVICE_ID): cv.positive_int,
vol.Required(ATTR_TEMP_UNIT): vol.In(
UnitOfTemperature.CELSIUS, UnitOfTemperature.FAHRENHEIT
[UnitOfTemperature.CELSIUS, UnitOfTemperature.FAHRENHEIT]
),
vol.Required(ATTR_VOLUME_UNIT): vol.In(
UnitOfVolume.LITERS, UnitOfVolume.GALLONS
[UnitOfVolume.LITERS, UnitOfVolume.GALLONS]
),
vol.Required(ATTR_BPM): cv.positive_int,
vol.Required(ATTR_TEMP): vol.Coerce(float),
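A short voluptuous sketch of why wrapping the values in a list matters here: vol.In expects one container of allowed values, so the earlier form with two bare arguments did not validate membership the way vol.In([...]) does. The keys and values below are illustrative.

import voluptuous as vol

schema = vol.Schema({vol.Required("temp_unit"): vol.In(["°C", "°F"])})
print(schema({"temp_unit": "°C"}))  # accepted
try:
    schema({"temp_unit": "K"})  # rejected: not in the allowed container
except vol.Invalid as err:
    print(err)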
@@ -8,5 +8,5 @@
"documentation": "https://www.home-assistant.io/integrations/plaato",
"iot_class": "cloud_push",
"loggers": ["pyplaato"],
"requirements": ["pyplaato==0.0.18"]
"requirements": ["pyplaato==0.0.19"]
}
@@ -251,8 +251,8 @@ class PowerwallConfigFlow(ConfigFlow, domain=DOMAIN):
"""Handle reauth confirmation."""
errors: dict[str, str] | None = {}
description_placeholders: dict[str, str] = {}
reauth_entry = self._get_reauth_entry()
if user_input is not None:
reauth_entry = self._get_reauth_entry()
errors, _, description_placeholders = await self._async_try_connect(
{CONF_IP_ADDRESS: reauth_entry.data[CONF_IP_ADDRESS], **user_input}
)
@@ -261,6 +261,10 @@ class PowerwallConfigFlow(ConfigFlow, domain=DOMAIN):
reauth_entry, data_updates=user_input
)

self.context["title_placeholders"] = {
"name": reauth_entry.title,
"ip_address": reauth_entry.data[CONF_IP_ADDRESS],
}
return self.async_show_form(
step_id="reauth_confirm",
data_schema=vol.Schema({vol.Optional(CONF_PASSWORD): str}),
@@ -8,6 +8,7 @@ from typing import Any
from rachiopy import Rachio
from requests.exceptions import Timeout

from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.helpers.debounce import Debouncer
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
@@ -38,6 +39,7 @@ class RachioUpdateCoordinator(DataUpdateCoordinator[dict[str, Any]]):
self,
hass: HomeAssistant,
rachio: Rachio,
config_entry: ConfigEntry,
base_station,
base_count: int,
) -> None:
@@ -48,6 +50,7 @@ class RachioUpdateCoordinator(DataUpdateCoordinator[dict[str, Any]]):
super().__init__(
hass,
_LOGGER,
config_entry=config_entry,
name=f"{DOMAIN} update coordinator",
# To avoid exceeding the rate limit, increase polling interval for
# each additional base station on the account
@@ -76,6 +79,7 @@ class RachioScheduleUpdateCoordinator(DataUpdateCoordinator[list[dict[str, Any]]
self,
hass: HomeAssistant,
rachio: Rachio,
config_entry: ConfigEntry,
base_station,
) -> None:
"""Initialize a Rachio schedule coordinator."""
@@ -85,6 +89,7 @@ class RachioScheduleUpdateCoordinator(DataUpdateCoordinator[list[dict[str, Any]]
super().__init__(
hass,
_LOGGER,
config_entry=config_entry,
name=f"{DOMAIN} schedule update coordinator",
update_interval=timedelta(minutes=30),
)
@@ -189,8 +189,10 @@ class RachioPerson:
RachioBaseStation(
rachio,
base,
RachioUpdateCoordinator(hass, rachio, base, base_count),
RachioScheduleUpdateCoordinator(hass, rachio, base),
RachioUpdateCoordinator(
hass, rachio, self.config_entry, base, base_count
),
RachioScheduleUpdateCoordinator(hass, rachio, self.config_entry, base),
)
for base in base_stations
)
@@ -110,7 +110,7 @@ def purge_old_data(
_LOGGER.debug("Purging hasn't fully completed yet")
return False

if apply_filter and _purge_filtered_data(instance, session) is False:
if apply_filter and not _purge_filtered_data(instance, session):
_LOGGER.debug("Cleanup filtered data hasn't fully completed yet")
return False

@@ -631,7 +631,10 @@ def _purge_old_entity_ids(instance: Recorder, session: Session) -> None:


def _purge_filtered_data(instance: Recorder, session: Session) -> bool:
"""Remove filtered states and events that shouldn't be in the database."""
"""Remove filtered states and events that shouldn't be in the database.

Returns true if all states and events are purged.
"""
_LOGGER.debug("Cleanup filtered data")
database_engine = instance.database_engine
assert database_engine is not None
@@ -639,7 +642,7 @@ def _purge_filtered_data(instance: Recorder, session: Session) -> bool:

# Check if excluded entity_ids are in database
entity_filter = instance.entity_filter
has_more_states_to_purge = False
has_more_to_purge = False
excluded_metadata_ids: list[str] = [
metadata_id
for (metadata_id, entity_id) in session.query(
@@ -648,12 +651,11 @@ def _purge_filtered_data(instance: Recorder, session: Session) -> bool:
if entity_filter and not entity_filter(entity_id)
]
if excluded_metadata_ids:
has_more_states_to_purge = _purge_filtered_states(
has_more_to_purge |= not _purge_filtered_states(
instance, session, excluded_metadata_ids, database_engine, now_timestamp
)

# Check if excluded event_types are in database
has_more_events_to_purge = False
if (
event_type_to_event_type_ids := instance.event_type_manager.get_many(
instance.exclude_event_types, session
@@ -665,12 +667,12 @@ def _purge_filtered_data(instance: Recorder, session: Session) -> bool:
if event_type_id is not None
]
):
has_more_events_to_purge = _purge_filtered_events(
has_more_to_purge |= not _purge_filtered_events(
instance, session, excluded_event_type_ids, now_timestamp
)

# Purge has completed if there are not more state or events to purge
return not (has_more_states_to_purge or has_more_events_to_purge)
return not has_more_to_purge


def _purge_filtered_states(
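A toy illustration of the accumulator change in _purge_filtered_data: each purge step returns True once it has finished, and the function reports completion only when no step still has work left. The step functions below are stand-ins for _purge_filtered_states and _purge_filtered_events.

def purge_states_done() -> bool:
    return True  # pretend all filtered states were purged

def purge_events_done() -> bool:
    return False  # pretend some filtered events remain

has_more_to_purge = False
has_more_to_purge |= not purge_states_done()
has_more_to_purge |= not purge_events_done()
print(not has_more_to_purge)  # False: another purge pass is still needed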
@@ -326,7 +326,19 @@ def migrate_entity_ids(
else:
new_device_id = f"{device_uid[0]}_{host.api.camera_uid(ch)}"
new_identifiers = {(DOMAIN, new_device_id)}
device_reg.async_update_device(device.id, new_identifiers=new_identifiers)
existing_device = device_reg.async_get_device(identifiers=new_identifiers)
if existing_device is None:
device_reg.async_update_device(
device.id, new_identifiers=new_identifiers
)
else:
_LOGGER.warning(
"Reolink device with uid %s already exists, "
"removing device with uid %s",
new_device_id,
device_uid,
)
device_reg.async_remove_device(device.id)

entity_reg = er.async_get(hass)
entities = er.async_entries_for_config_entry(entity_reg, config_entry_id)
@@ -352,4 +364,18 @@ def migrate_entity_ids(
id_parts = entity.unique_id.split("_", 2)
if host.api.supported(ch, "UID") and id_parts[1] != host.api.camera_uid(ch):
new_id = f"{host.unique_id}_{host.api.camera_uid(ch)}_{id_parts[2]}"
entity_reg.async_update_entity(entity.entity_id, new_unique_id=new_id)
existing_entity = entity_reg.async_get_entity_id(
entity.domain, entity.platform, new_id
)
if existing_entity is None:
entity_reg.async_update_entity(
entity.entity_id, new_unique_id=new_id
)
else:
_LOGGER.warning(
"Reolink entity with unique_id %s already exists, "
"removing device with unique_id %s",
new_id,
entity.unique_id,
)
entity_reg.async_remove(entity.entity_id)
@@ -110,6 +110,7 @@ class ReolinkHost:
self._cancel_onvif_check: CALLBACK_TYPE | None = None
self._cancel_long_poll_check: CALLBACK_TYPE | None = None
self._poll_job = HassJob(self._async_poll_all_motion, cancel_on_shutdown=True)
self._fast_poll_error: bool = False
self._long_poll_task: asyncio.Task | None = None
self._lost_subscription: bool = False

@@ -699,14 +700,20 @@ class ReolinkHost:
return

try:
await self._api.get_motion_state_all_ch()
if self._api.session_active:
await self._api.get_motion_state_all_ch()
except ReolinkError as err:
_LOGGER.error(
"Reolink error while polling motion state for host %s:%s: %s",
self._api.host,
self._api.port,
err,
)
if not self._fast_poll_error:
_LOGGER.error(
"Reolink error while polling motion state for host %s:%s: %s",
self._api.host,
self._api.port,
err,
)
self._fast_poll_error = True
else:
if self._api.session_active:
self._fast_poll_error = False
finally:
# schedule next poll
if not self._hass.is_stopping:
@@ -18,5 +18,5 @@
"documentation": "https://www.home-assistant.io/integrations/reolink",
"iot_class": "local_push",
"loggers": ["reolink_aio"],
"requirements": ["reolink-aio==0.10.4"]
"requirements": ["reolink-aio==0.11.2"]
}

@@ -96,7 +96,7 @@ class RingEvent(RingBaseEntity[RingListenCoordinator, RingDeviceT], EventEntity)

@callback
def _handle_coordinator_update(self) -> None:
if alert := self._get_coordinator_alert():
if (alert := self._get_coordinator_alert()) and not alert.is_update:
self._async_handle_event(alert.kind)
super()._handle_coordinator_update()
@@ -30,5 +30,5 @@
"iot_class": "cloud_polling",
"loggers": ["ring_doorbell"],
"quality_scale": "silver",
"requirements": ["ring-doorbell==0.9.8"]
"requirements": ["ring-doorbell==0.9.12"]
}

@@ -47,7 +47,6 @@ class RoborockCoordinators:
async def async_setup_entry(hass: HomeAssistant, entry: RoborockConfigEntry) -> bool:
"""Set up roborock from a config entry."""

_LOGGER.debug("Integration async setup entry: %s", entry.as_dict())
entry.async_on_unload(entry.add_update_listener(update_listener))

user_data = UserData.from_dict(entry.data[CONF_USER_DATA])

@@ -7,7 +7,7 @@
"iot_class": "local_polling",
"loggers": ["roborock"],
"requirements": [
"python-roborock==2.6.1",
"python-roborock==2.7.2",
"vacuum-map-parser-roborock==0.1.2"
]
}
@@ -7,5 +7,5 @@
"integration_type": "hub",
"iot_class": "local_polling",
"loggers": ["aioruckus"],
"requirements": ["aioruckus==0.41"]
"requirements": ["aioruckus==0.42"]
}

@@ -20,5 +20,5 @@
"documentation": "https://www.home-assistant.io/integrations/sense",
"iot_class": "cloud_polling",
"loggers": ["sense_energy"],
"requirements": ["sense-energy==0.13.2"]
"requirements": ["sense-energy==0.13.4"]
}

@@ -178,6 +178,7 @@ AIRQ_SENSOR_TYPES: tuple[SensiboDeviceSensorEntityDescription, ...] = (
value_fn=lambda data: data.co2,
extra_fn=None,
),
*DEVICE_SENSOR_TYPES,
)

ELEMENT_SENSOR_TYPES: tuple[SensiboDeviceSensorEntityDescription, ...] = (
@@ -1,8 +1,8 @@
"""Services for the seventeentrack integration."""

from typing import Final
from typing import Any, Final

from pyseventeentrack.package import PACKAGE_STATUS_MAP
from pyseventeentrack.package import PACKAGE_STATUS_MAP, Package
import voluptuous as vol

from homeassistant.config_entries import ConfigEntry, ConfigEntryState
@@ -81,18 +81,7 @@ def setup_services(hass: HomeAssistant) -> None:

return {
"packages": [
{
ATTR_DESTINATION_COUNTRY: package.destination_country,
ATTR_ORIGIN_COUNTRY: package.origin_country,
ATTR_PACKAGE_TYPE: package.package_type,
ATTR_TRACKING_INFO_LANGUAGE: package.tracking_info_language,
ATTR_TRACKING_NUMBER: package.tracking_number,
ATTR_LOCATION: package.location,
ATTR_STATUS: package.status,
ATTR_TIMESTAMP: package.timestamp.isoformat(),
ATTR_INFO_TEXT: package.info_text,
ATTR_FRIENDLY_NAME: package.friendly_name,
}
package_to_dict(package)
for package in live_packages
if slugify(package.status) in package_states or package_states == []
]
@@ -110,6 +99,22 @@ def setup_services(hass: HomeAssistant) -> None:

await seventeen_coordinator.client.profile.archive_package(tracking_number)

def package_to_dict(package: Package) -> dict[str, Any]:
result = {
ATTR_DESTINATION_COUNTRY: package.destination_country,
ATTR_ORIGIN_COUNTRY: package.origin_country,
ATTR_PACKAGE_TYPE: package.package_type,
ATTR_TRACKING_INFO_LANGUAGE: package.tracking_info_language,
ATTR_TRACKING_NUMBER: package.tracking_number,
ATTR_LOCATION: package.location,
ATTR_STATUS: package.status,
ATTR_INFO_TEXT: package.info_text,
ATTR_FRIENDLY_NAME: package.friendly_name,
}
if timestamp := package.timestamp:
result[ATTR_TIMESTAMP] = timestamp.isoformat()
return result

async def _validate_service(config_entry_id):
entry: ConfigEntry | None = hass.config_entries.async_get_entry(config_entry_id)
if not entry:
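A minimal sketch of the guard introduced in package_to_dict above: the timestamp key is only emitted when the value is set, so packages without a timestamp no longer fail on .isoformat(). The Package stand-in below is illustrative, not the pyseventeentrack class.

from dataclasses import dataclass
from datetime import datetime

@dataclass
class FakePackage:
    tracking_number: str
    timestamp: datetime | None = None

def package_to_dict(package: FakePackage) -> dict[str, str]:
    result = {"tracking_number": package.tracking_number}
    if timestamp := package.timestamp:
        result["timestamp"] = timestamp.isoformat()
    return result

print(package_to_dict(FakePackage("RR123456789NL")))  # no timestamp key emitted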
@@ -603,7 +603,7 @@ class ShellyRpcCoordinator(ShellyCoordinatorBase[RpcDevice]):

async def _async_update_data(self) -> None:
"""Fetch data."""
if self.update_sleep_period():
if self.update_sleep_period() or self.hass.is_stopping:
return

if self.sleep_period:

@@ -7,5 +7,5 @@
"iot_class": "cloud_polling",
"loggers": ["smarttub"],
"quality_scale": "platinum",
"requirements": ["python-smarttub==0.0.36"]
"requirements": ["python-smarttub==0.0.38"]
}

@@ -28,6 +28,10 @@
"deprecated_yaml_import_issue_auth_error": {
"title": "YAML import failed due to an authentication error",
"description": "Configuring {integration_title} using YAML is being removed but there was an authentication error while importing your existing configuration.\nSetup will not proceed.\n\nVerify that your {integration_title} is operating correctly and restart Home Assistant to attempt the import again.\n\nAlternatively, you may remove the `{domain}` configuration from your configuration.yaml entirely, restart Home Assistant, and add the {integration_title} integration manually."
},
"deprecated_yaml_import_issue_cannot_connect": {
"title": "YAML import failed due to a connection error",
"description": "Configuring {integration_title} using YAML is being removed but there was a connection error while importing your existing configuration.\nSetup will not proceed.\n\nVerify that your {integration_title} is operating correctly and restart Home Assistant to attempt the import again.\n\nAlternatively, you may remove the `{domain}` configuration from your configuration.yaml entirely, restart Home Assistant, and add the {integration_title} integration manually."
}
},
"entity": {
@@ -8,7 +8,7 @@
"documentation": "https://www.home-assistant.io/integrations/sonos",
"iot_class": "local_push",
"loggers": ["soco"],
"requirements": ["soco==0.30.4", "sonos-websocket==0.1.3"],
"requirements": ["soco==0.30.6", "sonos-websocket==0.1.3"],
"ssdp": [
{
"st": "urn:schemas-upnp-org:device:ZonePlayer:1"