Mirror of https://github.com/home-assistant/core.git (synced 2025-10-24 11:09:37 +00:00)
Compare commits: 47 commits, zjs-config ... zc1
| Author | SHA1 | Date |
|---|---|---|
| | e104626bc1 | |
| | 7a61c818c6 | |
| | 2800625bcf | |
| | cfec998221 | |
| | 7203cffbd7 | |
| | 23397ef6a9 | |
| | 0e154635ff | |
| | 2e6e518722 | |
| | e0cded97c7 | |
| | 87a6a029bb | |
| | 1cc3c22d3f | |
| | 2341d1d965 | |
| | a0bae9485c | |
| | f281b0fc6b | |
| | 6f89fe81cc | |
| | 34f6ead7a1 | |
| | 8985527a87 | |
| | bd87a3aa4d | |
| | 768a505904 | |
| | d97c1f0fc3 | |
| | c3fcd34d4c | |
| | 44d9eaea95 | |
| | 0f34f5139a | |
| | 2afb1a673d | |
| | c2f7f29630 | |
| | b01f5dd24b | |
| | 0cda0c449f | |
| | 40fdf12bc9 | |
| | 3939a80302 | |
| | d32a102613 | |
| | 20949d39c4 | |
| | 310a0c8d13 | |
| | c9e80ac7e9 | |
| | 5df4e9e1cf | |
| | 4022ee74e8 | |
| | 80a4115c44 | |
| | ce548efd80 | |
| | 2edf622b41 | |
| | 66ac9078aa | |
| | ba75f18f5a | |
| | 8ee2ece03e | |
| | 7060ab8c44 | |
| | 85d8244b8a | |
| | 3f9421ab08 | |
| | 2f3fbf00b7 | |
| | d595ec8a07 | |
| | 4ff5462cc4 | |
.github/workflows/codeql.yml (vendored, 4 changed lines)
@@ -24,11 +24,11 @@ jobs:
        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0

      - name: Initialize CodeQL
-       uses: github/codeql-action/init@3599b3baa15b485a2e49ef411a7a4bb2452e7f93 # v3.30.5
+       uses: github/codeql-action/init@64d10c13136e1c5bce3e5fbde8d4906eeaafc885 # v3.30.6
        with:
          languages: python

      - name: Perform CodeQL Analysis
-       uses: github/codeql-action/analyze@3599b3baa15b485a2e49ef411a7a4bb2452e7f93 # v3.30.5
+       uses: github/codeql-action/analyze@64d10c13136e1c5bce3e5fbde8d4906eeaafc885 # v3.30.6
        with:
          category: "/language:python"
@@ -555,6 +555,7 @@ homeassistant.components.vacuum.*
homeassistant.components.vallox.*
homeassistant.components.valve.*
homeassistant.components.velbus.*
homeassistant.components.vivotek.*
homeassistant.components.vlc_telnet.*
homeassistant.components.vodafone_station.*
+homeassistant.components.volvo.*
CODEOWNERS (generated, 2 changed lines)
@@ -1196,8 +1196,6 @@ build.json @home-assistant/supervisor
/tests/components/plex/ @jjlawren
/homeassistant/components/plugwise/ @CoMPaTech @bouwew
/tests/components/plugwise/ @CoMPaTech @bouwew
-/homeassistant/components/plum_lightpad/ @ColinHarrington @prystupa
-/tests/components/plum_lightpad/ @ColinHarrington @prystupa
/homeassistant/components/point/ @fredrike
/tests/components/point/ @fredrike
/homeassistant/components/pooldose/ @lmaertin
@@ -1,5 +0,0 @@
-{
-  "domain": "ibm",
-  "name": "IBM",
-  "integrations": ["watson_iot", "watson_tts"]
-}
@@ -24,5 +24,5 @@
  "dependencies": ["bluetooth_adapters"],
  "documentation": "https://www.home-assistant.io/integrations/airthings_ble",
  "iot_class": "local_polling",
- "requirements": ["airthings-ble==0.9.2"]
+ "requirements": ["airthings-ble==1.1.1"]
}
@@ -8,5 +8,5 @@
  "iot_class": "cloud_polling",
  "loggers": ["aioamazondevices"],
  "quality_scale": "platinum",
- "requirements": ["aioamazondevices==6.2.7"]
+ "requirements": ["aioamazondevices==6.2.8"]
}
@@ -19,8 +19,8 @@
    "bleak-retry-connector==4.4.3",
    "bluetooth-adapters==2.1.0",
    "bluetooth-auto-recovery==1.5.3",
-   "bluetooth-data-tools==1.28.2",
-   "dbus-fast==2.44.3",
-   "habluetooth==5.6.4"
+   "bluetooth-data-tools==1.28.3",
+   "dbus-fast==2.44.5",
+   "habluetooth==5.7.0"
  ]
}
@@ -17,6 +17,6 @@
  "requirements": [
    "aiodhcpwatcher==1.2.1",
    "aiodiscover==2.7.1",
-   "cached-ipaddress==0.10.0"
+   "cached-ipaddress==1.0.1"
  ]
}
@@ -7,7 +7,7 @@
  "iot_class": "local_polling",
  "loggers": ["pyenphase"],
  "quality_scale": "platinum",
- "requirements": ["pyenphase==2.3.0"],
+ "requirements": ["pyenphase==2.4.0"],
  "zeroconf": [
    {
      "type": "_enphase-envoy._tcp.local."
@@ -19,7 +19,7 @@
  "requirements": [
    "aioesphomeapi==41.11.0",
    "esphome-dashboard-api==1.3.0",
-   "bleak-esphome==3.3.0"
+   "bleak-esphome==3.4.0"
  ],
  "zeroconf": ["_esphomelib._tcp.local."]
}
@@ -22,6 +22,7 @@ from google.protobuf import timestamp_pb2
from homeassistant.components.sensor import (
    SensorDeviceClass,
    SensorEntity,
+   SensorEntityDescription,
    SensorStateClass,
)
from homeassistant.config_entries import ConfigEntry
@@ -91,6 +92,16 @@ def convert_time(time_str: str) -> timestamp_pb2.Timestamp | None:
    return timestamp


+SENSOR_DESCRIPTIONS = [
+   SensorEntityDescription(
+       key="duration",
+       state_class=SensorStateClass.MEASUREMENT,
+       device_class=SensorDeviceClass.DURATION,
+       native_unit_of_measurement=UnitOfTime.MINUTES,
+   )
+]
+
+
async def async_setup_entry(
    hass: HomeAssistant,
    config_entry: ConfigEntry,
@@ -105,20 +116,20 @@ async def async_setup_entry(
    client_options = ClientOptions(api_key=api_key)
    client = RoutesAsyncClient(client_options=client_options)

-   sensor = GoogleTravelTimeSensor(
-       config_entry, name, api_key, origin, destination, client
-   )
+   sensors = [
+       GoogleTravelTimeSensor(
+           config_entry, name, api_key, origin, destination, client, sensor_description
+       )
+       for sensor_description in SENSOR_DESCRIPTIONS
+   ]

-   async_add_entities([sensor], False)
+   async_add_entities(sensors, False)


class GoogleTravelTimeSensor(SensorEntity):
    """Representation of a Google travel time sensor."""

    _attr_attribution = ATTRIBUTION
-   _attr_native_unit_of_measurement = UnitOfTime.MINUTES
-   _attr_device_class = SensorDeviceClass.DURATION
-   _attr_state_class = SensorStateClass.MEASUREMENT

    def __init__(
        self,
@@ -128,8 +139,10 @@ class GoogleTravelTimeSensor(SensorEntity):
        origin: str,
        destination: str,
        client: RoutesAsyncClient,
+       sensor_description: SensorEntityDescription,
    ) -> None:
        """Initialize the sensor."""
+       self.entity_description = sensor_description
        self._attr_name = name
        self._attr_unique_id = config_entry.entry_id
        self._attr_device_info = DeviceInfo(
@@ -73,7 +73,6 @@ class HassioAddonSwitch(HassioAddonEntity, SwitchEntity):
        try:
            await supervisor_client.addons.start_addon(self._addon_slug)
        except SupervisorError as err:
-           _LOGGER.error("Failed to start addon %s: %s", self._addon_slug, err)
            raise HomeAssistantError(err) from err

        await self.coordinator.force_addon_info_data_refresh(self._addon_slug)
@@ -10,7 +10,7 @@
  "loggers": ["pyhap"],
  "requirements": [
    "HAP-python==5.0.0",
-   "fnv-hash-fast==1.5.0",
+   "fnv-hash-fast==1.6.0",
    "PyQRCode==1.2.1",
    "base36==0.1.1"
  ],
@@ -8,13 +8,16 @@ from idasen_ha import Desk

from homeassistant.components import bluetooth
from homeassistant.config_entries import ConfigEntry
-from homeassistant.core import HomeAssistant
+from homeassistant.core import HomeAssistant, callback
+from homeassistant.helpers.debounce import Debouncer
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator

_LOGGER = logging.getLogger(__name__)

type IdasenDeskConfigEntry = ConfigEntry[IdasenDeskCoordinator]

+UPDATE_DEBOUNCE_TIME = 0.2
+

class IdasenDeskCoordinator(DataUpdateCoordinator[int | None]):
    """Class to manage updates for the Idasen Desk."""
@@ -33,9 +36,22 @@ class IdasenDeskCoordinator(DataUpdateCoordinator[int | None]):
            hass, _LOGGER, config_entry=config_entry, name=config_entry.title
        )
        self.address = address
-       self._expected_connected = False
-       self.desk = Desk(self.async_set_updated_data)
+       self.desk = Desk(self._async_handle_update)
+
+       self._expected_connected = False
+       self._height: int | None = None
+
+       @callback
+       def async_update_data() -> None:
+           self.async_set_updated_data(self._height)
+
+       self._debouncer = Debouncer(
+           hass=self.hass,
+           logger=_LOGGER,
+           cooldown=UPDATE_DEBOUNCE_TIME,
+           immediate=True,
+           function=async_update_data,
+       )

    async def async_connect(self) -> bool:
        """Connect to desk."""
@@ -60,3 +76,9 @@ class IdasenDeskCoordinator(DataUpdateCoordinator[int | None]):
        """Ensure that the desk is connected if that is the expected state."""
        if self._expected_connected:
            await self.async_connect()
+
+   @callback
+   def _async_handle_update(self, height: int | None) -> None:
+       """Handle an update from the desk."""
+       self._height = height
+       self._debouncer.async_schedule_call()
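Note on the IdasenDesk change above: height callbacks from the desk are now funneled through a Debouncer so bursts of BLE notifications collapse into one coordinator update. Below is a minimal, standalone sketch of the same debounce-with-immediate-first-call behaviour using only the standard library; the 0.2 s cooldown mirrors UPDATE_DEBOUNCE_TIME, and the class and names are illustrative, not part of the integration.

import asyncio

class SimpleDebouncer:
    """Collapse bursts of calls: run immediately, then at most once per cooldown."""

    def __init__(self, cooldown: float, function) -> None:
        self._cooldown = cooldown
        self._function = function
        self._timer: asyncio.TimerHandle | None = None
        self._pending = False

    def schedule_call(self) -> None:
        loop = asyncio.get_running_loop()
        if self._timer is None:
            self._function()          # "immediate=True" behaviour
            self._timer = loop.call_later(self._cooldown, self._expire)
        else:
            self._pending = True      # remember one trailing call

    def _expire(self) -> None:
        self._timer = None
        if self._pending:
            self._pending = False
            self.schedule_call()

async def main() -> None:
    heights: list[int] = []
    debouncer = SimpleDebouncer(0.2, lambda: heights.append(1))
    for _ in range(5):                # burst of desk notifications
        debouncer.schedule_call()
    await asyncio.sleep(0.5)
    print(len(heights))               # 2: one immediate call plus one trailing call

asyncio.run(main())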
@@ -147,8 +147,9 @@ class KrakenData:

    def _get_websocket_name_asset_pairs(self) -> str:
        return ",".join(
-           self.tradable_asset_pairs[tracked_pair]
+           pair
            for tracked_pair in self._config_entry.options[CONF_TRACKED_ASSET_PAIRS]
+           if (pair := self.tradable_asset_pairs.get(tracked_pair)) is not None
        )

    def set_update_interval(self, update_interval: int) -> None:
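The Kraken fix above swaps a direct dict lookup for .get() plus a walrus assignment, so tracked pairs that are missing from tradable_asset_pairs are skipped instead of raising KeyError. An isolated sketch of that filtering pattern, with made-up sample data:

tradable_asset_pairs = {"ETH/USD": "XETHZUSD", "BTC/USD": "XXBTZUSD"}
tracked = ["ETH/USD", "DOGE/USD", "BTC/USD"]  # DOGE/USD is not tradable here

# Old behaviour: tradable_asset_pairs[tracked_pair] raises KeyError for DOGE/USD.
# New behaviour: .get() + walrus keeps only pairs that have a websocket name.
ws_name_pairs = ",".join(
    pair
    for tracked_pair in tracked
    if (pair := tradable_asset_pairs.get(tracked_pair)) is not None
)
print(ws_name_pairs)  # XETHZUSD,XXBTZUSD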
@@ -156,7 +156,7 @@ async def async_setup_entry(
                for description in SENSOR_TYPES
            ]
        )
-       async_add_entities(entities, True)
+       async_add_entities(entities)

    _async_add_kraken_sensors(config_entry.options[CONF_TRACKED_ASSET_PAIRS])
@@ -20,5 +20,5 @@
  "documentation": "https://www.home-assistant.io/integrations/ld2410_ble",
  "integration_type": "device",
  "iot_class": "local_push",
- "requirements": ["bluetooth-data-tools==1.28.2", "ld2410-ble==0.1.1"]
+ "requirements": ["bluetooth-data-tools==1.28.3", "ld2410-ble==0.1.1"]
}
@@ -35,5 +35,5 @@
  "dependencies": ["bluetooth_adapters"],
  "documentation": "https://www.home-assistant.io/integrations/led_ble",
  "iot_class": "local_polling",
- "requirements": ["bluetooth-data-tools==1.28.2", "led-ble==1.1.7"]
+ "requirements": ["bluetooth-data-tools==1.28.3", "led-ble==1.1.7"]
}
@@ -196,11 +196,11 @@ class LocalTodoListEntity(TodoListEntity):
        item_idx: dict[str, int] = {itm.uid: idx for idx, itm in enumerate(todos)}
        if uid not in item_idx:
            raise HomeAssistantError(
-               "Item '{uid}' not found in todo list {self.entity_id}"
+               f"Item '{uid}' not found in todo list {self.entity_id}"
            )
        if previous_uid and previous_uid not in item_idx:
            raise HomeAssistantError(
-               "Item '{previous_uid}' not found in todo list {self.entity_id}"
+               f"Item '{previous_uid}' not found in todo list {self.entity_id}"
            )
        dst_idx = item_idx[previous_uid] + 1 if previous_uid else 0
        src_idx = item_idx[uid]
@@ -88,6 +88,17 @@ DISCOVERY_SCHEMAS = [
        entity_class=MatterBinarySensor,
        required_attributes=(clusters.OccupancySensing.Attributes.Occupancy,),
    ),
+   MatterDiscoverySchema(
+       platform=Platform.BINARY_SENSOR,
+       entity_description=MatterBinarySensorEntityDescription(
+           key="ThermostatOccupancySensor",
+           device_class=BinarySensorDeviceClass.OCCUPANCY,
+           # The first bit = if occupied
+           device_to_ha=lambda x: (x & 1 == 1) if x is not None else None,
+       ),
+       entity_class=MatterBinarySensor,
+       required_attributes=(clusters.Thermostat.Attributes.Occupancy,),
+   ),
    MatterDiscoverySchema(
        platform=Platform.BINARY_SENSOR,
        entity_description=MatterBinarySensorEntityDescription(
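The new ThermostatOccupancySensor schema above maps the Thermostat cluster's Occupancy bitmap to a binary sensor by testing only bit 0 (the "occupied" flag). A small standalone sketch of that device_to_ha conversion; the raw values are illustrative, not Matter SDK calls:

def occupancy_to_ha(raw: int | None) -> bool | None:
    """Bit 0 of the Occupancy bitmap is the occupied flag."""
    return (raw & 1 == 1) if raw is not None else None

for raw in (None, 0, 1, 3):
    print(raw, "->", occupancy_to_ha(raw))
# None -> None, 0 -> False, 1 -> True, 3 -> True (higher bits are ignored)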
@@ -188,7 +188,10 @@ class MqttLock(MqttEntity, LockEntity):
            return
        if payload == self._config[CONF_PAYLOAD_RESET]:
            # Reset the state to `unknown`
-           self._attr_is_locked = None
+           self._attr_is_locked = self._attr_is_locking = None
+           self._attr_is_unlocking = None
+           self._attr_is_open = self._attr_is_opening = None
+           self._attr_is_jammed = None
        elif payload in self._valid_states:
            self._attr_is_locked = payload == self._config[CONF_STATE_LOCKED]
            self._attr_is_locking = payload == self._config[CONF_STATE_LOCKING]
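The MQTT lock change above clears every transitional attribute, not just is_locked, when the reset payload arrives, so the entity reports a fully unknown state. A hedged sketch of the same idea outside Home Assistant; the attribute names mirror the lock entity, the payload strings are illustrative:

class LockState:
    def __init__(self) -> None:
        self.is_locked = self.is_locking = self.is_unlocking = None
        self.is_open = self.is_opening = self.is_jammed = None

    def handle_payload(self, payload: str, reset_payload: str = "None") -> None:
        if payload == reset_payload:
            # Reset *all* state flags to unknown, not only is_locked.
            self.is_locked = self.is_locking = None
            self.is_unlocking = None
            self.is_open = self.is_opening = None
            self.is_jammed = None
        elif payload == "LOCKED":
            self.is_locked, self.is_locking = True, False

state = LockState()
state.handle_payload("LOCKED")
state.handle_payload("None")             # everything back to unknown
print(state.is_locked, state.is_jammed)  # None None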
@@ -8,6 +8,6 @@
  "iot_class": "cloud_polling",
  "loggers": ["pynordpool"],
  "quality_scale": "platinum",
- "requirements": ["pynordpool==0.3.0"],
+ "requirements": ["pynordpool==0.3.1"],
  "single_config_entry": true
}
@@ -35,7 +35,8 @@ from .const import CONF_DELETE_PERMANENTLY, DATA_BACKUP_AGENT_LISTENERS, DOMAIN
from .coordinator import OneDriveConfigEntry

_LOGGER = logging.getLogger(__name__)
-UPLOAD_CHUNK_SIZE = 32 * 320 * 1024  # 10.4MB
+MAX_CHUNK_SIZE = 60 * 1024 * 1024  # largest chunk possible, must be <= 60 MiB
+TARGET_CHUNKS = 20
TIMEOUT = ClientTimeout(connect=10, total=43200)  # 12 hours
METADATA_VERSION = 2
CACHE_TTL = 300
@@ -161,11 +162,21 @@ class OneDriveBackupAgent(BackupAgent):
            self._folder_id,
            await open_stream(),
        )

+       # determine chunk based on target chunks
+       upload_chunk_size = backup.size / TARGET_CHUNKS
+       # find the nearest multiple of 320KB
+       upload_chunk_size = round(upload_chunk_size / (320 * 1024)) * (320 * 1024)
+       # limit to max chunk size
+       upload_chunk_size = min(upload_chunk_size, MAX_CHUNK_SIZE)
+       # ensure minimum chunk size of 320KB
+       upload_chunk_size = max(upload_chunk_size, 320 * 1024)
+
        try:
            backup_file = await LargeFileUploadClient.upload(
                self._token_function,
                file,
-               upload_chunk_size=UPLOAD_CHUNK_SIZE,
+               upload_chunk_size=upload_chunk_size,
                session=async_get_clientsession(self._hass),
            )
        except HashMismatchError as err:
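The OneDrive upload above now derives the chunk size from the backup size (aiming for roughly TARGET_CHUNKS chunks) instead of a fixed 32 x 320 KiB, rounding to a multiple of 320 KiB and capping at 60 MiB. A standalone sketch of the same arithmetic; the backup sizes are example values:

KIB_320 = 320 * 1024
MAX_CHUNK_SIZE = 60 * 1024 * 1024  # largest chunk allowed, <= 60 MiB
TARGET_CHUNKS = 20

def chunk_size_for(backup_size: int) -> int:
    """Pick an upload chunk size: ~TARGET_CHUNKS chunks, aligned to 320 KiB."""
    size = backup_size / TARGET_CHUNKS
    size = round(size / KIB_320) * KIB_320   # nearest multiple of 320 KiB
    size = min(size, MAX_CHUNK_SIZE)         # cap at the maximum chunk size
    return max(size, KIB_320)                # never below one 320 KiB block

for backup_size in (1 * 1024**2, 500 * 1024**2, 10 * 1024**3):
    print(backup_size, "->", chunk_size_for(backup_size))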
homeassistant/components/oralb/icons.json (new file, 61 lines)
@@ -0,0 +1,61 @@
{
  "entity": {
    "sensor": {
      "pressure": {
        "default": "mdi:tooth-outline",
        "state": {
          "high": "mdi:tooth",
          "low": "mdi:alert",
          "power_button_pressed": "mdi:power",
          "button_pressed": "mdi:radiobox-marked"
        }
      },
      "sector": {
        "default": "mdi:circle-outline",
        "state": {
          "sector_1": "mdi:circle-slice-2",
          "sector_2": "mdi:circle-slice-4",
          "sector_3": "mdi:circle-slice-6",
          "sector_4": "mdi:circle-slice-8",
          "success": "mdi:check-circle-outline"
        }
      },
      "toothbrush_state": {
        "default": "mdi:toothbrush-electric",
        "state": {
          "initializing": "mdi:sync",
          "idle": "mdi:toothbrush-electric",
          "running": "mdi:waveform",
          "charging": "mdi:battery-charging",
          "setup": "mdi:wrench",
          "flight_menu": "mdi:airplane",
          "selection_menu": "mdi:menu",
          "off": "mdi:power",
          "sleeping": "mdi:sleep",
          "transport": "mdi:dolly"
        }
      },
      "number_of_sectors": {
        "default": "mdi:chart-pie"
      },
      "mode": {
        "default": "mdi:toothbrush-paste",
        "state": {
          "daily_clean": "mdi:repeat-once",
          "sensitive": "mdi:feather",
          "gum_care": "mdi:tooth-outline",
          "intense": "mdi:shape-circle-plus",
          "whitening": "mdi:shimmer",
          "whiten": "mdi:shimmer",
          "tongue_cleaning": "mdi:gate-and",
          "super_sensitive": "mdi:feather",
          "massage": "mdi:spa",
          "deep_clean": "mdi:water",
          "turbo": "mdi:car-turbocharger",
          "off": "mdi:power",
          "settings": "mdi:cog-outline"
        }
      }
    }
  }
}
@@ -3,6 +3,13 @@

from __future__ import annotations

from oralb_ble import OralBSensor, SensorUpdate
+from oralb_ble.parser import (
+   IO_SERIES_MODES,
+   PRESSURE,
+   SECTOR_MAP,
+   SMART_SERIES_MODES,
+   STATES,
+)

from homeassistant.components.bluetooth.passive_update_processor import (
    PassiveBluetoothDataProcessor,
@@ -39,6 +46,8 @@ SENSOR_DESCRIPTIONS: dict[str, SensorEntityDescription] = {
        key=OralBSensor.SECTOR,
        translation_key="sector",
        entity_category=EntityCategory.DIAGNOSTIC,
+       options=[v.replace(" ", "_") for v in set(SECTOR_MAP.values()) | {"no_sector"}],
+       device_class=SensorDeviceClass.ENUM,
    ),
    OralBSensor.NUMBER_OF_SECTORS: SensorEntityDescription(
        key=OralBSensor.NUMBER_OF_SECTORS,
@@ -53,16 +62,26 @@ SENSOR_DESCRIPTIONS: dict[str, SensorEntityDescription] = {
    ),
    OralBSensor.TOOTHBRUSH_STATE: SensorEntityDescription(
        key=OralBSensor.TOOTHBRUSH_STATE,
        translation_key="toothbrush_state",
+       options=[v.replace(" ", "_") for v in set(STATES.values())],
+       device_class=SensorDeviceClass.ENUM,
        name=None,
    ),
    OralBSensor.PRESSURE: SensorEntityDescription(
        key=OralBSensor.PRESSURE,
        translation_key="pressure",
+       options=[v.replace(" ", "_") for v in set(PRESSURE.values()) | {"low"}],
+       device_class=SensorDeviceClass.ENUM,
    ),
    OralBSensor.MODE: SensorEntityDescription(
        key=OralBSensor.MODE,
        translation_key="mode",
        entity_category=EntityCategory.DIAGNOSTIC,
+       options=[
+           v.replace(" ", "_")
+           for v in set(IO_SERIES_MODES.values()) | set(SMART_SERIES_MODES.values())
+       ],
+       device_class=SensorDeviceClass.ENUM,
    ),
    OralBSensor.SIGNAL_STRENGTH: SensorEntityDescription(
        key=OralBSensor.SIGNAL_STRENGTH,
@@ -134,7 +153,15 @@ class OralBBluetoothSensorEntity(
    @property
    def native_value(self) -> str | int | None:
        """Return the native value."""
-       return self.processor.entity_data.get(self.entity_key)
+       value = self.processor.entity_data.get(self.entity_key)
+       if isinstance(value, str):
+           value = value.replace(" ", "_")
+       if (
+           self.entity_description.options is not None
+           and value not in self.entity_description.options
+       ):  # append unknown values to enum
+           self.entity_description.options.append(value)
+       return value

    @property
    def available(self) -> bool:
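native_value above normalizes the raw OralB strings into valid ENUM options (spaces become underscores) and appends any value the options list does not yet contain, so a firmware state that was never mapped still renders instead of failing enum validation. A minimal sketch of that normalization; the sample option list and value are illustrative:

options = ["running", "idle", "charging"]

def normalize(value: str | int | None) -> str | int | None:
    if isinstance(value, str):
        value = value.replace(" ", "_")
    if value is not None and value not in options:
        options.append(value)  # keep unknown values selectable instead of erroring
    return value

print(normalize("flight menu"))  # flight_menu
print(options)                   # ['running', 'idle', 'charging', 'flight_menu']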
@@ -22,7 +22,15 @@
  "entity": {
    "sensor": {
      "sector": {
-       "name": "Sector"
+       "name": "Sector",
+       "state": {
+         "no_sector": "No sector",
+         "sector_1": "Sector 1",
+         "sector_2": "Sector 2",
+         "sector_3": "Sector 3",
+         "sector_4": "Sector 4",
+         "success": "Success"
+       }
      },
      "number_of_sectors": {
        "name": "Number of sectors"
@@ -31,10 +39,48 @@
        "name": "Sector timer"
      },
      "pressure": {
-       "name": "Pressure"
+       "name": "Pressure",
+       "state": {
+         "normal": "[%key:common::state::normal%]",
+         "high": "[%key:common::state::high%]",
+         "low": "[%key:common::state::low%]",
+         "power_button_pressed": "Power button pressed",
+         "button_pressed": "Button pressed"
+       }
      },
      "mode": {
-       "name": "Brushing mode"
+       "name": "Brushing mode",
+       "state": {
+         "daily_clean": "Daily clean",
+         "sensitive": "Sensitive",
+         "gum_care": "Gum care",
+         "intense": "Intense",
+         "whitening": "Whiten",
+         "whiten": "[%key:component::oralb::entity::sensor::mode::state::whitening%]",
+         "tongue_cleaning": "Tongue clean",
+         "super_sensitive": "Super sensitive",
+         "massage": "Massage",
+         "deep_clean": "Deep clean",
+         "turbo": "Turbo",
+         "off": "[%key:common::state::off%]",
+         "settings": "Settings"
+       }
      },
+     "toothbrush_state": {
+       "state": {
+         "initializing": "Initializing",
+         "idle": "[%key:common::state::idle%]",
+         "running": "Running",
+         "charging": "[%key:common::state::charging%]",
+         "setup": "Setup",
+         "flight_menu": "Flight menu",
+         "selection_menu": "Selection menu",
+         "off": "[%key:common::state::off%]",
+         "sleeping": "Sleeping",
+         "transport": "Transport",
+         "final_test": "Final test",
+         "pcb_test": "PCB test"
+       }
+     }
    }
  }
@@ -1,52 +1,36 @@
"""Support for Plum Lightpad devices."""

-import logging
-
-from aiohttp import ContentTypeError
-from requests.exceptions import ConnectTimeout, HTTPError
-
-from homeassistant.config_entries import ConfigEntry
-from homeassistant.const import (
-   CONF_PASSWORD,
-   CONF_USERNAME,
-   EVENT_HOMEASSISTANT_STOP,
-   Platform,
-)
+from homeassistant.config_entries import ConfigEntry, ConfigEntryState
from homeassistant.core import HomeAssistant
-from homeassistant.exceptions import ConfigEntryNotReady
+from homeassistant.helpers import issue_registry as ir

-from .const import DOMAIN
-from .utils import load_plum
-
-_LOGGER = logging.getLogger(__name__)
-
-PLATFORMS = [Platform.LIGHT]
+DOMAIN = "plum_lightpad"


-async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
+async def async_setup_entry(hass: HomeAssistant, _: ConfigEntry) -> bool:
    """Set up Plum Lightpad from a config entry."""
-   _LOGGER.debug("Setting up config entry with ID = %s", entry.unique_id)
-
-   username = entry.data[CONF_USERNAME]
-   password = entry.data[CONF_PASSWORD]
-
-   try:
-       plum = await load_plum(username, password, hass)
-   except ContentTypeError as ex:
-       _LOGGER.error("Unable to authenticate to Plum cloud: %s", ex)
-       return False
-   except (ConnectTimeout, HTTPError) as ex:
-       _LOGGER.error("Unable to connect to Plum cloud: %s", ex)
-       raise ConfigEntryNotReady from ex
-
-   hass.data.setdefault(DOMAIN, {})
-   hass.data[DOMAIN][entry.entry_id] = plum
-
-   await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
-
-   def cleanup(event):
-       """Clean up resources."""
-       plum.cleanup()
-
-   entry.async_on_unload(hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, cleanup))
+   ir.async_create_issue(
+       hass,
+       DOMAIN,
+       DOMAIN,
+       is_fixable=False,
+       severity=ir.IssueSeverity.ERROR,
+       translation_key="integration_removed",
+       translation_placeholders={
+           "entries": "/config/integrations/integration/plum_lightpad",
+       },
+   )
+
    return True
+
+
+async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
+   """Unload config entry."""
+   if all(
+       config_entry.state is ConfigEntryState.NOT_LOADED
+       for config_entry in hass.config_entries.async_entries(DOMAIN)
+       if config_entry.entry_id != entry.entry_id
+   ):
+       ir.async_delete_issue(hass, DOMAIN, DOMAIN)
+
+   return True
@@ -2,59 +2,12 @@

from __future__ import annotations

-import logging
-from typing import Any
-
-from aiohttp import ContentTypeError
-from requests.exceptions import ConnectTimeout, HTTPError
-import voluptuous as vol
-
-from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
-from homeassistant.const import CONF_PASSWORD, CONF_USERNAME
-
-from .const import DOMAIN
-from .utils import load_plum
-
-_LOGGER = logging.getLogger(__name__)
+from homeassistant.config_entries import ConfigFlow
+
+from . import DOMAIN


class PlumLightpadConfigFlow(ConfigFlow, domain=DOMAIN):
    """Config flow for Plum Lightpad integration."""

    VERSION = 1
-
-   def _show_form(self, errors=None):
-       schema = {
-           vol.Required(CONF_USERNAME): str,
-           vol.Required(CONF_PASSWORD): str,
-       }
-
-       return self.async_show_form(
-           step_id="user",
-           data_schema=vol.Schema(schema),
-           errors=errors or {},
-       )
-
-   async def async_step_user(
-       self, user_input: dict[str, Any] | None = None
-   ) -> ConfigFlowResult:
-       """Handle a flow initialized by the user or redirected to by import."""
-       if not user_input:
-           return self._show_form()
-
-       username = user_input[CONF_USERNAME]
-       password = user_input[CONF_PASSWORD]
-
-       # load Plum just so we know username/password work
-       try:
-           await load_plum(username, password, self.hass)
-       except (ContentTypeError, ConnectTimeout, HTTPError) as ex:
-           _LOGGER.error("Unable to connect/authenticate to Plum cloud: %s", str(ex))
-           return self._show_form({"base": "cannot_connect"})
-
-       await self.async_set_unique_id(username)
-       self._abort_if_unique_id_configured()
-
-       return self.async_create_entry(
-           title=username, data={CONF_USERNAME: username, CONF_PASSWORD: password}
-       )
@@ -1,3 +0,0 @@
"""Constants for the Plum Lightpad component."""

DOMAIN = "plum_lightpad"
@@ -1,9 +0,0 @@
{
  "entity": {
    "light": {
      "glow_ring": {
        "default": "mdi:crop-portrait"
      }
    }
  }
}
@@ -1,201 +0,0 @@
"""Support for Plum Lightpad lights."""

from __future__ import annotations

from typing import Any

from plumlightpad import Plum

from homeassistant.components.light import (
    ATTR_BRIGHTNESS,
    ATTR_HS_COLOR,
    ColorMode,
    LightEntity,
)
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.device_registry import DeviceInfo
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from homeassistant.util import color as color_util

from .const import DOMAIN


async def async_setup_entry(
    hass: HomeAssistant,
    entry: ConfigEntry,
    async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
    """Set up Plum Lightpad dimmer lights and glow rings."""

    plum: Plum = hass.data[DOMAIN][entry.entry_id]

    def setup_entities(device) -> None:
        entities: list[LightEntity] = []

        if "lpid" in device:
            lightpad = plum.get_lightpad(device["lpid"])
            entities.append(GlowRing(lightpad=lightpad))

        if "llid" in device:
            logical_load = plum.get_load(device["llid"])
            entities.append(PlumLight(load=logical_load))

        async_add_entities(entities)

    async def new_load(device):
        setup_entities(device)

    async def new_lightpad(device):
        setup_entities(device)

    device_web_session = async_get_clientsession(hass, verify_ssl=False)
    entry.async_create_background_task(
        hass,
        plum.discover(
            hass.loop,
            loadListener=new_load,
            lightpadListener=new_lightpad,
            websession=device_web_session,
        ),
        "plum.light-discover",
    )


class PlumLight(LightEntity):
    """Representation of a Plum Lightpad dimmer."""

    _attr_should_poll = False
    _attr_has_entity_name = True
    _attr_name = None

    def __init__(self, load):
        """Initialize the light."""
        self._load = load
        self._brightness = load.level
        unique_id = f"{load.llid}.light"
        self._attr_unique_id = unique_id
        self._attr_device_info = DeviceInfo(
            identifiers={(DOMAIN, unique_id)},
            manufacturer="Plum",
            model="Dimmer",
            name=load.name,
        )

    async def async_added_to_hass(self) -> None:
        """Subscribe to dimmerchange events."""
        self._load.add_event_listener("dimmerchange", self.dimmerchange)

    def dimmerchange(self, event):
        """Change event handler updating the brightness."""
        self._brightness = event["level"]
        self.schedule_update_ha_state()

    @property
    def brightness(self) -> int:
        """Return the brightness of this switch between 0..255."""
        return self._brightness

    @property
    def is_on(self) -> bool:
        """Return true if light is on."""
        return self._brightness > 0

    @property
    def color_mode(self) -> ColorMode:
        """Flag supported features."""
        if self._load.dimmable:
            return ColorMode.BRIGHTNESS
        return ColorMode.ONOFF

    @property
    def supported_color_modes(self) -> set[ColorMode]:
        """Flag supported color modes."""
        return {self.color_mode}

    async def async_turn_on(self, **kwargs: Any) -> None:
        """Turn the light on."""
        if ATTR_BRIGHTNESS in kwargs:
            await self._load.turn_on(kwargs[ATTR_BRIGHTNESS])
        else:
            await self._load.turn_on()

    async def async_turn_off(self, **kwargs: Any) -> None:
        """Turn the light off."""
        await self._load.turn_off()


class GlowRing(LightEntity):
    """Representation of a Plum Lightpad dimmer glow ring."""

    _attr_color_mode = ColorMode.HS
    _attr_should_poll = False
    _attr_translation_key = "glow_ring"
    _attr_supported_color_modes = {ColorMode.HS}

    def __init__(self, lightpad):
        """Initialize the light."""
        self._lightpad = lightpad
        self._attr_name = f"{lightpad.friendly_name} Glow Ring"

        self._attr_is_on = lightpad.glow_enabled
        self._glow_intensity = lightpad.glow_intensity
        unique_id = f"{self._lightpad.lpid}.glow"
        self._attr_unique_id = unique_id

        self._red = lightpad.glow_color["red"]
        self._green = lightpad.glow_color["green"]
        self._blue = lightpad.glow_color["blue"]
        self._attr_device_info = DeviceInfo(
            identifiers={(DOMAIN, unique_id)},
            manufacturer="Plum",
            model="Glow Ring",
            name=self._attr_name,
        )

    async def async_added_to_hass(self) -> None:
        """Subscribe to configchange events."""
        self._lightpad.add_event_listener("configchange", self.configchange_event)

    def configchange_event(self, event):
        """Handle Configuration change event."""
        config = event["changes"]

        self._attr_is_on = config["glowEnabled"]
        self._glow_intensity = config["glowIntensity"]

        self._red = config["glowColor"]["red"]
        self._green = config["glowColor"]["green"]
        self._blue = config["glowColor"]["blue"]
        self.schedule_update_ha_state()

    @property
    def hs_color(self):
        """Return the hue and saturation color value [float, float]."""
        return color_util.color_RGB_to_hs(self._red, self._green, self._blue)

    @property
    def brightness(self) -> int:
        """Return the brightness of this switch between 0..255."""
        return min(max(int(round(self._glow_intensity * 255, 0)), 0), 255)

    async def async_turn_on(self, **kwargs: Any) -> None:
        """Turn the light on."""
        if ATTR_BRIGHTNESS in kwargs:
            brightness_pct = kwargs[ATTR_BRIGHTNESS] / 255.0
            await self._lightpad.set_config({"glowIntensity": brightness_pct})
        elif ATTR_HS_COLOR in kwargs:
            hs_color = kwargs[ATTR_HS_COLOR]
            red, green, blue = color_util.color_hs_to_RGB(*hs_color)
            await self._lightpad.set_glow_color(red, green, blue, 0)
        else:
            await self._lightpad.set_config({"glowEnabled": True})

    async def async_turn_off(self, **kwargs: Any) -> None:
        """Turn the light off."""
        if ATTR_BRIGHTNESS in kwargs:
            brightness_pct = kwargs[ATTR_BRIGHTNESS] / 255.0
            await self._lightpad.set_config({"glowIntensity": brightness_pct})
        else:
            await self._lightpad.set_config({"glowEnabled": False})
@@ -1,10 +1,9 @@
{
  "domain": "plum_lightpad",
  "name": "Plum Lightpad",
- "codeowners": ["@ColinHarrington", "@prystupa"],
- "config_flow": true,
+ "codeowners": [],
  "documentation": "https://www.home-assistant.io/integrations/plum_lightpad",
+ "integration_type": "system",
  "iot_class": "local_push",
- "loggers": ["plumlightpad"],
- "requirements": ["plumlightpad==0.0.11"]
+ "requirements": []
}
@@ -1,18 +1,8 @@
{
- "config": {
-   "step": {
-     "user": {
-       "data": {
-         "username": "[%key:common::config_flow::data::email%]",
-         "password": "[%key:common::config_flow::data::password%]"
-       }
-     }
-   },
-   "error": {
-     "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]"
-   },
-   "abort": {
-     "already_configured": "[%key:common::config_flow::abort::already_configured_account%]"
-   }
- }
+ "issues": {
+   "integration_removed": {
+     "title": "The Plum Lightpad integration has been removed",
+     "description": "The Plum Lightpad integration has been removed from Home Assistant.\n\nThe required cloud services are no longer available since the Plum servers have been shut down. To resolve this issue, please remove the (now defunct) integration entries from your Home Assistant setup. [Click here to see your existing Plum Lightpad integration entries]({entries})."
+   }
+ }
}
@@ -1,14 +0,0 @@
"""Reusable utilities for the Plum Lightpad component."""

from plumlightpad import Plum

from homeassistant.core import HomeAssistant
from homeassistant.helpers.aiohttp_client import async_get_clientsession


async def load_plum(username: str, password: str, hass: HomeAssistant) -> Plum:
    """Initialize Plum Lightpad API and load metadata stored in the cloud."""
    plum = Plum(username, password)
    cloud_web_session = async_get_clientsession(hass, verify_ssl=True)
    await plum.loadCloudData(cloud_web_session)
    return plum
@@ -18,7 +18,7 @@ from homeassistant.helpers.aiohttp_client import async_create_clientsession

from .coordinator import PortainerCoordinator

-_PLATFORMS: list[Platform] = [Platform.BINARY_SENSOR]
+_PLATFORMS: list[Platform] = [Platform.BINARY_SENSOR, Platform.SWITCH]

type PortainerConfigEntry = ConfigEntry[PortainerCoordinator]
@@ -131,15 +131,7 @@ class PortainerContainerSensor(PortainerContainerEntity, BinarySensorEntity):
        self.entity_description = entity_description
        super().__init__(device_info, coordinator, via_device)

-       # Container ID's are ephemeral, so use the container name for the unique ID
-       # The first one, should always be unique, it's fine if users have aliases
-       # According to Docker's API docs, the first name is unique
-       device_identifier = (
-           self._device_info.names[0].replace("/", " ").strip()
-           if self._device_info.names
-           else None
-       )
-       self._attr_unique_id = f"{coordinator.config_entry.entry_id}_{device_identifier}_{entity_description.key}"
+       self._attr_unique_id = f"{coordinator.config_entry.entry_id}_{self.device_name}_{entity_description.key}"

    @property
    def available(self) -> bool:
@@ -57,25 +57,25 @@ class PortainerContainerEntity(PortainerCoordinatorEntity):
        self.device_id = self._device_info.id
        self.endpoint_id = via_device.endpoint.id

-       device_name = (
-           self._device_info.names[0].replace("/", " ").strip()
-           if self._device_info.names
-           else None
-       )
+       # Container ID's are ephemeral, so use the container name for the unique ID
+       # The first one, should always be unique, it's fine if users have aliases
+       # According to Docker's API docs, the first name is unique
+       assert self._device_info.names, "Container names list unexpectedly empty"
+       self.device_name = self._device_info.names[0].replace("/", " ").strip()

        self._attr_device_info = DeviceInfo(
            identifiers={
-               (DOMAIN, f"{self.coordinator.config_entry.entry_id}_{device_name}")
+               (DOMAIN, f"{self.coordinator.config_entry.entry_id}_{self.device_name}")
            },
            manufacturer=DEFAULT_NAME,
            configuration_url=URL(
                f"{coordinator.config_entry.data[CONF_URL]}#!/{self.endpoint_id}/docker/containers/{self.device_id}"
            ),
            model="Container",
-           name=device_name,
+           name=self.device_name,
            via_device=(
                DOMAIN,
                f"{self.coordinator.config_entry.entry_id}_{self.endpoint_id}",
            ),
-           translation_key=None if device_name else "unknown_container",
+           translation_key=None if self.device_name else "unknown_container",
        )
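Both Portainer entities above now key their unique IDs off the first container name rather than the container ID, since Docker recreates containers with new IDs while the first name stays stable; the assert documents that Docker always returns at least one name. A small sketch of deriving that identifier from a Docker-style names list; the data is sample input, not pyportainer objects:

def container_device_name(names: list[str]) -> str:
    """Docker reports names like '/homeassistant'; the first one is unique."""
    assert names, "Container names list unexpectedly empty"
    return names[0].replace("/", " ").strip()

entry_id = "abc123"
names = ["/homeassistant", "/ha-alias"]
device_name = container_device_name(names)
unique_id = f"{entry_id}_{device_name}_container"
print(unique_id)  # abc123_homeassistant_container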
homeassistant/components/portainer/icons.json (new file, 12 lines)
@@ -0,0 +1,12 @@
{
  "entity": {
    "switch": {
      "container": {
        "default": "mdi:arrow-down-box",
        "state": {
          "on": "mdi:arrow-up-box"
        }
      }
    }
  }
}
@@ -45,6 +45,11 @@
      "status": {
        "name": "Status"
      }
    },
+   "switch": {
+     "container": {
+       "name": "Container"
+     }
+   }
  },
  "exceptions": {
homeassistant/components/portainer/switch.py (new file, 141 lines)
@@ -0,0 +1,141 @@
"""Switch platform for Portainer containers."""

from __future__ import annotations

from collections.abc import Callable, Coroutine
from dataclasses import dataclass
from typing import Any

from pyportainer import Portainer
from pyportainer.exceptions import (
    PortainerAuthenticationError,
    PortainerConnectionError,
    PortainerTimeoutError,
)
from pyportainer.models.docker import DockerContainer

from homeassistant.components.switch import (
    SwitchDeviceClass,
    SwitchEntity,
    SwitchEntityDescription,
)
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback

from . import PortainerConfigEntry
from .const import DOMAIN
from .coordinator import PortainerCoordinator
from .entity import PortainerContainerEntity, PortainerCoordinatorData


@dataclass(frozen=True, kw_only=True)
class PortainerSwitchEntityDescription(SwitchEntityDescription):
    """Class to hold Portainer switch description."""

    is_on_fn: Callable[[DockerContainer], bool | None]
    turn_on_fn: Callable[[str, Portainer, int, str], Coroutine[Any, Any, None]]
    turn_off_fn: Callable[[str, Portainer, int, str], Coroutine[Any, Any, None]]


async def perform_action(
    action: str, portainer: Portainer, endpoint_id: int, container_id: str
) -> None:
    """Stop a container."""
    try:
        if action == "start":
            await portainer.start_container(endpoint_id, container_id)
        elif action == "stop":
            await portainer.stop_container(endpoint_id, container_id)
    except PortainerAuthenticationError as err:
        raise HomeAssistantError(
            translation_domain=DOMAIN,
            translation_key="invalid_auth",
            translation_placeholders={"error": repr(err)},
        ) from err
    except PortainerConnectionError as err:
        raise HomeAssistantError(
            translation_domain=DOMAIN,
            translation_key="cannot_connect",
            translation_placeholders={"error": repr(err)},
        ) from err
    except PortainerTimeoutError as err:
        raise HomeAssistantError(
            translation_domain=DOMAIN,
            translation_key="timeout_connect",
            translation_placeholders={"error": repr(err)},
        ) from err


SWITCHES: tuple[PortainerSwitchEntityDescription, ...] = (
    PortainerSwitchEntityDescription(
        key="container",
        translation_key="container",
        device_class=SwitchDeviceClass.SWITCH,
        is_on_fn=lambda data: data.state == "running",
        turn_on_fn=perform_action,
        turn_off_fn=perform_action,
    ),
)


async def async_setup_entry(
    hass: HomeAssistant,
    entry: PortainerConfigEntry,
    async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
    """Set up Portainer switch sensors."""

    coordinator = entry.runtime_data

    async_add_entities(
        PortainerContainerSwitch(
            coordinator=coordinator,
            entity_description=entity_description,
            device_info=container,
            via_device=endpoint,
        )
        for endpoint in coordinator.data.values()
        for container in endpoint.containers.values()
        for entity_description in SWITCHES
    )


class PortainerContainerSwitch(PortainerContainerEntity, SwitchEntity):
    """Representation of a Portainer container switch."""

    entity_description: PortainerSwitchEntityDescription

    def __init__(
        self,
        coordinator: PortainerCoordinator,
        entity_description: PortainerSwitchEntityDescription,
        device_info: DockerContainer,
        via_device: PortainerCoordinatorData,
    ) -> None:
        """Initialize the Portainer container switch."""
        self.entity_description = entity_description
        super().__init__(device_info, coordinator, via_device)

        self._attr_unique_id = f"{coordinator.config_entry.entry_id}_{self.device_name}_{entity_description.key}"

    @property
    def is_on(self) -> bool | None:
        """Return the state of the device."""
        return self.entity_description.is_on_fn(
            self.coordinator.data[self.endpoint_id].containers[self.device_id]
        )

    async def async_turn_on(self, **kwargs: Any) -> None:
        """Start (turn on) the container."""
        await self.entity_description.turn_on_fn(
            "start", self.coordinator.portainer, self.endpoint_id, self.device_id
        )
        await self.coordinator.async_request_refresh()

    async def async_turn_off(self, **kwargs: Any) -> None:
        """Stop (turn off) the container."""
        await self.entity_description.turn_off_fn(
            "stop", self.coordinator.portainer, self.endpoint_id, self.device_id
        )
        await self.coordinator.async_request_refresh()
@@ -6,5 +6,5 @@
  "dependencies": ["bluetooth_adapters"],
  "documentation": "https://www.home-assistant.io/integrations/private_ble_device",
  "iot_class": "local_push",
- "requirements": ["bluetooth-data-tools==1.28.2"]
+ "requirements": ["bluetooth-data-tools==1.28.3"]
}
@@ -8,7 +8,7 @@
  "quality_scale": "internal",
  "requirements": [
    "SQLAlchemy==2.0.41",
-   "fnv-hash-fast==1.5.0",
+   "fnv-hash-fast==1.6.0",
    "psutil-home-assistant==0.0.1"
  ]
}
@@ -82,7 +82,7 @@ class RoborockFlowHandler(ConfigFlow, domain=DOMAIN):
        assert self._client
        errors: dict[str, str] = {}
        try:
-           await self._client.request_code()
+           await self._client.request_code_v4()
        except RoborockAccountDoesNotExist:
            errors["base"] = "invalid_email"
        except RoborockUrlException:
@@ -111,7 +111,7 @@ class RoborockFlowHandler(ConfigFlow, domain=DOMAIN):
            code = user_input[CONF_ENTRY_CODE]
            _LOGGER.debug("Logging into Roborock account using email provided code")
            try:
-               user_data = await self._client.code_login(code)
+               user_data = await self._client.code_login_v4(code)
            except RoborockInvalidCode:
                errors["base"] = "invalid_code"
            except RoborockException:
@@ -129,7 +129,7 @@ class RoborockFlowHandler(ConfigFlow, domain=DOMAIN):
                        reauth_entry, data_updates={CONF_USER_DATA: user_data.as_dict()}
                    )
                self._abort_if_unique_id_configured(error="already_configured_account")
-               return self._create_entry(self._client, self._username, user_data)
+               return await self._create_entry(self._client, self._username, user_data)

        return self.async_show_form(
            step_id="code",
@@ -176,7 +176,7 @@ class RoborockFlowHandler(ConfigFlow, domain=DOMAIN):
            return await self.async_step_code()
        return self.async_show_form(step_id="reauth_confirm", errors=errors)

-   def _create_entry(
+   async def _create_entry(
        self, client: RoborockApiClient, username: str, user_data: UserData
    ) -> ConfigFlowResult:
        """Finished config flow and create entry."""
@@ -185,7 +185,7 @@ class RoborockFlowHandler(ConfigFlow, domain=DOMAIN):
            data={
                CONF_USERNAME: username,
                CONF_USER_DATA: user_data.as_dict(),
-               CONF_BASE_URL: client.base_url,
+               CONF_BASE_URL: await client.base_url,
            },
        )
@@ -52,6 +52,12 @@
      "total_cleaning_time": {
        "default": "mdi:history"
      },
+     "cleaning_brush_time_left": {
+       "default": "mdi:brush"
+     },
+     "strainer_time_left": {
+       "default": "mdi:filter-variant"
+     },
      "status": {
        "default": "mdi:information-outline"
      },
@@ -19,7 +19,7 @@
  "loggers": ["roborock"],
  "quality_scale": "silver",
  "requirements": [
-   "python-roborock==2.49.1",
+   "python-roborock==2.50.2",
    "vacuum-map-parser-roborock==0.1.4"
  ]
}
@@ -101,6 +101,24 @@ SENSOR_DESCRIPTIONS = [
        entity_category=EntityCategory.DIAGNOSTIC,
        protocol_listener=RoborockDataProtocol.FILTER_WORK_TIME,
    ),
+   RoborockSensorDescription(
+       native_unit_of_measurement=UnitOfTime.HOURS,
+       key="cleaning_brush_time_left",
+       device_class=SensorDeviceClass.DURATION,
+       translation_key="cleaning_brush_time_left",
+       value_fn=lambda data: data.consumable.cleaning_brush_time_left,
+       entity_category=EntityCategory.DIAGNOSTIC,
+       is_dock_entity=True,
+   ),
+   RoborockSensorDescription(
+       native_unit_of_measurement=UnitOfTime.HOURS,
+       key="strainer_time_left",
+       device_class=SensorDeviceClass.DURATION,
+       translation_key="strainer_time_left",
+       value_fn=lambda data: data.consumable.strainer_time_left,
+       entity_category=EntityCategory.DIAGNOSTIC,
+       is_dock_entity=True,
+   ),
    RoborockSensorDescription(
        native_unit_of_measurement=UnitOfTime.SECONDS,
        key="sensor_time_left",
@@ -220,6 +220,12 @@
      "sensor_time_left": {
        "name": "Sensor time left"
      },
+     "cleaning_brush_time_left": {
+       "name": "Maintenance brush time left"
+     },
+     "strainer_time_left": {
+       "name": "Strainer time left"
+     },
      "status": {
        "name": "Status",
        "state": {
@@ -5,10 +4,6 @@ from typing import Any
from roborock.code_mappings import RoborockStateCode
from roborock.roborock_message import RoborockDataProtocol
from roborock.roborock_typing import RoborockCommand
-from vacuum_map_parser_base.config.color import ColorsPalette
-from vacuum_map_parser_base.config.image_config import ImageConfig
-from vacuum_map_parser_base.config.size import Sizes
-from vacuum_map_parser_roborock.map_data_parser import RoborockMapDataParser
import voluptuous as vol

from homeassistant.components.vacuum import (
@@ -223,8 +219,7 @@ class RoborockVacuum(RoborockCoordinatedEntityV1, StateVacuumEntity):
                translation_domain=DOMAIN,
                translation_key="map_failure",
            )
-       parser = RoborockMapDataParser(ColorsPalette(), Sizes(), [], ImageConfig(), [])
-       parsed_map = parser.parse(map_data)
+       parsed_map = self.coordinator.map_parser.parse(map_data)
        robot_position = parsed_map.vacuum_position

        if robot_position is None:
@@ -8,15 +8,26 @@ from homeassistant.components.climate import DOMAIN as CLIMATE_DOMAIN
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_API_KEY
from homeassistant.core import HomeAssistant
-from homeassistant.helpers import entity_registry as er
+from homeassistant.helpers import config_validation as cv, entity_registry as er
from homeassistant.helpers.device_registry import DeviceEntry
+from homeassistant.helpers.typing import ConfigType

from .const import DOMAIN, LOGGER, PLATFORMS
from .coordinator import SensiboDataUpdateCoordinator
+from .services import async_setup_services
from .util import NoDevicesError, NoUsernameError, async_validate_api

type SensiboConfigEntry = ConfigEntry[SensiboDataUpdateCoordinator]

+CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN)
+
+
+async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
+   """Set up the Sensibo component."""
+   async_setup_services(hass)
+
+   return True
+

async def async_setup_entry(hass: HomeAssistant, entry: SensiboConfigEntry) -> bool:
    """Set up Sensibo from a config entry."""
@@ -5,26 +5,14 @@ from __future__ import annotations
from bisect import bisect_left
from typing import TYPE_CHECKING, Any

-import voluptuous as vol
-
from homeassistant.components.climate import (
-   ATTR_FAN_MODE,
-   ATTR_HVAC_MODE,
-   ATTR_SWING_MODE,
    ClimateEntity,
    ClimateEntityFeature,
    HVACMode,
)
-from homeassistant.const import (
-   ATTR_MODE,
-   ATTR_STATE,
-   ATTR_TEMPERATURE,
-   PRECISION_TENTHS,
-   UnitOfTemperature,
-)
-from homeassistant.core import HomeAssistant, SupportsResponse
+from homeassistant.const import ATTR_TEMPERATURE, PRECISION_TENTHS, UnitOfTemperature
+from homeassistant.core import HomeAssistant
from homeassistant.exceptions import HomeAssistantError, ServiceValidationError
-from homeassistant.helpers import config_validation as cv, entity_platform
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from homeassistant.util.unit_conversion import TemperatureConverter
@@ -33,30 +21,6 @@ from .const import DOMAIN
from .coordinator import SensiboDataUpdateCoordinator
from .entity import SensiboDeviceBaseEntity, async_handle_api_call

-SERVICE_ASSUME_STATE = "assume_state"
-SERVICE_ENABLE_TIMER = "enable_timer"
-ATTR_MINUTES = "minutes"
-SERVICE_ENABLE_PURE_BOOST = "enable_pure_boost"
-SERVICE_DISABLE_PURE_BOOST = "disable_pure_boost"
-SERVICE_FULL_STATE = "full_state"
-SERVICE_ENABLE_CLIMATE_REACT = "enable_climate_react"
-SERVICE_GET_DEVICE_CAPABILITIES = "get_device_capabilities"
-ATTR_HIGH_TEMPERATURE_THRESHOLD = "high_temperature_threshold"
-ATTR_HIGH_TEMPERATURE_STATE = "high_temperature_state"
-ATTR_LOW_TEMPERATURE_THRESHOLD = "low_temperature_threshold"
-ATTR_LOW_TEMPERATURE_STATE = "low_temperature_state"
-ATTR_SMART_TYPE = "smart_type"
-
-ATTR_AC_INTEGRATION = "ac_integration"
-ATTR_GEO_INTEGRATION = "geo_integration"
-ATTR_INDOOR_INTEGRATION = "indoor_integration"
-ATTR_OUTDOOR_INTEGRATION = "outdoor_integration"
-ATTR_SENSITIVITY = "sensitivity"
-ATTR_TARGET_TEMPERATURE = "target_temperature"
-ATTR_HORIZONTAL_SWING_MODE = "horizontal_swing_mode"
-ATTR_LIGHT = "light"
-BOOST_INCLUSIVE = "boost_inclusive"
-
AVAILABLE_FAN_MODES = {
    "quiet",
    "low",
@@ -162,66 +126,6 @@ async def async_setup_entry(
    entry.async_on_unload(coordinator.async_add_listener(_add_remove_devices))
    _add_remove_devices()

-   platform = entity_platform.async_get_current_platform()
-   platform.async_register_entity_service(
-       SERVICE_ASSUME_STATE,
-       {
-           vol.Required(ATTR_STATE): vol.In(["on", "off"]),
-       },
-       "async_assume_state",
-   )
-   platform.async_register_entity_service(
-       SERVICE_ENABLE_TIMER,
-       {
-           vol.Required(ATTR_MINUTES): cv.positive_int,
-       },
-       "async_enable_timer",
-   )
-   platform.async_register_entity_service(
-       SERVICE_ENABLE_PURE_BOOST,
-       {
-           vol.Required(ATTR_AC_INTEGRATION): bool,
-           vol.Required(ATTR_GEO_INTEGRATION): bool,
-           vol.Required(ATTR_INDOOR_INTEGRATION): bool,
-           vol.Required(ATTR_OUTDOOR_INTEGRATION): bool,
-           vol.Required(ATTR_SENSITIVITY): vol.In(["normal", "sensitive"]),
-       },
-       "async_enable_pure_boost",
-   )
-   platform.async_register_entity_service(
-       SERVICE_FULL_STATE,
-       {
-           vol.Required(ATTR_MODE): vol.In(
-               ["cool", "heat", "fan", "auto", "dry", "off"]
-           ),
-           vol.Optional(ATTR_TARGET_TEMPERATURE): int,
-           vol.Optional(ATTR_FAN_MODE): str,
-           vol.Optional(ATTR_SWING_MODE): str,
-           vol.Optional(ATTR_HORIZONTAL_SWING_MODE): str,
-           vol.Optional(ATTR_LIGHT): vol.In(["on", "off", "dim"]),
-       },
-       "async_full_ac_state",
-   )
-   platform.async_register_entity_service(
-       SERVICE_ENABLE_CLIMATE_REACT,
-       {
-           vol.Required(ATTR_HIGH_TEMPERATURE_THRESHOLD): vol.Coerce(float),
-           vol.Required(ATTR_HIGH_TEMPERATURE_STATE): dict,
-           vol.Required(ATTR_LOW_TEMPERATURE_THRESHOLD): vol.Coerce(float),
-           vol.Required(ATTR_LOW_TEMPERATURE_STATE): dict,
-           vol.Required(ATTR_SMART_TYPE): vol.In(
-               ["temperature", "feelslike", "humidity"]
-           ),
-       },
-       "async_enable_climate_react",
-   )
-   platform.async_register_entity_service(
-       SERVICE_GET_DEVICE_CAPABILITIES,
-       {vol.Required(ATTR_HVAC_MODE): vol.Coerce(HVACMode)},
-       "async_get_device_capabilities",
-       supports_response=SupportsResponse.ONLY,
-   )


class SensiboClimate(SensiboDeviceBaseEntity, ClimateEntity):
    """Representation of a Sensibo climate device."""
homeassistant/components/sensibo/services.py (new file, 124 lines)
@@ -0,0 +1,124 @@
"""Sensibo services."""

from __future__ import annotations

import voluptuous as vol

from homeassistant.components.climate import (
    ATTR_FAN_MODE,
    ATTR_HVAC_MODE,
    ATTR_SWING_MODE,
    DOMAIN as CLIMATE_DOMAIN,
    HVACMode,
)
from homeassistant.const import ATTR_MODE, ATTR_STATE
from homeassistant.core import HomeAssistant, SupportsResponse, callback
from homeassistant.helpers import config_validation as cv, service

from .const import DOMAIN

SERVICE_ASSUME_STATE = "assume_state"
SERVICE_ENABLE_TIMER = "enable_timer"
ATTR_MINUTES = "minutes"
SERVICE_ENABLE_PURE_BOOST = "enable_pure_boost"
SERVICE_DISABLE_PURE_BOOST = "disable_pure_boost"
SERVICE_FULL_STATE = "full_state"
SERVICE_ENABLE_CLIMATE_REACT = "enable_climate_react"
SERVICE_GET_DEVICE_CAPABILITIES = "get_device_capabilities"
ATTR_HIGH_TEMPERATURE_THRESHOLD = "high_temperature_threshold"
ATTR_HIGH_TEMPERATURE_STATE = "high_temperature_state"
ATTR_LOW_TEMPERATURE_THRESHOLD = "low_temperature_threshold"
ATTR_LOW_TEMPERATURE_STATE = "low_temperature_state"
ATTR_SMART_TYPE = "smart_type"

ATTR_AC_INTEGRATION = "ac_integration"
ATTR_GEO_INTEGRATION = "geo_integration"
ATTR_INDOOR_INTEGRATION = "indoor_integration"
ATTR_OUTDOOR_INTEGRATION = "outdoor_integration"
ATTR_SENSITIVITY = "sensitivity"
ATTR_TARGET_TEMPERATURE = "target_temperature"
ATTR_HORIZONTAL_SWING_MODE = "horizontal_swing_mode"
ATTR_LIGHT = "light"
BOOST_INCLUSIVE = "boost_inclusive"


@callback
def async_setup_services(hass: HomeAssistant) -> None:
    """Register Sensibo services."""

    service.async_register_platform_entity_service(
        hass,
        DOMAIN,
        SERVICE_ASSUME_STATE,
        entity_domain=CLIMATE_DOMAIN,
        schema={
            vol.Required(ATTR_STATE): vol.In(["on", "off"]),
        },
        func="async_assume_state",
    )
    service.async_register_platform_entity_service(
        hass,
        DOMAIN,
        SERVICE_ENABLE_TIMER,
        entity_domain=CLIMATE_DOMAIN,
        schema={
            vol.Required(ATTR_MINUTES): cv.positive_int,
        },
        func="async_enable_timer",
    )
    service.async_register_platform_entity_service(
        hass,
        DOMAIN,
        SERVICE_ENABLE_PURE_BOOST,
        entity_domain=CLIMATE_DOMAIN,
        schema={
            vol.Required(ATTR_AC_INTEGRATION): bool,
            vol.Required(ATTR_GEO_INTEGRATION): bool,
            vol.Required(ATTR_INDOOR_INTEGRATION): bool,
            vol.Required(ATTR_OUTDOOR_INTEGRATION): bool,
            vol.Required(ATTR_SENSITIVITY): vol.In(["normal", "sensitive"]),
        },
        func="async_enable_pure_boost",
    )
    service.async_register_platform_entity_service(
        hass,
        DOMAIN,
        SERVICE_FULL_STATE,
        entity_domain=CLIMATE_DOMAIN,
        schema={
            vol.Required(ATTR_MODE): vol.In(
                ["cool", "heat", "fan", "auto", "dry", "off"]
            ),
            vol.Optional(ATTR_TARGET_TEMPERATURE): int,
            vol.Optional(ATTR_FAN_MODE): str,
            vol.Optional(ATTR_SWING_MODE): str,
            vol.Optional(ATTR_HORIZONTAL_SWING_MODE): str,
            vol.Optional(ATTR_LIGHT): vol.In(["on", "off", "dim"]),
        },
        func="async_full_ac_state",
    )
    service.async_register_platform_entity_service(
        hass,
        DOMAIN,
        SERVICE_ENABLE_CLIMATE_REACT,
        entity_domain=CLIMATE_DOMAIN,
        schema={
            vol.Required(ATTR_HIGH_TEMPERATURE_THRESHOLD): vol.Coerce(float),
            vol.Required(ATTR_HIGH_TEMPERATURE_STATE): dict,
            vol.Required(ATTR_LOW_TEMPERATURE_THRESHOLD): vol.Coerce(float),
            vol.Required(ATTR_LOW_TEMPERATURE_STATE): dict,
            vol.Required(ATTR_SMART_TYPE): vol.In(
                ["temperature", "feelslike", "humidity"]
            ),
        },
        func="async_enable_climate_react",
    )
    service.async_register_platform_entity_service(
        hass,
        DOMAIN,
        SERVICE_GET_DEVICE_CAPABILITIES,
        entity_domain=CLIMATE_DOMAIN,
        schema={vol.Required(ATTR_HVAC_MODE): vol.Coerce(HVACMode)},
        func="async_get_device_capabilities",
        supports_response=SupportsResponse.ONLY,
    )
@@ -30,6 +30,7 @@ from homeassistant.helpers.entity_platform import (
AddConfigEntryEntitiesCallback,
AddEntitiesCallback,
)
from homeassistant.helpers.issue_registry import IssueSeverity, async_create_issue
from homeassistant.helpers.template import Template
from homeassistant.helpers.trigger_template_entity import (
CONF_AVAILABILITY,
@@ -69,6 +70,15 @@ async def async_setup_platform(
) -> None:
"""Set up the SQL sensor from yaml."""
if (conf := discovery_info) is None:
async_create_issue(
hass,
DOMAIN,
"sensor_platform_yaml_not_supported",
is_fixable=False,
severity=IssueSeverity.WARNING,
translation_key="platform_yaml_not_supported",
learn_more_url="https://www.home-assistant.io/integrations/sql/",
)
return

name: Template = conf[CONF_NAME]

@@ -166,6 +166,10 @@
"entity_id_query_does_full_table_scan": {
"title": "SQL query does full table scan",
"description": "The query `{query}` contains the keyword `entity_id` but does not reference the `states_meta` table. This will cause a full table scan and database instability. Please check the documentation and use `states_meta.entity_id` instead."
},
"platform_yaml_not_supported": {
"title": "Platform YAML is not supported in SQL",
"description": "Platform YAML setup is not supported.\nChange from configuring it in the `sensor:` key to using the `sql:` key directly in configuration.yaml.\nTo see the detailed documentation, select Learn more."
}
}
}

@@ -6,5 +6,5 @@
"iot_class": "local_polling",
"loggers": ["libpyvivotek"],
"quality_scale": "legacy",
"requirements": ["libpyvivotek==0.4.0"]
"requirements": ["libpyvivotek==0.6.1"]
}

@@ -1,100 +0,0 @@
|
||||
"""Support for Vultr."""
|
||||
|
||||
from datetime import timedelta
|
||||
import logging
|
||||
|
||||
import voluptuous as vol
|
||||
from vultr import Vultr as VultrAPI
|
||||
|
||||
from homeassistant.components import persistent_notification
|
||||
from homeassistant.const import CONF_API_KEY, Platform
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers import config_validation as cv
|
||||
from homeassistant.helpers.typing import ConfigType
|
||||
from homeassistant.util import Throttle
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
ATTR_AUTO_BACKUPS = "auto_backups"
|
||||
ATTR_ALLOWED_BANDWIDTH = "allowed_bandwidth_gb"
|
||||
ATTR_COST_PER_MONTH = "cost_per_month"
|
||||
ATTR_CURRENT_BANDWIDTH_USED = "current_bandwidth_gb"
|
||||
ATTR_CREATED_AT = "created_at"
|
||||
ATTR_DISK = "disk"
|
||||
ATTR_SUBSCRIPTION_ID = "subid"
|
||||
ATTR_SUBSCRIPTION_NAME = "label"
|
||||
ATTR_IPV4_ADDRESS = "ipv4_address"
|
||||
ATTR_IPV6_ADDRESS = "ipv6_address"
|
||||
ATTR_MEMORY = "memory"
|
||||
ATTR_OS = "os"
|
||||
ATTR_PENDING_CHARGES = "pending_charges"
|
||||
ATTR_REGION = "region"
|
||||
ATTR_VCPUS = "vcpus"
|
||||
|
||||
CONF_SUBSCRIPTION = "subscription"
|
||||
|
||||
DATA_VULTR = "data_vultr"
|
||||
DOMAIN = "vultr"
|
||||
|
||||
NOTIFICATION_ID = "vultr_notification"
|
||||
NOTIFICATION_TITLE = "Vultr Setup"
|
||||
|
||||
VULTR_PLATFORMS = [Platform.BINARY_SENSOR, Platform.SENSOR, Platform.SWITCH]
|
||||
|
||||
MIN_TIME_BETWEEN_UPDATES = timedelta(seconds=60)
|
||||
|
||||
CONFIG_SCHEMA = vol.Schema(
|
||||
{DOMAIN: vol.Schema({vol.Required(CONF_API_KEY): cv.string})}, extra=vol.ALLOW_EXTRA
|
||||
)
|
||||
|
||||
|
||||
def setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
||||
"""Set up the Vultr component."""
|
||||
api_key = config[DOMAIN].get(CONF_API_KEY)
|
||||
|
||||
vultr = Vultr(api_key)
|
||||
|
||||
try:
|
||||
vultr.update()
|
||||
except RuntimeError as ex:
|
||||
_LOGGER.error("Failed to make update API request because: %s", ex)
|
||||
persistent_notification.create(
|
||||
hass,
|
||||
f"Error: {ex}",
|
||||
title=NOTIFICATION_TITLE,
|
||||
notification_id=NOTIFICATION_ID,
|
||||
)
|
||||
return False
|
||||
|
||||
hass.data[DATA_VULTR] = vultr
|
||||
return True
|
||||
|
||||
|
||||
class Vultr:
|
||||
"""Handle all communication with the Vultr API."""
|
||||
|
||||
def __init__(self, api_key):
|
||||
"""Initialize the Vultr connection."""
|
||||
|
||||
self._api_key = api_key
|
||||
self.data = None
|
||||
self.api = VultrAPI(self._api_key)
|
||||
|
||||
@Throttle(MIN_TIME_BETWEEN_UPDATES)
|
||||
def update(self):
|
||||
"""Use the data from Vultr API."""
|
||||
self.data = self.api.server_list()
|
||||
|
||||
def _force_update(self):
|
||||
"""Use the data from Vultr API."""
|
||||
self.data = self.api.server_list()
|
||||
|
||||
def halt(self, subscription):
|
||||
"""Halt a subscription (hard power off)."""
|
||||
self.api.server_halt(subscription)
|
||||
self._force_update()
|
||||
|
||||
def start(self, subscription):
|
||||
"""Start a subscription."""
|
||||
self.api.server_start(subscription)
|
||||
self._force_update()
|
||||
@@ -1,121 +0,0 @@
|
||||
"""Support for monitoring the state of Vultr subscriptions (VPS)."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.components.binary_sensor import (
|
||||
PLATFORM_SCHEMA as BINARY_SENSOR_PLATFORM_SCHEMA,
|
||||
BinarySensorDeviceClass,
|
||||
BinarySensorEntity,
|
||||
)
|
||||
from homeassistant.const import CONF_NAME
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers import config_validation as cv
|
||||
from homeassistant.helpers.entity_platform import AddEntitiesCallback
|
||||
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
|
||||
|
||||
from . import (
|
||||
ATTR_ALLOWED_BANDWIDTH,
|
||||
ATTR_AUTO_BACKUPS,
|
||||
ATTR_COST_PER_MONTH,
|
||||
ATTR_CREATED_AT,
|
||||
ATTR_DISK,
|
||||
ATTR_IPV4_ADDRESS,
|
||||
ATTR_IPV6_ADDRESS,
|
||||
ATTR_MEMORY,
|
||||
ATTR_OS,
|
||||
ATTR_REGION,
|
||||
ATTR_SUBSCRIPTION_ID,
|
||||
ATTR_SUBSCRIPTION_NAME,
|
||||
ATTR_VCPUS,
|
||||
CONF_SUBSCRIPTION,
|
||||
DATA_VULTR,
|
||||
)
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
DEFAULT_NAME = "Vultr {}"
|
||||
PLATFORM_SCHEMA = BINARY_SENSOR_PLATFORM_SCHEMA.extend(
|
||||
{
|
||||
vol.Required(CONF_SUBSCRIPTION): cv.string,
|
||||
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
def setup_platform(
|
||||
hass: HomeAssistant,
|
||||
config: ConfigType,
|
||||
add_entities: AddEntitiesCallback,
|
||||
discovery_info: DiscoveryInfoType | None = None,
|
||||
) -> None:
|
||||
"""Set up the Vultr subscription (server) binary sensor."""
|
||||
vultr = hass.data[DATA_VULTR]
|
||||
|
||||
subscription = config.get(CONF_SUBSCRIPTION)
|
||||
name = config.get(CONF_NAME)
|
||||
|
||||
if subscription not in vultr.data:
|
||||
_LOGGER.error("Subscription %s not found", subscription)
|
||||
return
|
||||
|
||||
add_entities([VultrBinarySensor(vultr, subscription, name)], True)
|
||||
|
||||
|
||||
class VultrBinarySensor(BinarySensorEntity):
|
||||
"""Representation of a Vultr subscription sensor."""
|
||||
|
||||
_attr_device_class = BinarySensorDeviceClass.POWER
|
||||
|
||||
def __init__(self, vultr, subscription, name):
|
||||
"""Initialize a new Vultr binary sensor."""
|
||||
self._vultr = vultr
|
||||
self._name = name
|
||||
|
||||
self.subscription = subscription
|
||||
self.data = None
|
||||
|
||||
@property
|
||||
def name(self):
|
||||
"""Return the name of the sensor."""
|
||||
try:
|
||||
return self._name.format(self.data["label"])
|
||||
except (KeyError, TypeError):
|
||||
return self._name
|
||||
|
||||
@property
|
||||
def icon(self):
|
||||
"""Return the icon of this server."""
|
||||
return "mdi:server" if self.is_on else "mdi:server-off"
|
||||
|
||||
@property
|
||||
def is_on(self):
|
||||
"""Return true if the binary sensor is on."""
|
||||
return self.data["power_status"] == "running"
|
||||
|
||||
@property
|
||||
def extra_state_attributes(self):
|
||||
"""Return the state attributes of the Vultr subscription."""
|
||||
return {
|
||||
ATTR_ALLOWED_BANDWIDTH: self.data.get("allowed_bandwidth_gb"),
|
||||
ATTR_AUTO_BACKUPS: self.data.get("auto_backups"),
|
||||
ATTR_COST_PER_MONTH: self.data.get("cost_per_month"),
|
||||
ATTR_CREATED_AT: self.data.get("date_created"),
|
||||
ATTR_DISK: self.data.get("disk"),
|
||||
ATTR_IPV4_ADDRESS: self.data.get("main_ip"),
|
||||
ATTR_IPV6_ADDRESS: self.data.get("v6_main_ip"),
|
||||
ATTR_MEMORY: self.data.get("ram"),
|
||||
ATTR_OS: self.data.get("os"),
|
||||
ATTR_REGION: self.data.get("location"),
|
||||
ATTR_SUBSCRIPTION_ID: self.data.get("SUBID"),
|
||||
ATTR_SUBSCRIPTION_NAME: self.data.get("label"),
|
||||
ATTR_VCPUS: self.data.get("vcpu_count"),
|
||||
}
|
||||
|
||||
def update(self) -> None:
|
||||
"""Update state of sensor."""
|
||||
self._vultr.update()
|
||||
self.data = self._vultr.data[self.subscription]
|
||||
@@ -1,10 +0,0 @@
|
||||
{
|
||||
"domain": "vultr",
|
||||
"name": "Vultr",
|
||||
"codeowners": [],
|
||||
"documentation": "https://www.home-assistant.io/integrations/vultr",
|
||||
"iot_class": "cloud_polling",
|
||||
"loggers": ["vultr"],
|
||||
"quality_scale": "legacy",
|
||||
"requirements": ["vultr==0.1.2"]
|
||||
}
|
||||
@@ -1,123 +0,0 @@
|
||||
"""Support for monitoring the state of Vultr Subscriptions."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.components.sensor import (
|
||||
PLATFORM_SCHEMA as SENSOR_PLATFORM_SCHEMA,
|
||||
SensorDeviceClass,
|
||||
SensorEntity,
|
||||
SensorEntityDescription,
|
||||
)
|
||||
from homeassistant.const import CONF_MONITORED_CONDITIONS, CONF_NAME, UnitOfInformation
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers import config_validation as cv
|
||||
from homeassistant.helpers.entity_platform import AddEntitiesCallback
|
||||
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
|
||||
|
||||
from . import (
|
||||
ATTR_CURRENT_BANDWIDTH_USED,
|
||||
ATTR_PENDING_CHARGES,
|
||||
CONF_SUBSCRIPTION,
|
||||
DATA_VULTR,
|
||||
)
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
DEFAULT_NAME = "Vultr {} {}"
|
||||
SENSOR_TYPES: tuple[SensorEntityDescription, ...] = (
|
||||
SensorEntityDescription(
|
||||
key=ATTR_CURRENT_BANDWIDTH_USED,
|
||||
name="Current Bandwidth Used",
|
||||
native_unit_of_measurement=UnitOfInformation.GIGABYTES,
|
||||
device_class=SensorDeviceClass.DATA_SIZE,
|
||||
icon="mdi:chart-histogram",
|
||||
),
|
||||
SensorEntityDescription(
|
||||
key=ATTR_PENDING_CHARGES,
|
||||
name="Pending Charges",
|
||||
native_unit_of_measurement="US$",
|
||||
icon="mdi:currency-usd",
|
||||
),
|
||||
)
|
||||
SENSOR_KEYS: list[str] = [desc.key for desc in SENSOR_TYPES]
|
||||
|
||||
PLATFORM_SCHEMA = SENSOR_PLATFORM_SCHEMA.extend(
|
||||
{
|
||||
vol.Required(CONF_SUBSCRIPTION): cv.string,
|
||||
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
|
||||
vol.Optional(CONF_MONITORED_CONDITIONS, default=SENSOR_KEYS): vol.All(
|
||||
cv.ensure_list, [vol.In(SENSOR_KEYS)]
|
||||
),
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
def setup_platform(
|
||||
hass: HomeAssistant,
|
||||
config: ConfigType,
|
||||
add_entities: AddEntitiesCallback,
|
||||
discovery_info: DiscoveryInfoType | None = None,
|
||||
) -> None:
|
||||
"""Set up the Vultr subscription (server) sensor."""
|
||||
vultr = hass.data[DATA_VULTR]
|
||||
|
||||
subscription = config[CONF_SUBSCRIPTION]
|
||||
name = config[CONF_NAME]
|
||||
monitored_conditions = config[CONF_MONITORED_CONDITIONS]
|
||||
|
||||
if subscription not in vultr.data:
|
||||
_LOGGER.error("Subscription %s not found", subscription)
|
||||
return
|
||||
|
||||
entities = [
|
||||
VultrSensor(vultr, subscription, name, description)
|
||||
for description in SENSOR_TYPES
|
||||
if description.key in monitored_conditions
|
||||
]
|
||||
|
||||
add_entities(entities, True)
|
||||
|
||||
|
||||
class VultrSensor(SensorEntity):
|
||||
"""Representation of a Vultr subscription sensor."""
|
||||
|
||||
def __init__(
|
||||
self, vultr, subscription, name, description: SensorEntityDescription
|
||||
) -> None:
|
||||
"""Initialize a new Vultr sensor."""
|
||||
self.entity_description = description
|
||||
self._vultr = vultr
|
||||
self._name = name
|
||||
|
||||
self.subscription = subscription
|
||||
self.data = None
|
||||
|
||||
@property
|
||||
def name(self):
|
||||
"""Return the name of the sensor."""
|
||||
try:
|
||||
return self._name.format(self.entity_description.name)
|
||||
except IndexError:
|
||||
try:
|
||||
return self._name.format(
|
||||
self.data["label"], self.entity_description.name
|
||||
)
|
||||
except (KeyError, TypeError):
|
||||
return self._name
|
||||
|
||||
@property
|
||||
def native_value(self):
|
||||
"""Return the value of this given sensor type."""
|
||||
try:
|
||||
return round(float(self.data.get(self.entity_description.key)), 2)
|
||||
except (TypeError, ValueError):
|
||||
return self.data.get(self.entity_description.key)
|
||||
|
||||
def update(self) -> None:
|
||||
"""Update state of sensor."""
|
||||
self._vultr.update()
|
||||
self.data = self._vultr.data[self.subscription]
|
||||
@@ -1,129 +0,0 @@
|
||||
"""Support for interacting with Vultr subscriptions."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
from typing import Any
|
||||
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.components.switch import (
|
||||
PLATFORM_SCHEMA as SWITCH_PLATFORM_SCHEMA,
|
||||
SwitchEntity,
|
||||
)
|
||||
from homeassistant.const import CONF_NAME
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers import config_validation as cv
|
||||
from homeassistant.helpers.entity_platform import AddEntitiesCallback
|
||||
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
|
||||
|
||||
from . import (
|
||||
ATTR_ALLOWED_BANDWIDTH,
|
||||
ATTR_AUTO_BACKUPS,
|
||||
ATTR_COST_PER_MONTH,
|
||||
ATTR_CREATED_AT,
|
||||
ATTR_DISK,
|
||||
ATTR_IPV4_ADDRESS,
|
||||
ATTR_IPV6_ADDRESS,
|
||||
ATTR_MEMORY,
|
||||
ATTR_OS,
|
||||
ATTR_REGION,
|
||||
ATTR_SUBSCRIPTION_ID,
|
||||
ATTR_SUBSCRIPTION_NAME,
|
||||
ATTR_VCPUS,
|
||||
CONF_SUBSCRIPTION,
|
||||
DATA_VULTR,
|
||||
)
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
DEFAULT_NAME = "Vultr {}"
|
||||
PLATFORM_SCHEMA = SWITCH_PLATFORM_SCHEMA.extend(
|
||||
{
|
||||
vol.Required(CONF_SUBSCRIPTION): cv.string,
|
||||
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
def setup_platform(
|
||||
hass: HomeAssistant,
|
||||
config: ConfigType,
|
||||
add_entities: AddEntitiesCallback,
|
||||
discovery_info: DiscoveryInfoType | None = None,
|
||||
) -> None:
|
||||
"""Set up the Vultr subscription switch."""
|
||||
vultr = hass.data[DATA_VULTR]
|
||||
|
||||
subscription = config.get(CONF_SUBSCRIPTION)
|
||||
name = config.get(CONF_NAME)
|
||||
|
||||
if subscription not in vultr.data:
|
||||
_LOGGER.error("Subscription %s not found", subscription)
|
||||
return
|
||||
|
||||
add_entities([VultrSwitch(vultr, subscription, name)], True)
|
||||
|
||||
|
||||
class VultrSwitch(SwitchEntity):
|
||||
"""Representation of a Vultr subscription switch."""
|
||||
|
||||
def __init__(self, vultr, subscription, name):
|
||||
"""Initialize a new Vultr switch."""
|
||||
self._vultr = vultr
|
||||
self._name = name
|
||||
|
||||
self.subscription = subscription
|
||||
self.data = None
|
||||
|
||||
@property
|
||||
def name(self):
|
||||
"""Return the name of the switch."""
|
||||
try:
|
||||
return self._name.format(self.data["label"])
|
||||
except (TypeError, KeyError):
|
||||
return self._name
|
||||
|
||||
@property
|
||||
def is_on(self):
|
||||
"""Return true if switch is on."""
|
||||
return self.data["power_status"] == "running"
|
||||
|
||||
@property
|
||||
def icon(self):
|
||||
"""Return the icon of this server."""
|
||||
return "mdi:server" if self.is_on else "mdi:server-off"
|
||||
|
||||
@property
|
||||
def extra_state_attributes(self):
|
||||
"""Return the state attributes of the Vultr subscription."""
|
||||
return {
|
||||
ATTR_ALLOWED_BANDWIDTH: self.data.get("allowed_bandwidth_gb"),
|
||||
ATTR_AUTO_BACKUPS: self.data.get("auto_backups"),
|
||||
ATTR_COST_PER_MONTH: self.data.get("cost_per_month"),
|
||||
ATTR_CREATED_AT: self.data.get("date_created"),
|
||||
ATTR_DISK: self.data.get("disk"),
|
||||
ATTR_IPV4_ADDRESS: self.data.get("main_ip"),
|
||||
ATTR_IPV6_ADDRESS: self.data.get("v6_main_ip"),
|
||||
ATTR_MEMORY: self.data.get("ram"),
|
||||
ATTR_OS: self.data.get("os"),
|
||||
ATTR_REGION: self.data.get("location"),
|
||||
ATTR_SUBSCRIPTION_ID: self.data.get("SUBID"),
|
||||
ATTR_SUBSCRIPTION_NAME: self.data.get("label"),
|
||||
ATTR_VCPUS: self.data.get("vcpu_count"),
|
||||
}
|
||||
|
||||
def turn_on(self, **kwargs: Any) -> None:
|
||||
"""Boot-up the subscription."""
|
||||
if self.data["power_status"] != "running":
|
||||
self._vultr.start(self.subscription)
|
||||
|
||||
def turn_off(self, **kwargs: Any) -> None:
|
||||
"""Halt the subscription."""
|
||||
if self.data["power_status"] == "running":
|
||||
self._vultr.halt(self.subscription)
|
||||
|
||||
def update(self) -> None:
|
||||
"""Get the latest data from the device and update the data."""
|
||||
self._vultr.update()
|
||||
self.data = self._vultr.data[self.subscription]
|
||||
@@ -3,7 +3,7 @@
from enum import StrEnum

DOMAIN = "wallbox"
UPDATE_INTERVAL = 60
UPDATE_INTERVAL = 90

BIDIRECTIONAL_MODEL_PREFIXES = ["QS"]


@@ -209,7 +209,12 @@ class WallboxCoordinator(DataUpdateCoordinator[dict[str, Any]]):
) from wallbox_connection_error

async def _async_update_data(self) -> dict[str, Any]:
"""Get new sensor data for Wallbox component."""
"""Get new sensor data for Wallbox component. Set update interval to be UPDATE_INTERVAL * #wallbox chargers configured, this is necessary due to rate limitations."""

self.update_interval = timedelta(
seconds=UPDATE_INTERVAL
* max(len(self.hass.config_entries.async_loaded_entries(DOMAIN)), 1)
)
return await self.hass.async_add_executor_job(self._get_data)

@_require_authentication

@@ -1,227 +0,0 @@
|
||||
"""Support for the IBM Watson IoT Platform."""
|
||||
|
||||
import logging
|
||||
import queue
|
||||
import threading
|
||||
import time
|
||||
|
||||
from ibmiotf import MissingMessageEncoderException
|
||||
from ibmiotf.gateway import Client
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.const import (
|
||||
CONF_DOMAINS,
|
||||
CONF_ENTITIES,
|
||||
CONF_EXCLUDE,
|
||||
CONF_ID,
|
||||
CONF_INCLUDE,
|
||||
CONF_TOKEN,
|
||||
CONF_TYPE,
|
||||
EVENT_HOMEASSISTANT_STOP,
|
||||
EVENT_STATE_CHANGED,
|
||||
STATE_UNAVAILABLE,
|
||||
STATE_UNKNOWN,
|
||||
)
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.helpers import config_validation as cv, state as state_helper
|
||||
from homeassistant.helpers.typing import ConfigType
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
CONF_ORG = "organization"
|
||||
|
||||
DOMAIN = "watson_iot"
|
||||
|
||||
MAX_TRIES = 3
|
||||
|
||||
RETRY_DELAY = 20
|
||||
|
||||
CONFIG_SCHEMA = vol.Schema(
|
||||
{
|
||||
DOMAIN: vol.All(
|
||||
vol.Schema(
|
||||
{
|
||||
vol.Required(CONF_ORG): cv.string,
|
||||
vol.Required(CONF_TYPE): cv.string,
|
||||
vol.Required(CONF_ID): cv.string,
|
||||
vol.Required(CONF_TOKEN): cv.string,
|
||||
vol.Optional(CONF_EXCLUDE, default={}): vol.Schema(
|
||||
{
|
||||
vol.Optional(CONF_ENTITIES, default=[]): cv.entity_ids,
|
||||
vol.Optional(CONF_DOMAINS, default=[]): vol.All(
|
||||
cv.ensure_list, [cv.string]
|
||||
),
|
||||
}
|
||||
),
|
||||
vol.Optional(CONF_INCLUDE, default={}): vol.Schema(
|
||||
{
|
||||
vol.Optional(CONF_ENTITIES, default=[]): cv.entity_ids,
|
||||
vol.Optional(CONF_DOMAINS, default=[]): vol.All(
|
||||
cv.ensure_list, [cv.string]
|
||||
),
|
||||
}
|
||||
),
|
||||
}
|
||||
)
|
||||
)
|
||||
},
|
||||
extra=vol.ALLOW_EXTRA,
|
||||
)
|
||||
|
||||
|
||||
def setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
||||
"""Set up the Watson IoT Platform component."""
|
||||
|
||||
conf = config[DOMAIN]
|
||||
|
||||
include = conf[CONF_INCLUDE]
|
||||
exclude = conf[CONF_EXCLUDE]
|
||||
include_e = set(include[CONF_ENTITIES])
|
||||
include_d = set(include[CONF_DOMAINS])
|
||||
exclude_e = set(exclude[CONF_ENTITIES])
|
||||
exclude_d = set(exclude[CONF_DOMAINS])
|
||||
|
||||
client_args = {
|
||||
"org": conf[CONF_ORG],
|
||||
"type": conf[CONF_TYPE],
|
||||
"id": conf[CONF_ID],
|
||||
"auth-method": "token",
|
||||
"auth-token": conf[CONF_TOKEN],
|
||||
}
|
||||
watson_gateway = Client(client_args)
|
||||
|
||||
def event_to_json(event):
|
||||
"""Add an event to the outgoing list."""
|
||||
state = event.data.get("new_state")
|
||||
if (
|
||||
state is None
|
||||
or state.state in (STATE_UNKNOWN, "", STATE_UNAVAILABLE)
|
||||
or state.entity_id in exclude_e
|
||||
or state.domain in exclude_d
|
||||
):
|
||||
return None
|
||||
|
||||
if (include_e and state.entity_id not in include_e) or (
|
||||
include_d and state.domain not in include_d
|
||||
):
|
||||
return None
|
||||
|
||||
try:
|
||||
_state_as_value = float(state.state)
|
||||
except ValueError:
|
||||
_state_as_value = None
|
||||
|
||||
if _state_as_value is None:
|
||||
try:
|
||||
_state_as_value = float(state_helper.state_as_number(state))
|
||||
except ValueError:
|
||||
_state_as_value = None
|
||||
|
||||
out_event = {
|
||||
"tags": {"domain": state.domain, "entity_id": state.object_id},
|
||||
"time": event.time_fired.isoformat(),
|
||||
"fields": {"state": state.state},
|
||||
}
|
||||
if _state_as_value is not None:
|
||||
out_event["fields"]["state_value"] = _state_as_value
|
||||
|
||||
for key, value in state.attributes.items():
|
||||
if key != "unit_of_measurement":
|
||||
# If the key is already in fields
|
||||
if key in out_event["fields"]:
|
||||
key = f"{key}_"
|
||||
# For each value we try to cast it as float
|
||||
# But if we cannot do it we store the value
|
||||
# as string
|
||||
try:
|
||||
out_event["fields"][key] = float(value)
|
||||
except (ValueError, TypeError):
|
||||
out_event["fields"][key] = str(value)
|
||||
|
||||
return out_event
|
||||
|
||||
instance = hass.data[DOMAIN] = WatsonIOTThread(hass, watson_gateway, event_to_json)
|
||||
instance.start()
|
||||
|
||||
def shutdown(event):
|
||||
"""Shut down the thread."""
|
||||
instance.queue.put(None)
|
||||
instance.join()
|
||||
|
||||
hass.bus.listen_once(EVENT_HOMEASSISTANT_STOP, shutdown)
|
||||
|
||||
return True
|
||||
|
||||
|
||||
class WatsonIOTThread(threading.Thread):
|
||||
"""A threaded event handler class."""
|
||||
|
||||
def __init__(self, hass, gateway, event_to_json):
|
||||
"""Initialize the listener."""
|
||||
threading.Thread.__init__(self, name="WatsonIOT")
|
||||
self.queue = queue.Queue()
|
||||
self.gateway = gateway
|
||||
self.gateway.connect()
|
||||
self.event_to_json = event_to_json
|
||||
self.write_errors = 0
|
||||
self.shutdown = False
|
||||
hass.bus.listen(EVENT_STATE_CHANGED, self._event_listener)
|
||||
|
||||
@callback
|
||||
def _event_listener(self, event):
|
||||
"""Listen for new messages on the bus and queue them for Watson IoT."""
|
||||
item = (time.monotonic(), event)
|
||||
self.queue.put(item)
|
||||
|
||||
def get_events_json(self):
|
||||
"""Return an event formatted for writing."""
|
||||
events = []
|
||||
|
||||
try:
|
||||
if (item := self.queue.get()) is None:
|
||||
self.shutdown = True
|
||||
else:
|
||||
event_json = self.event_to_json(item[1])
|
||||
if event_json:
|
||||
events.append(event_json)
|
||||
|
||||
except queue.Empty:
|
||||
pass
|
||||
|
||||
return events
|
||||
|
||||
def write_to_watson(self, events):
|
||||
"""Write preprocessed events to watson."""
|
||||
|
||||
for event in events:
|
||||
for retry in range(MAX_TRIES + 1):
|
||||
try:
|
||||
for field in event["fields"]:
|
||||
value = event["fields"][field]
|
||||
device_success = self.gateway.publishDeviceEvent(
|
||||
event["tags"]["domain"],
|
||||
event["tags"]["entity_id"],
|
||||
field,
|
||||
"json",
|
||||
value,
|
||||
)
|
||||
if not device_success:
|
||||
_LOGGER.error("Failed to publish message to Watson IoT")
|
||||
continue
|
||||
break
|
||||
except (MissingMessageEncoderException, OSError):
|
||||
if retry < MAX_TRIES:
|
||||
time.sleep(RETRY_DELAY)
|
||||
else:
|
||||
_LOGGER.exception("Failed to publish message to Watson IoT")
|
||||
|
||||
def run(self):
|
||||
"""Process incoming events."""
|
||||
while not self.shutdown:
|
||||
if event := self.get_events_json():
|
||||
self.write_to_watson(event)
|
||||
self.queue.task_done()
|
||||
|
||||
def block_till_done(self):
|
||||
"""Block till all events processed."""
|
||||
self.queue.join()
|
||||
@@ -1,10 +0,0 @@
|
||||
{
|
||||
"domain": "watson_iot",
|
||||
"name": "IBM Watson IoT Platform",
|
||||
"codeowners": [],
|
||||
"documentation": "https://www.home-assistant.io/integrations/watson_iot",
|
||||
"iot_class": "cloud_push",
|
||||
"loggers": ["ibmiotf", "paho_mqtt"],
|
||||
"quality_scale": "legacy",
|
||||
"requirements": ["ibmiotf==0.3.4"]
|
||||
}
|
||||
@@ -8,5 +8,5 @@
"iot_class": "local_push",
"loggers": ["zeroconf"],
"quality_scale": "internal",
"requirements": ["zeroconf==0.147.2"]
"requirements": ["zeroconf==1.0.0"]
}

@@ -80,760 +80,3 @@ Both apps are available as Home Assistant add-ons. There are also Docker contain

[running_zwave_js_server]: docs/running_z_wave_js_server.png "Running Z-Wave JS Server"
[//]: # (https://docs.google.com/drawings/d/1YhSVNuss3fa1VFTKQLaACxXg7y6qo742n2oYpdLRs7E/edit?usp=sharing)

## Config Flow

This section documents the Z-Wave JS integration config flow, showing how the different entry points and steps interact.

Below is a diagram of all the steps, followed by a detailed description of each entry point and step.

```mermaid
|
||||
graph TB
|
||||
user[user] --> installation_type{installation_type<br/>menu}
|
||||
installation_type --> intent_recommended[intent_recommended]
|
||||
installation_type --> intent_custom[intent_custom]
|
||||
|
||||
intent_recommended --> on_supervisor[on_supervisor]
|
||||
intent_custom --> on_supervisor
|
||||
|
||||
on_supervisor --> manual[manual]
|
||||
on_supervisor --> configure_addon_user[configure_addon_user]
|
||||
on_supervisor --> finish_addon_setup_user[finish_addon_setup_user]
|
||||
on_supervisor --> install_addon[install_addon]
|
||||
|
||||
manual --> create_entry((create entry))
|
||||
|
||||
configure_addon_user --> network_type[network_type]
|
||||
network_type --> configure_security_keys[configure_security_keys]
|
||||
network_type --> start_addon[start_addon]
|
||||
configure_security_keys --> start_addon
|
||||
|
||||
start_addon --> rf_region[rf_region]
|
||||
rf_region --> start_addon
|
||||
start_addon --> start_failed[start_failed]
|
||||
start_addon --> finish_addon_setup[finish_addon_setup]
|
||||
|
||||
finish_addon_setup --> finish_addon_setup_user
|
||||
finish_addon_setup_user --> create_entry
|
||||
|
||||
install_addon --> install_failed[install_failed]
|
||||
install_addon --> configure_addon[configure_addon]
|
||||
configure_addon --> configure_addon_user
|
||||
|
||||
zeroconf[zeroconf] --> zeroconf_confirm[zeroconf_confirm]
|
||||
zeroconf_confirm --> manual
|
||||
|
||||
usb[usb] --> confirm_usb_migration[confirm_usb_migration]
|
||||
usb --> installation_type
|
||||
confirm_usb_migration --> intent_migrate[intent_migrate]
|
||||
|
||||
hassio[hassio] --> hassio_confirm[hassio_confirm]
|
||||
hassio_confirm --> on_supervisor
|
||||
|
||||
esphome[esphome] --> installation_type
|
||||
|
||||
reconfigure[reconfigure] --> reconfigure_menu{reconfigure<br/>menu}
|
||||
reconfigure_menu --> intent_reconfigure[intent_reconfigure]
|
||||
reconfigure_menu --> intent_migrate
|
||||
|
||||
intent_reconfigure --> on_supervisor_reconfigure[on_supervisor_reconfigure]
|
||||
intent_reconfigure --> manual_reconfigure[manual_reconfigure]
|
||||
|
||||
on_supervisor_reconfigure --> manual_reconfigure
|
||||
on_supervisor_reconfigure --> install_addon
|
||||
on_supervisor_reconfigure --> configure_addon_reconfigure[configure_addon_reconfigure]
|
||||
|
||||
configure_addon_reconfigure --> start_addon
|
||||
configure_addon_reconfigure --> finish_addon_setup_reconfigure[finish_addon_setup_reconfigure]
|
||||
|
||||
finish_addon_setup --> finish_addon_setup_reconfigure
|
||||
finish_addon_setup_reconfigure --> abort_reconfig((abort<br/>reconfigure_successful))
|
||||
manual_reconfigure --> abort_reconfig
|
||||
|
||||
intent_migrate --> backup_nvm[backup_nvm]
|
||||
backup_nvm --> backup_failed[backup_failed]
|
||||
backup_nvm --> instruct_unplug[instruct_unplug]
|
||||
instruct_unplug --> choose_serial_port[choose_serial_port]
|
||||
instruct_unplug --> start_addon
|
||||
choose_serial_port --> start_addon
|
||||
|
||||
finish_addon_setup --> finish_addon_setup_migrate[finish_addon_setup_migrate]
|
||||
finish_addon_setup_migrate --> restore_nvm[restore_nvm]
|
||||
restore_nvm --> restore_failed[restore_failed]
|
||||
restore_failed --> restore_nvm
|
||||
restore_nvm --> migration_done[migration_done]
|
||||
|
||||
style user fill:#e1f5ff
|
||||
style zeroconf fill:#e1f5ff
|
||||
style usb fill:#e1f5ff
|
||||
style hassio fill:#e1f5ff
|
||||
style esphome fill:#e1f5ff
|
||||
style reconfigure fill:#e1f5ff
|
||||
style create_entry fill:#c8e6c9
|
||||
style abort_reconfig fill:#c8e6c9
|
||||
style install_failed fill:#ffcdd2
|
||||
style start_failed fill:#ffcdd2
|
||||
style backup_failed fill:#ffcdd2
|
||||
style migration_done fill:#c8e6c9
|
||||
```
|
||||
|
||||
### Step Descriptions
|
||||
|
||||
#### Entry Points
|
||||
|
||||
- **`user`**
|
||||
- Entry point when user manually adds the integration through UI
|
||||
- Checks if running on Home Assistant Supervisor (Supervisor OS/Container)
|
||||
- If on Supervisor: shows `installation_type` menu
|
||||
- If not on Supervisor: goes directly to `manual` step
|
||||
|
||||
- **`zeroconf`**
|
||||
- Entry point for Zeroconf/mDNS discovered Z-Wave JS servers
|
||||
- Extracts `homeId` from discovery properties and sets as unique ID
|
||||
- Aborts if already configured with same home ID
|
||||
- Builds WebSocket URL from discovered host:port
|
||||
- Shows `zeroconf_confirm` to user
|
||||
|
||||
- **`usb`**
|
||||
- Entry point for USB-discovered Z-Wave controllers
|
||||
- Only works on Home Assistant Supervisor (aborts with `discovery_requires_supervisor` otherwise)
|
||||
- Allows multiple USB flows in progress (for migration scenarios)
|
||||
- Filters out 2652 Zigbee sticks that share same VID/PID with some Z-Wave sticks
|
||||
- Converts device path to `/dev/serial/by-id/` format for stability (see the sketch at the end of this section)
|
||||
- Checks if device is already configured in add-on
|
||||
- Sets temporary unique ID based on USB identifiers
|
||||
- If existing entries found: looks for add-on entry to enable migration
|
||||
- If no existing entries: goes to new setup flow
|
||||
|
||||
- **`hassio`**
|
||||
- Entry point when Z-Wave JS add-on announces itself via Supervisor discovery
|
||||
- Validates this is the official Z-Wave JS add-on (checks slug)
|
||||
- Builds WebSocket URL from discovery config
|
||||
- Gets version info and home ID from server
|
||||
- Sets unique ID to home ID
|
||||
- If already configured: updates URL and aborts
|
||||
- If new: shows `hassio_confirm`
|
||||
|
||||
- **`esphome`**
|
||||
- Entry point for ESPHome devices with Z-Wave over socket support
|
||||
- Only works on Home Assistant Supervisor
|
||||
- Special handling if home ID exists in discovery:
|
||||
- Looks for existing entry with matching home ID
|
||||
- If entry uses socket connection: updates add-on config with new socket path and reloads
|
||||
- Sets unique ID to home ID
|
||||
- Stores socket path from discovery
|
||||
- Sets `_adapter_discovered` flag to skip manual device selection
|
||||
- Goes to `installation_type` menu
|
||||
|
||||
- **`reconfigure`**
|
||||
- Entry point when user reconfigures existing config entry
|
||||
- Stores reference to config entry being reconfigured
|
||||
- Shows menu with two options:
|
||||
- `intent_reconfigure`: Change connection settings
|
||||
- `intent_migrate`: Migrate to different controller hardware
|
||||
|
||||
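
A minimal, self-contained sketch of the `/dev/serial/by-id/` conversion described under the `usb` entry point above; the helper name and logic are illustrative assumptions, not the integration's actual implementation:

```python
from pathlib import Path


def get_serial_by_id(dev_path: str) -> str:
    """Return the /dev/serial/by-id symlink for dev_path, if one exists.

    Illustrative only: scan the by-id directory and return the symlink that
    resolves to the same device node, falling back to the original path.
    """
    by_id = Path("/dev/serial/by-id")
    if not by_id.is_dir():
        return dev_path
    for link in by_id.iterdir():
        if link.resolve() == Path(dev_path).resolve():
            return str(link)
    return dev_path


# e.g. "/dev/ttyUSB0" -> "/dev/serial/by-id/usb-0658_0200-if00-port0" (hypothetical IDs)
```

Using the by-id path keeps the configured device stable across reboots, where `/dev/ttyUSB*` numbering can change.
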
#### Menu Steps
|
||||
|
||||
- **`installation_type`**
|
||||
- Menu shown on Supervisor when setting up integration
|
||||
- Options:
|
||||
- `intent_recommended`: Guided setup with add-on (auto-configures everything)
|
||||
- `intent_custom`: Advanced setup (choose add-on or manual server)
|
||||
|
||||
#### Intent Steps
|
||||
|
||||
- **`intent_recommended`**
|
||||
- User selected recommended installation
|
||||
- Sets `_recommended_install` flag for automatic configuration
|
||||
- Forces add-on usage: calls `on_supervisor` with `use_addon=True`
|
||||
|
||||
- **`intent_custom`**
|
||||
- User selected custom installation
|
||||
- If adapter was discovered (USB/ESPHome): forces add-on usage
|
||||
- If no adapter discovered: goes to `on_supervisor` to ask user preference
|
||||
|
||||
- **`intent_reconfigure`**
|
||||
- User wants to reconfigure connection settings (not migrate hardware)
|
||||
- Checks if on Supervisor:
|
||||
- Yes: goes to `on_supervisor_reconfigure`
|
||||
- No: goes to `manual_reconfigure`
|
||||
|
||||
- **`intent_migrate`**
|
||||
- User wants to migrate to different Z-Wave controller hardware
|
||||
- Validates requirements:
|
||||
- Adapter must be discovered OR existing entry must use add-on
|
||||
- Config entry must be loaded (needs access to driver)
|
||||
- Controller SDK version must be >= 6.61 (older versions don't support NVM export)
|
||||
- Sets `_migrating` flag
|
||||
- Starts migration: goes to `backup_nvm`
|
||||
|
||||
#### Configuration Steps - Supervisor Add-on Path
|
||||
|
||||
- **`on_supervisor`**
|
||||
- Asks user if they want to use the Z-Wave JS add-on or manual server
|
||||
- If user_input is None: shows form with checkbox for `use_addon` (default: True)
|
||||
- If `use_addon=False`: goes to `manual` step
|
||||
- If `use_addon=True`:
|
||||
- Gets add-on info and checks state
|
||||
- If add-on running: loads config from add-on, goes to `finish_addon_setup_user`
|
||||
- If add-on not running: goes to `configure_addon_user`
|
||||
- If add-on not installed: goes to `install_addon`
|
||||
|
||||
- **`configure_addon_user`**
|
||||
- Collects USB path or ESPHome socket path for add-on
|
||||
- If adapter was discovered: skips asking, uses stored path
|
||||
- If no adapter discovered: shows form with:
|
||||
- Optional USB path dropdown (populated from available USB ports)
|
||||
- Optional socket path text field (for ESPHome or remote sockets)
|
||||
- Goes to `network_type`
|
||||
|
||||
- **`network_type`**
|
||||
- Asks if creating new Z-Wave network or using existing network
|
||||
- If recommended install: automatically selects "new" (generates new keys)
|
||||
- Shows form with options:
|
||||
- `new`: Generate new security keys (blank keys)
|
||||
- `existing`: Import existing network keys
|
||||
- If new: clears all security keys and goes to `start_addon`
|
||||
- If existing: goes to `configure_security_keys`
|
||||
|
||||
- **`configure_security_keys`**
|
||||
- Collects security keys for existing Z-Wave network
|
||||
- Shows form with optional fields (see the sketch at the end of this section) for:
|
||||
- S0 Legacy Key (32 hex chars)
|
||||
- S2 Unauthenticated Key (32 hex chars)
|
||||
- S2 Authenticated Key (32 hex chars)
|
||||
- S2 Access Control Key (32 hex chars)
|
||||
- Long Range S2 Authenticated Key (32 hex chars)
|
||||
- Long Range S2 Access Control Key (32 hex chars)
|
||||
- Pre-populates with existing add-on config if available
|
||||
- Stores keys in config flow state
|
||||
- Goes to `start_addon`
|
||||
|
||||
- **`rf_region`**
|
||||
- Asks user to select RF region for Z-Wave controller
|
||||
- Only shown if:
|
||||
- Home Assistant country is not set
|
||||
- Add-on RF region is not configured or set to "Automatic"
|
||||
- Shows dropdown with regions:
|
||||
- Australia/New Zealand, China, Europe, Hong Kong, India, Israel, Japan, Korea, Russia, USA
|
||||
- Stores selected region in add-on config updates
|
||||
- Returns to `start_addon`
|
||||
|
||||
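
A rough sketch of the security key form described for `configure_security_keys`, assuming voluptuous as used elsewhere in this codebase; the field names and validator are illustrative assumptions, not the flow's actual schema:

```python
import re

import voluptuous as vol

# Illustrative: each key is an optional 32-character hex string.
HEX32 = vol.All(str, vol.Match(re.compile(r"^[0-9A-Fa-f]{32}$")))

SECURITY_KEYS_SCHEMA = vol.Schema(
    {
        vol.Optional("s0_legacy_key"): HEX32,
        vol.Optional("s2_unauthenticated_key"): HEX32,
        vol.Optional("s2_authenticated_key"): HEX32,
        vol.Optional("s2_access_control_key"): HEX32,
        vol.Optional("lr_s2_authenticated_key"): HEX32,
        vol.Optional("lr_s2_access_control_key"): HEX32,
    }
)
```
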
#### Configuration Steps - Manual Server Path
|
||||
|
||||
- **`manual`**
|
||||
- Collects WebSocket URL for external Z-Wave JS server
|
||||
- Shows form with text field for URL (default: `ws://localhost:3000`)
|
||||
- Validates input (see the sketch at the end of this section):
|
||||
- URL must start with `ws://` or `wss://`
|
||||
- Attempts connection to get version info
|
||||
- On success:
|
||||
- Sets unique ID to home ID from server
|
||||
- If already configured: updates URL and aborts
|
||||
- If new: creates config entry
|
||||
- On error: shows error message and re-displays form
|
||||
|
||||
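
A rough sketch of the URL validation described in the `manual` step above, again assuming voluptuous; the schema and error key are illustrative assumptions, not the flow's actual code:

```python
import voluptuous as vol


def validate_ws_url(url: str) -> str:
    """Accept only WebSocket URLs such as ws://localhost:3000 or wss://host:3000."""
    if not url.startswith(("ws://", "wss://")):
        raise vol.Invalid("invalid_ws_url")
    return url


STEP_MANUAL_SCHEMA = vol.Schema(
    {vol.Required("url", default="ws://localhost:3000"): validate_ws_url}
)
```

In the real flow, a URL that passes this check is then used to open a connection and fetch version info; connection failures are shown as form errors rather than raised.
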
#### Progress Steps
|
||||
|
||||
- **`install_addon`**
|
||||
- Progress step that installs Z-Wave JS add-on
|
||||
- Creates background task to install add-on via Supervisor API
|
||||
- Shows progress spinner to user
|
||||
- On success:
|
||||
- Sets `integration_created_addon` flag (for cleanup on removal)
|
||||
- Goes to `configure_addon`
|
||||
- On failure: goes to `install_failed`
|
||||
|
||||
- **`install_failed`**
|
||||
- Add-on installation failed
|
||||
- Aborts flow with reason `addon_install_failed`
|
||||
|
||||
- **`start_addon`**
|
||||
- Progress step that starts or restarts Z-Wave JS add-on
|
||||
- First checks if RF region needs to be selected:
|
||||
- If country not set AND RF region not configured: goes to `rf_region`
|
||||
- If there are pending add-on config updates: applies them before starting
|
||||
- Creates background task (`_async_start_addon`):
|
||||
- Starts or restarts add-on via Supervisor API
|
||||
- Polls for up to 200 seconds (40 rounds × 5 seconds) waiting for server to respond (see the sketch at the end of this section)
|
||||
- Gets WebSocket URL from add-on discovery info
|
||||
- Validates connection by getting version info
|
||||
- On success: goes to `finish_addon_setup`
|
||||
- On failure: goes to `start_failed`
|
||||
|
||||
- **`start_failed`**
|
||||
- Add-on start/restart failed
|
||||
- If migrating: aborts with `addon_start_failed`
|
||||
- If reconfiguring: calls `async_revert_addon_config` to restore original config
|
||||
- Otherwise: aborts with `addon_start_failed`
|
||||
|
||||
- **`backup_nvm`**
|
||||
- Progress step that backs up Z-Wave controller NVM (non-volatile memory)
|
||||
- Creates background task (`_async_backup_network`):
|
||||
- Gets driver controller from config entry runtime data
|
||||
- Registers progress callback to forward backup progress to UI (0-100%)
|
||||
- Calls `controller.async_backup_nvm_raw()` to get raw NVM binary data
|
||||
- Saves backup to file: `~/.homeassistant/zwavejs_nvm_backup_YYYY-MM-DD_HH-MM-SS.bin`
|
||||
- On success: goes to `instruct_unplug`
|
||||
- On failure: goes to `backup_failed`
|
||||
|
||||
- **`backup_failed`**
|
||||
- NVM backup failed
|
||||
- Aborts migration with reason `backup_failed`
|
||||
|
||||
- **`restore_nvm`**
|
||||
- Progress step that restores NVM backup to new controller
|
||||
- Creates background task (`_async_restore_network_backup`):
|
||||
- Sets `keep_old_devices` flag to preserve device customizations
|
||||
- Reloads config entry to reconnect to new controller
|
||||
- Registers progress callbacks for convert (50%) and restore (50%) phases
|
||||
- Calls `controller.async_restore_nvm()` with backup data
|
||||
- Waits for driver ready event (with timeout)
|
||||
- Gets new version info and updates config entry unique ID to new home ID
|
||||
- Reloads entry again to clean up old controller device
|
||||
- On success: goes to `migration_done`
|
||||
- On failure: goes to `restore_failed`
|
||||
|
||||
- **`restore_failed`**
|
||||
- NVM restore failed
|
||||
- Shows form with:
|
||||
- Error message
|
||||
- Backup file path
|
||||
- Download link for backup file (base64 encoded)
|
||||
- Retry button
|
||||
- If user retries: goes back to `restore_nvm`
|
||||
|
||||
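
The `start_addon` polling described above (up to 40 rounds of 5 seconds) can be sketched roughly as follows; the constants, callable, and caught exceptions are assumptions used for illustration only:

```python
import asyncio

ADDON_SETUP_TIMEOUT = 5  # seconds between attempts (assumed from the description)
ADDON_SETUP_TIMEOUT_ROUNDS = 40  # 40 x 5 s = 200 s total


async def wait_for_server(get_version_info) -> bool:
    """Poll until the Z-Wave JS server answers a version request, or give up."""
    for _ in range(ADDON_SETUP_TIMEOUT_ROUNDS):
        try:
            await get_version_info()
        except (ConnectionError, OSError):
            await asyncio.sleep(ADDON_SETUP_TIMEOUT)
        else:
            return True
    return False
```
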
#### Finish Steps
|
||||
|
||||
- **`configure_addon`**
|
||||
- Router step that delegates to appropriate addon configuration
|
||||
- If reconfiguring: goes to `configure_addon_reconfigure`
|
||||
- Otherwise: goes to `configure_addon_user`
|
||||
|
||||
- **`finish_addon_setup`**
|
||||
- Router step that delegates to appropriate finish logic
|
||||
- If migrating: goes to `finish_addon_setup_migrate`
|
||||
- If reconfiguring: goes to `finish_addon_setup_reconfigure`
|
||||
- Otherwise: goes to `finish_addon_setup_user`
|
||||
|
||||
- **`finish_addon_setup_user`**
|
||||
- Finalizes setup for new config entry
|
||||
- Gets add-on discovery info if WebSocket URL not set
|
||||
- Gets version info from server if not already fetched
|
||||
- Sets unique ID to home ID
|
||||
- For USB discovery: updates unique ID from temporary USB-based ID to home ID
|
||||
- Checks if already configured: updates URL/paths and aborts
|
||||
- Creates config entry with all collected data (see the sketch at the end of this section):
|
||||
- WebSocket URL
|
||||
- USB path
|
||||
- Socket path
|
||||
- All security keys
|
||||
- Add-on flags
|
||||
- Aborts any other in-progress flows
|
||||
|
||||
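
For reference, the data assembled by `finish_addon_setup_user` looks roughly like the dictionary below; the key names and values are illustrative assumptions based on the fields listed above, not the integration's exact schema:

```python
# Illustrative config entry data for an add-on based setup (hypothetical values).
entry_data = {
    "url": "ws://a0d7b954-zwave-js:3000",
    "usb_path": "/dev/serial/by-id/usb-0658_0200-if00-port0",
    "socket_path": None,
    "s0_legacy_key": "<32 hex chars>",
    "s2_unauthenticated_key": "<32 hex chars>",
    "s2_authenticated_key": "<32 hex chars>",
    "s2_access_control_key": "<32 hex chars>",
    "lr_s2_authenticated_key": "<32 hex chars>",
    "lr_s2_access_control_key": "<32 hex chars>",
    "use_addon": True,
    "integration_created_addon": False,
}
```
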
#### Confirmation Steps
|
||||
|
||||
- **`zeroconf_confirm`**
|
||||
- Confirms adding Zeroconf-discovered server
|
||||
- Shows form with home ID and WebSocket URL
|
||||
- On confirmation: goes to `manual` with pre-filled URL
|
||||
|
||||
- **`confirm_usb_migration`**
|
||||
- Confirms migrating to newly discovered USB controller
|
||||
- Shows form with USB device title
|
||||
- On confirmation: goes to `intent_migrate`
|
||||
|
||||
- **`hassio_confirm`**
|
||||
- Confirms adding add-on discovered server
|
||||
- Shows simple confirmation form
|
||||
- On confirmation: goes to `on_supervisor` with `use_addon=True`
|
||||
|
||||
- **`instruct_unplug`**
|
||||
- Instructs user to unplug old controller after backup
|
||||
- Unloads config entry before asking (to release USB port)
|
||||
- Shows form with backup file path
|
||||
- On confirmation:
|
||||
- If adapter was discovered: goes to `start_addon` (path already known)
|
||||
- If adapter not discovered: goes to `choose_serial_port`
|
||||
|
||||
- **`choose_serial_port`**
|
||||
- Shows available serial ports for new controller
|
||||
- Gets list of USB ports
|
||||
- Removes old controller path from list
|
||||
- Adds "Use Socket" option for ESPHome/remote connections
|
||||
- Shows form with:
|
||||
- Optional USB path dropdown
|
||||
- Optional socket path text field
|
||||
- Stores selected path in add-on config updates
|
||||
- Goes to `start_addon`
|
||||
|
||||
#### Reconfiguration Steps
|
||||
|
||||
- **`on_supervisor_reconfigure`**
|
||||
- Asks if user wants add-on or manual server during reconfigure
|
||||
- Shows form with `use_addon` checkbox (pre-filled with current value)
|
||||
- If `use_addon=False`:
|
||||
- If was using add-on: unloads entry and stops add-on
|
||||
- Goes to `manual_reconfigure`
|
||||
- If `use_addon=True`:
|
||||
- If add-on not installed: goes to `install_addon`
|
||||
- If add-on installed: goes to `configure_addon_reconfigure`
|
||||
|
||||
- **`manual_reconfigure`**
|
||||
- Collects new WebSocket URL when reconfiguring manual setup
|
||||
- Shows form with URL field (pre-filled with current URL)
|
||||
- Validates connection and gets version info
|
||||
- Verifies home ID matches the existing config entry (prevents connecting to the wrong device; see the sketch at the end of this section)
|
||||
- Updates config entry with new URL
|
||||
- Disables add-on handling flags
|
||||
- Aborts with `reconfigure_successful`
|
||||
|
||||
- **`configure_addon_reconfigure`**
|
||||
- Updates add-on configuration during reconfigure
|
||||
- Gets current add-on config
|
||||
- Shows form with:
|
||||
- USB path dropdown (including "Use Socket" option)
|
||||
- Socket path text field
|
||||
- All six security key fields
|
||||
- Pre-fills with current add-on config values
|
||||
- On submit:
|
||||
- Updates add-on config with new values
|
||||
- If add-on running and no restart needed: goes to `finish_addon_setup_reconfigure`
|
||||
- Otherwise: unloads entry and goes to `start_addon`
|
||||
|
||||
- **`finish_addon_setup_reconfigure`**
|
||||
- Finalizes reconfiguration
|
||||
- If there's a pending revert reason: reverts config and aborts
|
||||
- Gets WebSocket URL from add-on discovery
|
||||
- Gets version info from server
|
||||
- Verifies home ID matches (prevents wrong device)
|
||||
- Updates config entry with all new values
|
||||
- Reloads config entry
|
||||
- Aborts with `reconfigure_successful`
|
||||
- On error: calls `async_revert_addon_config` to restore original config
|
||||
|
||||
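
The home ID verification mentioned for `manual_reconfigure` and `finish_addon_setup_reconfigure` amounts to a comparison like the one below; the names and abort reason are assumptions for illustration:

```python
def home_id_matches(entry_unique_id: str | None, new_home_id: int) -> bool:
    """Return True when the server's home ID matches the entry being reconfigured."""
    return entry_unique_id is not None and str(new_home_id) == str(entry_unique_id)


# In the flow this would translate into something like:
# if not home_id_matches(entry.unique_id, version_info.home_id):
#     return self.async_abort(reason="different_device")
```
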
#### Migration Finish Steps
|
||||
|
||||
- **`finish_addon_setup_migrate`**
|
||||
- Finalizes migration to new controller
|
||||
- Updates config entry with:
|
||||
- New WebSocket URL
|
||||
- New USB/socket path
|
||||
- Same security keys
|
||||
- New home ID as unique ID
|
||||
- Note: Does NOT reload entry here (done in restore step)
|
||||
- Goes to `restore_nvm`
|
||||
|
||||
- **`migration_done`**
|
||||
- Migration completed successfully
|
||||
- Aborts with `migration_successful`
|
||||
|
||||
### User Entry Point
|
||||
|
||||
Initial setup flow when user manually adds the integration:
|
||||
|
||||
```mermaid
|
||||
graph TB
|
||||
user[user] --> hassio_check{Is Supervisor?}
|
||||
hassio_check -->|Yes| installation_type{installation_type<br/>menu}
|
||||
hassio_check -->|No| manual[manual]
|
||||
|
||||
installation_type -->|Recommended| intent_recommended[intent_recommended]
|
||||
installation_type -->|Custom| intent_custom[intent_custom]
|
||||
|
||||
intent_recommended --> use_addon_true[on_supervisor<br/>use_addon=True]
|
||||
intent_custom --> adapter_check{Adapter<br/>discovered?}
|
||||
adapter_check -->|Yes| use_addon_true
|
||||
adapter_check -->|No| on_supervisor[on_supervisor<br/>ask use_addon]
|
||||
|
||||
on_supervisor -->|use_addon=False| manual
|
||||
on_supervisor -->|use_addon=True| use_addon_true
|
||||
|
||||
use_addon_true --> addon_state{Add-on state?}
|
||||
addon_state -->|Running| finish_addon_setup_user[finish_addon_setup_user]
|
||||
addon_state -->|Not Running| configure_addon_user[configure_addon_user]
|
||||
addon_state -->|Not Installed| install_addon[install_addon]
|
||||
|
||||
install_addon -->|Success| configure_addon_user
|
||||
install_addon -->|Fail| install_failed[install_failed<br/>abort]
|
||||
|
||||
configure_addon_user --> adapter_discovered{Adapter<br/>discovered?}
|
||||
adapter_discovered -->|Yes| network_type[network_type]
|
||||
adapter_discovered -->|No| ask_usb[Ask USB/Socket path]
|
||||
ask_usb --> network_type
|
||||
|
||||
network_type --> recommended{Recommended<br/>install?}
|
||||
recommended -->|Yes| start_addon[start_addon]
|
||||
recommended -->|No| ask_network[Ask network type]
|
||||
|
||||
ask_network -->|New| start_addon
|
||||
ask_network -->|Existing| configure_security_keys[configure_security_keys]
|
||||
configure_security_keys --> start_addon
|
||||
|
||||
start_addon --> rf_region_check{Country not set<br/>& RF region not<br/>configured?}
|
||||
rf_region_check -->|Yes| rf_region[rf_region]
|
||||
rf_region_check -->|No| start_progress[Start add-on]
|
||||
rf_region --> start_progress
|
||||
|
||||
start_progress -->|Success| finish_addon_setup_user
|
||||
start_progress -->|Fail| start_failed[start_failed<br/>abort]
|
||||
|
||||
finish_addon_setup_user --> finalize[Get version info<br/>Set unique ID<br/>Create entry]
|
||||
finalize --> create_entry((create entry))
|
||||
|
||||
manual --> ask_url[Ask WebSocket URL<br/>Validate connection]
|
||||
ask_url -->|Success| create_entry
|
||||
ask_url -->|Fail| ask_url
|
||||
|
||||
style user fill:#e1f5ff
|
||||
style create_entry fill:#c8e6c9
|
||||
style install_failed fill:#ffcdd2
|
||||
style start_failed fill:#ffcdd2
|
||||
```
|
||||
|
||||
### USB Discovery Entry Point
|
||||
|
||||
Flow triggered when a USB Z-Wave stick is discovered:
|
||||
|
||||
```mermaid
|
||||
graph TB
|
||||
usb[usb discovery] --> supervisor_check{Is Supervisor?}
|
||||
supervisor_check -->|No| abort_supervisor[abort<br/>discovery_requires_supervisor]
|
||||
supervisor_check -->|Yes| flow_check{Non-USB flows<br/>in progress?}
|
||||
|
||||
flow_check -->|Yes| abort_progress[abort<br/>already_in_progress]
|
||||
flow_check -->|No| existing_check{Existing<br/>entries?}
|
||||
|
||||
existing_check -->|No| setup_temp[Set temp unique ID<br/>Store USB path]
|
||||
existing_check -->|Yes| find_addon_entry{Entry with<br/>use_addon=True<br/>exists?}
|
||||
|
||||
find_addon_entry -->|No| abort_addon_req[abort<br/>addon_required]
|
||||
find_addon_entry -->|Yes| check_configured{Device already<br/>configured in<br/>add-on?}
|
||||
|
||||
check_configured -->|Yes| abort_configured[abort<br/>already_configured]
|
||||
check_configured -->|No| setup_temp
|
||||
|
||||
setup_temp --> entries_exist{Existing<br/>entries?}
|
||||
|
||||
entries_exist -->|Yes| confirm_usb_migration[confirm_usb_migration]
|
||||
entries_exist -->|No| installation_type{installation_type<br/>menu}
|
||||
|
||||
confirm_usb_migration -->|Confirm| intent_migrate[intent_migrate]
|
||||
confirm_usb_migration -->|Cancel| abort_user[User aborts]
|
||||
|
||||
installation_type -->|Recommended| intent_recommended[intent_recommended]
|
||||
installation_type -->|Custom| intent_custom[intent_custom]
|
||||
|
||||
intent_recommended --> on_supervisor[on_supervisor<br/>use_addon=True]
|
||||
intent_custom --> on_supervisor
|
||||
|
||||
on_supervisor --> addon_state{Add-on state?}
|
||||
addon_state -->|Running| finish_addon_setup_user[finish_addon_setup_user]
|
||||
addon_state -->|Not Running| network_type[network_type]
|
||||
addon_state -->|Not Installed| install_addon[install_addon]
|
||||
|
||||
install_addon --> configure_addon_user[configure_addon_user]
|
||||
configure_addon_user --> network_type
|
||||
|
||||
network_type --> recommended{Recommended?}
|
||||
recommended -->|Yes| start_addon[start_addon]
|
||||
recommended -->|No| ask_network[Ask network type]
|
||||
ask_network -->|New| start_addon
|
||||
ask_network -->|Existing| configure_security_keys[configure_security_keys]
|
||||
configure_security_keys --> start_addon
|
||||
|
||||
start_addon --> rf_check{Country not set<br/>& RF region not<br/>configured?}
|
||||
rf_check -->|Yes| rf_region[rf_region]
|
||||
rf_check -->|No| start_progress[Start add-on]
|
||||
rf_region --> start_progress
|
||||
|
||||
start_progress --> finish_addon_setup[finish_addon_setup]
|
||||
finish_addon_setup --> finish_addon_setup_user
|
||||
finish_addon_setup_user --> finalize[Update unique ID<br/>Create entry]
|
||||
finalize --> create_entry((create entry))
|
||||
|
||||
intent_migrate --> migration_flow[See Migration flow]
|
||||
|
||||
style usb fill:#e1f5ff
|
||||
style create_entry fill:#c8e6c9
|
||||
style abort_supervisor fill:#ffcdd2
|
||||
style abort_progress fill:#ffcdd2
|
||||
style abort_addon_req fill:#ffcdd2
|
||||
style abort_configured fill:#ffcdd2
|
||||
style migration_flow fill:#fff9c4
|
||||
```
|
||||
|
||||
### Zeroconf Discovery Entry Point
|
||||
|
||||
Flow triggered when Z-Wave JS server is discovered via Zeroconf:
|
||||
|
||||
```mermaid
|
||||
graph TB
|
||||
zeroconf[zeroconf discovery] --> setup[Extract home_id<br/>Set unique ID<br/>Store WebSocket URL]
|
||||
setup --> check_configured{Already<br/>configured?}
|
||||
|
||||
check_configured -->|Yes| abort_configured[abort<br/>already_configured]
|
||||
check_configured -->|No| zeroconf_confirm[zeroconf_confirm]
|
||||
|
||||
zeroconf_confirm -->|Confirm| manual[manual<br/>with stored URL]
|
||||
zeroconf_confirm -->|Cancel| abort_user[User aborts]
|
||||
|
||||
manual --> validate[Validate connection<br/>Get version info]
|
||||
validate -->|Success| create_entry((create entry))
|
||||
validate -->|Fail| manual
|
||||
|
||||
style zeroconf fill:#e1f5ff
|
||||
style create_entry fill:#c8e6c9
|
||||
style abort_configured fill:#ffcdd2
|
||||
```
|
||||
|
||||
### Add-on Discovery Entry Point (hassio)

Flow triggered when the Z-Wave JS add-on reports its availability:

```mermaid
graph TB
hassio[hassio discovery] --> flow_check{Other flows<br/>in progress?}
flow_check -->|Yes| abort_progress[abort<br/>already_in_progress]
flow_check -->|No| slug_check{Is Z-Wave JS<br/>add-on?}

slug_check -->|No| abort_slug[abort<br/>not_zwave_js_addon]
slug_check -->|Yes| validate[Build WebSocket URL<br/>Get version info<br/>Set unique ID]

validate -->|Fail| abort_connect[abort<br/>cannot_connect]
validate -->|Success| check_configured{Already<br/>configured?}

check_configured -->|Yes| update_abort[Update URL<br/>abort already_configured]
check_configured -->|No| hassio_confirm[hassio_confirm]

hassio_confirm -->|Confirm| on_supervisor[on_supervisor<br/>use_addon=True]
hassio_confirm -->|Cancel| abort_user[User aborts]

on_supervisor --> addon_state{Add-on state?}
addon_state -->|Running| finish_addon_setup_user[finish_addon_setup_user]
addon_state -->|Not Running| configure_addon_user[configure_addon_user]
addon_state -->|Not Installed| install_addon[install_addon]

install_addon --> configure_addon_user
configure_addon_user --> network_type[network_type]
network_type --> start_addon[start_addon]
start_addon --> finish_addon_setup[finish_addon_setup]
finish_addon_setup --> finish_addon_setup_user
finish_addon_setup_user --> create_entry((create entry))

style hassio fill:#e1f5ff
style create_entry fill:#c8e6c9
style abort_progress fill:#ffcdd2
style abort_slug fill:#ffcdd2
style abort_connect fill:#ffcdd2
```

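The two guard checks at the top of this diagram (another flow already in progress, wrong add-on slug) are cheap to illustrate. A sketch under the assumption that the slug constant and discovery config keys look roughly like this; it is not the integration's actual handler:

```python
# Illustrative only: the add-on slug and discovery config keys are assumptions.
from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
from homeassistant.helpers.service_info.hassio import HassioServiceInfo

ASSUMED_ADDON_SLUG = "core_zwave_js"


class ExampleHassioFlow(ConfigFlow, domain="zwave_js_example"):
    """Illustrative add-on discovery entry point."""

    async def async_step_hassio(
        self, discovery_info: HassioServiceInfo
    ) -> ConfigFlowResult:
        if self._async_in_progress():
            return self.async_abort(reason="already_in_progress")
        if discovery_info.slug != ASSUMED_ADDON_SLUG:
            return self.async_abort(reason="not_zwave_js_addon")
        # Build the WebSocket URL from the discovery payload.
        config = discovery_info.config
        self.ws_address = f"ws://{config['host']}:{config['port']}"
        return self.async_show_form(step_id="hassio_confirm")
```
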
### ESPHome Discovery Entry Point

Flow triggered when an ESPHome device with Z-Wave support is discovered:

```mermaid
graph TB
esphome[esphome discovery] --> supervisor_check{Is Supervisor?}
supervisor_check -->|No| abort_hassio[abort<br/>not_hassio]
supervisor_check -->|Yes| match_check{Home ID exists<br/>& matching entry<br/>with socket?}

match_check -->|Yes| update_reload[Update add-on config<br/>Reload entry]
match_check -->|No| setup_discovery[Set unique ID<br/>Store socket path<br/>Set adapter_discovered]

update_reload --> abort_configured[abort<br/>already_configured]

setup_discovery --> installation_type{installation_type<br/>menu}

installation_type -->|Recommended| intent_recommended[intent_recommended]
installation_type -->|Custom| intent_custom[intent_custom]

intent_recommended --> on_supervisor[on_supervisor<br/>use_addon=True]
intent_custom --> on_supervisor

on_supervisor --> addon_state{Add-on state?}
addon_state -->|Running| finish_addon_setup_user[finish_addon_setup_user]
addon_state -->|Not Running| network_type[network_type]
addon_state -->|Not Installed| install_addon[install_addon]

install_addon --> configure_addon_user[configure_addon_user]
configure_addon_user --> network_type
network_type --> start_addon[start_addon]
start_addon --> finish_addon_setup[finish_addon_setup]
finish_addon_setup --> finish_addon_setup_user

finish_addon_setup_user --> unique_id_check{Unique ID set<br/>& matching USB<br/>entry?}
unique_id_check -->|Yes| update_reload
unique_id_check -->|No| create_entry((create entry))

style esphome fill:#e1f5ff
style create_entry fill:#c8e6c9
style abort_hassio fill:#ffcdd2
style abort_configured fill:#ffcdd2
```

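The interesting branch here is "matching entry with socket": rather than creating a second entry, the flow points the existing entry at the rediscovered socket path and reloads it. A simplified sketch of that branch; the real flow writes the socket path into the add-on options, while this version updates the entry data directly to stay self-contained, and the key names are assumptions:

```python
# Simplified: key names are assumptions, and the add-on option update done by
# the real flow is replaced by an entry-data update to keep the sketch small.
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant

CONF_SOCKET_PATH = "socket_path"  # assumed config entry key
CONF_USE_ADDON = "use_addon"      # assumed config entry key


async def update_entry_from_esphome_discovery(
    hass: HomeAssistant, entry: ConfigEntry, socket_path: str
) -> None:
    """Point an existing socket-based, add-on backed entry at the new socket."""
    if not entry.data.get(CONF_SOCKET_PATH) or not entry.data.get(CONF_USE_ADDON):
        return
    hass.config_entries.async_update_entry(
        entry, data={**entry.data, CONF_SOCKET_PATH: socket_path}
    )
    # Reload so the integration reconnects over the new socket path.
    hass.config_entries.async_schedule_reload(entry.entry_id)
```
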
### Reconfigure Entry Point

Flow triggered when a user reconfigures an existing entry:

```mermaid
graph TB
reconfigure[reconfigure] --> reconfigure_menu{reconfigure<br/>menu}

reconfigure_menu -->|Reconfigure| intent_reconfigure[intent_reconfigure]
reconfigure_menu -->|Migrate| intent_migrate[intent_migrate]

intent_reconfigure --> supervisor_check{Is Supervisor?}
supervisor_check -->|No| manual_reconfigure[manual_reconfigure]
supervisor_check -->|Yes| on_supervisor_reconfigure[on_supervisor_reconfigure]

on_supervisor_reconfigure --> ask_use_addon{Use add-on?}
ask_use_addon -->|No & was using| stop_addon[Unload entry<br/>Stop add-on]
ask_use_addon -->|No| manual_reconfigure
stop_addon -->|Fail| abort_stop[abort<br/>addon_stop_failed]
stop_addon -->|Success| manual_reconfigure

ask_use_addon -->|Yes| addon_state{Add-on state?}
addon_state -->|Not Installed| install_addon[install_addon]
addon_state -->|Installed| configure_addon_reconfigure[configure_addon_reconfigure]

install_addon --> configure_addon_reconfigure

configure_addon_reconfigure --> update_config[Ask USB/Socket/Keys<br/>Update add-on config]

update_config --> running_check{Add-on running<br/>& no restart<br/>needed?}
running_check -->|Yes| finish_addon_setup_reconfigure[finish_addon_setup_reconfigure]
running_check -->|No| unload_start[Unload entry if needed<br/>Start add-on]

unload_start --> rf_check{Country not set<br/>& RF region not<br/>configured?}
rf_check -->|Yes| rf_region[rf_region]
rf_check -->|No| start_addon[start_addon]
rf_region --> start_addon

start_addon -->|Fail| revert_start[Revert config<br/>abort addon_start_failed]
start_addon -->|Success| finish_addon_setup[finish_addon_setup]

finish_addon_setup --> finish_addon_setup_reconfigure

finish_addon_setup_reconfigure --> validate[Get WebSocket URL<br/>Get version info<br/>Check home ID]
validate -->|Cannot connect| revert_connect[Revert config<br/>abort cannot_connect]
validate -->|Wrong device| revert_device[Revert config<br/>abort different_device]
validate -->|Success| update_reload[Update entry<br/>Reload entry]
update_reload --> abort_success[abort<br/>reconfigure_successful]

manual_reconfigure --> ask_validate[Ask WebSocket URL<br/>Validate connection]
ask_validate -->|Fail| ask_validate
ask_validate -->|Success| check_home_id{Home ID<br/>matches?}

check_home_id -->|No| abort_different[abort<br/>different_device]
check_home_id -->|Yes| update_manual[Update entry<br/>Disable add-on]
update_manual --> abort_success

style reconfigure fill:#e1f5ff
style abort_success fill:#c8e6c9
style abort_stop fill:#ffcdd2
style abort_different fill:#ffcdd2
style revert_start fill:#ffcdd2
style revert_connect fill:#ffcdd2
style revert_device fill:#ffcdd2
```

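As with the user flow, the entry point is a menu feeding two intents, and the Supervisor check decides between the manual and add-on paths. A short sketch with the step names taken from the diagram; it is not the integration's actual code:

```python
# Step names follow the diagram; this is not the integration's actual code.
from typing import Any

from homeassistant.components.hassio import is_hassio
from homeassistant.config_entries import ConfigFlow, ConfigFlowResult


class ExampleReconfigureFlow(ConfigFlow, domain="zwave_js_example"):
    """Illustrative reconfigure entry point."""

    async def async_step_reconfigure(
        self, user_input: dict[str, Any] | None = None
    ) -> ConfigFlowResult:
        # Let the user choose between plain reconfiguration and migration.
        return self.async_show_menu(
            step_id="reconfigure",
            menu_options=["intent_reconfigure", "intent_migrate"],
        )

    async def async_step_intent_reconfigure(
        self, user_input: dict[str, Any] | None = None
    ) -> ConfigFlowResult:
        # Without a Supervisor there is no add-on to manage, so go manual.
        if not is_hassio(self.hass):
            return self.async_show_form(step_id="manual_reconfigure")
        return self.async_show_form(step_id="on_supervisor_reconfigure")
```
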
@@ -703,7 +703,15 @@ class ZWaveJSConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
async def async_step_on_supervisor(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
"""Handle logic when on Supervisor host."""
|
||||
"""Handle logic when on Supervisor host.
|
||||
|
||||
When the add-on is running, we copy over its settings.
|
||||
We will ignore settings for USB/Socket if those were discovered.
|
||||
|
||||
If the add-on is not running, we will configure the add-on.
|
||||
|
||||
When it's not installed, we install it with new config options.
|
||||
"""
|
||||
if user_input is None:
|
||||
return self.async_show_form(
|
||||
step_id="on_supervisor", data_schema=ON_SUPERVISOR_SCHEMA
|
||||
@@ -717,8 +725,11 @@ class ZWaveJSConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
|
||||
if addon_info.state == AddonState.RUNNING:
|
||||
addon_config = addon_info.options
|
||||
self.usb_path = addon_config.get(CONF_ADDON_DEVICE)
|
||||
self.socket_path = addon_config.get(CONF_ADDON_SOCKET)
|
||||
# Use the options set by USB/ESPHome discovery
|
||||
if not self._adapter_discovered:
|
||||
self.usb_path = addon_config.get(CONF_ADDON_DEVICE)
|
||||
self.socket_path = addon_config.get(CONF_ADDON_SOCKET)
|
||||
|
||||
self.s0_legacy_key = addon_config.get(CONF_ADDON_S0_LEGACY_KEY, "")
|
||||
self.s2_access_control_key = addon_config.get(
|
||||
CONF_ADDON_S2_ACCESS_CONTROL_KEY, ""
|
||||
@@ -918,7 +929,7 @@ class ZWaveJSConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
discovery_info = await self._async_get_addon_discovery_info()
|
||||
self.ws_address = f"ws://{discovery_info['host']}:{discovery_info['port']}"
|
||||
|
||||
if not self.unique_id or self.source in (SOURCE_USB, SOURCE_ESPHOME):
|
||||
if not self.unique_id or self.source == SOURCE_USB:
|
||||
if not self.version_info:
|
||||
try:
|
||||
self.version_info = await async_get_version_info(
|
||||
@@ -931,6 +942,21 @@ class ZWaveJSConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
str(self.version_info.home_id), raise_on_progress=False
|
||||
)
|
||||
|
||||
# When we came from discovery, make sure we update the add-on
|
||||
if self._adapter_discovered and self.use_addon:
|
||||
await self._async_set_addon_config(
|
||||
{
|
||||
CONF_ADDON_DEVICE: self.usb_path,
|
||||
CONF_ADDON_SOCKET: self.socket_path,
|
||||
CONF_ADDON_S0_LEGACY_KEY: self.s0_legacy_key,
|
||||
CONF_ADDON_S2_ACCESS_CONTROL_KEY: self.s2_access_control_key,
|
||||
CONF_ADDON_S2_AUTHENTICATED_KEY: self.s2_authenticated_key,
|
||||
CONF_ADDON_S2_UNAUTHENTICATED_KEY: self.s2_unauthenticated_key,
|
||||
CONF_ADDON_LR_S2_ACCESS_CONTROL_KEY: self.lr_s2_access_control_key,
|
||||
CONF_ADDON_LR_S2_AUTHENTICATED_KEY: self.lr_s2_authenticated_key,
|
||||
}
|
||||
)
|
||||
|
||||
self._abort_if_unique_id_configured(
|
||||
updates={
|
||||
CONF_URL: self.ws_address,
|
||||
@@ -942,7 +968,12 @@ class ZWaveJSConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
CONF_S2_UNAUTHENTICATED_KEY: self.s2_unauthenticated_key,
|
||||
CONF_LR_S2_ACCESS_CONTROL_KEY: self.lr_s2_access_control_key,
|
||||
CONF_LR_S2_AUTHENTICATED_KEY: self.lr_s2_authenticated_key,
|
||||
}
|
||||
},
|
||||
error=(
|
||||
"migration_successful"
|
||||
if self.source in (SOURCE_USB, SOURCE_ESPHOME)
|
||||
else "already_configured"
|
||||
),
|
||||
)
|
||||
return self._async_create_entry_from_vars()
|
||||
|
||||
@@ -1490,6 +1521,8 @@ class ZWaveJSConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
)
|
||||
# Only update existing entries that are configured via sockets
|
||||
and existing_entry.data.get(CONF_SOCKET_PATH)
|
||||
# And use the add-on
|
||||
and existing_entry.data.get(CONF_USE_ADDON)
|
||||
):
|
||||
await self._async_set_addon_config(
|
||||
{CONF_ADDON_SOCKET: discovery_info.socket_path}
|
||||
@@ -1498,6 +1531,11 @@ class ZWaveJSConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
self.hass.config_entries.async_schedule_reload(existing_entry.entry_id)
|
||||
return self.async_abort(reason="already_configured")
|
||||
|
||||
# We are not aborting here if the home ID is already configured; we just want to make sure that it's set
|
||||
# We will update a USB based config entry automatically in `async_step_finish_addon_setup_user`
|
||||
await self.async_set_unique_id(
|
||||
str(discovery_info.zwave_home_id), raise_on_progress=False
|
||||
)
|
||||
self.socket_path = discovery_info.socket_path
|
||||
self.context["title_placeholders"] = {
|
||||
CONF_NAME: f"{discovery_info.name} via ESPHome"
|
||||
|
||||
1
homeassistant/generated/config_flows.py
generated
@@ -499,7 +499,6 @@ FLOWS = {
|
||||
"playstation_network",
|
||||
"plex",
|
||||
"plugwise",
|
||||
"plum_lightpad",
|
||||
"point",
|
||||
"pooldose",
|
||||
"poolsense",
|
||||
|
||||
@@ -2923,23 +2923,6 @@
|
||||
"iot_class": "cloud_polling",
|
||||
"single_config_entry": true
|
||||
},
|
||||
"ibm": {
|
||||
"name": "IBM",
|
||||
"integrations": {
|
||||
"watson_iot": {
|
||||
"integration_type": "hub",
|
||||
"config_flow": false,
|
||||
"iot_class": "cloud_push",
|
||||
"name": "IBM Watson IoT Platform"
|
||||
},
|
||||
"watson_tts": {
|
||||
"integration_type": "hub",
|
||||
"config_flow": false,
|
||||
"iot_class": "cloud_push",
|
||||
"name": "IBM Watson TTS"
|
||||
}
|
||||
}
|
||||
},
|
||||
"idteck_prox": {
|
||||
"name": "IDTECK Proximity Reader",
|
||||
"integration_type": "hub",
|
||||
@@ -5104,12 +5087,6 @@
|
||||
"config_flow": true,
|
||||
"iot_class": "local_polling"
|
||||
},
|
||||
"plum_lightpad": {
|
||||
"name": "Plum Lightpad",
|
||||
"integration_type": "hub",
|
||||
"config_flow": true,
|
||||
"iot_class": "local_push"
|
||||
},
|
||||
"pocketcasts": {
|
||||
"name": "Pocket Casts",
|
||||
"integration_type": "hub",
|
||||
@@ -7411,12 +7388,6 @@
|
||||
"config_flow": true,
|
||||
"iot_class": "cloud_polling"
|
||||
},
|
||||
"vultr": {
|
||||
"name": "Vultr",
|
||||
"integration_type": "hub",
|
||||
"config_flow": false,
|
||||
"iot_class": "cloud_polling"
|
||||
},
|
||||
"w800rf32": {
|
||||
"name": "WGL Designs W800RF32",
|
||||
"integration_type": "hub",
|
||||
@@ -7453,6 +7424,12 @@
|
||||
"config_flow": true,
|
||||
"iot_class": "local_push"
|
||||
},
|
||||
"watson_tts": {
|
||||
"name": "IBM Watson TTS",
|
||||
"integration_type": "hub",
|
||||
"config_flow": false,
|
||||
"iot_class": "cloud_push"
|
||||
},
|
||||
"watttime": {
|
||||
"name": "WattTime",
|
||||
"integration_type": "service",
|
||||
|
||||
@@ -85,11 +85,8 @@ class Debouncer[_R_co]:
|
||||
|
||||
return False
|
||||
|
||||
# Locked means a call is in progress. Any call is good, so abort.
|
||||
if self._execute_lock.locked():
|
||||
return False
|
||||
|
||||
if not self.immediate:
|
||||
# If not immediate or in progress, we schedule a call for later.
|
||||
if not self.immediate or self._execute_lock.locked():
|
||||
self._execute_at_end_of_timer = True
|
||||
self._schedule_timer()
|
||||
return False
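
The hunk above folds the "call already in progress" case into the scheduling branch: whether the debouncer is non-immediate or an execution currently holds the lock, the call is deferred to the cooldown timer. A tiny standalone sketch of that consolidated check (not Home Assistant's Debouncer, just the shape of the decision):

```python
# Standalone illustration of the consolidated branch; it assumes nothing about
# Home Assistant's actual Debouncer beyond what the hunk above shows.
import asyncio


class TinyDebouncer:
    """Toy debouncer that only models the 'execute now or defer' decision."""

    def __init__(self, immediate: bool) -> None:
        self.immediate = immediate
        self._execute_lock = asyncio.Lock()
        self._execute_at_end_of_timer = False

    def _schedule_timer(self) -> None:
        """Arm the cooldown timer (elided in this sketch)."""

    def should_execute_now(self) -> bool:
        """Return True only when the caller may run the callback immediately."""
        if not self.immediate or self._execute_lock.locked():
            # Non-immediate mode and in-progress executions both defer the
            # work to the end of the cooldown timer.
            self._execute_at_end_of_timer = True
            self._schedule_timer()
            return False
        return True
```
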
@@ -561,15 +561,16 @@ class Template:
|
||||
finally:
|
||||
self.hass.loop.call_soon_threadsafe(finish_event.set)
|
||||
|
||||
template_render_thread = ThreadWithException(target=_render_template)
|
||||
try:
|
||||
template_render_thread = ThreadWithException(target=_render_template)
|
||||
template_render_thread.start()
|
||||
async with asyncio.timeout(timeout):
|
||||
await finish_event.wait()
|
||||
if self._exc_info:
|
||||
raise TemplateError(self._exc_info[1].with_traceback(self._exc_info[2]))
|
||||
except TimeoutError:
|
||||
template_render_thread.raise_exc(TimeoutError)
|
||||
if template_render_thread.is_alive():
|
||||
template_render_thread.raise_exc(TimeoutError)
|
||||
return True
|
||||
finally:
|
||||
template_render_thread.join()
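
The hunk above moves the thread construction out of the `try` block, so the `finally` can always `join()` it, and only injects the timeout exception when the worker is still alive. A rough sketch of that pattern with a plain `threading.Thread`, since `ThreadWithException` is a Home Assistant helper and its exception-injection step is represented here by a comment only:

```python
# Sketch of "start worker, wait with timeout, always join"; the exception
# injection done by ThreadWithException is represented by a comment only.
import asyncio
import threading
from collections.abc import Callable


async def wait_for_worker(blocking_work: Callable[[], None], timeout: float) -> bool:
    """Return True if the worker timed out, False if it finished in time."""
    loop = asyncio.get_running_loop()
    finish_event = asyncio.Event()

    def _worker() -> None:
        try:
            blocking_work()
        finally:
            loop.call_soon_threadsafe(finish_event.set)

    # Create the handle before the try block so the finally can always join it.
    worker_thread = threading.Thread(target=_worker)
    try:
        worker_thread.start()
        async with asyncio.timeout(timeout):
            await finish_event.wait()
        return False
    except TimeoutError:
        if worker_thread.is_alive():
            # Home Assistant raises TimeoutError inside the worker thread via
            # ThreadWithException here; a plain Thread cannot do that.
            pass
        return True
    finally:
        worker_thread.join()
```
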
@@ -10,7 +10,7 @@ aiohttp==3.12.15
|
||||
aiohttp_cors==0.8.1
|
||||
aiousbwatcher==1.1.1
|
||||
aiozoneinfo==0.2.3
|
||||
annotatedyaml==0.4.5
|
||||
annotatedyaml==1.0.2
|
||||
astral==2.2
|
||||
async-interrupt==1.2.2
|
||||
async-upnp-client==0.45.0
|
||||
@@ -24,18 +24,18 @@ bleak-retry-connector==4.4.3
|
||||
bleak==1.0.1
|
||||
bluetooth-adapters==2.1.0
|
||||
bluetooth-auto-recovery==1.5.3
|
||||
bluetooth-data-tools==1.28.2
|
||||
cached-ipaddress==0.10.0
|
||||
bluetooth-data-tools==1.28.3
|
||||
cached-ipaddress==1.0.1
|
||||
certifi>=2021.5.30
|
||||
ciso8601==2.3.3
|
||||
cronsim==2.6
|
||||
cryptography==46.0.2
|
||||
dbus-fast==2.44.3
|
||||
dbus-fast==2.44.5
|
||||
file-read-backwards==2.0.0
|
||||
fnv-hash-fast==1.5.0
|
||||
fnv-hash-fast==1.6.0
|
||||
go2rtc-client==0.2.1
|
||||
ha-ffmpeg==3.2.2
|
||||
habluetooth==5.6.4
|
||||
habluetooth==5.7.0
|
||||
hass-nabucasa==1.2.0
|
||||
hassil==3.2.0
|
||||
home-assistant-bluetooth==1.13.1
|
||||
@@ -60,14 +60,14 @@ pyserial==3.5
|
||||
pyspeex-noise==1.0.2
|
||||
python-slugify==8.0.4
|
||||
PyTurboJPEG==1.8.0
|
||||
PyYAML==6.0.2
|
||||
PyYAML==6.0.3
|
||||
requests==2.32.5
|
||||
securetar==2025.2.1
|
||||
SQLAlchemy==2.0.41
|
||||
standard-aifc==3.13.0
|
||||
standard-telnetlib==3.13.0
|
||||
typing-extensions>=4.15.0,<5.0
|
||||
ulid-transform==1.4.0
|
||||
ulid-transform==1.5.2
|
||||
urllib3>=2.0
|
||||
uv==0.8.9
|
||||
voluptuous-openapi==0.1.0
|
||||
@@ -75,7 +75,7 @@ voluptuous-serialize==2.7.0
|
||||
voluptuous==0.15.2
|
||||
webrtc-models==0.3.0
|
||||
yarl==1.20.1
|
||||
zeroconf==0.147.2
|
||||
zeroconf==1.0.0
|
||||
|
||||
# Constrain pycryptodome to avoid vulnerability
|
||||
# see https://github.com/home-assistant/core/pull/16238
|
||||
@@ -88,9 +88,9 @@ httplib2>=0.19.0
|
||||
# gRPC is an implicit dependency that we want to make explicit so we manage
|
||||
# upgrades intentionally. It is a large package to build from source and we
|
||||
# want to ensure we have wheels built.
|
||||
grpcio==1.72.1
|
||||
grpcio-status==1.72.1
|
||||
grpcio-reflection==1.72.1
|
||||
grpcio==1.75.1
|
||||
grpcio-status==1.75.1
|
||||
grpcio-reflection==1.75.1
|
||||
|
||||
# This is an old unmaintained library and is replaced with pycryptodome
|
||||
pycrypto==1000000000.0.0
|
||||
|
||||
10
mypy.ini
generated
@@ -5309,6 +5309,16 @@ disallow_untyped_defs = true
|
||||
warn_return_any = true
|
||||
warn_unreachable = true
|
||||
|
||||
[mypy-homeassistant.components.vivotek.*]
|
||||
check_untyped_defs = true
|
||||
disallow_incomplete_defs = true
|
||||
disallow_subclassing_any = true
|
||||
disallow_untyped_calls = true
|
||||
disallow_untyped_decorators = true
|
||||
disallow_untyped_defs = true
|
||||
warn_return_any = true
|
||||
warn_unreachable = true
|
||||
|
||||
[mypy-homeassistant.components.vlc_telnet.*]
|
||||
check_untyped_defs = true
|
||||
disallow_incomplete_defs = true
|
||||
|
||||
@@ -33,7 +33,7 @@ dependencies = [
|
||||
"aiohttp-fast-zlib==0.3.0",
|
||||
"aiohttp-asyncmdnsresolver==0.1.1",
|
||||
"aiozoneinfo==0.2.3",
|
||||
"annotatedyaml==0.4.5",
|
||||
"annotatedyaml==1.0.2",
|
||||
"astral==2.2",
|
||||
"async-interrupt==1.2.2",
|
||||
"attrs==25.3.0",
|
||||
@@ -44,7 +44,7 @@ dependencies = [
|
||||
"certifi>=2021.5.30",
|
||||
"ciso8601==2.3.3",
|
||||
"cronsim==2.6",
|
||||
"fnv-hash-fast==1.5.0",
|
||||
"fnv-hash-fast==1.6.0",
|
||||
# hass-nabucasa is imported by helpers which don't depend on the cloud
|
||||
# integration
|
||||
"hass-nabucasa==1.2.0",
|
||||
@@ -65,14 +65,14 @@ dependencies = [
|
||||
"packaging>=23.1",
|
||||
"psutil-home-assistant==0.0.1",
|
||||
"python-slugify==8.0.4",
|
||||
"PyYAML==6.0.2",
|
||||
"PyYAML==6.0.3",
|
||||
"requests==2.32.5",
|
||||
"securetar==2025.2.1",
|
||||
"SQLAlchemy==2.0.41",
|
||||
"standard-aifc==3.13.0",
|
||||
"standard-telnetlib==3.13.0",
|
||||
"typing-extensions>=4.15.0,<5.0",
|
||||
"ulid-transform==1.4.0",
|
||||
"ulid-transform==1.5.2",
|
||||
"urllib3>=2.0",
|
||||
"uv==0.8.9",
|
||||
"voluptuous==0.15.2",
|
||||
@@ -80,7 +80,7 @@ dependencies = [
|
||||
"voluptuous-openapi==0.1.0",
|
||||
"yarl==1.20.1",
|
||||
"webrtc-models==0.3.0",
|
||||
"zeroconf==0.147.2",
|
||||
"zeroconf==1.0.0",
|
||||
]
|
||||
|
||||
[project.urls]
|
||||
|
||||
10
requirements.txt
generated
@@ -10,7 +10,7 @@ aiohttp_cors==0.8.1
|
||||
aiohttp-fast-zlib==0.3.0
|
||||
aiohttp-asyncmdnsresolver==0.1.1
|
||||
aiozoneinfo==0.2.3
|
||||
annotatedyaml==0.4.5
|
||||
annotatedyaml==1.0.2
|
||||
astral==2.2
|
||||
async-interrupt==1.2.2
|
||||
attrs==25.3.0
|
||||
@@ -21,7 +21,7 @@ bcrypt==5.0.0
|
||||
certifi>=2021.5.30
|
||||
ciso8601==2.3.3
|
||||
cronsim==2.6
|
||||
fnv-hash-fast==1.5.0
|
||||
fnv-hash-fast==1.6.0
|
||||
hass-nabucasa==1.2.0
|
||||
httpx==0.28.1
|
||||
home-assistant-bluetooth==1.13.1
|
||||
@@ -37,14 +37,14 @@ orjson==3.11.3
|
||||
packaging>=23.1
|
||||
psutil-home-assistant==0.0.1
|
||||
python-slugify==8.0.4
|
||||
PyYAML==6.0.2
|
||||
PyYAML==6.0.3
|
||||
requests==2.32.5
|
||||
securetar==2025.2.1
|
||||
SQLAlchemy==2.0.41
|
||||
standard-aifc==3.13.0
|
||||
standard-telnetlib==3.13.0
|
||||
typing-extensions>=4.15.0,<5.0
|
||||
ulid-transform==1.4.0
|
||||
ulid-transform==1.5.2
|
||||
urllib3>=2.0
|
||||
uv==0.8.9
|
||||
voluptuous==0.15.2
|
||||
@@ -52,4 +52,4 @@ voluptuous-serialize==2.7.0
|
||||
voluptuous-openapi==0.1.0
|
||||
yarl==1.20.1
|
||||
webrtc-models==0.3.0
|
||||
zeroconf==0.147.2
|
||||
zeroconf==1.0.0
|
||||
|
||||
35
requirements_all.txt
generated
@@ -185,7 +185,7 @@ aioairzone-cloud==0.7.2
|
||||
aioairzone==1.0.1
|
||||
|
||||
# homeassistant.components.alexa_devices
|
||||
aioamazondevices==6.2.7
|
||||
aioamazondevices==6.2.8
|
||||
|
||||
# homeassistant.components.ambient_network
|
||||
# homeassistant.components.ambient_station
|
||||
@@ -456,7 +456,7 @@ airly==1.1.0
|
||||
airos==0.5.4
|
||||
|
||||
# homeassistant.components.airthings_ble
|
||||
airthings-ble==0.9.2
|
||||
airthings-ble==1.1.1
|
||||
|
||||
# homeassistant.components.airthings
|
||||
airthings-cloud==0.2.0
|
||||
@@ -627,7 +627,7 @@ bimmer-connected[china]==0.17.3
|
||||
bizkaibus==0.1.1
|
||||
|
||||
# homeassistant.components.esphome
|
||||
bleak-esphome==3.3.0
|
||||
bleak-esphome==3.4.0
|
||||
|
||||
# homeassistant.components.bluetooth
|
||||
bleak-retry-connector==4.4.3
|
||||
@@ -663,7 +663,7 @@ bluetooth-auto-recovery==1.5.3
|
||||
# homeassistant.components.ld2410_ble
|
||||
# homeassistant.components.led_ble
|
||||
# homeassistant.components.private_ble_device
|
||||
bluetooth-data-tools==1.28.2
|
||||
bluetooth-data-tools==1.28.3
|
||||
|
||||
# homeassistant.components.bond
|
||||
bond-async==0.2.1
|
||||
@@ -712,7 +712,7 @@ btsmarthub-devicelist==0.2.3
|
||||
buienradar==1.0.6
|
||||
|
||||
# homeassistant.components.dhcp
|
||||
cached-ipaddress==0.10.0
|
||||
cached-ipaddress==1.0.1
|
||||
|
||||
# homeassistant.components.caldav
|
||||
caldav==1.6.0
|
||||
@@ -770,7 +770,7 @@ datadog==0.52.0
|
||||
datapoint==0.12.1
|
||||
|
||||
# homeassistant.components.bluetooth
|
||||
dbus-fast==2.44.3
|
||||
dbus-fast==2.44.5
|
||||
|
||||
# homeassistant.components.debugpy
|
||||
debugpy==1.8.16
|
||||
@@ -972,7 +972,7 @@ flux-led==1.2.0
|
||||
|
||||
# homeassistant.components.homekit
|
||||
# homeassistant.components.recorder
|
||||
fnv-hash-fast==1.5.0
|
||||
fnv-hash-fast==1.6.0
|
||||
|
||||
# homeassistant.components.foobot
|
||||
foobot_async==1.0.0
|
||||
@@ -1142,7 +1142,7 @@ ha-silabs-firmware-client==0.2.0
|
||||
habiticalib==0.4.5
|
||||
|
||||
# homeassistant.components.bluetooth
|
||||
habluetooth==5.6.4
|
||||
habluetooth==5.7.0
|
||||
|
||||
# homeassistant.components.cloud
|
||||
hass-nabucasa==1.2.0
|
||||
@@ -1218,9 +1218,6 @@ iaqualink==0.6.0
|
||||
# homeassistant.components.ibeacon
|
||||
ibeacon-ble==1.2.0
|
||||
|
||||
# homeassistant.components.watson_iot
|
||||
ibmiotf==0.3.4
|
||||
|
||||
# homeassistant.components.google
|
||||
# homeassistant.components.local_calendar
|
||||
# homeassistant.components.local_todo
|
||||
@@ -1361,7 +1358,7 @@ letpot==0.6.2
|
||||
libpyfoscamcgi==0.0.7
|
||||
|
||||
# homeassistant.components.vivotek
|
||||
libpyvivotek==0.4.0
|
||||
libpyvivotek==0.6.1
|
||||
|
||||
# homeassistant.components.libre_hardware_monitor
|
||||
librehardwaremonitor-api==1.4.0
|
||||
@@ -1725,9 +1722,6 @@ plexwebsocket==0.0.14
|
||||
# homeassistant.components.plugwise
|
||||
plugwise==1.7.8
|
||||
|
||||
# homeassistant.components.plum_lightpad
|
||||
plumlightpad==0.0.11
|
||||
|
||||
# homeassistant.components.serial_pm
|
||||
pmsensor==0.4
|
||||
|
||||
@@ -2000,7 +1994,7 @@ pyegps==0.2.5
|
||||
pyemoncms==0.1.3
|
||||
|
||||
# homeassistant.components.enphase_envoy
|
||||
pyenphase==2.3.0
|
||||
pyenphase==2.4.0
|
||||
|
||||
# homeassistant.components.envisalink
|
||||
pyenvisalink==4.7
|
||||
@@ -2219,7 +2213,7 @@ pynina==0.3.6
|
||||
pynobo==1.8.1
|
||||
|
||||
# homeassistant.components.nordpool
|
||||
pynordpool==0.3.0
|
||||
pynordpool==0.3.1
|
||||
|
||||
# homeassistant.components.nuki
|
||||
pynuki==1.6.3
|
||||
@@ -2550,7 +2544,7 @@ python-rabbitair==0.0.8
|
||||
python-ripple-api==0.0.3
|
||||
|
||||
# homeassistant.components.roborock
|
||||
python-roborock==2.49.1
|
||||
python-roborock==2.50.2
|
||||
|
||||
# homeassistant.components.smarttub
|
||||
python-smarttub==0.0.44
|
||||
@@ -3118,9 +3112,6 @@ vsure==2.6.7
|
||||
# homeassistant.components.vasttrafik
|
||||
vtjp==0.2.1
|
||||
|
||||
# homeassistant.components.vultr
|
||||
vultr==0.1.2
|
||||
|
||||
# homeassistant.components.samsungtv
|
||||
# homeassistant.components.wake_on_lan
|
||||
wakeonlan==3.1.0
|
||||
@@ -3238,7 +3229,7 @@ zamg==0.3.6
|
||||
zcc-helper==3.7
|
||||
|
||||
# homeassistant.components.zeroconf
|
||||
zeroconf==0.147.2
|
||||
zeroconf==1.0.0
|
||||
|
||||
# homeassistant.components.zeversolar
|
||||
zeversolar==0.3.2
|
||||
|
||||
30
requirements_test_all.txt
generated
@@ -173,7 +173,7 @@ aioairzone-cloud==0.7.2
|
||||
aioairzone==1.0.1
|
||||
|
||||
# homeassistant.components.alexa_devices
|
||||
aioamazondevices==6.2.7
|
||||
aioamazondevices==6.2.8
|
||||
|
||||
# homeassistant.components.ambient_network
|
||||
# homeassistant.components.ambient_station
|
||||
@@ -438,7 +438,7 @@ airly==1.1.0
|
||||
airos==0.5.4
|
||||
|
||||
# homeassistant.components.airthings_ble
|
||||
airthings-ble==0.9.2
|
||||
airthings-ble==1.1.1
|
||||
|
||||
# homeassistant.components.airthings
|
||||
airthings-cloud==0.2.0
|
||||
@@ -561,7 +561,7 @@ beautifulsoup4==4.13.3
|
||||
bimmer-connected[china]==0.17.3
|
||||
|
||||
# homeassistant.components.esphome
|
||||
bleak-esphome==3.3.0
|
||||
bleak-esphome==3.4.0
|
||||
|
||||
# homeassistant.components.bluetooth
|
||||
bleak-retry-connector==4.4.3
|
||||
@@ -594,7 +594,7 @@ bluetooth-auto-recovery==1.5.3
|
||||
# homeassistant.components.ld2410_ble
|
||||
# homeassistant.components.led_ble
|
||||
# homeassistant.components.private_ble_device
|
||||
bluetooth-data-tools==1.28.2
|
||||
bluetooth-data-tools==1.28.3
|
||||
|
||||
# homeassistant.components.bond
|
||||
bond-async==0.2.1
|
||||
@@ -630,7 +630,7 @@ bthome-ble==3.14.2
|
||||
buienradar==1.0.6
|
||||
|
||||
# homeassistant.components.dhcp
|
||||
cached-ipaddress==0.10.0
|
||||
cached-ipaddress==1.0.1
|
||||
|
||||
# homeassistant.components.caldav
|
||||
caldav==1.6.0
|
||||
@@ -673,7 +673,7 @@ datadog==0.52.0
|
||||
datapoint==0.12.1
|
||||
|
||||
# homeassistant.components.bluetooth
|
||||
dbus-fast==2.44.3
|
||||
dbus-fast==2.44.5
|
||||
|
||||
# homeassistant.components.debugpy
|
||||
debugpy==1.8.16
|
||||
@@ -848,7 +848,7 @@ flux-led==1.2.0
|
||||
|
||||
# homeassistant.components.homekit
|
||||
# homeassistant.components.recorder
|
||||
fnv-hash-fast==1.5.0
|
||||
fnv-hash-fast==1.6.0
|
||||
|
||||
# homeassistant.components.foobot
|
||||
foobot_async==1.0.0
|
||||
@@ -1003,7 +1003,7 @@ ha-silabs-firmware-client==0.2.0
|
||||
habiticalib==0.4.5
|
||||
|
||||
# homeassistant.components.bluetooth
|
||||
habluetooth==5.6.4
|
||||
habluetooth==5.7.0
|
||||
|
||||
# homeassistant.components.cloud
|
||||
hass-nabucasa==1.2.0
|
||||
@@ -1463,9 +1463,6 @@ plexwebsocket==0.0.14
|
||||
# homeassistant.components.plugwise
|
||||
plugwise==1.7.8
|
||||
|
||||
# homeassistant.components.plum_lightpad
|
||||
plumlightpad==0.0.11
|
||||
|
||||
# homeassistant.components.poolsense
|
||||
poolsense==0.0.8
|
||||
|
||||
@@ -1675,7 +1672,7 @@ pyegps==0.2.5
|
||||
pyemoncms==0.1.3
|
||||
|
||||
# homeassistant.components.enphase_envoy
|
||||
pyenphase==2.3.0
|
||||
pyenphase==2.4.0
|
||||
|
||||
# homeassistant.components.everlights
|
||||
pyeverlights==0.1.0
|
||||
@@ -1852,7 +1849,7 @@ pynina==0.3.6
|
||||
pynobo==1.8.1
|
||||
|
||||
# homeassistant.components.nordpool
|
||||
pynordpool==0.3.0
|
||||
pynordpool==0.3.1
|
||||
|
||||
# homeassistant.components.nuki
|
||||
pynuki==1.6.3
|
||||
@@ -2120,7 +2117,7 @@ python-pooldose==0.5.0
|
||||
python-rabbitair==0.0.8
|
||||
|
||||
# homeassistant.components.roborock
|
||||
python-roborock==2.49.1
|
||||
python-roborock==2.50.2
|
||||
|
||||
# homeassistant.components.smarttub
|
||||
python-smarttub==0.0.44
|
||||
@@ -2583,9 +2580,6 @@ volvocarsapi==0.4.2
|
||||
# homeassistant.components.verisure
|
||||
vsure==2.6.7
|
||||
|
||||
# homeassistant.components.vultr
|
||||
vultr==0.1.2
|
||||
|
||||
# homeassistant.components.samsungtv
|
||||
# homeassistant.components.wake_on_lan
|
||||
wakeonlan==3.1.0
|
||||
@@ -2685,7 +2679,7 @@ zamg==0.3.6
|
||||
zcc-helper==3.7
|
||||
|
||||
# homeassistant.components.zeroconf
|
||||
zeroconf==0.147.2
|
||||
zeroconf==1.0.0
|
||||
|
||||
# homeassistant.components.zeversolar
|
||||
zeversolar==0.3.2
|
||||
|
||||
@@ -113,9 +113,9 @@ httplib2>=0.19.0
|
||||
# gRPC is an implicit dependency that we want to make explicit so we manage
|
||||
# upgrades intentionally. It is a large package to build from source and we
|
||||
# want to ensure we have wheels built.
|
||||
grpcio==1.72.1
|
||||
grpcio-status==1.72.1
|
||||
grpcio-reflection==1.72.1
|
||||
grpcio==1.75.1
|
||||
grpcio-status==1.75.1
|
||||
grpcio-reflection==1.75.1
|
||||
|
||||
# This is an old unmaintained library and is replaced with pycryptodome
|
||||
pycrypto==1000000000.0.0
|
||||
|
||||
@@ -1062,13 +1062,11 @@ INTEGRATIONS_WITHOUT_QUALITY_SCALE_FILE = [
|
||||
"volkszaehler",
|
||||
"volumio",
|
||||
"volvooncall",
|
||||
"vultr",
|
||||
"w800rf32",
|
||||
"wake_on_lan",
|
||||
"wallbox",
|
||||
"waqi",
|
||||
"waterfurnace",
|
||||
"watson_iot",
|
||||
"watson_tts",
|
||||
"watttime",
|
||||
"waze_travel_time",
|
||||
@@ -2112,13 +2110,11 @@ INTEGRATIONS_WITHOUT_SCALE = [
|
||||
"volkszaehler",
|
||||
"volumio",
|
||||
"volvooncall",
|
||||
"vultr",
|
||||
"w800rf32",
|
||||
"wake_on_lan",
|
||||
"wallbox",
|
||||
"waqi",
|
||||
"waterfurnace",
|
||||
"watson_iot",
|
||||
"watson_tts",
|
||||
"watttime",
|
||||
"waze_travel_time",
|
||||
|
||||
@@ -120,7 +120,7 @@ def create_client_mock(
|
||||
async def init_integration(
|
||||
hass: HomeAssistant,
|
||||
config_entry: MockConfigEntry,
|
||||
platform="",
|
||||
platform: str | None = None,
|
||||
charge_point: dict | None = None,
|
||||
status: dict | None = None,
|
||||
grid: dict | None = None,
|
||||
@@ -136,6 +136,10 @@ async def init_integration(
|
||||
if grid is None:
|
||||
grid = {}
|
||||
|
||||
platforms = [platform] if platform else []
|
||||
if platform:
|
||||
platforms.append(platform)
|
||||
|
||||
future_container = FutureContainer(hass.loop.create_future())
|
||||
started_loop = Event()
|
||||
|
||||
@@ -144,7 +148,7 @@ async def init_integration(
|
||||
)
|
||||
|
||||
with (
|
||||
patch("homeassistant.components.blue_current.PLATFORMS", [platform]),
|
||||
patch("homeassistant.components.blue_current.PLATFORMS", platforms),
|
||||
patch("homeassistant.components.blue_current.Client", return_value=client_mock),
|
||||
):
|
||||
config_entry.add_to_hass(hass)
|
||||
|
||||
@@ -193,6 +193,22 @@ def _load_json_2_meter_data(
|
||||
mocked_data: EnvoyData, json_fixture: dict[str, Any]
|
||||
) -> None:
|
||||
"""Fill envoy meter data from fixture."""
|
||||
if meters := json_fixture["data"].get("ctmeters"):
|
||||
mocked_data.ctmeters = {}
|
||||
[
|
||||
mocked_data.ctmeters.update({meter: EnvoyMeterData(**meter_data)})
|
||||
for meter, meter_data in meters.items()
|
||||
]
|
||||
if meters := json_fixture["data"].get("ctmeters_phases"):
|
||||
mocked_data.ctmeters_phases = {}
|
||||
for meter, meter_data in meters.items():
|
||||
meter_phase_data: dict[str, EnvoyMeterData] = {}
|
||||
[
|
||||
meter_phase_data.update({phase: EnvoyMeterData(**phase_data)})
|
||||
for phase, phase_data in meter_data.items()
|
||||
]
|
||||
mocked_data.ctmeters_phases.update({meter: meter_phase_data})
|
||||
|
||||
if item := json_fixture["data"].get("ctmeter_production"):
|
||||
mocked_data.ctmeter_production = EnvoyMeterData(**item)
|
||||
if item := json_fixture["data"].get("ctmeter_consumption"):
|
||||
|
||||
@@ -27,6 +27,8 @@
|
||||
"system_consumption_phases": null,
|
||||
"system_net_consumption_phases": null,
|
||||
"system_production_phases": null,
|
||||
"ctmeters": {},
|
||||
"ctmeters_phases": {},
|
||||
"ctmeter_production": null,
|
||||
"ctmeter_consumption": null,
|
||||
"ctmeter_storage": null,
|
||||
|
||||
@@ -37,6 +37,39 @@
|
||||
"system_consumption_phases": null,
|
||||
"system_net_consumption_phases": null,
|
||||
"system_production_phases": null,
|
||||
"ctmeters": {
|
||||
"production": {
|
||||
"eid": "100000010",
|
||||
"timestamp": 1708006110,
|
||||
"energy_delivered": 11234,
|
||||
"energy_received": 12345,
|
||||
"active_power": 100,
|
||||
"power_factor": 0.11,
|
||||
"voltage": 111,
|
||||
"current": 0.2,
|
||||
"frequency": 50.1,
|
||||
"state": "enabled",
|
||||
"measurement_type": "production",
|
||||
"metering_status": "normal",
|
||||
"status_flags": ["production-imbalance", "power-on-unused-phase"]
|
||||
},
|
||||
"net-consumption": {
|
||||
"eid": "100000020",
|
||||
"timestamp": 1708006120,
|
||||
"energy_delivered": 21234,
|
||||
"energy_received": 22345,
|
||||
"active_power": 101,
|
||||
"power_factor": 0.21,
|
||||
"voltage": 112,
|
||||
"current": 0.3,
|
||||
"frequency": 50.2,
|
||||
"state": "enabled",
|
||||
"measurement_type": "net-consumption",
|
||||
"metering_status": "normal",
|
||||
"status_flags": []
|
||||
}
|
||||
},
|
||||
"ctmeters_phases": {},
|
||||
"ctmeter_production": {
|
||||
"eid": "100000010",
|
||||
"timestamp": 1708006110,
|
||||
|
||||
@@ -87,6 +87,134 @@
|
||||
"watts_now": 2341
|
||||
},
|
||||
"system_net_consumption_phases": null,
|
||||
"ctmeters": {
|
||||
"production": {
|
||||
"eid": "100000010",
|
||||
"timestamp": 1708006110,
|
||||
"energy_delivered": 11234,
|
||||
"energy_received": 12345,
|
||||
"active_power": 100,
|
||||
"power_factor": 0.11,
|
||||
"voltage": 111,
|
||||
"current": 0.2,
|
||||
"frequency": 50.1,
|
||||
"state": "enabled",
|
||||
"measurement_type": "production",
|
||||
"metering_status": "normal",
|
||||
"status_flags": ["production-imbalance", "power-on-unused-phase"]
|
||||
},
|
||||
"net-consumption": {
|
||||
"eid": "100000020",
|
||||
"timestamp": 1708006120,
|
||||
"energy_delivered": 21234,
|
||||
"energy_received": 22345,
|
||||
"active_power": 101,
|
||||
"power_factor": 0.21,
|
||||
"voltage": 112,
|
||||
"current": 0.3,
|
||||
"frequency": 50.2,
|
||||
"state": "enabled",
|
||||
"measurement_type": "net-consumption",
|
||||
"metering_status": "normal",
|
||||
"status_flags": []
|
||||
}
|
||||
},
|
||||
"ctmeters_phases": {
|
||||
"production": {
|
||||
"L1": {
|
||||
"eid": "100000011",
|
||||
"timestamp": 1708006111,
|
||||
"energy_delivered": 112341,
|
||||
"energy_received": 123451,
|
||||
"active_power": 20,
|
||||
"power_factor": 0.12,
|
||||
"voltage": 111,
|
||||
"current": 0.2,
|
||||
"frequency": 50.1,
|
||||
"state": "enabled",
|
||||
"measurement_type": "production",
|
||||
"metering_status": "normal",
|
||||
"status_flags": ["production-imbalance"]
|
||||
},
|
||||
"L2": {
|
||||
"eid": "100000012",
|
||||
"timestamp": 1708006112,
|
||||
"energy_delivered": 112342,
|
||||
"energy_received": 123452,
|
||||
"active_power": 30,
|
||||
"power_factor": 0.13,
|
||||
"voltage": 111,
|
||||
"current": 0.2,
|
||||
"frequency": 50.1,
|
||||
"state": "enabled",
|
||||
"measurement_type": "production",
|
||||
"metering_status": "normal",
|
||||
"status_flags": ["power-on-unused-phase"]
|
||||
},
|
||||
"L3": {
|
||||
"eid": "100000013",
|
||||
"timestamp": 1708006113,
|
||||
"energy_delivered": 112343,
|
||||
"energy_received": 123453,
|
||||
"active_power": 50,
|
||||
"power_factor": 0.14,
|
||||
"voltage": 111,
|
||||
"current": 0.2,
|
||||
"frequency": 50.1,
|
||||
"state": "enabled",
|
||||
"measurement_type": "production",
|
||||
"metering_status": "normal",
|
||||
"status_flags": []
|
||||
}
|
||||
},
|
||||
"net-consumption": {
|
||||
"L1": {
|
||||
"eid": "100000021",
|
||||
"timestamp": 1708006121,
|
||||
"energy_delivered": 212341,
|
||||
"energy_received": 223451,
|
||||
"active_power": 21,
|
||||
"power_factor": 0.22,
|
||||
"voltage": 112,
|
||||
"current": 0.3,
|
||||
"frequency": 50.2,
|
||||
"state": "enabled",
|
||||
"measurement_type": "net-consumption",
|
||||
"metering_status": "normal",
|
||||
"status_flags": []
|
||||
},
|
||||
"L2": {
|
||||
"eid": "100000022",
|
||||
"timestamp": 1708006122,
|
||||
"energy_delivered": 212342,
|
||||
"energy_received": 223452,
|
||||
"active_power": 31,
|
||||
"power_factor": 0.23,
|
||||
"voltage": 112,
|
||||
"current": 0.3,
|
||||
"frequency": 50.2,
|
||||
"state": "enabled",
|
||||
"measurement_type": "net-consumption",
|
||||
"metering_status": "normal",
|
||||
"status_flags": []
|
||||
},
|
||||
"L3": {
|
||||
"eid": "100000023",
|
||||
"timestamp": 1708006123,
|
||||
"energy_delivered": 212343,
|
||||
"energy_received": 223453,
|
||||
"active_power": 51,
|
||||
"power_factor": 0.24,
|
||||
"voltage": 112,
|
||||
"current": 0.3,
|
||||
"frequency": 50.2,
|
||||
"state": "enabled",
|
||||
"measurement_type": "net-consumption",
|
||||
"metering_status": "normal",
|
||||
"status_flags": []
|
||||
}
|
||||
}
|
||||
},
|
||||
"ctmeter_production": {
|
||||
"eid": "100000010",
|
||||
"timestamp": 1708006110,
|
||||
|
||||
@@ -75,6 +75,134 @@
|
||||
"watts_now": 2341
|
||||
},
|
||||
"system_net_consumption_phases": null,
|
||||
"ctmeters": {
|
||||
"production": {
|
||||
"eid": "100000010",
|
||||
"timestamp": 1708006110,
|
||||
"energy_delivered": 11234,
|
||||
"energy_received": 12345,
|
||||
"active_power": 100,
|
||||
"power_factor": 0.11,
|
||||
"voltage": 111,
|
||||
"current": 0.2,
|
||||
"frequency": 50.1,
|
||||
"state": "enabled",
|
||||
"measurement_type": "production",
|
||||
"metering_status": "normal",
|
||||
"status_flags": ["production-imbalance", "power-on-unused-phase"]
|
||||
},
|
||||
"net-consumption": {
|
||||
"eid": "100000020",
|
||||
"timestamp": 1708006120,
|
||||
"energy_delivered": 21234,
|
||||
"energy_received": 22345,
|
||||
"active_power": 101,
|
||||
"power_factor": 0.21,
|
||||
"voltage": 112,
|
||||
"current": 0.3,
|
||||
"frequency": 50.2,
|
||||
"state": "enabled",
|
||||
"measurement_type": "net-consumption",
|
||||
"metering_status": "normal",
|
||||
"status_flags": []
|
||||
}
|
||||
},
|
||||
"ctmeters_phases": {
|
||||
"production": {
|
||||
"L1": {
|
||||
"eid": "100000011",
|
||||
"timestamp": 1708006111,
|
||||
"energy_delivered": 112341,
|
||||
"energy_received": 123451,
|
||||
"active_power": 20,
|
||||
"power_factor": 0.12,
|
||||
"voltage": 111,
|
||||
"current": 0.2,
|
||||
"frequency": 50.1,
|
||||
"state": "enabled",
|
||||
"measurement_type": "production",
|
||||
"metering_status": "normal",
|
||||
"status_flags": ["production-imbalance"]
|
||||
},
|
||||
"L2": {
|
||||
"eid": "100000012",
|
||||
"timestamp": 1708006112,
|
||||
"energy_delivered": 112342,
|
||||
"energy_received": 123452,
|
||||
"active_power": 30,
|
||||
"power_factor": 0.13,
|
||||
"voltage": 111,
|
||||
"current": 0.2,
|
||||
"frequency": 50.1,
|
||||
"state": "enabled",
|
||||
"measurement_type": "production",
|
||||
"metering_status": "normal",
|
||||
"status_flags": ["power-on-unused-phase"]
|
||||
},
|
||||
"L3": {
|
||||
"eid": "100000013",
|
||||
"timestamp": 1708006113,
|
||||
"energy_delivered": 112343,
|
||||
"energy_received": 123453,
|
||||
"active_power": 50,
|
||||
"power_factor": 0.14,
|
||||
"voltage": 111,
|
||||
"current": 0.2,
|
||||
"frequency": 50.1,
|
||||
"state": "enabled",
|
||||
"measurement_type": "production",
|
||||
"metering_status": "normal",
|
||||
"status_flags": []
|
||||
}
|
||||
},
|
||||
"net-consumption": {
|
||||
"L1": {
|
||||
"eid": "100000021",
|
||||
"timestamp": 1708006121,
|
||||
"energy_delivered": 212341,
|
||||
"energy_received": 223451,
|
||||
"active_power": 21,
|
||||
"power_factor": 0.22,
|
||||
"voltage": 112,
|
||||
"current": 0.3,
|
||||
"frequency": 50.2,
|
||||
"state": "enabled",
|
||||
"measurement_type": "net-consumption",
|
||||
"metering_status": "normal",
|
||||
"status_flags": []
|
||||
},
|
||||
"L2": {
|
||||
"eid": "100000022",
|
||||
"timestamp": 1708006122,
|
||||
"energy_delivered": 212342,
|
||||
"energy_received": 223452,
|
||||
"active_power": 31,
|
||||
"power_factor": 0.23,
|
||||
"voltage": 112,
|
||||
"current": 0.3,
|
||||
"frequency": 50.2,
|
||||
"state": "enabled",
|
||||
"measurement_type": "net-consumption",
|
||||
"metering_status": "normal",
|
||||
"status_flags": []
|
||||
},
|
||||
"L3": {
|
||||
"eid": "100000023",
|
||||
"timestamp": 1708006123,
|
||||
"energy_delivered": 212343,
|
||||
"energy_received": 223453,
|
||||
"active_power": 51,
|
||||
"power_factor": 0.24,
|
||||
"voltage": 112,
|
||||
"current": 0.3,
|
||||
"frequency": 50.2,
|
||||
"state": "enabled",
|
||||
"measurement_type": "net-consumption",
|
||||
"metering_status": "normal",
|
||||
"status_flags": []
|
||||
}
|
||||
}
|
||||
},
|
||||
"ctmeter_production": {
|
||||
"eid": "100000010",
|
||||
"timestamp": 1708006110,
|
||||
|
||||
@@ -151,6 +151,196 @@
|
||||
"watts_now": 3234
|
||||
}
|
||||
},
|
||||
"ctmeters": {
|
||||
"production": {
|
||||
"eid": "100000010",
|
||||
"timestamp": 1708006110,
|
||||
"energy_delivered": 11234,
|
||||
"energy_received": 12345,
|
||||
"active_power": 100,
|
||||
"power_factor": 0.11,
|
||||
"voltage": 111,
|
||||
"current": 0.2,
|
||||
"frequency": 50.1,
|
||||
"state": "enabled",
|
||||
"measurement_type": "production",
|
||||
"metering_status": "normal",
|
||||
"status_flags": ["production-imbalance", "power-on-unused-phase"]
|
||||
},
|
||||
"net-consumption": {
|
||||
"eid": "100000020",
|
||||
"timestamp": 1708006120,
|
||||
"energy_delivered": 21234,
|
||||
"energy_received": 22345,
|
||||
"active_power": 101,
|
||||
"power_factor": 0.21,
|
||||
"voltage": 112,
|
||||
"current": 0.3,
|
||||
"frequency": 50.2,
|
||||
"state": "enabled",
|
||||
"measurement_type": "net-consumption",
|
||||
"metering_status": "normal",
|
||||
"status_flags": []
|
||||
},
|
||||
"storage": {
|
||||
"eid": "100000030",
|
||||
"timestamp": 1708006120,
|
||||
"energy_delivered": 31234,
|
||||
"energy_received": 32345,
|
||||
"active_power": 103,
|
||||
"power_factor": 0.23,
|
||||
"voltage": 113,
|
||||
"current": 0.4,
|
||||
"frequency": 50.3,
|
||||
"state": "enabled",
|
||||
"measurement_type": "storage",
|
||||
"metering_status": "normal",
|
||||
"status_flags": []
|
||||
}
|
||||
},
|
||||
"ctmeters_phases": {
|
||||
"production": {
|
||||
"L1": {
|
||||
"eid": "100000011",
|
||||
"timestamp": 1708006111,
|
||||
"energy_delivered": 112341,
|
||||
"energy_received": 123451,
|
||||
"active_power": 20,
|
||||
"power_factor": 0.12,
|
||||
"voltage": 111,
|
||||
"current": 0.2,
|
||||
"frequency": 50.1,
|
||||
"state": "enabled",
|
||||
"measurement_type": "production",
|
||||
"metering_status": "normal",
|
||||
"status_flags": ["production-imbalance"]
|
||||
},
|
||||
"L2": {
|
||||
"eid": "100000012",
|
||||
"timestamp": 1708006112,
|
||||
"energy_delivered": 112342,
|
||||
"energy_received": 123452,
|
||||
"active_power": 30,
|
||||
"power_factor": 0.13,
|
||||
"voltage": 111,
|
||||
"current": 0.2,
|
||||
"frequency": 50.1,
|
||||
"state": "enabled",
|
||||
"measurement_type": "production",
|
||||
"metering_status": "normal",
|
||||
"status_flags": ["power-on-unused-phase"]
|
||||
},
|
||||
"L3": {
|
||||
"eid": "100000013",
|
||||
"timestamp": 1708006113,
|
||||
"energy_delivered": 112343,
|
||||
"energy_received": 123453,
|
||||
"active_power": 50,
|
||||
"power_factor": 0.14,
|
||||
"voltage": 111,
|
||||
"current": 0.2,
|
||||
"frequency": 50.1,
|
||||
"state": "enabled",
|
||||
"measurement_type": "production",
|
||||
"metering_status": "normal",
|
||||
"status_flags": []
|
||||
}
|
||||
},
|
||||
"net-consumption": {
|
||||
"L1": {
|
||||
"eid": "100000021",
|
||||
"timestamp": 1708006121,
|
||||
"energy_delivered": 212341,
|
||||
"energy_received": 223451,
|
||||
"active_power": 21,
|
||||
"power_factor": 0.22,
|
||||
"voltage": 112,
|
||||
"current": 0.3,
|
||||
"frequency": 50.2,
|
||||
"state": "enabled",
|
||||
"measurement_type": "net-consumption",
|
||||
"metering_status": "normal",
|
||||
"status_flags": []
|
||||
},
|
||||
"L2": {
|
||||
"eid": "100000022",
|
||||
"timestamp": 1708006122,
|
||||
"energy_delivered": 212342,
|
||||
"energy_received": 223452,
|
||||
"active_power": 31,
|
||||
"power_factor": 0.23,
|
||||
"voltage": 112,
|
||||
"current": 0.3,
|
||||
"frequency": 50.2,
|
||||
"state": "enabled",
|
||||
"measurement_type": "net-consumption",
|
||||
"metering_status": "normal",
|
||||
"status_flags": []
|
||||
},
|
||||
"L3": {
|
||||
"eid": "100000023",
|
||||
"timestamp": 1708006123,
|
||||
"energy_delivered": 212343,
|
||||
"energy_received": 223453,
|
||||
"active_power": 51,
|
||||
"power_factor": 0.24,
|
||||
"voltage": 112,
|
||||
"current": 0.3,
|
||||
"frequency": 50.2,
|
||||
"state": "enabled",
|
||||
"measurement_type": "net-consumption",
|
||||
"metering_status": "normal",
|
||||
"status_flags": []
|
||||
}
|
||||
},
|
||||
"storage": {
|
||||
"L1": {
|
||||
"eid": "100000031",
|
||||
"timestamp": 1708006121,
|
||||
"energy_delivered": 312341,
|
||||
"energy_received": 323451,
|
||||
"active_power": 22,
|
||||
"power_factor": 0.32,
|
||||
"voltage": 113,
|
||||
"current": 0.4,
|
||||
"frequency": 50.3,
|
||||
"state": "enabled",
|
||||
"measurement_type": "storage",
|
||||
"metering_status": "normal",
|
||||
"status_flags": []
|
||||
},
|
||||
"L2": {
|
||||
"eid": "100000032",
|
||||
"timestamp": 1708006122,
|
||||
"energy_delivered": 312342,
|
||||
"energy_received": 323452,
|
||||
"active_power": 33,
|
||||
"power_factor": 0.23,
|
||||
"voltage": 112,
|
||||
"current": 0.3,
|
||||
"frequency": 50.2,
|
||||
"state": "enabled",
|
||||
"measurement_type": "storage",
|
||||
"metering_status": "normal",
|
||||
"status_flags": []
|
||||
},
|
||||
"L3": {
|
||||
"eid": "100000033",
|
||||
"timestamp": 1708006123,
|
||||
"energy_delivered": 312343,
|
||||
"energy_received": 323453,
|
||||
"active_power": 53,
|
||||
"power_factor": 0.24,
|
||||
"voltage": 112,
|
||||
"current": 0.3,
|
||||
"frequency": 50.2,
|
||||
"state": "enabled",
|
||||
"measurement_type": "storage",
|
||||
"metering_status": "normal",
|
||||
"status_flags": []
|
||||
}
|
||||
}
|
||||
},
|
||||
"ctmeter_production": {
|
||||
"eid": "100000010",
|
||||
"timestamp": 1708006110,
|
||||
|
||||
@@ -94,6 +94,134 @@
|
||||
"watts_now": 3234
|
||||
}
|
||||
},
|
||||
"ctmeters": {
|
||||
"production": {
|
||||
"eid": "100000010",
|
||||
"timestamp": 1708006110,
|
||||
"energy_delivered": 11234,
|
||||
"energy_received": 12345,
|
||||
"active_power": 100,
|
||||
"power_factor": 0.11,
|
||||
"voltage": 111,
|
||||
"current": 0.2,
|
||||
"frequency": 50.1,
|
||||
"state": "enabled",
|
||||
"measurement_type": "production",
|
||||
"metering_status": "normal",
|
||||
"status_flags": ["production-imbalance", "power-on-unused-phase"]
|
||||
},
|
||||
"net-consumption": {
|
||||
"eid": "100000020",
|
||||
"timestamp": 1708006120,
|
||||
"energy_delivered": 21234,
|
||||
"energy_received": 22345,
|
||||
"active_power": 101,
|
||||
"power_factor": 0.21,
|
||||
"voltage": 112,
|
||||
"current": 0.3,
|
||||
"frequency": 50.2,
|
||||
"state": "enabled",
|
||||
"measurement_type": "net-consumption",
|
||||
"metering_status": "normal",
|
||||
"status_flags": []
|
||||
}
|
||||
},
|
||||
"ctmeters_phases": {
|
||||
"production": {
|
||||
"L1": {
|
||||
"eid": "100000011",
|
||||
"timestamp": 1708006111,
|
||||
"energy_delivered": 112341,
|
||||
"energy_received": 123451,
|
||||
"active_power": 20,
|
||||
"power_factor": 0.12,
|
||||
"voltage": 111,
|
||||
"current": 0.2,
|
||||
"frequency": 50.1,
|
||||
"state": "enabled",
|
||||
"measurement_type": "production",
|
||||
"metering_status": "normal",
|
||||
"status_flags": ["production-imbalance"]
|
||||
},
|
||||
"L2": {
|
||||
"eid": "100000012",
|
||||
"timestamp": 1708006112,
|
||||
"energy_delivered": 112342,
|
||||
"energy_received": 123452,
|
||||
"active_power": 30,
|
||||
"power_factor": 0.13,
|
||||
"voltage": 111,
|
||||
"current": 0.2,
|
||||
"frequency": 50.1,
|
||||
"state": "enabled",
|
||||
"measurement_type": "production",
|
||||
"metering_status": "normal",
|
||||
"status_flags": ["power-on-unused-phase"]
|
||||
},
|
||||
"L3": {
|
||||
"eid": "100000013",
|
||||
"timestamp": 1708006113,
|
||||
"energy_delivered": 112343,
|
||||
"energy_received": 123453,
|
||||
"active_power": 50,
|
||||
"power_factor": 0.14,
|
||||
"voltage": 111,
|
||||
"current": 0.2,
|
||||
"frequency": 50.1,
|
||||
"state": "enabled",
|
||||
"measurement_type": "production",
|
||||
"metering_status": "normal",
|
||||
"status_flags": []
|
||||
}
|
||||
},
|
||||
"net-consumption": {
|
||||
"L1": {
|
||||
"eid": "100000021",
|
||||
"timestamp": 1708006121,
|
||||
"energy_delivered": 212341,
|
||||
"energy_received": 223451,
|
||||
"active_power": 21,
|
||||
"power_factor": 0.22,
|
||||
"voltage": 112,
|
||||
"current": 0.3,
|
||||
"frequency": 50.2,
|
||||
"state": "enabled",
|
||||
"measurement_type": "net-consumption",
|
||||
"metering_status": "normal",
|
||||
"status_flags": []
|
||||
},
|
||||
"L2": {
|
||||
"eid": "100000022",
|
||||
"timestamp": 1708006122,
|
||||
"energy_delivered": 212342,
|
||||
"energy_received": 223452,
|
||||
"active_power": 31,
|
||||
"power_factor": 0.23,
|
||||
"voltage": 112,
|
||||
"current": 0.3,
|
||||
"frequency": 50.2,
|
||||
"state": "enabled",
|
||||
"measurement_type": "net-consumption",
|
||||
"metering_status": "normal",
|
||||
"status_flags": []
|
||||
},
|
||||
"L3": {
|
||||
"eid": "100000023",
|
||||
"timestamp": 1708006123,
|
||||
"energy_delivered": 212343,
|
||||
"energy_received": 223453,
|
||||
"active_power": 51,
|
||||
"power_factor": 0.24,
|
||||
"voltage": 112,
|
||||
"current": 0.3,
|
||||
"frequency": 50.2,
|
||||
"state": "enabled",
|
||||
"measurement_type": "net-consumption",
|
||||
"metering_status": "normal",
|
||||
"status_flags": []
|
||||
}
|
||||
}
|
||||
},
|
||||
"ctmeter_production": {
|
||||
"eid": "100000010",
|
||||
"timestamp": 1708006110,
|
||||
|
||||
@@ -32,6 +32,39 @@
|
||||
"system_consumption_phases": null,
|
||||
"system_net_consumption_phases": null,
|
||||
"system_production_phases": null,
|
||||
"ctmeters": {
|
||||
"production": {
|
||||
"eid": "100000010",
|
||||
"timestamp": 1708006110,
|
||||
"energy_delivered": 11234,
|
||||
"energy_received": 12345,
|
||||
"active_power": 100,
|
||||
"power_factor": 0.11,
|
||||
"voltage": 111,
|
||||
"current": 0.2,
|
||||
"frequency": 50.1,
|
||||
"state": "enabled",
|
||||
"measurement_type": "production",
|
||||
"metering_status": "normal",
|
||||
"status_flags": ["production-imbalance", "power-on-unused-phase"]
|
||||
},
|
||||
"total-consumption": {
|
||||
"eid": "100000020",
|
||||
"timestamp": 1708006120,
|
||||
"energy_delivered": 21234,
|
||||
"energy_received": 22345,
|
||||
"active_power": 101,
|
||||
"power_factor": 0.21,
|
||||
"voltage": 112,
|
||||
"current": 0.3,
|
||||
"frequency": 50.2,
|
||||
"state": "enabled",
|
||||
"measurement_type": "total-consumption",
|
||||
"metering_status": "normal",
|
||||
"status_flags": []
|
||||
}
|
||||
},
|
||||
"ctmeters_phases": {},
|
||||
"ctmeter_production": {
|
||||
"eid": "100000010",
|
||||
"timestamp": 1708006110,
|
||||
|
||||
Binary file not shown. (before: 70 B)
Binary file not shown. (before: 70 B)
@@ -87,7 +87,6 @@ async def test_form_login(hass: HomeAssistant, mock_setup_entry: AsyncMock) -> N
|
||||
result["flow_id"],
|
||||
user_input=MOCK_DATA_LOGIN_STEP,
|
||||
)
|
||||
await hass.async_block_till_done()
|
||||
|
||||
assert result["type"] is FlowResultType.CREATE_ENTRY
|
||||
assert result["title"] == "test-user"
|
||||
@@ -208,7 +207,6 @@ async def test_form_advanced(hass: HomeAssistant, mock_setup_entry: AsyncMock) -
|
||||
result["flow_id"],
|
||||
user_input=MOCK_DATA_ADVANCED_STEP,
|
||||
)
|
||||
await hass.async_block_till_done()
|
||||
|
||||
assert result["type"] is FlowResultType.CREATE_ENTRY
|
||||
assert result["title"] == "test-user"
|
||||
@@ -329,8 +327,6 @@ async def test_flow_reauth(
|
||||
user_input,
|
||||
)
|
||||
|
||||
await hass.async_block_till_done()
|
||||
|
||||
assert result["type"] is FlowResultType.ABORT
|
||||
assert result["reason"] == "reauth_successful"
|
||||
assert config_entry.data[CONF_API_KEY] == "cd0e5985-17de-4b4f-849e-5d506c5e4382"
|
||||
@@ -399,8 +395,6 @@ async def test_flow_reauth_errors(
|
||||
result["flow_id"], user_input
|
||||
)
|
||||
|
||||
await hass.async_block_till_done()
|
||||
|
||||
assert result["type"] is FlowResultType.FORM
|
||||
assert result["errors"] == {"base": text_error}
|
||||
|
||||
@@ -412,8 +406,6 @@ async def test_flow_reauth_errors(
|
||||
user_input=USER_INPUT_REAUTH_API_KEY,
|
||||
)
|
||||
|
||||
await hass.async_block_till_done()
|
||||
|
||||
assert result["type"] is FlowResultType.ABORT
|
||||
assert result["reason"] == "reauth_successful"
|
||||
assert config_entry.data[CONF_API_KEY] == "cd0e5985-17de-4b4f-849e-5d506c5e4382"
|
||||
@@ -446,8 +438,6 @@ async def test_flow_reauth_unique_id_mismatch(hass: HomeAssistant) -> None:
|
||||
USER_INPUT_REAUTH_LOGIN,
|
||||
)
|
||||
|
||||
await hass.async_block_till_done()
|
||||
|
||||
assert result["type"] is FlowResultType.ABORT
|
||||
assert result["reason"] == "unique_id_mismatch"
|
||||
|
||||
@@ -469,8 +459,6 @@ async def test_flow_reconfigure(
|
||||
USER_INPUT_RECONFIGURE,
|
||||
)
|
||||
|
||||
await hass.async_block_till_done()
|
||||
|
||||
assert result["type"] is FlowResultType.ABORT
|
||||
assert result["reason"] == "reconfigure_successful"
|
||||
assert config_entry.data[CONF_API_KEY] == "cd0e5985-17de-4b4f-849e-5d506c5e4382"
|
||||
@@ -507,8 +495,6 @@ async def test_flow_reconfigure_errors(
|
||||
USER_INPUT_RECONFIGURE,
|
||||
)
|
||||
|
||||
await hass.async_block_till_done()
|
||||
|
||||
assert result["type"] is FlowResultType.FORM
|
||||
assert result["errors"] == {"base": text_error}
|
||||
|
||||
@@ -519,8 +505,6 @@ async def test_flow_reconfigure_errors(
|
||||
user_input=USER_INPUT_RECONFIGURE,
|
||||
)
|
||||
|
||||
await hass.async_block_till_done()
|
||||
|
||||
assert result["type"] is FlowResultType.ABORT
|
||||
assert result["reason"] == "reconfigure_successful"
|
||||
assert config_entry.data[CONF_API_KEY] == "cd0e5985-17de-4b4f-849e-5d506c5e4382"
|
||||
|
||||
@@ -12,7 +12,6 @@ from habiticalib import HabiticaGroupsResponse, HabiticaUserResponse
|
||||
import pytest
|
||||
import respx
|
||||
from syrupy.assertion import SnapshotAssertion
|
||||
from syrupy.extensions.image import PNGImageSnapshotExtension
|
||||
|
||||
from homeassistant.components.habitica.const import ASSETS_URL, DOMAIN
|
||||
from homeassistant.config_entries import ConfigEntryState
|
||||
@@ -50,12 +49,8 @@ async def test_image_platform(
|
||||
"homeassistant.components.habitica.coordinator.BytesIO",
|
||||
) as avatar:
|
||||
avatar.side_effect = [
|
||||
BytesIO(
|
||||
b"\x89PNG\r\n\x1a\n\x00\x00\x00\rIHDR\x00\x00\x00\x01\x00\x00\x00\x01\x08\x06\x00\x00\x00\x1f\x15\xc4\x89\x00\x00\x00\rIDATx\xdac\xfc\xcf\xc0\xf0\x1f\x00\x05\x05\x02\x00_\xc8\xf1\xd2\x00\x00\x00\x00IEND\xaeB`\x82"
|
||||
),
|
||||
BytesIO(
|
||||
b"\x89PNG\r\n\x1a\n\x00\x00\x00\rIHDR\x00\x00\x00\x01\x00\x00\x00\x01\x08\x06\x00\x00\x00\x1f\x15\xc4\x89\x00\x00\x00\rIDATx\xdacd`\xf8\xff\x1f\x00\x03\x07\x02\x000&\xc7a\x00\x00\x00\x00IEND\xaeB`\x82"
|
||||
),
|
||||
BytesIO(b"\x89PNGTestImage1"),
|
||||
BytesIO(b"\x89PNGTestImage2"),
|
||||
]
|
||||
|
||||
config_entry.add_to_hass(hass)
|
||||
@@ -77,9 +72,7 @@ async def test_image_platform(
|
||||
resp = await client.get(state.attributes["entity_picture"])
|
||||
assert resp.status == HTTPStatus.OK
|
||||
|
||||
assert (await resp.read()) == snapshot(
|
||||
extension_class=PNGImageSnapshotExtension
|
||||
)
|
||||
assert (await resp.read()) == b"\x89PNGTestImage1"
|
||||
|
||||
habitica.get_user.return_value = HabiticaUserResponse.from_json(
|
||||
await async_load_fixture(hass, "rogue_fixture.json", DOMAIN)
|
||||
@@ -95,9 +88,7 @@ async def test_image_platform(
|
||||
resp = await client.get(state.attributes["entity_picture"])
|
||||
assert resp.status == HTTPStatus.OK
|
||||
|
||||
assert (await resp.read()) == snapshot(
|
||||
extension_class=PNGImageSnapshotExtension
|
||||
)
|
||||
assert (await resp.read()) == b"\x89PNGTestImage2"
|
||||
|
||||
|
||||
@pytest.mark.usefixtures("habitica")
|
||||
|
||||
@@ -1,5 +1,6 @@
"""The tests for the hassio switch."""

from collections.abc import AsyncGenerator
import os
from unittest.mock import AsyncMock, patch

@@ -18,6 +19,39 @@ from tests.test_util.aiohttp import AiohttpClientMocker
MOCK_ENVIRON = {"SUPERVISOR": "127.0.0.1", "SUPERVISOR_TOKEN": "abcdefgh"}


@pytest.fixture
async def setup_integration(
    hass: HomeAssistant,
    entity_registry: er.EntityRegistry,
) -> AsyncGenerator[MockConfigEntry]:
    """Set up the hassio integration and enable entity."""
    config_entry = MockConfigEntry(domain=DOMAIN, data={}, unique_id=DOMAIN)
    config_entry.add_to_hass(hass)

    with patch.dict(os.environ, MOCK_ENVIRON):
        result = await async_setup_component(
            hass,
            "hassio",
            {"http": {"server_port": 9999, "server_host": "127.0.0.1"}, "hassio": {}},
        )
        assert result
        await hass.async_block_till_done()

    yield config_entry


async def enable_entity(
    hass: HomeAssistant,
    entity_registry: er.EntityRegistry,
    config_entry: MockConfigEntry,
    entity_id: str,
) -> None:
    """Enable an entity and reload the config entry."""
    entity_registry.async_update_entity(entity_id, disabled_by=None)
    await hass.config_entries.async_reload(config_entry.entry_id)
    await hass.async_block_till_done()


@pytest.fixture(autouse=True)
def mock_all(
    aioclient_mock: AiohttpClientMocker,
@@ -170,31 +204,18 @@ async def test_switch_state(
    entity_id: str,
    expected: str,
    addon_state: str,
    aioclient_mock: AiohttpClientMocker,
    entity_registry: er.EntityRegistry,
    addon_installed: AsyncMock,
    setup_integration: MockConfigEntry,
) -> None:
    """Test hassio addon switch state."""
    addon_installed.return_value.state = addon_state
    config_entry = MockConfigEntry(domain=DOMAIN, data={}, unique_id=DOMAIN)
    config_entry.add_to_hass(hass)

    with patch.dict(os.environ, MOCK_ENVIRON):
        result = await async_setup_component(
            hass,
            "hassio",
            {"http": {"server_port": 9999, "server_host": "127.0.0.1"}, "hassio": {}},
        )
        assert result
        await hass.async_block_till_done()

    # Verify that the entity is disabled by default.
    assert hass.states.get(entity_id) is None

    # Enable the entity.
    entity_registry.async_update_entity(entity_id, disabled_by=None)
    await hass.config_entries.async_reload(config_entry.entry_id)
    await hass.async_block_till_done()
    await enable_entity(hass, entity_registry, setup_integration, entity_id)

    # Verify that the entity have the expected state.
    state = hass.states.get(entity_id)
@@ -210,6 +231,7 @@ async def test_switch_turn_on(
    aioclient_mock: AiohttpClientMocker,
    entity_registry: er.EntityRegistry,
    addon_installed: AsyncMock,
    setup_integration: MockConfigEntry,
) -> None:
    """Test turning on addon switch."""
    entity_id = "switch.test_two"
@@ -218,25 +240,11 @@ async def test_switch_turn_on(
    # Mock the start addon API call
    aioclient_mock.post("http://127.0.0.1/addons/test-two/start", json={"result": "ok"})

    config_entry = MockConfigEntry(domain=DOMAIN, data={}, unique_id=DOMAIN)
    config_entry.add_to_hass(hass)

    with patch.dict(os.environ, MOCK_ENVIRON):
        result = await async_setup_component(
            hass,
            "hassio",
            {"http": {"server_port": 9999, "server_host": "127.0.0.1"}, "hassio": {}},
        )
        assert result
        await hass.async_block_till_done()

    # Verify that the entity is disabled by default.
    assert hass.states.get(entity_id) is None

    # Enable the entity.
    entity_registry.async_update_entity(entity_id, disabled_by=None)
    await hass.config_entries.async_reload(config_entry.entry_id)
    await hass.async_block_till_done()
    await enable_entity(hass, entity_registry, setup_integration, entity_id)

    # Verify initial state is off
    state = hass.states.get(entity_id)
@@ -252,13 +260,8 @@ async def test_switch_turn_on(
    )

    # Verify the API was called
    assert len(aioclient_mock.mock_calls) > 0
    start_call_found = False
    for call in aioclient_mock.mock_calls:
        if call[1].path == "/addons/test-two/start" and call[0] == "POST":
            start_call_found = True
            break
    assert start_call_found
    assert aioclient_mock.mock_calls[-1][1].path == "/addons/test-two/start"
    assert aioclient_mock.mock_calls[-1][0] == "POST"


@pytest.mark.parametrize(
@@ -269,6 +272,7 @@ async def test_switch_turn_off(
    aioclient_mock: AiohttpClientMocker,
    entity_registry: er.EntityRegistry,
    addon_installed: AsyncMock,
    setup_integration: MockConfigEntry,
) -> None:
    """Test turning off addon switch."""
    entity_id = "switch.test"
@@ -277,25 +281,11 @@ async def test_switch_turn_off(
    # Mock the stop addon API call
    aioclient_mock.post("http://127.0.0.1/addons/test/stop", json={"result": "ok"})

    config_entry = MockConfigEntry(domain=DOMAIN, data={}, unique_id=DOMAIN)
    config_entry.add_to_hass(hass)

    with patch.dict(os.environ, MOCK_ENVIRON):
        result = await async_setup_component(
            hass,
            "hassio",
            {"http": {"server_port": 9999, "server_host": "127.0.0.1"}, "hassio": {}},
        )
        assert result
        await hass.async_block_till_done()

    # Verify that the entity is disabled by default.
    assert hass.states.get(entity_id) is None

    # Enable the entity.
    entity_registry.async_update_entity(entity_id, disabled_by=None)
    await hass.config_entries.async_reload(config_entry.entry_id)
    await hass.async_block_till_done()
    await enable_entity(hass, entity_registry, setup_integration, entity_id)

    # Verify initial state is on
    state = hass.states.get(entity_id)
@@ -311,10 +301,5 @@ async def test_switch_turn_off(
    )

    # Verify the API was called
    assert len(aioclient_mock.mock_calls) > 0
    stop_call_found = False
    for call in aioclient_mock.mock_calls:
        if call[1].path == "/addons/test/stop" and call[0] == "POST":
            stop_call_found = True
            break
    assert stop_call_found
    assert aioclient_mock.mock_calls[-1][1].path == "/addons/test/stop"
    assert aioclient_mock.mock_calls[-1][0] == "POST"

@@ -38,6 +38,8 @@ NOT_IDASEN_DISCOVERY_INFO = BluetoothServiceInfoBleak(
    tx_power=-127,
)

UPDATE_DEBOUNCE_TIME = 0.2


async def init_integration(hass: HomeAssistant) -> MockConfigEntry:
    """Set up the IKEA Idasen Desk integration in Home Assistant."""

@@ -4,6 +4,7 @@ from typing import Any
from unittest.mock import AsyncMock, MagicMock

from bleak.exc import BleakError
from freezegun.api import FrozenDateTimeFactory
import pytest

from homeassistant.components.cover import (
@@ -22,12 +23,13 @@ from homeassistant.const import (
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import HomeAssistantError

from . import init_integration
from . import UPDATE_DEBOUNCE_TIME, init_integration

from tests.common import async_fire_time_changed


async def test_cover_available(
    hass: HomeAssistant,
    mock_desk_api: MagicMock,
    hass: HomeAssistant, mock_desk_api: MagicMock, freezer: FrozenDateTimeFactory
) -> None:
    """Test cover available property."""
    entity_id = "cover.test"
@@ -42,6 +44,9 @@ async def test_cover_available(
    mock_desk_api.is_connected = False
    mock_desk_api.trigger_update_callback(None)

    freezer.tick(UPDATE_DEBOUNCE_TIME)
    async_fire_time_changed(hass)

    state = hass.states.get(entity_id)
    assert state
    assert state.state == STATE_UNAVAILABLE
@@ -64,6 +69,7 @@ async def test_cover_services(
    service_data: dict[str, Any],
    expected_state: str,
    expected_position: int,
    freezer: FrozenDateTimeFactory,
) -> None:
    """Test cover services."""
    entity_id = "cover.test"
@@ -78,7 +84,9 @@ async def test_cover_services(
        {"entity_id": entity_id, **service_data},
        blocking=True,
    )
    await hass.async_block_till_done()
    freezer.tick(UPDATE_DEBOUNCE_TIME)
    async_fire_time_changed(hass)

    state = hass.states.get(entity_id)
    assert state
    assert state.state == expected_state
@@ -113,4 +121,3 @@ async def test_cover_services_exception(
        {"entity_id": entity_id, **service_data},
        blocking=True,
    )
    await hass.async_block_till_done()

@@ -2,18 +2,23 @@

from unittest.mock import MagicMock

from freezegun.api import FrozenDateTimeFactory
import pytest

from homeassistant.const import STATE_UNAVAILABLE
from homeassistant.core import HomeAssistant

from . import init_integration
from . import UPDATE_DEBOUNCE_TIME, init_integration

from tests.common import async_fire_time_changed

EXPECTED_INITIAL_HEIGHT = "1"


@pytest.mark.usefixtures("entity_registry_enabled_by_default")
async def test_height_sensor(hass: HomeAssistant, mock_desk_api: MagicMock) -> None:
async def test_height_sensor(
    hass: HomeAssistant, mock_desk_api: MagicMock, freezer: FrozenDateTimeFactory
) -> None:
    """Test height sensor."""
    await init_integration(hass)

@@ -24,6 +29,15 @@ async def test_height_sensor(hass: HomeAssistant, mock_desk_api: MagicMock) -> N

    mock_desk_api.height = 1.2
    mock_desk_api.trigger_update_callback(None)
    await hass.async_block_till_done()

    # State should still be the same due to the debouncer
    state = hass.states.get(entity_id)
    assert state
    assert state.state == EXPECTED_INITIAL_HEIGHT

    freezer.tick(UPDATE_DEBOUNCE_TIME)
    async_fire_time_changed(hass)

    state = hass.states.get(entity_id)
    assert state
@@ -34,6 +48,7 @@ async def test_height_sensor(hass: HomeAssistant, mock_desk_api: MagicMock) -> N
async def test_sensor_available(
    hass: HomeAssistant,
    mock_desk_api: MagicMock,
    freezer: FrozenDateTimeFactory,
) -> None:
    """Test sensor available property."""
    await init_integration(hass)
@@ -46,6 +61,9 @@ async def test_sensor_available(
    mock_desk_api.is_connected = False
    mock_desk_api.trigger_update_callback(None)

    freezer.tick(UPDATE_DEBOUNCE_TIME)
    async_fire_time_changed(hass)

    state = hass.states.get(entity_id)
    assert state
    assert state.state == STATE_UNAVAILABLE

@@ -318,6 +318,7 @@
    "1/64/65531": [0, 65528, 65529, 65531, 65532, 65533],
    "1/513/0": 2830,
    "1/513/1": 1250,
    "1/513/2": 1,
    "1/513/3": null,
    "1/513/4": null,
    "1/513/5": null,

@@ -1222,6 +1222,55 @@
    'state': 'off',
  })
# ---
# name: test_binary_sensors[thermostat][binary_sensor.longan_link_hvac_occupancy-entry]
  EntityRegistryEntrySnapshot({
    'aliases': set({
    }),
    'area_id': None,
    'capabilities': None,
    'config_entry_id': <ANY>,
    'config_subentry_id': <ANY>,
    'device_class': None,
    'device_id': <ANY>,
    'disabled_by': None,
    'domain': 'binary_sensor',
    'entity_category': None,
    'entity_id': 'binary_sensor.longan_link_hvac_occupancy',
    'has_entity_name': True,
    'hidden_by': None,
    'icon': None,
    'id': <ANY>,
    'labels': set({
    }),
    'name': None,
    'options': dict({
    }),
    'original_device_class': <BinarySensorDeviceClass.OCCUPANCY: 'occupancy'>,
    'original_icon': None,
    'original_name': 'Occupancy',
    'platform': 'matter',
    'previous_unique_id': None,
    'suggested_object_id': None,
    'supported_features': 0,
    'translation_key': None,
    'unique_id': '00000000000004D2-0000000000000004-MatterNodeDevice-1-ThermostatOccupancySensor-513-2',
    'unit_of_measurement': None,
  })
# ---
# name: test_binary_sensors[thermostat][binary_sensor.longan_link_hvac_occupancy-state]
  StateSnapshot({
    'attributes': ReadOnlyDict({
      'device_class': 'occupancy',
      'friendly_name': 'Longan link HVAC Occupancy',
    }),
    'context': <ANY>,
    'entity_id': 'binary_sensor.longan_link_hvac_occupancy',
    'last_changed': <ANY>,
    'last_reported': <ANY>,
    'last_updated': <ANY>,
    'state': 'on',
  })
# ---
# name: test_binary_sensors[valve][binary_sensor.valve_general_fault-entry]
  EntityRegistryEntrySnapshot({
    'aliases': set({

@@ -3,6 +3,7 @@
from collections.abc import Generator
from unittest.mock import MagicMock, patch

from chip.clusters import Objects as clusters
from matter_server.client.models.node import MatterNode
from matter_server.common.models import EventType
import pytest
@@ -344,3 +345,31 @@ async def test_water_valve(
    state = hass.states.get("binary_sensor.valve_valve_leaking")
    assert state
    assert state.state == "on"


@pytest.mark.parametrize("node_fixture", ["thermostat"])
async def test_thermostat_occupancy(
    hass: HomeAssistant,
    matter_client: MagicMock,
    matter_node: MatterNode,
) -> None:
    """Test thermostat occupancy."""
    state = hass.states.get("binary_sensor.longan_link_hvac_occupancy")
    assert state
    assert state.state == "on"

    # Test Occupancy attribute change
    occupancy_attribute = clusters.Thermostat.Attributes.Occupancy

    set_node_attribute(
        matter_node,
        1,
        occupancy_attribute.cluster_id,
        occupancy_attribute.attribute_id,
        0,
    )
    await trigger_subscription_callback(hass, matter_client)

    state = hass.states.get("binary_sensor.longan_link_hvac_occupancy")
    assert state
    assert state.state == "off"

@@ -75,6 +75,7 @@ CONFIG_WITH_STATES = {
            "state_opening": "opening",
            "state_unlocked": "unlocked",
            "state_unlocking": "unlocking",
            "state_jammed": "jammed",
        }
    }
}
@@ -89,6 +90,7 @@ CONFIG_WITH_STATES = {
        (CONFIG_WITH_STATES, "opening", LockState.OPENING),
        (CONFIG_WITH_STATES, "unlocked", LockState.UNLOCKED),
        (CONFIG_WITH_STATES, "unlocking", LockState.UNLOCKING),
        (CONFIG_WITH_STATES, "jammed", LockState.JAMMED),
    ],
)
async def test_controlling_state_via_topic(
@@ -111,6 +113,12 @@ async def test_controlling_state_via_topic(
    state = hass.states.get("lock.test")
    assert state.state == lock_state

    async_fire_mqtt_message(hass, "state-topic", "None")
    await hass.async_block_till_done()

    state = hass.states.get("lock.test")
    assert state.state == STATE_UNKNOWN


@pytest.mark.parametrize(
    ("hass_config", "payload", "lock_state"),

@@ -101,7 +101,7 @@ async def test_sensors_io_series_4(hass: HomeAssistant) -> None:

    toothbrush_sensor = hass.states.get("sensor.io_series_4_48be_brushing_mode")
    toothbrush_sensor_attrs = toothbrush_sensor.attributes
    assert toothbrush_sensor.state == "gum care"
    assert toothbrush_sensor.state == "gum_care"
    assert (
        toothbrush_sensor_attrs[ATTR_FRIENDLY_NAME] == "IO Series 4 48BE Brushing mode"
    )
@@ -133,7 +133,7 @@ async def test_sensors_io_series_4(hass: HomeAssistant) -> None:

    toothbrush_sensor = hass.states.get("sensor.io_series_4_48be_brushing_mode")
    # Sleepy devices should keep their state over time
    assert toothbrush_sensor.state == "gum care"
    assert toothbrush_sensor.state == "gum_care"
    toothbrush_sensor_attrs = toothbrush_sensor.attributes
    assert toothbrush_sensor_attrs[ATTR_ASSUMED_STATE] is True

Some files were not shown because too many files have changed in this diff.