Mirror of https://github.com/home-assistant/core.git, synced 2025-10-15 14:49:34 +00:00

Compare commits: cdce8p-bui...zjs-config
2 commits: fe35fac8ee, 4bccc57b46
710  .github/workflows/ci.yaml (vendored)
File diff suppressed because it is too large.
4  .github/workflows/codeql.yml (vendored)

@@ -24,11 +24,11 @@ jobs:
 uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0

 - name: Initialize CodeQL
-uses: github/codeql-action/init@64d10c13136e1c5bce3e5fbde8d4906eeaafc885 # v3.30.6
+uses: github/codeql-action/init@3599b3baa15b485a2e49ef411a7a4bb2452e7f93 # v3.30.5
 with:
 languages: python

 - name: Perform CodeQL Analysis
-uses: github/codeql-action/analyze@64d10c13136e1c5bce3e5fbde8d4906eeaafc885 # v3.30.6
+uses: github/codeql-action/analyze@3599b3baa15b485a2e49ef411a7a4bb2452e7f93 # v3.30.5
 with:
 category: "/language:python"
48  .github/workflows/wheels.yml (vendored)

@@ -125,7 +125,7 @@ jobs:

 core:
 name: Build Core wheels ${{ matrix.abi }} for ${{ matrix.arch }} (musllinux_1_2)
-if: false && github.repository_owner == 'home-assistant'
+if: github.repository_owner == 'home-assistant'
 needs: init
 runs-on: ubuntu-latest
 strategy:

@@ -177,26 +177,12 @@ jobs:
 name: Build wheels ${{ matrix.abi }} for ${{ matrix.arch }}
 if: github.repository_owner == 'home-assistant'
 needs: init
-runs-on: ${{ matrix.os }}
+runs-on: ubuntu-latest
 strategy:
 fail-fast: false
 matrix:
-include:
-- os: ubuntu-latest
-arch: amd64
-abi: cp313
-- os: ubuntu-latest
-arch: i386
-abi: cp313
-- os: ubuntu-24.04-arm
-arch: aarch64
-abi: cp313
-- os: ubuntu-latest
-arch: armv7
-abi: cp313
-- os: ubuntu-latest
-arch: armhf
-abi: cp313
+abi: ["cp313"]
+arch: ${{ fromJson(needs.init.outputs.architectures) }}
 steps:
 - name: Checkout the repository
 uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0

@@ -233,35 +219,9 @@ jobs:
 sed -i "/uv/d" requirements.txt
 sed -i "/uv/d" requirements_diff.txt

-- name: Create requirements file for custom build
-run: |
-touch requirements_custom.txt
-echo -n "cython==3.1.4" >> requirements_custom.txt
-
-- name: Modify requirements file for custom build
-if: contains(fromJSON('["armv7", "armhf"]'), matrix.arch)
-id: modify-requirements
-run: |
-echo " # force update" >> requirements_custom.txt
-echo "skip_binary=cython" >> $GITHUB_OUTPUT
-
-- name: Build wheels (custom)
-uses: cdce8p/wheels@master
-with:
-abi: ${{ matrix.abi }}
-tag: musllinux_1_2
-arch: ${{ matrix.arch }}
-wheels-key: ${{ secrets.WHEELS_KEY }}
-env-file: true
-skip-binary: ${{ steps.modify-requirements.outputs.skip_binary }}
-constraints: "homeassistant/package_constraints.txt"
-requirements: "requirements_custom.txt"
-verbose: true
-
 # home-assistant/wheels doesn't support sha pinning
 - name: Build wheels
 uses: home-assistant/wheels@2025.09.1
-if: false
 with:
 abi: ${{ matrix.abi }}
 tag: musllinux_1_2
@@ -555,7 +555,6 @@ homeassistant.components.vacuum.*
 homeassistant.components.vallox.*
 homeassistant.components.valve.*
 homeassistant.components.velbus.*
-homeassistant.components.vivotek.*
 homeassistant.components.vlc_telnet.*
 homeassistant.components.vodafone_station.*
 homeassistant.components.volvo.*
2  CODEOWNERS (generated)

@@ -1196,6 +1196,8 @@ build.json @home-assistant/supervisor
 /tests/components/plex/ @jjlawren
 /homeassistant/components/plugwise/ @CoMPaTech @bouwew
 /tests/components/plugwise/ @CoMPaTech @bouwew
+/homeassistant/components/plum_lightpad/ @ColinHarrington @prystupa
+/tests/components/plum_lightpad/ @ColinHarrington @prystupa
 /homeassistant/components/point/ @fredrike
 /tests/components/point/ @fredrike
 /homeassistant/components/pooldose/ @lmaertin
5  homeassistant/brands/ibm.json (new file)

@@ -0,0 +1,5 @@
+{
+  "domain": "ibm",
+  "name": "IBM",
+  "integrations": ["watson_iot", "watson_tts"]
+}
@@ -12,13 +12,11 @@ from homeassistant.components.bluetooth import async_get_scanner
 from homeassistant.config_entries import ConfigEntry
 from homeassistant.const import CONF_ADDRESS
 from homeassistant.core import HomeAssistant
-from homeassistant.helpers.debounce import Debouncer
 from homeassistant.helpers.update_coordinator import DataUpdateCoordinator

 from .const import CONF_IS_NEW_STYLE_SCALE

 SCAN_INTERVAL = timedelta(seconds=15)
-UPDATE_DEBOUNCE_TIME = 0.2

 _LOGGER = logging.getLogger(__name__)

@@ -40,19 +38,11 @@ class AcaiaCoordinator(DataUpdateCoordinator[None]):
 config_entry=entry,
 )

-debouncer = Debouncer(
-hass=hass,
-logger=_LOGGER,
-cooldown=UPDATE_DEBOUNCE_TIME,
-immediate=True,
-function=self.async_update_listeners,
-)
-
 self._scale = AcaiaScale(
 address_or_ble_device=entry.data[CONF_ADDRESS],
 name=entry.title,
 is_new_style_scale=entry.data[CONF_IS_NEW_STYLE_SCALE],
-notify_callback=debouncer.async_schedule_call,
+notify_callback=self.async_update_listeners,
 scanner=async_get_scanner(hass),
 )

@@ -6,5 +6,5 @@
 "documentation": "https://www.home-assistant.io/integrations/airos",
 "iot_class": "local_polling",
 "quality_scale": "bronze",
-"requirements": ["airos==0.5.5"]
+"requirements": ["airos==0.5.4"]
 }
@@ -8,7 +8,6 @@ from typing import Any

 from airthings_ble import AirthingsBluetoothDeviceData, AirthingsDevice
 from bleak import BleakError
-from habluetooth import BluetoothServiceInfoBleak
 import voluptuous as vol

 from homeassistant.components import bluetooth

@@ -45,7 +44,7 @@ def get_name(device: AirthingsDevice) -> str:

 name = device.friendly_name()
 if identifier := device.identifier:
-name += f" ({device.model.value}{identifier})"
+name += f" ({identifier})"
 return name


@@ -118,12 +117,6 @@ class AirthingsConfigFlow(ConfigFlow, domain=DOMAIN):
 ) -> ConfigFlowResult:
 """Confirm discovery."""
 if user_input is not None:
-if (
-self._discovered_device is not None
-and self._discovered_device.device.firmware.need_firmware_upgrade
-):
-return self.async_abort(reason="firmware_upgrade_required")
-
 return self.async_create_entry(
 title=self.context["title_placeholders"]["name"], data={}
 )

@@ -144,9 +137,6 @@ class AirthingsConfigFlow(ConfigFlow, domain=DOMAIN):
 self._abort_if_unique_id_configured()
 discovery = self._discovered_devices[address]

-if discovery.device.firmware.need_firmware_upgrade:
-return self.async_abort(reason="firmware_upgrade_required")
-
 self.context["title_placeholders"] = {
 "name": discovery.name,
 }

@@ -156,27 +146,21 @@ class AirthingsConfigFlow(ConfigFlow, domain=DOMAIN):
 return self.async_create_entry(title=discovery.name, data={})

 current_addresses = self._async_current_ids(include_ignore=False)
-devices: list[BluetoothServiceInfoBleak] = []
 for discovery_info in async_discovered_service_info(self.hass):
 address = discovery_info.address
 if address in current_addresses or address in self._discovered_devices:
 continue

 if MFCT_ID not in discovery_info.manufacturer_data:
 continue

 if not any(uuid in SERVICE_UUIDS for uuid in discovery_info.service_uuids):
 continue
-devices.append(discovery_info)
-
-for discovery_info in devices:
-address = discovery_info.address
 try:
 device = await self._get_device_data(discovery_info)
 except AirthingsDeviceUpdateError:
-_LOGGER.error(
-"Error connecting to and getting data from %s",
-discovery_info.address,
-)
-continue
+return self.async_abort(reason="cannot_connect")
 except Exception:
 _LOGGER.exception("Unknown error occurred")
 return self.async_abort(reason="unknown")
@@ -24,5 +24,5 @@
 "dependencies": ["bluetooth_adapters"],
 "documentation": "https://www.home-assistant.io/integrations/airthings_ble",
 "iot_class": "local_polling",
-"requirements": ["airthings-ble==1.1.1"]
+"requirements": ["airthings-ble==0.9.2"]
 }
@@ -20,7 +20,6 @@
 "already_in_progress": "[%key:common::config_flow::abort::already_in_progress%]",
 "already_configured": "[%key:common::config_flow::abort::already_configured_device%]",
 "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
-"firmware_upgrade_required": "Your device requires a firmware upgrade. Please use the Airthings app (Android/iOS) to upgrade it.",
 "unknown": "[%key:common::config_flow::error::unknown%]"
 }
 },
@@ -18,9 +18,7 @@ from homeassistant.components.binary_sensor import (
 from homeassistant.const import EntityCategory
 from homeassistant.core import HomeAssistant
 from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
-import homeassistant.helpers.entity_registry as er

-from .const import _LOGGER, DOMAIN
 from .coordinator import AmazonConfigEntry
 from .entity import AmazonEntity
 from .utils import async_update_unique_id

@@ -53,47 +51,11 @@ BINARY_SENSORS: Final = (
 ),
 is_supported=lambda device, key: device.sensors.get(key) is not None,
 is_available_fn=lambda device, key: (
-device.online
-and (sensor := device.sensors.get(key)) is not None
-and sensor.error is False
+device.online and device.sensors[key].error is False
 ),
 ),
 )

-DEPRECATED_BINARY_SENSORS: Final = (
-AmazonBinarySensorEntityDescription(
-key="bluetooth",
-entity_category=EntityCategory.DIAGNOSTIC,
-translation_key="bluetooth",
-is_on_fn=lambda device, key: False,
-),
-AmazonBinarySensorEntityDescription(
-key="babyCryDetectionState",
-translation_key="baby_cry_detection",
-is_on_fn=lambda device, key: False,
-),
-AmazonBinarySensorEntityDescription(
-key="beepingApplianceDetectionState",
-translation_key="beeping_appliance_detection",
-is_on_fn=lambda device, key: False,
-),
-AmazonBinarySensorEntityDescription(
-key="coughDetectionState",
-translation_key="cough_detection",
-is_on_fn=lambda device, key: False,
-),
-AmazonBinarySensorEntityDescription(
-key="dogBarkDetectionState",
-translation_key="dog_bark_detection",
-is_on_fn=lambda device, key: False,
-),
-AmazonBinarySensorEntityDescription(
-key="waterSoundsDetectionState",
-translation_key="water_sounds_detection",
-is_on_fn=lambda device, key: False,
-),
-)
-

 async def async_setup_entry(
 hass: HomeAssistant,

@@ -104,8 +66,6 @@ async def async_setup_entry(

 coordinator = entry.runtime_data

-entity_registry = er.async_get(hass)
-
 # Replace unique id for "detectionState" binary sensor
 await async_update_unique_id(
 hass,

@@ -115,16 +75,6 @@ async def async_setup_entry(
 "detectionState",
 )

-# Clean up deprecated sensors
-for sensor_desc in DEPRECATED_BINARY_SENSORS:
-for serial_num in coordinator.data:
-unique_id = f"{serial_num}-{sensor_desc.key}"
-if entity_id := entity_registry.async_get_entity_id(
-BINARY_SENSOR_DOMAIN, DOMAIN, unique_id
-):
-_LOGGER.debug("Removing deprecated entity %s", entity_id)
-entity_registry.async_remove(entity_id)
-
 known_devices: set[str] = set()

 def _check_device() -> None:
@@ -8,5 +8,5 @@
 "iot_class": "cloud_polling",
 "loggers": ["aioamazondevices"],
 "quality_scale": "platinum",
-"requirements": ["aioamazondevices==6.2.9"]
+"requirements": ["aioamazondevices==6.2.7"]
 }
@@ -32,9 +32,7 @@ class AmazonSensorEntityDescription(SensorEntityDescription):

 native_unit_of_measurement_fn: Callable[[AmazonDevice, str], str] | None = None
 is_available_fn: Callable[[AmazonDevice, str], bool] = lambda device, key: (
-device.online
-and (sensor := device.sensors.get(key)) is not None
-and sensor.error is False
+device.online and device.sensors[key].error is False
 )


@@ -42,9 +40,9 @@ SENSORS: Final = (
 AmazonSensorEntityDescription(
 key="temperature",
 device_class=SensorDeviceClass.TEMPERATURE,
-native_unit_of_measurement_fn=lambda device, key: (
+native_unit_of_measurement_fn=lambda device, _key: (
 UnitOfTemperature.CELSIUS
-if key in device.sensors and device.sensors[key].scale == "CELSIUS"
+if device.sensors[_key].scale == "CELSIUS"
 else UnitOfTemperature.FAHRENHEIT
 ),
 state_class=SensorStateClass.MEASUREMENT,
@@ -18,11 +18,7 @@ from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback

 from .coordinator import AmazonConfigEntry
 from .entity import AmazonEntity
-from .utils import (
-alexa_api_call,
-async_remove_dnd_from_virtual_group,
-async_update_unique_id,
-)
+from .utils import alexa_api_call, async_update_unique_id

 PARALLEL_UPDATES = 1

@@ -33,9 +29,7 @@ class AmazonSwitchEntityDescription(SwitchEntityDescription):

 is_on_fn: Callable[[AmazonDevice], bool]
 is_available_fn: Callable[[AmazonDevice, str], bool] = lambda device, key: (
-device.online
-and (sensor := device.sensors.get(key)) is not None
-and sensor.error is False
+device.online and device.sensors[key].error is False
 )
 method: str

@@ -64,9 +58,6 @@ async def async_setup_entry(
 hass, coordinator, SWITCH_DOMAIN, "do_not_disturb", "dnd"
 )

-# Remove DND switch from virtual groups
-await async_remove_dnd_from_virtual_group(hass, coordinator)
-
 known_devices: set[str] = set()

 def _check_device() -> None:
@@ -4,10 +4,8 @@ from collections.abc import Awaitable, Callable, Coroutine
 from functools import wraps
 from typing import Any, Concatenate

-from aioamazondevices.const import SPEAKER_GROUP_FAMILY
 from aioamazondevices.exceptions import CannotConnect, CannotRetrieveData

-from homeassistant.components.switch import DOMAIN as SWITCH_DOMAIN
 from homeassistant.core import HomeAssistant
 from homeassistant.exceptions import HomeAssistantError
 import homeassistant.helpers.entity_registry as er

@@ -63,21 +61,3 @@ async def async_update_unique_id(

 # Update the registry with the new unique_id
 entity_registry.async_update_entity(entity_id, new_unique_id=new_unique_id)
-
-
-async def async_remove_dnd_from_virtual_group(
-hass: HomeAssistant,
-coordinator: AmazonDevicesCoordinator,
-) -> None:
-"""Remove entity DND from virtual group."""
-entity_registry = er.async_get(hass)
-
-for serial_num in coordinator.data:
-unique_id = f"{serial_num}-do_not_disturb"
-entity_id = entity_registry.async_get_entity_id(
-DOMAIN, SWITCH_DOMAIN, unique_id
-)
-is_group = coordinator.data[serial_num].device_family == SPEAKER_GROUP_FAMILY
-if entity_id and is_group:
-entity_registry.async_remove(entity_id)
-_LOGGER.debug("Removed DND switch from virtual group %s", entity_id)
@@ -19,8 +19,9 @@ CONF_THINKING_BUDGET = "thinking_budget"
 RECOMMENDED_THINKING_BUDGET = 0
 MIN_THINKING_BUDGET = 1024

-NON_THINKING_MODELS = [
-"claude-3-5", # Both sonnet and haiku
-"claude-3-opus",
-"claude-3-haiku",
+THINKING_MODELS = [
+"claude-3-7-sonnet",
+"claude-sonnet-4-0",
+"claude-opus-4-0",
+"claude-opus-4-1",
 ]
@@ -51,11 +51,11 @@ from .const import (
 DOMAIN,
 LOGGER,
 MIN_THINKING_BUDGET,
-NON_THINKING_MODELS,
 RECOMMENDED_CHAT_MODEL,
 RECOMMENDED_MAX_TOKENS,
 RECOMMENDED_TEMPERATURE,
 RECOMMENDED_THINKING_BUDGET,
+THINKING_MODELS,
 )

 # Max number of back and forth with the LLM to generate a response

@@ -364,7 +364,7 @@ class AnthropicBaseLLMEntity(Entity):
 if tools:
 model_args["tools"] = tools
 if (
-not model.startswith(tuple(NON_THINKING_MODELS))
+model.startswith(tuple(THINKING_MODELS))
 and thinking_budget >= MIN_THINKING_BUDGET
 ):
 model_args["thinking"] = ThinkingConfigEnabledParam(
@@ -8,5 +8,5 @@
 "documentation": "https://www.home-assistant.io/integrations/anthropic",
 "integration_type": "service",
 "iot_class": "cloud_polling",
-"requirements": ["anthropic==0.69.0"]
+"requirements": ["anthropic==0.62.0"]
 }
@@ -19,8 +19,8 @@
 "bleak-retry-connector==4.4.3",
 "bluetooth-adapters==2.1.0",
 "bluetooth-auto-recovery==1.5.3",
-"bluetooth-data-tools==1.28.3",
-"dbus-fast==2.44.5",
-"habluetooth==5.7.0"
+"bluetooth-data-tools==1.28.2",
+"dbus-fast==2.44.3",
+"habluetooth==5.6.4"
 ]
 }
@@ -23,7 +23,7 @@ from homeassistant.helpers.aiohttp_client import async_get_clientsession
 from homeassistant.helpers.device_registry import CONNECTION_NETWORK_MAC
 from homeassistant.util.ssl import client_context_no_verify

-from .const import KEY_MAC, TIMEOUT_SEC
+from .const import KEY_MAC, TIMEOUT
 from .coordinator import DaikinConfigEntry, DaikinCoordinator

 _LOGGER = logging.getLogger(__name__)

@@ -42,7 +42,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: DaikinConfigEntry) -> bo
 session = async_get_clientsession(hass)
 host = conf[CONF_HOST]
 try:
-async with asyncio.timeout(TIMEOUT_SEC):
+async with asyncio.timeout(TIMEOUT):
 device: Appliance = await DaikinFactory(
 host,
 session,

@@ -53,7 +53,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: DaikinConfigEntry) -> bo
 )
 _LOGGER.debug("Connection to %s successful", host)
 except TimeoutError as err:
-_LOGGER.debug("Connection to %s timed out in %s seconds", host, TIMEOUT_SEC)
+_LOGGER.debug("Connection to %s timed out in 60 seconds", host)
 raise ConfigEntryNotReady from err
 except ClientConnectionError as err:
 _LOGGER.debug("ClientConnectionError to %s", host)
@@ -20,7 +20,7 @@ from homeassistant.helpers.aiohttp_client import async_get_clientsession
 from homeassistant.helpers.service_info.zeroconf import ZeroconfServiceInfo
 from homeassistant.util.ssl import client_context_no_verify

-from .const import DOMAIN, KEY_MAC, TIMEOUT_SEC
+from .const import DOMAIN, KEY_MAC, TIMEOUT

 _LOGGER = logging.getLogger(__name__)

@@ -84,7 +84,7 @@ class FlowHandler(ConfigFlow, domain=DOMAIN):
 password = None

 try:
-async with asyncio.timeout(TIMEOUT_SEC):
+async with asyncio.timeout(TIMEOUT):
 device: Appliance = await DaikinFactory(
 host,
 async_get_clientsession(self.hass),
@@ -24,4 +24,4 @@ ATTR_STATE_OFF = "off"
 KEY_MAC = "mac"
 KEY_IP = "ip"

-TIMEOUT_SEC = 120
+TIMEOUT = 60
@@ -9,7 +9,7 @@ from homeassistant.config_entries import ConfigEntry
 from homeassistant.core import HomeAssistant
 from homeassistant.helpers.update_coordinator import DataUpdateCoordinator

-from .const import DOMAIN, TIMEOUT_SEC
+from .const import DOMAIN

 _LOGGER = logging.getLogger(__name__)

@@ -28,7 +28,7 @@ class DaikinCoordinator(DataUpdateCoordinator[None]):
 _LOGGER,
 config_entry=entry,
 name=device.values.get("name", DOMAIN),
-update_interval=timedelta(seconds=TIMEOUT_SEC),
+update_interval=timedelta(seconds=60),
 )
 self.device = device

@@ -6,6 +6,6 @@
 "documentation": "https://www.home-assistant.io/integrations/daikin",
 "iot_class": "local_polling",
 "loggers": ["pydaikin"],
-"requirements": ["pydaikin==2.17.1"],
+"requirements": ["pydaikin==2.16.0"],
 "zeroconf": ["_dkapi._tcp.local."]
 }
@@ -17,6 +17,6 @@
 "requirements": [
 "aiodhcpwatcher==1.2.1",
 "aiodiscover==2.7.1",
-"cached-ipaddress==1.0.1"
+"cached-ipaddress==0.10.0"
 ]
 }
@@ -7,7 +7,7 @@
 "iot_class": "local_polling",
 "loggers": ["pyenphase"],
 "quality_scale": "platinum",
-"requirements": ["pyenphase==2.4.0"],
+"requirements": ["pyenphase==2.3.0"],
 "zeroconf": [
 {
 "type": "_enphase-envoy._tcp.local."
@@ -22,23 +22,19 @@ import voluptuous as vol

 from homeassistant.components import zeroconf
 from homeassistant.config_entries import (
-SOURCE_ESPHOME,
 SOURCE_IGNORE,
 SOURCE_REAUTH,
 SOURCE_RECONFIGURE,
 ConfigEntry,
 ConfigFlow,
 ConfigFlowResult,
-FlowType,
 OptionsFlow,
 )
 from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_PORT
 from homeassistant.core import callback
-from homeassistant.data_entry_flow import AbortFlow, FlowResultType
-from homeassistant.helpers import discovery_flow
+from homeassistant.data_entry_flow import AbortFlow
 from homeassistant.helpers.device_registry import format_mac
 from homeassistant.helpers.service_info.dhcp import DhcpServiceInfo
-from homeassistant.helpers.service_info.esphome import ESPHomeServiceInfo
 from homeassistant.helpers.service_info.hassio import HassioServiceInfo
 from homeassistant.helpers.service_info.mqtt import MqttServiceInfo
 from homeassistant.helpers.service_info.zeroconf import ZeroconfServiceInfo

@@ -79,7 +75,6 @@ class EsphomeFlowHandler(ConfigFlow, domain=DOMAIN):
 def __init__(self) -> None:
 """Initialize flow."""
 self._host: str | None = None
-self._connected_address: str | None = None
 self.__name: str | None = None
 self._port: int | None = None
 self._password: str | None = None

@@ -503,55 +498,18 @@ class EsphomeFlowHandler(ConfigFlow, domain=DOMAIN):
 await self.hass.config_entries.async_remove(
 self._entry_with_name_conflict.entry_id
 )
-return await self._async_create_entry()
+return self._async_create_entry()

-async def _async_create_entry(self) -> ConfigFlowResult:
+@callback
+def _async_create_entry(self) -> ConfigFlowResult:
 """Create the config entry."""
 assert self._name is not None
-assert self._device_info is not None
-
-# Check if Z-Wave capabilities are present and start discovery flow
-next_flow_id: str | None = None
-if self._device_info.zwave_proxy_feature_flags:
-assert self._connected_address is not None
-assert self._port is not None
-
-# Start Z-Wave discovery flow and get the flow ID
-zwave_result = await self.hass.config_entries.flow.async_init(
-"zwave_js",
-context={
-"source": SOURCE_ESPHOME,
-"discovery_key": discovery_flow.DiscoveryKey(
-domain=DOMAIN,
-key=self._device_info.mac_address,
-version=1,
-),
-},
-data=ESPHomeServiceInfo(
-name=self._device_info.name,
-zwave_home_id=self._device_info.zwave_home_id or None,
-ip_address=self._connected_address,
-port=self._port,
-noise_psk=self._noise_psk,
-),
-)
-if zwave_result["type"] in (
-FlowResultType.ABORT,
-FlowResultType.CREATE_ENTRY,
-):
-_LOGGER.debug(
-"Unable to continue created Z-Wave JS config flow: %s", zwave_result
-)
-else:
-next_flow_id = zwave_result["flow_id"]
-
 return self.async_create_entry(
 title=self._name,
 data=self._async_make_config_data(),
 options={
 CONF_ALLOW_SERVICE_CALLS: DEFAULT_NEW_CONFIG_ALLOW_ALLOW_SERVICE_CALLS,
 },
-next_flow=(FlowType.CONFIG_FLOW, next_flow_id) if next_flow_id else None,
 )

 @callback

@@ -598,7 +556,7 @@ class EsphomeFlowHandler(ConfigFlow, domain=DOMAIN):
 if entry.data.get(CONF_DEVICE_NAME) == self._device_name:
 self._entry_with_name_conflict = entry
 return await self.async_step_name_conflict()
-return await self._async_create_entry()
+return self._async_create_entry()

 async def _async_reauth_validated_connection(self) -> ConfigFlowResult:
 """Handle reauth validated connection."""

@@ -745,7 +703,6 @@ class EsphomeFlowHandler(ConfigFlow, domain=DOMAIN):
 try:
 await cli.connect()
 self._device_info = await cli.device_info()
-self._connected_address = cli.connected_address
 except InvalidAuthAPIError:
 return ERROR_INVALID_PASSWORD_AUTH
 except RequiresEncryptionAPIError:
@@ -17,9 +17,9 @@
 "mqtt": ["esphome/discover/#"],
 "quality_scale": "platinum",
 "requirements": [
-"aioesphomeapi==41.12.0",
+"aioesphomeapi==41.11.0",
 "esphome-dashboard-api==1.3.0",
-"bleak-esphome==3.4.0"
+"bleak-esphome==3.3.0"
 ],
 "zeroconf": ["_esphomelib._tcp.local."]
 }
@@ -54,7 +54,7 @@ async def async_setup_entry(
 except aiohttp.ClientResponseError as err:
 if 400 <= err.status < 500:
 raise ConfigEntryAuthFailed(
-translation_domain=DOMAIN, translation_key="reauth_required"
+"OAuth session is not valid, reauth required"
 ) from err
 raise ConfigEntryNotReady from err
 except aiohttp.ClientError as err:

@@ -76,6 +76,10 @@ async def async_unload_entry(
 hass: HomeAssistant, entry: GoogleAssistantSDKConfigEntry
 ) -> bool:
 """Unload a config entry."""
+if not hass.config_entries.async_loaded_entries(DOMAIN):
+for service_name in hass.services.async_services_for_domain(DOMAIN):
+hass.services.async_remove(DOMAIN, service_name)
+
 conversation.async_unset_agent(hass, entry)

 return True
@@ -26,7 +26,7 @@ from homeassistant.components.media_player import (
 from homeassistant.config_entries import ConfigEntry
 from homeassistant.const import ATTR_ENTITY_ID, CONF_ACCESS_TOKEN
 from homeassistant.core import HomeAssistant
-from homeassistant.exceptions import HomeAssistantError, ServiceValidationError
+from homeassistant.exceptions import HomeAssistantError
 from homeassistant.helpers.config_entry_oauth2_flow import OAuth2Session
 from homeassistant.helpers.event import async_call_later

@@ -68,13 +68,7 @@ async def async_send_text_commands(
 ) -> list[CommandResponse]:
 """Send text commands to Google Assistant Service."""
 # There can only be 1 entry (config_flow has single_instance_allowed)
-entries = hass.config_entries.async_loaded_entries(DOMAIN)
-if not entries:
-raise ServiceValidationError(
-translation_domain=DOMAIN,
-translation_key="entry_not_loaded",
-)
-entry: GoogleAssistantSDKConfigEntry = entries[0]
+entry: GoogleAssistantSDKConfigEntry = hass.config_entries.async_entries(DOMAIN)[0]

 session = entry.runtime_data.session
 try:
@@ -1,4 +1,4 @@
-"""Services for the Google Assistant SDK integration."""
+"""Support for Google Assistant SDK."""

 from __future__ import annotations

@@ -59,20 +59,14 @@
 },
 "media_player": {
 "name": "Media player entity",
-"description": "Name(s) of media player entities to play the Google Assistant's audio response on. This does not target the device for the command itself."
+"description": "Name(s) of media player entities to play response on."
 }
 }
 }
 },
 "exceptions": {
-"entry_not_loaded": {
-"message": "Entry not loaded"
-},
 "grpc_error": {
 "message": "Failed to communicate with Google Assistant"
-},
-"reauth_required": {
-"message": "Credentials are invalid, re-authentication required"
 }
 }
 }
|
@@ -22,7 +22,6 @@ from homeassistant.exceptions import (
|
|||||||
from homeassistant.helpers import config_entry_oauth2_flow
|
from homeassistant.helpers import config_entry_oauth2_flow
|
||||||
|
|
||||||
_UPLOAD_AND_DOWNLOAD_TIMEOUT = 12 * 3600
|
_UPLOAD_AND_DOWNLOAD_TIMEOUT = 12 * 3600
|
||||||
_UPLOAD_MAX_RETRIES = 20
|
|
||||||
|
|
||||||
_LOGGER = logging.getLogger(__name__)
|
_LOGGER = logging.getLogger(__name__)
|
||||||
|
|
||||||
@@ -151,7 +150,6 @@ class DriveClient:
|
|||||||
backup_metadata,
|
backup_metadata,
|
||||||
open_stream,
|
open_stream,
|
||||||
backup.size,
|
backup.size,
|
||||||
max_retries=_UPLOAD_MAX_RETRIES,
|
|
||||||
timeout=ClientTimeout(total=_UPLOAD_AND_DOWNLOAD_TIMEOUT),
|
timeout=ClientTimeout(total=_UPLOAD_AND_DOWNLOAD_TIMEOUT),
|
||||||
)
|
)
|
||||||
_LOGGER.debug(
|
_LOGGER.debug(
|
||||||
|
@@ -456,7 +456,6 @@ class GoogleGenerativeAILLMBaseEntity(Entity):
 """Initialize the agent."""
 self.entry = entry
 self.subentry = subentry
-self.default_model = default_model
 self._attr_name = subentry.title
 self._genai_client = entry.runtime_data
 self._attr_unique_id = subentry.subentry_id

@@ -490,7 +489,7 @@ class GoogleGenerativeAILLMBaseEntity(Entity):
 tools = tools or []
 tools.append(Tool(google_search=GoogleSearch()))

-model_name = options.get(CONF_CHAT_MODEL, self.default_model)
+model_name = options.get(CONF_CHAT_MODEL, RECOMMENDED_CHAT_MODEL)
 # Avoid INVALID_ARGUMENT Developer instruction is not enabled for <model>
 supports_system_instruction = (
 "gemma" not in model_name

@@ -621,7 +620,7 @@ class GoogleGenerativeAILLMBaseEntity(Entity):
 def create_generate_content_config(self) -> GenerateContentConfig:
 """Create the GenerateContentConfig for the LLM."""
 options = self.subentry.data
-model = options.get(CONF_CHAT_MODEL, self.default_model)
+model = options.get(CONF_CHAT_MODEL, RECOMMENDED_CHAT_MODEL)
 thinking_config: ThinkingConfig | None = None
 if model.startswith("models/gemini-2.5") and not model.endswith(
 ("tts", "image", "image-preview")
@@ -22,7 +22,6 @@ from google.protobuf import timestamp_pb2
 from homeassistant.components.sensor import (
 SensorDeviceClass,
 SensorEntity,
-SensorEntityDescription,
 SensorStateClass,
 )
 from homeassistant.config_entries import ConfigEntry

@@ -92,16 +91,6 @@ def convert_time(time_str: str) -> timestamp_pb2.Timestamp | None:
 return timestamp


-SENSOR_DESCRIPTIONS = [
-SensorEntityDescription(
-key="duration",
-state_class=SensorStateClass.MEASUREMENT,
-device_class=SensorDeviceClass.DURATION,
-native_unit_of_measurement=UnitOfTime.MINUTES,
-)
-]
-
-
 async def async_setup_entry(
 hass: HomeAssistant,
 config_entry: ConfigEntry,

@@ -116,20 +105,20 @@ async def async_setup_entry(
 client_options = ClientOptions(api_key=api_key)
 client = RoutesAsyncClient(client_options=client_options)

-sensors = [
-GoogleTravelTimeSensor(
-config_entry, name, api_key, origin, destination, client, sensor_description
+sensor = GoogleTravelTimeSensor(
+config_entry, name, api_key, origin, destination, client
 )
-for sensor_description in SENSOR_DESCRIPTIONS
-]

-async_add_entities(sensors, False)
+async_add_entities([sensor], False)


 class GoogleTravelTimeSensor(SensorEntity):
 """Representation of a Google travel time sensor."""

 _attr_attribution = ATTRIBUTION
+_attr_native_unit_of_measurement = UnitOfTime.MINUTES
+_attr_device_class = SensorDeviceClass.DURATION
+_attr_state_class = SensorStateClass.MEASUREMENT

 def __init__(
 self,

@@ -139,10 +128,8 @@ class GoogleTravelTimeSensor(SensorEntity):
 origin: str,
 destination: str,
 client: RoutesAsyncClient,
-sensor_description: SensorEntityDescription,
 ) -> None:
 """Initialize the sensor."""
-self.entity_description = sensor_description
 self._attr_name = name
 self._attr_unique_id = config_entry.entry_id
 self._attr_device_info = DeviceInfo(
|
|||||||
try:
|
try:
|
||||||
await supervisor_client.addons.start_addon(self._addon_slug)
|
await supervisor_client.addons.start_addon(self._addon_slug)
|
||||||
except SupervisorError as err:
|
except SupervisorError as err:
|
||||||
|
_LOGGER.error("Failed to start addon %s: %s", self._addon_slug, err)
|
||||||
raise HomeAssistantError(err) from err
|
raise HomeAssistantError(err) from err
|
||||||
|
|
||||||
await self.coordinator.force_addon_info_data_refresh(self._addon_slug)
|
await self.coordinator.force_addon_info_data_refresh(self._addon_slug)
|
||||||
|
@@ -10,7 +10,7 @@
 "loggers": ["pyhap"],
 "requirements": [
 "HAP-python==5.0.0",
-"fnv-hash-fast==1.6.0",
+"fnv-hash-fast==1.5.0",
 "PyQRCode==1.2.1",
 "base36==0.1.1"
 ],
@@ -14,6 +14,6 @@
 "documentation": "https://www.home-assistant.io/integrations/homekit_controller",
 "iot_class": "local_push",
 "loggers": ["aiohomekit", "commentjson"],
-"requirements": ["aiohomekit==3.2.20"],
+"requirements": ["aiohomekit==3.2.19"],
 "zeroconf": ["_hap._tcp.local.", "_hap._udp.local."]
 }
@@ -122,24 +122,11 @@ async def async_setup_entry(
 coordinators.main.new_zones_callbacks.append(_add_new_zones)

 platform = entity_platform.async_get_current_platform()
+platform.async_register_entity_service(SERVICE_RESUME, None, "resume")
 platform.async_register_entity_service(
-SERVICE_RESUME,
-None,
-"resume",
-entity_device_classes=(BinarySensorDeviceClass.RUNNING,),
-)
-platform.async_register_entity_service(
-SERVICE_START_WATERING,
-SCHEMA_START_WATERING,
-"start_watering",
-entity_device_classes=(BinarySensorDeviceClass.RUNNING,),
-)
-platform.async_register_entity_service(
-SERVICE_SUSPEND,
-SCHEMA_SUSPEND,
-"suspend",
-entity_device_classes=(BinarySensorDeviceClass.RUNNING,),
+SERVICE_START_WATERING, SCHEMA_START_WATERING, "start_watering"
 )
+platform.async_register_entity_service(SERVICE_SUSPEND, SCHEMA_SUSPEND, "suspend")


 class HydrawiseBinarySensor(HydrawiseEntity, BinarySensorEntity):
@@ -8,16 +8,13 @@ from idasen_ha import Desk

 from homeassistant.components import bluetooth
 from homeassistant.config_entries import ConfigEntry
-from homeassistant.core import HomeAssistant, callback
-from homeassistant.helpers.debounce import Debouncer
+from homeassistant.core import HomeAssistant
 from homeassistant.helpers.update_coordinator import DataUpdateCoordinator

 _LOGGER = logging.getLogger(__name__)

 type IdasenDeskConfigEntry = ConfigEntry[IdasenDeskCoordinator]

-UPDATE_DEBOUNCE_TIME = 0.2
-

 class IdasenDeskCoordinator(DataUpdateCoordinator[int | None]):
 """Class to manage updates for the Idasen Desk."""

@@ -36,22 +33,9 @@ class IdasenDeskCoordinator(DataUpdateCoordinator[int | None]):
 hass, _LOGGER, config_entry=config_entry, name=config_entry.title
 )
 self.address = address
-self.desk = Desk(self._async_handle_update)

 self._expected_connected = False
-self._height: int | None = None

-@callback
-def async_update_data() -> None:
-self.async_set_updated_data(self._height)
-
-self._debouncer = Debouncer(
-hass=self.hass,
-logger=_LOGGER,
-cooldown=UPDATE_DEBOUNCE_TIME,
-immediate=True,
-function=async_update_data,
-)
-
+self.desk = Desk(self.async_set_updated_data)

 async def async_connect(self) -> bool:
 """Connect to desk."""

@@ -76,9 +60,3 @@ class IdasenDeskCoordinator(DataUpdateCoordinator[int | None]):
 """Ensure that the desk is connected if that is the expected state."""
 if self._expected_connected:
 await self.async_connect()
-
-@callback
-def _async_handle_update(self, height: int | None) -> None:
-"""Handle an update from the desk."""
-self._height = height
-self._debouncer.async_schedule_call()
@@ -147,9 +147,8 @@ class KrakenData:

 def _get_websocket_name_asset_pairs(self) -> str:
 return ",".join(
-pair
+self.tradable_asset_pairs[tracked_pair]
 for tracked_pair in self._config_entry.options[CONF_TRACKED_ASSET_PAIRS]
-if (pair := self.tradable_asset_pairs.get(tracked_pair)) is not None
 )

 def set_update_interval(self, update_interval: int) -> None:
@@ -156,7 +156,7 @@ async def async_setup_entry(
 for description in SENSOR_TYPES
 ]
 )
-async_add_entities(entities)
+async_add_entities(entities, True)

 _async_add_kraken_sensors(config_entry.options[CONF_TRACKED_ASSET_PAIRS])

@@ -5,7 +5,7 @@ from dataclasses import dataclass
 from datetime import datetime
 from typing import cast

-from pylamarzocco.const import BackFlushStatus, MachineState, ModelName, WidgetType
+from pylamarzocco.const import BackFlushStatus, ModelName, WidgetType
 from pylamarzocco.models import (
 BackFlush,
 BaseWidgetOutput,

@@ -97,14 +97,7 @@ ENTITIES: tuple[LaMarzoccoSensorEntityDescription, ...] = (
 ).brewing_start_time
 ),
 entity_category=EntityCategory.DIAGNOSTIC,
-available_fn=(
-lambda coordinator: not coordinator.websocket_terminated
-and cast(
-MachineStatus,
-coordinator.device.dashboard.config[WidgetType.CM_MACHINE_STATUS],
-).status
-is MachineState.BREWING
-),
+available_fn=(lambda coordinator: not coordinator.websocket_terminated),
 ),
 LaMarzoccoSensorEntityDescription(
 key="steam_boiler_ready_time",
@@ -20,5 +20,5 @@
   "documentation": "https://www.home-assistant.io/integrations/ld2410_ble",
   "integration_type": "device",
   "iot_class": "local_push",
-  "requirements": ["bluetooth-data-tools==1.28.3", "ld2410-ble==0.1.1"]
+  "requirements": ["bluetooth-data-tools==1.28.2", "ld2410-ble==0.1.1"]
 }
@@ -35,5 +35,5 @@
   "dependencies": ["bluetooth_adapters"],
   "documentation": "https://www.home-assistant.io/integrations/led_ble",
   "iot_class": "local_polling",
-  "requirements": ["bluetooth-data-tools==1.28.3", "led-ble==1.1.7"]
+  "requirements": ["bluetooth-data-tools==1.28.2", "led-ble==1.1.7"]
 }
@@ -7,6 +7,6 @@
   "integration_type": "hub",
   "iot_class": "cloud_push",
   "loggers": ["letpot"],
-  "quality_scale": "silver",
+  "quality_scale": "bronze",
   "requirements": ["letpot==0.6.2"]
 }
@@ -41,10 +41,7 @@ rules:
   docs-installation-parameters: done
   entity-unavailable: done
   integration-owner: done
-  log-when-unavailable:
-    status: done
-    comment: |
-      Logging handled by library when (un)available once (push) or coordinator (pull).
+  log-when-unavailable: todo
   parallel-updates: done
   reauthentication-flow: done
   test-coverage: done
@@ -196,11 +196,11 @@ class LocalTodoListEntity(TodoListEntity):
         item_idx: dict[str, int] = {itm.uid: idx for idx, itm in enumerate(todos)}
         if uid not in item_idx:
             raise HomeAssistantError(
-                f"Item '{uid}' not found in todo list {self.entity_id}"
+                "Item '{uid}' not found in todo list {self.entity_id}"
             )
         if previous_uid and previous_uid not in item_idx:
             raise HomeAssistantError(
-                f"Item '{previous_uid}' not found in todo list {self.entity_id}"
+                "Item '{previous_uid}' not found in todo list {self.entity_id}"
             )
         dst_idx = item_idx[previous_uid] + 1 if previous_uid else 0
         src_idx = item_idx[uid]
@@ -88,17 +88,6 @@ DISCOVERY_SCHEMAS = [
         entity_class=MatterBinarySensor,
         required_attributes=(clusters.OccupancySensing.Attributes.Occupancy,),
     ),
-    MatterDiscoverySchema(
-        platform=Platform.BINARY_SENSOR,
-        entity_description=MatterBinarySensorEntityDescription(
-            key="ThermostatOccupancySensor",
-            device_class=BinarySensorDeviceClass.OCCUPANCY,
-            # The first bit = if occupied
-            device_to_ha=lambda x: (x & 1 == 1) if x is not None else None,
-        ),
-        entity_class=MatterBinarySensor,
-        required_attributes=(clusters.Thermostat.Attributes.Occupancy,),
-    ),
     MatterDiscoverySchema(
         platform=Platform.BINARY_SENSOR,
         entity_description=MatterBinarySensorEntityDescription(
@@ -146,13 +146,6 @@
           "off": "mdi:lock-off"
         }
       },
-      "speaker_mute": {
-        "default": "mdi:volume-high",
-        "state": {
-          "on": "mdi:volume-mute",
-          "off": "mdi:volume-high"
-        }
-      },
       "evse_charging_switch": {
         "default": "mdi:ev-station"
       },
@@ -176,7 +176,6 @@ DISCOVERY_SCHEMAS = [
         ),
         entity_class=MatterNumber,
         required_attributes=(clusters.LevelControl.Attributes.OnLevel,),
-        not_device_type=(device_types.Speaker,),
         # allow None value to account for 'default' value
         allow_none_value=True,
     ),
@@ -152,7 +152,6 @@ PUMP_CONTROL_MODE_MAP = {
     clusters.PumpConfigurationAndControl.Enums.ControlModeEnum.kUnknownEnumValue: None,
 }
 
-HUMIDITY_SCALING_FACTOR = 100
 TEMPERATURE_SCALING_FACTOR = 100
 
 
@@ -309,7 +308,7 @@ DISCOVERY_SCHEMAS = [
             key="TemperatureSensor",
             native_unit_of_measurement=UnitOfTemperature.CELSIUS,
             device_class=SensorDeviceClass.TEMPERATURE,
-            device_to_ha=lambda x: x / TEMPERATURE_SCALING_FACTOR,
+            device_to_ha=lambda x: x / 100,
             state_class=SensorStateClass.MEASUREMENT,
         ),
         entity_class=MatterSensor,
@@ -345,7 +344,7 @@ DISCOVERY_SCHEMAS = [
             key="HumiditySensor",
             native_unit_of_measurement=PERCENTAGE,
             device_class=SensorDeviceClass.HUMIDITY,
-            device_to_ha=lambda x: x / HUMIDITY_SCALING_FACTOR,
+            device_to_ha=lambda x: x / 100,
             state_class=SensorStateClass.MEASUREMENT,
         ),
         entity_class=MatterSensor,
@@ -1137,7 +1136,7 @@ DISCOVERY_SCHEMAS = [
             key="ThermostatLocalTemperature",
             native_unit_of_measurement=UnitOfTemperature.CELSIUS,
             device_class=SensorDeviceClass.TEMPERATURE,
-            device_to_ha=lambda x: x / TEMPERATURE_SCALING_FACTOR,
+            device_to_ha=lambda x: x / 100,
             state_class=SensorStateClass.MEASUREMENT,
         ),
         entity_class=MatterSensor,
@@ -514,9 +514,6 @@
       "power": {
         "name": "Power"
       },
-      "speaker_mute": {
-        "name": "Mute"
-      },
       "child_lock": {
         "name": "Child lock"
       },
@@ -203,6 +203,7 @@ DISCOVERY_SCHEMAS = [
             device_types.Refrigerator,
             device_types.RoboticVacuumCleaner,
             device_types.RoomAirConditioner,
+            device_types.Speaker,
         ),
     ),
     MatterDiscoverySchema(
@@ -241,24 +242,6 @@ DISCOVERY_SCHEMAS = [
             device_types.Speaker,
         ),
     ),
-    MatterDiscoverySchema(
-        platform=Platform.SWITCH,
-        entity_description=MatterNumericSwitchEntityDescription(
-            key="MatterMuteToggle",
-            translation_key="speaker_mute",
-            device_to_ha={
-                True: False,  # True means volume is on, so HA should show mute as off
-                False: True,  # False means volume is off (muted), so HA should show mute as on
-            }.get,
-            ha_to_device={
-                False: True,  # HA showing mute as off means volume is on, so send True
-                True: False,  # HA showing mute as on means volume is off (muted), so send False
-            }.get,
-        ),
-        entity_class=MatterNumericSwitch,
-        required_attributes=(clusters.OnOff.Attributes.OnOff,),
-        device_type=(device_types.Speaker,),
-    ),
     MatterDiscoverySchema(
         platform=Platform.SWITCH,
         entity_description=MatterNumericSwitchEntityDescription(
@@ -1,16 +1,7 @@
-"""Model Context Protocol transport protocol for Streamable HTTP and SSE.
+"""Model Context Protocol transport protocol for Server Sent Events (SSE).
 
-This registers HTTP endpoints that support the Streamable HTTP protocol as
-well as the older SSE as a transport layer.
+This registers HTTP endpoints that supports SSE as a transport layer
+for the Model Context Protocol. There are two HTTP endpoints:
 
-The Streamable HTTP protocol uses a single HTTP endpoint:
-
-- /api/mcp_server: The Streamable HTTP endpoint currently implements the
-  stateless protocol for simplicity. This receives client requests and
-  sends them to the MCP server, then waits for a response to send back to
-  the client.
-
-The older SSE protocol has two HTTP endpoints:
-
 - /mcp_server/sse: The SSE endpoint that is used to establish a session
   with the client and glue to the MCP server. This is used to push responses
@@ -23,9 +14,6 @@ The older SSE protocol has two HTTP endpoints:
 See https://modelcontextprotocol.io/docs/concepts/transports
 """
 
-import asyncio
-from dataclasses import dataclass
-from http import HTTPStatus
 import logging
 
 from aiohttp import web
@@ -33,14 +21,13 @@ from aiohttp.web_exceptions import HTTPBadRequest, HTTPNotFound
 from aiohttp_sse import sse_response
 import anyio
 from anyio.streams.memory import MemoryObjectReceiveStream, MemoryObjectSendStream
-from mcp import JSONRPCRequest, types
+from mcp import types
-from mcp.server import InitializationOptions, Server
 from mcp.shared.message import SessionMessage
 
 from homeassistant.components import conversation
 from homeassistant.components.http import KEY_HASS, HomeAssistantView
 from homeassistant.const import CONF_LLM_HASS_API
-from homeassistant.core import Context, HomeAssistant, callback
+from homeassistant.core import HomeAssistant, callback
 from homeassistant.helpers import llm
 
 from .const import DOMAIN
@@ -50,14 +37,6 @@ from .types import MCPServerConfigEntry
 
 _LOGGER = logging.getLogger(__name__)
 
-# Streamable HTTP endpoint
-STREAMABLE_API = f"/api/{DOMAIN}"
-TIMEOUT = 60  # Seconds
-
-# Content types
-CONTENT_TYPE_JSON = "application/json"
-
-# Legacy SSE endpoint
 SSE_API = f"/{DOMAIN}/sse"
 MESSAGES_API = f"/{DOMAIN}/messages/{{session_id}}"
 
@@ -67,7 +46,6 @@ def async_register(hass: HomeAssistant) -> None:
     """Register the websocket API."""
     hass.http.register_view(ModelContextProtocolSSEView())
     hass.http.register_view(ModelContextProtocolMessagesView())
-    hass.http.register_view(ModelContextProtocolStreamableView())
 
 
 def async_get_config_entry(hass: HomeAssistant) -> MCPServerConfigEntry:
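The removed docstring above describes the Streamable HTTP endpoint at `/api/mcp_server`: one JSON-RPC message per POST, with the MCP server's reply returned as JSON. A minimal client sketch under stated assumptions — the base URL, the access token, and the `ping` request body are placeholders for illustration, not values taken from this diff:

```python
import asyncio

import aiohttp

# Hypothetical values for illustration only.
HASS_URL = "http://homeassistant.local:8123"
TOKEN = "YOUR_LONG_LIVED_ACCESS_TOKEN"


async def main() -> None:
    # A simple JSON-RPC request; "ping" is used here only to show the shape
    # of a request/response round trip against the stateless endpoint.
    payload = {"jsonrpc": "2.0", "id": 1, "method": "ping"}
    headers = {
        "Authorization": f"Bearer {TOKEN}",
        "Accept": "application/json",
    }
    async with aiohttp.ClientSession() as session:
        # json= sets the Content-Type to application/json automatically.
        async with session.post(
            f"{HASS_URL}/api/mcp_server", json=payload, headers=headers
        ) as resp:
            print(resp.status, await resp.json())


asyncio.run(main())
```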
@@ -88,52 +66,6 @@ def async_get_config_entry(hass: HomeAssistant) -> MCPServerConfigEntry:
|
|||||||
return config_entries[0]
|
return config_entries[0]
|
||||||
|
|
||||||
|
|
||||||
@dataclass
|
|
||||||
class Streams:
|
|
||||||
"""Pairs of streams for MCP server communication."""
|
|
||||||
|
|
||||||
# The MCP server reads from the read stream. The HTTP handler receives
|
|
||||||
# incoming client messages and writes the to the read_stream_writer.
|
|
||||||
read_stream: MemoryObjectReceiveStream[SessionMessage | Exception]
|
|
||||||
read_stream_writer: MemoryObjectSendStream[SessionMessage | Exception]
|
|
||||||
|
|
||||||
# The MCP server writes to the write stream. The HTTP handler reads from
|
|
||||||
# the write stream and sends messages to the client.
|
|
||||||
write_stream: MemoryObjectSendStream[SessionMessage]
|
|
||||||
write_stream_reader: MemoryObjectReceiveStream[SessionMessage]
|
|
||||||
|
|
||||||
|
|
||||||
def create_streams() -> Streams:
|
|
||||||
"""Create a new pair of streams for MCP server communication."""
|
|
||||||
read_stream_writer, read_stream = anyio.create_memory_object_stream(0)
|
|
||||||
write_stream, write_stream_reader = anyio.create_memory_object_stream(0)
|
|
||||||
return Streams(
|
|
||||||
read_stream=read_stream,
|
|
||||||
read_stream_writer=read_stream_writer,
|
|
||||||
write_stream=write_stream,
|
|
||||||
write_stream_reader=write_stream_reader,
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
async def create_mcp_server(
|
|
||||||
hass: HomeAssistant, context: Context, entry: MCPServerConfigEntry
|
|
||||||
) -> tuple[Server, InitializationOptions]:
|
|
||||||
"""Initialize the MCP server to ensure it's ready to handle requests."""
|
|
||||||
llm_context = llm.LLMContext(
|
|
||||||
platform=DOMAIN,
|
|
||||||
context=context,
|
|
||||||
language="*",
|
|
||||||
assistant=conversation.DOMAIN,
|
|
||||||
device_id=None,
|
|
||||||
)
|
|
||||||
llm_api_id = entry.data[CONF_LLM_HASS_API]
|
|
||||||
server = await create_server(hass, llm_api_id, llm_context)
|
|
||||||
options = await hass.async_add_executor_job(
|
|
||||||
server.create_initialization_options # Reads package for version info
|
|
||||||
)
|
|
||||||
return server, options
|
|
||||||
|
|
||||||
|
|
||||||
class ModelContextProtocolSSEView(HomeAssistantView):
|
class ModelContextProtocolSSEView(HomeAssistantView):
|
||||||
"""Model Context Protocol SSE endpoint."""
|
"""Model Context Protocol SSE endpoint."""
|
||||||
|
|
||||||
@@ -154,12 +86,30 @@ class ModelContextProtocolSSEView(HomeAssistantView):
|
|||||||
entry = async_get_config_entry(hass)
|
entry = async_get_config_entry(hass)
|
||||||
session_manager = entry.runtime_data
|
session_manager = entry.runtime_data
|
||||||
|
|
||||||
server, options = await create_mcp_server(hass, self.context(request), entry)
|
context = llm.LLMContext(
|
||||||
streams = create_streams()
|
platform=DOMAIN,
|
||||||
|
context=self.context(request),
|
||||||
|
language="*",
|
||||||
|
assistant=conversation.DOMAIN,
|
||||||
|
device_id=None,
|
||||||
|
)
|
||||||
|
llm_api_id = entry.data[CONF_LLM_HASS_API]
|
||||||
|
server = await create_server(hass, llm_api_id, context)
|
||||||
|
options = await hass.async_add_executor_job(
|
||||||
|
server.create_initialization_options # Reads package for version info
|
||||||
|
)
|
||||||
|
|
||||||
|
read_stream: MemoryObjectReceiveStream[SessionMessage | Exception]
|
||||||
|
read_stream_writer: MemoryObjectSendStream[SessionMessage | Exception]
|
||||||
|
read_stream_writer, read_stream = anyio.create_memory_object_stream(0)
|
||||||
|
|
||||||
|
write_stream: MemoryObjectSendStream[SessionMessage]
|
||||||
|
write_stream_reader: MemoryObjectReceiveStream[SessionMessage]
|
||||||
|
write_stream, write_stream_reader = anyio.create_memory_object_stream(0)
|
||||||
|
|
||||||
async with (
|
async with (
|
||||||
sse_response(request) as response,
|
sse_response(request) as response,
|
||||||
session_manager.create(Session(streams.read_stream_writer)) as session_id,
|
session_manager.create(Session(read_stream_writer)) as session_id,
|
||||||
):
|
):
|
||||||
session_uri = MESSAGES_API.format(session_id=session_id)
|
session_uri = MESSAGES_API.format(session_id=session_id)
|
||||||
_LOGGER.debug("Sending SSE endpoint: %s", session_uri)
|
_LOGGER.debug("Sending SSE endpoint: %s", session_uri)
|
||||||
@@ -167,7 +117,7 @@ class ModelContextProtocolSSEView(HomeAssistantView):
|
|||||||
|
|
||||||
async def sse_reader() -> None:
|
async def sse_reader() -> None:
|
||||||
"""Forward MCP server responses to the client."""
|
"""Forward MCP server responses to the client."""
|
||||||
async for session_message in streams.write_stream_reader:
|
async for session_message in write_stream_reader:
|
||||||
_LOGGER.debug("Sending SSE message: %s", session_message)
|
_LOGGER.debug("Sending SSE message: %s", session_message)
|
||||||
await response.send(
|
await response.send(
|
||||||
session_message.message.model_dump_json(
|
session_message.message.model_dump_json(
|
||||||
@@ -178,7 +128,7 @@ class ModelContextProtocolSSEView(HomeAssistantView):
|
|||||||
|
|
||||||
async with anyio.create_task_group() as tg:
|
async with anyio.create_task_group() as tg:
|
||||||
tg.start_soon(sse_reader)
|
tg.start_soon(sse_reader)
|
||||||
await server.run(streams.read_stream, streams.write_stream, options)
|
await server.run(read_stream, write_stream, options)
|
||||||
|
|
||||||
return response
|
return response
|
||||||
|
|
||||||
@@ -218,64 +168,3 @@ class ModelContextProtocolMessagesView(HomeAssistantView):
|
|||||||
_LOGGER.debug("Received client message: %s", message)
|
_LOGGER.debug("Received client message: %s", message)
|
||||||
await session.read_stream_writer.send(SessionMessage(message))
|
await session.read_stream_writer.send(SessionMessage(message))
|
||||||
return web.Response(status=200)
|
return web.Response(status=200)
|
||||||
|
|
||||||
|
|
||||||
class ModelContextProtocolStreamableView(HomeAssistantView):
|
|
||||||
"""Model Context Protocol Streamable HTTP endpoint."""
|
|
||||||
|
|
||||||
name = f"{DOMAIN}:streamable"
|
|
||||||
url = STREAMABLE_API
|
|
||||||
|
|
||||||
async def get(self, request: web.Request) -> web.StreamResponse:
|
|
||||||
"""Handle unsupported methods."""
|
|
||||||
return web.Response(
|
|
||||||
status=HTTPStatus.METHOD_NOT_ALLOWED, text="Only POST method is supported"
|
|
||||||
)
|
|
||||||
|
|
||||||
async def post(self, request: web.Request) -> web.StreamResponse:
|
|
||||||
"""Process JSON-RPC messages for the Model Context Protocol."""
|
|
||||||
hass = request.app[KEY_HASS]
|
|
||||||
entry = async_get_config_entry(hass)
|
|
||||||
|
|
||||||
# The request must include a JSON-RPC message
|
|
||||||
if CONTENT_TYPE_JSON not in request.headers.get("accept", ""):
|
|
||||||
raise HTTPBadRequest(text=f"Client must accept {CONTENT_TYPE_JSON}")
|
|
||||||
if request.content_type != CONTENT_TYPE_JSON:
|
|
||||||
raise HTTPBadRequest(text=f"Content-Type must be {CONTENT_TYPE_JSON}")
|
|
||||||
try:
|
|
||||||
json_data = await request.json()
|
|
||||||
message = types.JSONRPCMessage.model_validate(json_data)
|
|
||||||
except ValueError as err:
|
|
||||||
_LOGGER.debug("Failed to parse message as JSON-RPC message: %s", err)
|
|
||||||
raise HTTPBadRequest(text="Request must be a JSON-RPC message") from err
|
|
||||||
|
|
||||||
_LOGGER.debug("Received client message: %s", message)
|
|
||||||
|
|
||||||
# For notifications and responses only, return 202 Accepted
|
|
||||||
if not isinstance(message.root, JSONRPCRequest):
|
|
||||||
_LOGGER.debug("Notification or response received, returning 202")
|
|
||||||
return web.Response(status=HTTPStatus.ACCEPTED)
|
|
||||||
|
|
||||||
# The MCP server runs as a background task for the duration of the
|
|
||||||
# request. We open a buffered stream pair to communicate with it. The
|
|
||||||
# request is sent to the MCP server and we wait for a single response
|
|
||||||
# then shut down the server.
|
|
||||||
server, options = await create_mcp_server(hass, self.context(request), entry)
|
|
||||||
streams = create_streams()
|
|
||||||
|
|
||||||
async def run_server() -> None:
|
|
||||||
await server.run(
|
|
||||||
streams.read_stream, streams.write_stream, options, stateless=True
|
|
||||||
)
|
|
||||||
|
|
||||||
async with asyncio.timeout(TIMEOUT), anyio.create_task_group() as tg:
|
|
||||||
tg.start_soon(run_server)
|
|
||||||
|
|
||||||
await streams.read_stream_writer.send(SessionMessage(message))
|
|
||||||
session_message = await anext(streams.write_stream_reader)
|
|
||||||
tg.cancel_scope.cancel()
|
|
||||||
|
|
||||||
_LOGGER.debug("Sending response: %s", session_message)
|
|
||||||
return web.json_response(
|
|
||||||
data=session_message.message.model_dump(by_alias=True, exclude_none=True),
|
|
||||||
)
|
|
||||||
|
@@ -48,6 +48,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: MealieConfigEntry) -> bo
         ),
     )
     try:
+        await client.define_household_support()
         about = await client.get_about()
         version = create_version(about.version)
     except MealieAuthenticationError as error:
@@ -19,4 +19,4 @@ ATTR_NOTE_TEXT = "note_text"
 ATTR_SEARCH_TERMS = "search_terms"
 ATTR_RESULT_LIMIT = "result_limit"
 
-MIN_REQUIRED_MEALIE_VERSION = AwesomeVersion("v2.0.0")
+MIN_REQUIRED_MEALIE_VERSION = AwesomeVersion("v1.0.0")
@@ -7,5 +7,5 @@
   "integration_type": "service",
   "iot_class": "local_polling",
   "quality_scale": "silver",
-  "requirements": ["aiomealie==1.0.0"]
+  "requirements": ["aiomealie==0.11.0"]
 }
|
@@ -50,7 +50,7 @@ rules:
   docs-data-update: done
   docs-examples: done
   docs-known-limitations: todo
-  docs-supported-devices: done
+  docs-supported-devices: todo
   docs-supported-functions: done
   docs-troubleshooting: todo
   docs-use-cases: todo
|
@@ -6,5 +6,5 @@
   "documentation": "https://www.home-assistant.io/integrations/melcloud",
   "iot_class": "cloud_polling",
   "loggers": ["pymelcloud"],
-  "requirements": ["python-melcloud==0.1.2"]
+  "requirements": ["python-melcloud==0.1.0"]
 }
|
@@ -7,5 +7,5 @@
   "documentation": "https://www.home-assistant.io/integrations/mill",
   "iot_class": "local_polling",
   "loggers": ["mill", "mill_local"],
-  "requirements": ["millheater==0.14.0", "mill-local==0.3.0"]
+  "requirements": ["millheater==0.13.1", "mill-local==0.3.0"]
 }
|
@@ -11,9 +11,16 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
     """Set up Min/Max from a config entry."""
     await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
 
+    entry.async_on_unload(entry.add_update_listener(config_entry_update_listener))
+
     return True
 
 
+async def config_entry_update_listener(hass: HomeAssistant, entry: ConfigEntry) -> None:
+    """Update listener, called when the config entry options are changed."""
+    await hass.config_entries.async_reload(entry.entry_id)
+
+
 async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
     """Unload a config entry."""
     return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
|
@@ -71,7 +71,6 @@ class ConfigFlowHandler(SchemaConfigFlowHandler, domain=DOMAIN):
 
     config_flow = CONFIG_FLOW
     options_flow = OPTIONS_FLOW
-    options_flow_reloads = True
 
     def async_config_entry_title(self, options: Mapping[str, Any]) -> str:
         """Return config entry title."""
|
@@ -66,7 +66,6 @@ from homeassistant.config_entries import (
 from homeassistant.const import (
     ATTR_CONFIGURATION_URL,
     ATTR_HW_VERSION,
-    ATTR_MANUFACTURER,
     ATTR_MODEL,
     ATTR_MODEL_ID,
     ATTR_NAME,
@@ -3051,7 +3050,6 @@ MQTT_DEVICE_PLATFORM_FIELDS = {
     ),
     ATTR_MODEL: PlatformField(selector=TEXT_SELECTOR, required=False),
     ATTR_MODEL_ID: PlatformField(selector=TEXT_SELECTOR, required=False),
-    ATTR_MANUFACTURER: PlatformField(selector=TEXT_SELECTOR, required=False),
     ATTR_CONFIGURATION_URL: PlatformField(
         selector=TEXT_SELECTOR, required=False, validator=cv.url, error="invalid_url"
     ),
|
@@ -188,10 +188,7 @@ class MqttLock(MqttEntity, LockEntity):
             return
         if payload == self._config[CONF_PAYLOAD_RESET]:
             # Reset the state to `unknown`
-            self._attr_is_locked = self._attr_is_locking = None
-            self._attr_is_unlocking = None
-            self._attr_is_open = self._attr_is_opening = None
-            self._attr_is_jammed = None
+            self._attr_is_locked = None
         elif payload in self._valid_states:
             self._attr_is_locked = payload == self._config[CONF_STATE_LOCKED]
             self._attr_is_locking = payload == self._config[CONF_STATE_LOCKING]
|
@@ -165,15 +165,13 @@
           "name": "[%key:common::config_flow::data::name%]",
           "configuration_url": "Configuration URL",
           "model": "Model",
-          "model_id": "Model ID",
-          "manufacturer": "Manufacturer"
+          "model_id": "Model ID"
         },
         "data_description": {
           "name": "The name of the manually added MQTT device.",
           "configuration_url": "A link to the webpage that can manage the configuration of this device. Can be either a 'http://', 'https://' or an internal 'homeassistant://' URL.",
           "model": "E.g. 'Cleanmaster Pro'.",
-          "model_id": "E.g. '123NK2PRO'.",
-          "manufacturer": "E.g. Cleanmaster Ltd."
+          "model_id": "E.g. '123NK2PRO'."
         },
         "sections": {
           "advanced_settings": {
|
@@ -8,6 +8,6 @@
   "iot_class": "cloud_polling",
   "loggers": ["pynordpool"],
   "quality_scale": "platinum",
-  "requirements": ["pynordpool==0.3.1"],
+  "requirements": ["pynordpool==0.3.0"],
   "single_config_entry": true
 }
|
@@ -7,5 +7,5 @@
   "iot_class": "cloud_push",
   "loggers": ["aionfty"],
   "quality_scale": "platinum",
-  "requirements": ["aiontfy==0.6.1"]
+  "requirements": ["aiontfy==0.6.0"]
 }
|
@@ -163,7 +163,7 @@ SENSOR_DESCRIPTIONS: tuple[NtfySensorEntityDescription, ...] = (
         device_class=SensorDeviceClass.DATA_SIZE,
         native_unit_of_measurement=UnitOfInformation.BYTES,
         suggested_unit_of_measurement=UnitOfInformation.MEBIBYTES,
-        suggested_display_precision=2,
+        suggested_display_precision=0,
     ),
     NtfySensorEntityDescription(
         key=NtfySensor.ATTACHMENT_TOTAL_SIZE_REMAINING,
@@ -172,7 +172,7 @@ SENSOR_DESCRIPTIONS: tuple[NtfySensorEntityDescription, ...] = (
         device_class=SensorDeviceClass.DATA_SIZE,
         native_unit_of_measurement=UnitOfInformation.BYTES,
         suggested_unit_of_measurement=UnitOfInformation.MEBIBYTES,
-        suggested_display_precision=2,
+        suggested_display_precision=0,
         entity_registry_enabled_default=False,
     ),
     NtfySensorEntityDescription(
|
@@ -95,7 +95,6 @@ def _convert_content(
         return ollama.Message(
             role=MessageRole.ASSISTANT.value,
             content=chat_content.content,
-            thinking=chat_content.thinking_content,
             tool_calls=[
                 ollama.Message.ToolCall(
                     function=ollama.Message.ToolCall.Function(
@@ -104,8 +103,7 @@ def _convert_content(
                     )
                 )
                 for tool_call in chat_content.tool_calls or ()
-            ]
-            or None,
+            ],
         )
     if isinstance(chat_content, conversation.UserContent):
         images: list[ollama.Image] = []
@@ -164,8 +162,6 @@ async def _transform_stream(
             ]
         if (content := response_message.get("content")) is not None:
             chunk["content"] = content
-        if (thinking := response_message.get("thinking")) is not None:
-            chunk["thinking_content"] = thinking
         if response_message.get("done"):
             new_msg = True
         yield chunk
|
@@ -35,8 +35,7 @@ from .const import CONF_DELETE_PERMANENTLY, DATA_BACKUP_AGENT_LISTENERS, DOMAIN
 from .coordinator import OneDriveConfigEntry
 
 _LOGGER = logging.getLogger(__name__)
-MAX_CHUNK_SIZE = 60 * 1024 * 1024  # largest chunk possible, must be <= 60 MiB
+UPLOAD_CHUNK_SIZE = 32 * 320 * 1024  # 10.4MB
-TARGET_CHUNKS = 20
 TIMEOUT = ClientTimeout(connect=10, total=43200)  # 12 hours
 METADATA_VERSION = 2
 CACHE_TTL = 300
@@ -162,21 +161,11 @@ class OneDriveBackupAgent(BackupAgent):
             self._folder_id,
             await open_stream(),
         )
 
-        # determine chunk based on target chunks
-        upload_chunk_size = backup.size / TARGET_CHUNKS
-        # find the nearest multiple of 320KB
-        upload_chunk_size = round(upload_chunk_size / (320 * 1024)) * (320 * 1024)
-        # limit to max chunk size
-        upload_chunk_size = min(upload_chunk_size, MAX_CHUNK_SIZE)
-        # ensure minimum chunk size of 320KB
-        upload_chunk_size = max(upload_chunk_size, 320 * 1024)
-
         try:
             backup_file = await LargeFileUploadClient.upload(
                 self._token_function,
                 file,
-                upload_chunk_size=upload_chunk_size,
+                upload_chunk_size=UPLOAD_CHUNK_SIZE,
                 session=async_get_clientsession(self._hass),
             )
         except HashMismatchError as err:
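The chunk-size selection removed in the hunk above derives a per-request upload chunk from the backup size: one twentieth of the backup, rounded to the nearest 320 KiB multiple and clamped between 320 KiB and 60 MiB. A minimal standalone sketch of that calculation, with constant names mirroring the diff and an illustrative example value only:

```python
# Constants as they appear in the hunk above.
MAX_CHUNK_SIZE = 60 * 1024 * 1024  # largest chunk possible, must be <= 60 MiB
TARGET_CHUNKS = 20
CHUNK_MULTIPLE = 320 * 1024  # upload chunks must be multiples of 320 KiB


def pick_chunk_size(backup_size: int) -> int:
    """Return an upload chunk size in bytes for a backup of the given size."""
    chunk = backup_size / TARGET_CHUNKS
    chunk = round(chunk / CHUNK_MULTIPLE) * CHUNK_MULTIPLE  # nearest 320 KiB multiple
    chunk = min(chunk, MAX_CHUNK_SIZE)  # cap at the largest allowed chunk
    return int(max(chunk, CHUNK_MULTIPLE))  # never go below 320 KiB


# Example: a 1 GiB backup targets ~53.7 MB per chunk, i.e. 164 * 320 KiB.
print(pick_chunk_size(1024**3))  # 53739520
```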
|
@@ -8,5 +8,5 @@
   "iot_class": "cloud_polling",
   "loggers": ["opower"],
   "quality_scale": "bronze",
-  "requirements": ["opower==0.15.6"]
+  "requirements": ["opower==0.15.5"]
 }
|
@@ -1,61 +0,0 @@
|
|||||||
{
|
|
||||||
"entity": {
|
|
||||||
"sensor": {
|
|
||||||
"pressure": {
|
|
||||||
"default": "mdi:tooth-outline",
|
|
||||||
"state": {
|
|
||||||
"high": "mdi:tooth",
|
|
||||||
"low": "mdi:alert",
|
|
||||||
"power_button_pressed": "mdi:power",
|
|
||||||
"button_pressed": "mdi:radiobox-marked"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"sector": {
|
|
||||||
"default": "mdi:circle-outline",
|
|
||||||
"state": {
|
|
||||||
"sector_1": "mdi:circle-slice-2",
|
|
||||||
"sector_2": "mdi:circle-slice-4",
|
|
||||||
"sector_3": "mdi:circle-slice-6",
|
|
||||||
"sector_4": "mdi:circle-slice-8",
|
|
||||||
"success": "mdi:check-circle-outline"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"toothbrush_state": {
|
|
||||||
"default": "mdi:toothbrush-electric",
|
|
||||||
"state": {
|
|
||||||
"initializing": "mdi:sync",
|
|
||||||
"idle": "mdi:toothbrush-electric",
|
|
||||||
"running": "mdi:waveform",
|
|
||||||
"charging": "mdi:battery-charging",
|
|
||||||
"setup": "mdi:wrench",
|
|
||||||
"flight_menu": "mdi:airplane",
|
|
||||||
"selection_menu": "mdi:menu",
|
|
||||||
"off": "mdi:power",
|
|
||||||
"sleeping": "mdi:sleep",
|
|
||||||
"transport": "mdi:dolly"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"number_of_sectors": {
|
|
||||||
"default": "mdi:chart-pie"
|
|
||||||
},
|
|
||||||
"mode": {
|
|
||||||
"default": "mdi:toothbrush-paste",
|
|
||||||
"state": {
|
|
||||||
"daily_clean": "mdi:repeat-once",
|
|
||||||
"sensitive": "mdi:feather",
|
|
||||||
"gum_care": "mdi:tooth-outline",
|
|
||||||
"intense": "mdi:shape-circle-plus",
|
|
||||||
"whitening": "mdi:shimmer",
|
|
||||||
"whiten": "mdi:shimmer",
|
|
||||||
"tongue_cleaning": "mdi:gate-and",
|
|
||||||
"super_sensitive": "mdi:feather",
|
|
||||||
"massage": "mdi:spa",
|
|
||||||
"deep_clean": "mdi:water",
|
|
||||||
"turbo": "mdi:car-turbocharger",
|
|
||||||
"off": "mdi:power",
|
|
||||||
"settings": "mdi:cog-outline"
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
@@ -3,13 +3,6 @@
|
|||||||
from __future__ import annotations
|
from __future__ import annotations
|
||||||
|
|
||||||
from oralb_ble import OralBSensor, SensorUpdate
|
from oralb_ble import OralBSensor, SensorUpdate
|
||||||
from oralb_ble.parser import (
|
|
||||||
IO_SERIES_MODES,
|
|
||||||
PRESSURE,
|
|
||||||
SECTOR_MAP,
|
|
||||||
SMART_SERIES_MODES,
|
|
||||||
STATES,
|
|
||||||
)
|
|
||||||
|
|
||||||
from homeassistant.components.bluetooth.passive_update_processor import (
|
from homeassistant.components.bluetooth.passive_update_processor import (
|
||||||
PassiveBluetoothDataProcessor,
|
PassiveBluetoothDataProcessor,
|
||||||
@@ -46,8 +39,6 @@ SENSOR_DESCRIPTIONS: dict[str, SensorEntityDescription] = {
|
|||||||
key=OralBSensor.SECTOR,
|
key=OralBSensor.SECTOR,
|
||||||
translation_key="sector",
|
translation_key="sector",
|
||||||
entity_category=EntityCategory.DIAGNOSTIC,
|
entity_category=EntityCategory.DIAGNOSTIC,
|
||||||
options=[v.replace(" ", "_") for v in set(SECTOR_MAP.values()) | {"no_sector"}],
|
|
||||||
device_class=SensorDeviceClass.ENUM,
|
|
||||||
),
|
),
|
||||||
OralBSensor.NUMBER_OF_SECTORS: SensorEntityDescription(
|
OralBSensor.NUMBER_OF_SECTORS: SensorEntityDescription(
|
||||||
key=OralBSensor.NUMBER_OF_SECTORS,
|
key=OralBSensor.NUMBER_OF_SECTORS,
|
||||||
@@ -62,26 +53,16 @@ SENSOR_DESCRIPTIONS: dict[str, SensorEntityDescription] = {
|
|||||||
),
|
),
|
||||||
OralBSensor.TOOTHBRUSH_STATE: SensorEntityDescription(
|
OralBSensor.TOOTHBRUSH_STATE: SensorEntityDescription(
|
||||||
key=OralBSensor.TOOTHBRUSH_STATE,
|
key=OralBSensor.TOOTHBRUSH_STATE,
|
||||||
translation_key="toothbrush_state",
|
|
||||||
options=[v.replace(" ", "_") for v in set(STATES.values())],
|
|
||||||
device_class=SensorDeviceClass.ENUM,
|
|
||||||
name=None,
|
name=None,
|
||||||
),
|
),
|
||||||
OralBSensor.PRESSURE: SensorEntityDescription(
|
OralBSensor.PRESSURE: SensorEntityDescription(
|
||||||
key=OralBSensor.PRESSURE,
|
key=OralBSensor.PRESSURE,
|
||||||
translation_key="pressure",
|
translation_key="pressure",
|
||||||
options=[v.replace(" ", "_") for v in set(PRESSURE.values()) | {"low"}],
|
|
||||||
device_class=SensorDeviceClass.ENUM,
|
|
||||||
),
|
),
|
||||||
OralBSensor.MODE: SensorEntityDescription(
|
OralBSensor.MODE: SensorEntityDescription(
|
||||||
key=OralBSensor.MODE,
|
key=OralBSensor.MODE,
|
||||||
translation_key="mode",
|
translation_key="mode",
|
||||||
entity_category=EntityCategory.DIAGNOSTIC,
|
entity_category=EntityCategory.DIAGNOSTIC,
|
||||||
options=[
|
|
||||||
v.replace(" ", "_")
|
|
||||||
for v in set(IO_SERIES_MODES.values()) | set(SMART_SERIES_MODES.values())
|
|
||||||
],
|
|
||||||
device_class=SensorDeviceClass.ENUM,
|
|
||||||
),
|
),
|
||||||
OralBSensor.SIGNAL_STRENGTH: SensorEntityDescription(
|
OralBSensor.SIGNAL_STRENGTH: SensorEntityDescription(
|
||||||
key=OralBSensor.SIGNAL_STRENGTH,
|
key=OralBSensor.SIGNAL_STRENGTH,
|
||||||
@@ -153,15 +134,7 @@ class OralBBluetoothSensorEntity(
|
|||||||
@property
|
@property
|
||||||
def native_value(self) -> str | int | None:
|
def native_value(self) -> str | int | None:
|
||||||
"""Return the native value."""
|
"""Return the native value."""
|
||||||
value = self.processor.entity_data.get(self.entity_key)
|
return self.processor.entity_data.get(self.entity_key)
|
||||||
if isinstance(value, str):
|
|
||||||
value = value.replace(" ", "_")
|
|
||||||
if (
|
|
||||||
self.entity_description.options is not None
|
|
||||||
and value not in self.entity_description.options
|
|
||||||
): # append unknown values to enum
|
|
||||||
self.entity_description.options.append(value)
|
|
||||||
return value
|
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def available(self) -> bool:
|
def available(self) -> bool:
|
||||||
|
@@ -22,15 +22,7 @@
|
|||||||
"entity": {
|
"entity": {
|
||||||
"sensor": {
|
"sensor": {
|
||||||
"sector": {
|
"sector": {
|
||||||
"name": "Sector",
|
"name": "Sector"
|
||||||
"state": {
|
|
||||||
"no_sector": "No sector",
|
|
||||||
"sector_1": "Sector 1",
|
|
||||||
"sector_2": "Sector 2",
|
|
||||||
"sector_3": "Sector 3",
|
|
||||||
"sector_4": "Sector 4",
|
|
||||||
"success": "Success"
|
|
||||||
}
|
|
||||||
},
|
},
|
||||||
"number_of_sectors": {
|
"number_of_sectors": {
|
||||||
"name": "Number of sectors"
|
"name": "Number of sectors"
|
||||||
@@ -39,48 +31,10 @@
|
|||||||
"name": "Sector timer"
|
"name": "Sector timer"
|
||||||
},
|
},
|
||||||
"pressure": {
|
"pressure": {
|
||||||
"name": "Pressure",
|
"name": "Pressure"
|
||||||
"state": {
|
|
||||||
"normal": "[%key:common::state::normal%]",
|
|
||||||
"high": "[%key:common::state::high%]",
|
|
||||||
"low": "[%key:common::state::low%]",
|
|
||||||
"power_button_pressed": "Power button pressed",
|
|
||||||
"button_pressed": "Button pressed"
|
|
||||||
}
|
|
||||||
},
|
},
|
||||||
"mode": {
|
"mode": {
|
||||||
"name": "Brushing mode",
|
"name": "Brushing mode"
|
||||||
"state": {
|
|
||||||
"daily_clean": "Daily clean",
|
|
||||||
"sensitive": "Sensitive",
|
|
||||||
"gum_care": "Gum care",
|
|
||||||
"intense": "Intense",
|
|
||||||
"whitening": "Whiten",
|
|
||||||
"whiten": "[%key:component::oralb::entity::sensor::mode::state::whitening%]",
|
|
||||||
"tongue_cleaning": "Tongue clean",
|
|
||||||
"super_sensitive": "Super sensitive",
|
|
||||||
"massage": "Massage",
|
|
||||||
"deep_clean": "Deep clean",
|
|
||||||
"turbo": "Turbo",
|
|
||||||
"off": "[%key:common::state::off%]",
|
|
||||||
"settings": "Settings"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"toothbrush_state": {
|
|
||||||
"state": {
|
|
||||||
"initializing": "Initializing",
|
|
||||||
"idle": "[%key:common::state::idle%]",
|
|
||||||
"running": "Running",
|
|
||||||
"charging": "[%key:common::state::charging%]",
|
|
||||||
"setup": "Setup",
|
|
||||||
"flight_menu": "Flight menu",
|
|
||||||
"selection_menu": "Selection menu",
|
|
||||||
"off": "[%key:common::state::off%]",
|
|
||||||
"sleeping": "Sleeping",
|
|
||||||
"transport": "Transport",
|
|
||||||
"final_test": "Final test",
|
|
||||||
"pcb_test": "PCB test"
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@@ -1,36 +1,52 @@
|
|||||||
"""Support for Plum Lightpad devices."""
|
"""Support for Plum Lightpad devices."""
|
||||||
|
|
||||||
from homeassistant.config_entries import ConfigEntry, ConfigEntryState
|
import logging
|
||||||
|
|
||||||
|
from aiohttp import ContentTypeError
|
||||||
|
from requests.exceptions import ConnectTimeout, HTTPError
|
||||||
|
|
||||||
|
from homeassistant.config_entries import ConfigEntry
|
||||||
|
from homeassistant.const import (
|
||||||
|
CONF_PASSWORD,
|
||||||
|
CONF_USERNAME,
|
||||||
|
EVENT_HOMEASSISTANT_STOP,
|
||||||
|
Platform,
|
||||||
|
)
|
||||||
from homeassistant.core import HomeAssistant
|
from homeassistant.core import HomeAssistant
|
||||||
from homeassistant.helpers import issue_registry as ir
|
from homeassistant.exceptions import ConfigEntryNotReady
|
||||||
|
|
||||||
DOMAIN = "plum_lightpad"
|
from .const import DOMAIN
|
||||||
|
from .utils import load_plum
|
||||||
|
|
||||||
|
_LOGGER = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
PLATFORMS = [Platform.LIGHT]
|
||||||
|
|
||||||
|
|
||||||
async def async_setup_entry(hass: HomeAssistant, _: ConfigEntry) -> bool:
|
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
||||||
"""Set up Plum Lightpad from a config entry."""
|
"""Set up Plum Lightpad from a config entry."""
|
||||||
ir.async_create_issue(
|
_LOGGER.debug("Setting up config entry with ID = %s", entry.unique_id)
|
||||||
hass,
|
|
||||||
DOMAIN,
|
|
||||||
DOMAIN,
|
|
||||||
is_fixable=False,
|
|
||||||
severity=ir.IssueSeverity.ERROR,
|
|
||||||
translation_key="integration_removed",
|
|
||||||
translation_placeholders={
|
|
||||||
"entries": "/config/integrations/integration/plum_lightpad",
|
|
||||||
},
|
|
||||||
)
|
|
||||||
|
|
||||||
return True
|
|
||||||
|
|
||||||
|
|
||||||
async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
|
||||||
"""Unload config entry."""
|
|
||||||
if all(
|
|
||||||
config_entry.state is ConfigEntryState.NOT_LOADED
|
|
||||||
for config_entry in hass.config_entries.async_entries(DOMAIN)
|
|
||||||
if config_entry.entry_id != entry.entry_id
|
|
||||||
):
|
|
||||||
ir.async_delete_issue(hass, DOMAIN, DOMAIN)
|
|
||||||
|
|
||||||
|
username = entry.data[CONF_USERNAME]
|
||||||
|
password = entry.data[CONF_PASSWORD]
|
||||||
|
|
||||||
|
try:
|
||||||
|
plum = await load_plum(username, password, hass)
|
||||||
|
except ContentTypeError as ex:
|
||||||
|
_LOGGER.error("Unable to authenticate to Plum cloud: %s", ex)
|
||||||
|
return False
|
||||||
|
except (ConnectTimeout, HTTPError) as ex:
|
||||||
|
_LOGGER.error("Unable to connect to Plum cloud: %s", ex)
|
||||||
|
raise ConfigEntryNotReady from ex
|
||||||
|
|
||||||
|
hass.data.setdefault(DOMAIN, {})
|
||||||
|
hass.data[DOMAIN][entry.entry_id] = plum
|
||||||
|
|
||||||
|
await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
|
||||||
|
|
||||||
|
def cleanup(event):
|
||||||
|
"""Clean up resources."""
|
||||||
|
plum.cleanup()
|
||||||
|
|
||||||
|
entry.async_on_unload(hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, cleanup))
|
||||||
return True
|
return True
|
||||||
|
@@ -2,12 +2,59 @@
|
|||||||
|
|
||||||
from __future__ import annotations
|
from __future__ import annotations
|
||||||
|
|
||||||
from homeassistant.config_entries import ConfigFlow
|
import logging
|
||||||
|
from typing import Any
|
||||||
|
|
||||||
from . import DOMAIN
|
from aiohttp import ContentTypeError
|
||||||
|
from requests.exceptions import ConnectTimeout, HTTPError
|
||||||
|
import voluptuous as vol
|
||||||
|
|
||||||
|
from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
|
||||||
|
from homeassistant.const import CONF_PASSWORD, CONF_USERNAME
|
||||||
|
|
||||||
|
from .const import DOMAIN
|
||||||
|
from .utils import load_plum
|
||||||
|
|
||||||
|
_LOGGER = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
class PlumLightpadConfigFlow(ConfigFlow, domain=DOMAIN):
|
class PlumLightpadConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||||
"""Config flow for Plum Lightpad integration."""
|
"""Config flow for Plum Lightpad integration."""
|
||||||
|
|
||||||
VERSION = 1
|
VERSION = 1
|
||||||
|
|
||||||
|
def _show_form(self, errors=None):
|
||||||
|
schema = {
|
||||||
|
vol.Required(CONF_USERNAME): str,
|
||||||
|
vol.Required(CONF_PASSWORD): str,
|
||||||
|
}
|
||||||
|
|
||||||
|
return self.async_show_form(
|
||||||
|
step_id="user",
|
||||||
|
data_schema=vol.Schema(schema),
|
||||||
|
errors=errors or {},
|
||||||
|
)
|
||||||
|
|
||||||
|
async def async_step_user(
|
||||||
|
self, user_input: dict[str, Any] | None = None
|
||||||
|
) -> ConfigFlowResult:
|
||||||
|
"""Handle a flow initialized by the user or redirected to by import."""
|
||||||
|
if not user_input:
|
||||||
|
return self._show_form()
|
||||||
|
|
||||||
|
username = user_input[CONF_USERNAME]
|
||||||
|
password = user_input[CONF_PASSWORD]
|
||||||
|
|
||||||
|
# load Plum just so we know username/password work
|
||||||
|
try:
|
||||||
|
await load_plum(username, password, self.hass)
|
||||||
|
except (ContentTypeError, ConnectTimeout, HTTPError) as ex:
|
||||||
|
_LOGGER.error("Unable to connect/authenticate to Plum cloud: %s", str(ex))
|
||||||
|
return self._show_form({"base": "cannot_connect"})
|
||||||
|
|
||||||
|
await self.async_set_unique_id(username)
|
||||||
|
self._abort_if_unique_id_configured()
|
||||||
|
|
||||||
|
return self.async_create_entry(
|
||||||
|
title=username, data={CONF_USERNAME: username, CONF_PASSWORD: password}
|
||||||
|
)
|
||||||
|
3
homeassistant/components/plum_lightpad/const.py (new file)
@@ -0,0 +1,3 @@
+"""Constants for the Plum Lightpad component."""
+
+DOMAIN = "plum_lightpad"

homeassistant/components/plum_lightpad/icons.json (new file)
@@ -0,0 +1,9 @@
+{
+  "entity": {
+    "light": {
+      "glow_ring": {
+        "default": "mdi:crop-portrait"
+      }
+    }
+  }
+}
201
homeassistant/components/plum_lightpad/light.py
Normal file
201
homeassistant/components/plum_lightpad/light.py
Normal file
@@ -0,0 +1,201 @@
|
|||||||
|
"""Support for Plum Lightpad lights."""
|
||||||
|
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
from typing import Any
|
||||||
|
|
||||||
|
from plumlightpad import Plum
|
||||||
|
|
||||||
|
from homeassistant.components.light import (
|
||||||
|
ATTR_BRIGHTNESS,
|
||||||
|
ATTR_HS_COLOR,
|
||||||
|
ColorMode,
|
||||||
|
LightEntity,
|
||||||
|
)
|
||||||
|
from homeassistant.config_entries import ConfigEntry
|
||||||
|
from homeassistant.core import HomeAssistant
|
||||||
|
from homeassistant.helpers.aiohttp_client import async_get_clientsession
|
||||||
|
from homeassistant.helpers.device_registry import DeviceInfo
|
||||||
|
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||||
|
from homeassistant.util import color as color_util
|
||||||
|
|
||||||
|
from .const import DOMAIN
|
||||||
|
|
||||||
|
|
||||||
|
async def async_setup_entry(
|
||||||
|
hass: HomeAssistant,
|
||||||
|
entry: ConfigEntry,
|
||||||
|
async_add_entities: AddConfigEntryEntitiesCallback,
|
||||||
|
) -> None:
|
||||||
|
"""Set up Plum Lightpad dimmer lights and glow rings."""
|
||||||
|
|
||||||
|
plum: Plum = hass.data[DOMAIN][entry.entry_id]
|
||||||
|
|
||||||
|
def setup_entities(device) -> None:
|
||||||
|
entities: list[LightEntity] = []
|
||||||
|
|
||||||
|
if "lpid" in device:
|
||||||
|
lightpad = plum.get_lightpad(device["lpid"])
|
||||||
|
entities.append(GlowRing(lightpad=lightpad))
|
||||||
|
|
||||||
|
if "llid" in device:
|
||||||
|
logical_load = plum.get_load(device["llid"])
|
||||||
|
entities.append(PlumLight(load=logical_load))
|
||||||
|
|
||||||
|
async_add_entities(entities)
|
||||||
|
|
||||||
|
async def new_load(device):
|
||||||
|
setup_entities(device)
|
||||||
|
|
||||||
|
async def new_lightpad(device):
|
||||||
|
setup_entities(device)
|
||||||
|
|
||||||
|
device_web_session = async_get_clientsession(hass, verify_ssl=False)
|
||||||
|
entry.async_create_background_task(
|
||||||
|
hass,
|
||||||
|
plum.discover(
|
||||||
|
hass.loop,
|
||||||
|
loadListener=new_load,
|
||||||
|
lightpadListener=new_lightpad,
|
||||||
|
websession=device_web_session,
|
||||||
|
),
|
||||||
|
"plum.light-discover",
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
class PlumLight(LightEntity):
|
||||||
|
"""Representation of a Plum Lightpad dimmer."""
|
||||||
|
|
||||||
|
_attr_should_poll = False
|
||||||
|
_attr_has_entity_name = True
|
||||||
|
_attr_name = None
|
||||||
|
|
||||||
|
def __init__(self, load):
|
||||||
|
"""Initialize the light."""
|
||||||
|
self._load = load
|
||||||
|
self._brightness = load.level
|
||||||
|
unique_id = f"{load.llid}.light"
|
||||||
|
self._attr_unique_id = unique_id
|
||||||
|
self._attr_device_info = DeviceInfo(
|
||||||
|
identifiers={(DOMAIN, unique_id)},
|
||||||
|
manufacturer="Plum",
|
||||||
|
model="Dimmer",
|
||||||
|
name=load.name,
|
||||||
|
)
|
||||||
|
|
||||||
|
async def async_added_to_hass(self) -> None:
|
||||||
|
"""Subscribe to dimmerchange events."""
|
||||||
|
self._load.add_event_listener("dimmerchange", self.dimmerchange)
|
||||||
|
|
||||||
|
    def dimmerchange(self, event):
        """Change event handler updating the brightness."""
        self._brightness = event["level"]
        self.schedule_update_ha_state()

    @property
    def brightness(self) -> int:
        """Return the brightness of this switch between 0..255."""
        return self._brightness

    @property
    def is_on(self) -> bool:
        """Return true if light is on."""
        return self._brightness > 0

    @property
    def color_mode(self) -> ColorMode:
        """Flag supported features."""
        if self._load.dimmable:
            return ColorMode.BRIGHTNESS
        return ColorMode.ONOFF

    @property
    def supported_color_modes(self) -> set[ColorMode]:
        """Flag supported color modes."""
        return {self.color_mode}

    async def async_turn_on(self, **kwargs: Any) -> None:
        """Turn the light on."""
        if ATTR_BRIGHTNESS in kwargs:
            await self._load.turn_on(kwargs[ATTR_BRIGHTNESS])
        else:
            await self._load.turn_on()

    async def async_turn_off(self, **kwargs: Any) -> None:
        """Turn the light off."""
        await self._load.turn_off()


class GlowRing(LightEntity):
    """Representation of a Plum Lightpad dimmer glow ring."""

    _attr_color_mode = ColorMode.HS
    _attr_should_poll = False
    _attr_translation_key = "glow_ring"
    _attr_supported_color_modes = {ColorMode.HS}

    def __init__(self, lightpad):
        """Initialize the light."""
        self._lightpad = lightpad
        self._attr_name = f"{lightpad.friendly_name} Glow Ring"

        self._attr_is_on = lightpad.glow_enabled
        self._glow_intensity = lightpad.glow_intensity
        unique_id = f"{self._lightpad.lpid}.glow"
        self._attr_unique_id = unique_id

        self._red = lightpad.glow_color["red"]
        self._green = lightpad.glow_color["green"]
        self._blue = lightpad.glow_color["blue"]
        self._attr_device_info = DeviceInfo(
            identifiers={(DOMAIN, unique_id)},
            manufacturer="Plum",
            model="Glow Ring",
            name=self._attr_name,
        )

    async def async_added_to_hass(self) -> None:
        """Subscribe to configchange events."""
        self._lightpad.add_event_listener("configchange", self.configchange_event)

    def configchange_event(self, event):
        """Handle Configuration change event."""
        config = event["changes"]

        self._attr_is_on = config["glowEnabled"]
        self._glow_intensity = config["glowIntensity"]

        self._red = config["glowColor"]["red"]
        self._green = config["glowColor"]["green"]
        self._blue = config["glowColor"]["blue"]
        self.schedule_update_ha_state()

    @property
    def hs_color(self):
        """Return the hue and saturation color value [float, float]."""
        return color_util.color_RGB_to_hs(self._red, self._green, self._blue)

    @property
    def brightness(self) -> int:
        """Return the brightness of this switch between 0..255."""
        return min(max(int(round(self._glow_intensity * 255, 0)), 0), 255)

    async def async_turn_on(self, **kwargs: Any) -> None:
        """Turn the light on."""
        if ATTR_BRIGHTNESS in kwargs:
            brightness_pct = kwargs[ATTR_BRIGHTNESS] / 255.0
            await self._lightpad.set_config({"glowIntensity": brightness_pct})
        elif ATTR_HS_COLOR in kwargs:
            hs_color = kwargs[ATTR_HS_COLOR]
            red, green, blue = color_util.color_hs_to_RGB(*hs_color)
            await self._lightpad.set_glow_color(red, green, blue, 0)
        else:
            await self._lightpad.set_config({"glowEnabled": True})

    async def async_turn_off(self, **kwargs: Any) -> None:
        """Turn the light off."""
        if ATTR_BRIGHTNESS in kwargs:
            brightness_pct = kwargs[ATTR_BRIGHTNESS] / 255.0
            await self._lightpad.set_config({"glowIntensity": brightness_pct})
        else:
            await self._lightpad.set_config({"glowEnabled": False})
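For reference, the GlowRing code above maps Home Assistant's 0-255 brightness scale onto the Lightpad's 0.0-1.0 glow intensity. Below is a minimal standalone sketch of that conversion; it is illustrative only and not part of the diff, and the function names are made up.

# Round trip between HA brightness (0-255) and glow intensity (0.0-1.0),
# mirroring the expressions used by GlowRing above. Sketch only.
def intensity_to_brightness(intensity: float) -> int:
    """Clamp and scale a 0.0-1.0 intensity to HA's 0-255 brightness."""
    return min(max(int(round(intensity * 255, 0)), 0), 255)


def brightness_to_intensity(brightness: int) -> float:
    """Scale a 0-255 brightness back to a 0.0-1.0 intensity."""
    return brightness / 255.0


assert intensity_to_brightness(1.0) == 255
assert intensity_to_brightness(0.0) == 0
assert brightness_to_intensity(255) == 1.0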
@@ -1,9 +1,10 @@
 {
   "domain": "plum_lightpad",
   "name": "Plum Lightpad",
-  "codeowners": [],
+  "codeowners": ["@ColinHarrington", "@prystupa"],
+  "config_flow": true,
   "documentation": "https://www.home-assistant.io/integrations/plum_lightpad",
-  "integration_type": "system",
   "iot_class": "local_push",
-  "requirements": []
+  "loggers": ["plumlightpad"],
+  "requirements": ["plumlightpad==0.0.11"]
 }
@@ -1,8 +1,18 @@
 {
-  "issues": {
-    "integration_removed": {
-      "title": "The Plum Lightpad integration has been removed",
-      "description": "The Plum Lightpad integration has been removed from Home Assistant.\n\nThe required cloud services are no longer available since the Plum servers have been shut down. To resolve this issue, please remove the (now defunct) integration entries from your Home Assistant setup. [Click here to see your existing Plum Lightpad integration entries]({entries})."
+  "config": {
+    "step": {
+      "user": {
+        "data": {
+          "username": "[%key:common::config_flow::data::email%]",
+          "password": "[%key:common::config_flow::data::password%]"
+        }
+      }
+    },
+    "error": {
+      "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]"
+    },
+    "abort": {
+      "already_configured": "[%key:common::config_flow::abort::already_configured_account%]"
     }
   }
 }
homeassistant/components/plum_lightpad/utils.py (new file, 14 lines)
@@ -0,0 +1,14 @@
"""Reusable utilities for the Plum Lightpad component."""

from plumlightpad import Plum

from homeassistant.core import HomeAssistant
from homeassistant.helpers.aiohttp_client import async_get_clientsession


async def load_plum(username: str, password: str, hass: HomeAssistant) -> Plum:
    """Initialize Plum Lightpad API and load metadata stored in the cloud."""
    plum = Plum(username, password)
    cloud_web_session = async_get_clientsession(hass, verify_ssl=True)
    await plum.loadCloudData(cloud_web_session)
    return plum
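As context for the helper above, here is a sketch of how a config-entry setup could consume load_plum. This is illustrative only; the CONF_* keys, the hass.data storage pattern, and the function body are assumptions, not taken from this diff.

# Hypothetical async_setup_entry for the plum_lightpad integration (sketch only).
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_PASSWORD, CONF_USERNAME
from homeassistant.core import HomeAssistant

from .utils import load_plum


async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
    """Set up Plum Lightpad from a config entry (illustrative only)."""
    # Load cloud metadata once and keep the client around for the platforms.
    plum = await load_plum(
        entry.data[CONF_USERNAME], entry.data[CONF_PASSWORD], hass
    )
    hass.data.setdefault("plum_lightpad", {})[entry.entry_id] = plum
    return True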
@@ -18,7 +18,7 @@ from homeassistant.helpers.aiohttp_client import async_create_clientsession
 
 from .coordinator import PortainerCoordinator
 
-_PLATFORMS: list[Platform] = [Platform.BINARY_SENSOR, Platform.BUTTON, Platform.SWITCH]
+_PLATFORMS: list[Platform] = [Platform.BINARY_SENSOR]
 
 type PortainerConfigEntry = ConfigEntry[PortainerCoordinator]
@@ -131,7 +131,15 @@ class PortainerContainerSensor(PortainerContainerEntity, BinarySensorEntity):
         self.entity_description = entity_description
         super().__init__(device_info, coordinator, via_device)
 
-        self._attr_unique_id = f"{coordinator.config_entry.entry_id}_{self.device_name}_{entity_description.key}"
+        # Container ID's are ephemeral, so use the container name for the unique ID
+        # The first one, should always be unique, it's fine if users have aliases
+        # According to Docker's API docs, the first name is unique
+        device_identifier = (
+            self._device_info.names[0].replace("/", " ").strip()
+            if self._device_info.names
+            else None
+        )
+        self._attr_unique_id = f"{coordinator.config_entry.entry_id}_{device_identifier}_{entity_description.key}"
 
     @property
     def available(self) -> bool:
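The comments in the hunk above explain why the unique ID is derived from the container's first name rather than its ephemeral ID. A quick standalone illustration of the name normalization follows; the sample name is made up.

# Docker reports container names with a leading slash, e.g. ["/homeassistant"].
names = ["/homeassistant"]
device_identifier = names[0].replace("/", " ").strip() if names else None
print(device_identifier)  # -> "homeassistant"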
@@ -1,128 +0,0 @@
"""Support for Portainer buttons."""

from __future__ import annotations

from collections.abc import Callable, Coroutine
from dataclasses import dataclass
import logging
from typing import Any

from pyportainer import Portainer
from pyportainer.exceptions import (
    PortainerAuthenticationError,
    PortainerConnectionError,
    PortainerTimeoutError,
)
from pyportainer.models.docker import DockerContainer

from homeassistant.components.button import (
    ButtonDeviceClass,
    ButtonEntity,
    ButtonEntityDescription,
)
from homeassistant.const import EntityCategory
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback

from . import PortainerConfigEntry
from .const import DOMAIN
from .coordinator import PortainerCoordinator, PortainerCoordinatorData
from .entity import PortainerContainerEntity

_LOGGER = logging.getLogger(__name__)


@dataclass(frozen=True, kw_only=True)
class PortainerButtonDescription(ButtonEntityDescription):
    """Class to describe a Portainer button entity."""

    press_action: Callable[
        [Portainer, int, str],
        Coroutine[Any, Any, None],
    ]


BUTTONS: tuple[PortainerButtonDescription, ...] = (
    PortainerButtonDescription(
        key="restart",
        name="Restart Container",
        device_class=ButtonDeviceClass.RESTART,
        entity_category=EntityCategory.CONFIG,
        press_action=(
            lambda portainer, endpoint_id, container_id: portainer.restart_container(
                endpoint_id, container_id
            )
        ),
    ),
)


async def async_setup_entry(
    hass: HomeAssistant,
    entry: PortainerConfigEntry,
    async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
    """Set up Portainer buttons."""
    coordinator: PortainerCoordinator = entry.runtime_data

    async_add_entities(
        PortainerButton(
            coordinator=coordinator,
            entity_description=entity_description,
            device_info=container,
            via_device=endpoint,
        )
        for endpoint in coordinator.data.values()
        for container in endpoint.containers.values()
        for entity_description in BUTTONS
    )


class PortainerButton(PortainerContainerEntity, ButtonEntity):
    """Defines a Portainer button."""

    entity_description: PortainerButtonDescription

    def __init__(
        self,
        coordinator: PortainerCoordinator,
        entity_description: PortainerButtonDescription,
        device_info: DockerContainer,
        via_device: PortainerCoordinatorData,
    ) -> None:
        """Initialize the Portainer button entity."""
        self.entity_description = entity_description
        super().__init__(device_info, coordinator, via_device)

        device_identifier = (
            self._device_info.names[0].replace("/", " ").strip()
            if self._device_info.names
            else None
        )
        self._attr_unique_id = f"{coordinator.config_entry.entry_id}_{device_identifier}_{entity_description.key}"

    async def async_press(self) -> None:
        """Trigger the Portainer button press service."""
        try:
            await self.entity_description.press_action(
                self.coordinator.portainer, self.endpoint_id, self.device_id
            )
        except PortainerConnectionError as err:
            raise HomeAssistantError(
                translation_domain=DOMAIN,
                translation_key="cannot_connect",
                translation_placeholders={"error": repr(err)},
            ) from err
        except PortainerAuthenticationError as err:
            raise HomeAssistantError(
                translation_domain=DOMAIN,
                translation_key="invalid_auth",
                translation_placeholders={"error": repr(err)},
            ) from err
        except PortainerTimeoutError as err:
            raise HomeAssistantError(
                translation_domain=DOMAIN,
                translation_key="timeout_connect",
                translation_placeholders={"error": repr(err)},
            ) from err
@@ -57,25 +57,25 @@ class PortainerContainerEntity(PortainerCoordinatorEntity):
         self.device_id = self._device_info.id
         self.endpoint_id = via_device.endpoint.id
 
-        # Container ID's are ephemeral, so use the container name for the unique ID
-        # The first one, should always be unique, it's fine if users have aliases
-        # According to Docker's API docs, the first name is unique
-        assert self._device_info.names, "Container names list unexpectedly empty"
-        self.device_name = self._device_info.names[0].replace("/", " ").strip()
+        device_name = (
+            self._device_info.names[0].replace("/", " ").strip()
+            if self._device_info.names
+            else None
+        )
 
         self._attr_device_info = DeviceInfo(
             identifiers={
-                (DOMAIN, f"{self.coordinator.config_entry.entry_id}_{self.device_name}")
+                (DOMAIN, f"{self.coordinator.config_entry.entry_id}_{device_name}")
             },
             manufacturer=DEFAULT_NAME,
             configuration_url=URL(
                 f"{coordinator.config_entry.data[CONF_URL]}#!/{self.endpoint_id}/docker/containers/{self.device_id}"
             ),
             model="Container",
-            name=self.device_name,
+            name=device_name,
             via_device=(
                 DOMAIN,
                 f"{self.coordinator.config_entry.entry_id}_{self.endpoint_id}",
             ),
-            translation_key=None if self.device_name else "unknown_container",
+            translation_key=None if device_name else "unknown_container",
         )
@@ -1,12 +0,0 @@
{
  "entity": {
    "switch": {
      "container": {
        "default": "mdi:arrow-down-box",
        "state": {
          "on": "mdi:arrow-up-box"
        }
      }
    }
  }
}
@@ -26,7 +26,10 @@ rules:
   unique-config-entry: done
 
   # Silver
-  action-exceptions: done
+  action-exceptions:
+    status: exempt
+    comment: |
+      No custom actions are defined.
   config-entry-unloading: done
   docs-configuration-parameters: done
   docs-installation-parameters: done
@@ -45,11 +45,6 @@
       "status": {
         "name": "Status"
       }
-    },
-    "switch": {
-      "container": {
-        "name": "Container"
-      }
     }
   },
   "exceptions": {
@@ -1,141 +0,0 @@
"""Switch platform for Portainer containers."""

from __future__ import annotations

from collections.abc import Callable, Coroutine
from dataclasses import dataclass
from typing import Any

from pyportainer import Portainer
from pyportainer.exceptions import (
    PortainerAuthenticationError,
    PortainerConnectionError,
    PortainerTimeoutError,
)
from pyportainer.models.docker import DockerContainer

from homeassistant.components.switch import (
    SwitchDeviceClass,
    SwitchEntity,
    SwitchEntityDescription,
)
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback

from . import PortainerConfigEntry
from .const import DOMAIN
from .coordinator import PortainerCoordinator
from .entity import PortainerContainerEntity, PortainerCoordinatorData


@dataclass(frozen=True, kw_only=True)
class PortainerSwitchEntityDescription(SwitchEntityDescription):
    """Class to hold Portainer switch description."""

    is_on_fn: Callable[[DockerContainer], bool | None]
    turn_on_fn: Callable[[str, Portainer, int, str], Coroutine[Any, Any, None]]
    turn_off_fn: Callable[[str, Portainer, int, str], Coroutine[Any, Any, None]]


async def perform_action(
    action: str, portainer: Portainer, endpoint_id: int, container_id: str
) -> None:
    """Stop a container."""
    try:
        if action == "start":
            await portainer.start_container(endpoint_id, container_id)
        elif action == "stop":
            await portainer.stop_container(endpoint_id, container_id)
    except PortainerAuthenticationError as err:
        raise HomeAssistantError(
            translation_domain=DOMAIN,
            translation_key="invalid_auth",
            translation_placeholders={"error": repr(err)},
        ) from err
    except PortainerConnectionError as err:
        raise HomeAssistantError(
            translation_domain=DOMAIN,
            translation_key="cannot_connect",
            translation_placeholders={"error": repr(err)},
        ) from err
    except PortainerTimeoutError as err:
        raise HomeAssistantError(
            translation_domain=DOMAIN,
            translation_key="timeout_connect",
            translation_placeholders={"error": repr(err)},
        ) from err


SWITCHES: tuple[PortainerSwitchEntityDescription, ...] = (
    PortainerSwitchEntityDescription(
        key="container",
        translation_key="container",
        device_class=SwitchDeviceClass.SWITCH,
        is_on_fn=lambda data: data.state == "running",
        turn_on_fn=perform_action,
        turn_off_fn=perform_action,
    ),
)


async def async_setup_entry(
    hass: HomeAssistant,
    entry: PortainerConfigEntry,
    async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
    """Set up Portainer switch sensors."""

    coordinator = entry.runtime_data

    async_add_entities(
        PortainerContainerSwitch(
            coordinator=coordinator,
            entity_description=entity_description,
            device_info=container,
            via_device=endpoint,
        )
        for endpoint in coordinator.data.values()
        for container in endpoint.containers.values()
        for entity_description in SWITCHES
    )


class PortainerContainerSwitch(PortainerContainerEntity, SwitchEntity):
    """Representation of a Portainer container switch."""

    entity_description: PortainerSwitchEntityDescription

    def __init__(
        self,
        coordinator: PortainerCoordinator,
        entity_description: PortainerSwitchEntityDescription,
        device_info: DockerContainer,
        via_device: PortainerCoordinatorData,
    ) -> None:
        """Initialize the Portainer container switch."""
        self.entity_description = entity_description
        super().__init__(device_info, coordinator, via_device)

        self._attr_unique_id = f"{coordinator.config_entry.entry_id}_{self.device_name}_{entity_description.key}"

    @property
    def is_on(self) -> bool | None:
        """Return the state of the device."""
        return self.entity_description.is_on_fn(
            self.coordinator.data[self.endpoint_id].containers[self.device_id]
        )

    async def async_turn_on(self, **kwargs: Any) -> None:
        """Start (turn on) the container."""
        await self.entity_description.turn_on_fn(
            "start", self.coordinator.portainer, self.endpoint_id, self.device_id
        )
        await self.coordinator.async_request_refresh()

    async def async_turn_off(self, **kwargs: Any) -> None:
        """Stop (turn off) the container."""
        await self.entity_description.turn_off_fn(
            "stop", self.coordinator.portainer, self.endpoint_id, self.device_id
        )
        await self.coordinator.async_request_refresh()
@@ -6,5 +6,5 @@
   "dependencies": ["bluetooth_adapters"],
   "documentation": "https://www.home-assistant.io/integrations/private_ble_device",
   "iot_class": "local_push",
-  "requirements": ["bluetooth-data-tools==1.28.3"]
+  "requirements": ["bluetooth-data-tools==1.28.2"]
 }
@@ -6,5 +6,5 @@
   "iot_class": "assumed_state",
   "loggers": ["raspyrfm_client"],
   "quality_scale": "legacy",
-  "requirements": ["raspyrfm-client==1.2.9"]
+  "requirements": ["raspyrfm-client==1.2.8"]
 }
@@ -8,7 +8,7 @@
   "quality_scale": "internal",
   "requirements": [
     "SQLAlchemy==2.0.41",
-    "fnv-hash-fast==1.6.0",
+    "fnv-hash-fast==1.5.0",
     "psutil-home-assistant==0.0.1"
   ]
 }
@@ -2490,7 +2490,7 @@ class BaseMigration(ABC):
         start_schema_version: int,
         migration_changes: dict[str, int],
     ) -> None:
-        """Initialize a new BaseMigration.
+        """Initialize a new BaseRunTimeMigration.
 
         :param initial_schema_version: The schema version the database was created with.
         :param start_schema_version: The schema version when starting the migration.
@@ -2964,12 +2964,7 @@ class EventIDPostMigration(BaseRunTimeMigration):
                 _drop_foreign_key_constraints(
                     session_maker, instance.engine, TABLE_STATES, "event_id"
                 )
-            except (InternalError, OperationalError) as err:
-                _LOGGER.debug(
-                    "Could not drop foreign key constraint on states.event_id, "
-                    "will try again later",
-                    exc_info=err,
-                )
+            except (InternalError, OperationalError):
                 fk_remove_ok = False
             else:
                 fk_remove_ok = True
@@ -82,7 +82,7 @@ class RoborockFlowHandler(ConfigFlow, domain=DOMAIN):
         assert self._client
         errors: dict[str, str] = {}
         try:
-            await self._client.request_code_v4()
+            await self._client.request_code()
         except RoborockAccountDoesNotExist:
             errors["base"] = "invalid_email"
         except RoborockUrlException:
@@ -111,7 +111,7 @@ class RoborockFlowHandler(ConfigFlow, domain=DOMAIN):
             code = user_input[CONF_ENTRY_CODE]
             _LOGGER.debug("Logging into Roborock account using email provided code")
             try:
-                user_data = await self._client.code_login_v4(code)
+                user_data = await self._client.code_login(code)
             except RoborockInvalidCode:
                 errors["base"] = "invalid_code"
             except RoborockException:
@@ -129,7 +129,7 @@ class RoborockFlowHandler(ConfigFlow, domain=DOMAIN):
                     reauth_entry, data_updates={CONF_USER_DATA: user_data.as_dict()}
                 )
             self._abort_if_unique_id_configured(error="already_configured_account")
-            return await self._create_entry(self._client, self._username, user_data)
+            return self._create_entry(self._client, self._username, user_data)
 
         return self.async_show_form(
             step_id="code",
@@ -176,7 +176,7 @@ class RoborockFlowHandler(ConfigFlow, domain=DOMAIN):
             return await self.async_step_code()
         return self.async_show_form(step_id="reauth_confirm", errors=errors)
 
-    async def _create_entry(
+    def _create_entry(
         self, client: RoborockApiClient, username: str, user_data: UserData
     ) -> ConfigFlowResult:
         """Finished config flow and create entry."""
@@ -185,7 +185,7 @@ class RoborockFlowHandler(ConfigFlow, domain=DOMAIN):
             data={
                 CONF_USERNAME: username,
                 CONF_USER_DATA: user_data.as_dict(),
-                CONF_BASE_URL: await client.base_url,
+                CONF_BASE_URL: client.base_url,
             },
         )
 
@@ -52,12 +52,6 @@
       "total_cleaning_time": {
         "default": "mdi:history"
       },
-      "cleaning_brush_time_left": {
-        "default": "mdi:brush"
-      },
-      "strainer_time_left": {
-        "default": "mdi:filter-variant"
-      },
       "status": {
         "default": "mdi:information-outline"
       },
|
@@ -19,7 +19,7 @@
|
|||||||
"loggers": ["roborock"],
|
"loggers": ["roborock"],
|
||||||
"quality_scale": "silver",
|
"quality_scale": "silver",
|
||||||
"requirements": [
|
"requirements": [
|
||||||
"python-roborock==2.50.2",
|
"python-roborock==2.49.1",
|
||||||
"vacuum-map-parser-roborock==0.1.4"
|
"vacuum-map-parser-roborock==0.1.4"
|
||||||
]
|
]
|
||||||
}
|
}
|
||||||
|
@@ -101,24 +101,6 @@ SENSOR_DESCRIPTIONS = [
         entity_category=EntityCategory.DIAGNOSTIC,
         protocol_listener=RoborockDataProtocol.FILTER_WORK_TIME,
     ),
-    RoborockSensorDescription(
-        native_unit_of_measurement=UnitOfTime.HOURS,
-        key="cleaning_brush_time_left",
-        device_class=SensorDeviceClass.DURATION,
-        translation_key="cleaning_brush_time_left",
-        value_fn=lambda data: data.consumable.cleaning_brush_time_left,
-        entity_category=EntityCategory.DIAGNOSTIC,
-        is_dock_entity=True,
-    ),
-    RoborockSensorDescription(
-        native_unit_of_measurement=UnitOfTime.HOURS,
-        key="strainer_time_left",
-        device_class=SensorDeviceClass.DURATION,
-        translation_key="strainer_time_left",
-        value_fn=lambda data: data.consumable.strainer_time_left,
-        entity_category=EntityCategory.DIAGNOSTIC,
-        is_dock_entity=True,
-    ),
     RoborockSensorDescription(
         native_unit_of_measurement=UnitOfTime.SECONDS,
         key="sensor_time_left",
Some files were not shown because too many files have changed in this diff.