Compare commits

..

2 Commits

Author         SHA1        Message                                                     Date
Paul Bottein   31581bedef  Fix program key                                             2025-08-11 23:41:35 +02:00
Paul Bottein   9426573619  Fix options for active and select program in Home Connect  2025-08-11 21:30:08 +02:00
524 changed files with 3693 additions and 33553 deletions

View File

@@ -27,7 +27,7 @@ jobs:
publish: ${{ steps.version.outputs.publish }}
steps:
- name: Checkout the repository
uses: actions/checkout@v5.0.0
uses: actions/checkout@v4.2.2
with:
fetch-depth: 0
@@ -90,7 +90,7 @@ jobs:
arch: ${{ fromJson(needs.init.outputs.architectures) }}
steps:
- name: Checkout the repository
uses: actions/checkout@v5.0.0
uses: actions/checkout@v4.2.2
- name: Download nightly wheels of frontend
if: needs.init.outputs.channel == 'dev'
@@ -242,7 +242,7 @@ jobs:
- green
steps:
- name: Checkout the repository
uses: actions/checkout@v5.0.0
uses: actions/checkout@v4.2.2
- name: Set build additional args
run: |
@@ -279,7 +279,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Checkout the repository
uses: actions/checkout@v5.0.0
uses: actions/checkout@v4.2.2
- name: Initialize git
uses: home-assistant/actions/helpers/git-init@master
@@ -321,7 +321,7 @@ jobs:
registry: ["ghcr.io/home-assistant", "docker.io/homeassistant"]
steps:
- name: Checkout the repository
uses: actions/checkout@v5.0.0
uses: actions/checkout@v4.2.2
- name: Install Cosign
uses: sigstore/cosign-installer@v3.9.2
@@ -454,7 +454,7 @@ jobs:
if: github.repository_owner == 'home-assistant' && needs.init.outputs.publish == 'true'
steps:
- name: Checkout the repository
uses: actions/checkout@v5.0.0
uses: actions/checkout@v4.2.2
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
uses: actions/setup-python@v5.6.0
@@ -499,7 +499,7 @@ jobs:
HASSFEST_IMAGE_TAG: ghcr.io/home-assistant/hassfest:${{ needs.init.outputs.version }}
steps:
- name: Checkout repository
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- name: Login to GitHub Container Registry
uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 # v3.5.0

View File

@@ -37,7 +37,7 @@ on:
type: boolean
env:
CACHE_VERSION: 5
CACHE_VERSION: 4
UV_CACHE_VERSION: 1
MYPY_CACHE_VERSION: 1
HA_SHORT_VERSION: "2025.9"
@@ -94,7 +94,7 @@ jobs:
runs-on: ubuntu-24.04
steps:
- name: Check out code from GitHub
uses: actions/checkout@v5.0.0
uses: actions/checkout@v4.2.2
- name: Generate partial Python venv restore key
id: generate_python_cache_key
run: |
@@ -246,7 +246,7 @@ jobs:
- info
steps:
- name: Check out code from GitHub
uses: actions/checkout@v5.0.0
uses: actions/checkout@v4.2.2
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
id: python
uses: actions/setup-python@v5.6.0
@@ -292,7 +292,7 @@ jobs:
- pre-commit
steps:
- name: Check out code from GitHub
uses: actions/checkout@v5.0.0
uses: actions/checkout@v4.2.2
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
uses: actions/setup-python@v5.6.0
id: python
@@ -332,7 +332,7 @@ jobs:
- pre-commit
steps:
- name: Check out code from GitHub
uses: actions/checkout@v5.0.0
uses: actions/checkout@v4.2.2
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
uses: actions/setup-python@v5.6.0
id: python
@@ -372,7 +372,7 @@ jobs:
- pre-commit
steps:
- name: Check out code from GitHub
uses: actions/checkout@v5.0.0
uses: actions/checkout@v4.2.2
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
uses: actions/setup-python@v5.6.0
id: python
@@ -462,7 +462,7 @@ jobs:
- script/hassfest/docker/Dockerfile
steps:
- name: Check out code from GitHub
uses: actions/checkout@v5.0.0
uses: actions/checkout@v4.2.2
- name: Register hadolint problem matcher
run: |
echo "::add-matcher::.github/workflows/matchers/hadolint.json"
@@ -481,7 +481,7 @@ jobs:
python-version: ${{ fromJSON(needs.info.outputs.python_versions) }}
steps:
- name: Check out code from GitHub
uses: actions/checkout@v5.0.0
uses: actions/checkout@v4.2.2
- name: Set up Python ${{ matrix.python-version }}
id: python
uses: actions/setup-python@v5.6.0
@@ -584,7 +584,7 @@ jobs:
sudo apt-get -y install \
libturbojpeg
- name: Check out code from GitHub
uses: actions/checkout@v5.0.0
uses: actions/checkout@v4.2.2
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
id: python
uses: actions/setup-python@v5.6.0
@@ -617,7 +617,7 @@ jobs:
- base
steps:
- name: Check out code from GitHub
uses: actions/checkout@v5.0.0
uses: actions/checkout@v4.2.2
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
id: python
uses: actions/setup-python@v5.6.0
@@ -651,7 +651,7 @@ jobs:
&& github.event_name == 'pull_request'
steps:
- name: Check out code from GitHub
uses: actions/checkout@v5.0.0
uses: actions/checkout@v4.2.2
- name: Dependency review
uses: actions/dependency-review-action@v4.7.1
with:
@@ -674,7 +674,7 @@ jobs:
python-version: ${{ fromJson(needs.info.outputs.python_versions) }}
steps:
- name: Check out code from GitHub
uses: actions/checkout@v5.0.0
uses: actions/checkout@v4.2.2
- name: Set up Python ${{ matrix.python-version }}
id: python
uses: actions/setup-python@v5.6.0
@@ -717,7 +717,7 @@ jobs:
- base
steps:
- name: Check out code from GitHub
uses: actions/checkout@v5.0.0
uses: actions/checkout@v4.2.2
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
id: python
uses: actions/setup-python@v5.6.0
@@ -764,7 +764,7 @@ jobs:
- base
steps:
- name: Check out code from GitHub
uses: actions/checkout@v5.0.0
uses: actions/checkout@v4.2.2
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
id: python
uses: actions/setup-python@v5.6.0
@@ -809,7 +809,7 @@ jobs:
- base
steps:
- name: Check out code from GitHub
uses: actions/checkout@v5.0.0
uses: actions/checkout@v4.2.2
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
id: python
uses: actions/setup-python@v5.6.0
@@ -886,7 +886,7 @@ jobs:
libturbojpeg \
libgammu-dev
- name: Check out code from GitHub
uses: actions/checkout@v5.0.0
uses: actions/checkout@v4.2.2
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
id: python
uses: actions/setup-python@v5.6.0
@@ -947,7 +947,7 @@ jobs:
libgammu-dev \
libxml2-utils
- name: Check out code from GitHub
uses: actions/checkout@v5.0.0
uses: actions/checkout@v4.2.2
- name: Set up Python ${{ matrix.python-version }}
id: python
uses: actions/setup-python@v5.6.0
@@ -1080,7 +1080,7 @@ jobs:
libmariadb-dev-compat \
libxml2-utils
- name: Check out code from GitHub
uses: actions/checkout@v5.0.0
uses: actions/checkout@v4.2.2
- name: Set up Python ${{ matrix.python-version }}
id: python
uses: actions/setup-python@v5.6.0
@@ -1222,7 +1222,7 @@ jobs:
sudo apt-get -y install \
postgresql-server-dev-14
- name: Check out code from GitHub
uses: actions/checkout@v5.0.0
uses: actions/checkout@v4.2.2
- name: Set up Python ${{ matrix.python-version }}
id: python
uses: actions/setup-python@v5.6.0
@@ -1334,7 +1334,7 @@ jobs:
timeout-minutes: 10
steps:
- name: Check out code from GitHub
uses: actions/checkout@v5.0.0
uses: actions/checkout@v4.2.2
- name: Download all coverage artifacts
uses: actions/download-artifact@v5.0.0
with:
@@ -1381,7 +1381,7 @@ jobs:
libgammu-dev \
libxml2-utils
- name: Check out code from GitHub
uses: actions/checkout@v5.0.0
uses: actions/checkout@v4.2.2
- name: Set up Python ${{ matrix.python-version }}
id: python
uses: actions/setup-python@v5.6.0
@@ -1484,7 +1484,7 @@ jobs:
timeout-minutes: 10
steps:
- name: Check out code from GitHub
uses: actions/checkout@v5.0.0
uses: actions/checkout@v4.2.2
- name: Download all coverage artifacts
uses: actions/download-artifact@v5.0.0
with:

View File

@@ -21,14 +21,14 @@ jobs:
steps:
- name: Check out code from GitHub
uses: actions/checkout@v5.0.0
uses: actions/checkout@v4.2.2
- name: Initialize CodeQL
uses: github/codeql-action/init@v3.29.9
uses: github/codeql-action/init@v3.29.7
with:
languages: python
- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@v3.29.9
uses: github/codeql-action/analyze@v3.29.7
with:
category: "/language:python"

View File

@@ -231,7 +231,7 @@ jobs:
- name: Detect duplicates using AI
id: ai_detection
if: steps.extract.outputs.should_continue == 'true' && steps.fetch_similar.outputs.has_similar == 'true'
uses: actions/ai-inference@v2.0.0
uses: actions/ai-inference@v1.2.8
with:
model: openai/gpt-4o
system-prompt: |

View File

@@ -57,7 +57,7 @@ jobs:
- name: Detect language using AI
id: ai_language_detection
if: steps.detect_language.outputs.should_continue == 'true'
uses: actions/ai-inference@v2.0.0
uses: actions/ai-inference@v1.2.8
with:
model: openai/gpt-4o-mini
system-prompt: |

View File

@@ -9,7 +9,7 @@ jobs:
check-authorization:
runs-on: ubuntu-latest
# Only run if this is a Task issue type (from the issue form)
if: github.event.issue.type.name == 'Task'
if: github.event.issue.issue_type == 'Task'
steps:
- name: Check if user is authorized
uses: actions/github-script@v7

View File

@@ -19,7 +19,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Checkout the repository
uses: actions/checkout@v5.0.0
uses: actions/checkout@v4.2.2
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
uses: actions/setup-python@v5.6.0

View File

@@ -32,7 +32,7 @@ jobs:
architectures: ${{ steps.info.outputs.architectures }}
steps:
- name: Checkout the repository
uses: actions/checkout@v5.0.0
uses: actions/checkout@v4.2.2
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
id: python
@@ -135,7 +135,7 @@ jobs:
arch: ${{ fromJson(needs.init.outputs.architectures) }}
steps:
- name: Checkout the repository
uses: actions/checkout@v5.0.0
uses: actions/checkout@v4.2.2
- name: Download env_file
uses: actions/download-artifact@v5.0.0
@@ -184,7 +184,7 @@ jobs:
arch: ${{ fromJson(needs.init.outputs.architectures) }}
steps:
- name: Checkout the repository
uses: actions/checkout@v5.0.0
uses: actions/checkout@v4.2.2
- name: Download env_file
uses: actions/download-artifact@v5.0.0

View File

@@ -18,7 +18,7 @@ repos:
exclude_types: [csv, json, html]
exclude: ^tests/fixtures/|homeassistant/generated/|tests/components/.*/snapshots/
- repo: https://github.com/pre-commit/pre-commit-hooks
rev: v6.0.0
rev: v5.0.0
hooks:
- id: check-executables-have-shebangs
stages: [manual]

View File

@@ -466,7 +466,6 @@ homeassistant.components.simplisafe.*
homeassistant.components.siren.*
homeassistant.components.skybell.*
homeassistant.components.slack.*
homeassistant.components.sleep_as_android.*
homeassistant.components.sleepiq.*
homeassistant.components.smhi.*
homeassistant.components.smlight.*

CODEOWNERS (generated, 10 changed lines)
View File

@@ -156,8 +156,8 @@ build.json @home-assistant/supervisor
/tests/components/assist_pipeline/ @balloob @synesthesiam
/homeassistant/components/assist_satellite/ @home-assistant/core @synesthesiam
/tests/components/assist_satellite/ @home-assistant/core @synesthesiam
/homeassistant/components/asuswrt/ @kennedyshead @ollo69 @Vaskivskyi
/tests/components/asuswrt/ @kennedyshead @ollo69 @Vaskivskyi
/homeassistant/components/asuswrt/ @kennedyshead @ollo69
/tests/components/asuswrt/ @kennedyshead @ollo69
/homeassistant/components/atag/ @MatsNL
/tests/components/atag/ @MatsNL
/homeassistant/components/aten_pe/ @mtdcr
@@ -438,8 +438,8 @@ build.json @home-assistant/supervisor
/tests/components/enigma2/ @autinerd
/homeassistant/components/enocean/ @bdurrer
/tests/components/enocean/ @bdurrer
/homeassistant/components/enphase_envoy/ @bdraco @cgarwood @catsmanac
/tests/components/enphase_envoy/ @bdraco @cgarwood @catsmanac
/homeassistant/components/enphase_envoy/ @bdraco @cgarwood @joostlek @catsmanac
/tests/components/enphase_envoy/ @bdraco @cgarwood @joostlek @catsmanac
/homeassistant/components/entur_public_transport/ @hfurubotten
/homeassistant/components/environment_canada/ @gwww @michaeldavie
/tests/components/environment_canada/ @gwww @michaeldavie
@@ -1415,8 +1415,6 @@ build.json @home-assistant/supervisor
/tests/components/skybell/ @tkdrob
/homeassistant/components/slack/ @tkdrob @fletcherau
/tests/components/slack/ @tkdrob @fletcherau
/homeassistant/components/sleep_as_android/ @tr4nt0r
/tests/components/sleep_as_android/ @tr4nt0r
/homeassistant/components/sleepiq/ @mfugate1 @kbickar
/tests/components/sleepiq/ @mfugate1 @kbickar
/homeassistant/components/slide/ @ualex73

Dockerfile (generated, 2 changed lines)
View File

@@ -31,7 +31,7 @@ RUN \
&& go2rtc --version
# Install uv
RUN pip3 install uv==0.8.9
RUN pip3 install uv==0.7.1
WORKDIR /usr/src

View File

@@ -6,5 +6,5 @@
"documentation": "https://www.home-assistant.io/integrations/airos",
"iot_class": "local_polling",
"quality_scale": "bronze",
"requirements": ["airos==0.3.0"]
"requirements": ["airos==0.2.7"]
}

View File

@@ -6,7 +6,7 @@ from collections.abc import Callable
from dataclasses import dataclass
import logging
from airos.data import DerivedWirelessMode, DerivedWirelessRole, NetRole
from airos.data import NetRole, WirelessMode
from homeassistant.components.sensor import (
SensorDeviceClass,
@@ -19,8 +19,6 @@ from homeassistant.const import (
SIGNAL_STRENGTH_DECIBELS,
UnitOfDataRate,
UnitOfFrequency,
UnitOfLength,
UnitOfTime,
)
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
@@ -31,11 +29,8 @@ from .entity import AirOSEntity
_LOGGER = logging.getLogger(__name__)
WIRELESS_MODE_OPTIONS = [mode.value.replace("-", "_").lower() for mode in WirelessMode]
NETROLE_OPTIONS = [mode.value for mode in NetRole]
WIRELESS_MODE_OPTIONS = [mode.value for mode in DerivedWirelessMode]
WIRELESS_ROLE_OPTIONS = [mode.value for mode in DerivedWirelessRole]
PARALLEL_UPDATES = 0
@dataclass(frozen=True, kw_only=True)
@@ -123,41 +118,6 @@ SENSORS: tuple[AirOSSensorEntityDescription, ...] = (
suggested_unit_of_measurement=UnitOfDataRate.MEGABITS_PER_SECOND,
value_fn=lambda data: data.wireless.polling.ul_capacity,
),
AirOSSensorEntityDescription(
key="host_uptime",
translation_key="host_uptime",
native_unit_of_measurement=UnitOfTime.SECONDS,
device_class=SensorDeviceClass.DURATION,
suggested_display_precision=0,
suggested_unit_of_measurement=UnitOfTime.DAYS,
value_fn=lambda data: data.host.uptime,
entity_registry_enabled_default=False,
),
AirOSSensorEntityDescription(
key="wireless_distance",
translation_key="wireless_distance",
native_unit_of_measurement=UnitOfLength.METERS,
device_class=SensorDeviceClass.DISTANCE,
suggested_display_precision=1,
suggested_unit_of_measurement=UnitOfLength.KILOMETERS,
value_fn=lambda data: data.wireless.distance,
),
AirOSSensorEntityDescription(
key="wireless_mode",
translation_key="wireless_mode",
device_class=SensorDeviceClass.ENUM,
value_fn=lambda data: data.derived.mode.value,
options=WIRELESS_MODE_OPTIONS,
entity_registry_enabled_default=False,
),
AirOSSensorEntityDescription(
key="wireless_role",
translation_key="wireless_role",
device_class=SensorDeviceClass.ENUM,
value_fn=lambda data: data.derived.role.value,
options=WIRELESS_ROLE_OPTIONS,
entity_registry_enabled_default=False,
),
)
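
Side note for this hunk: both sides derive the ENUM sensor option lists directly from the library's enums. A tiny standalone sketch of that pattern (the enum below is a made-up stand-in, not airos.data):

from enum import Enum

class DemoWirelessMode(Enum):
    """Made-up stand-in for the airos enum used above."""
    POINT_TO_POINT = "point-to-point"
    POINT_TO_MULTIPOINT = "point-to-multipoint"

# Same shape as the option lists in the hunk: one entry per enum member.
DEMO_OPTIONS = [mode.value.replace("-", "_").lower() for mode in DemoWirelessMode]
print(DEMO_OPTIONS)  # ['point_to_point', 'point_to_multipoint']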

View File

@@ -77,26 +77,6 @@
},
"wireless_remote_hostname": {
"name": "Remote hostname"
},
"host_uptime": {
"name": "Uptime"
},
"wireless_distance": {
"name": "Wireless distance"
},
"wireless_role": {
"name": "Wireless role",
"state": {
"access_point": "Access point",
"station": "Station"
}
},
"wireless_mode": {
"name": "Wireless mode",
"state": {
"point_to_point": "Point-to-point",
"point_to_multipoint": "Point-to-multipoint"
}
}
}
},

View File

@@ -9,7 +9,7 @@ from homeassistant.core import HomeAssistant
from .const import CONF_CLIP_NEGATIVE, CONF_RETURN_AVERAGE
from .coordinator import AirQCoordinator
PLATFORMS: list[Platform] = [Platform.NUMBER, Platform.SENSOR]
PLATFORMS: list[Platform] = [Platform.SENSOR]
AirQConfigEntry = ConfigEntry[AirQCoordinator]

View File

@@ -75,7 +75,6 @@ class AirQCoordinator(DataUpdateCoordinator):
return_average=self.return_average,
clip_negative_values=self.clip_negative,
)
data["brightness"] = await self.airq.get_current_brightness()
if warming_up_sensors := identify_warming_up_sensors(data):
_LOGGER.debug(
"Following sensors are still warming up: %s", warming_up_sensors

View File

@@ -1,85 +0,0 @@
"""Definition of air-Q number platform used to control the LED strips."""
from __future__ import annotations
from collections.abc import Awaitable, Callable
from dataclasses import dataclass
import logging
from aioairq.core import AirQ
from homeassistant.components.number import NumberEntity, NumberEntityDescription
from homeassistant.const import PERCENTAGE
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from homeassistant.helpers.update_coordinator import CoordinatorEntity
from . import AirQConfigEntry, AirQCoordinator
_LOGGER = logging.getLogger(__name__)
@dataclass(frozen=True, kw_only=True)
class AirQBrightnessDescription(NumberEntityDescription):
"""Describes AirQ number entity responsible for brightness control."""
value: Callable[[dict], float]
set_value: Callable[[AirQ, float], Awaitable[None]]
AIRQ_LED_BRIGHTNESS = AirQBrightnessDescription(
key="airq_led_brightness",
translation_key="airq_led_brightness",
native_min_value=0.0,
native_max_value=100.0,
native_step=1.0,
native_unit_of_measurement=PERCENTAGE,
value=lambda data: data["brightness"],
set_value=lambda device, value: device.set_current_brightness(value),
)
async def async_setup_entry(
hass: HomeAssistant,
entry: AirQConfigEntry,
async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
"""Set up number entities: a single entity for the LEDs."""
coordinator = entry.runtime_data
entities = [AirQLEDBrightness(coordinator, AIRQ_LED_BRIGHTNESS)]
async_add_entities(entities)
class AirQLEDBrightness(CoordinatorEntity[AirQCoordinator], NumberEntity):
"""Representation of the LEDs from a single AirQ."""
_attr_has_entity_name = True
def __init__(
self,
coordinator: AirQCoordinator,
description: AirQBrightnessDescription,
) -> None:
"""Initialize a single sensor."""
super().__init__(coordinator)
self.entity_description: AirQBrightnessDescription = description
self._attr_device_info = coordinator.device_info
self._attr_unique_id = f"{coordinator.device_id}_{description.key}"
@property
def native_value(self) -> float:
"""Return the brightness of the LEDs in %."""
return self.entity_description.value(self.coordinator.data)
async def async_set_native_value(self, value: float) -> None:
"""Set the brightness of the LEDs to the value in %."""
_LOGGER.debug(
"Changing LED brighntess from %.0f%% to %.0f%%",
self.coordinator.data["brightness"],
value,
)
await self.entity_description.set_value(self.coordinator.airq, value)
await self.coordinator.async_request_refresh()
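
The platform in this file is built around a frozen description dataclass carrying value/set_value callables. A minimal standalone sketch of that pattern outside Home Assistant (all names below are illustrative):

from collections.abc import Callable
from dataclasses import dataclass

@dataclass(frozen=True, kw_only=True)
class DemoBrightnessDescription:
    """Illustrative analogue of AirQBrightnessDescription."""
    key: str
    value: Callable[[dict], float]

DEMO_DESC = DemoBrightnessDescription(
    key="led_brightness",
    value=lambda data: data["brightness"],
)
print(DEMO_DESC.value({"brightness": 40.0}))  # 40.0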

View File

@@ -35,11 +35,6 @@
}
},
"entity": {
"number": {
"airq_led_brightness": {
"name": "LED brightness"
}
},
"sensor": {
"acetaldehyde": {
"name": "Acetaldehyde"

View File

@@ -5,7 +5,7 @@ from __future__ import annotations
from aioambient.util import get_public_device_id
from homeassistant.core import callback
from homeassistant.helpers.device_registry import CONNECTION_NETWORK_MAC, DeviceInfo
from homeassistant.helpers.device_registry import DeviceInfo
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from homeassistant.helpers.entity import Entity, EntityDescription
@@ -37,7 +37,6 @@ class AmbientWeatherEntity(Entity):
identifiers={(DOMAIN, mac_address)},
manufacturer="Ambient Weather",
name=station_name.capitalize(),
connections={(CONNECTION_NETWORK_MAC, mac_address)},
)
self._attr_unique_id = f"{mac_address}_{description.key}"

View File

@@ -390,6 +390,7 @@ def _domains_from_yaml_config(yaml_configuration: dict[str, Any]) -> set[str]:
async def async_devices_payload(hass: HomeAssistant) -> dict:
"""Return the devices payload."""
integrations_without_model_id: set[str] = set()
devices: list[dict[str, Any]] = []
dev_reg = dr.async_get(hass)
# Devices that need via device info set
@@ -399,6 +400,10 @@ async def async_devices_payload(hass: HomeAssistant) -> dict:
seen_integrations = set()
for device in dev_reg.devices.values():
# Ignore services
if device.entry_type:
continue
if not device.primary_config_entry:
continue
@@ -409,6 +414,13 @@ async def async_devices_payload(hass: HomeAssistant) -> dict:
seen_integrations.add(config_entry.domain)
if not device.model_id:
integrations_without_model_id.add(config_entry.domain)
continue
if not device.manufacturer:
continue
new_indexes[device.id] = len(devices)
devices.append(
{
@@ -420,10 +432,8 @@ async def async_devices_payload(hass: HomeAssistant) -> dict:
"hw_version": device.hw_version,
"has_configuration_url": device.configuration_url is not None,
"via_device": None,
"entry_type": device.entry_type.value if device.entry_type else None,
}
)
if device.via_device_id:
via_devices[device.id] = device.via_device_id
@@ -443,11 +453,15 @@ async def async_devices_payload(hass: HomeAssistant) -> dict:
for device_info in devices:
if integration := integrations.get(device_info["integration"]):
device_info["is_custom_integration"] = not integration.is_built_in
# Include version for custom integrations
if not integration.is_built_in and integration.version:
device_info["custom_integration_version"] = str(integration.version)
return {
"version": "home-assistant:1",
"no_model_id": sorted(
[
domain
for domain in integrations_without_model_id
if domain in integrations and integrations[domain].is_built_in
]
),
"devices": devices,
}
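
For orientation, this hunk revolves around which registry devices make it into the payload. A rough standalone illustration of that style of filtering, using a simplified stand-in record rather than Home Assistant's device registry:

from dataclasses import dataclass

@dataclass
class DemoDevice:
    """Simplified stand-in for a device registry entry."""
    entry_type: str | None
    primary_config_entry: str | None
    model_id: str | None
    manufacturer: str | None

def include_in_payload(device: DemoDevice) -> bool:
    """Skip services and devices missing the fields the payload needs."""
    if device.entry_type:  # service entries are ignored
        return False
    if not device.primary_config_entry:
        return False
    if not device.model_id:
        return False
    if not device.manufacturer:
        return False
    return True

print(include_in_payload(DemoDevice(None, "entry_id", "X100", "Acme")))  # True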

View File

@@ -30,9 +30,10 @@ class AndroidIPCamDataUpdateCoordinator(DataUpdateCoordinator[None]):
cam: PyDroidIPCam,
) -> None:
"""Initialize the Android IP Webcam."""
self.hass = hass
self.cam = cam
super().__init__(
hass,
self.hass,
_LOGGER,
config_entry=config_entry,
name=f"{DOMAIN} {config_entry.data[CONF_HOST]}",

View File

@@ -2,10 +2,11 @@
from collections.abc import AsyncGenerator, Callable, Iterable
import json
from typing import Any
from typing import Any, cast
import anthropic
from anthropic import AsyncStream
from anthropic._types import NOT_GIVEN
from anthropic.types import (
InputJSONDelta,
MessageDeltaUsage,
@@ -16,6 +17,7 @@ from anthropic.types import (
RawContentBlockStopEvent,
RawMessageDeltaEvent,
RawMessageStartEvent,
RawMessageStopEvent,
RedactedThinkingBlock,
RedactedThinkingBlockParam,
SignatureDelta,
@@ -33,7 +35,6 @@ from anthropic.types import (
ToolUseBlockParam,
Usage,
)
from anthropic.types.message_create_params import MessageCreateParamsStreaming
from voluptuous_openapi import convert
from homeassistant.components import conversation
@@ -128,28 +129,6 @@ def _convert_content(
)
)
if isinstance(content.native, ThinkingBlock):
messages[-1]["content"].append( # type: ignore[union-attr]
ThinkingBlockParam(
type="thinking",
thinking=content.thinking_content or "",
signature=content.native.signature,
)
)
elif isinstance(content.native, RedactedThinkingBlock):
redacted_thinking_block = RedactedThinkingBlockParam(
type="redacted_thinking",
data=content.native.data,
)
if isinstance(messages[-1]["content"], str):
messages[-1]["content"] = [
TextBlockParam(type="text", text=messages[-1]["content"]),
redacted_thinking_block,
]
else:
messages[-1]["content"].append( # type: ignore[attr-defined]
redacted_thinking_block
)
if content.content:
messages[-1]["content"].append( # type: ignore[union-attr]
TextBlockParam(type="text", text=content.content)
@@ -173,9 +152,10 @@ def _convert_content(
return messages
async def _transform_stream(
async def _transform_stream( # noqa: C901 - This is complex, but better to have it in one place
chat_log: conversation.ChatLog,
stream: AsyncStream[MessageStreamEvent],
result: AsyncStream[MessageStreamEvent],
messages: list[MessageParam],
) -> AsyncGenerator[conversation.AssistantContentDeltaDict]:
"""Transform the response stream into HA format.
@@ -206,25 +186,31 @@ async def _transform_stream(
Each message could contain multiple blocks of the same type.
"""
if stream is None:
if result is None:
raise TypeError("Expected a stream of messages")
current_tool_block: ToolUseBlockParam | None = None
current_message: MessageParam | None = None
current_block: (
TextBlockParam
| ToolUseBlockParam
| ThinkingBlockParam
| RedactedThinkingBlockParam
| None
) = None
current_tool_args: str
input_usage: Usage | None = None
has_content = False
has_native = False
async for response in stream:
async for response in result:
LOGGER.debug("Received response: %s", response)
if isinstance(response, RawMessageStartEvent):
if response.message.role != "assistant":
raise ValueError("Unexpected message role")
current_message = MessageParam(role=response.message.role, content=[])
input_usage = response.message.usage
elif isinstance(response, RawContentBlockStartEvent):
if isinstance(response.content_block, ToolUseBlock):
current_tool_block = ToolUseBlockParam(
current_block = ToolUseBlockParam(
type="tool_use",
id=response.content_block.id,
name=response.content_block.name,
@@ -232,64 +218,75 @@ async def _transform_stream(
)
current_tool_args = ""
elif isinstance(response.content_block, TextBlock):
if has_content:
yield {"role": "assistant"}
has_native = False
has_content = True
current_block = TextBlockParam(
type="text", text=response.content_block.text
)
yield {"role": "assistant"}
if response.content_block.text:
yield {"content": response.content_block.text}
elif isinstance(response.content_block, ThinkingBlock):
if has_native:
yield {"role": "assistant"}
has_native = False
has_content = False
current_block = ThinkingBlockParam(
type="thinking",
thinking=response.content_block.thinking,
signature=response.content_block.signature,
)
elif isinstance(response.content_block, RedactedThinkingBlock):
current_block = RedactedThinkingBlockParam(
type="redacted_thinking", data=response.content_block.data
)
LOGGER.debug(
"Some of Claudes internal reasoning has been automatically "
"encrypted for safety reasons. This doesnt affect the quality of "
"responses"
)
if has_native:
yield {"role": "assistant"}
has_native = False
has_content = False
yield {"native": response.content_block}
has_native = True
elif isinstance(response, RawContentBlockDeltaEvent):
if current_block is None:
raise ValueError("Unexpected delta without a block")
if isinstance(response.delta, InputJSONDelta):
current_tool_args += response.delta.partial_json
elif isinstance(response.delta, TextDelta):
text_block = cast(TextBlockParam, current_block)
text_block["text"] += response.delta.text
yield {"content": response.delta.text}
elif isinstance(response.delta, ThinkingDelta):
yield {"thinking_content": response.delta.thinking}
thinking_block = cast(ThinkingBlockParam, current_block)
thinking_block["thinking"] += response.delta.thinking
elif isinstance(response.delta, SignatureDelta):
yield {
"native": ThinkingBlock(
type="thinking",
thinking="",
signature=response.delta.signature,
)
}
has_native = True
thinking_block = cast(ThinkingBlockParam, current_block)
thinking_block["signature"] += response.delta.signature
elif isinstance(response, RawContentBlockStopEvent):
if current_tool_block is not None:
if current_block is None:
raise ValueError("Unexpected stop event without a current block")
if current_block["type"] == "tool_use":
# tool block
tool_args = json.loads(current_tool_args) if current_tool_args else {}
current_tool_block["input"] = tool_args
current_block["input"] = tool_args
yield {
"tool_calls": [
llm.ToolInput(
id=current_tool_block["id"],
tool_name=current_tool_block["name"],
id=current_block["id"],
tool_name=current_block["name"],
tool_args=tool_args,
)
]
}
current_tool_block = None
elif current_block["type"] == "thinking":
# thinking block
LOGGER.debug("Thinking: %s", current_block["thinking"])
if current_message is None:
raise ValueError("Unexpected stop event without a current message")
current_message["content"].append(current_block) # type: ignore[union-attr]
current_block = None
elif isinstance(response, RawMessageDeltaEvent):
if (usage := response.usage) is not None:
chat_log.async_trace(_create_token_stats(input_usage, usage))
if response.delta.stop_reason == "refusal":
raise HomeAssistantError("Potential policy violation detected")
elif isinstance(response, RawMessageStopEvent):
if current_message is not None:
messages.append(current_message)
current_message = None
def _create_token_stats(
@@ -354,48 +351,48 @@ class AnthropicBaseLLMEntity(Entity):
thinking_budget = options.get(CONF_THINKING_BUDGET, RECOMMENDED_THINKING_BUDGET)
model = options.get(CONF_CHAT_MODEL, RECOMMENDED_CHAT_MODEL)
model_args = MessageCreateParamsStreaming(
model=model,
messages=messages,
max_tokens=options.get(CONF_MAX_TOKENS, RECOMMENDED_MAX_TOKENS),
system=system.content,
stream=True,
)
if tools:
model_args["tools"] = tools
if (
model.startswith(tuple(THINKING_MODELS))
and thinking_budget >= MIN_THINKING_BUDGET
):
model_args["thinking"] = ThinkingConfigEnabledParam(
type="enabled", budget_tokens=thinking_budget
)
else:
model_args["thinking"] = ThinkingConfigDisabledParam(type="disabled")
model_args["temperature"] = options.get(
CONF_TEMPERATURE, RECOMMENDED_TEMPERATURE
)
# To prevent infinite loops, we limit the number of iterations
for _iteration in range(MAX_TOOL_ITERATIONS):
model_args = {
"model": model,
"messages": messages,
"tools": tools or NOT_GIVEN,
"max_tokens": options.get(CONF_MAX_TOKENS, RECOMMENDED_MAX_TOKENS),
"system": system.content,
"stream": True,
}
if (
model.startswith(tuple(THINKING_MODELS))
and thinking_budget >= MIN_THINKING_BUDGET
):
model_args["thinking"] = ThinkingConfigEnabledParam(
type="enabled", budget_tokens=thinking_budget
)
else:
model_args["thinking"] = ThinkingConfigDisabledParam(type="disabled")
model_args["temperature"] = options.get(
CONF_TEMPERATURE, RECOMMENDED_TEMPERATURE
)
try:
stream = await client.messages.create(**model_args)
messages.extend(
_convert_content(
[
content
async for content in chat_log.async_add_delta_content_stream(
self.entity_id,
_transform_stream(chat_log, stream),
)
]
)
)
except anthropic.AnthropicError as err:
raise HomeAssistantError(
f"Sorry, I had a problem talking to Anthropic: {err}"
) from err
messages.extend(
_convert_content(
[
content
async for content in chat_log.async_add_delta_content_stream(
self.entity_id,
_transform_stream(chat_log, stream, messages),
)
if not isinstance(content, conversation.AssistantContent)
]
)
)
if not chat_log.unresponded_tool_results:
break
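
The surrounding loop in this hunk bounds the number of round trips with the model. A simplified sketch of that bounded tool-call loop (function names here are placeholders, not the integration's API):

MAX_TOOL_ITERATIONS = 10  # hard cap to avoid infinite tool-call loops

async def run_tool_loop(call_model, has_unresponded_tool_results) -> None:
    """Keep calling the model until no tool results are pending, up to the cap."""
    for _iteration in range(MAX_TOOL_ITERATIONS):
        await call_model()
        if not has_unresponded_tool_results():
            break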

View File

@@ -7,7 +7,10 @@ rules:
status: done
comment: |
Consider deriving a base entity.
config-flow-test-coverage: done
config-flow-test-coverage:
status: done
comment: |
Consider looking into making a `mock_setup_entry` fixture that just automatically does this.
config-flow: done
dependency-transparency: done
docs-actions:

View File

@@ -11,7 +11,7 @@ import time
from typing import Any, Literal, final
from hassil import Intents, recognize
from hassil.expression import Expression, Group, ListReference
from hassil.expression import Expression, ListReference, Sequence
from hassil.intents import WildcardSlotList
from homeassistant.components import conversation, media_source, stt, tts
@@ -413,7 +413,7 @@ class AssistSatelliteEntity(entity.Entity):
for intent in intents.intents.values():
for intent_data in intent.data:
for sentence in intent_data.sentences:
_collect_list_references(sentence.expression, wildcard_names)
_collect_list_references(sentence, wildcard_names)
for wildcard_name in wildcard_names:
intents.slot_lists[wildcard_name] = WildcardSlotList(wildcard_name)
@@ -727,9 +727,9 @@ class AssistSatelliteEntity(entity.Entity):
def _collect_list_references(expression: Expression, list_names: set[str]) -> None:
"""Collect list reference names recursively."""
if isinstance(expression, Group):
grp: Group = expression
for item in grp.items:
if isinstance(expression, Sequence):
seq: Sequence = expression
for item in seq.items:
_collect_list_references(item, list_names)
elif isinstance(expression, ListReference):
# {list}
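
Both sides of this hunk walk an expression tree recursively to find list references. A small self-contained analogue of that traversal (the node classes below are made-up stand-ins, not hassil's types):

from dataclasses import dataclass, field

@dataclass
class DemoListReference:
    list_name: str

@dataclass
class DemoGroup:
    items: list = field(default_factory=list)

def collect_list_references(expression, list_names: set[str]) -> None:
    """Recursively gather the names of {list} references in the tree."""
    if isinstance(expression, DemoGroup):
        for item in expression.items:
            collect_list_references(item, list_names)
    elif isinstance(expression, DemoListReference):
        list_names.add(expression.list_name)

names: set[str] = set()
tree = DemoGroup([DemoListReference("area"), DemoGroup([DemoListReference("name")])])
collect_list_references(tree, names)
print(names)  # {'area', 'name'}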

View File

@@ -6,5 +6,5 @@
"documentation": "https://www.home-assistant.io/integrations/assist_satellite",
"integration_type": "entity",
"quality_scale": "internal",
"requirements": ["hassil==3.1.0"]
"requirements": ["hassil==2.2.3"]
}

View File

@@ -5,16 +5,15 @@ from __future__ import annotations
from abc import ABC, abstractmethod
from collections import namedtuple
from collections.abc import Awaitable, Callable, Coroutine
from datetime import datetime
import functools
import logging
from typing import Any, cast
from aioasuswrt.asuswrt import AsusWrt as AsusWrtLegacy
from aiohttp import ClientSession
from asusrouter import AsusRouter, AsusRouterError
from asusrouter.modules.client import AsusClient
from asusrouter.modules.data import AsusData
from asusrouter.modules.homeassistant import convert_to_ha_data, convert_to_ha_sensors
from pyasuswrt import AsusWrtError, AsusWrtHttp
from pyasuswrt.exceptions import AsusWrtNotAvailableInfoError
from homeassistant.const import (
CONF_HOST,
@@ -42,13 +41,14 @@ from .const import (
PROTOCOL_HTTPS,
PROTOCOL_TELNET,
SENSORS_BYTES,
SENSORS_CPU,
SENSORS_LOAD_AVG,
SENSORS_MEMORY,
SENSORS_RATES,
SENSORS_TEMPERATURES,
SENSORS_TEMPERATURES_LEGACY,
SENSORS_UPTIME,
)
from .helpers import clean_dict, translate_to_legacy
SENSORS_TYPE_BYTES = "sensors_bytes"
SENSORS_TYPE_COUNT = "sensors_count"
@@ -310,16 +310,16 @@ class AsusWrtHttpBridge(AsusWrtBridge):
def __init__(self, conf: dict[str, Any], session: ClientSession) -> None:
"""Initialize Bridge that use HTTP library."""
super().__init__(conf[CONF_HOST])
self._api = self._get_api(conf, session)
self._api: AsusWrtHttp = self._get_api(conf, session)
@staticmethod
def _get_api(conf: dict[str, Any], session: ClientSession) -> AsusRouter:
"""Get the AsusRouter API."""
return AsusRouter(
hostname=conf[CONF_HOST],
username=conf[CONF_USERNAME],
password=conf.get(CONF_PASSWORD, ""),
use_ssl=conf[CONF_PROTOCOL] == PROTOCOL_HTTPS,
def _get_api(conf: dict[str, Any], session: ClientSession) -> AsusWrtHttp:
"""Get the AsusWrtHttp API."""
return AsusWrtHttp(
conf[CONF_HOST],
conf[CONF_USERNAME],
conf.get(CONF_PASSWORD, ""),
use_https=conf[CONF_PROTOCOL] == PROTOCOL_HTTPS,
port=conf.get(CONF_PORT),
session=session,
)
@@ -327,90 +327,46 @@ class AsusWrtHttpBridge(AsusWrtBridge):
@property
def is_connected(self) -> bool:
"""Get connected status."""
return self._api.connected
return cast(bool, self._api.is_connected)
async def async_connect(self) -> None:
"""Connect to the device."""
await self._api.async_connect()
# Collect the identity
_identity = await self._api.async_get_identity()
# get main router properties
if mac := _identity.mac:
if mac := self._api.mac:
self._label_mac = format_mac(mac)
self._firmware = str(_identity.firmware)
self._model = _identity.model
self._firmware = self._api.firmware
self._model = self._api.model
async def async_disconnect(self) -> None:
"""Disconnect to the device."""
await self._api.async_disconnect()
async def _get_data(
self,
datatype: AsusData,
force: bool = False,
) -> dict[str, Any]:
"""Get data from the device.
This is a generic method which automatically converts to
the Home Assistant-compatible format.
"""
try:
raw = await self._api.async_get_data(datatype, force=force)
return translate_to_legacy(clean_dict(convert_to_ha_data(raw)))
except AsusRouterError as ex:
raise UpdateFailed(ex) from ex
async def _get_sensors(self, datatype: AsusData) -> list[str]:
"""Get the available sensors.
This is a generic method which automatically converts to
the Home Assistant-compatible format.
"""
sensors = []
try:
data = await self._api.async_get_data(datatype)
# Get the list of sensors from the raw data
# and translate in to the legacy format
sensors = translate_to_legacy(convert_to_ha_sensors(data, datatype))
_LOGGER.debug("Available `%s` sensors: %s", datatype.value, sensors)
except AsusRouterError as ex:
_LOGGER.warning(
"Cannot get available `%s` sensors with exception: %s",
datatype.value,
ex,
)
return sensors
async def async_get_connected_devices(self) -> dict[str, WrtDevice]:
"""Get list of connected devices."""
api_devices: dict[str, AsusClient] = await self._api.async_get_data(
AsusData.CLIENTS, force=True
)
api_devices = await self._api.async_get_connected_devices()
return {
format_mac(mac): WrtDevice(
dev.connection.ip_address, dev.description.name, dev.connection.node
)
format_mac(mac): WrtDevice(dev.ip, dev.name, dev.node)
for mac, dev in api_devices.items()
if dev.connection is not None
and dev.description is not None
and dev.connection.ip_address is not None
}
async def async_get_available_sensors(self) -> dict[str, dict[str, Any]]:
"""Return a dictionary of available sensors for this bridge."""
sensors_cpu = await self._get_available_cpu_sensors()
sensors_temperatures = await self._get_available_temperature_sensors()
sensors_loadavg = await self._get_loadavg_sensors_availability()
return {
SENSORS_TYPE_BYTES: {
KEY_SENSORS: SENSORS_BYTES,
KEY_METHOD: self._get_bytes,
},
SENSORS_TYPE_CPU: {
KEY_SENSORS: await self._get_sensors(AsusData.CPU),
KEY_SENSORS: sensors_cpu,
KEY_METHOD: self._get_cpu_usage,
},
SENSORS_TYPE_LOAD_AVG: {
KEY_SENSORS: await self._get_sensors(AsusData.SYSINFO),
KEY_SENSORS: sensors_loadavg,
KEY_METHOD: self._get_load_avg,
},
SENSORS_TYPE_MEMORY: {
@@ -426,44 +382,95 @@ class AsusWrtHttpBridge(AsusWrtBridge):
KEY_METHOD: self._get_uptime,
},
SENSORS_TYPE_TEMPERATURES: {
KEY_SENSORS: await self._get_sensors(AsusData.TEMPERATURE),
KEY_SENSORS: sensors_temperatures,
KEY_METHOD: self._get_temperatures,
},
}
async def _get_available_cpu_sensors(self) -> list[str]:
"""Check which cpu information is available on the router."""
try:
available_cpu = await self._api.async_get_cpu_usage()
available_sensors = [t for t in SENSORS_CPU if t in available_cpu]
except AsusWrtError as exc:
_LOGGER.warning(
(
"Failed checking cpu sensor availability for ASUS router"
" %s. Exception: %s"
),
self.host,
exc,
)
return []
return available_sensors
async def _get_available_temperature_sensors(self) -> list[str]:
"""Check which temperature information is available on the router."""
try:
available_temps = await self._api.async_get_temperatures()
available_sensors = [
t for t in SENSORS_TEMPERATURES if t in available_temps
]
except AsusWrtError as exc:
_LOGGER.warning(
(
"Failed checking temperature sensor availability for ASUS router"
" %s. Exception: %s"
),
self.host,
exc,
)
return []
return available_sensors
async def _get_loadavg_sensors_availability(self) -> list[str]:
"""Check if load avg is available on the router."""
try:
await self._api.async_get_loadavg()
except AsusWrtNotAvailableInfoError:
return []
except AsusWrtError:
pass
return SENSORS_LOAD_AVG
@handle_errors_and_zip(AsusWrtError, SENSORS_BYTES)
async def _get_bytes(self) -> Any:
"""Fetch byte information from the router."""
return await self._get_data(AsusData.NETWORK)
return await self._api.async_get_traffic_bytes()
@handle_errors_and_zip(AsusWrtError, SENSORS_RATES)
async def _get_rates(self) -> Any:
"""Fetch rates information from the router."""
data = await self._get_data(AsusData.NETWORK)
# Convert from bits/s to Bytes/s for compatibility with legacy sensors
return {
key: (
value / 8
if key in SENSORS_RATES and isinstance(value, (int, float))
else value
)
for key, value in data.items()
}
return await self._api.async_get_traffic_rates()
@handle_errors_and_zip(AsusWrtError, SENSORS_LOAD_AVG)
async def _get_load_avg(self) -> Any:
"""Fetch cpu load avg information from the router."""
return await self._get_data(AsusData.SYSINFO)
return await self._api.async_get_loadavg()
@handle_errors_and_zip(AsusWrtError, None)
async def _get_temperatures(self) -> Any:
"""Fetch temperatures information from the router."""
return await self._get_data(AsusData.TEMPERATURE)
return await self._api.async_get_temperatures()
@handle_errors_and_zip(AsusWrtError, None)
async def _get_cpu_usage(self) -> Any:
"""Fetch cpu information from the router."""
return await self._get_data(AsusData.CPU)
return await self._api.async_get_cpu_usage()
@handle_errors_and_zip(AsusWrtError, None)
async def _get_memory_usage(self) -> Any:
"""Fetch memory information from the router."""
return await self._get_data(AsusData.RAM)
return await self._api.async_get_memory_usage()
async def _get_uptime(self) -> dict[str, Any]:
"""Fetch uptime from the router."""
return await self._get_data(AsusData.BOOTTIME)
try:
uptimes = await self._api.async_get_uptime()
except AsusWrtError as exc:
raise UpdateFailed(exc) from exc
last_boot = datetime.fromisoformat(uptimes["last_boot"])
uptime = uptimes["uptime"]
return dict(zip(SENSORS_UPTIME, [last_boot, uptime], strict=False))
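
One detail worth calling out from the rate sensors in this hunk is the bits-per-second to bytes-per-second conversion. A quick standalone illustration with made-up values:

DEMO_SENSORS_RATES = ["sensor_rx_rates", "sensor_tx_rates"]
data = {"sensor_rx_rates": 8_000_000, "sensor_tx_rates": 4_000_000, "uptime": 123}
converted = {
    key: value / 8 if key in DEMO_SENSORS_RATES and isinstance(value, (int, float)) else value
    for key, value in data.items()
}
print(converted)  # {'sensor_rx_rates': 1000000.0, 'sensor_tx_rates': 500000.0, 'uptime': 123}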

View File

@@ -7,7 +7,7 @@ import os
import socket
from typing import Any, cast
from asusrouter import AsusRouterError
from pyasuswrt import AsusWrtError
import voluptuous as vol
from homeassistant.components.device_tracker import (
@@ -189,7 +189,7 @@ class AsusWrtFlowHandler(ConfigFlow, domain=DOMAIN):
try:
await api.async_connect()
except (AsusRouterError, OSError):
except (AsusWrtError, OSError):
_LOGGER.error(
"Error connecting to the AsusWrt router at %s using protocol %s",
host,

View File

@@ -1,56 +0,0 @@
"""Helpers for AsusWRT integration."""
from __future__ import annotations
from typing import Any, TypeVar
T = TypeVar("T", dict[str, Any], list[Any], None)
TRANSLATION_MAP = {
"wan_rx": "sensor_rx_bytes",
"wan_tx": "sensor_tx_bytes",
"total_usage": "cpu_total_usage",
"usage": "mem_usage_perc",
"free": "mem_free",
"used": "mem_used",
"wan_rx_speed": "sensor_rx_rates",
"wan_tx_speed": "sensor_tx_rates",
"2ghz": "2.4GHz",
"5ghz": "5.0GHz",
"5ghz2": "5.0GHz_2",
"6ghz": "6.0GHz",
"cpu": "CPU",
"datetime": "sensor_last_boot",
"uptime": "sensor_uptime",
**{f"{num}_usage": f"cpu{num}_usage" for num in range(1, 9)},
**{f"load_avg_{load}": f"sensor_load_avg{load}" for load in ("1", "5", "15")},
}
def clean_dict(raw: dict[str, Any]) -> dict[str, Any]:
"""Cleans dictionary from None values.
The `state` key is always preserved regardless of its value.
"""
return {k: v for k, v in raw.items() if v is not None or k.endswith("state")}
def translate_to_legacy(raw: T) -> T:
"""Translate raw data to legacy format for dicts and lists."""
if raw is None:
return None
if isinstance(raw, dict):
return {TRANSLATION_MAP.get(k, k): v for k, v in raw.items()}
if isinstance(raw, list):
return [
TRANSLATION_MAP[item]
if isinstance(item, str) and item in TRANSLATION_MAP
else item
for item in raw
]
return raw
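
To make the behaviour of the two helpers in this file concrete, a short usage example with made-up data (assuming clean_dict, translate_to_legacy and TRANSLATION_MAP as defined above are in scope):

raw = {"wan_rx": 123, "usage": 42.0, "wan_state": None, "unused": None}
print(clean_dict(raw))
# {'wan_rx': 123, 'usage': 42.0, 'wan_state': None}  (None kept only for *state keys)
print(translate_to_legacy(clean_dict(raw)))
# {'sensor_rx_bytes': 123, 'mem_usage_perc': 42.0, 'wan_state': None}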

View File

@@ -1,11 +1,11 @@
{
"domain": "asuswrt",
"name": "ASUSWRT",
"codeowners": ["@kennedyshead", "@ollo69", "@Vaskivskyi"],
"codeowners": ["@kennedyshead", "@ollo69"],
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/asuswrt",
"integration_type": "hub",
"iot_class": "local_polling",
"loggers": ["aioasuswrt", "asusrouter", "asyncssh"],
"requirements": ["aioasuswrt==1.4.0", "asusrouter==1.19.0"]
"loggers": ["aioasuswrt", "asyncssh"],
"requirements": ["aioasuswrt==1.4.0", "pyasuswrt==0.1.21"]
}

View File

@@ -7,7 +7,7 @@ from datetime import datetime, timedelta
import logging
from typing import TYPE_CHECKING, Any
from asusrouter import AsusRouterError
from pyasuswrt import AsusWrtError
from homeassistant.components.device_tracker import (
CONF_CONSIDER_HOME,
@@ -229,7 +229,7 @@ class AsusWrtRouter:
"""Set up a AsusWrt router."""
try:
await self._api.async_connect()
except (AsusRouterError, OSError) as exc:
except (AsusWrtError, OSError) as exc:
raise ConfigEntryNotReady from exc
if not self._api.is_connected:
raise ConfigEntryNotReady
@@ -284,7 +284,7 @@ class AsusWrtRouter:
_LOGGER.debug("Checking devices for ASUS router %s", self.host)
try:
wrt_devices = await self._api.async_get_connected_devices()
except (OSError, AsusRouterError) as exc:
except (OSError, AsusWrtError) as exc:
if not self._connect_error:
self._connect_error = True
_LOGGER.error(

View File

@@ -6,5 +6,5 @@
"documentation": "https://www.home-assistant.io/integrations/blue_current",
"iot_class": "cloud_push",
"loggers": ["bluecurrent_api"],
"requirements": ["bluecurrent-api==1.3.1"]
"requirements": ["bluecurrent-api==1.2.4"]
}

View File

@@ -16,7 +16,7 @@
"quality_scale": "internal",
"requirements": [
"bleak==1.0.1",
"bleak-retry-connector==4.0.1",
"bleak-retry-connector==4.0.0",
"bluetooth-adapters==2.0.0",
"bluetooth-auto-recovery==1.5.2",
"bluetooth-data-tools==1.28.2",

View File

@@ -69,7 +69,12 @@ class SHCEntity(SHCBaseEntity):
manufacturer=device.manufacturer,
model=device.device_model,
name=device.name,
via_device=(DOMAIN, device.root_device_id),
via_device=(
DOMAIN,
device.parent_device_id
if device.parent_device_id is not None
else parent_id,
),
)
super().__init__(device=device, parent_id=parent_id, entry_id=entry_id)

View File

@@ -7,7 +7,7 @@
"documentation": "https://www.home-assistant.io/integrations/bosch_shc",
"iot_class": "local_push",
"loggers": ["boschshcpy"],
"requirements": ["boschshcpy==0.2.107"],
"requirements": ["boschshcpy==0.2.91"],
"zeroconf": [
{
"type": "_http._tcp.local.",

View File

@@ -53,7 +53,8 @@ async def async_setup_entry(
assert unique_id is not None
async_add_entities(
BraviaTVButton(coordinator, unique_id, description) for description in BUTTONS
BraviaTVButton(coordinator, unique_id, config_entry.title, description)
for description in BUTTONS
)
@@ -66,10 +67,11 @@ class BraviaTVButton(BraviaTVEntity, ButtonEntity):
self,
coordinator: BraviaTVCoordinator,
unique_id: str,
model: str,
description: BraviaTVButtonDescription,
) -> None:
"""Initialize the button."""
super().__init__(coordinator, unique_id)
super().__init__(coordinator, unique_id, model)
self._attr_unique_id = f"{unique_id}_{description.key}"
self.entity_description = description

View File

@@ -79,16 +79,14 @@ class BraviaTVConfigFlow(ConfigFlow, domain=DOMAIN):
system_info = await self.client.get_system_info()
cid = system_info[ATTR_CID].lower()
title = system_info[ATTR_MODEL]
self.device_config[CONF_MAC] = system_info[ATTR_MAC]
await self.async_set_unique_id(cid)
self._abort_if_unique_id_configured()
return self.async_create_entry(
title=f"{system_info['name']} {system_info[ATTR_MODEL]}",
data=self.device_config,
)
return self.async_create_entry(title=title, data=self.device_config)
async def async_reauth_device(self) -> ConfigFlowResult:
"""Reauthorize Bravia TV device from config."""

View File

@@ -81,7 +81,6 @@ class BraviaTVCoordinator(DataUpdateCoordinator[None]):
self.use_psk = config_entry.data.get(CONF_USE_PSK, False)
self.client_id = config_entry.data.get(CONF_CLIENT_ID, LEGACY_CLIENT_ID)
self.nickname = config_entry.data.get(CONF_NICKNAME, NICKNAME_PREFIX)
self.system_info: dict[str, str] = {}
self.source: str | None = None
self.source_list: list[str] = []
self.source_map: dict[str, dict] = {}
@@ -151,9 +150,6 @@ class BraviaTVCoordinator(DataUpdateCoordinator[None]):
self.is_on = power_status == "active"
self.skipped_updates = 0
if not self.system_info:
self.system_info = await self.client.get_system_info()
if self.is_on is False:
return
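
The coordinator hunk above hinges on fetching system info only once and reusing it afterwards. A minimal sketch of that lazy-caching pattern with a made-up client object:

class DemoCoordinator:
    """Illustrative only: cache get_system_info() after the first refresh."""

    def __init__(self, client) -> None:
        self.client = client
        self.system_info: dict[str, str] = {}

    async def async_refresh(self) -> None:
        if not self.system_info:
            # Only hit the endpoint once; later refreshes reuse the cached dict.
            self.system_info = await self.client.get_system_info()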

View File

@@ -12,16 +12,23 @@ class BraviaTVEntity(CoordinatorEntity[BraviaTVCoordinator]):
_attr_has_entity_name = True
def __init__(self, coordinator: BraviaTVCoordinator, unique_id: str) -> None:
def __init__(
self,
coordinator: BraviaTVCoordinator,
unique_id: str,
model: str,
) -> None:
"""Initialize the entity."""
super().__init__(coordinator)
self._attr_unique_id = unique_id
self._attr_device_info = DeviceInfo(
identifiers={(DOMAIN, unique_id)},
connections={(CONNECTION_NETWORK_MAC, coordinator.system_info["macAddr"])},
manufacturer=ATTR_MANUFACTURER,
model_id=coordinator.system_info["model"],
hw_version=coordinator.system_info["generation"],
serial_number=coordinator.system_info["serial"],
model=model,
name=f"{ATTR_MANUFACTURER} {model}",
)
if coordinator.client.mac is not None:
self._attr_device_info["connections"] = {
(CONNECTION_NETWORK_MAC, coordinator.client.mac)
}

View File

@@ -34,7 +34,9 @@ async def async_setup_entry(
unique_id = config_entry.unique_id
assert unique_id is not None
async_add_entities([BraviaTVMediaPlayer(coordinator, unique_id)])
async_add_entities(
[BraviaTVMediaPlayer(coordinator, unique_id, config_entry.title)]
)
class BraviaTVMediaPlayer(BraviaTVEntity, MediaPlayerEntity):

View File

@@ -24,7 +24,7 @@ async def async_setup_entry(
unique_id = config_entry.unique_id
assert unique_id is not None
async_add_entities([BraviaTVRemote(coordinator, unique_id)])
async_add_entities([BraviaTVRemote(coordinator, unique_id, config_entry.title)])
class BraviaTVRemote(BraviaTVEntity, RemoteEntity):

View File

@@ -255,7 +255,7 @@ class ClimateEntity(Entity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_):
)
entity_description: ClimateEntityDescription
_attr_current_humidity: float | None = None
_attr_current_humidity: int | None = None
_attr_current_temperature: float | None = None
_attr_fan_mode: str | None
_attr_fan_modes: list[str] | None

View File

@@ -6,16 +6,12 @@ import asyncio
from collections.abc import Callable
from contextlib import suppress
from datetime import datetime, timedelta
from http import HTTPStatus
import logging
from typing import TYPE_CHECKING, Any
import aiohttp
from hass_nabucasa import AlexaApiError, Cloud
from hass_nabucasa.alexa_api import (
AlexaAccessTokenDetails,
AlexaApiNeedsRelinkError,
AlexaApiNoTokenError,
)
from hass_nabucasa import Cloud, cloud_api
from yarl import URL
from homeassistant.components import persistent_notification
@@ -150,7 +146,7 @@ class CloudAlexaConfig(alexa_config.AbstractConfig):
self._cloud_user = cloud_user
self._prefs = prefs
self._cloud = cloud
self._token: str | None = None
self._token = None
self._token_valid: datetime | None = None
self._cur_entity_prefs = async_get_assistant_settings(hass, CLOUD_ALEXA)
self._alexa_sync_unsub: Callable[[], None] | None = None
@@ -322,31 +318,32 @@ class CloudAlexaConfig(alexa_config.AbstractConfig):
async def async_get_access_token(self) -> str | None:
"""Get an access token."""
details: AlexaAccessTokenDetails | None
if self._token_valid is not None and self._token_valid > utcnow():
return self._token
try:
details = await self._cloud.alexa_api.access_token()
except AlexaApiNeedsRelinkError as exception:
if self.should_report_state:
persistent_notification.async_create(
self.hass,
(
"There was an error reporting state to Alexa"
f" ({exception.reason}). Please re-link your Alexa skill via"
" the Alexa app to continue using it."
),
"Alexa state reporting disabled",
"cloud_alexa_report",
)
raise alexa_errors.RequireRelink from exception
except (AlexaApiNoTokenError, AlexaApiError) as exception:
raise alexa_errors.NoTokenAvailable from exception
resp = await cloud_api.async_alexa_access_token(self._cloud)
body = await resp.json()
self._token = details["access_token"]
self._endpoint = details["event_endpoint"]
self._token_valid = utcnow() + timedelta(seconds=details["expires_in"])
if resp.status == HTTPStatus.BAD_REQUEST:
if body["reason"] in ("RefreshTokenNotFound", "UnknownRegion"):
if self.should_report_state:
persistent_notification.async_create(
self.hass,
(
"There was an error reporting state to Alexa"
f" ({body['reason']}). Please re-link your Alexa skill via"
" the Alexa app to continue using it."
),
"Alexa state reporting disabled",
"cloud_alexa_report",
)
raise alexa_errors.RequireRelink
raise alexa_errors.NoTokenAvailable
self._token = body["access_token"]
self._endpoint = body["event_endpoint"]
self._token_valid = utcnow() + timedelta(seconds=body["expires_in"])
return self._token
async def _async_prefs_updated(self, prefs: CloudPreferences) -> None:
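
Both versions of async_get_access_token in this hunk cache the token together with an expiry timestamp. A standalone sketch of that caching pattern using only the standard library (the fetch callback is a placeholder):

from datetime import datetime, timedelta, timezone

_token: str | None = None
_token_valid: datetime | None = None

def get_cached_token(fetch) -> str:
    """Return the cached token while still valid, otherwise refresh it."""
    global _token, _token_valid
    now = datetime.now(timezone.utc)
    if _token is not None and _token_valid is not None and _token_valid > now:
        return _token
    token, expires_in = fetch()  # e.g. ("abc123", 3600)
    _token = token
    _token_valid = now + timedelta(seconds=expires_in)
    return _token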

View File

@@ -7,7 +7,7 @@ from http import HTTPStatus
import logging
from typing import TYPE_CHECKING, Any
from hass_nabucasa import Cloud
from hass_nabucasa import Cloud, cloud_api
from hass_nabucasa.google_report_state import ErrorResponse
from homeassistant.components.binary_sensor import BinarySensorDeviceClass
@@ -377,7 +377,7 @@ class CloudGoogleConfig(AbstractConfig):
return HTTPStatus.OK
async with self._sync_entities_lock:
resp = await self._cloud.google_report_state.request_sync()
resp = await cloud_api.async_google_actions_request_sync(self._cloud)
return resp.status
async def async_connect_agent_user(self, agent_user_id: str) -> None:

View File

@@ -13,6 +13,6 @@
"integration_type": "system",
"iot_class": "cloud_push",
"loggers": ["acme", "hass_nabucasa", "snitun"],
"requirements": ["hass-nabucasa==1.0.0"],
"requirements": ["hass-nabucasa==0.111.2"],
"single_config_entry": true
}

View File

@@ -7,18 +7,22 @@ import logging
from coinbase.rest import RESTClient
from coinbase.rest.rest_base import HTTPError
from coinbase.wallet.client import Client as LegacyClient
from coinbase.wallet.error import AuthenticationError
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_API_KEY, CONF_API_TOKEN, Platform
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryAuthFailed
from homeassistant.helpers import entity_registry as er
from homeassistant.util import Throttle
from .const import (
ACCOUNT_IS_VAULT,
API_ACCOUNT_AMOUNT,
API_ACCOUNT_AVALIABLE,
API_ACCOUNT_BALANCE,
API_ACCOUNT_CURRENCY,
API_ACCOUNT_CURRENCY_CODE,
API_ACCOUNT_HOLD,
API_ACCOUNT_ID,
API_ACCOUNT_NAME,
@@ -27,9 +31,12 @@ from .const import (
API_DATA,
API_RATES_CURRENCY,
API_RESOURCE_TYPE,
API_TYPE_VAULT,
API_V3_ACCOUNT_ID,
API_V3_TYPE_VAULT,
CONF_CURRENCIES,
CONF_EXCHANGE_BASE,
CONF_EXCHANGE_RATES,
)
_LOGGER = logging.getLogger(__name__)
@@ -44,6 +51,9 @@ async def async_setup_entry(hass: HomeAssistant, entry: CoinbaseConfigEntry) ->
"""Set up Coinbase from a config entry."""
instance = await hass.async_add_executor_job(create_and_update_instance, entry)
entry.async_on_unload(entry.add_update_listener(update_listener))
entry.runtime_data = instance
await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
@@ -58,28 +68,68 @@ async def async_unload_entry(hass: HomeAssistant, entry: CoinbaseConfigEntry) ->
def create_and_update_instance(entry: CoinbaseConfigEntry) -> CoinbaseData:
"""Create and update a Coinbase Data instance."""
# Check if user is using deprecated v2 API credentials
if "organizations" not in entry.data[CONF_API_KEY]:
# Trigger reauthentication to ask user for v3 credentials
raise ConfigEntryAuthFailed(
"Your Coinbase API key appears to be for the deprecated v2 API. "
"Please reconfigure with a new API key created for the v3 API. "
"Visit https://www.coinbase.com/developer-platform to create new credentials."
client = LegacyClient(entry.data[CONF_API_KEY], entry.data[CONF_API_TOKEN])
version = "v2"
else:
client = RESTClient(
api_key=entry.data[CONF_API_KEY], api_secret=entry.data[CONF_API_TOKEN]
)
client = RESTClient(
api_key=entry.data[CONF_API_KEY], api_secret=entry.data[CONF_API_TOKEN]
)
version = "v3"
base_rate = entry.options.get(CONF_EXCHANGE_BASE, "USD")
instance = CoinbaseData(client, base_rate)
instance = CoinbaseData(client, base_rate, version)
instance.update()
return instance
def get_accounts(client):
async def update_listener(
hass: HomeAssistant, config_entry: CoinbaseConfigEntry
) -> None:
"""Handle options update."""
await hass.config_entries.async_reload(config_entry.entry_id)
registry = er.async_get(hass)
entities = er.async_entries_for_config_entry(registry, config_entry.entry_id)
# Remove orphaned entities
for entity in entities:
currency = entity.unique_id.split("-")[-1]
if (
"xe" in entity.unique_id
and currency not in config_entry.options.get(CONF_EXCHANGE_RATES, [])
) or (
"wallet" in entity.unique_id
and currency not in config_entry.options.get(CONF_CURRENCIES, [])
):
registry.async_remove(entity.entity_id)
def get_accounts(client, version):
"""Handle paginated accounts."""
response = client.get_accounts()
if version == "v2":
accounts = response[API_DATA]
next_starting_after = response.pagination.next_starting_after
while next_starting_after:
response = client.get_accounts(starting_after=next_starting_after)
accounts += response[API_DATA]
next_starting_after = response.pagination.next_starting_after
return [
{
API_ACCOUNT_ID: account[API_ACCOUNT_ID],
API_ACCOUNT_NAME: account[API_ACCOUNT_NAME],
API_ACCOUNT_CURRENCY: account[API_ACCOUNT_CURRENCY][
API_ACCOUNT_CURRENCY_CODE
],
API_ACCOUNT_AMOUNT: account[API_ACCOUNT_BALANCE][API_ACCOUNT_AMOUNT],
ACCOUNT_IS_VAULT: account[API_RESOURCE_TYPE] == API_TYPE_VAULT,
}
for account in accounts
]
accounts = response[API_ACCOUNTS]
while response["has_next"]:
response = client.get_accounts(cursor=response["cursor"])
@@ -103,28 +153,37 @@ def get_accounts(client):
class CoinbaseData:
"""Get the latest data and update the states."""
def __init__(self, client, exchange_base):
def __init__(self, client, exchange_base, version):
"""Init the coinbase data object."""
self.client = client
self.accounts = None
self.exchange_base = exchange_base
self.exchange_rates = None
self.user_id = (
"v3_" + client.get_portfolios()["portfolios"][0][API_V3_ACCOUNT_ID]
)
if version == "v2":
self.user_id = self.client.get_current_user()[API_ACCOUNT_ID]
else:
self.user_id = (
"v3_" + client.get_portfolios()["portfolios"][0][API_V3_ACCOUNT_ID]
)
self.api_version = version
@Throttle(MIN_TIME_BETWEEN_UPDATES)
def update(self):
"""Get the latest data from coinbase."""
try:
self.accounts = get_accounts(self.client)
self.exchange_rates = self.client.get(
"/v2/exchange-rates",
params={API_RATES_CURRENCY: self.exchange_base},
)[API_DATA]
except HTTPError as coinbase_error:
self.accounts = get_accounts(self.client, self.api_version)
if self.api_version == "v2":
self.exchange_rates = self.client.get_exchange_rates(
currency=self.exchange_base
)
else:
self.exchange_rates = self.client.get(
"/v2/exchange-rates",
params={API_RATES_CURRENCY: self.exchange_base},
)[API_DATA]
except (AuthenticationError, HTTPError) as coinbase_error:
_LOGGER.error(
"Authentication error connecting to coinbase: %s", coinbase_error
)
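Both clients above page through accounts with the same loop shape: the legacy v2 client follows pagination.next_starting_after, while the v3 REST client follows has_next/cursor. A self-contained sketch of the v3 variant against a stand-in client (FakeClient is hypothetical; only the loop mirrors the integration code):

def fetch_all_accounts_v3(client):
    """Collect every account across pages (illustrative sketch of the v3 loop)."""
    response = client.get_accounts()
    accounts = list(response["accounts"])
    while response["has_next"]:
        response = client.get_accounts(cursor=response["cursor"])
        accounts += response["accounts"]
    return accounts


class FakeClient:
    """Hypothetical stand-in that serves two pages of one account each."""

    def __init__(self):
        self._pages = [
            {"accounts": [{"uuid": "a1"}], "has_next": True, "cursor": "p2"},
            {"accounts": [{"uuid": "a2"}], "has_next": False, "cursor": ""},
        ]

    def get_accounts(self, cursor=None):
        return self._pages[1] if cursor else self._pages[0]


assert len(fetch_all_accounts_v3(FakeClient())) == 2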

View File

@@ -2,20 +2,17 @@
from __future__ import annotations
from collections.abc import Mapping
import logging
from typing import Any
from coinbase.rest import RESTClient
from coinbase.rest.rest_base import HTTPError
from coinbase.wallet.client import Client as LegacyClient
from coinbase.wallet.error import AuthenticationError
import voluptuous as vol
from homeassistant.config_entries import (
ConfigFlow,
ConfigFlowResult,
OptionsFlowWithReload,
)
from homeassistant.const import CONF_API_KEY, CONF_API_TOKEN
from homeassistant.config_entries import ConfigFlow, ConfigFlowResult, OptionsFlow
from homeassistant.const import CONF_API_KEY, CONF_API_TOKEN, CONF_API_VERSION
from homeassistant.core import HomeAssistant, callback
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers import config_validation as cv
@@ -48,6 +45,9 @@ STEP_USER_DATA_SCHEMA = vol.Schema(
def get_user_from_client(api_key, api_token):
"""Get the user name from Coinbase API credentials."""
if "organizations" not in api_key:
client = LegacyClient(api_key, api_token)
return client.get_current_user()["name"]
client = RESTClient(api_key=api_key, api_secret=api_token)
return client.get_portfolios()["portfolios"][0]["name"]
@@ -59,7 +59,7 @@ async def validate_api(hass: HomeAssistant, data):
user = await hass.async_add_executor_job(
get_user_from_client, data[CONF_API_KEY], data[CONF_API_TOKEN]
)
except HTTPError as error:
except (AuthenticationError, HTTPError) as error:
if "api key" in str(error) or " 401 Client Error" in str(error):
_LOGGER.debug("Coinbase rejected API credentials due to an invalid API key")
raise InvalidKey from error
@@ -74,8 +74,8 @@ async def validate_api(hass: HomeAssistant, data):
raise InvalidAuth from error
except ConnectionError as error:
raise CannotConnect from error
return {"title": user}
api_version = "v3" if "organizations" in data[CONF_API_KEY] else "v2"
return {"title": user, "api_version": api_version}
async def validate_options(
@@ -85,17 +85,20 @@ async def validate_options(
client = config_entry.runtime_data.client
accounts = await hass.async_add_executor_job(get_accounts, client)
accounts = await hass.async_add_executor_job(
get_accounts, client, config_entry.data.get("api_version", "v2")
)
accounts_currencies = [
account[API_ACCOUNT_CURRENCY]
for account in accounts
if not account[ACCOUNT_IS_VAULT]
]
resp = await hass.async_add_executor_job(client.get, "/v2/exchange-rates")
available_rates = resp[API_DATA]
if config_entry.data.get("api_version", "v2") == "v2":
available_rates = await hass.async_add_executor_job(client.get_exchange_rates)
else:
resp = await hass.async_add_executor_job(client.get, "/v2/exchange-rates")
available_rates = resp[API_DATA]
if CONF_CURRENCIES in options:
for currency in options[CONF_CURRENCIES]:
if currency not in accounts_currencies:
@@ -114,8 +117,6 @@ class CoinbaseConfigFlow(ConfigFlow, domain=DOMAIN):
VERSION = 1
reauth_entry: CoinbaseConfigEntry
async def async_step_user(
self, user_input: dict[str, str] | None = None
) -> ConfigFlowResult:
@@ -142,63 +143,12 @@ class CoinbaseConfigFlow(ConfigFlow, domain=DOMAIN):
_LOGGER.exception("Unexpected exception")
errors["base"] = "unknown"
else:
user_input[CONF_API_VERSION] = info["api_version"]
return self.async_create_entry(title=info["title"], data=user_input)
return self.async_show_form(
step_id="user", data_schema=STEP_USER_DATA_SCHEMA, errors=errors
)
async def async_step_reauth(
self, entry_data: Mapping[str, Any]
) -> ConfigFlowResult:
"""Handle reauthentication flow."""
self.reauth_entry = self._get_reauth_entry()
return await self.async_step_reauth_confirm()
async def async_step_reauth_confirm(
self, user_input: dict[str, str] | None = None
) -> ConfigFlowResult:
"""Handle reauthentication confirmation."""
errors: dict[str, str] = {}
if user_input is None:
return self.async_show_form(
step_id="reauth_confirm",
data_schema=STEP_USER_DATA_SCHEMA,
description_placeholders={
"account_name": self.reauth_entry.title,
},
errors=errors,
)
try:
await validate_api(self.hass, user_input)
except CannotConnect:
errors["base"] = "cannot_connect"
except InvalidKey:
errors["base"] = "invalid_auth_key"
except InvalidSecret:
errors["base"] = "invalid_auth_secret"
except InvalidAuth:
errors["base"] = "invalid_auth"
except Exception:
_LOGGER.exception("Unexpected exception")
errors["base"] = "unknown"
else:
return self.async_update_reload_and_abort(
self.reauth_entry,
data_updates=user_input,
reason="reauth_successful",
)
return self.async_show_form(
step_id="reauth_confirm",
data_schema=STEP_USER_DATA_SCHEMA,
description_placeholders={
"account_name": self.reauth_entry.title,
},
errors=errors,
)
@staticmethod
@callback
def async_get_options_flow(
@@ -208,7 +158,7 @@ class CoinbaseConfigFlow(ConfigFlow, domain=DOMAIN):
return OptionsFlowHandler()
class OptionsFlowHandler(OptionsFlowWithReload):
class OptionsFlowHandler(OptionsFlow):
"""Handle a option flow for Coinbase."""
async def async_step_init(

View File

@@ -6,5 +6,5 @@
"documentation": "https://www.home-assistant.io/integrations/coinbase",
"iot_class": "cloud_polling",
"loggers": ["coinbase"],
"requirements": ["coinbase-advanced-py==1.2.2"]
"requirements": ["coinbase==2.1.0", "coinbase-advanced-py==1.2.2"]
}

View File

@@ -6,7 +6,6 @@ import logging
from homeassistant.components.sensor import SensorEntity, SensorStateClass
from homeassistant.core import HomeAssistant
from homeassistant.helpers import entity_registry as er
from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
@@ -28,6 +27,7 @@ from .const import (
_LOGGER = logging.getLogger(__name__)
ATTR_NATIVE_BALANCE = "Balance in native currency"
ATTR_API_VERSION = "API Version"
CURRENCY_ICONS = {
"BTC": "mdi:currency-btc",
@@ -69,26 +69,11 @@ async def async_setup_entry(
CONF_EXCHANGE_PRECISION, CONF_EXCHANGE_PRECISION_DEFAULT
)
# Remove orphaned entities
registry = er.async_get(hass)
existing_entities = er.async_entries_for_config_entry(
registry, config_entry.entry_id
)
for entity in existing_entities:
currency = entity.unique_id.split("-")[-1]
if (
"xe" in entity.unique_id
and currency not in config_entry.options.get(CONF_EXCHANGE_RATES, [])
) or (
"wallet" in entity.unique_id
and currency not in config_entry.options.get(CONF_CURRENCIES, [])
):
registry.async_remove(entity.entity_id)
for currency in desired_currencies:
_LOGGER.debug(
"Attempting to set up %s account sensor",
"Attempting to set up %s account sensor with %s API",
currency,
instance.api_version,
)
if currency not in provided_currencies:
_LOGGER.warning(
@@ -104,8 +89,9 @@ async def async_setup_entry(
if CONF_EXCHANGE_RATES in config_entry.options:
for rate in config_entry.options[CONF_EXCHANGE_RATES]:
_LOGGER.debug(
"Attempting to set up %s exchange rate sensor",
"Attempting to set up %s account sensor with %s API",
rate,
instance.api_version,
)
entities.append(
ExchangeRateSensor(
@@ -160,13 +146,15 @@ class AccountSensor(SensorEntity):
"""Return the state attributes of the sensor."""
return {
ATTR_NATIVE_BALANCE: f"{self._native_balance} {self._coinbase_data.exchange_base}",
ATTR_API_VERSION: self._coinbase_data.api_version,
}
def update(self) -> None:
"""Get the latest state of the sensor."""
_LOGGER.debug(
"Updating %s account sensor",
"Updating %s account sensor with %s API",
self._currency,
self._coinbase_data.api_version,
)
self._coinbase_data.update()
for account in self._coinbase_data.accounts:
@@ -222,8 +210,9 @@ class ExchangeRateSensor(SensorEntity):
def update(self) -> None:
"""Get the latest state of the sensor."""
_LOGGER.debug(
"Updating %s rate sensor",
"Updating %s rate sensor with %s API",
self._currency,
self._coinbase_data.api_version,
)
self._coinbase_data.update()
self._attr_native_value = round(

View File

@@ -8,14 +8,6 @@
"api_key": "[%key:common::config_flow::data::api_key%]",
"api_token": "API secret"
}
},
"reauth_confirm": {
"title": "Update Coinbase API credentials",
"description": "Your current Coinbase API key appears to be for the deprecated v2 API. Please reconfigure with a new API key created for the v3 API. Visit https://www.coinbase.com/developer-platform to create new credentials for {account_name}.",
"data": {
"api_key": "[%key:common::config_flow::data::api_key%]",
"api_token": "API secret"
}
}
},
"error": {
@@ -26,8 +18,7 @@
"unknown": "[%key:common::config_flow::error::unknown%]"
},
"abort": {
"already_configured": "[%key:common::config_flow::abort::already_configured_device%]",
"reauth_successful": "Successfully updated credentials"
"already_configured": "[%key:common::config_flow::abort::already_configured_device%]"
}
},
"options": {

View File

@@ -40,7 +40,6 @@ from .chat_log import (
ConverseError,
SystemContent,
ToolResultContent,
ToolResultContentDeltaDict,
UserContent,
async_get_chat_log,
)
@@ -80,7 +79,6 @@ __all__ = [
"ConverseError",
"SystemContent",
"ToolResultContent",
"ToolResultContentDeltaDict",
"UserContent",
"async_conversation_trace_append",
"async_converse",
@@ -119,7 +117,7 @@ CONFIG_SCHEMA = vol.Schema(
{cv.string: vol.All(cv.ensure_list, [cv.string])}
)
}
),
)
},
extra=vol.ALLOW_EXTRA,
)
@@ -270,9 +268,8 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
entity_component = EntityComponent[ConversationEntity](_LOGGER, DOMAIN, hass)
hass.data[DATA_COMPONENT] = entity_component
agent_config = config.get(DOMAIN, {})
await async_setup_default_agent(
hass, entity_component, config_intents=agent_config.get("intents", {})
hass, entity_component, config.get(DOMAIN, {}).get("intents", {})
)
async def handle_process(service: ServiceCall) -> ServiceResponse:

View File

@@ -9,7 +9,7 @@ from contextvars import ContextVar
from dataclasses import asdict, dataclass, field, replace
import logging
from pathlib import Path
from typing import Any, Literal, TypedDict, cast
from typing import Any, Literal, TypedDict
import voluptuous as vol
@@ -190,15 +190,6 @@ class AssistantContentDeltaDict(TypedDict, total=False):
native: Any
class ToolResultContentDeltaDict(TypedDict, total=False):
"""Tool result content."""
role: Literal["tool_result"]
tool_call_id: str
tool_name: str
tool_result: JsonObjectType
@dataclass
class ChatLog:
"""Class holding the chat history of a specific conversation."""
@@ -244,25 +235,17 @@ class ChatLog:
@callback
def async_add_assistant_content_without_tools(
self, content: AssistantContent | ToolResultContent
self, content: AssistantContent
) -> None:
"""Add assistant content to the log.
Allows assistant content without tool calls or with external tool calls only,
as well as tool results for the external tools.
"""
"""Add assistant content to the log."""
LOGGER.debug("Adding assistant content: %s", content)
if (
isinstance(content, AssistantContent)
and content.tool_calls is not None
and any(not tool_call.external for tool_call in content.tool_calls)
):
raise ValueError("Non-external tool calls not allowed")
if content.tool_calls is not None:
raise ValueError("Tool calls not allowed")
self.content.append(content)
async def async_add_assistant_content(
self,
content: AssistantContent | ToolResultContent,
content: AssistantContent,
/,
tool_call_tasks: dict[str, asyncio.Task] | None = None,
) -> AsyncGenerator[ToolResultContent]:
@@ -275,11 +258,7 @@ class ChatLog:
LOGGER.debug("Adding assistant content: %s", content)
self.content.append(content)
if (
not isinstance(content, AssistantContent)
or content.tool_calls is None
or all(tool_call.external for tool_call in content.tool_calls)
):
if content.tool_calls is None:
return
if self.llm_api is None:
@@ -288,16 +267,13 @@ class ChatLog:
if tool_call_tasks is None:
tool_call_tasks = {}
for tool_input in content.tool_calls:
if tool_input.id not in tool_call_tasks and not tool_input.external:
if tool_input.id not in tool_call_tasks:
tool_call_tasks[tool_input.id] = self.hass.async_create_task(
self.llm_api.async_call_tool(tool_input),
name=f"llm_tool_{tool_input.id}",
)
for tool_input in content.tool_calls:
if tool_input.external:
continue
LOGGER.debug(
"Tool call: %s(%s)", tool_input.tool_name, tool_input.tool_args
)
@@ -320,9 +296,7 @@ class ChatLog:
yield response_content
async def async_add_delta_content_stream(
self,
agent_id: str,
stream: AsyncIterable[AssistantContentDeltaDict | ToolResultContentDeltaDict],
self, agent_id: str, stream: AsyncIterable[AssistantContentDeltaDict]
) -> AsyncGenerator[AssistantContent | ToolResultContent]:
"""Stream content into the chat log.
@@ -346,34 +320,30 @@ class ChatLog:
# Indicates update to current message
if "role" not in delta:
# ToolResultContentDeltaDict will always have a role
assistant_delta = cast(AssistantContentDeltaDict, delta)
if delta_content := assistant_delta.get("content"):
if delta_content := delta.get("content"):
current_content += delta_content
if delta_thinking_content := assistant_delta.get("thinking_content"):
if delta_thinking_content := delta.get("thinking_content"):
current_thinking_content += delta_thinking_content
if delta_native := assistant_delta.get("native"):
if delta_native := delta.get("native"):
if current_native is not None:
raise RuntimeError(
"Native content already set, cannot overwrite"
)
current_native = delta_native
if delta_tool_calls := assistant_delta.get("tool_calls"):
if delta_tool_calls := delta.get("tool_calls"):
if self.llm_api is None:
raise ValueError("No LLM API configured")
current_tool_calls += delta_tool_calls
# Start processing the tool calls as soon as we know about them
for tool_call in delta_tool_calls:
if not tool_call.external:
if self.llm_api is None:
raise ValueError("No LLM API configured")
tool_call_tasks[tool_call.id] = self.hass.async_create_task(
self.llm_api.async_call_tool(tool_call),
name=f"llm_tool_{tool_call.id}",
)
tool_call_tasks[tool_call.id] = self.hass.async_create_task(
self.llm_api.async_call_tool(tool_call),
name=f"llm_tool_{tool_call.id}",
)
if self.delta_listener:
if filtered_delta := {
k: v for k, v in assistant_delta.items() if k != "native"
k: v for k, v in delta.items() if k != "native"
}:
# We do not want to send the native content to the listener
# as it is not JSON serializable
@@ -381,6 +351,10 @@ class ChatLog:
continue
# Starting a new message
if delta["role"] != "assistant":
raise ValueError(f"Only assistant role expected. Got {delta['role']}")
# Yield the previous message if it has content
if (
current_content
@@ -388,7 +362,7 @@ class ChatLog:
or current_tool_calls
or current_native
):
content: AssistantContent | ToolResultContent = AssistantContent(
content = AssistantContent(
agent_id=agent_id,
content=current_content or None,
thinking_content=current_thinking_content or None,
@@ -402,38 +376,14 @@ class ChatLog:
yield tool_result
if self.delta_listener:
self.delta_listener(self, asdict(tool_result))
current_content = ""
current_thinking_content = ""
current_native = None
current_tool_calls = []
if delta["role"] == "assistant":
current_content = delta.get("content") or ""
current_thinking_content = delta.get("thinking_content") or ""
current_tool_calls = delta.get("tool_calls") or []
current_native = delta.get("native")
current_content = delta.get("content") or ""
current_thinking_content = delta.get("thinking_content") or ""
current_tool_calls = delta.get("tool_calls") or []
current_native = delta.get("native")
if self.delta_listener:
if filtered_delta := {
k: v for k, v in delta.items() if k != "native"
}:
self.delta_listener(self, filtered_delta)
elif delta["role"] == "tool_result":
content = ToolResultContent(
agent_id=agent_id,
tool_call_id=delta["tool_call_id"],
tool_name=delta["tool_name"],
tool_result=delta["tool_result"],
)
yield content
if self.delta_listener:
self.delta_listener(self, asdict(content))
self.async_add_assistant_content_without_tools(content)
else:
raise ValueError(
"Only assistant and tool_result roles expected."
f" Got {delta['role']}"
)
if self.delta_listener:
self.delta_listener(self, delta) # type: ignore[arg-type]
if (
current_content

View File

@@ -14,19 +14,14 @@ import re
import time
from typing import IO, Any, cast
from hassil.expression import Expression, Group, ListReference, TextChunk
from hassil.fuzzy import FuzzyNgramMatcher, SlotCombinationInfo
from hassil.expression import Expression, ListReference, Sequence, TextChunk
from hassil.intents import (
Intent,
IntentData,
Intents,
SlotList,
TextSlotList,
TextSlotValue,
WildcardSlotList,
)
from hassil.models import MatchEntity
from hassil.ngram import Sqlite3NgramModel
from hassil.recognize import (
MISSING_ENTITY,
RecognizeResult,
@@ -36,15 +31,7 @@ from hassil.recognize import (
from hassil.string_matcher import UnmatchedRangeEntity, UnmatchedTextEntity
from hassil.trie import Trie
from hassil.util import merge_dict
from home_assistant_intents import (
ErrorKey,
FuzzyConfig,
FuzzyLanguageResponses,
get_fuzzy_config,
get_fuzzy_language,
get_intents,
get_languages,
)
from home_assistant_intents import ErrorKey, get_intents, get_languages
import yaml
from homeassistant import core
@@ -89,7 +76,6 @@ TRIGGER_CALLBACK_TYPE = Callable[
]
METADATA_CUSTOM_SENTENCE = "hass_custom_sentence"
METADATA_CUSTOM_FILE = "hass_custom_file"
METADATA_FUZZY_MATCH = "hass_fuzzy_match"
ERROR_SENTINEL = object()
@@ -108,8 +94,6 @@ class LanguageIntents:
intent_responses: dict[str, Any]
error_responses: dict[str, Any]
language_variant: str | None
fuzzy_matcher: FuzzyNgramMatcher | None = None
fuzzy_responses: FuzzyLanguageResponses | None = None
@dataclass(slots=True)
@@ -135,13 +119,10 @@ class IntentMatchingStage(Enum):
EXPOSED_ENTITIES_ONLY = auto()
"""Match against exposed entities only."""
FUZZY = auto()
"""Use fuzzy matching to guess intent."""
UNEXPOSED_ENTITIES = auto()
"""Match against unexposed entities in Home Assistant."""
UNKNOWN_NAMES = auto()
FUZZY = auto()
"""Capture names that are not known to Home Assistant."""
@@ -260,10 +241,6 @@ class DefaultAgent(ConversationEntity):
# LRU cache to avoid unnecessary intent matching
self._intent_cache = IntentCache(capacity=128)
# Shared configuration for fuzzy matching
self.fuzzy_matching = True
self._fuzzy_config: FuzzyConfig | None = None
@property
def supported_languages(self) -> list[str]:
"""Return a list of supported languages."""
@@ -322,7 +299,7 @@ class DefaultAgent(ConversationEntity):
_LOGGER.warning("No intents were loaded for language: %s", language)
return None
slot_lists = await self._make_slot_lists()
slot_lists = self._make_slot_lists()
intent_context = self._make_intent_context(user_input)
if self._exposed_names_trie is not None:
@@ -579,36 +556,6 @@ class DefaultAgent(ConversationEntity):
# Don't try matching against all entities or doing a fuzzy match
return None
# Use fuzzy matching
skip_fuzzy_match = False
if cache_value is not None:
if (cache_value.result is not None) and (
cache_value.stage == IntentMatchingStage.FUZZY
):
_LOGGER.debug("Got cached result for fuzzy match")
return cache_value.result
# Continue with matching, but we know we won't succeed for fuzzy
# match.
skip_fuzzy_match = True
if (not skip_fuzzy_match) and self.fuzzy_matching:
start_time = time.monotonic()
fuzzy_result = self._recognize_fuzzy(lang_intents, user_input)
# Update cache
self._intent_cache.put(
cache_key,
IntentCacheValue(result=fuzzy_result, stage=IntentMatchingStage.FUZZY),
)
_LOGGER.debug(
"Did fuzzy match in %s second(s)", time.monotonic() - start_time
)
if fuzzy_result is not None:
return fuzzy_result
# Try again with all entities (including unexposed)
skip_unexposed_entities_match = False
if cache_value is not None:
@@ -654,160 +601,102 @@ class DefaultAgent(ConversationEntity):
# This should fail the intent handling phase (async_match_targets).
return strict_result
# Check unknown names
skip_unknown_names = False
# Try again with missing entities enabled
skip_fuzzy_match = False
if cache_value is not None:
if (cache_value.result is not None) and (
cache_value.stage == IntentMatchingStage.UNKNOWN_NAMES
cache_value.stage == IntentMatchingStage.FUZZY
):
_LOGGER.debug("Got cached result for unknown names")
_LOGGER.debug("Got cached result for fuzzy match")
return cache_value.result
skip_unknown_names = True
# We know we won't succeed for fuzzy matching.
skip_fuzzy_match = True
maybe_result: RecognizeResult | None = None
if not skip_unknown_names:
if not skip_fuzzy_match:
start_time = time.monotonic()
maybe_result = self._recognize_unknown_names(
lang_intents, user_input, slot_lists, intent_context
)
best_num_matched_entities = 0
best_num_unmatched_entities = 0
best_num_unmatched_ranges = 0
for result in recognize_all(
user_input.text,
lang_intents.intents,
slot_lists=slot_lists,
intent_context=intent_context,
allow_unmatched_entities=True,
):
if result.text_chunks_matched < 1:
# Skip results that don't match any literal text
continue
# Don't count missing entities that couldn't be filled from context
num_matched_entities = 0
for matched_entity in result.entities_list:
if matched_entity.name not in result.unmatched_entities:
num_matched_entities += 1
num_unmatched_entities = 0
num_unmatched_ranges = 0
for unmatched_entity in result.unmatched_entities_list:
if isinstance(unmatched_entity, UnmatchedTextEntity):
if unmatched_entity.text != MISSING_ENTITY:
num_unmatched_entities += 1
elif isinstance(unmatched_entity, UnmatchedRangeEntity):
num_unmatched_ranges += 1
num_unmatched_entities += 1
else:
num_unmatched_entities += 1
if (
(maybe_result is None) # first result
or (
# More literal text matched
result.text_chunks_matched > maybe_result.text_chunks_matched
)
or (
# More entities matched
num_matched_entities > best_num_matched_entities
)
or (
# Fewer unmatched entities
(num_matched_entities == best_num_matched_entities)
and (num_unmatched_entities < best_num_unmatched_entities)
)
or (
# Prefer unmatched ranges
(num_matched_entities == best_num_matched_entities)
and (num_unmatched_entities == best_num_unmatched_entities)
and (num_unmatched_ranges > best_num_unmatched_ranges)
)
or (
# Prefer match failures with entities
(result.text_chunks_matched == maybe_result.text_chunks_matched)
and (num_unmatched_entities == best_num_unmatched_entities)
and (num_unmatched_ranges == best_num_unmatched_ranges)
and (
("name" in result.entities)
or ("name" in result.unmatched_entities)
)
)
):
maybe_result = result
best_num_matched_entities = num_matched_entities
best_num_unmatched_entities = num_unmatched_entities
best_num_unmatched_ranges = num_unmatched_ranges
# Update cache
self._intent_cache.put(
cache_key,
IntentCacheValue(
result=maybe_result, stage=IntentMatchingStage.UNKNOWN_NAMES
),
IntentCacheValue(result=maybe_result, stage=IntentMatchingStage.FUZZY),
)
_LOGGER.debug(
"Did unknown names match in %s second(s)", time.monotonic() - start_time
"Did fuzzy match in %s second(s)", time.monotonic() - start_time
)
return maybe_result
def _recognize_fuzzy(
self, lang_intents: LanguageIntents, user_input: ConversationInput
) -> RecognizeResult | None:
"""Return fuzzy recognition from hassil."""
if lang_intents.fuzzy_matcher is None:
return None
fuzzy_result = lang_intents.fuzzy_matcher.match(user_input.text)
if fuzzy_result is None:
return None
response = "default"
if lang_intents.fuzzy_responses:
domain = "" # no domain
if "name" in fuzzy_result.slots:
domain = fuzzy_result.name_domain
elif "domain" in fuzzy_result.slots:
domain = fuzzy_result.slots["domain"].value
slot_combo = tuple(sorted(fuzzy_result.slots))
if (
intent_responses := lang_intents.fuzzy_responses.get(
fuzzy_result.intent_name
)
) and (combo_responses := intent_responses.get(slot_combo)):
response = combo_responses.get(domain, response)
entities = [
MatchEntity(name=slot_name, value=slot_value.value, text=slot_value.text)
for slot_name, slot_value in fuzzy_result.slots.items()
]
return RecognizeResult(
intent=Intent(name=fuzzy_result.intent_name),
intent_data=IntentData(sentence_texts=[]),
intent_metadata={METADATA_FUZZY_MATCH: True},
entities={entity.name: entity for entity in entities},
entities_list=entities,
response=response,
)
def _recognize_unknown_names(
self,
lang_intents: LanguageIntents,
user_input: ConversationInput,
slot_lists: dict[str, SlotList],
intent_context: dict[str, Any] | None,
) -> RecognizeResult | None:
"""Return result with unknown names for an error message."""
maybe_result: RecognizeResult | None = None
best_num_matched_entities = 0
best_num_unmatched_entities = 0
best_num_unmatched_ranges = 0
for result in recognize_all(
user_input.text,
lang_intents.intents,
slot_lists=slot_lists,
intent_context=intent_context,
allow_unmatched_entities=True,
):
if result.text_chunks_matched < 1:
# Skip results that don't match any literal text
continue
# Don't count missing entities that couldn't be filled from context
num_matched_entities = 0
for matched_entity in result.entities_list:
if matched_entity.name not in result.unmatched_entities:
num_matched_entities += 1
num_unmatched_entities = 0
num_unmatched_ranges = 0
for unmatched_entity in result.unmatched_entities_list:
if isinstance(unmatched_entity, UnmatchedTextEntity):
if unmatched_entity.text != MISSING_ENTITY:
num_unmatched_entities += 1
elif isinstance(unmatched_entity, UnmatchedRangeEntity):
num_unmatched_ranges += 1
num_unmatched_entities += 1
else:
num_unmatched_entities += 1
if (
(maybe_result is None) # first result
or (
# More literal text matched
result.text_chunks_matched > maybe_result.text_chunks_matched
)
or (
# More entities matched
num_matched_entities > best_num_matched_entities
)
or (
# Fewer unmatched entities
(num_matched_entities == best_num_matched_entities)
and (num_unmatched_entities < best_num_unmatched_entities)
)
or (
# Prefer unmatched ranges
(num_matched_entities == best_num_matched_entities)
and (num_unmatched_entities == best_num_unmatched_entities)
and (num_unmatched_ranges > best_num_unmatched_ranges)
)
or (
# Prefer match failures with entities
(result.text_chunks_matched == maybe_result.text_chunks_matched)
and (num_unmatched_entities == best_num_unmatched_entities)
and (num_unmatched_ranges == best_num_unmatched_ranges)
and (
("name" in result.entities)
or ("name" in result.unmatched_entities)
)
)
):
maybe_result = result
best_num_matched_entities = num_matched_entities
best_num_unmatched_entities = num_unmatched_entities
best_num_unmatched_ranges = num_unmatched_ranges
return maybe_result
def _get_unexposed_entity_names(self, text: str) -> TextSlotList:
"""Get filtered slot list with unexposed entity names in Home Assistant."""
if self._unexposed_names_trie is None:
@@ -962,7 +851,7 @@ class DefaultAgent(ConversationEntity):
if lang_intents is None:
return
await self._make_slot_lists()
self._make_slot_lists()
async def async_get_or_load_intents(self, language: str) -> LanguageIntents | None:
"""Load all intents of a language with lock."""
@@ -1113,85 +1002,12 @@ class DefaultAgent(ConversationEntity):
intent_responses = responses_dict.get("intents", {})
error_responses = responses_dict.get("errors", {})
if not self.fuzzy_matching:
_LOGGER.debug("Fuzzy matching is disabled")
return LanguageIntents(
intents,
intents_dict,
intent_responses,
error_responses,
language_variant,
)
# Load fuzzy
fuzzy_info = get_fuzzy_language(language_variant, json_load=json_load)
if fuzzy_info is None:
_LOGGER.debug(
"Fuzzy matching not available for language: %s", language_variant
)
return LanguageIntents(
intents,
intents_dict,
intent_responses,
error_responses,
language_variant,
)
if self._fuzzy_config is None:
# Load shared config
self._fuzzy_config = get_fuzzy_config(json_load=json_load)
_LOGGER.debug("Loaded shared fuzzy matching config")
assert self._fuzzy_config is not None
fuzzy_matcher: FuzzyNgramMatcher | None = None
fuzzy_responses: FuzzyLanguageResponses | None = None
start_time = time.monotonic()
fuzzy_responses = fuzzy_info.responses
fuzzy_matcher = FuzzyNgramMatcher(
intents=intents,
intent_models={
intent_name: Sqlite3NgramModel(
order=fuzzy_model.order,
words={
word: str(word_id)
for word, word_id in fuzzy_model.words.items()
},
database_path=fuzzy_model.database_path,
)
for intent_name, fuzzy_model in fuzzy_info.ngram_models.items()
},
intent_slot_list_names=self._fuzzy_config.slot_list_names,
slot_combinations={
intent_name: {
combo_key: [
SlotCombinationInfo(
name_domains=(set(name_domains) if name_domains else None)
)
]
for combo_key, name_domains in intent_combos.items()
}
for intent_name, intent_combos in self._fuzzy_config.slot_combinations.items()
},
domain_keywords=fuzzy_info.domain_keywords,
stop_words=fuzzy_info.stop_words,
)
_LOGGER.debug(
"Loaded fuzzy matcher in %s second(s): language=%s, intents=%s",
time.monotonic() - start_time,
language_variant,
sorted(fuzzy_matcher.intent_models.keys()),
)
return LanguageIntents(
intents,
intents_dict,
intent_responses,
error_responses,
language_variant,
fuzzy_matcher=fuzzy_matcher,
fuzzy_responses=fuzzy_responses,
)
@core.callback
@@ -1211,7 +1027,8 @@ class DefaultAgent(ConversationEntity):
# Slot lists have changed, so we must clear the cache
self._intent_cache.clear()
async def _make_slot_lists(self) -> dict[str, SlotList]:
@core.callback
def _make_slot_lists(self) -> dict[str, SlotList]:
"""Create slot lists with areas and entity names/aliases."""
if self._slot_lists is not None:
return self._slot_lists
@@ -1272,10 +1089,6 @@ class DefaultAgent(ConversationEntity):
"floor": TextSlotList.from_tuples(floor_names, allow_template=False),
}
# Reload fuzzy matchers with new slot lists
if self.fuzzy_matching:
await self.hass.async_add_executor_job(self._load_fuzzy_matchers)
self._listen_clear_slot_list()
_LOGGER.debug(
@@ -1285,25 +1098,6 @@ class DefaultAgent(ConversationEntity):
return self._slot_lists
def _load_fuzzy_matchers(self) -> None:
"""Reload fuzzy matchers for all loaded languages."""
for lang_intents in self._lang_intents.values():
if (not isinstance(lang_intents, LanguageIntents)) or (
lang_intents.fuzzy_matcher is None
):
continue
lang_matcher = lang_intents.fuzzy_matcher
lang_intents.fuzzy_matcher = FuzzyNgramMatcher(
intents=lang_matcher.intents,
intent_models=lang_matcher.intent_models,
intent_slot_list_names=lang_matcher.intent_slot_list_names,
slot_combinations=lang_matcher.slot_combinations,
domain_keywords=lang_matcher.domain_keywords,
stop_words=lang_matcher.stop_words,
slot_lists=self._slot_lists,
)
def _make_intent_context(
self, user_input: ConversationInput
) -> dict[str, Any] | None:
@@ -1389,7 +1183,7 @@ class DefaultAgent(ConversationEntity):
for trigger_intent in trigger_intents.intents.values():
for intent_data in trigger_intent.data:
for sentence in intent_data.sentences:
_collect_list_references(sentence.expression, wildcard_names)
_collect_list_references(sentence, wildcard_names)
for wildcard_name in wildcard_names:
trigger_intents.slot_lists[wildcard_name] = WildcardSlotList(wildcard_name)
@@ -1726,9 +1520,11 @@ def _get_match_error_response(
def _collect_list_references(expression: Expression, list_names: set[str]) -> None:
"""Collect list reference names recursively."""
if isinstance(expression, Group):
for item in expression.items:
if isinstance(expression, Sequence):
seq: Sequence = expression
for item in seq.items:
_collect_list_references(item, list_names)
elif isinstance(expression, ListReference):
# {list}
list_names.add(expression.slot_name)
list_ref: ListReference = expression
list_names.add(list_ref.slot_name)
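The recursion above needs only two cases: container expressions are walked item by item, and {list} references contribute their slot name. The same walk works on any small expression tree; here is a self-contained sketch with stand-in node classes (Seq and ListRef are hypothetical, not hassil types):

from dataclasses import dataclass, field


@dataclass
class ListRef:
    slot_name: str


@dataclass
class Seq:
    items: list = field(default_factory=list)


def collect_list_names(node, names: set[str]) -> None:
    """Recursively gather {list} slot names from an expression tree."""
    if isinstance(node, Seq):
        for item in node.items:
            collect_list_names(item, names)
    elif isinstance(node, ListRef):
        names.add(node.slot_name)


names: set[str] = set()
collect_list_names(Seq([ListRef("name"), Seq([ListRef("area")])]), names)
assert names == {"name", "area"}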

View File

@@ -26,11 +26,7 @@ from .agent_manager import (
get_agent_manager,
)
from .const import DATA_COMPONENT, DATA_DEFAULT_ENTITY
from .default_agent import (
METADATA_CUSTOM_FILE,
METADATA_CUSTOM_SENTENCE,
METADATA_FUZZY_MATCH,
)
from .default_agent import METADATA_CUSTOM_FILE, METADATA_CUSTOM_SENTENCE
from .entity import ConversationEntity
from .models import ConversationInput
@@ -244,8 +240,6 @@ async def websocket_hass_agent_debug(
"sentence_template": "",
# When match is incomplete, this will contain the best slot guesses
"unmatched_slots": _get_unmatched_slots(intent_result),
# True if match was not exact
"fuzzy_match": False,
}
if successful_match:
@@ -257,19 +251,16 @@ async def websocket_hass_agent_debug(
if intent_result.intent_sentence is not None:
result_dict["sentence_template"] = intent_result.intent_sentence.text
if intent_result.intent_metadata:
# Inspect metadata to determine if this matched a custom sentence
if intent_result.intent_metadata.get(METADATA_CUSTOM_SENTENCE):
result_dict["source"] = "custom"
result_dict["file"] = intent_result.intent_metadata.get(
METADATA_CUSTOM_FILE
)
else:
result_dict["source"] = "builtin"
result_dict["fuzzy_match"] = intent_result.intent_metadata.get(
METADATA_FUZZY_MATCH, False
# Inspect metadata to determine if this matched a custom sentence
if intent_result.intent_metadata and intent_result.intent_metadata.get(
METADATA_CUSTOM_SENTENCE
):
result_dict["source"] = "custom"
result_dict["file"] = intent_result.intent_metadata.get(
METADATA_CUSTOM_FILE
)
else:
result_dict["source"] = "builtin"
result_dicts.append(result_dict)

View File

@@ -6,5 +6,5 @@
"documentation": "https://www.home-assistant.io/integrations/conversation",
"integration_type": "system",
"quality_scale": "internal",
"requirements": ["hassil==3.1.0", "home-assistant-intents==2025.7.30"]
"requirements": ["hassil==2.2.3", "home-assistant-intents==2025.7.30"]
}

View File

@@ -8,5 +8,5 @@
"iot_class": "cloud_polling",
"loggers": ["cookidoo_api"],
"quality_scale": "silver",
"requirements": ["cookidoo-api==0.14.0"]
"requirements": ["cookidoo-api==0.12.2"]
}

View File

@@ -5,7 +5,6 @@ from __future__ import annotations
from datetime import datetime
from typing import Any
from homeassistant.components import media_source
from homeassistant.components.media_player import (
BrowseMedia,
MediaClass,
@@ -397,15 +396,6 @@ class DemoBrowsePlayer(AbstractDemoPlayer):
_attr_supported_features = BROWSE_PLAYER_SUPPORT
async def async_browse_media(
self,
media_content_type: MediaType | str | None = None,
media_content_id: str | None = None,
) -> BrowseMedia:
"""Implement the websocket media browsing helper."""
return await media_source.async_browse_media(self.hass, media_content_id)
class DemoGroupPlayer(AbstractDemoPlayer):
"""A Demo media player that supports grouping."""

View File

@@ -15,7 +15,7 @@
],
"quality_scale": "internal",
"requirements": [
"aiodhcpwatcher==1.2.1",
"aiodhcpwatcher==1.2.0",
"aiodiscover==2.7.1",
"cached-ipaddress==0.10.0"
]

View File

@@ -30,7 +30,6 @@ class Dremel3DPrinterEntity(CoordinatorEntity[Dremel3DPrinterDataUpdateCoordinat
"""Return device information about this Dremel printer."""
return DeviceInfo(
identifiers={(DOMAIN, self._api.get_serial_number())},
serial_number=self._api.get_serial_number(),
manufacturer=self._api.get_manufacturer(),
model=self._api.get_model(),
name=self._api.get_title(),

View File

@@ -93,7 +93,6 @@ class EmonitorPowerSensor(CoordinatorEntity[EmonitorStatus], SensorEntity):
manufacturer="Powerhouse Dynamics, Inc.",
name=device_name,
sw_version=emonitor_status.hardware.firmware_version,
serial_number=emonitor_status.hardware.serial_number,
)
self._attr_extra_state_attributes = {"channel": channel_number}
self._attr_native_value = self._paired_attr(self.entity_description.key)

View File

@@ -2,8 +2,6 @@
from __future__ import annotations
from typing import TYPE_CHECKING
from pyenphase import Envoy
from homeassistant.const import CONF_HOST
@@ -44,21 +42,6 @@ async def async_setup_entry(hass: HomeAssistant, entry: EnphaseConfigEntry) -> b
},
)
# register envoy before via_device is used
device_registry = dr.async_get(hass)
if TYPE_CHECKING:
assert envoy.serial_number
device_registry.async_get_or_create(
config_entry_id=entry.entry_id,
identifiers={(DOMAIN, envoy.serial_number)},
manufacturer="Enphase",
name=coordinator.name,
model=envoy.envoy_model,
sw_version=str(envoy.firmware),
hw_version=envoy.part_number,
serial_number=envoy.serial_number,
)
entry.runtime_data = coordinator
await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)

View File

@@ -6,7 +6,7 @@ from collections.abc import Callable
from dataclasses import dataclass
from operator import attrgetter
from pyenphase import EnvoyC6CC, EnvoyCollar, EnvoyEncharge, EnvoyEnpower
from pyenphase import EnvoyEncharge, EnvoyEnpower
from homeassistant.components.binary_sensor import (
BinarySensorDeviceClass,
@@ -72,42 +72,6 @@ ENPOWER_SENSORS = (
)
@dataclass(frozen=True, kw_only=True)
class EnvoyCollarBinarySensorEntityDescription(BinarySensorEntityDescription):
"""Describes an Envoy IQ Meter Collar binary sensor entity."""
value_fn: Callable[[EnvoyCollar], bool]
COLLAR_SENSORS = (
EnvoyCollarBinarySensorEntityDescription(
key="communicating",
translation_key="communicating",
device_class=BinarySensorDeviceClass.CONNECTIVITY,
entity_category=EntityCategory.DIAGNOSTIC,
value_fn=attrgetter("communicating"),
),
)
@dataclass(frozen=True, kw_only=True)
class EnvoyC6CCBinarySensorEntityDescription(BinarySensorEntityDescription):
"""Describes an C6 Combiner controller binary sensor entity."""
value_fn: Callable[[EnvoyC6CC], bool]
C6CC_SENSORS = (
EnvoyC6CCBinarySensorEntityDescription(
key="communicating",
translation_key="communicating",
device_class=BinarySensorDeviceClass.CONNECTIVITY,
entity_category=EntityCategory.DIAGNOSTIC,
value_fn=attrgetter("communicating"),
),
)
async def async_setup_entry(
hass: HomeAssistant,
config_entry: EnphaseConfigEntry,
@@ -131,18 +95,6 @@ async def async_setup_entry(
for description in ENPOWER_SENSORS
)
if envoy_data.collar:
entities.extend(
EnvoyCollarBinarySensorEntity(coordinator, description)
for description in COLLAR_SENSORS
)
if envoy_data.c6cc:
entities.extend(
EnvoyC6CCBinarySensorEntity(coordinator, description)
for description in C6CC_SENSORS
)
async_add_entities(entities)
@@ -216,69 +168,3 @@ class EnvoyEnpowerBinarySensorEntity(EnvoyBaseBinarySensorEntity):
enpower = self.data.enpower
assert enpower is not None
return self.entity_description.value_fn(enpower)
class EnvoyCollarBinarySensorEntity(EnvoyBaseBinarySensorEntity):
"""Defines an IQ Meter Collar binary_sensor entity."""
entity_description: EnvoyCollarBinarySensorEntityDescription
def __init__(
self,
coordinator: EnphaseUpdateCoordinator,
description: EnvoyCollarBinarySensorEntityDescription,
) -> None:
"""Init the Collar base entity."""
super().__init__(coordinator, description)
collar_data = self.data.collar
assert collar_data is not None
self._attr_unique_id = f"{collar_data.serial_number}_{description.key}"
self._attr_device_info = DeviceInfo(
identifiers={(DOMAIN, collar_data.serial_number)},
manufacturer="Enphase",
model="IQ Meter Collar",
name=f"Collar {collar_data.serial_number}",
sw_version=str(collar_data.firmware_version),
via_device=(DOMAIN, self.envoy_serial_num),
serial_number=collar_data.serial_number,
)
@property
def is_on(self) -> bool:
"""Return the state of the Collar binary_sensor."""
collar_data = self.data.collar
assert collar_data is not None
return self.entity_description.value_fn(collar_data)
class EnvoyC6CCBinarySensorEntity(EnvoyBaseBinarySensorEntity):
"""Defines an C6 Combiner binary_sensor entity."""
entity_description: EnvoyC6CCBinarySensorEntityDescription
def __init__(
self,
coordinator: EnphaseUpdateCoordinator,
description: EnvoyC6CCBinarySensorEntityDescription,
) -> None:
"""Init the C6 Combiner base entity."""
super().__init__(coordinator, description)
c6cc_data = self.data.c6cc
assert c6cc_data is not None
self._attr_unique_id = f"{c6cc_data.serial_number}_{description.key}"
self._attr_device_info = DeviceInfo(
identifiers={(DOMAIN, c6cc_data.serial_number)},
manufacturer="Enphase",
model="C6 COMBINER CONTROLLER",
name=f"C6 Combiner {c6cc_data.serial_number}",
sw_version=str(c6cc_data.firmware_version),
via_device=(DOMAIN, self.envoy_serial_num),
serial_number=c6cc_data.serial_number,
)
@property
def is_on(self) -> bool:
"""Return the state of the C6 Combiner binary_sensor."""
c6cc_data = self.data.c6cc
assert c6cc_data is not None
return self.entity_description.value_fn(c6cc_data)

View File

@@ -1,13 +1,13 @@
{
"domain": "enphase_envoy",
"name": "Enphase Envoy",
"codeowners": ["@bdraco", "@cgarwood", "@catsmanac"],
"codeowners": ["@bdraco", "@cgarwood", "@joostlek", "@catsmanac"],
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/enphase_envoy",
"iot_class": "local_polling",
"loggers": ["pyenphase"],
"quality_scale": "platinum",
"requirements": ["pyenphase==2.3.0"],
"requirements": ["pyenphase==2.2.3"],
"zeroconf": [
{
"type": "_enphase-envoy._tcp.local."

View File

@@ -12,8 +12,6 @@ from typing import TYPE_CHECKING
from pyenphase import (
EnvoyACBPower,
EnvoyBatteryAggregate,
EnvoyC6CC,
EnvoyCollar,
EnvoyEncharge,
EnvoyEnchargeAggregate,
EnvoyEnchargePower,
@@ -792,58 +790,6 @@ ENPOWER_SENSORS = (
)
@dataclass(frozen=True, kw_only=True)
class EnvoyCollarSensorEntityDescription(SensorEntityDescription):
"""Describes an Envoy Collar sensor entity."""
value_fn: Callable[[EnvoyCollar], datetime.datetime | int | float | str]
COLLAR_SENSORS = (
EnvoyCollarSensorEntityDescription(
key="temperature",
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
device_class=SensorDeviceClass.TEMPERATURE,
value_fn=attrgetter("temperature"),
),
EnvoyCollarSensorEntityDescription(
key=LAST_REPORTED_KEY,
translation_key=LAST_REPORTED_KEY,
native_unit_of_measurement=None,
device_class=SensorDeviceClass.TIMESTAMP,
value_fn=lambda collar: dt_util.utc_from_timestamp(collar.last_report_date),
),
EnvoyCollarSensorEntityDescription(
key="grid_state",
translation_key="grid_status",
value_fn=lambda collar: collar.grid_state,
),
EnvoyCollarSensorEntityDescription(
key="mid_state",
translation_key="mid_state",
value_fn=lambda collar: collar.mid_state,
),
)
@dataclass(frozen=True, kw_only=True)
class EnvoyC6CCSensorEntityDescription(SensorEntityDescription):
"""Describes an Envoy C6 Combiner controller sensor entity."""
value_fn: Callable[[EnvoyC6CC], datetime.datetime]
C6CC_SENSORS = (
EnvoyC6CCSensorEntityDescription(
key=LAST_REPORTED_KEY,
translation_key=LAST_REPORTED_KEY,
native_unit_of_measurement=None,
device_class=SensorDeviceClass.TIMESTAMP,
value_fn=lambda c6cc: dt_util.utc_from_timestamp(c6cc.last_report_date),
),
)
@dataclass(frozen=True)
class EnvoyEnchargeAggregateRequiredKeysMixin:
"""Mixin for required keys."""
@@ -1104,15 +1050,6 @@ async def async_setup_entry(
AggregateBatteryEntity(coordinator, description)
for description in AGGREGATE_BATTERY_SENSORS
)
if envoy_data.collar:
entities.extend(
EnvoyCollarEntity(coordinator, description)
for description in COLLAR_SENSORS
)
if envoy_data.c6cc:
entities.extend(
EnvoyC6CCEntity(coordinator, description) for description in C6CC_SENSORS
)
async_add_entities(entities)
@@ -1551,70 +1488,3 @@ class AggregateBatteryEntity(EnvoySystemSensorEntity):
battery_aggregate = self.data.battery_aggregate
assert battery_aggregate is not None
return self.entity_description.value_fn(battery_aggregate)
class EnvoyCollarEntity(EnvoySensorBaseEntity):
"""Envoy Collar sensor entity."""
entity_description: EnvoyCollarSensorEntityDescription
def __init__(
self,
coordinator: EnphaseUpdateCoordinator,
description: EnvoyCollarSensorEntityDescription,
) -> None:
"""Initialize Collar entity."""
super().__init__(coordinator, description)
collar_data = self.data.collar
assert collar_data is not None
self._serial_number = collar_data.serial_number
self._attr_unique_id = f"{collar_data.serial_number}_{description.key}"
self._attr_device_info = DeviceInfo(
identifiers={(DOMAIN, collar_data.serial_number)},
manufacturer="Enphase",
model="IQ Meter Collar",
name=f"Collar {collar_data.serial_number}",
sw_version=str(collar_data.firmware_version),
via_device=(DOMAIN, self.envoy_serial_num),
serial_number=collar_data.serial_number,
)
@property
def native_value(self) -> datetime.datetime | int | float | str:
"""Return the state of the collar sensors."""
collar_data = self.data.collar
assert collar_data is not None
return self.entity_description.value_fn(collar_data)
class EnvoyC6CCEntity(EnvoySensorBaseEntity):
"""Envoy C6CC sensor entity."""
entity_description: EnvoyC6CCSensorEntityDescription
def __init__(
self,
coordinator: EnphaseUpdateCoordinator,
description: EnvoyC6CCSensorEntityDescription,
) -> None:
"""Initialize Encharge entity."""
super().__init__(coordinator, description)
c6cc_data = self.data.c6cc
assert c6cc_data is not None
self._attr_unique_id = f"{c6cc_data.serial_number}_{description.key}"
self._attr_device_info = DeviceInfo(
identifiers={(DOMAIN, c6cc_data.serial_number)},
manufacturer="Enphase",
model="C6 COMBINER CONTROLLER",
name=f"C6 Combiner {c6cc_data.serial_number}",
sw_version=str(c6cc_data.firmware_version),
via_device=(DOMAIN, self.envoy_serial_num),
serial_number=c6cc_data.serial_number,
)
@property
def native_value(self) -> datetime.datetime:
"""Return the state of the c6cc inventory sensors."""
c6cc_data = self.data.c6cc
assert c6cc_data is not None
return self.entity_description.value_fn(c6cc_data)
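The Collar and C6CC entities removed above follow Home Assistant's usual description-driven pattern: a frozen entity description carries a value_fn (often an attrgetter), and the entity property simply applies it to the latest coordinator data. A stripped-down sketch of that pattern outside Home Assistant (DeviceData and the descriptions are hypothetical):

from dataclasses import dataclass
from operator import attrgetter
from typing import Callable


@dataclass
class DeviceData:
    """Hypothetical snapshot of one device's data."""

    temperature: float
    grid_state: str


@dataclass(frozen=True, kw_only=True)
class SimpleSensorDescription:
    key: str
    value_fn: Callable[[DeviceData], float | str]


DESCRIPTIONS = (
    SimpleSensorDescription(key="temperature", value_fn=attrgetter("temperature")),
    SimpleSensorDescription(key="grid_state", value_fn=lambda data: data.grid_state),
)

data = DeviceData(temperature=21.5, grid_state="on-grid")
print({d.key: d.value_fn(data) for d in DESCRIPTIONS})  # {'temperature': 21.5, 'grid_state': 'on-grid'}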

View File

@@ -407,12 +407,6 @@
},
"last_report_duration": {
"name": "Last report duration"
},
"grid_status": {
"name": "[%key:component::enphase_envoy::entity::binary_sensor::grid_status::name%]"
},
"mid_state": {
"name": "MID state"
}
},
"switch": {

View File

@@ -2,6 +2,7 @@
from __future__ import annotations
import asyncio
import base64
from functools import partial
import logging
@@ -14,6 +15,7 @@ from aioesphomeapi import (
APIVersion,
DeviceInfo as EsphomeDeviceInfo,
EncryptionPlaintextAPIError,
EntityInfo,
HomeassistantServiceCall,
InvalidAuthAPIError,
InvalidEncryptionKeyAPIError,
@@ -61,6 +63,7 @@ from homeassistant.helpers.issue_registry import (
)
from homeassistant.helpers.service import async_set_service_schema
from homeassistant.helpers.template import Template
from homeassistant.util.async_ import create_eager_task
from .bluetooth import async_connect_scanner
from .const import (
@@ -422,7 +425,14 @@ class ESPHomeManager:
unique_id_is_mac_address = unique_id and ":" in unique_id
if entry.options.get(CONF_SUBSCRIBE_LOGS):
self._async_subscribe_logs(self._async_get_equivalent_log_level())
device_info, entity_infos, services = await cli.device_info_and_list_entities()
results = await asyncio.gather(
create_eager_task(cli.device_info()),
create_eager_task(cli.list_entities_services()),
)
device_info: EsphomeDeviceInfo = results[0]
entity_infos_services: tuple[list[EntityInfo], list[UserService]] = results[1]
entity_infos, services = entity_infos_services
device_mac = format_mac(device_info.mac_address)
mac_address_matches = unique_id == device_mac
@@ -554,11 +564,11 @@ class ESPHomeManager:
)
entry_data.loaded_platforms.add(Platform.ASSIST_SATELLITE)
cli.subscribe_home_assistant_states_and_services(
on_state=entry_data.async_update_state,
on_service_call=self.async_on_service_call,
on_state_sub=self.async_on_state_subscription,
on_state_request=self.async_on_state_request,
cli.subscribe_states(entry_data.async_update_state)
cli.subscribe_service_calls(self.async_on_service_call)
cli.subscribe_home_assistant_states(
self.async_on_state_subscription,
self.async_on_state_request,
)
entry_data.async_save_to_store()
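The replacement above splits the combined device_info_and_list_entities call back into two API requests issued concurrently; create_eager_task is Home Assistant's helper for starting tasks without waiting for the next event-loop iteration. The concurrency shape itself is plain asyncio.gather, sketched here with stand-in coroutines (fetch_device_info and fetch_entities are hypothetical):

import asyncio


async def fetch_device_info():
    await asyncio.sleep(0.01)  # stand-in for one network round trip
    return {"mac_address": "aa:bb:cc:dd:ee:ff"}


async def fetch_entities():
    await asyncio.sleep(0.01)
    return ["sensor.temp"], ["light.effect_service"]


async def main():
    # Both requests are in flight at the same time; results keep argument order.
    device_info, (entity_infos, services) = await asyncio.gather(
        fetch_device_info(), fetch_entities()
    )
    print(device_info["mac_address"], len(entity_infos), len(services))


asyncio.run(main())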

View File

@@ -17,7 +17,7 @@
"mqtt": ["esphome/discover/#"],
"quality_scale": "platinum",
"requirements": [
"aioesphomeapi==39.0.0",
"aioesphomeapi==37.2.2",
"esphome-dashboard-api==1.3.0",
"bleak-esphome==3.1.0"
],

View File

@@ -10,7 +10,6 @@ from urllib.parse import urlparse
from aioesphomeapi import (
EntityInfo,
MediaPlayerCommand,
MediaPlayerEntityFeature as EspMediaPlayerEntityFeature,
MediaPlayerEntityState,
MediaPlayerFormatPurpose,
MediaPlayerInfo,
@@ -51,36 +50,9 @@ _STATES: EsphomeEnumMapper[EspMediaPlayerState, MediaPlayerState] = EsphomeEnumM
EspMediaPlayerState.IDLE: MediaPlayerState.IDLE,
EspMediaPlayerState.PLAYING: MediaPlayerState.PLAYING,
EspMediaPlayerState.PAUSED: MediaPlayerState.PAUSED,
EspMediaPlayerState.OFF: MediaPlayerState.OFF,
EspMediaPlayerState.ON: MediaPlayerState.ON,
}
)
_FEATURES = {
EspMediaPlayerEntityFeature.PAUSE: MediaPlayerEntityFeature.PAUSE,
EspMediaPlayerEntityFeature.SEEK: MediaPlayerEntityFeature.SEEK,
EspMediaPlayerEntityFeature.VOLUME_SET: MediaPlayerEntityFeature.VOLUME_SET,
EspMediaPlayerEntityFeature.VOLUME_MUTE: MediaPlayerEntityFeature.VOLUME_MUTE,
EspMediaPlayerEntityFeature.PREVIOUS_TRACK: MediaPlayerEntityFeature.PREVIOUS_TRACK,
EspMediaPlayerEntityFeature.NEXT_TRACK: MediaPlayerEntityFeature.NEXT_TRACK,
EspMediaPlayerEntityFeature.TURN_ON: MediaPlayerEntityFeature.TURN_ON,
EspMediaPlayerEntityFeature.TURN_OFF: MediaPlayerEntityFeature.TURN_OFF,
EspMediaPlayerEntityFeature.PLAY_MEDIA: MediaPlayerEntityFeature.PLAY_MEDIA,
EspMediaPlayerEntityFeature.VOLUME_STEP: MediaPlayerEntityFeature.VOLUME_STEP,
EspMediaPlayerEntityFeature.SELECT_SOURCE: MediaPlayerEntityFeature.SELECT_SOURCE,
EspMediaPlayerEntityFeature.STOP: MediaPlayerEntityFeature.STOP,
EspMediaPlayerEntityFeature.CLEAR_PLAYLIST: MediaPlayerEntityFeature.CLEAR_PLAYLIST,
EspMediaPlayerEntityFeature.PLAY: MediaPlayerEntityFeature.PLAY,
EspMediaPlayerEntityFeature.SHUFFLE_SET: MediaPlayerEntityFeature.SHUFFLE_SET,
EspMediaPlayerEntityFeature.SELECT_SOUND_MODE: MediaPlayerEntityFeature.SELECT_SOUND_MODE,
EspMediaPlayerEntityFeature.BROWSE_MEDIA: MediaPlayerEntityFeature.BROWSE_MEDIA,
EspMediaPlayerEntityFeature.REPEAT_SET: MediaPlayerEntityFeature.REPEAT_SET,
EspMediaPlayerEntityFeature.GROUPING: MediaPlayerEntityFeature.GROUPING,
EspMediaPlayerEntityFeature.MEDIA_ANNOUNCE: MediaPlayerEntityFeature.MEDIA_ANNOUNCE,
EspMediaPlayerEntityFeature.MEDIA_ENQUEUE: MediaPlayerEntityFeature.MEDIA_ENQUEUE,
EspMediaPlayerEntityFeature.SEARCH_MEDIA: MediaPlayerEntityFeature.SEARCH_MEDIA,
}
ATTR_BYPASS_PROXY = "bypass_proxy"
@@ -95,12 +67,16 @@ class EsphomeMediaPlayer(
def _on_static_info_update(self, static_info: EntityInfo) -> None:
"""Set attrs from static info."""
super()._on_static_info_update(static_info)
esp_flags = EspMediaPlayerEntityFeature(
self._static_info.feature_flags_compat(self._api_version)
flags = (
MediaPlayerEntityFeature.PLAY_MEDIA
| MediaPlayerEntityFeature.BROWSE_MEDIA
| MediaPlayerEntityFeature.STOP
| MediaPlayerEntityFeature.VOLUME_SET
| MediaPlayerEntityFeature.VOLUME_MUTE
| MediaPlayerEntityFeature.MEDIA_ANNOUNCE
)
flags = MediaPlayerEntityFeature(0)
for espflag in esp_flags:
flags |= _FEATURES[espflag]
if self._static_info.supports_pause:
flags |= MediaPlayerEntityFeature.PAUSE | MediaPlayerEntityFeature.PLAY
self._attr_supported_features = flags
self._entry_data.media_player_formats[self.unique_id] = cast(
MediaPlayerInfo, static_info
@@ -281,24 +257,6 @@ class EsphomeMediaPlayer(
device_id=self._static_info.device_id,
)
@convert_api_error_ha_error
async def async_turn_on(self) -> None:
"""Send turn on command."""
self._client.media_player_command(
self._key,
command=MediaPlayerCommand.TURN_ON,
device_id=self._static_info.device_id,
)
@convert_api_error_ha_error
async def async_turn_off(self) -> None:
"""Send turn off command."""
self._client.media_player_command(
self._key,
command=MediaPlayerCommand.TURN_OFF,
device_id=self._static_info.device_id,
)
def _is_url(url: str) -> bool:
"""Validate the URL can be parsed and at least has scheme + netloc."""

View File

@@ -1,31 +0,0 @@
"""Intents for the fan integration."""
import voluptuous as vol
from homeassistant.core import HomeAssistant
from homeassistant.helpers import intent
from . import ATTR_PERCENTAGE, DOMAIN, SERVICE_TURN_ON
INTENT_FAN_SET_SPEED = "HassFanSetSpeed"
async def async_setup_intents(hass: HomeAssistant) -> None:
"""Set up the fan intents."""
intent.async_register(
hass,
intent.ServiceIntentHandler(
INTENT_FAN_SET_SPEED,
DOMAIN,
SERVICE_TURN_ON,
description="Sets a fan's speed by percentage",
required_domains={DOMAIN},
platforms={DOMAIN},
required_slots={
ATTR_PERCENTAGE: intent.IntentSlotInfo(
description="The speed percentage of the fan",
value_schema=vol.All(vol.Coerce(int), vol.Range(min=0, max=100)),
)
},
),
)

View File

@@ -30,6 +30,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: FoscamConfigEntry) -> bo
verbose=False,
)
coordinator = FoscamCoordinator(hass, entry, session)
await coordinator.async_config_entry_first_refresh()
entry.runtime_data = coordinator
@@ -88,7 +89,7 @@ async def async_migrate_entry(hass: HomeAssistant, entry: FoscamConfigEntry) ->
async def async_migrate_entities(hass: HomeAssistant, entry: FoscamConfigEntry) -> None:
"""Migrate old entries to support config_entry_id-based unique IDs."""
"""Migrate old entry."""
@callback
def _update_unique_id(

View File

@@ -26,7 +26,7 @@ from .const import CONF_RTSP_PORT, CONF_STREAM, DOMAIN, LOGGER
STREAMS = ["Main", "Sub"]
DEFAULT_PORT = 88
DEFAULT_RTSP_PORT = 88
DEFAULT_RTSP_PORT = 554
DATA_SCHEMA = vol.Schema(

View File

@@ -11,16 +11,3 @@ CONF_STREAM = "stream"
SERVICE_PTZ = "ptz"
SERVICE_PTZ_PRESET = "ptz_preset"
SUPPORTED_SWITCHES = [
"flip_switch",
"mirror_switch",
"ir_switch",
"sleep_switch",
"white_light_switch",
"siren_alarm_switch",
"turn_off_volume_switch",
"light_status_switch",
"hdr_switch",
"wdr_switch",
]

View File

@@ -1,8 +1,8 @@
"""The foscam coordinator object."""
import asyncio
from dataclasses import dataclass
from datetime import timedelta
from typing import Any
from libpyfoscamcgi import FoscamCamera
@@ -15,35 +15,9 @@ from .const import DOMAIN, LOGGER
type FoscamConfigEntry = ConfigEntry[FoscamCoordinator]
@dataclass
class FoscamDeviceInfo:
"""A data class representing the current state and configuration of a Foscam camera device."""
dev_info: dict
product_info: dict
is_open_ir: bool
is_flip: bool
is_mirror: bool
is_asleep: dict
is_open_white_light: bool
is_siren_alarm: bool
volume: int
speak_volume: int
is_turn_off_volume: bool
is_turn_off_light: bool
is_open_wdr: bool | None = None
is_open_hdr: bool | None = None
class FoscamCoordinator(DataUpdateCoordinator[FoscamDeviceInfo]):
class FoscamCoordinator(DataUpdateCoordinator[dict[str, Any]]):
"""Foscam coordinator."""
config_entry: FoscamConfigEntry
def __init__(
self,
hass: HomeAssistant,
@@ -60,82 +34,24 @@ class FoscamCoordinator(DataUpdateCoordinator[FoscamDeviceInfo]):
)
self.session = session
def gather_all_configs(self) -> FoscamDeviceInfo:
"""Get all Foscam configurations."""
ret_dev_info, dev_info = self.session.get_dev_info()
dev_info = dev_info if ret_dev_info == 0 else {}
ret_product_info, product_info = self.session.get_product_all_info()
product_info = product_info if ret_product_info == 0 else {}
ret_ir, infra_led_config = self.session.get_infra_led_config()
is_open_ir = infra_led_config["mode"] == "1" if ret_ir == 0 else False
ret_mf, mirror_flip_setting = self.session.get_mirror_and_flip_setting()
is_flip = mirror_flip_setting["isFlip"] == "1" if ret_mf == 0 else False
is_mirror = mirror_flip_setting["isMirror"] == "1" if ret_mf == 0 else False
ret_sleep, sleep_setting = self.session.is_asleep()
is_asleep = {"supported": ret_sleep == 0, "status": bool(int(sleep_setting))}
ret_wl, is_open_white_light = self.session.getWhiteLightBrightness()
is_open_white_light_val = (
is_open_white_light["enable"] == "1" if ret_wl == 0 else False
)
ret_sc, is_siren_alarm = self.session.getSirenConfig()
is_siren_alarm_val = (
is_siren_alarm["sirenEnable"] == "1" if ret_sc == 0 else False
)
ret_vol, volume = self.session.getAudioVolume()
volume_val = int(volume["volume"]) if ret_vol == 0 else 0
ret_sv, speak_volume = self.session.getSpeakVolume()
speak_volume_val = int(speak_volume["SpeakVolume"]) if ret_sv == 0 else 0
ret_ves, is_turn_off_volume = self.session.getVoiceEnableState()
is_turn_off_volume_val = not (
ret_ves == 0 and is_turn_off_volume["isEnable"] == "1"
)
ret_les, is_turn_off_light = self.session.getLedEnableState()
is_turn_off_light_val = not (
ret_les == 0 and is_turn_off_light["isEnable"] == "0"
)
is_open_wdr = None
is_open_hdr = None
reserve3 = product_info.get("reserve3")
reserve3_int = int(reserve3) if reserve3 is not None else 0
if (reserve3_int & (1 << 8)) != 0:
ret_wdr, is_open_wdr_data = self.session.getWdrMode()
mode = is_open_wdr_data["mode"] if ret_wdr == 0 and is_open_wdr_data else 0
is_open_wdr = bool(int(mode))
else:
ret_hdr, is_open_hdr_data = self.session.getHdrMode()
mode = is_open_hdr_data["mode"] if ret_hdr == 0 and is_open_hdr_data else 0
is_open_hdr = bool(int(mode))
return FoscamDeviceInfo(
dev_info=dev_info,
product_info=product_info,
is_open_ir=is_open_ir,
is_flip=is_flip,
is_mirror=is_mirror,
is_asleep=is_asleep,
is_open_white_light=is_open_white_light_val,
is_siren_alarm=is_siren_alarm_val,
volume=volume_val,
speak_volume=speak_volume_val,
is_turn_off_volume=is_turn_off_volume_val,
is_turn_off_light=is_turn_off_light_val,
is_open_wdr=is_open_wdr,
is_open_hdr=is_open_hdr,
)
async def _async_update_data(self) -> FoscamDeviceInfo:
async def _async_update_data(self) -> dict[str, Any]:
"""Fetch data from API endpoint."""
async with asyncio.timeout(10):
return await self.hass.async_add_executor_job(self.gather_all_configs)
async with asyncio.timeout(30):
data = {}
ret, dev_info = await self.hass.async_add_executor_job(
self.session.get_dev_info
)
if ret == 0:
data["dev_info"] = dev_info
all_info = await self.hass.async_add_executor_job(
self.session.get_product_all_info
)
data["product_info"] = all_info[1]
ret, is_asleep = await self.hass.async_add_executor_job(
self.session.is_asleep
)
data["is_asleep"] = {"supported": ret == 0, "status": is_asleep}
return data
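
As a side note on the removed gather_all_configs above: it decided between querying WDR and HDR by testing bit 8 of the camera's reserve3 capability field, and the removed switch setup further required bit 7 before exposing the HDR switch. A minimal standalone sketch of that bit check, with made-up capability values instead of a real camera response:

# Sketch only: interpret the "reserve3" capability bits the way the removed
# foscam code did. Bit 8 set -> the camera exposes WDR; else bit 7 set -> HDR.
def supported_dynamic_range_modes(reserve3: str | None) -> list[str]:
    value = int(reserve3) if reserve3 else 0
    if value & (1 << 8):
        return ["wdr"]
    if value & (1 << 7):
        return ["hdr"]
    return []

# 256 has bit 8 set (WDR only); 128 has bit 7 set (HDR only); None means neither.
assert supported_dynamic_range_modes("256") == ["wdr"]
assert supported_dynamic_range_modes("128") == ["hdr"]
assert supported_dynamic_range_modes(None) == []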

View File

@@ -13,15 +13,19 @@ from .coordinator import FoscamCoordinator
class FoscamEntity(CoordinatorEntity[FoscamCoordinator]):
"""Base entity for Foscam camera."""
def __init__(self, coordinator: FoscamCoordinator, config_entry_id: str) -> None:
def __init__(
self,
coordinator: FoscamCoordinator,
entry_id: str,
) -> None:
"""Initialize the base Foscam entity."""
super().__init__(coordinator)
self._attr_device_info = DeviceInfo(
identifiers={(DOMAIN, config_entry_id)},
identifiers={(DOMAIN, entry_id)},
manufacturer="Foscam",
)
if dev_info := coordinator.data.dev_info:
if dev_info := coordinator.data.get("dev_info"):
self._attr_device_info[ATTR_MODEL] = dev_info["productName"]
self._attr_device_info[ATTR_SW_VERSION] = dev_info["firmwareVer"]
self._attr_device_info[ATTR_HW_VERSION] = dev_info["hardwareVer"]

View File

@@ -6,39 +6,5 @@
"ptz_preset": {
"service": "mdi:target-variant"
}
},
"entity": {
"switch": {
"flip_switch": {
"default": "mdi:flip-vertical"
},
"mirror_switch": {
"default": "mdi:mirror"
},
"ir_switch": {
"default": "mdi:theme-light-dark"
},
"sleep_switch": {
"default": "mdi:sleep"
},
"white_light_switch": {
"default": "mdi:light-flood-down"
},
"siren_alarm_switch": {
"default": "mdi:alarm-note"
},
"turn_off_volume_switch": {
"default": "mdi:volume-off"
},
"turn_off_light_switch": {
"default": "mdi:lightbulb-fluorescent-tube"
},
"hdr_switch": {
"default": "mdi:hdr"
},
"wdr_switch": {
"default": "mdi:alpha-w-box"
}
}
}
}

View File

@@ -11,12 +11,7 @@
"stream": "Stream"
},
"data_description": {
"host": "The hostname or IP address of your Foscam camera.",
"port": "The port of your Foscam camera, default is 88.",
"username": "The username to log in to your Foscam camera.",
"password": "The password to log in to your Foscam camera.",
"rtsp_port": "The RTSP protocol port of the camera, used to pull the camera's real-time video stream. New model cameras only support RTSP ports 88 and 554, while old model cameras only support ports 88 and 65534.",
"stream": "Select the video stream type to pull. The main stream offers higher clarity but requires a better network environment."
"host": "The hostname or IP address of your Foscam camera."
}
}
},
@@ -32,35 +27,8 @@
},
"entity": {
"switch": {
"flip_switch": {
"name": "Flip"
},
"mirror_switch": {
"name": "Mirror"
},
"ir_switch": {
"name": "Infrared mode"
},
"sleep_switch": {
"name": "Sleep mode"
},
"white_light_switch": {
"name": "White light"
},
"siren_alarm_switch": {
"name": "Siren alarm"
},
"turn_off_volume_switch": {
"name": "Volume muted"
},
"turn_off_light_switch": {
"name": "Light"
},
"hdr_switch": {
"name": "HDR"
},
"wdr_switch": {
"name": "WDR"
"name": "Sleep"
}
}
},

View File

@@ -2,117 +2,18 @@
from __future__ import annotations
from collections.abc import Callable
from dataclasses import dataclass
from typing import Any
from libpyfoscamcgi import FoscamCamera
from homeassistant.components.switch import SwitchEntity, SwitchEntityDescription
from homeassistant.core import HomeAssistant
from homeassistant.components.switch import SwitchEntity
from homeassistant.core import HomeAssistant, callback
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from .const import LOGGER
from .coordinator import FoscamConfigEntry, FoscamCoordinator
from .entity import FoscamEntity
def handle_ir_turn_on(session: FoscamCamera) -> None:
"""Turn on IR LED: sets IR mode to auto (if supported), then turns off the IR LED."""
session.set_infra_led_config(1)
session.open_infra_led()
def handle_ir_turn_off(session: FoscamCamera) -> None:
"""Turn off IR LED: sets IR mode to manual (if supported), then turns open the IR LED."""
session.set_infra_led_config(0)
session.close_infra_led()
@dataclass(frozen=True, kw_only=True)
class FoscamSwitchEntityDescription(SwitchEntityDescription):
"""A custom entity description that supports a turn_off function."""
native_value_fn: Callable[..., bool]
turn_off_fn: Callable[[FoscamCamera], None]
turn_on_fn: Callable[[FoscamCamera], None]
SWITCH_DESCRIPTIONS: list[FoscamSwitchEntityDescription] = [
FoscamSwitchEntityDescription(
key="is_flip",
translation_key="flip_switch",
native_value_fn=lambda data: data.is_flip,
turn_off_fn=lambda session: session.flip_video(0),
turn_on_fn=lambda session: session.flip_video(1),
),
FoscamSwitchEntityDescription(
key="is_mirror",
translation_key="mirror_switch",
native_value_fn=lambda data: data.is_mirror,
turn_off_fn=lambda session: session.mirror_video(0),
turn_on_fn=lambda session: session.mirror_video(1),
),
FoscamSwitchEntityDescription(
key="is_open_ir",
translation_key="ir_switch",
native_value_fn=lambda data: data.is_open_ir,
turn_off_fn=handle_ir_turn_off,
turn_on_fn=handle_ir_turn_on,
),
FoscamSwitchEntityDescription(
key="sleep_switch",
translation_key="sleep_switch",
native_value_fn=lambda data: data.is_asleep["status"],
turn_off_fn=lambda session: session.wake_up(),
turn_on_fn=lambda session: session.sleep(),
),
FoscamSwitchEntityDescription(
key="is_open_white_light",
translation_key="white_light_switch",
native_value_fn=lambda data: data.is_open_white_light,
turn_off_fn=lambda session: session.closeWhiteLight(),
turn_on_fn=lambda session: session.openWhiteLight(),
),
FoscamSwitchEntityDescription(
key="is_siren_alarm",
translation_key="siren_alarm_switch",
native_value_fn=lambda data: data.is_siren_alarm,
turn_off_fn=lambda session: session.setSirenConfig(0, 100, 0),
turn_on_fn=lambda session: session.setSirenConfig(1, 100, 0),
),
FoscamSwitchEntityDescription(
key="is_turn_off_volume",
translation_key="turn_off_volume_switch",
native_value_fn=lambda data: data.is_turn_off_volume,
turn_off_fn=lambda session: session.setVoiceEnableState(1),
turn_on_fn=lambda session: session.setVoiceEnableState(0),
),
FoscamSwitchEntityDescription(
key="is_turn_off_light",
translation_key="turn_off_light_switch",
native_value_fn=lambda data: data.is_turn_off_light,
turn_off_fn=lambda session: session.setLedEnableState(0),
turn_on_fn=lambda session: session.setLedEnableState(1),
),
FoscamSwitchEntityDescription(
key="is_open_hdr",
translation_key="hdr_switch",
native_value_fn=lambda data: data.is_open_hdr,
turn_off_fn=lambda session: session.setHdrMode(0),
turn_on_fn=lambda session: session.setHdrMode(1),
),
FoscamSwitchEntityDescription(
key="is_open_wdr",
translation_key="wdr_switch",
native_value_fn=lambda data: data.is_open_wdr,
turn_off_fn=lambda session: session.setWdrMode(0),
turn_on_fn=lambda session: session.setWdrMode(1),
),
]
async def async_setup_entry(
hass: HomeAssistant,
config_entry: FoscamConfigEntry,
@@ -121,61 +22,63 @@ async def async_setup_entry(
"""Set up foscam switch from a config entry."""
coordinator = config_entry.runtime_data
await coordinator.async_config_entry_first_refresh()
entities = []
product_info = coordinator.data.product_info
reserve3 = product_info.get("reserve3", "0")
for description in SWITCH_DESCRIPTIONS:
if description.key == "is_asleep":
if not coordinator.data.is_asleep["supported"]:
continue
elif description.key == "is_open_hdr":
if ((1 << 8) & int(reserve3)) != 0 or ((1 << 7) & int(reserve3)) == 0:
continue
elif description.key == "is_open_wdr":
if ((1 << 8) & int(reserve3)) == 0:
continue
entities.append(FoscamGenericSwitch(coordinator, description))
async_add_entities(entities)
if coordinator.data["is_asleep"]["supported"]:
async_add_entities([FoscamSleepSwitch(coordinator, config_entry)])
class FoscamGenericSwitch(FoscamEntity, SwitchEntity):
"""A generic switch class for Foscam entities."""
_attr_has_entity_name = True
entity_description: FoscamSwitchEntityDescription
class FoscamSleepSwitch(FoscamEntity, SwitchEntity):
"""An implementation for Sleep Switch."""
def __init__(
self,
coordinator: FoscamCoordinator,
description: FoscamSwitchEntityDescription,
config_entry: FoscamConfigEntry,
) -> None:
"""Initialize the generic switch."""
entry_id = coordinator.config_entry.entry_id
super().__init__(coordinator, entry_id)
"""Initialize a Foscam Sleep Switch."""
super().__init__(coordinator, config_entry.entry_id)
self.entity_description = description
self._attr_unique_id = f"{entry_id}_{description.key}"
self._attr_unique_id = f"{config_entry.entry_id}_sleep_switch"
self._attr_translation_key = "sleep_switch"
self._attr_has_entity_name = True
self.is_asleep = self.coordinator.data["is_asleep"]["status"]
@property
def is_on(self) -> bool:
"""Return the state of the switch."""
return self.entity_description.native_value_fn(self.coordinator.data)
def is_on(self):
"""Return true if camera is asleep."""
return self.is_asleep
async def async_turn_off(self, **kwargs: Any) -> None:
"""Turn off the entity."""
self.hass.async_add_executor_job(
self.entity_description.turn_off_fn, self.coordinator.session
"""Wake camera."""
LOGGER.debug("Wake camera")
ret, _ = await self.hass.async_add_executor_job(
self.coordinator.session.wake_up
)
if ret != 0:
raise HomeAssistantError(f"Error waking up: {ret}")
await self.coordinator.async_request_refresh()
async def async_turn_on(self, **kwargs: Any) -> None:
"""Turn on the entity."""
self.hass.async_add_executor_job(
self.entity_description.turn_on_fn, self.coordinator.session
)
"""But camera is sleep."""
LOGGER.debug("Sleep camera")
ret, _ = await self.hass.async_add_executor_job(self.coordinator.session.sleep)
if ret != 0:
raise HomeAssistantError(f"Error sleeping: {ret}")
await self.coordinator.async_request_refresh()
@callback
def _handle_coordinator_update(self) -> None:
"""Handle updated data from the coordinator."""
self.is_asleep = self.coordinator.data["is_asleep"]["status"]
self.async_write_ha_state()
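
The new FoscamSleepSwitch above follows a common shape: run the blocking camera call in an executor, check the integer return code, raise on failure, then ask the coordinator to refresh. A minimal self-contained sketch of that flow with a stand-in blocking function instead of the real libpyfoscamcgi session (names below are illustrative only):

import asyncio


class CameraCommandError(Exception):
    """Stand-in for HomeAssistantError in this sketch."""


def wake_up() -> tuple[int, dict]:
    """Stand-in for a blocking libpyfoscamcgi call returning (code, payload)."""
    return 0, {}


async def async_turn_off_sleep() -> None:
    # Run the blocking call off the event loop and check its return code.
    ret, _ = await asyncio.to_thread(wake_up)
    if ret != 0:
        raise CameraCommandError(f"Error waking up: {ret}")
    # The integration would request a coordinator refresh here.


asyncio.run(async_turn_off_sleep())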

View File

@@ -115,7 +115,6 @@ class FreeboxRouter:
self._api: Freepybox = api
self.name: str = freebox_config["model_info"]["pretty_name"]
self.model_id: str = freebox_config["model_info"]["name"]
self.mac: str = freebox_config["mac"]
self._sw_v: str = freebox_config["firmware_version"]
self._hw_v: str | None = freebox_config.get("board_name")
@@ -285,7 +284,6 @@ class FreeboxRouter:
manufacturer="Freebox SAS",
name=self.name,
model=self.name,
model_id=self.model_id,
sw_version=self._sw_v,
hw_version=self._hw_v,
)

View File

@@ -120,6 +120,7 @@ class FritzBoxTools(DataUpdateCoordinator[UpdateCoordinatorDataType]):
self.fritz_guest_wifi: FritzGuestWLAN = None
self.fritz_hosts: FritzHosts = None
self.fritz_status: FritzStatus = None
self.hass = hass
self.host = host
self.mesh_role = MeshRoles.NONE
self.mesh_wifi_uplink = False

View File

@@ -2,8 +2,3 @@
DOMAIN = "fyta"
CONF_EXPIRATION = "expiration"
CONF_MAX_ACCEPTABLE = "max_acceptable"
CONF_MAX_GOOD = "max_good"
CONF_MIN_ACCEPTABLE = "min_acceptable"
CONF_MIN_GOOD = "min_good"

View File

@@ -25,12 +25,6 @@ from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from homeassistant.helpers.typing import StateType
from .const import (
CONF_MAX_ACCEPTABLE,
CONF_MAX_GOOD,
CONF_MIN_ACCEPTABLE,
CONF_MIN_GOOD,
)
from .coordinator import FytaConfigEntry, FytaCoordinator
from .entity import FytaPlantEntity
@@ -42,13 +36,6 @@ class FytaSensorEntityDescription(SensorEntityDescription):
value_fn: Callable[[Plant], StateType | datetime]
@dataclass(frozen=True, kw_only=True)
class FytaMeasurementSensorEntityDescription(FytaSensorEntityDescription):
"""Describes Fyta sensor entity."""
attribute_fn: Callable[[Plant], dict[str, float | None]]
PLANT_STATUS_LIST: list[str] = ["deleted", "doing_great", "need_attention", "no_sensor"]
PLANT_MEASUREMENT_STATUS_LIST: list[str] = [
"no_data",
@@ -108,6 +95,35 @@ SENSORS: Final[list[FytaSensorEntityDescription]] = [
options=PLANT_MEASUREMENT_STATUS_LIST,
value_fn=lambda plant: plant.salinity_status.name.lower(),
),
FytaSensorEntityDescription(
key="temperature",
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
device_class=SensorDeviceClass.TEMPERATURE,
state_class=SensorStateClass.MEASUREMENT,
value_fn=lambda plant: plant.temperature,
),
FytaSensorEntityDescription(
key="light",
translation_key="light",
native_unit_of_measurement="μmol/s⋅m²",
state_class=SensorStateClass.MEASUREMENT,
value_fn=lambda plant: plant.light,
),
FytaSensorEntityDescription(
key="moisture",
native_unit_of_measurement=PERCENTAGE,
device_class=SensorDeviceClass.MOISTURE,
state_class=SensorStateClass.MEASUREMENT,
value_fn=lambda plant: plant.moisture,
),
FytaSensorEntityDescription(
key="salinity",
translation_key="salinity",
native_unit_of_measurement=UnitOfConductivity.MILLISIEMENS_PER_CM,
device_class=SensorDeviceClass.CONDUCTIVITY,
state_class=SensorStateClass.MEASUREMENT,
value_fn=lambda plant: plant.salinity,
),
FytaSensorEntityDescription(
key="ph",
device_class=SensorDeviceClass.PH,
@@ -136,62 +152,6 @@ SENSORS: Final[list[FytaSensorEntityDescription]] = [
),
]
MEASUREMENT_SENSORS: Final[list[FytaMeasurementSensorEntityDescription]] = [
FytaMeasurementSensorEntityDescription(
key="temperature",
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
device_class=SensorDeviceClass.TEMPERATURE,
state_class=SensorStateClass.MEASUREMENT,
attribute_fn=lambda plant: {
CONF_MAX_ACCEPTABLE: plant.temperature_max_acceptable,
CONF_MAX_GOOD: plant.temperature_max_good,
CONF_MIN_ACCEPTABLE: plant.temperature_min_acceptable,
CONF_MIN_GOOD: plant.temperature_min_good,
},
value_fn=lambda plant: plant.temperature,
),
FytaMeasurementSensorEntityDescription(
key="light",
translation_key="light",
native_unit_of_measurement="μmol/s⋅m²",
state_class=SensorStateClass.MEASUREMENT,
attribute_fn=lambda plant: {
CONF_MAX_ACCEPTABLE: plant.light_max_acceptable,
CONF_MAX_GOOD: plant.light_max_good,
CONF_MIN_ACCEPTABLE: plant.light_min_acceptable,
CONF_MIN_GOOD: plant.light_min_good,
},
value_fn=lambda plant: plant.light,
),
FytaMeasurementSensorEntityDescription(
key="moisture",
native_unit_of_measurement=PERCENTAGE,
device_class=SensorDeviceClass.MOISTURE,
state_class=SensorStateClass.MEASUREMENT,
attribute_fn=lambda plant: {
CONF_MAX_ACCEPTABLE: plant.moisture_max_acceptable,
CONF_MAX_GOOD: plant.moisture_max_good,
CONF_MIN_ACCEPTABLE: plant.moisture_min_acceptable,
CONF_MIN_GOOD: plant.moisture_min_good,
},
value_fn=lambda plant: plant.moisture,
),
FytaMeasurementSensorEntityDescription(
key="salinity",
translation_key="salinity",
native_unit_of_measurement=UnitOfConductivity.MILLISIEMENS_PER_CM,
device_class=SensorDeviceClass.CONDUCTIVITY,
state_class=SensorStateClass.MEASUREMENT,
attribute_fn=lambda plant: {
CONF_MAX_ACCEPTABLE: plant.salinity_max_acceptable,
CONF_MAX_GOOD: plant.salinity_max_good,
CONF_MIN_ACCEPTABLE: plant.salinity_min_acceptable,
CONF_MIN_GOOD: plant.salinity_min_good,
},
value_fn=lambda plant: plant.salinity,
),
]
async def async_setup_entry(
hass: HomeAssistant,
@@ -208,28 +168,14 @@ async def async_setup_entry(
if sensor.key in dir(coordinator.data.get(plant_id))
]
plant_entities.extend(
FytaPlantMeasurementSensor(coordinator, entry, sensor, plant_id)
for plant_id in coordinator.fyta.plant_list
for sensor in MEASUREMENT_SENSORS
if sensor.key in dir(coordinator.data.get(plant_id))
)
async_add_entities(plant_entities)
def _async_add_new_device(plant_id: int) -> None:
plant_entities = [
async_add_entities(
FytaPlantSensor(coordinator, entry, sensor, plant_id)
for sensor in SENSORS
if sensor.key in dir(coordinator.data.get(plant_id))
]
plant_entities.extend(
FytaPlantMeasurementSensor(coordinator, entry, sensor, plant_id)
for sensor in MEASUREMENT_SENSORS
if sensor.key in dir(coordinator.data.get(plant_id))
)
async_add_entities(plant_entities)
coordinator.new_device_callbacks.append(_async_add_new_device)
@@ -244,15 +190,3 @@ class FytaPlantSensor(FytaPlantEntity, SensorEntity):
"""Return the state for this sensor."""
return self.entity_description.value_fn(self.plant)
class FytaPlantMeasurementSensor(FytaPlantSensor):
"""Represents a Fyta measurement sensor."""
entity_description: FytaMeasurementSensorEntityDescription
@property
def extra_state_attributes(self) -> dict[str, float | None]:
"""Return the device state attributes."""
return self.entity_description.attribute_fn(self.plant)
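
Both the kept SENSORS list and the removed MEASUREMENT_SENSORS rely on the same description pattern: a frozen dataclass whose value_fn callable pulls the state out of the plant object. A minimal self-contained sketch of that pattern (the Plant class below is an illustrative stand-in, not the fyta library's model):

from collections.abc import Callable
from dataclasses import dataclass


@dataclass
class Plant:
    # Illustrative stand-in for the library's plant data object.
    temperature: float
    moisture: float


@dataclass(frozen=True, kw_only=True)
class SensorDescription:
    key: str
    value_fn: Callable[[Plant], float]


SENSORS = [
    SensorDescription(key="temperature", value_fn=lambda plant: plant.temperature),
    SensorDescription(key="moisture", value_fn=lambda plant: plant.moisture),
]

plant = Plant(temperature=21.5, moisture=42.0)
for description in SENSORS:
    print(description.key, description.value_fn(plant))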

View File

@@ -138,64 +138,10 @@
}
},
"light": {
"name": "Light",
"state_attributes": {
"max_acceptable": { "name": "Maximum acceptable" },
"max_good": { "name": "Maximum good" },
"min_acceptable": { "name": "Minimum acceptable" },
"min_good": { "name": "Minimum good" }
}
},
"moisture": {
"name": "[%key:component::sensor::entity_component::moisture::name%]",
"state_attributes": {
"max_acceptable": {
"name": "[%key:component::fyta::entity::sensor::light::state_attributes::max_acceptable::name%]"
},
"max_good": {
"name": "[%key:component::fyta::entity::sensor::light::state_attributes::max_good::name%]"
},
"min_acceptable": {
"name": "[%key:component::fyta::entity::sensor::light::state_attributes::min_acceptable::name%]"
},
"min_good": {
"name": "[%key:component::fyta::entity::sensor::light::state_attributes::min_good::name%]"
}
}
"name": "Light"
},
"salinity": {
"name": "Salinity",
"state_attributes": {
"max_acceptable": {
"name": "[%key:component::fyta::entity::sensor::light::state_attributes::max_acceptable::name%]"
},
"max_good": {
"name": "[%key:component::fyta::entity::sensor::light::state_attributes::max_good::name%]"
},
"min_acceptable": {
"name": "[%key:component::fyta::entity::sensor::light::state_attributes::min_acceptable::name%]"
},
"min_good": {
"name": "[%key:component::fyta::entity::sensor::light::state_attributes::min_good::name%]"
}
}
},
"temperature": {
"name": "[%key:component::sensor::entity_component::temperature::name%]",
"state_attributes": {
"max_acceptable": {
"name": "[%key:component::fyta::entity::sensor::light::state_attributes::max_acceptable::name%]"
},
"max_good": {
"name": "[%key:component::fyta::entity::sensor::light::state_attributes::max_good::name%]"
},
"min_acceptable": {
"name": "[%key:component::fyta::entity::sensor::light::state_attributes::min_acceptable::name%]"
},
"min_good": {
"name": "[%key:component::fyta::entity::sensor::light::state_attributes::min_good::name%]"
}
}
"name": "Salinity"
},
"last_fertilised": {
"name": "Last fertilized"

View File

@@ -29,6 +29,7 @@ class GlancesDataUpdateCoordinator(DataUpdateCoordinator[dict[str, Any]]):
self, hass: HomeAssistant, entry: GlancesConfigEntry, api: Glances
) -> None:
"""Initialize the Glances data."""
self.hass = hass
self.host: str = entry.data[CONF_HOST]
self.api = api
super().__init__(

View File

@@ -146,20 +146,6 @@ async def light_switch_options_schema(
)
LIGHT_CONFIG_SCHEMA = basic_group_config_schema("light").extend(
{
vol.Required(CONF_ALL, default=False): selector.BooleanSelector(),
}
)
SWITCH_CONFIG_SCHEMA = basic_group_config_schema("switch").extend(
{
vol.Required(CONF_ALL, default=False): selector.BooleanSelector(),
}
)
GROUP_TYPES = [
"binary_sensor",
"button",
@@ -224,7 +210,7 @@ CONFIG_FLOW = {
validate_user_input=set_group_type("fan"),
),
"light": SchemaFlowFormStep(
LIGHT_CONFIG_SCHEMA,
basic_group_config_schema("light"),
preview="group",
validate_user_input=set_group_type("light"),
),
@@ -249,7 +235,7 @@ CONFIG_FLOW = {
validate_user_input=set_group_type("sensor"),
),
"switch": SchemaFlowFormStep(
SWITCH_CONFIG_SCHEMA,
basic_group_config_schema("switch"),
preview="group",
validate_user_input=set_group_type("switch"),
),
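
The removed LIGHT_CONFIG_SCHEMA and SWITCH_CONFIG_SCHEMA were simply the basic group schema extended with an "all" toggle. A rough voluptuous sketch of that extension pattern, using a plain bool in place of Home Assistant's BooleanSelector and a hypothetical base schema:

import voluptuous as vol

# Hypothetical stand-in for basic_group_config_schema("light").
BASE_SCHEMA = vol.Schema({vol.Required("entities"): [str], vol.Required("name"): str})

# The removed constants extended the base schema with an optional "all" flag.
LIGHT_CONFIG_SCHEMA = BASE_SCHEMA.extend({vol.Required("all", default=False): bool})

print(LIGHT_CONFIG_SCHEMA({"entities": ["light.a", "light.b"], "name": "My lights"}))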

View File

@@ -66,13 +66,9 @@
"light": {
"title": "[%key:component::group::config::step::user::title%]",
"data": {
"all": "[%key:component::group::config::step::binary_sensor::data::all%]",
"entities": "[%key:component::group::config::step::binary_sensor::data::entities%]",
"hide_members": "[%key:component::group::config::step::binary_sensor::data::hide_members%]",
"name": "[%key:common::config_flow::data::name%]"
},
"data_description": {
"all": "[%key:component::group::config::step::binary_sensor::data_description::all%]"
}
},
"lock": {
@@ -119,13 +115,9 @@
"switch": {
"title": "[%key:component::group::config::step::user::title%]",
"data": {
"all": "[%key:component::group::config::step::binary_sensor::data::all%]",
"entities": "[%key:component::group::config::step::binary_sensor::data::entities%]",
"hide_members": "[%key:component::group::config::step::binary_sensor::data::hide_members%]",
"name": "[%key:common::config_flow::data::name%]"
},
"data_description": {
"all": "[%key:component::group::config::step::binary_sensor::data_description::all%]"
}
}
}

View File

@@ -74,7 +74,7 @@ class ValveControllerEntity(GuardianEntity):
self._attr_device_info = DeviceInfo(
identifiers={(DOMAIN, entry.data[CONF_UID])},
manufacturer="Elexa",
sw_version=self._diagnostics_coordinator.data["firmware"],
model=self._diagnostics_coordinator.data["firmware"],
name=f"Guardian valve controller {entry.data[CONF_UID]}",
)
self._attr_unique_id = f"{entry.data[CONF_UID]}_{description.key}"

View File

@@ -1,26 +1,19 @@
"""The habitica integration."""
from uuid import UUID
from habiticalib import Habitica
from homeassistant.const import CONF_API_KEY, CONF_URL, CONF_VERIFY_SSL, Platform
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers import config_validation as cv, device_registry as dr
from homeassistant.core import HomeAssistant
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.typing import ConfigType
from homeassistant.util.hass_dict import HassKey
from .const import CONF_API_USER, DOMAIN, X_CLIENT
from .coordinator import (
HabiticaConfigEntry,
HabiticaDataUpdateCoordinator,
HabiticaPartyCoordinator,
)
from .coordinator import HabiticaConfigEntry, HabiticaDataUpdateCoordinator
from .services import async_setup_services
CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN)
HABITICA_KEY: HassKey[dict[UUID, HabiticaPartyCoordinator]] = HassKey(DOMAIN)
PLATFORMS = [
Platform.BINARY_SENSOR,
@@ -44,8 +37,6 @@ async def async_setup_entry(
hass: HomeAssistant, config_entry: HabiticaConfigEntry
) -> bool:
"""Set up habitica from a config entry."""
party_added_by_this_entry: UUID | None = None
device_reg = dr.async_get(hass)
session = async_get_clientsession(
hass, verify_ssl=config_entry.data.get(CONF_VERIFY_SSL, True)
@@ -63,53 +54,11 @@ async def async_setup_entry(
await coordinator.async_config_entry_first_refresh()
config_entry.runtime_data = coordinator
party = coordinator.data.user.party.id
if HABITICA_KEY not in hass.data:
hass.data[HABITICA_KEY] = {}
if party is not None and party not in hass.data[HABITICA_KEY]:
party_coordinator = HabiticaPartyCoordinator(hass, config_entry, api)
await party_coordinator.async_config_entry_first_refresh()
hass.data[HABITICA_KEY][party] = party_coordinator
party_added_by_this_entry = party
@callback
def _party_update_listener() -> None:
"""On party change, unload coordinator, remove device and reload."""
nonlocal party, party_added_by_this_entry
party_updated = coordinator.data.user.party.id
if (
party is not None and (party not in hass.data[HABITICA_KEY])
) or party != party_updated:
if party_added_by_this_entry:
config_entry.async_create_task(
hass, shutdown_party_coordinator(hass, party_added_by_this_entry)
)
party_added_by_this_entry = None
if party:
identifier = {(DOMAIN, f"{config_entry.unique_id}_{party!s}")}
if device := device_reg.async_get_device(identifiers=identifier):
device_reg.async_update_device(
device.id, remove_config_entry_id=config_entry.entry_id
)
hass.config_entries.async_schedule_reload(config_entry.entry_id)
coordinator.async_add_listener(_party_update_listener)
await hass.config_entries.async_forward_entry_setups(config_entry, PLATFORMS)
return True
async def shutdown_party_coordinator(hass: HomeAssistant, party_added: UUID) -> None:
"""Handle party coordinator shutdown."""
await hass.data[HABITICA_KEY][party_added].async_shutdown()
hass.data[HABITICA_KEY].pop(party_added)
async def async_unload_entry(hass: HomeAssistant, entry: HabiticaConfigEntry) -> bool:
"""Unload a config entry."""
return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)

View File

@@ -6,20 +6,18 @@ from collections.abc import Callable
from dataclasses import dataclass
from enum import StrEnum
from habiticalib import ContentData, UserData
from habiticalib import UserData
from homeassistant.components.binary_sensor import (
BinarySensorDeviceClass,
BinarySensorEntity,
BinarySensorEntityDescription,
)
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from . import HABITICA_KEY
from .const import ASSETS_URL
from .coordinator import HabiticaConfigEntry, HabiticaPartyCoordinator
from .entity import HabiticaBase, HabiticaPartyBase
from .coordinator import HabiticaConfigEntry
from .entity import HabiticaBase
PARALLEL_UPDATES = 1
@@ -36,7 +34,6 @@ class HabiticaBinarySensor(StrEnum):
"""Habitica Entities."""
PENDING_QUEST = "pending_quest"
QUEST_RUNNING = "quest_running"
def get_scroll_image_for_pending_quest_invitation(user: UserData) -> str | None:
@@ -65,21 +62,10 @@ async def async_setup_entry(
coordinator = config_entry.runtime_data
entities: list[BinarySensorEntity] = [
async_add_entities(
HabiticaBinarySensorEntity(coordinator, description)
for description in BINARY_SENSOR_DESCRIPTIONS
]
if party := coordinator.data.user.party.id:
party_coordinator = hass.data[HABITICA_KEY][party]
entities.append(
HabiticaPartyBinarySensorEntity(
party_coordinator,
config_entry,
coordinator.content,
)
)
async_add_entities(entities)
)
class HabiticaBinarySensorEntity(HabiticaBase, BinarySensorEntity):
@@ -100,27 +86,3 @@ class HabiticaBinarySensorEntity(HabiticaBase, BinarySensorEntity):
):
return f"{ASSETS_URL}{entity_picture}"
return None
class HabiticaPartyBinarySensorEntity(HabiticaPartyBase, BinarySensorEntity):
"""Representation of a Habitica party binary sensor."""
entity_description = BinarySensorEntityDescription(
key=HabiticaBinarySensor.QUEST_RUNNING,
translation_key=HabiticaBinarySensor.QUEST_RUNNING,
device_class=BinarySensorDeviceClass.RUNNING,
)
def __init__(
self,
coordinator: HabiticaPartyCoordinator,
config_entry: HabiticaConfigEntry,
content: ContentData,
) -> None:
"""Initialize the binary sensor."""
super().__init__(coordinator, config_entry, self.entity_description, content)
@property
def is_on(self) -> bool | None:
"""If the binary sensor is on."""
return self.coordinator.data.quest.active

View File

@@ -2,7 +2,6 @@
from __future__ import annotations
from abc import abstractmethod
from collections.abc import Callable
from dataclasses import dataclass
from datetime import timedelta
@@ -14,7 +13,6 @@ from aiohttp import ClientError
from habiticalib import (
Avatar,
ContentData,
GroupData,
Habitica,
HabiticaException,
NotAuthorizedError,
@@ -51,11 +49,10 @@ class HabiticaData:
type HabiticaConfigEntry = ConfigEntry[HabiticaDataUpdateCoordinator]
class HabiticaBaseCoordinator[_DataT](DataUpdateCoordinator[_DataT]):
"""Habitica coordinator base class."""
class HabiticaDataUpdateCoordinator(DataUpdateCoordinator[HabiticaData]):
"""Habitica Data Update Coordinator."""
config_entry: HabiticaConfigEntry
_update_interval: timedelta
def __init__(
self, hass: HomeAssistant, config_entry: HabiticaConfigEntry, habitica: Habitica
@@ -66,7 +63,7 @@ class HabiticaBaseCoordinator[_DataT](DataUpdateCoordinator[_DataT]):
_LOGGER,
config_entry=config_entry,
name=DOMAIN,
update_interval=self._update_interval,
update_interval=timedelta(seconds=60),
request_refresh_debouncer=Debouncer(
hass,
_LOGGER,
@@ -74,40 +71,8 @@ class HabiticaBaseCoordinator[_DataT](DataUpdateCoordinator[_DataT]):
immediate=False,
),
)
self.habitica = habitica
@abstractmethod
async def _update_data(self) -> _DataT:
"""Fetch data."""
async def _async_update_data(self) -> _DataT:
"""Fetch the latest party data."""
try:
return await self._update_data()
except TooManyRequestsError:
_LOGGER.debug("Rate limit exceeded, will try again later")
return self.data
except HabiticaException as e:
raise UpdateFailed(
translation_domain=DOMAIN,
translation_key="service_call_exception",
translation_placeholders={"reason": str(e.error.message)},
) from e
except ClientError as e:
raise UpdateFailed(
translation_domain=DOMAIN,
translation_key="service_call_exception",
translation_placeholders={"reason": str(e)},
) from e
class HabiticaDataUpdateCoordinator(HabiticaBaseCoordinator[HabiticaData]):
"""Habitica Data Update Coordinator."""
_update_interval = timedelta(seconds=30)
content: ContentData
self.content: ContentData
async def _async_setup(self) -> None:
"""Set up Habitica integration."""
@@ -141,16 +106,30 @@ class HabiticaDataUpdateCoordinator(HabiticaBaseCoordinator[HabiticaData]):
translation_placeholders={"reason": str(e)},
) from e
async def _update_data(self) -> HabiticaData:
"""Fetch the latest data."""
user = (await self.habitica.get_user()).data
tasks = (await self.habitica.get_tasks()).data
completed_todos = (
await self.habitica.get_tasks(TaskFilter.COMPLETED_TODOS)
).data
return HabiticaData(user=user, tasks=tasks + completed_todos)
async def _async_update_data(self) -> HabiticaData:
try:
user = (await self.habitica.get_user()).data
tasks = (await self.habitica.get_tasks()).data
completed_todos = (
await self.habitica.get_tasks(TaskFilter.COMPLETED_TODOS)
).data
except TooManyRequestsError:
_LOGGER.debug("Rate limit exceeded, will try again later")
return self.data
except HabiticaException as e:
raise UpdateFailed(
translation_domain=DOMAIN,
translation_key="service_call_exception",
translation_placeholders={"reason": str(e.error.message)},
) from e
except ClientError as e:
raise UpdateFailed(
translation_domain=DOMAIN,
translation_key="service_call_exception",
translation_placeholders={"reason": str(e)},
) from e
else:
return HabiticaData(user=user, tasks=tasks + completed_todos)
async def execute(self, func: Callable[[Habitica], Any]) -> None:
"""Execute an API call."""
@@ -190,13 +169,3 @@ class HabiticaDataUpdateCoordinator(HabiticaBaseCoordinator[HabiticaData]):
await self.habitica.generate_avatar(fp=png, avatar=avatar, fmt="PNG")
return png.getvalue()
class HabiticaPartyCoordinator(HabiticaBaseCoordinator[GroupData]):
"""Habitica Party Coordinator."""
_update_interval = timedelta(minutes=15)
async def _update_data(self) -> GroupData:
"""Fetch the latest party data."""
return (await self.habitica.get_group()).data
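
Both sides of this refactor treat Habitica's rate limiting the same way: a TooManyRequestsError silently keeps the previous data, while other API or client errors are re-raised as UpdateFailed. A minimal standalone sketch of that error-handling shape, with placeholder exception types instead of habiticalib's and Home Assistant's real classes:

import asyncio


class TooManyRequestsError(Exception):
    """Placeholder for the library's rate-limit error."""


class ApiError(Exception):
    """Placeholder for other API or client errors."""


class UpdateFailed(Exception):
    """Placeholder for Home Assistant's UpdateFailed."""


class SketchCoordinator:
    """Sketch of the update shape shared by the Habitica coordinators."""

    def __init__(self) -> None:
        self.data: dict = {}

    async def _fetch(self) -> dict:
        # Stand-in for the real calls (get_user/get_tasks or get_group).
        return {"quest": {"active": False}}

    async def _async_update_data(self) -> dict:
        try:
            return await self._fetch()
        except TooManyRequestsError:
            # Rate limited: quietly keep serving the last successful data set.
            return self.data
        except ApiError as err:
            raise UpdateFailed(f"Update failed: {err}") from err


print(asyncio.run(SketchCoordinator()._async_update_data()))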

View File

@@ -4,7 +4,6 @@ from __future__ import annotations
from typing import TYPE_CHECKING
from habiticalib import ContentData
from yarl import URL
from homeassistant.const import CONF_URL
@@ -13,11 +12,7 @@ from homeassistant.helpers.entity import EntityDescription
from homeassistant.helpers.update_coordinator import CoordinatorEntity
from .const import DOMAIN, MANUFACTURER, NAME
from .coordinator import (
HabiticaConfigEntry,
HabiticaDataUpdateCoordinator,
HabiticaPartyCoordinator,
)
from .coordinator import HabiticaDataUpdateCoordinator
class HabiticaBase(CoordinatorEntity[HabiticaDataUpdateCoordinator]):
@@ -50,33 +45,3 @@ class HabiticaBase(CoordinatorEntity[HabiticaDataUpdateCoordinator]):
),
identifiers={(DOMAIN, coordinator.config_entry.unique_id)},
)
class HabiticaPartyBase(CoordinatorEntity[HabiticaPartyCoordinator]):
"""Base Habitica entity representing a party."""
_attr_has_entity_name = True
def __init__(
self,
coordinator: HabiticaPartyCoordinator,
config_entry: HabiticaConfigEntry,
entity_description: EntityDescription,
content: ContentData,
) -> None:
"""Initialize a Habitica party entity."""
super().__init__(coordinator)
if TYPE_CHECKING:
assert config_entry.unique_id
unique_id = f"{config_entry.unique_id}_{coordinator.data.id!s}"
self.entity_description = entity_description
self._attr_unique_id = f"{unique_id}_{entity_description.key}"
self._attr_device_info = DeviceInfo(
entry_type=DeviceEntryType.SERVICE,
manufacturer=MANUFACTURER,
model=NAME,
name=coordinator.data.summary,
identifiers={(DOMAIN, unique_id)},
via_device=(DOMAIN, config_entry.unique_id),
)
self.content = content

View File

@@ -156,24 +156,6 @@
},
"pending_quest_items": {
"default": "mdi:sack"
},
"group_leader": {
"default": "mdi:shield-crown"
},
"quest": {
"default": "mdi:script-text-outline"
},
"boss": {
"default": "mdi:emoticon-devil"
},
"boss_hp": {
"default": "mdi:heart"
},
"boss_hp_remaining": {
"default": "mdi:heart"
},
"collected_items": {
"default": "mdi:sack"
}
},
"switch": {
@@ -190,9 +172,6 @@
"state": {
"on": "mdi:script-text-outline"
}
},
"quest_running": {
"default": "mdi:script-text-play"
}
}
},

View File

@@ -4,21 +4,15 @@ from __future__ import annotations
from enum import StrEnum
from habiticalib import Avatar, ContentData, extract_avatar
from habiticalib import Avatar, extract_avatar
from homeassistant.components.image import Image, ImageEntity, ImageEntityDescription
from homeassistant.components.image import ImageEntity, ImageEntityDescription
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from homeassistant.util import dt as dt_util
from . import HABITICA_KEY
from .const import ASSETS_URL
from .coordinator import (
HabiticaConfigEntry,
HabiticaDataUpdateCoordinator,
HabiticaPartyCoordinator,
)
from .entity import HabiticaBase, HabiticaPartyBase
from .coordinator import HabiticaConfigEntry, HabiticaDataUpdateCoordinator
from .entity import HabiticaBase
PARALLEL_UPDATES = 1
@@ -27,7 +21,6 @@ class HabiticaImageEntity(StrEnum):
"""Image entities."""
AVATAR = "avatar"
QUEST_IMAGE = "quest_image"
async def async_setup_entry(
@@ -38,17 +31,8 @@ async def async_setup_entry(
"""Set up the habitica image platform."""
coordinator = config_entry.runtime_data
entities: list[ImageEntity] = [HabiticaImage(hass, coordinator)]
if party := coordinator.data.user.party.id:
party_coordinator = hass.data[HABITICA_KEY][party]
entities.append(
HabiticaPartyImage(
hass, party_coordinator, config_entry, coordinator.content
)
)
async_add_entities(entities)
async_add_entities([HabiticaImage(hass, coordinator)])
class HabiticaImage(HabiticaBase, ImageEntity):
@@ -88,58 +72,3 @@ class HabiticaImage(HabiticaBase, ImageEntity):
if not self._cache and self._avatar:
self._cache = await self.coordinator.generate_avatar(self._avatar)
return self._cache
class HabiticaPartyImage(HabiticaPartyBase, ImageEntity):
"""A Habitica image entity of a party."""
entity_description = ImageEntityDescription(
key=HabiticaImageEntity.QUEST_IMAGE,
translation_key=HabiticaImageEntity.QUEST_IMAGE,
)
_attr_content_type = "image/png"
def __init__(
self,
hass: HomeAssistant,
coordinator: HabiticaPartyCoordinator,
config_entry: HabiticaConfigEntry,
content: ContentData,
) -> None:
"""Initialize the image entity."""
super().__init__(coordinator, config_entry, self.entity_description, content)
ImageEntity.__init__(self, hass)
self._attr_image_url = self.image_url
self._attr_image_last_updated = dt_util.utcnow()
def _handle_coordinator_update(self) -> None:
"""Handle updated data from the coordinator."""
if self.image_url != self._attr_image_url:
self._attr_image_url = self.image_url
self._cached_image = None
self._attr_image_last_updated = dt_util.utcnow()
super()._handle_coordinator_update()
@property
def image_url(self) -> str | None:
"""Return URL of image."""
return (
f"{ASSETS_URL}quest_{key}.png"
if (key := self.coordinator.data.quest.key)
else None
)
async def _async_load_image_from_url(self, url: str) -> Image | None:
"""Load an image by url.
AWS sometimes returns 'application/octet-stream' as content-type
"""
if response := await self._fetch_url(url):
return Image(
content=response.content,
content_type=self._attr_content_type,
)
return None

View File

@@ -7,5 +7,5 @@
"iot_class": "cloud_polling",
"loggers": ["habiticalib"],
"quality_scale": "platinum",
"requirements": ["habiticalib==0.4.2"]
"requirements": ["habiticalib==0.4.1"]
}

View File

@@ -72,7 +72,7 @@ rules:
comment: Used to inform of deprecated entities and actions.
stale-devices:
status: done
comment: Party device is removed if stale.
comment: Not applicable. Only one device per config entry. Removed together with the config entry.
# Platinum
async-dependency: done

View File

@@ -8,7 +8,7 @@ from enum import StrEnum
import logging
from typing import Any
from habiticalib import ContentData, GroupData, HabiticaClass, TaskData, UserData, ha
from habiticalib import ContentData, HabiticaClass, TaskData, UserData, ha
from homeassistant.components.sensor import (
SensorDeviceClass,
@@ -20,19 +20,15 @@ from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from homeassistant.helpers.typing import StateType
from homeassistant.util import dt as dt_util
from . import HABITICA_KEY
from .const import ASSETS_URL
from .coordinator import HabiticaConfigEntry
from .entity import HabiticaBase, HabiticaPartyBase
from .entity import HabiticaBase
from .util import (
collected_quest_items,
get_attribute_points,
get_attributes_total,
inventory_list,
pending_damage,
pending_quest_items,
quest_attributes,
quest_boss,
)
_LOGGER = logging.getLogger(__name__)
@@ -59,17 +55,6 @@ class HabiticaSensorEntityDescription(SensorEntityDescription):
entity_picture: str | None = None
@dataclass(kw_only=True, frozen=True)
class HabiticaPartySensorEntityDescription(SensorEntityDescription):
"""Habitica Party Sensor Description."""
value_fn: Callable[[GroupData, ContentData], StateType]
entity_picture: Callable[[GroupData], str | None] | str | None = None
attributes_fn: Callable[[GroupData, ContentData], dict[str, Any] | None] | None = (
None
)
@dataclass(kw_only=True, frozen=True)
class HabiticaTaskSensorEntityDescription(SensorEntityDescription):
"""Habitica Task Sensor Description."""
@@ -104,13 +89,6 @@ class HabiticaSensorEntity(StrEnum):
QUEST_SCROLLS = "quest_scrolls"
PENDING_DAMAGE = "pending_damage"
PENDING_QUEST_ITEMS = "pending_quest_items"
MEMBER_COUNT = "member_count"
GROUP_LEADER = "group_leader"
QUEST = "quest"
BOSS = "boss"
BOSS_HP = "boss_hp"
BOSS_HP_REMAINING = "boss_hp_remaining"
COLLECTED_ITEMS = "collected_items"
SENSOR_DESCRIPTIONS: tuple[HabiticaSensorEntityDescription, ...] = (
@@ -284,67 +262,6 @@ SENSOR_DESCRIPTIONS: tuple[HabiticaSensorEntityDescription, ...] = (
)
SENSOR_DESCRIPTIONS_PARTY: tuple[HabiticaPartySensorEntityDescription, ...] = (
HabiticaPartySensorEntityDescription(
key=HabiticaSensorEntity.MEMBER_COUNT,
translation_key=HabiticaSensorEntity.MEMBER_COUNT,
value_fn=lambda party, _: party.memberCount,
entity_picture=ha.PARTY,
),
HabiticaPartySensorEntityDescription(
key=HabiticaSensorEntity.GROUP_LEADER,
translation_key=HabiticaSensorEntity.GROUP_LEADER,
value_fn=lambda party, _: party.leader.profile.name,
),
HabiticaPartySensorEntityDescription(
key=HabiticaSensorEntity.QUEST,
translation_key=HabiticaSensorEntity.QUEST,
value_fn=lambda p, c: c.quests[p.quest.key].text if p.quest.key else None,
attributes_fn=quest_attributes,
entity_picture=(
lambda party: f"inventory_quest_scroll_{party.quest.key}.png"
if party.quest.key
else None
),
),
HabiticaPartySensorEntityDescription(
key=HabiticaSensorEntity.BOSS,
translation_key=HabiticaSensorEntity.BOSS,
value_fn=lambda p, c: boss.name if (boss := quest_boss(p, c)) else None,
),
HabiticaPartySensorEntityDescription(
key=HabiticaSensorEntity.BOSS_HP,
translation_key=HabiticaSensorEntity.BOSS_HP,
value_fn=lambda p, c: boss.hp if (boss := quest_boss(p, c)) else None,
entity_picture=ha.HP,
suggested_display_precision=0,
),
HabiticaPartySensorEntityDescription(
key=HabiticaSensorEntity.BOSS_HP_REMAINING,
translation_key=HabiticaSensorEntity.BOSS_HP_REMAINING,
value_fn=lambda p, _: p.quest.progress.hp,
entity_picture=ha.HP,
suggested_display_precision=2,
),
HabiticaPartySensorEntityDescription(
key=HabiticaSensorEntity.COLLECTED_ITEMS,
translation_key=HabiticaSensorEntity.COLLECTED_ITEMS,
value_fn=(
lambda p, _: sum(n for n in p.quest.progress.collect.values())
if p.quest.progress.collect
else None
),
attributes_fn=collected_quest_items,
entity_picture=(
lambda p: f"quest_{p.quest.key}_{k}.png"
if p.quest.progress.collect
and (k := next(iter(p.quest.progress.collect), None))
else None
),
),
)
async def async_setup_entry(
hass: HomeAssistant,
config_entry: HabiticaConfigEntry,
@@ -358,18 +275,6 @@ async def async_setup_entry(
HabiticaSensor(coordinator, description) for description in SENSOR_DESCRIPTIONS
)
if party := coordinator.data.user.party.id:
party_coordinator = hass.data[HABITICA_KEY][party]
async_add_entities(
HabiticaPartySensor(
party_coordinator,
config_entry,
description,
coordinator.content,
)
for description in SENSOR_DESCRIPTIONS_PARTY
)
class HabiticaSensor(HabiticaBase, SensorEntity):
"""A generic Habitica sensor."""
@@ -412,39 +317,3 @@ class HabiticaSensor(HabiticaBase, SensorEntity):
)
return None
class HabiticaPartySensor(HabiticaPartyBase, SensorEntity):
"""Habitica party sensor."""
entity_description: HabiticaPartySensorEntityDescription
@property
def native_value(self) -> StateType:
"""Return the state of the device."""
return self.entity_description.value_fn(self.coordinator.data, self.content)
@property
def entity_picture(self) -> str | None:
"""Return the entity picture to use in the frontend, if any."""
pic = self.entity_description.entity_picture
entity_picture = (
pic if isinstance(pic, str) or pic is None else pic(self.coordinator.data)
)
return (
None
if not entity_picture
else entity_picture
if entity_picture.startswith("data:image")
else f"{ASSETS_URL}{entity_picture}"
)
@property
def extra_state_attributes(self) -> dict[str, Any] | None:
"""Return entity specific state attributes."""
if func := self.entity_description.attributes_fn:
return func(self.coordinator.data, self.content)
return None

View File

@@ -7,7 +7,6 @@
"unit_health_points": "HP",
"unit_mana_points": "MP",
"unit_experience_points": "XP",
"unit_items": "items",
"config_entry_description": "Select the Habitica account to update a task.",
"task_description": "The name (or task ID) of the task you want to update.",
"rename_name": "Rename",
@@ -64,8 +63,7 @@
"repeat_weekly_options_name": "Weekly repeat days",
"repeat_weekly_options_description": "Options related to weekly repetition, applicable when the repetition interval is set to weekly.",
"repeat_monthly_options_name": "Monthly repeat day",
"repeat_monthly_options_description": "Options related to monthly repetition, applicable when the repetition interval is set to monthly.",
"quest_name": "Quest"
"repeat_monthly_options_description": "Options related to monthly repetition, applicable when the repetition interval is set to monthly."
},
"config": {
"abort": {
@@ -175,9 +173,6 @@
"binary_sensor": {
"pending_quest": {
"name": "Pending quest invitation"
},
"quest_running": {
"name": "Quest status"
}
},
"button": {
@@ -256,9 +251,6 @@
"image": {
"avatar": {
"name": "Avatar"
},
"quest_image": {
"name": "[%key:component::habitica::common::quest_name%]"
}
},
"sensor": {
@@ -428,37 +420,7 @@
},
"pending_quest_items": {
"name": "Pending quest items",
"unit_of_measurement": "[%key:component::habitica::common::unit_items%]"
},
"member_count": {
"name": "Member count",
"unit_of_measurement": "members"
},
"group_leader": {
"name": "Group leader"
},
"quest": {
"name": "[%key:component::habitica::common::quest_name%]",
"state_attributes": {
"quest_details": {
"name": "Quest details"
}
}
},
"boss": {
"name": "Quest boss"
},
"boss_hp": {
"name": "Boss health",
"unit_of_measurement": "[%key:component::habitica::common::unit_health_points%]"
},
"boss_hp_remaining": {
"name": "Boss health remaining",
"unit_of_measurement": "[%key:component::habitica::common::unit_health_points%]"
},
"collected_items": {
"name": "Collected quest items",
"unit_of_measurement": "[%key:component::habitica::common::unit_items%]"
"unit_of_measurement": "items"
}
},
"switch": {

View File

@@ -5,7 +5,7 @@ from __future__ import annotations
from dataclasses import asdict, fields
import datetime
from math import floor
from typing import TYPE_CHECKING, Any, Literal
from typing import TYPE_CHECKING, Literal
from dateutil.rrule import (
DAILY,
@@ -21,7 +21,7 @@ from dateutil.rrule import (
YEARLY,
rrule,
)
from habiticalib import ContentData, Frequency, GroupData, QuestBoss, TaskData, UserData
from habiticalib import ContentData, Frequency, TaskData, UserData
from homeassistant.util import dt as dt_util
@@ -184,32 +184,3 @@ def pending_damage(user: UserData, content: ContentData) -> float | None:
and content.quests[user.party.quest.key].boss is not None
else None
)
def quest_attributes(party: GroupData, content: ContentData) -> dict[str, Any]:
"""Quest description."""
return {
"quest_details": content.quests[party.quest.key].notes
if party.quest.key
else None,
"quest_participants": f"{sum(x is True for x in party.quest.members.values())} / {party.memberCount}",
}
def quest_boss(party: GroupData, content: ContentData) -> QuestBoss | None:
"""Quest boss."""
return content.quests[party.quest.key].boss if party.quest.key else None
def collected_quest_items(party: GroupData, content: ContentData) -> dict[str, Any]:
"""List collected quest items."""
return (
{
collect[k].text: f"{v} / {collect[k].count}"
for k, v in party.quest.progress.collect.items()
}
if party.quest.key and (collect := content.quests[party.quest.key].collect)
else {}
)

View File

@@ -103,7 +103,6 @@ ISSUE_KEYS_FOR_REPAIRS = {
ISSUE_KEY_SYSTEM_DOCKER_CONFIG,
ISSUE_KEY_ADDON_DETACHED_ADDON_MISSING,
ISSUE_KEY_ADDON_DETACHED_ADDON_REMOVED,
"issue_system_disk_lifetime",
}
_LOGGER = logging.getLogger(__name__)

View File

@@ -115,10 +115,6 @@
}
}
},
"issue_system_disk_lifetime": {
"title": "Disk lifetime exceeding 90%",
"description": "The data disk has exceeded 90% of its expected lifespan. The disk may soon malfunction which can lead to data loss. You should replace it soon and migrate your data."
},
"unhealthy": {
"title": "Unhealthy system - {reason}",
"description": "System is currently unhealthy due to {reason}. For troubleshooting information, select Learn more."

View File

@@ -2,6 +2,7 @@
from __future__ import annotations
import asyncio
from asyncio import sleep as asyncio_sleep
from collections import defaultdict
from collections.abc import Callable
@@ -53,6 +54,7 @@ _LOGGER = logging.getLogger(__name__)
MAX_EXECUTIONS_TIME_WINDOW = 60 * 60 # 1 hour
MAX_EXECUTIONS = 8
UPDATE_PROGRAMS_INTERVAL_SEC = 1 # 1 second
type HomeConnectConfigEntry = ConfigEntry[HomeConnectCoordinator]
@@ -251,11 +253,30 @@ class HomeConnectCoordinator(
EventKey.BSH_COMMON_ROOT_ACTIVE_PROGRAM,
EventKey.BSH_COMMON_ROOT_SELECTED_PROGRAM,
):
await self.update_options(
event_message_ha_id,
event_key,
ProgramKey(cast(str, event.value)),
program_key = ProgramKey(cast(str, event.value))
# If the active program is unknown, options must be updated using the selected program.
if (
event_key
is EventKey.BSH_COMMON_ROOT_ACTIVE_PROGRAM
and event.value is ProgramKey.UNKNOWN
):
select_program_event = events.get(
EventKey.BSH_COMMON_ROOT_SELECTED_PROGRAM
)
if select_program_event:
program_key = cast(
ProgramKey,
select_program_event.value,
)
# Wait for a second before updating options because it may take time for the Home Connect API
# to update the options after the program change.
await asyncio.sleep(
UPDATE_PROGRAMS_INTERVAL_SEC
)
await self.update_options(
event_message_ha_id, event_key, program_key
)
events[event_key] = event
self._call_event_listener(event_message)
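
This added branch is what the "Fix options for active and select program in Home Connect" commit is about: when the active-program event reports an unknown program, fall back to the selected-program event from the same batch, and wait a second in that case so the API has time to expose the new options. A minimal standalone sketch of that decision, using simplified stand-ins for aiohomeconnect's event and program keys:

import asyncio
from enum import Enum


class EventKey(Enum):
    ACTIVE_PROGRAM = "active"
    SELECTED_PROGRAM = "selected"


UNKNOWN = "unknown"
UPDATE_PROGRAMS_INTERVAL_SEC = 1


async def resolve_program_key(events: dict[EventKey, str], event_key: EventKey) -> str:
    """Pick the program key to use when refreshing program options."""
    program_key = events[event_key]
    if event_key is EventKey.ACTIVE_PROGRAM and program_key == UNKNOWN:
        # Fall back to the selected program reported in the same event batch.
        selected = events.get(EventKey.SELECTED_PROGRAM)
        if selected is not None:
            program_key = selected
        # Give the (real) API a moment before the options are re-fetched.
        await asyncio.sleep(UPDATE_PROGRAMS_INTERVAL_SEC)
    return program_key


batch = {EventKey.ACTIVE_PROGRAM: UNKNOWN, EventKey.SELECTED_PROGRAM: "Dishcare.Dishwasher.Program.Eco50"}
print(asyncio.run(resolve_program_key(batch, EventKey.ACTIVE_PROGRAM)))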

Some files were not shown because too many files have changed in this diff