Mirror of https://github.com/home-assistant/core.git (synced 2025-11-04 16:39:28 +00:00)

Compare commits: llm_device...cdce8p-bui

3 commits:

| Author | SHA1 | Date |
|---|---|---|
| | a2140bd033 | |
| | 133054693e | |
| | f695fa182c | |

@@ -41,7 +41,6 @@
    "python.terminal.activateEnvInCurrentTerminal": true,
    "python.testing.pytestArgs": ["--no-cov"],
    "pylint.importStrategy": "fromEnvironment",
    "python.analysis.typeCheckingMode": "basic",
    "editor.formatOnPaste": false,
    "editor.formatOnSave": true,
    "editor.formatOnType": true

.github/copilot-instructions.md (vendored, 1 change)

@@ -74,7 +74,6 @@ rules:
- **Formatting**: Ruff
- **Linting**: PyLint and Ruff
- **Type Checking**: MyPy
- **Lint/Type/Format Fixes**: Always prefer addressing the underlying issue (e.g., import the typed source, update shared stubs, align with Ruff expectations, or correct formatting at the source) before disabling a rule, adding `# type: ignore`, or skipping a formatter. Treat suppressions and `noqa` comments as a last resort once no compliant fix exists
- **Testing**: pytest with plain functions and fixtures
- **Language**: American English for all code, comments, and documentation (use sentence case, including titles)

.github/workflows/ci.yaml (vendored, 2 changes)

@@ -37,7 +37,7 @@ on:
      type: boolean

env:
-  CACHE_VERSION: 1
+  CACHE_VERSION: 9
  UV_CACHE_VERSION: 1
  MYPY_CACHE_VERSION: 1
  HA_SHORT_VERSION: "2025.11"

.github/workflows/codeql.yml (vendored, 4 changes)

@@ -24,11 +24,11 @@ jobs:
        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0

      - name: Initialize CodeQL
-        uses: github/codeql-action/init@16140ae1a102900babc80a33c44059580f687047 # v4.30.9
+        uses: github/codeql-action/init@f443b600d91635bebf5b0d9ebc620189c0d6fba5 # v4.30.8
        with:
          languages: python

      - name: Perform CodeQL Analysis
-        uses: github/codeql-action/analyze@16140ae1a102900babc80a33c44059580f687047 # v4.30.9
+        uses: github/codeql-action/analyze@f443b600d91635bebf5b0d9ebc620189c0d6fba5 # v4.30.8
        with:
          category: "/language:python"

.github/workflows/wheels.yml (vendored, 26 changes)

@@ -80,6 +80,8 @@ jobs:

+          # Add additional pip wheel build constraints
+          echo "PIP_CONSTRAINT=build_constraints.txt"

          echo 'CFLAGS="-Wno-error=int-conversion"'
          ) > .env_file

      - name: Write pip wheel build constraints
@@ -126,13 +128,13 @@ jobs:

  core:
    name: Build Core wheels ${{ matrix.abi }} for ${{ matrix.arch }} (musllinux_1_2)
-    if: github.repository_owner == 'home-assistant'
+    if: false && github.repository_owner == 'home-assistant'
    needs: init
    runs-on: ${{ matrix.os }}
    strategy:
      fail-fast: false
      matrix: &matrix-build
-        abi: ["cp313", "cp314"]
+        abi: ["cp314"]
        arch: ${{ fromJson(needs.init.outputs.architectures) }}
        include:
          - os: ubuntu-latest
@@ -219,9 +221,29 @@ jobs:
          sed -i "/uv/d" requirements.txt
          sed -i "/uv/d" requirements_diff.txt

+      - name: Create requirements file for custom build
+        run: |
+          touch requirements_custom.txt
+          echo "netifaces==0.11.0" >> requirements_custom.txt
+
+      - name: Build wheels (custom)
+        uses: cdce8p/wheels@master
+        with:
+          abi: ${{ matrix.abi }}
+          tag: musllinux_1_2
+          arch: ${{ matrix.arch }}
+          wheels-key: ${{ secrets.WHEELS_KEY }}
+          env-file: true
+          apk: "bluez-dev;libffi-dev;openssl-dev;glib-dev;eudev-dev;libxml2-dev;libxslt-dev;libpng-dev;libjpeg-turbo-dev;tiff-dev;cups-dev;gmp-dev;mpfr-dev;mpc1-dev;ffmpeg-dev;gammu-dev;yaml-dev;openblas-dev;fftw-dev;lapack-dev;gfortran;blas-dev;eigen-dev;freetype-dev;glew-dev;harfbuzz-dev;hdf5-dev;libdc1394-dev;libtbb-dev;mesa-dev;openexr-dev;openjpeg-dev;uchardet-dev;nasm;zlib-ng-dev"
+          skip-binary: aiohttp;charset-normalizer;grpcio;multidict;SQLAlchemy;propcache;protobuf;pymicro-vad;yarl
+          constraints: "homeassistant/package_constraints.txt"
+          requirements: "requirements_custom.txt"
+          verbose: true

      # home-assistant/wheels doesn't support sha pinning
      - name: Build wheels
        uses: *home-assistant-wheels
+        if: false
        with:
          abi: ${{ matrix.abi }}
          tag: musllinux_1_2

@@ -278,7 +278,6 @@ homeassistant.components.imap.*
homeassistant.components.imgw_pib.*
homeassistant.components.immich.*
homeassistant.components.incomfort.*
-homeassistant.components.inels.*
homeassistant.components.input_button.*
homeassistant.components.input_select.*
homeassistant.components.input_text.*
@@ -478,7 +477,6 @@ homeassistant.components.skybell.*
homeassistant.components.slack.*
homeassistant.components.sleep_as_android.*
homeassistant.components.sleepiq.*
homeassistant.components.sma.*
homeassistant.components.smhi.*
homeassistant.components.smlight.*
homeassistant.components.smtp.*

.vscode/settings.default.json (vendored, 2 changes)

@@ -7,8 +7,6 @@
  "python.testing.pytestEnabled": false,
  // https://code.visualstudio.com/docs/python/linting#_general-settings
  "pylint.importStrategy": "fromEnvironment",
  // Pyright is too pedantic for Home Assistant
  "python.analysis.typeCheckingMode": "basic",
  "json.schemas": [
    {
      "fileMatch": [

CODEOWNERS (generated, 2 changes)

@@ -741,8 +741,6 @@ build.json @home-assistant/supervisor
/tests/components/improv_ble/ @emontnemery
/homeassistant/components/incomfort/ @jbouwh
/tests/components/incomfort/ @jbouwh
-/homeassistant/components/inels/ @epdevlab
-/tests/components/inels/ @epdevlab
/homeassistant/components/influxdb/ @mdegat01
/tests/components/influxdb/ @mdegat01
/homeassistant/components/inkbird/ @bdraco

Dockerfile (generated, 4 changes)

@@ -25,13 +25,13 @@ RUN \
        "armv7") go2rtc_suffix='arm' ;; \
        *) go2rtc_suffix=${BUILD_ARCH} ;; \
    esac \
-    && curl -L https://github.com/AlexxIT/go2rtc/releases/download/v1.9.11/go2rtc_linux_${go2rtc_suffix} --output /bin/go2rtc \
+    && curl -L https://github.com/AlexxIT/go2rtc/releases/download/v1.9.9/go2rtc_linux_${go2rtc_suffix} --output /bin/go2rtc \
    && chmod +x /bin/go2rtc \
    # Verify go2rtc can be executed
    && go2rtc --version

# Install uv
-RUN pip3 install uv==0.9.5
+RUN pip3 install uv==0.8.9

WORKDIR /usr/src

@@ -5,6 +5,9 @@ build_from:
  armv7: ghcr.io/home-assistant/armv7-homeassistant-base:2025.10.1
  amd64: ghcr.io/home-assistant/amd64-homeassistant-base:2025.10.1
  i386: ghcr.io/home-assistant/i386-homeassistant-base:2025.10.1
codenotary:
  signer: notary@home-assistant.io
  base_image: notary@home-assistant.io
+cosign:
+  base_identity: https://github.com/home-assistant/docker/.*
+  identity: https://github.com/home-assistant/core/.*

@@ -7,5 +7,5 @@
  "integration_type": "device",
  "iot_class": "local_polling",
  "quality_scale": "silver",
-  "requirements": ["airos==0.6.0"]
+  "requirements": ["airos==0.5.6"]
}

@@ -8,5 +8,5 @@
  "iot_class": "cloud_polling",
  "loggers": ["aioamazondevices"],
  "quality_scale": "platinum",
-  "requirements": ["aioamazondevices==6.4.6"]
+  "requirements": ["aioamazondevices==6.4.4"]
}

@@ -8,5 +8,5 @@
  "iot_class": "cloud_polling",
  "loggers": ["bring_api"],
  "quality_scale": "platinum",
-  "requirements": ["bring-api==1.1.1"]
+  "requirements": ["bring-api==1.1.0"]
}

@@ -20,5 +20,5 @@
  "dependencies": ["bluetooth_adapters"],
  "documentation": "https://www.home-assistant.io/integrations/bthome",
  "iot_class": "local_push",
-  "requirements": ["bthome-ble==3.15.0"]
+  "requirements": ["bthome-ble==3.14.2"]
}

@@ -7,5 +7,5 @@
  "integration_type": "hub",
  "iot_class": "cloud_push",
  "quality_scale": "bronze",
-  "requirements": ["pycync==0.4.2"]
+  "requirements": ["pycync==0.4.1"]
}

@@ -80,7 +80,8 @@ async def async_setup_entry(
    )


-class DevoloScannerEntity(
+# The pylint disable is needed because of https://github.com/pylint-dev/pylint/issues/9138
+class DevoloScannerEntity(  # pylint: disable=hass-enforce-class-module
    CoordinatorEntity[DevoloDataUpdateCoordinator[dict[str, ConnectedStationInfo]]],
    ScannerEntity,
):

@@ -6,4 +6,4 @@ CONF_DEBUG_UI = "debug_ui"
DEBUG_UI_URL_MESSAGE = "Url and debug_ui cannot be set at the same time."
HA_MANAGED_API_PORT = 11984
HA_MANAGED_URL = f"http://localhost:{HA_MANAGED_API_PORT}/"
-RECOMMENDED_VERSION = "1.9.11"
+RECOMMENDED_VERSION = "1.9.9"

@@ -186,7 +186,6 @@ async def async_setup_entry(
class InverterSensor(CoordinatorEntity[GoodweUpdateCoordinator], SensorEntity):
    """Entity representing individual inverter sensor."""

    _attr_has_entity_name = True
    entity_description: GoodweSensorEntityDescription

    def __init__(

@@ -59,7 +59,6 @@ class GoogleGenerativeAITextToSpeechEntity(
        "en-US",
        "es-US",
        "fr-FR",
        "he-IL",
        "hi-IN",
        "id-ID",
        "it-IT",

@@ -72,7 +72,6 @@ PLATFORMS = [
    Platform.NOTIFY,
    Platform.SENSOR,
    Platform.SWITCH,
-    Platform.VALVE,
]

_LOGGER = logging.getLogger(__name__)

@@ -35,7 +35,6 @@ from .media_player import MediaPlayerGroup, async_create_preview_media_player
from .notify import async_create_preview_notify
from .sensor import async_create_preview_sensor
from .switch import async_create_preview_switch
-from .valve import async_create_preview_valve

_STATISTIC_MEASURES = [
    "last",
@@ -173,7 +172,6 @@ GROUP_TYPES = [
    "notify",
    "sensor",
    "switch",
-    "valve",
]


@@ -255,11 +253,6 @@ CONFIG_FLOW = {
        preview="group",
        validate_user_input=set_group_type("switch"),
    ),
-    "valve": SchemaFlowFormStep(
-        basic_group_config_schema("valve"),
-        preview="group",
-        validate_user_input=set_group_type("valve"),
-    ),
}


@@ -309,10 +302,6 @@ OPTIONS_FLOW = {
        partial(light_switch_options_schema, "switch"),
        preview="group",
    ),
-    "valve": SchemaFlowFormStep(
-        partial(basic_group_options_schema, "valve"),
-        preview="group",
-    ),
}

PREVIEW_OPTIONS_SCHEMA: dict[str, vol.Schema] = {}
@@ -332,7 +321,6 @@ CREATE_PREVIEW_ENTITY: dict[
    "notify": async_create_preview_notify,
    "sensor": async_create_preview_sensor,
    "switch": async_create_preview_switch,
-    "valve": async_create_preview_valve,
}

@@ -16,8 +16,7 @@
        "media_player": "Media player group",
        "notify": "Notify group",
        "sensor": "Sensor group",
-        "switch": "Switch group",
-        "valve": "Valve group"
+        "switch": "Switch group"
      }
    },
    "binary_sensor": {
@@ -128,18 +127,6 @@
          "data_description": {
            "all": "[%key:component::group::config::step::binary_sensor::data_description::all%]"
          }
        },
-        "valve": {
-          "title": "[%key:component::group::config::step::user::title%]",
-          "data": {
-            "all": "[%key:component::group::config::step::binary_sensor::data::all%]",
-            "entities": "[%key:component::group::config::step::binary_sensor::data::entities%]",
-            "hide_members": "[%key:component::group::config::step::binary_sensor::data::hide_members%]",
-            "name": "[%key:common::config_flow::data::name%]"
-          },
-          "data_description": {
-            "all": "[%key:component::group::config::step::binary_sensor::data_description::all%]"
-          }
-        }
      }
    },
@@ -225,16 +212,6 @@
          "data_description": {
            "all": "[%key:component::group::config::step::binary_sensor::data_description::all%]"
          }
        },
-        "valve": {
-          "data": {
-            "all": "[%key:component::group::config::step::binary_sensor::data::all%]",
-            "entities": "[%key:component::group::config::step::binary_sensor::data::entities%]",
-            "hide_members": "[%key:component::group::config::step::binary_sensor::data::hide_members%]"
-          },
-          "data_description": {
-            "all": "[%key:component::group::config::step::binary_sensor::data_description::all%]"
-          }
-        }
      }
    },

@@ -1,262 +0,0 @@
"""Platform allowing several valves to be grouped into one valve."""

from __future__ import annotations

from typing import Any

import voluptuous as vol

from homeassistant.components.valve import (
    ATTR_CURRENT_POSITION,
    ATTR_POSITION,
    DOMAIN as VALVE_DOMAIN,
    PLATFORM_SCHEMA as VALVE_PLATFORM_SCHEMA,
    ValveEntity,
    ValveEntityFeature,
    ValveState,
)
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import (
    ATTR_ENTITY_ID,
    ATTR_SUPPORTED_FEATURES,
    CONF_ENTITIES,
    CONF_NAME,
    CONF_UNIQUE_ID,
    SERVICE_CLOSE_VALVE,
    SERVICE_OPEN_VALVE,
    SERVICE_SET_VALVE_POSITION,
    SERVICE_STOP_VALVE,
    STATE_UNAVAILABLE,
    STATE_UNKNOWN,
)
from homeassistant.core import HomeAssistant, State, callback
from homeassistant.helpers import config_validation as cv, entity_registry as er
from homeassistant.helpers.entity_platform import (
    AddConfigEntryEntitiesCallback,
    AddEntitiesCallback,
)
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType

from .entity import GroupEntity
from .util import reduce_attribute

KEY_OPEN_CLOSE = "open_close"
KEY_STOP = "stop"
KEY_SET_POSITION = "set_position"

DEFAULT_NAME = "Valve Group"

# No limit on parallel updates to enable a group calling another group
PARALLEL_UPDATES = 0

PLATFORM_SCHEMA = VALVE_PLATFORM_SCHEMA.extend(
    {
        vol.Required(CONF_ENTITIES): cv.entities_domain(VALVE_DOMAIN),
        vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
        vol.Optional(CONF_UNIQUE_ID): cv.string,
    }
)


async def async_setup_platform(
    hass: HomeAssistant,
    config: ConfigType,
    async_add_entities: AddEntitiesCallback,
    discovery_info: DiscoveryInfoType | None = None,
) -> None:
    """Set up the Valve Group platform."""
    async_add_entities(
        [
            ValveGroup(
                config.get(CONF_UNIQUE_ID), config[CONF_NAME], config[CONF_ENTITIES]
            )
        ]
    )


async def async_setup_entry(
    hass: HomeAssistant,
    config_entry: ConfigEntry,
    async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
    """Initialize Valve Group config entry."""
    registry = er.async_get(hass)
    entities = er.async_validate_entity_ids(
        registry, config_entry.options[CONF_ENTITIES]
    )

    async_add_entities(
        [ValveGroup(config_entry.entry_id, config_entry.title, entities)]
    )


@callback
def async_create_preview_valve(
    hass: HomeAssistant, name: str, validated_config: dict[str, Any]
) -> ValveGroup:
    """Create a preview valve."""
    return ValveGroup(
        None,
        name,
        validated_config[CONF_ENTITIES],
    )


class ValveGroup(GroupEntity, ValveEntity):
    """Representation of a ValveGroup."""

    _attr_available: bool = False
    _attr_current_valve_position: int | None = None
    _attr_is_closed: bool | None = None
    _attr_is_closing: bool | None = False
    _attr_is_opening: bool | None = False
    _attr_reports_position: bool = False

    def __init__(self, unique_id: str | None, name: str, entities: list[str]) -> None:
        """Initialize a ValveGroup entity."""
        self._entity_ids = entities
        self._valves: dict[str, set[str]] = {
            KEY_OPEN_CLOSE: set(),
            KEY_STOP: set(),
            KEY_SET_POSITION: set(),
        }

        self._attr_name = name
        self._attr_extra_state_attributes = {ATTR_ENTITY_ID: entities}
        self._attr_unique_id = unique_id

    @callback
    def async_update_supported_features(
        self,
        entity_id: str,
        new_state: State | None,
    ) -> None:
        """Update dictionaries with supported features."""
        if not new_state:
            for values in self._valves.values():
                values.discard(entity_id)
            return

        features = new_state.attributes.get(ATTR_SUPPORTED_FEATURES, 0)

        if features & (ValveEntityFeature.OPEN | ValveEntityFeature.CLOSE):
            self._valves[KEY_OPEN_CLOSE].add(entity_id)
        else:
            self._valves[KEY_OPEN_CLOSE].discard(entity_id)
        if features & (ValveEntityFeature.STOP):
            self._valves[KEY_STOP].add(entity_id)
        else:
            self._valves[KEY_STOP].discard(entity_id)
        if features & (ValveEntityFeature.SET_POSITION):
            self._valves[KEY_SET_POSITION].add(entity_id)
        else:
            self._valves[KEY_SET_POSITION].discard(entity_id)

    async def async_open_valve(self) -> None:
        """Open the valves."""
        data = {ATTR_ENTITY_ID: self._valves[KEY_OPEN_CLOSE]}
        await self.hass.services.async_call(
            VALVE_DOMAIN, SERVICE_OPEN_VALVE, data, blocking=True, context=self._context
        )

    async def async_handle_open_valve(self) -> None:  # type: ignore[misc]
        """Open the valves.

        Override the base class to avoid calling the set position service
        for all valves. Transfer the service call to the base class and let
        it decide if the valve uses set position or open service.
        """
        await self.async_open_valve()

    async def async_close_valve(self) -> None:
        """Close valves."""
        data = {ATTR_ENTITY_ID: self._valves[KEY_OPEN_CLOSE]}
        await self.hass.services.async_call(
            VALVE_DOMAIN,
            SERVICE_CLOSE_VALVE,
            data,
            blocking=True,
            context=self._context,
        )

    async def async_handle_close_valve(self) -> None:  # type: ignore[misc]
        """Close the valves.

        Override the base class to avoid calling the set position service
        for all valves. Transfer the service call to the base class and let
        it decide if the valve uses set position or close service.
        """
        await self.async_close_valve()

    async def async_set_valve_position(self, position: int) -> None:
        """Move the valves to a specific position."""
        data = {
            ATTR_ENTITY_ID: self._valves[KEY_SET_POSITION],
            ATTR_POSITION: position,
        }
        await self.hass.services.async_call(
            VALVE_DOMAIN,
            SERVICE_SET_VALVE_POSITION,
            data,
            blocking=True,
            context=self._context,
        )

    async def async_stop_valve(self) -> None:
        """Stop the valves."""
        data = {ATTR_ENTITY_ID: self._valves[KEY_STOP]}
        await self.hass.services.async_call(
            VALVE_DOMAIN, SERVICE_STOP_VALVE, data, blocking=True, context=self._context
        )

    @callback
    def async_update_group_state(self) -> None:
        """Update state and attributes."""
        states = [
            state
            for entity_id in self._entity_ids
            if (state := self.hass.states.get(entity_id)) is not None
        ]

        # Set group as unavailable if all members are unavailable or missing
        self._attr_available = any(state.state != STATE_UNAVAILABLE for state in states)

        self._attr_is_closed = True
        self._attr_is_closing = False
        self._attr_is_opening = False
        self._attr_reports_position = False
        self._update_assumed_state_from_members()
        for state in states:
            if state.attributes.get(ATTR_CURRENT_POSITION) is not None:
                self._attr_reports_position = True
            if state.state == ValveState.OPEN:
                self._attr_is_closed = False
                continue
            if state.state == ValveState.CLOSED:
                continue
            if state.state == ValveState.CLOSING:
                self._attr_is_closing = True
                continue
            if state.state == ValveState.OPENING:
                self._attr_is_opening = True
                continue

        valid_state = any(
            state.state not in (STATE_UNKNOWN, STATE_UNAVAILABLE) for state in states
        )
        if not valid_state:
            # Set as unknown if all members are unknown or unavailable
            self._attr_is_closed = None

        self._attr_current_valve_position = reduce_attribute(
            states, ATTR_CURRENT_POSITION
        )

        supported_features = ValveEntityFeature(0)
        if self._valves[KEY_OPEN_CLOSE]:
            supported_features |= ValveEntityFeature.OPEN | ValveEntityFeature.CLOSE
        if self._valves[KEY_STOP]:
            supported_features |= ValveEntityFeature.STOP
        if self._valves[KEY_SET_POSITION]:
            supported_features |= ValveEntityFeature.SET_POSITION
        self._attr_supported_features = supported_features
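
The deleted `ValveGroup` above derives the group's capabilities by OR-ing `ValveEntityFeature` flags from whichever members support each service. A minimal, self-contained sketch of that aggregation pattern; the `Feature` flag class and its values here are illustrative stand-ins, not the real `ValveEntityFeature` numbers:

```python
from enum import IntFlag


class Feature(IntFlag):
    """Illustrative stand-in for ValveEntityFeature."""

    OPEN = 1
    CLOSE = 2
    STOP = 4
    SET_POSITION = 8


def aggregate_features(members: dict[str, Feature]) -> Feature:
    """The group advertises a capability as soon as any member provides it."""
    supported = Feature(0)
    for features in members.values():
        # OPEN and CLOSE travel together, mirroring the group's open/close bucket.
        if features & (Feature.OPEN | Feature.CLOSE):
            supported |= Feature.OPEN | Feature.CLOSE
        if features & Feature.STOP:
            supported |= Feature.STOP
        if features & Feature.SET_POSITION:
            supported |= Feature.SET_POSITION
    return supported


print(aggregate_features({"valve.a": Feature.OPEN | Feature.CLOSE, "valve.b": Feature.STOP}))
# Feature.OPEN|CLOSE|STOP (exact repr varies by Python version)
```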

@@ -3,15 +3,18 @@
from __future__ import annotations

import logging
+import sys

from homeassistant.components.remote import ATTR_ACTIVITY, ATTR_DELAY_SECS
from homeassistant.const import CONF_HOST, CONF_NAME, EVENT_HOMEASSISTANT_STOP
from homeassistant.core import Event, HomeAssistant, callback
+from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers import entity_registry as er
from homeassistant.helpers.dispatcher import async_dispatcher_send

-from .const import HARMONY_OPTIONS_UPDATE, PLATFORMS
-from .data import HarmonyConfigEntry, HarmonyData
+if sys.version_info < (3, 14):
+    from .const import HARMONY_OPTIONS_UPDATE, PLATFORMS
+    from .data import HarmonyConfigEntry, HarmonyData

_LOGGER = logging.getLogger(__name__)

@@ -22,6 +25,10 @@ async def async_setup_entry(hass: HomeAssistant, entry: HarmonyConfigEntry) -> b
    # when setting up a config entry, we fallback to adding
    # the options to the config entry and pull them out here if
    # they are missing from the options
+    if sys.version_info >= (3, 14):
+        raise HomeAssistantError(
+            "Logitech Harmony Hub is not supported on Python 3.14. Please use Python 3.13."
+        )
    _async_import_options_from_data_if_missing(hass, entry)

    address = entry.data[CONF_HOST]

@@ -7,7 +7,7 @@
  "documentation": "https://www.home-assistant.io/integrations/harmony",
  "iot_class": "local_push",
  "loggers": ["aioharmony", "slixmpp"],
-  "requirements": ["aioharmony==0.5.3"],
+  "requirements": ["aioharmony==0.5.3;python_version<'3.14'"],
  "ssdp": [
    {
      "manufacturer": "Logitech",

@@ -94,14 +94,13 @@ async def async_setup_entry(hass: HomeAssistant, entry: HomeeConfigEntry) -> boo
    for device in devices:
        # Check if the device is still present in homee
        device_identifiers = {identifier[1] for identifier in device.identifiers}
-        # homee itself uses just the uid, nodes use {uid}-{nodeid}
-        if homee.settings.uid in device_identifiers:
-            continue  # Hub itself is never removed.
+        # homee itself uses just the uid, nodes use uid-nodeid
+        is_homee_hub = homee.settings.uid in device_identifiers
        is_node_present = any(
            f"{homee.settings.uid}-{node.id}" in device_identifiers
            for node in homee.nodes
        )
-        if not is_node_present:
+        if not is_node_present and not is_homee_hub:
            _LOGGER.info("Removing device %s", device.name)
            device_registry.async_update_device(
                device_id=device.id,
@@ -111,17 +110,16 @@ async def async_setup_entry(hass: HomeAssistant, entry: HomeeConfigEntry) -> boo
    # Remove device at runtime when node is removed in homee
    async def _remove_node_callback(node: HomeeNode, add: bool) -> None:
        """Call when a node is removed."""
-        if add:
-            return
-        device = device_registry.async_get_device(
-            identifiers={(DOMAIN, f"{entry.runtime_data.settings.uid}-{node.id}")}
-        )
-        if device:
-            _LOGGER.info("Removing device %s", device.name)
-            device_registry.async_update_device(
-                device_id=device.id,
-                remove_config_entry_id=entry.entry_id,
+        if not add:
+            device = device_registry.async_get_device(
+                identifiers={(DOMAIN, f"{entry.runtime_data.settings.uid}-{node.id}")}
+            )
+            if device:
+                _LOGGER.info("Removing device %s", device.name)
+                device_registry.async_update_device(
+                    device_id=device.id,
+                    remove_config_entry_id=entry.entry_id,
                )

    homee.add_nodes_listener(_remove_node_callback)
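
The stale-device cleanup above keeps the hub (identified by the bare uid) and every device matching a present `uid-nodeid` identifier, and removes the rest. A standalone sketch of that filtering logic, with plain dicts standing in for the device registry:

```python
def stale_device_ids(
    registered: dict[str, set[str]], hub_uid: str, node_ids: list[int]
) -> list[str]:
    """Return ids of registered devices matching neither the hub nor any present node."""
    present = {f"{hub_uid}-{node_id}" for node_id in node_ids}
    stale: list[str] = []
    for device_id, identifiers in registered.items():
        is_hub = hub_uid in identifiers  # the hub itself is never removed
        has_node = bool(identifiers & present)
        if not is_hub and not has_node:
            stale.append(device_id)
    return stale


print(stale_device_ids(
    {"dev1": {"uid123"}, "dev2": {"uid123-7"}, "dev3": {"uid123-9"}},
    hub_uid="uid123",
    node_ids=[7],
))
# ['dev3']  # the hub (dev1) and the present node (dev2) are kept
```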

@@ -43,22 +43,18 @@ def async_setup_forwarded(
    some proxies, for example, Kubernetes NGINX ingress, only retain one element
    in the X-Forwarded-Proto header. In that case, we'll just use what we have.

-    `X-Forwarded-Host: <host1>, <host2>, <host3>`
-    e.g., `X-Forwarded-Host: example.com, proxy.example.com, backend.example.com`
-    OR `X-Forwarded-Host: example.com` (one entry, even with multiple proxies)
+    `X-Forwarded-Host: <host>`
+    e.g., `X-Forwarded-Host: example.com`

    If the previous headers are processed successfully, and the X-Forwarded-Host is
-    present, the last one in the list will be used (set by the proxy nearest to the backend).
-
-    Multiple headers are valid as stated in https://www.rfc-editor.org/rfc/rfc7239#section-7.1
-    If multiple headers are present, they are handled according to
-    https://developer.mozilla.org/en-US/docs/Web/HTTP/Reference/Headers/X-Forwarded-For#parsing
-    > "split each X-Forwarded-For header by comma into lists and then join the lists."
+    present, it will be used.

    Additionally:
    - If no X-Forwarded-For header is found, the processing of all headers is skipped.
    - Throw HTTP 400 status when untrusted connected peer provides
      X-Forwarded-For headers.
+    - If multiple instances of X-Forwarded-For, X-Forwarded-Proto or
+      X-Forwarded-Host are found, an HTTP 400 status code is thrown.
    - If malformed or invalid (IP) data in X-Forwarded-For header is found,
      an HTTP 400 status code is thrown.
    - The connected client peer on the socket of the incoming connection,
@@ -115,12 +111,15 @@ def async_setup_forwarded(
            )
            raise HTTPBadRequest

-        # Process multiple X-Forwarded-For from the right side (by reversing the list)
-        forwarded_for_split = list(
-            reversed(
-                [addr for header in forwarded_for_headers for addr in header.split(",")]
-            )
-        )
+        # Multiple X-Forwarded-For headers
+        if len(forwarded_for_headers) > 1:
+            _LOGGER.error(
+                "Too many headers for X-Forwarded-For: %s", forwarded_for_headers
+            )
+            raise HTTPBadRequest
+
+        # Process X-Forwarded-For from the right side (by reversing the list)
+        forwarded_for_split = list(reversed(forwarded_for_headers[0].split(",")))
        try:
            forwarded_for = [ip_address(addr.strip()) for addr in forwarded_for_split]
        except ValueError as err:
@@ -149,15 +148,14 @@ def async_setup_forwarded(
            X_FORWARDED_PROTO, []
        )
        if forwarded_proto_headers:
-            # Process multiple X-Forwarded-Proto from the right side (by reversing the list)
-            forwarded_proto_split = list(
-                reversed(
-                    [
-                        addr
-                        for header in forwarded_proto_headers
-                        for addr in header.split(",")
-                    ]
-                )
-            )
+            if len(forwarded_proto_headers) > 1:
+                _LOGGER.error(
+                    "Too many headers for X-Forward-Proto: %s", forwarded_proto_headers
+                )
+                raise HTTPBadRequest
+
+            forwarded_proto_split = list(
+                reversed(forwarded_proto_headers[0].split(","))
+            )
            forwarded_proto = [proto.strip() for proto in forwarded_proto_split]
@@ -193,16 +191,14 @@ def async_setup_forwarded(
        # Handle X-Forwarded-Host
        forwarded_host_headers: list[str] = request.headers.getall(X_FORWARDED_HOST, [])
        if forwarded_host_headers:
-            # Process multiple X-Forwarded-Host from the right side (by reversing the list)
-            forwarded_host = list(
-                reversed(
-                    [
-                        addr.strip()
-                        for header in forwarded_host_headers
-                        for addr in header.split(",")
-                    ]
-                )
-            )[0]
+            # Multiple X-Forwarded-Host headers
+            if len(forwarded_host_headers) > 1:
+                _LOGGER.error(
+                    "Too many headers for X-Forwarded-Host: %s", forwarded_host_headers
+                )
+                raise HTTPBadRequest
+
+            forwarded_host = forwarded_host_headers[0].strip()
            if not forwarded_host:
                _LOGGER.error("Empty value received in X-Forward-Host header")
                raise HTTPBadRequest
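
The middleware change above replaces joining of repeated X-Forwarded-* headers with a hard rejection, then walks the single remaining header right to left (the right-most entry was appended by the proxy closest to the backend). A standalone sketch of that parsing rule, assuming a plain list of header values rather than an aiohttp request object:

```python
from ipaddress import IPv4Address, IPv6Address, ip_address


def parse_forwarded_for(headers: list[str]) -> list[IPv4Address | IPv6Address]:
    """Parse X-Forwarded-For right to left, rejecting multiple header instances."""
    if len(headers) != 1:
        raise ValueError(f"Expected exactly one X-Forwarded-For header, got {headers}")
    # ip_address() raises ValueError on malformed entries, matching the HTTP 400 path.
    return [ip_address(part.strip()) for part in reversed(headers[0].split(","))]


print(parse_forwarded_for(["203.0.113.5, 10.0.0.2"]))
# [IPv4Address('10.0.0.2'), IPv4Address('203.0.113.5')]
```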

@@ -41,7 +41,7 @@ rules:
  reauthentication-flow: done
  test-coverage:
    status: todo
-    comment: Get percentage up there, add missing actual action press invocations in button tests' suspended state tests.
+    comment: Get percentage up there, add missing actual action press invocations in button tests' suspended state tests, rename test_switch.py to test_switch.py + make its functions receive hass as first parameter where applicable.

  # Gold
  devices: done

@@ -112,7 +112,7 @@ class AutomowerButtonEntity(AutomowerControlEntity, ButtonEntity):
            self.mower_attributes
        )

-    @handle_sending_exception
+    @handle_sending_exception()
    async def async_press(self) -> None:
        """Send a command to the mower."""
        await self.entity_description.press_fn(self.coordinator.api, self.mower_id)

@@ -182,6 +182,14 @@ class AutomowerDataUpdateCoordinator(DataUpdateCoordinator[MowerDictionary]):
                "Failed to listen to websocket. Trying to reconnect: %s",
                err,
            )
+            if not hass.is_stopping:
+                await asyncio.sleep(self.reconnect_time)
+                self.reconnect_time = min(self.reconnect_time * 2, MAX_WS_RECONNECT_TIME)
+                entry.async_create_background_task(
+                    hass,
+                    self.client_listen(hass, entry, automower_client),
+                    "reconnect_task",
+                )

    def _should_poll(self) -> bool:
        """Return True if at least one mower is connected and at least one is not OFF."""
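
The added coordinator block above doubles `reconnect_time` after every failed websocket listen, capped at `MAX_WS_RECONNECT_TIME`. A minimal sketch of the same exponential-backoff loop, with hypothetical names and no Home Assistant dependencies:

```python
import asyncio
from collections.abc import Awaitable, Callable

MAX_RECONNECT_TIME = 300  # seconds; plays the role of MAX_WS_RECONNECT_TIME


async def listen_forever(listen_once: Callable[[], Awaitable[None]]) -> None:
    """Re-run a connection listener, doubling the delay after each failure."""
    reconnect_time = 2
    while True:
        try:
            await listen_once()
            reconnect_time = 2  # a clean exit resets the backoff
        except ConnectionError as err:
            print(f"Listener failed ({err}); retrying in {reconnect_time}s")
            await asyncio.sleep(reconnect_time)
            reconnect_time = min(reconnect_time * 2, MAX_RECONNECT_TIME)


async def flaky() -> None:
    raise ConnectionError("socket closed")

# asyncio.run(listen_forever(flaky))  # would retry with 2s, 4s, 8s, ... delays
```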

@@ -6,7 +6,7 @@ import asyncio
from collections.abc import Callable, Coroutine
import functools
import logging
-from typing import TYPE_CHECKING, Any, Concatenate, ParamSpec, TypeVar, overload
+from typing import TYPE_CHECKING, Any, Concatenate

from aioautomower.exceptions import ApiError
from aioautomower.model import MowerActivities, MowerAttributes, MowerStates, WorkArea
@@ -37,42 +37,23 @@ ERROR_STATES = [
]


-_Entity = TypeVar("_Entity", bound="AutomowerBaseEntity")
-_P = ParamSpec("_P")
+@callback
+def _work_area_translation_key(work_area_id: int, key: str) -> str:
+    """Return the translation key."""
+    if work_area_id == 0:
+        return f"my_lawn_{key}"
+    return f"work_area_{key}"


-@overload
-def handle_sending_exception(
-    _func: Callable[Concatenate[_Entity, _P], Coroutine[Any, Any, Any]],
-) -> Callable[Concatenate[_Entity, _P], Coroutine[Any, Any, None]]: ...
+type _FuncType[_T, **_P, _R] = Callable[Concatenate[_T, _P], Coroutine[Any, Any, _R]]


-@overload
-def handle_sending_exception(
-    *,
+def handle_sending_exception[_Entity: AutomowerBaseEntity, **_P](
    poll_after_sending: bool = False,
-) -> Callable[
-    [Callable[Concatenate[_Entity, _P], Coroutine[Any, Any, Any]]],
-    Callable[Concatenate[_Entity, _P], Coroutine[Any, Any, None]],
-]: ...
-
-
-def handle_sending_exception(
-    _func: Callable[Concatenate[_Entity, _P], Coroutine[Any, Any, Any]] | None = None,
-    *,
-    poll_after_sending: bool = False,
-) -> (
-    Callable[Concatenate[_Entity, _P], Coroutine[Any, Any, None]]
-    | Callable[
-        [Callable[Concatenate[_Entity, _P], Coroutine[Any, Any, Any]]],
-        Callable[Concatenate[_Entity, _P], Coroutine[Any, Any, None]],
-    ]
-):
+) -> Callable[[_FuncType[_Entity, _P, Any]], _FuncType[_Entity, _P, None]]:
    """Handle exceptions while sending a command and optionally refresh coordinator."""

-    def decorator(
-        func: Callable[Concatenate[_Entity, _P], Coroutine[Any, Any, Any]],
-    ) -> Callable[Concatenate[_Entity, _P], Coroutine[Any, Any, None]]:
+    def decorator(func: _FuncType[_Entity, _P, Any]) -> _FuncType[_Entity, _P, None]:
        @functools.wraps(func)
        async def wrapper(self: _Entity, *args: _P.args, **kwargs: _P.kwargs) -> None:
            try:
@@ -92,20 +73,7 @@ def handle_sending_exception(

        return wrapper

-    if _func is None:
-        # call with brackets: @handle_sending_exception(...)
-        return decorator
-
-    # call without brackets: @handle_sending_exception
-    return decorator(_func)
-
-
-@callback
-def _work_area_translation_key(work_area_id: int, key: str) -> str:
-    """Return the translation key."""
-    if work_area_id == 0:
-        return f"my_lawn_{key}"
-    return f"work_area_{key}"
+    return decorator


class AutomowerBaseEntity(CoordinatorEntity[AutomowerDataUpdateCoordinator]):
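
The entity.py change above collapses a `ParamSpec`/`TypeVar` plus `@overload` decorator (usable with or without parentheses) into a single PEP 695 generic decorator factory that must always be called with parentheses, which is why every `@handle_sending_exception` usage in the following hunks gains `()`. A standalone sketch of the pattern (Python 3.12+; hypothetical names, and the error handling is deliberately simplified):

```python
import asyncio
import functools
from collections.abc import Callable, Coroutine
from typing import Any, Concatenate


class BaseEntity:
    """Stand-in for the real entity base class."""


type _FuncType[_T, **_P, _R] = Callable[Concatenate[_T, _P], Coroutine[Any, Any, _R]]


def catch_send_errors[_E: BaseEntity, **_P](
    poll_after_sending: bool = False,
) -> Callable[[_FuncType[_E, _P, Any]], _FuncType[_E, _P, None]]:
    """Decorator factory: always invoked with parentheses, even without arguments."""

    def decorator(func: _FuncType[_E, _P, Any]) -> _FuncType[_E, _P, None]:
        @functools.wraps(func)
        async def wrapper(self: _E, *args: _P.args, **kwargs: _P.kwargs) -> None:
            try:
                await func(self, *args, **kwargs)
            except RuntimeError as err:  # stand-in for the library's API error
                print(f"Command failed: {err}")
            if poll_after_sending:
                print("Would refresh the coordinator here")

        return wrapper

    return decorator


class Mower(BaseEntity):
    @catch_send_errors()  # parentheses now required
    async def async_press(self) -> None:
        raise RuntimeError("boom")


asyncio.run(Mower().async_press())  # prints "Command failed: boom" instead of raising
```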

@@ -135,22 +135,22 @@ class AutomowerLawnMowerEntity(AutomowerBaseEntity, LawnMowerEntity):
        """Return the work areas of the mower."""
        return self.mower_attributes.work_areas

-    @handle_sending_exception
+    @handle_sending_exception()
    async def async_start_mowing(self) -> None:
        """Resume schedule."""
        await self.coordinator.api.commands.resume_schedule(self.mower_id)

-    @handle_sending_exception
+    @handle_sending_exception()
    async def async_pause(self) -> None:
        """Pauses the mower."""
        await self.coordinator.api.commands.pause_mowing(self.mower_id)

-    @handle_sending_exception
+    @handle_sending_exception()
    async def async_dock(self) -> None:
        """Parks the mower until next schedule."""
        await self.coordinator.api.commands.park_until_next_schedule(self.mower_id)

-    @handle_sending_exception
+    @handle_sending_exception()
    async def async_override_schedule(
        self, override_mode: str, duration: timedelta
    ) -> None:
@@ -160,7 +160,7 @@ class AutomowerLawnMowerEntity(AutomowerBaseEntity, LawnMowerEntity):
        if override_mode == PARK:
            await self.coordinator.api.commands.park_for(self.mower_id, duration)

-    @handle_sending_exception
+    @handle_sending_exception()
    async def async_override_schedule_work_area(
        self, work_area_id: int, duration: timedelta
    ) -> None:

@@ -9,5 +9,5 @@
  "iot_class": "cloud_push",
  "loggers": ["aioautomower"],
  "quality_scale": "silver",
-  "requirements": ["aioautomower==2.5.0"]
+  "requirements": ["aioautomower==2.2.1"]
}

@@ -67,7 +67,7 @@ class AutomowerSelectEntity(AutomowerControlEntity, SelectEntity):
        """Return the current option for the entity."""
        return cast(HeadlightModes, self.mower_attributes.settings.headlight.mode)

-    @handle_sending_exception
+    @handle_sending_exception()
    async def async_select_option(self, option: str) -> None:
        """Change the selected option."""
        await self.coordinator.api.commands.set_headlight_mode(

@@ -108,12 +108,12 @@ class AutomowerScheduleSwitchEntity(AutomowerControlEntity, SwitchEntity):
        """Return the state of the switch."""
        return self.mower_attributes.mower.mode != MowerModes.HOME

-    @handle_sending_exception
+    @handle_sending_exception()
    async def async_turn_off(self, **kwargs: Any) -> None:
        """Turn the entity off."""
        await self.coordinator.api.commands.park_until_further_notice(self.mower_id)

-    @handle_sending_exception
+    @handle_sending_exception()
    async def async_turn_on(self, **kwargs: Any) -> None:
        """Turn the entity on."""
        await self.coordinator.api.commands.resume_schedule(self.mower_id)

@@ -1,95 +0,0 @@
"""The iNELS integration."""

from __future__ import annotations

from collections.abc import Callable
from dataclasses import dataclass
from typing import Any

from inelsmqtt import InelsMqtt
from inelsmqtt.devices import Device
from inelsmqtt.discovery import InelsDiscovery

from homeassistant.components import mqtt as ha_mqtt
from homeassistant.components.mqtt import (
    ReceiveMessage,
    async_prepare_subscribe_topics,
    async_subscribe_topics,
    async_unsubscribe_topics,
)
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant, callback
from homeassistant.exceptions import ConfigEntryNotReady

from .const import LOGGER, PLATFORMS

type InelsConfigEntry = ConfigEntry[InelsData]


@dataclass
class InelsData:
    """Represents the data structure for INELS runtime data."""

    mqtt: InelsMqtt
    devices: list[Device]


async def async_setup_entry(hass: HomeAssistant, entry: InelsConfigEntry) -> bool:
    """Set up iNELS from a config entry."""

    async def mqtt_publish(topic: str, payload: str, qos: int, retain: bool) -> None:
        """Publish an MQTT message using the Home Assistant MQTT client."""
        await ha_mqtt.async_publish(hass, topic, payload, qos, retain)

    async def mqtt_subscribe(
        sub_state: dict[str, Any] | None,
        topic: str,
        callback_func: Callable[[str, str], None],
    ) -> dict[str, Any]:
        """Subscribe to MQTT topics using the Home Assistant MQTT client."""

        @callback
        def mqtt_message_received(msg: ReceiveMessage) -> None:
            """Handle iNELS mqtt messages."""
            # Payload is always str at runtime since we don't set encoding=None
            # HA uses UTF-8 by default
            callback_func(msg.topic, msg.payload)  # type: ignore[arg-type]

        topics = {
            "inels_subscribe_topic": {
                "topic": topic,
                "msg_callback": mqtt_message_received,
            }
        }

        sub_state = async_prepare_subscribe_topics(hass, sub_state, topics)
        await async_subscribe_topics(hass, sub_state)
        return sub_state

    async def mqtt_unsubscribe(sub_state: dict[str, Any]) -> None:
        async_unsubscribe_topics(hass, sub_state)

    if not await ha_mqtt.async_wait_for_mqtt_client(hass):
        LOGGER.error("MQTT integration not available")
        raise ConfigEntryNotReady("MQTT integration not available")

    inels_mqtt = InelsMqtt(mqtt_publish, mqtt_subscribe, mqtt_unsubscribe)
    devices: list[Device] = await InelsDiscovery(inels_mqtt).start()

    # If no devices are discovered, continue with the setup
    if not devices:
        LOGGER.info("No devices discovered")

    entry.runtime_data = InelsData(mqtt=inels_mqtt, devices=devices)

    await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)

    return True


async def async_unload_entry(hass: HomeAssistant, entry: InelsConfigEntry) -> bool:
    """Unload a config entry."""
    await entry.runtime_data.mqtt.unsubscribe_topics()
    entry.runtime_data.mqtt.unsubscribe_listeners()

    return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
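
The deleted setup above hands the vendor library plain async callables (`mqtt_publish`, `mqtt_subscribe`, `mqtt_unsubscribe`) bound to Home Assistant's shared MQTT client, so the library never opens its own broker connection. A minimal sketch of that transport-injection idea; all names, topics, and payloads here are made up for illustration:

```python
import asyncio
from collections.abc import Awaitable, Callable

PublishFn = Callable[[str, str, int, bool], Awaitable[None]]


class TransportAgnosticClient:
    """Vendor-library side: all broker I/O goes through injected callables."""

    def __init__(self, publish: PublishFn) -> None:
        self._publish = publish

    async def set_relay(self, device_id: str, on: bool) -> None:
        # The client never opens its own connection; it reuses the host's.
        await self._publish(f"inels/set/{device_id}", "02" if on else "01", 0, False)


async def main() -> None:
    async def publish(topic: str, payload: str, qos: int, retain: bool) -> None:
        print(f"publish {topic} qos={qos} retain={retain}: {payload}")

    await TransportAgnosticClient(publish).set_relay("02E8", on=True)


asyncio.run(main())
```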

@@ -1,73 +0,0 @@
"""Config flow for iNELS."""

from __future__ import annotations

from typing import Any

from homeassistant.components import mqtt
from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
from homeassistant.helpers.service_info.mqtt import MqttServiceInfo

from .const import DOMAIN, TITLE


class INelsConfigFlow(ConfigFlow, domain=DOMAIN):
    """Handle of iNELS config flow."""

    VERSION = 1

    async def async_step_mqtt(
        self, discovery_info: MqttServiceInfo
    ) -> ConfigFlowResult:
        """Handle a flow initialized by MQTT discovery."""
        if self._async_in_progress():
            return self.async_abort(reason="already_in_progress")

        # Validate the message, abort if it fails.
        if not discovery_info.topic.endswith("/gw"):
            # Not an iNELS discovery message.
            return self.async_abort(reason="invalid_discovery_info")
        if not discovery_info.payload:
            # Empty payload, unexpected payload.
            return self.async_abort(reason="invalid_discovery_info")

        return await self.async_step_confirm_from_mqtt()

    async def async_step_user(
        self, user_input: dict[str, Any] | None = None
    ) -> ConfigFlowResult:
        """Handle a flow initialized by the user."""
        try:
            if not mqtt.is_connected(self.hass):
                return self.async_abort(reason="mqtt_not_connected")
        except KeyError:
            return self.async_abort(reason="mqtt_not_configured")

        return await self.async_step_confirm_from_user()

    async def step_confirm(
        self, step_id: str, user_input: dict[str, Any] | None = None
    ) -> ConfigFlowResult:
        """Confirm the setup."""

        if user_input is not None:
            await self.async_set_unique_id(DOMAIN)
            return self.async_create_entry(title=TITLE, data={})

        return self.async_show_form(step_id=step_id)

    async def async_step_confirm_from_mqtt(
        self, user_input: dict[str, Any] | None = None
    ) -> ConfigFlowResult:
        """Confirm the setup from MQTT discovered."""
        return await self.step_confirm(
            step_id="confirm_from_mqtt", user_input=user_input
        )

    async def async_step_confirm_from_user(
        self, user_input: dict[str, Any] | None = None
    ) -> ConfigFlowResult:
        """Confirm the setup from user add integration."""
        return await self.step_confirm(
            step_id="confirm_from_user", user_input=user_input
        )

@@ -1,14 +0,0 @@
"""Constants for the iNELS integration."""

import logging

from homeassistant.const import Platform

DOMAIN = "inels"
TITLE = "iNELS"

PLATFORMS: list[Platform] = [
    Platform.SWITCH,
]

LOGGER = logging.getLogger(__package__)

@@ -1,61 +0,0 @@
"""Base class for iNELS components."""

from __future__ import annotations

from inelsmqtt.devices import Device

from homeassistant.helpers.device_registry import DeviceInfo
from homeassistant.helpers.entity import Entity

from .const import DOMAIN


class InelsBaseEntity(Entity):
    """Base iNELS entity."""

    _attr_should_poll = False
    _attr_has_entity_name = True

    def __init__(
        self,
        device: Device,
        key: str,
        index: int,
    ) -> None:
        """Init base entity."""
        self._device = device
        self._device_id = device.unique_id
        self._attr_unique_id = self._device_id

        # The referenced variable to read from
        self._key = key
        # The index of the variable list to read from. '-1' for no index
        self._index = index

        info = device.info()
        self._attr_device_info = DeviceInfo(
            identifiers={(DOMAIN, device.unique_id)},
            manufacturer=info.manufacturer,
            model=info.model_number,
            name=device.title,
            sw_version=info.sw_version,
        )

    async def async_added_to_hass(self) -> None:
        """Add subscription of the data listener."""
        # Register the HA callback
        self._device.add_ha_callback(self._key, self._index, self._callback)
        # Subscribe to MQTT updates
        self._device.mqtt.subscribe_listener(
            self._device.state_topic, self._device.unique_id, self._device.callback
        )

    def _callback(self) -> None:
        """Get data from broker into the HA."""
        if hasattr(self, "hass"):
            self.schedule_update_ha_state()

    @property
    def available(self) -> bool:
        """Return if entity is available."""
        return self._device.is_available

@@ -1,15 +0,0 @@
{
  "entity": {
    "switch": {
      "bit": {
        "default": "mdi:power-socket-eu"
      },
      "simple_relay": {
        "default": "mdi:power-socket-eu"
      },
      "relay": {
        "default": "mdi:power-socket-eu"
      }
    }
  }
}

@@ -1,13 +0,0 @@
{
  "domain": "inels",
  "name": "iNELS",
  "codeowners": ["@epdevlab"],
  "config_flow": true,
  "dependencies": ["mqtt"],
  "documentation": "https://www.home-assistant.io/integrations/inels",
  "iot_class": "local_push",
  "mqtt": ["inels/status/#"],
  "quality_scale": "bronze",
  "requirements": ["elkoep-aio-mqtt==0.1.0b4"],
  "single_config_entry": true
}

@@ -1,118 +0,0 @@
rules:
  # Bronze
  config-flow: done
  test-before-configure: done
  unique-config-entry: done
  config-flow-test-coverage: done
  runtime-data: done
  test-before-setup:
    status: done
    comment: >
      Raise "Invalid authentication" and "MQTT Broker is offline or
      cannot be reached" otherwise, async_setup_entry returns False
  appropriate-polling:
    status: done
    comment: |
      Integration uses local_push.
  entity-unique-id:
    status: done
    comment: |
      {MAC}_{DEVICE_ID} is used, for example, 0e97f8b7d30_02E8.
  has-entity-name:
    status: done
    comment: >
      Almost all devices are multi-functional, which means that all functions
      are equally important -> keep the descriptive name (not setting _attr_name to None).
  entity-event-setup:
    status: done
    comment: |
      Subscribe in async_added_to_hass & unsubscribe from async_unload_entry.
  dependency-transparency: done
  action-setup:
    status: exempt
    comment: |
      No custom actions are defined.
  common-modules: done
  docs-high-level-description: done
  docs-installation-instructions:
    status: done
    comment: |
      A link to the wiki is provided.
  docs-removal-instructions: done
  docs-actions:
    status: exempt
    comment: |
      No custom actions are defined.
  brands: done
  # Silver
  config-entry-unloading: done
  log-when-unavailable: todo
  entity-unavailable:
    status: done
    comment: |
      available property.
  action-exceptions:
    status: exempt
    comment: |
      No custom actions are defined.
  reauthentication-flow: todo
  parallel-updates:
    status: todo
    comment: |
      For all platforms, add a constant PARALLEL_UPDATES = 0.
  test-coverage: done
  integration-owner: done
  docs-installation-parameters:
    status: done
    comment: |
      A link to the wiki is provided.
  docs-configuration-parameters:
    status: exempt
    comment: >
      There is the same options flow in the integration as there is in the
      configuration.

  # Gold
  entity-translations: done
  entity-device-class: todo
  devices: done
  entity-category: todo
  entity-disabled-by-default: todo
  discovery:
    status: todo
    comment: |
      Currently blocked by a hw limitation.
  stale-devices:
    status: todo
    comment: >
      Same as discovery. The async_remove_config_entry_device function should be
      implemented at a minimum.
  diagnostics: todo
  exception-translations: todo
  icon-translations: todo
  reconfiguration-flow: todo
  dynamic-devices: todo
  discovery-update-info:
    status: todo
    comment: |
      Same as discovery.
  repair-issues: todo
  docs-use-cases: todo
  docs-supported-devices:
    status: todo
    comment: >
      In regards to this and below doc requirements, I am not sure whether the
      wiki link is acceptable.
  docs-supported-functions: todo
  docs-data-update: todo
  docs-known-limitations: todo
  docs-troubleshooting: todo
  docs-examples: todo

  # Platinum
  async-dependency: done
  inject-websession:
    status: exempt
    comment: |
      The integration is not making any HTTP requests.
  strict-typing: todo

@@ -1,30 +0,0 @@
{
  "config": {
    "step": {
      "confirm_from_user": {
        "description": "iNELS devices must be connected to the same broker as the Home Assistant MQTT integration client. Continue setup?"
      },
      "confirm_from_mqtt": {
        "description": "Do you want to set up iNELS?"
      }
    },
    "abort": {
      "mqtt_not_connected": "Home Assistant MQTT integration not connected to MQTT broker.",
      "mqtt_not_configured": "Home Assistant MQTT integration not configured.",
      "already_in_progress": "[%key:common::config_flow::abort::already_in_progress%]"
    }
  },
  "entity": {
    "switch": {
      "bit": {
        "name": "Bit{addr}"
      },
      "simple_relay": {
        "name": "Simple relay{index}"
      },
      "relay": {
        "name": "Relay{index}"
      }
    }
  }
}

@@ -1,137 +0,0 @@
"""iNELS switch entity."""

from __future__ import annotations

from collections.abc import Callable
from dataclasses import dataclass
from typing import Any

from inelsmqtt.devices import Device
from inelsmqtt.utils.common import Bit, Relay, SimpleRelay

from homeassistant.components.switch import SwitchEntity, SwitchEntityDescription
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ServiceValidationError
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback

from . import InelsConfigEntry
from .entity import InelsBaseEntity


@dataclass(frozen=True, kw_only=True)
class InelsSwitchEntityDescription(SwitchEntityDescription):
    """Class describing iNELS switch entities."""

    get_state_fn: Callable[[Device, int], Bit | SimpleRelay | Relay]
    alerts: list[str] | None = None
    placeholder_fn: Callable[[Device, int, bool], dict[str, str]]


SWITCH_TYPES = [
    InelsSwitchEntityDescription(
        key="bit",
        translation_key="bit",
        get_state_fn=lambda device, index: device.state.bit[index],
        placeholder_fn=lambda device, index, indexed: {
            "addr": f" {device.state.bit[index].addr}"
        },
    ),
    InelsSwitchEntityDescription(
        key="simple_relay",
        translation_key="simple_relay",
        get_state_fn=lambda device, index: device.state.simple_relay[index],
        placeholder_fn=lambda device, index, indexed: {
            "index": f" {index + 1}" if indexed else ""
        },
    ),
    InelsSwitchEntityDescription(
        key="relay",
        translation_key="relay",
        get_state_fn=lambda device, index: device.state.relay[index],
        alerts=["overflow"],
        placeholder_fn=lambda device, index, indexed: {
            "index": f" {index + 1}" if indexed else ""
        },
    ),
]


async def async_setup_entry(
    hass: HomeAssistant,
    entry: InelsConfigEntry,
    async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
    """Load iNELS switch."""
    entities: list[InelsSwitch] = []

    for device in entry.runtime_data.devices:
        for description in SWITCH_TYPES:
            if hasattr(device.state, description.key):
                switch_count = len(getattr(device.state, description.key))
                entities.extend(
                    InelsSwitch(
                        device=device,
                        description=description,
                        index=idx,
                        switch_count=switch_count,
                    )
                    for idx in range(switch_count)
                )

    async_add_entities(entities, False)


class InelsSwitch(InelsBaseEntity, SwitchEntity):
    """The platform class required by Home Assistant."""

    entity_description: InelsSwitchEntityDescription

    def __init__(
        self,
        device: Device,
        description: InelsSwitchEntityDescription,
        index: int = 0,
        switch_count: int = 1,
    ) -> None:
        """Initialize the switch."""
        super().__init__(device=device, key=description.key, index=index)
        self.entity_description = description
        self._switch_count = switch_count

        # Include index in unique_id for devices with multiple switches
        unique_key = f"{description.key}{index}" if index else description.key

        self._attr_unique_id = f"{self._attr_unique_id}_{unique_key}".lower()

        # Set translation placeholders
        self._attr_translation_placeholders = self.entity_description.placeholder_fn(
            self._device, self._index, self._switch_count > 1
        )

    def _check_alerts(self, current_state: Bit | SimpleRelay | Relay) -> None:
        """Check if there are active alerts and raise ServiceValidationError if found."""
        if self.entity_description.alerts and any(
            getattr(current_state, alert_key, None)
            for alert_key in self.entity_description.alerts
        ):
            raise ServiceValidationError("Cannot operate switch with active alerts")

    @property
    def is_on(self) -> bool | None:
        """Return if switch is on."""
        current_state = self.entity_description.get_state_fn(self._device, self._index)
        return current_state.is_on

    async def async_turn_off(self, **kwargs: Any) -> None:
        """Instruct the switch to turn off."""
        current_state = self.entity_description.get_state_fn(self._device, self._index)
        self._check_alerts(current_state)
        current_state.is_on = False
        await self._device.set_ha_value(self._device.state)

    async def async_turn_on(self, **kwargs: Any) -> None:
        """Instruct the switch to turn on."""
        current_state = self.entity_description.get_state_fn(self._device, self._index)
        self._check_alerts(current_state)
        current_state.is_on = True
        await self._device.set_ha_value(self._device.state)
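
The deleted switch platform drives all three switch flavors (bit, simple relay, relay) from one entity class plus frozen description dataclasses holding accessor lambdas. A stripped-down sketch of that description-driven pattern, with plain dicts standing in for the device state object:

```python
from collections.abc import Callable
from dataclasses import dataclass


@dataclass(frozen=True, kw_only=True)
class SwitchDescription:
    """Description object: per-type behavior lives in data, not subclasses."""

    key: str
    get_state_fn: Callable[[dict, int], bool]


DESCRIPTIONS = [
    SwitchDescription(key="relay", get_state_fn=lambda state, i: state["relay"][i]),
    SwitchDescription(key="bit", get_state_fn=lambda state, i: state["bit"][i]),
]

device_state = {"relay": [True, False], "bit": [False]}

# One generic loop creates an entity per channel, whatever the switch type.
for description in DESCRIPTIONS:
    for index in range(len(device_state.get(description.key, []))):
        print(description.key, index, description.get_state_fn(device_state, index))
```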

@@ -65,8 +65,6 @@ INFO_SENSORS: tuple[JewishCalendarSensorDescription, ...] = (
        attr_fn=lambda info: {
            "hebrew_year": str(info.hdate.year),
            "hebrew_month_name": str(info.hdate.month),
-            "hebrew_month_standard_order": str(info.hdate.month.value),
-            "hebrew_month_biblical_order": str(info.hdate.month.biblical_order),
            "hebrew_day": str(info.hdate.day),
        },
    ),

@@ -26,12 +26,6 @@
        "state_attributes": {
          "hebrew_year": { "name": "Hebrew year" },
          "hebrew_month_name": { "name": "Hebrew month name" },
-          "hebrew_month_standard_order": {
-            "name": "Hebrew month in standard order"
-          },
-          "hebrew_month_biblical_order": {
-            "name": "Hebrew month in biblical order"
-          },
          "hebrew_day": { "name": "Hebrew day" }
        }
      },
@@ -48,8 +48,6 @@ from homeassistant.util.network import is_link_local
|
||||
|
||||
from .const import DOMAIN, LOGGER
|
||||
|
||||
DEVICES_URL = "https://developer.lametric.com/user/devices"
|
||||
|
||||
|
||||
class LaMetricFlowHandler(AbstractOAuth2FlowHandler, domain=DOMAIN):
|
||||
"""Handle a LaMetric config flow."""
|
||||
@@ -166,9 +164,6 @@ class LaMetricFlowHandler(AbstractOAuth2FlowHandler, domain=DOMAIN):
|
||||
return self.async_show_form(
|
||||
step_id="manual_entry",
|
||||
data_schema=vol.Schema(schema),
|
||||
description_placeholders={
|
||||
"devices_url": DEVICES_URL,
|
||||
},
|
||||
errors=errors,
|
||||
)
|
||||
|
||||
|
||||
@@ -24,7 +24,7 @@
|
||||
},
|
||||
"data_description": {
|
||||
"host": "The IP address or hostname of your LaMetric TIME on your network.",
|
||||
"api_key": "You can find this API key in the [devices page in your LaMetric developer account]({devices_url})."
|
||||
"api_key": "You can find this API key in the [devices page in your LaMetric developer account](https://developer.lametric.com/user/devices)."
|
||||
}
|
||||
},
|
||||
"cloud_select_device": {
|
||||
|
||||
@@ -18,7 +18,6 @@ from homeassistant.const import (
|
||||
CONF_SOURCE,
|
||||
CONF_UNIT_OF_MEASUREMENT,
|
||||
LIGHT_LUX,
|
||||
PERCENTAGE,
|
||||
UnitOfElectricCurrent,
|
||||
UnitOfElectricPotential,
|
||||
UnitOfSpeed,
|
||||
@@ -51,7 +50,6 @@ DEVICE_CLASS_MAPPING = {
|
||||
pypck.lcn_defs.VarUnit.VOLT: SensorDeviceClass.VOLTAGE,
|
||||
pypck.lcn_defs.VarUnit.AMPERE: SensorDeviceClass.CURRENT,
|
||||
pypck.lcn_defs.VarUnit.PPM: SensorDeviceClass.CO2,
|
||||
pypck.lcn_defs.VarUnit.PERCENT: SensorDeviceClass.HUMIDITY,
|
||||
}
|
||||
|
||||
UNIT_OF_MEASUREMENT_MAPPING = {
|
||||
@@ -64,7 +62,6 @@ UNIT_OF_MEASUREMENT_MAPPING = {
|
||||
pypck.lcn_defs.VarUnit.VOLT: UnitOfElectricPotential.VOLT,
|
||||
pypck.lcn_defs.VarUnit.AMPERE: UnitOfElectricCurrent.AMPERE,
|
||||
pypck.lcn_defs.VarUnit.PPM: CONCENTRATION_PARTS_PER_MILLION,
|
||||
pypck.lcn_defs.VarUnit.PERCENT: PERCENTAGE,
|
||||
}
|
||||
|
||||
|
||||
|
||||
@@ -13,5 +13,5 @@
|
||||
"iot_class": "cloud_push",
|
||||
"loggers": ["pylitterbot"],
|
||||
"quality_scale": "bronze",
|
||||
"requirements": ["pylitterbot==2024.2.7"]
|
||||
"requirements": ["pylitterbot==2024.2.4"]
|
||||
}
|
||||
|
||||
@@ -36,21 +36,6 @@ if TYPE_CHECKING:

LOGGER = logging.getLogger(__name__)

# Due to variances in labeling implementations, labels are vendor and product specific.
# This dictionary defines which labels to use for specific vendor/product combinations.
# The keys are vendor IDs, the values are dictionaries with product IDs as keys
# and lists of label names to use as values. If the value is None, no labels are used
VENDOR_LABELING_LIST: dict[int, dict[int, list[str] | None]] = {
    4488: {259: ["position"]},  # TP-Link Dual Outdoor Plug US
    4874: {105: ["orientation"]},  # Eve Energy dual Outlet US
    4961: {
        1: ["inovelliname", "label", "name", "button"],  # Inovelli VTM31
        2: ["label", "devicetype", "button"],  # Inovelli VTM35
        4: None,  # Inovelli VTM36
        16: ["label", "name", "button"],  # Inovelli VTM30
    },
}


def catch_matter_error[_R, **P](
    func: Callable[Concatenate[MatterEntity, P], Coroutine[Any, Any, _R]],
@@ -127,47 +112,30 @@ class MatterEntity(Entity):
        if self._platform_translation_key and not self.translation_key:
            self._attr_translation_key = self._platform_translation_key

        # Matter labels can be used to modify the entity name
        # by appending the text.
        if name_modifier := self._get_name_modifier():
            self._name_postfix = name_modifier
        # prefer the label attribute for the entity name
        # Matter has a way for users and/or vendors to specify a name for an endpoint
        # which is always preferred over a standard HA (generated) name
        for attr in (
            clusters.FixedLabel.Attributes.LabelList,
            clusters.UserLabel.Attributes.LabelList,
        ):
            if not (labels := self.get_matter_attribute_value(attr)):
                continue
            for label in labels:
                if label.label not in ["Label", "Button"]:
                    continue
                # fixed or user label found: use it
                label_value: str = label.value
                # in the case the label is only the label id, use it as postfix only
                if label_value.isnumeric():
                    self._name_postfix = label_value
                else:
                    self._attr_name = label_value
                break

        # make sure to update the attributes once
        self._update_from_device()

    def _find_matching_labels(self) -> list[str]:
        """Find all labels for a Matter entity."""

        device_info = self._endpoint.device_info
        labeling_list = VENDOR_LABELING_LIST.get(device_info.vendorID, {}).get(
            device_info.productID
        )

        # get the labels from the UserLabel and FixedLabel clusters
        user_label_list: list[clusters.UserLabel.Structs.LabelStruct] = (
            self.get_matter_attribute_value(clusters.UserLabel.Attributes.LabelList)
            or []
        )
        fixed_label_list: list[clusters.FixedLabel.Structs.LabelStruct] = (
            self.get_matter_attribute_value(clusters.FixedLabel.Attributes.LabelList)
            or []
        )

        found_labels: list[str] = [
            lbl.value
            for label in labeling_list or []
            for lbl in (*user_label_list, *fixed_label_list)
            if lbl.label.lower() == label
        ]
        return found_labels

    def _get_name_modifier(self) -> str | None:
        """Get the name modifier for the entity."""

        if found_labels := self._find_matching_labels():
            return found_labels[0]
        return None

    async def async_added_to_hass(self) -> None:
        """Handle being added to Home Assistant."""
        await super().async_added_to_hass()
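Editor's note: a self-contained sketch of the label-matching logic above, runnable outside of Home Assistant. `LabelStruct` stands in for the Matter cluster struct; the vendor and product IDs mirror the `VENDOR_LABELING_LIST` entry in the diff, and the sample label value is invented for illustration.

```python
from dataclasses import dataclass


@dataclass
class LabelStruct:
    label: str
    value: str


VENDOR_LABELING_LIST: dict[int, dict[int, list[str] | None]] = {
    4488: {259: ["position"]},  # TP-Link Dual Outdoor Plug US
}


def find_matching_labels(
    vendor_id: int, product_id: int, labels: list[LabelStruct]
) -> list[str]:
    """Return label values allowed for this vendor/product, in allowlist order."""
    allowed = VENDOR_LABELING_LIST.get(vendor_id, {}).get(product_id) or []
    return [
        lbl.value
        for name in allowed
        for lbl in labels
        if lbl.label.lower() == name
    ]


# A "Position" label on the endpoint yields its value as the name modifier:
print(find_matching_labels(4488, 259, [LabelStruct("Position", "Left")]))  # ['Left']
```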
@@ -256,8 +256,7 @@ DISCOVERY_SCHEMAS = [
    MatterDiscoverySchema(
        platform=Platform.UPDATE,
        entity_description=MatterUpdateEntityDescription(
            key="MatterUpdate",
            device_class=UpdateDeviceClass.FIRMWARE,
            key="MatterUpdate", device_class=UpdateDeviceClass.FIRMWARE
        ),
        entity_class=MatterUpdate,
        required_attributes=(

@@ -47,8 +47,6 @@ MCP_DISCOVERY_HEADERS = {
    "MCP-Protocol-Version": "2025-03-26",
}

EXAMPLE_URL = "http://example/sse"


@dataclass
class OAuthConfig:
@@ -184,10 +182,7 @@ class ModelContextProtocolConfigFlow(AbstractOAuth2FlowHandler, domain=DOMAIN):
            return self.async_create_entry(title=info["title"], data=user_input)

        return self.async_show_form(
            step_id="user",
            data_schema=STEP_USER_DATA_SCHEMA,
            errors=errors,
            description_placeholders={"example_url": EXAMPLE_URL},
            step_id="user", data_schema=STEP_USER_DATA_SCHEMA, errors=errors
        )

    async def async_step_auth_discovery(

@@ -6,7 +6,7 @@
        "url": "[%key:common::config_flow::data::url%]"
      },
      "data_description": {
        "url": "The remote MCP server URL for the SSE endpoint, for example {example_url}"
        "url": "The remote MCP server URL for the SSE endpoint, for example http://example/sse"
      }
    },
    "credentials_choice": {
@@ -35,7 +35,7 @@
      "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
      "unknown": "[%key:common::config_flow::error::unknown%]",
      "timeout_connect": "[%key:common::config_flow::error::timeout_connect%]",
      "invalid_url": "Must be a valid MCP server URL"
      "invalid_url": "Must be a valid MCP server URL e.g. https://example.com/sse"
    },
    "abort": {
      "already_configured": "[%key:common::config_flow::abort::already_configured_device%]",

@@ -7,5 +7,5 @@
  "iot_class": "local_polling",
  "loggers": ["dnspython", "mcstatus"],
  "quality_scale": "silver",
  "requirements": ["mcstatus==12.0.6"]
  "requirements": ["mcstatus==12.0.1"]
}

@@ -7,5 +7,5 @@
  "integration_type": "service",
  "iot_class": "cloud_polling",
  "quality_scale": "legacy",
  "requirements": ["nsapi==3.1.3"]
  "requirements": ["nsapi==3.1.2"]
}

@@ -7,6 +7,6 @@
  "integration_type": "service",
  "iot_class": "cloud_polling",
  "loggers": ["nextdns"],
  "quality_scale": "silver",
  "quality_scale": "bronze",
  "requirements": ["nextdns==4.1.0"]
}

@@ -37,7 +37,9 @@ rules:
  log-when-unavailable: done
  parallel-updates: done
  reauthentication-flow: done
  test-coverage: done
  test-coverage:
    status: todo
    comment: Patch NextDns object instead of functions.

  # Gold
  devices: done
@@ -48,17 +50,19 @@ rules:
  discovery:
    status: exempt
    comment: The integration is a cloud service and thus does not support discovery.
  docs-data-update: done
  docs-examples: done
  docs-known-limitations: done
  docs-data-update: todo
  docs-examples: todo
  docs-known-limitations:
    status: todo
    comment: Add info that there are no known limitations.
  docs-supported-devices:
    status: exempt
    comment: This is a service, which doesn't integrate with any devices.
  docs-supported-functions: done
  docs-supported-functions: todo
  docs-troubleshooting:
    status: exempt
    comment: No known issues that could be resolved by the user.
  docs-use-cases: done
  docs-use-cases: todo
  dynamic-devices:
    status: exempt
    comment: This integration has a fixed single service.

@@ -320,7 +320,7 @@
      "name": "Block WhatsApp"
    },
    "block_xboxlive": {
      "name": "Block Xbox Network"
      "name": "Block Xbox Live"
    },
    "block_youtube": {
      "name": "Block YouTube"
@@ -2,17 +2,13 @@

from __future__ import annotations

from typing import Any

from homeassistant.const import Platform
from homeassistant.core import HomeAssistant

from .const import (
    _LOGGER,
    ALL_MATCH_REGEX,
    CONF_AREA_FILTER,
    CONF_FILTER_CORONA,
    CONF_FILTERS,
    CONF_HEADLINE_FILTER,
    NO_MATCH_REGEX,
)
@@ -23,6 +19,20 @@ PLATFORMS: list[str] = [Platform.BINARY_SENSOR]

async def async_setup_entry(hass: HomeAssistant, entry: NinaConfigEntry) -> bool:
    """Set up platform from a ConfigEntry."""
    if CONF_HEADLINE_FILTER not in entry.data:
        filter_regex = NO_MATCH_REGEX

        if entry.data[CONF_FILTER_CORONA]:
            filter_regex = ".*corona.*"

        new_data = {**entry.data, CONF_HEADLINE_FILTER: filter_regex}
        new_data.pop(CONF_FILTER_CORONA, None)
        hass.config_entries.async_update_entry(entry, data=new_data)

    if CONF_AREA_FILTER not in entry.data:
        new_data = {**entry.data, CONF_AREA_FILTER: ALL_MATCH_REGEX}
        hass.config_entries.async_update_entry(entry, data=new_data)

    coordinator = NINADataUpdateCoordinator(hass, entry)

    await coordinator.async_config_entry_first_refresh()
@@ -37,52 +47,3 @@ async def async_setup_entry(hass: HomeAssistant, entry: NinaConfigEntry) -> bool
async def async_unload_entry(hass: HomeAssistant, entry: NinaConfigEntry) -> bool:
    """Unload a config entry."""
    return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)


async def async_migrate_entry(hass: HomeAssistant, entry: NinaConfigEntry) -> bool:
    """Migrate the config to the new format."""

    version = entry.version
    minor_version = entry.minor_version

    _LOGGER.debug("Migrating from version %s.%s", version, minor_version)
    if entry.version > 1:
        # This means the user has downgraded from a future version
        return False

    new_data: dict[str, Any] = {**entry.data, CONF_FILTERS: {}}

    if version == 1 and minor_version == 1:
        if CONF_HEADLINE_FILTER not in entry.data:
            filter_regex = NO_MATCH_REGEX

            if entry.data.get(CONF_FILTER_CORONA, None):
                filter_regex = ".*corona.*"

            new_data[CONF_HEADLINE_FILTER] = filter_regex
            new_data.pop(CONF_FILTER_CORONA, None)

        if CONF_AREA_FILTER not in entry.data:
            new_data[CONF_AREA_FILTER] = ALL_MATCH_REGEX

        hass.config_entries.async_update_entry(
            entry,
            data=new_data,
            minor_version=2,
        )
        minor_version = 2

    if version == 1 and minor_version == 2:
        new_data[CONF_FILTERS][CONF_HEADLINE_FILTER] = entry.data[CONF_HEADLINE_FILTER]
        new_data.pop(CONF_HEADLINE_FILTER, None)

        new_data[CONF_FILTERS][CONF_AREA_FILTER] = entry.data[CONF_AREA_FILTER]
        new_data.pop(CONF_AREA_FILTER, None)

        hass.config_entries.async_update_entry(
            entry,
            data=new_data,
            minor_version=3,
        )

    return True
|
||||
OptionsFlowWithReload,
|
||||
)
|
||||
from homeassistant.core import callback
|
||||
from homeassistant.data_entry_flow import section
|
||||
from homeassistant.helpers import config_validation as cv, entity_registry as er
|
||||
from homeassistant.helpers.aiohttp_client import async_get_clientsession
|
||||
from homeassistant.helpers.typing import VolDictType
|
||||
|
||||
from .const import (
|
||||
_LOGGER,
|
||||
ALL_MATCH_REGEX,
|
||||
CONF_AREA_FILTER,
|
||||
CONF_FILTERS,
|
||||
CONF_HEADLINE_FILTER,
|
||||
CONF_MESSAGE_SLOTS,
|
||||
CONF_REGIONS,
|
||||
@@ -90,7 +87,6 @@ class NinaConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
"""Handle a config flow for NINA."""
|
||||
|
||||
VERSION: int = 1
|
||||
MINOR_VERSION: int = 3
|
||||
|
||||
def __init__(self) -> None:
|
||||
"""Initialize."""
|
||||
@@ -130,8 +126,8 @@ class NinaConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
if group_input := user_input.get(group):
|
||||
user_input[CONF_REGIONS] += group_input
|
||||
|
||||
if not user_input[CONF_FILTERS][CONF_HEADLINE_FILTER]:
|
||||
user_input[CONF_FILTERS][CONF_HEADLINE_FILTER] = NO_MATCH_REGEX
|
||||
if not user_input[CONF_HEADLINE_FILTER]:
|
||||
user_input[CONF_HEADLINE_FILTER] = NO_MATCH_REGEX
|
||||
|
||||
if user_input[CONF_REGIONS]:
|
||||
return self.async_create_entry(
|
||||
@@ -154,18 +150,7 @@ class NinaConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
vol.Required(CONF_MESSAGE_SLOTS, default=5): vol.All(
|
||||
int, vol.Range(min=1, max=20)
|
||||
),
|
||||
vol.Required(CONF_FILTERS): section(
|
||||
vol.Schema(
|
||||
{
|
||||
vol.Optional(
|
||||
CONF_HEADLINE_FILTER, default=NO_MATCH_REGEX
|
||||
): cv.string,
|
||||
vol.Optional(
|
||||
CONF_AREA_FILTER, default=ALL_MATCH_REGEX
|
||||
): cv.string,
|
||||
}
|
||||
)
|
||||
),
|
||||
vol.Optional(CONF_HEADLINE_FILTER, default=""): cv.string,
|
||||
}
|
||||
),
|
||||
errors=errors,
|
||||
@@ -274,20 +259,14 @@ class OptionsFlowHandler(OptionsFlowWithReload):
|
||||
CONF_MESSAGE_SLOTS,
|
||||
default=self.data[CONF_MESSAGE_SLOTS],
|
||||
): vol.All(int, vol.Range(min=1, max=20)),
|
||||
vol.Required(CONF_FILTERS): section(
|
||||
vol.Schema(
|
||||
{
|
||||
vol.Optional(
|
||||
CONF_HEADLINE_FILTER,
|
||||
default=self.data[CONF_FILTERS][CONF_HEADLINE_FILTER],
|
||||
): cv.string,
|
||||
vol.Optional(
|
||||
CONF_AREA_FILTER,
|
||||
default=self.data[CONF_FILTERS][CONF_AREA_FILTER],
|
||||
): cv.string,
|
||||
}
|
||||
)
|
||||
),
|
||||
vol.Optional(
|
||||
CONF_HEADLINE_FILTER,
|
||||
default=self.data[CONF_HEADLINE_FILTER],
|
||||
): cv.string,
|
||||
vol.Optional(
|
||||
CONF_AREA_FILTER,
|
||||
default=self.data[CONF_AREA_FILTER],
|
||||
): cv.string,
|
||||
}
|
||||
|
||||
return self.async_show_form(
|
||||
|
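Editor's note: the `section()` helper used above groups related fields in the form and nests their values under the section key in the resulting data. A minimal sketch of that schema shape, with constants spelled inline as assumptions:

```python
import voluptuous as vol

from homeassistant.data_entry_flow import section
from homeassistant.helpers import config_validation as cv

NO_MATCH_REGEX = "(?!)"  # never matches
ALL_MATCH_REGEX = ".*"   # matches everything

SCHEMA = vol.Schema(
    {
        vol.Required("slots", default=5): vol.All(int, vol.Range(min=1, max=20)),
        # Grouping both filters in a "filters" section keeps the form tidy
        # and nests their submitted values under data["filters"].
        vol.Required("filters"): section(
            vol.Schema(
                {
                    vol.Optional("headline_filter", default=NO_MATCH_REGEX): cv.string,
                    vol.Optional("area_filter", default=ALL_MATCH_REGEX): cv.string,
                }
            )
        ),
    }
)
```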
@@ -17,7 +17,6 @@ ALL_MATCH_REGEX: str = ".*"

CONF_REGIONS: str = "regions"
CONF_MESSAGE_SLOTS: str = "slots"
CONF_FILTERS: str = "filters"
CONF_FILTER_CORONA: str = "corona_filter"  # deprecated
CONF_HEADLINE_FILTER: str = "headline_filter"
CONF_AREA_FILTER: str = "area_filter"

@@ -17,7 +17,6 @@ from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, Upda
from .const import (
    _LOGGER,
    CONF_AREA_FILTER,
    CONF_FILTERS,
    CONF_HEADLINE_FILTER,
    CONF_REGIONS,
    DOMAIN,
@@ -59,10 +58,8 @@ class NINADataUpdateCoordinator(
    ) -> None:
        """Initialize."""
        self._nina: Nina = Nina(async_get_clientsession(hass))
        self.headline_filter: str = config_entry.data[CONF_FILTERS][
            CONF_HEADLINE_FILTER
        ]
        self.area_filter: str = config_entry.data[CONF_FILTERS][CONF_AREA_FILTER]
        self.headline_filter: str = config_entry.data[CONF_HEADLINE_FILTER]
        self.area_filter: str = config_entry.data[CONF_AREA_FILTER]

        regions: dict[str, str] = config_entry.data[CONF_REGIONS]
        for region in regions:

@@ -10,21 +10,8 @@
          "_m_to_q": "City/county (M-Q)",
          "_r_to_u": "City/county (R-U)",
          "_v_to_z": "City/county (V-Z)",
          "slots": "Maximum warnings per city/county"
        },
        "sections": {
          "filters": {
            "name": "Filters",
            "description": "Filter warnings based on their attributes",
            "data": {
              "headline_filter": "Headline blocklist",
              "area_filter": "Affected area filter"
            },
            "data_description": {
              "headline_filter": "Blacklist regex to filter warning based on headlines",
              "area_filter": "Whitelist regex to filter warnings based on affected areas"
            }
          }
          "slots": "Maximum warnings per city/county",
          "headline_filter": "Headline blocklist"
        }
      }
    },
@@ -45,21 +32,9 @@
          "_m_to_q": "[%key:component::nina::config::step::user::data::_m_to_q%]",
          "_r_to_u": "[%key:component::nina::config::step::user::data::_r_to_u%]",
          "_v_to_z": "[%key:component::nina::config::step::user::data::_v_to_z%]",
          "slots": "[%key:component::nina::config::step::user::data::slots%]"
        },
        "sections": {
          "filters": {
            "name": "[%key:component::nina::config::step::user::sections::filters::name%]",
            "description": "[%key:component::nina::config::step::user::sections::filters::description%]",
            "data": {
              "headline_filter": "[%key:component::nina::config::step::user::sections::filters::data::headline_filter%]",
              "area_filter": "[%key:component::nina::config::step::user::sections::filters::data::area_filter%]"
            },
            "data_description": {
              "headline_filter": "[%key:component::nina::config::step::user::sections::filters::data_description::headline_filter%]",
              "area_filter": "[%key:component::nina::config::step::user::sections::filters::data_description::area_filter%]"
            }
          }
          "slots": "[%key:component::nina::config::step::user::data::slots%]",
          "headline_filter": "[%key:component::nina::config::step::user::data::headline_filter%]",
          "area_filter": "Affected area filter"
        }
      }
    },

@@ -16,12 +16,7 @@ from homeassistant.helpers.aiohttp_client import async_get_clientsession
from .const import CONF_SESSION_TOKEN, DOMAIN
from .coordinator import NintendoParentalControlsConfigEntry, NintendoUpdateCoordinator

_PLATFORMS: list[Platform] = [
    Platform.SENSOR,
    Platform.TIME,
    Platform.SWITCH,
    Platform.NUMBER,
]
_PLATFORMS: list[Platform] = [Platform.SENSOR, Platform.TIME, Platform.SWITCH]


async def async_setup_entry(

@@ -28,8 +28,6 @@ class NintendoDevice(CoordinatorEntity[NintendoUpdateCoordinator]):
            manufacturer="Nintendo",
            name=device.name,
            sw_version=device.extra["firmwareVersion"]["displayedVersion"],
            model=device.model,
            model_id=device.generation,
        )

    async def async_added_to_hass(self) -> None:
@@ -1,91 +0,0 @@
"""Number platform for Nintendo Parental controls."""

from __future__ import annotations

from collections.abc import Callable, Coroutine
from dataclasses import dataclass
from enum import StrEnum
from typing import Any

from homeassistant.components.number import (
    NumberEntity,
    NumberEntityDescription,
    NumberMode,
)
from homeassistant.const import UnitOfTime
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback

from .coordinator import NintendoParentalControlsConfigEntry, NintendoUpdateCoordinator
from .entity import Device, NintendoDevice

PARALLEL_UPDATES = 1


class NintendoParentalNumber(StrEnum):
    """Store keys for Nintendo Parental numbers."""

    TODAY_MAX_SCREENTIME = "today_max_screentime"


@dataclass(kw_only=True, frozen=True)
class NintendoParentalControlsNumberEntityDescription(NumberEntityDescription):
    """Description for Nintendo Parental number entities."""

    value_fn: Callable[[Device], int | float | None]
    set_native_value_fn: Callable[[Device, float], Coroutine[Any, Any, None]]


NUMBER_DESCRIPTIONS: tuple[NintendoParentalControlsNumberEntityDescription, ...] = (
    NintendoParentalControlsNumberEntityDescription(
        key=NintendoParentalNumber.TODAY_MAX_SCREENTIME,
        translation_key=NintendoParentalNumber.TODAY_MAX_SCREENTIME,
        native_min_value=-1,
        native_step=1,
        native_max_value=360,
        native_unit_of_measurement=UnitOfTime.MINUTES,
        mode=NumberMode.BOX,
        set_native_value_fn=lambda device, value: device.update_max_daily_playtime(
            minutes=value
        ),
        value_fn=lambda device: device.limit_time,
    ),
)


async def async_setup_entry(
    hass: HomeAssistant,
    entry: NintendoParentalControlsConfigEntry,
    async_add_devices: AddConfigEntryEntitiesCallback,
) -> None:
    """Set up number platform."""
    async_add_devices(
        NintendoParentalControlsNumberEntity(entry.runtime_data, device, entity)
        for device in entry.runtime_data.api.devices.values()
        for entity in NUMBER_DESCRIPTIONS
    )


class NintendoParentalControlsNumberEntity(NintendoDevice, NumberEntity):
    """Represent a Nintendo Parental number entity."""

    entity_description: NintendoParentalControlsNumberEntityDescription

    def __init__(
        self,
        coordinator: NintendoUpdateCoordinator,
        device: Device,
        description: NintendoParentalControlsNumberEntityDescription,
    ) -> None:
        """Initialize the number entity."""
        super().__init__(coordinator=coordinator, device=device, key=description.key)
        self.entity_description = description

    @property
    def native_value(self) -> float | None:
        """Return the state of the entity."""
        return self.entity_description.value_fn(self._device)

    async def async_set_native_value(self, value: float) -> None:
        """Update entity state."""
        await self.entity_description.set_native_value_fn(self._device, value)
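Editor's note: the removed platform delegates all per-entity behavior to its description via `value_fn` and `set_native_value_fn`. A minimal self-contained sketch of that delegation, with a stand-in `Device` instead of the real coordinator objects:

```python
from collections.abc import Awaitable, Callable
from dataclasses import dataclass


class Device:
    """Stand-in for the integration's device object."""

    def __init__(self, limit_time: int = 180) -> None:
        self.limit_time = limit_time

    async def update_max_daily_playtime(self, minutes: float) -> None:
        self.limit_time = int(minutes)


@dataclass(frozen=True, kw_only=True)
class NumberDescription:
    key: str
    value_fn: Callable[[Device], float | None]
    set_native_value_fn: Callable[[Device, float], Awaitable[None]]


TODAY_MAX_SCREENTIME = NumberDescription(
    key="today_max_screentime",
    # Read path: the entity's native_value just calls this.
    value_fn=lambda device: device.limit_time,
    # Write path: async_set_native_value awaits this coroutine.
    set_native_value_fn=lambda device, value: device.update_max_daily_playtime(
        minutes=value
    ),
)
```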
@@ -48,11 +48,6 @@
|
||||
"suspend_software": {
|
||||
"name": "Suspend software"
|
||||
}
|
||||
},
|
||||
"number": {
|
||||
"today_max_screentime": {
|
||||
"name": "Max screentime today"
|
||||
}
|
||||
}
|
||||
},
|
||||
"exceptions": {
|
||||
|
||||
@@ -20,7 +20,7 @@ from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
from .coordinator import NintendoParentalControlsConfigEntry, NintendoUpdateCoordinator
|
||||
from .entity import Device, NintendoDevice
|
||||
|
||||
PARALLEL_UPDATES = 1
|
||||
PARALLEL_UPDATES = 0
|
||||
|
||||
|
||||
class NintendoParentalSwitch(StrEnum):
|
||||
|
||||
@@ -23,7 +23,7 @@ from .entity import Device, NintendoDevice
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
# Coordinator is used to centralize the data updates
|
||||
PARALLEL_UPDATES = 1
|
||||
PARALLEL_UPDATES = 0
|
||||
|
||||
|
||||
class NintendoParentalControlsTime(StrEnum):
|
||||
|
||||
@@ -6,6 +6,6 @@
|
||||
"documentation": "https://www.home-assistant.io/integrations/openrgb",
|
||||
"integration_type": "hub",
|
||||
"iot_class": "local_polling",
|
||||
"quality_scale": "silver",
|
||||
"requirements": ["openrgb-python==0.3.6"]
|
||||
"quality_scale": "bronze",
|
||||
"requirements": ["openrgb-python==0.3.5"]
|
||||
}
|
||||
|
||||
@@ -124,11 +124,7 @@ class PlexFlowHandler(ConfigFlow, domain=DOMAIN):
|
||||
return await self._async_step_plex_website_auth()
|
||||
if self.show_advanced_options:
|
||||
return await self.async_step_user_advanced(errors=errors)
|
||||
return self.async_show_form(
|
||||
step_id="user",
|
||||
errors=errors,
|
||||
description_placeholders={"plex_server_url": "[plex.tv](https://plex.tv)"},
|
||||
)
|
||||
return self.async_show_form(step_id="user", errors=errors)
|
||||
|
||||
async def async_step_user_advanced(
|
||||
self,
|
||||
|
||||
@@ -3,7 +3,7 @@
|
||||
"flow_title": "{name} ({host})",
|
||||
"step": {
|
||||
"user": {
|
||||
"description": "Continue to {plex_server_url} to link a Plex server."
|
||||
"description": "Continue to [plex.tv](https://plex.tv) to link a Plex server."
|
||||
},
|
||||
"user_advanced": {
|
||||
"data": {
|
||||
|
||||
@@ -13,7 +13,6 @@ from pyportainer import (
|
||||
PortainerTimeoutError,
|
||||
)
|
||||
from pyportainer.models.docker import DockerContainer
|
||||
from pyportainer.models.docker_inspect import DockerInfo, DockerVersion
|
||||
from pyportainer.models.portainer import Endpoint
|
||||
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
@@ -39,8 +38,6 @@ class PortainerCoordinatorData:
|
||||
name: str | None
|
||||
endpoint: Endpoint
|
||||
containers: dict[str, DockerContainer]
|
||||
docker_version: DockerVersion
|
||||
docker_info: DockerInfo
|
||||
|
||||
|
||||
class PortainerCoordinator(DataUpdateCoordinator[dict[int, PortainerCoordinatorData]]):
|
||||
@@ -123,8 +120,6 @@ class PortainerCoordinator(DataUpdateCoordinator[dict[int, PortainerCoordinatorD
|
||||
|
||||
try:
|
||||
containers = await self.portainer.get_containers(endpoint.id)
|
||||
docker_version = await self.portainer.docker_version(endpoint.id)
|
||||
docker_info = await self.portainer.docker_info(endpoint.id)
|
||||
except PortainerConnectionError as err:
|
||||
_LOGGER.exception("Connection error")
|
||||
raise UpdateFailed(
|
||||
@@ -145,8 +140,6 @@ class PortainerCoordinator(DataUpdateCoordinator[dict[int, PortainerCoordinatorD
|
||||
name=endpoint.name,
|
||||
endpoint=endpoint,
|
||||
containers={container.id: container for container in containers},
|
||||
docker_version=docker_version,
|
||||
docker_info=docker_info,
|
||||
)
|
||||
|
||||
return mapped_endpoints
|
||||
|
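Editor's note: a rough sketch of the per-endpoint fetch performed by the coordinator above, assuming only the pyportainer client methods shown in the diff (`get_containers`, `docker_version`, `docker_info`); the return shape is illustrative:

```python
async def fetch_endpoint(portainer, endpoint):
    """Collect everything the coordinator stores for one endpoint."""
    containers = await portainer.get_containers(endpoint.id)
    docker_version = await portainer.docker_version(endpoint.id)
    docker_info = await portainer.docker_info(endpoint.id)
    return {
        "endpoint": endpoint,
        # Keyed by container id for O(1) lookups from the sensor entities.
        "containers": {container.id: container for container in containers},
        "docker_version": docker_version,
        "docker_info": docker_info,
    }
```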
@@ -3,45 +3,6 @@
    "sensor": {
      "image": {
        "default": "mdi:docker"
      },
      "operating_system": {
        "default": "mdi:chip"
      },
      "operating_system_version": {
        "default": "mdi:alpha-v-box"
      },
      "api_version": {
        "default": "mdi:api"
      },
      "kernel_version": {
        "default": "mdi:memory"
      },
      "architecture": {
        "default": "mdi:cpu-64-bit"
      },
      "containers_running": {
        "default": "mdi:play-circle-outline"
      },
      "containers_stopped": {
        "default": "mdi:stop-circle-outline"
      },
      "containers_paused": {
        "default": "mdi:pause-circle"
      },
      "images_count": {
        "default": "mdi:image-multiple"
      },
      "containers_count": {
        "default": "mdi:database"
      },
      "memory_total": {
        "default": "mdi:memory"
      },
      "docker_version": {
        "default": "mdi:docker"
      },
      "cpu_total": {
        "default": "mdi:cpu-64-bit"
      }
    },
    "switch": {
@@ -7,149 +7,28 @@ from dataclasses import dataclass

from pyportainer.models.docker import DockerContainer

from homeassistant.components.sensor import (
    EntityCategory,
    SensorDeviceClass,
    SensorEntity,
    SensorEntityDescription,
    SensorStateClass,
    StateType,
)
from homeassistant.const import UnitOfInformation
from homeassistant.components.sensor import SensorEntity, SensorEntityDescription
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback

from .coordinator import PortainerConfigEntry, PortainerCoordinator
from .entity import (
    PortainerContainerEntity,
    PortainerCoordinatorData,
    PortainerEndpointEntity,
)
from .entity import PortainerContainerEntity, PortainerCoordinatorData


@dataclass(frozen=True, kw_only=True)
class PortainerContainerSensorEntityDescription(SensorEntityDescription):
    """Class to hold Portainer container sensor description."""
class PortainerSensorEntityDescription(SensorEntityDescription):
    """Class to hold Portainer sensor description."""

    value_fn: Callable[[DockerContainer], StateType]
    value_fn: Callable[[DockerContainer], str | None]


@dataclass(frozen=True, kw_only=True)
class PortainerEndpointSensorEntityDescription(SensorEntityDescription):
    """Class to hold Portainer endpoint sensor description."""

    value_fn: Callable[[PortainerCoordinatorData], StateType]


CONTAINER_SENSORS: tuple[PortainerContainerSensorEntityDescription, ...] = (
    PortainerContainerSensorEntityDescription(
CONTAINER_SENSORS: tuple[PortainerSensorEntityDescription, ...] = (
    PortainerSensorEntityDescription(
        key="image",
        translation_key="image",
        value_fn=lambda data: data.image,
    ),
)
ENDPOINT_SENSORS: tuple[PortainerEndpointSensorEntityDescription, ...] = (
    PortainerEndpointSensorEntityDescription(
        key="api_version",
        translation_key="api_version",
        value_fn=lambda data: data.docker_version.api_version,
        entity_category=EntityCategory.DIAGNOSTIC,
        entity_registry_enabled_default=False,
    ),
    PortainerEndpointSensorEntityDescription(
        key="kernel_version",
        translation_key="kernel_version",
        value_fn=lambda data: data.docker_version.kernel_version,
        entity_category=EntityCategory.DIAGNOSTIC,
        entity_registry_enabled_default=False,
    ),
    PortainerEndpointSensorEntityDescription(
        key="operating_system",
        translation_key="operating_system",
        value_fn=lambda data: data.docker_info.os_type,
        entity_category=EntityCategory.DIAGNOSTIC,
        entity_registry_enabled_default=False,
    ),
    PortainerEndpointSensorEntityDescription(
        key="operating_system_version",
        translation_key="operating_system_version",
        value_fn=lambda data: data.docker_info.os_version,
        entity_category=EntityCategory.DIAGNOSTIC,
        entity_registry_enabled_default=False,
    ),
    PortainerEndpointSensorEntityDescription(
        key="docker_version",
        translation_key="docker_version",
        value_fn=lambda data: data.docker_info.server_version,
        entity_category=EntityCategory.DIAGNOSTIC,
        entity_registry_enabled_default=False,
    ),
    PortainerEndpointSensorEntityDescription(
        key="architecture",
        translation_key="architecture",
        value_fn=lambda data: data.docker_info.architecture,
        entity_category=EntityCategory.DIAGNOSTIC,
        entity_registry_enabled_default=False,
    ),
    PortainerEndpointSensorEntityDescription(
        key="containers_count",
        translation_key="containers_count",
        value_fn=lambda data: data.docker_info.containers,
        entity_category=EntityCategory.DIAGNOSTIC,
        entity_registry_enabled_default=False,
        state_class=SensorStateClass.MEASUREMENT,
    ),
    PortainerEndpointSensorEntityDescription(
        key="containers_running",
        translation_key="containers_running",
        value_fn=lambda data: data.docker_info.containers_running,
        entity_category=EntityCategory.DIAGNOSTIC,
        entity_registry_enabled_default=False,
        state_class=SensorStateClass.MEASUREMENT,
    ),
    PortainerEndpointSensorEntityDescription(
        key="containers_stopped",
        translation_key="containers_stopped",
        value_fn=lambda data: data.docker_info.containers_stopped,
        entity_category=EntityCategory.DIAGNOSTIC,
        entity_registry_enabled_default=False,
        state_class=SensorStateClass.MEASUREMENT,
    ),
    PortainerEndpointSensorEntityDescription(
        key="containers_paused",
        translation_key="containers_paused",
        value_fn=lambda data: data.docker_info.containers_paused,
        entity_category=EntityCategory.DIAGNOSTIC,
        entity_registry_enabled_default=False,
        state_class=SensorStateClass.MEASUREMENT,
    ),
    PortainerEndpointSensorEntityDescription(
        key="images_count",
        translation_key="images_count",
        value_fn=lambda data: data.docker_info.images,
        entity_category=EntityCategory.DIAGNOSTIC,
        entity_registry_enabled_default=False,
        state_class=SensorStateClass.MEASUREMENT,
    ),
    PortainerEndpointSensorEntityDescription(
        key="memory_total",
        translation_key="memory_total",
        value_fn=lambda data: data.docker_info.mem_total,
        device_class=SensorDeviceClass.DATA_SIZE,
        state_class=SensorStateClass.MEASUREMENT,
        native_unit_of_measurement=UnitOfInformation.BYTES,
        entity_category=EntityCategory.DIAGNOSTIC,
        entity_registry_enabled_default=False,
    ),
    PortainerEndpointSensorEntityDescription(
        key="cpu_total",
        translation_key="cpu_total",
        value_fn=lambda data: data.docker_info.ncpu,
        entity_category=EntityCategory.DIAGNOSTIC,
        entity_registry_enabled_default=False,
        state_class=SensorStateClass.MEASUREMENT,
    ),
)


async def async_setup_entry(
@@ -159,41 +38,29 @@ async def async_setup_entry(
) -> None:
    """Set up Portainer sensors based on a config entry."""
    coordinator = entry.runtime_data
    entities: list[SensorEntity] = []

    for endpoint in coordinator.data.values():
        entities.extend(
            PortainerEndpointSensor(
                coordinator,
                entity_description,
                endpoint,
            )
            for entity_description in ENDPOINT_SENSORS
    async_add_entities(
        PortainerContainerSensor(
            coordinator,
            entity_description,
            container,
            endpoint,
        )

        entities.extend(
            PortainerContainerSensor(
                coordinator,
                entity_description,
                container,
                endpoint,
            )
            for container in endpoint.containers.values()
            for entity_description in CONTAINER_SENSORS
        )

    async_add_entities(entities)
        for endpoint in coordinator.data.values()
        for container in endpoint.containers.values()
        for entity_description in CONTAINER_SENSORS
    )


class PortainerContainerSensor(PortainerContainerEntity, SensorEntity):
    """Representation of a Portainer container sensor."""

    entity_description: PortainerContainerSensorEntityDescription
    entity_description: PortainerSensorEntityDescription

    def __init__(
        self,
        coordinator: PortainerCoordinator,
        entity_description: PortainerContainerSensorEntityDescription,
        entity_description: PortainerSensorEntityDescription,
        device_info: DockerContainer,
        via_device: PortainerCoordinatorData,
    ) -> None:
@@ -209,37 +76,8 @@ class PortainerContainerSensor(PortainerContainerEntity, SensorEntity):
        return super().available and self.endpoint_id in self.coordinator.data

    @property
    def native_value(self) -> StateType:
    def native_value(self) -> str | None:
        """Return the state of the sensor."""
        return self.entity_description.value_fn(
            self.coordinator.data[self.endpoint_id].containers[self.device_id]
        )


class PortainerEndpointSensor(PortainerEndpointEntity, SensorEntity):
    """Representation of a Portainer endpoint sensor."""

    entity_description: PortainerEndpointSensorEntityDescription

    def __init__(
        self,
        coordinator: PortainerCoordinator,
        entity_description: PortainerEndpointSensorEntityDescription,
        device_info: PortainerCoordinatorData,
    ) -> None:
        """Initialize the Portainer endpoint sensor."""
        self.entity_description = entity_description
        super().__init__(device_info, coordinator)

        self._attr_unique_id = f"{coordinator.config_entry.entry_id}_{device_info.id}_{entity_description.key}"

    @property
    def available(self) -> bool:
        """Return if the device is available."""
        return super().available and self.device_id in self.coordinator.data

    @property
    def native_value(self) -> StateType:
        """Return the state of the sensor."""
        endpoint_data = self.coordinator.data[self._device_info.endpoint.id]
        return self.entity_description.value_fn(endpoint_data)
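Editor's note: a minimal illustration of the description-driven pattern above, where the entity stores only keys and defers to the description's `value_fn` at read time. The stub classes stand in for pyportainer's models and are not its API:

```python
from dataclasses import dataclass


@dataclass
class DockerInfoStub:
    containers_running: int


@dataclass
class EndpointDataStub:
    docker_info: DockerInfoStub


def containers_running(data: EndpointDataStub) -> int:
    """value_fn for a containers_running endpoint sensor."""
    return data.docker_info.containers_running


# The sensor's native_value is just value_fn applied to the latest data:
print(containers_running(EndpointDataStub(DockerInfoStub(3))))  # -> 3
```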
@@ -49,45 +49,6 @@
    "sensor": {
      "image": {
        "name": "Image"
      },
      "operating_system": {
        "name": "Operating system"
      },
      "operating_system_version": {
        "name": "Operating system version"
      },
      "api_version": {
        "name": "API version"
      },
      "kernel_version": {
        "name": "Kernel version"
      },
      "architecture": {
        "name": "Architecture"
      },
      "containers_running": {
        "name": "Containers running"
      },
      "containers_stopped": {
        "name": "Containers stopped"
      },
      "containers_paused": {
        "name": "Containers paused"
      },
      "images_count": {
        "name": "Image count"
      },
      "containers_count": {
        "name": "Container count"
      },
      "memory_total": {
        "name": "Total memory"
      },
      "docker_version": {
        "name": "Docker version"
      },
      "cpu_total": {
        "name": "Total CPU"
      }
    },
    "switch": {

@@ -26,7 +26,6 @@ from .const import (
    DEFAULT_ALIAS,
    DEFAULT_NAME,
    DOMAIN,
    PS4_DOCS_URL,
)

CONF_MODE = "Config Mode"
@@ -67,10 +66,7 @@ class PlayStation4FlowHandler(ConfigFlow, domain=DOMAIN):
        failed = await self.hass.async_add_executor_job(self.helper.port_bind, ports)
        if failed in ports:
            reason = PORT_MSG[failed]
            return self.async_abort(
                reason=reason,
                description_placeholders={"ps4_docs_url": PS4_DOCS_URL},
            )
            return self.async_abort(reason=reason)
        return await self.async_step_creds()

    async def async_step_creds(
@@ -89,11 +85,7 @@ class PlayStation4FlowHandler(ConfigFlow, domain=DOMAIN):
            except CredentialTimeout:
                errors["base"] = "credential_timeout"

        return self.async_show_form(
            step_id="creds",
            errors=errors,
            description_placeholders={"ps4_docs_url": PS4_DOCS_URL},
        )
        return self.async_show_form(step_id="creds", errors=errors)

    async def async_step_mode(
        self, user_input: dict[str, Any] | None = None

@@ -9,8 +9,6 @@ from homeassistant.util.hass_dict import HassKey
if TYPE_CHECKING:
    from . import PS4Data

PS4_DOCS_URL = "https://www.home-assistant.io/components/ps4/"

ATTR_MEDIA_IMAGE_URL = "media_image_url"
CONFIG_ENTRY_VERSION = 3
DEFAULT_NAME = "PlayStation 4"

@@ -2,13 +2,7 @@
  "config": {
    "step": {
      "creds": {
        "description": "Credentials needed. Select **Submit** and then in the PS4 2nd Screen App, refresh devices and select the **Home-Assistant** device to continue.",
        "data": {
          "token": "PSN Token"
        },
        "data_description": {
          "token": "To get your PSN token, please follow these [instructions]({ps4_docs_url})."
        }
        "description": "Credentials needed. Select **Submit** and then in the PS4 2nd Screen App, refresh devices and select the **Home-Assistant** device to continue."
      },
      "mode": {
        "data": {
@@ -41,8 +35,8 @@
      "credential_error": "Error fetching credentials.",
      "no_devices_found": "[%key:common::config_flow::abort::no_devices_found%]",
      "already_configured": "[%key:common::config_flow::abort::already_configured_device%]",
      "port_987_bind_error": "Could not bind to port 987. Refer to the [documentation]({ps4_docs_url}) for additional info.",
      "port_997_bind_error": "Could not bind to port 997. Refer to the [documentation]({ps4_docs_url}) for additional info."
      "port_987_bind_error": "Could not bind to port 987. Refer to the [documentation](https://www.home-assistant.io/components/ps4/) for additional info.",
      "port_997_bind_error": "Could not bind to port 997. Refer to the [documentation](https://www.home-assistant.io/components/ps4/) for additional info."
    }
  },
  "services": {

@@ -1,7 +1,5 @@
"""Base entity for the Pterodactyl integration."""

from yarl import URL

from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_URL
from homeassistant.helpers.device_registry import DeviceInfo
@@ -35,9 +33,7 @@ class PterodactylEntity(CoordinatorEntity[PterodactylCoordinator]):
            name=self.game_server_data.name,
            model=self.game_server_data.name,
            model_id=self.game_server_data.uuid,
            configuration_url=str(
                URL(config_entry.data[CONF_URL]) / "server" / identifier
            ),
            configuration_url=f"{config_entry.data[CONF_URL]}/server/{identifier}",
        )

    @property
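Editor's note: a quick illustration of why the yarl-based `configuration_url` above is safer than the f-string variant. URL division normalizes slashes, so a user-supplied base URL with a trailing slash does not produce a double slash; the host name here is a made-up example:

```python
from yarl import URL

base = "https://panel.example.com/"
identifier = "abc123"

print(f"{base}/server/{identifier}")           # https://panel.example.com//server/abc123
print(str(URL(base) / "server" / identifier))  # https://panel.example.com/server/abc123
```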
@@ -5,5 +5,8 @@
  "documentation": "https://www.home-assistant.io/integrations/python_script",
  "loggers": ["RestrictedPython"],
  "quality_scale": "internal",
  "requirements": ["RestrictedPython==8.1"]
  "requirements": [
    "RestrictedPython==8.0;python_version<'3.14'",
    "RestrictedPython==8.1a1.dev0;python_version>='3.14'"
  ]
}
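Editor's note: the split requirement above uses PEP 508 environment markers, which pip evaluates against the running interpreter at install time, so exactly one of the two pins is selected. A small check with the `packaging` library illustrates the evaluation:

```python
from packaging.markers import Marker

marker = Marker("python_version < '3.14'")
# True on Python 3.13 and earlier, False on 3.14+.
print(marker.evaluate())
```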
@@ -30,7 +30,6 @@ from homeassistant.const import (
    UnitOfEnergy,
    UnitOfFrequency,
    UnitOfPower,
    UnitOfPrecipitationDepth,
    UnitOfPressure,
    UnitOfTemperature,
    UnitOfTime,
@@ -1492,27 +1491,6 @@ RPC_SENSORS: Final = {
        device_class=SensorDeviceClass.ENUM,
        options=["dark", "twilight", "bright"],
    ),
    "number_average_temperature": RpcSensorDescription(
        key="number",
        sub_key="value",
        native_unit_of_measurement=UnitOfTemperature.CELSIUS,
        suggested_display_precision=1,
        device_class=SensorDeviceClass.TEMPERATURE,
        role="average_temperature",
        removal_condition=lambda config, _s, _k: not config.get("service:0", {}).get(
            "weather_api", False
        ),
    ),
    "number_last_precipitation": RpcSensorDescription(
        key="number",
        sub_key="value",
        native_unit_of_measurement=UnitOfPrecipitationDepth.MILLIMETERS,
        device_class=SensorDeviceClass.PRECIPITATION,
        role="last_precipitation",
        removal_condition=lambda config, _s, _k: not config.get("service:0", {}).get(
            "weather_api", False
        ),
    ),
    "number_current_humidity": RpcSensorDescription(
        key="number",
        sub_key="value",

@@ -137,15 +137,15 @@
            "btn_down": "Button down",
            "btn_up": "Button up",
            "double_push": "Double push",
            "double": "[%key:component::shelly::entity::event::input::state_attributes::event_type::state::double_push%]",
            "double": "Double push",
            "long_push": "Long push",
            "long_single": "Long push and then short push",
            "long": "[%key:component::shelly::entity::event::input::state_attributes::event_type::state::long_push%]",
            "long": "Long push",
            "single_long": "Short push and then long push",
            "single_push": "Single push",
            "single": "[%key:component::shelly::entity::event::input::state_attributes::event_type::state::single_push%]",
            "single": "Single push",
            "triple_push": "Triple push",
            "triple": "[%key:component::shelly::entity::event::input::state_attributes::event_type::state::triple_push%]"
            "triple": "Triple push"
          }
        }
      }
@@ -276,7 +276,7 @@
      "fix_flow": {
        "step": {
          "confirm": {
            "title": "[%key:component::shelly::issues::ble_scanner_firmware_unsupported::title%]",
            "title": "{device_name} is running unsupported firmware",
            "description": "Your Shelly device {device_name} with IP address {ip_address} is running firmware {firmware} and acts as BLE scanner with active mode. This firmware version is not supported for BLE scanner active mode.\n\nSelect **Submit** button to start the OTA update to the latest stable firmware version."
          }
        },
@@ -303,7 +303,7 @@
      "fix_flow": {
        "step": {
          "confirm": {
            "title": "[%key:component::shelly::issues::outbound_websocket_incorrectly_enabled::title%]",
            "title": "Outbound WebSocket is enabled for {device_name}",
            "description": "Your Shelly device {device_name} with IP address {ip_address} is a non-sleeping device and Outbound WebSocket should be disabled in its configuration.\n\nSelect **Submit** button to disable Outbound WebSocket."
          }
        },
@@ -317,7 +317,7 @@
      "fix_flow": {
        "step": {
          "confirm": {
            "title": "[%key:component::shelly::issues::deprecated_firmware::title%]",
            "title": "{device_name} is running outdated firmware",
            "description": "Your Shelly device {device_name} with IP address {ip_address} is running firmware {firmware}. This firmware version will not be supported by Shelly integration starting from Home Assistant {ha_version}.\n\nSelect **Submit** button to start the OTA update to the latest stable firmware version."
          }
        },
@@ -1,69 +1,153 @@
"""The SMA integration."""
"""The sma integration."""

from __future__ import annotations

from datetime import timedelta
import logging
from typing import TYPE_CHECKING

from pysma import SMAWebConnect
import pysma

from homeassistant.config_entries import ConfigEntry
from homeassistant.const import (
    ATTR_CONNECTIONS,
    CONF_HOST,
    CONF_MAC,
    CONF_PASSWORD,
    CONF_SCAN_INTERVAL,
    CONF_SSL,
    CONF_VERIFY_SSL,
    EVENT_HOMEASSISTANT_STOP,
    Platform,
)
from homeassistant.core import Event, HomeAssistant
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady
from homeassistant.helpers import device_registry as dr
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.device_registry import DeviceInfo
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed

from .const import CONF_GROUP
from .coordinator import SMADataUpdateCoordinator

PLATFORMS = [Platform.SENSOR]
from .const import (
    CONF_GROUP,
    DEFAULT_SCAN_INTERVAL,
    DOMAIN,
    PLATFORMS,
    PYSMA_COORDINATOR,
    PYSMA_DEVICE_INFO,
    PYSMA_OBJECT,
    PYSMA_REMOVE_LISTENER,
    PYSMA_SENSORS,
)

_LOGGER = logging.getLogger(__name__)


type SMAConfigEntry = ConfigEntry[SMADataUpdateCoordinator]


async def async_setup_entry(hass: HomeAssistant, entry: SMAConfigEntry) -> bool:
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
    """Set up sma from a config entry."""

    # Init the SMA interface
    protocol = "https" if entry.data[CONF_SSL] else "http"
    url = f"{protocol}://{entry.data[CONF_HOST]}"
    verify_ssl = entry.data[CONF_VERIFY_SSL]
    group = entry.data[CONF_GROUP]
    password = entry.data[CONF_PASSWORD]

    sma = SMAWebConnect(
        session=async_get_clientsession(
            hass=hass, verify_ssl=entry.data[CONF_VERIFY_SSL]
        ),
        url=url,
        password=entry.data[CONF_PASSWORD],
        group=entry.data[CONF_GROUP],
    session = async_get_clientsession(hass, verify_ssl=verify_ssl)
    sma = pysma.SMA(session, url, password, group)

    try:
        # Get updated device info
        sma_device_info = await sma.device_info()
        # Get all device sensors
        sensor_def = await sma.get_sensors()
    except (
        pysma.exceptions.SmaReadException,
        pysma.exceptions.SmaConnectionException,
    ) as exc:
        raise ConfigEntryNotReady from exc
    except pysma.exceptions.SmaAuthenticationException as exc:
        raise ConfigEntryAuthFailed from exc

    if TYPE_CHECKING:
        assert entry.unique_id

    # Create DeviceInfo object from sma_device_info
    device_info = DeviceInfo(
        configuration_url=url,
        identifiers={(DOMAIN, entry.unique_id)},
        manufacturer=sma_device_info["manufacturer"],
        model=sma_device_info["type"],
        name=sma_device_info["name"],
        sw_version=sma_device_info["sw_version"],
        serial_number=sma_device_info["serial"],
    )

    coordinator = SMADataUpdateCoordinator(hass, entry, sma)
    await coordinator.async_config_entry_first_refresh()
    # Add the MAC address to connections, if it comes via DHCP
    if CONF_MAC in entry.data:
        device_info[ATTR_CONNECTIONS] = {
            (dr.CONNECTION_NETWORK_MAC, entry.data[CONF_MAC])
        }

    # Define the coordinator
    async def async_update_data():
        """Update the used SMA sensors."""
        try:
            await sma.read(sensor_def)
        except (
            pysma.exceptions.SmaReadException,
            pysma.exceptions.SmaConnectionException,
        ) as exc:
            raise UpdateFailed(exc) from exc

    interval = timedelta(
        seconds=entry.options.get(CONF_SCAN_INTERVAL, DEFAULT_SCAN_INTERVAL)
    )

    coordinator = DataUpdateCoordinator(
        hass,
        _LOGGER,
        config_entry=entry,
        name="sma",
        update_method=async_update_data,
        update_interval=interval,
    )

    try:
        await coordinator.async_config_entry_first_refresh()
    except ConfigEntryNotReady:
        await sma.close_session()
        raise

    # Ensure we logout on shutdown
    async def async_close_session(event):
        """Close the session."""
        await sma.close_session()

    remove_stop_listener = hass.bus.async_listen_once(
        EVENT_HOMEASSISTANT_STOP, async_close_session
    )

    hass.data.setdefault(DOMAIN, {})
    hass.data[DOMAIN][entry.entry_id] = {
        PYSMA_OBJECT: sma,
        PYSMA_COORDINATOR: coordinator,
        PYSMA_SENSORS: sensor_def,
        PYSMA_REMOVE_LISTENER: remove_stop_listener,
        PYSMA_DEVICE_INFO: device_info,
    }

    entry.runtime_data = coordinator
    await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)

    # Ensure the SMA session closes when Home Assistant stops
    async def _async_handle_shutdown(event: Event) -> None:
        await coordinator.async_close_sma_session()

    entry.async_on_unload(
        hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, _async_handle_shutdown)
    )

    return True


async def async_unload_entry(hass: HomeAssistant, entry: SMAConfigEntry) -> bool:
async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
    """Unload a config entry."""
    return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
    unload_ok = await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
    if unload_ok:
        data = hass.data[DOMAIN].pop(entry.entry_id)
        await data[PYSMA_OBJECT].close_session()
        data[PYSMA_REMOVE_LISTENER]()

    return unload_ok


async def async_migrate_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
@@ -72,6 +156,7 @@ async def async_migrate_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
    _LOGGER.debug("Migrating from version %s", entry.version)

    if entry.version == 1:
        # 1 -> 2: Unique ID from integer to string
        if entry.minor_version == 1:
            minor_version = 2
            hass.config_entries.async_update_entry(
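Editor's note: the rewritten setup above stores the coordinator in `entry.runtime_data` behind a typed alias instead of `hass.data[DOMAIN]`. A minimal sketch of that pattern with a stand-in coordinator class, not the SMA implementation:

```python
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant


class StubCoordinator:
    """Stand-in for an integration's DataUpdateCoordinator."""

    def __init__(self, hass: HomeAssistant, entry: ConfigEntry) -> None:
        self.entry = entry


type StubConfigEntry = ConfigEntry[StubCoordinator]


async def async_setup_entry(hass: HomeAssistant, entry: StubConfigEntry) -> bool:
    entry.runtime_data = StubCoordinator(hass, entry)
    # Platforms later read entry.runtime_data with full type information,
    # and it is discarded automatically when the entry unloads.
    return True
```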
||||
@@ -6,13 +6,7 @@ from collections.abc import Mapping
|
||||
import logging
|
||||
from typing import Any
|
||||
|
||||
import attrs
|
||||
from pysma import (
|
||||
SmaAuthenticationException,
|
||||
SmaConnectionException,
|
||||
SmaReadException,
|
||||
SMAWebConnect,
|
||||
)
|
||||
import pysma
|
||||
import voluptuous as vol
|
||||
from yarl import URL
|
||||
|
||||
@@ -48,7 +42,7 @@ async def validate_input(
|
||||
host = data[CONF_HOST] if data is not None else user_input[CONF_HOST]
|
||||
url = URL.build(scheme=protocol, host=host)
|
||||
|
||||
sma = SMAWebConnect(
|
||||
sma = pysma.SMA(
|
||||
session, str(url), user_input[CONF_PASSWORD], group=user_input[CONF_GROUP]
|
||||
)
|
||||
|
||||
@@ -57,7 +51,7 @@ async def validate_input(
|
||||
device_info = await sma.device_info()
|
||||
await sma.close_session()
|
||||
|
||||
return attrs.asdict(device_info)
|
||||
return device_info
|
||||
|
||||
|
||||
class SmaConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
@@ -96,11 +90,11 @@ class SmaConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
device_info = await validate_input(
|
||||
self.hass, user_input=user_input, data=self._data
|
||||
)
|
||||
except SmaConnectionException:
|
||||
except pysma.exceptions.SmaConnectionException:
|
||||
errors["base"] = "cannot_connect"
|
||||
except SmaAuthenticationException:
|
||||
except pysma.exceptions.SmaAuthenticationException:
|
||||
errors["base"] = "invalid_auth"
|
||||
except SmaReadException:
|
||||
except pysma.exceptions.SmaReadException:
|
||||
errors["base"] = "cannot_retrieve_device_info"
|
||||
except Exception:
|
||||
_LOGGER.exception("Unexpected exception")
|
||||
|
||||
@@ -1,5 +1,7 @@
|
||||
"""Constants for the sma integration."""
|
||||
|
||||
from homeassistant.const import Platform
|
||||
|
||||
DOMAIN = "sma"
|
||||
|
||||
PYSMA_COORDINATOR = "coordinator"
|
||||
@@ -8,6 +10,7 @@ PYSMA_REMOVE_LISTENER = "remove_listener"
|
||||
PYSMA_SENSORS = "pysma_sensors"
|
||||
PYSMA_DEVICE_INFO = "device_info"
|
||||
|
||||
PLATFORMS = [Platform.SENSOR]
|
||||
|
||||
CONF_GROUP = "group"
|
||||
|
||||
|
||||
@@ -1,114 +0,0 @@
"""Coordinator for the SMA integration."""

from __future__ import annotations

from dataclasses import dataclass
from datetime import timedelta
import logging

from pysma import (
    SmaAuthenticationException,
    SmaConnectionException,
    SmaReadException,
    SMAWebConnect,
)
from pysma.helpers import DeviceInfo
from pysma.sensor import Sensors

from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_SCAN_INTERVAL
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed

from .const import DEFAULT_SCAN_INTERVAL, DOMAIN

_LOGGER = logging.getLogger(__name__)


@dataclass(slots=True)
class SMACoordinatorData:
    """Data class for SMA sensors."""

    sma_device_info: DeviceInfo
    sensors: Sensors


class SMADataUpdateCoordinator(DataUpdateCoordinator[SMACoordinatorData]):
    """Data Update Coordinator for SMA."""

    config_entry: ConfigEntry

    def __init__(
        self,
        hass: HomeAssistant,
        config_entry: ConfigEntry,
        sma: SMAWebConnect,
    ) -> None:
        """Initialize the SMA Data Update Coordinator."""
        super().__init__(
            hass,
            _LOGGER,
            config_entry=config_entry,
            name=DOMAIN,
            update_interval=timedelta(
                seconds=config_entry.options.get(
                    CONF_SCAN_INTERVAL, DEFAULT_SCAN_INTERVAL
                )
            ),
        )
        self.sma = sma
        self._sma_device_info = DeviceInfo()
        self._sensors = Sensors()

    async def _async_setup(self) -> None:
        """Setup the SMA Data Update Coordinator."""
        try:
            self._sma_device_info = await self.sma.device_info()
            self._sensors = await self.sma.get_sensors()
        except (
            SmaReadException,
            SmaConnectionException,
        ) as err:
            await self.async_close_sma_session()
            raise ConfigEntryNotReady(
                translation_domain=DOMAIN,
                translation_key="cannot_connect",
                translation_placeholders={"error": repr(err)},
            ) from err
        except SmaAuthenticationException as err:
            raise ConfigEntryAuthFailed(
                translation_domain=DOMAIN,
                translation_key="invalid_auth",
                translation_placeholders={"error": repr(err)},
            ) from err

    async def _async_update_data(self) -> SMACoordinatorData:
        """Update the used SMA sensors."""
        try:
            await self.sma.read(self._sensors)
        except (
            SmaReadException,
            SmaConnectionException,
        ) as err:
            raise UpdateFailed(
                translation_domain=DOMAIN,
                translation_key="cannot_connect",
                translation_placeholders={"error": repr(err)},
            ) from err
        except SmaAuthenticationException as err:
            raise ConfigEntryAuthFailed(
                translation_domain=DOMAIN,
                translation_key="invalid_auth",
                translation_placeholders={"error": repr(err)},
            ) from err

        return SMACoordinatorData(
            sma_device_info=self._sma_device_info,
            sensors=self._sensors,
        )

    async def async_close_sma_session(self) -> None:
        """Close the SMA session."""
        await self.sma.close_session()
        _LOGGER.debug("SMA session closed")
@@ -13,5 +13,5 @@
  "documentation": "https://www.home-assistant.io/integrations/sma",
  "iot_class": "local_polling",
  "loggers": ["pysma"],
  "requirements": ["pysma==1.0.2"]
  "requirements": ["pysma==0.7.5"]
}
@@ -2,7 +2,9 @@

from __future__ import annotations

from pysma.sensor import Sensor
from typing import TYPE_CHECKING

import pysma

from homeassistant.components.sensor import (
    SensorDeviceClass,
@@ -10,9 +12,8 @@ from homeassistant.components.sensor import (
    SensorEntityDescription,
    SensorStateClass,
)
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import (
    CONF_HOST,
    CONF_SSL,
    PERCENTAGE,
    EntityCategory,
    UnitOfApparentPower,
@@ -28,11 +29,12 @@ from homeassistant.core import HomeAssistant
from homeassistant.helpers.device_registry import DeviceInfo
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from homeassistant.helpers.typing import StateType
from homeassistant.helpers.update_coordinator import CoordinatorEntity
from homeassistant.helpers.update_coordinator import (
    CoordinatorEntity,
    DataUpdateCoordinator,
)

from . import SMAConfigEntry
from .const import DOMAIN
from .coordinator import SMADataUpdateCoordinator
from .const import DOMAIN, PYSMA_COORDINATOR, PYSMA_DEVICE_INFO, PYSMA_SENSORS

SENSOR_ENTITIES: dict[str, SensorEntityDescription] = {
    "status": SensorEntityDescription(
@@ -835,32 +837,41 @@ SENSOR_ENTITIES: dict[str, SensorEntityDescription] = {

async def async_setup_entry(
    hass: HomeAssistant,
    entry: SMAConfigEntry,
    config_entry: ConfigEntry,
    async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
    """Setup SMA sensors."""
    coordinator = entry.runtime_data
    """Set up SMA sensors."""
    sma_data = hass.data[DOMAIN][config_entry.entry_id]

    coordinator = sma_data[PYSMA_COORDINATOR]
    used_sensors = sma_data[PYSMA_SENSORS]
    device_info = sma_data[PYSMA_DEVICE_INFO]

    if TYPE_CHECKING:
        assert config_entry.unique_id

    async_add_entities(
        SMAsensor(
            coordinator,
            config_entry.unique_id,
            SENSOR_ENTITIES.get(sensor.name),
            device_info,
            sensor,
            entry,
        )
        for sensor in coordinator.data.sensors
        for sensor in used_sensors
    )


class SMAsensor(CoordinatorEntity[SMADataUpdateCoordinator], SensorEntity):
class SMAsensor(CoordinatorEntity, SensorEntity):
    """Representation of a SMA sensor."""

    def __init__(
        self,
        coordinator: SMADataUpdateCoordinator,
        coordinator: DataUpdateCoordinator,
        config_entry_unique_id: str,
        description: SensorEntityDescription | None,
        pysma_sensor: Sensor,
        entry: SMAConfigEntry,
        device_info: DeviceInfo,
        pysma_sensor: pysma.sensor.Sensor,
    ) -> None:
        """Initialize the sensor."""
        super().__init__(coordinator)
@@ -869,24 +880,11 @@ class SMAsensor(CoordinatorEntity[SMADataUpdateCoordinator], SensorEntity):
        else:
            self._attr_name = pysma_sensor.name

        protocol = "https" if entry.data[CONF_SSL] else "http"
        url = f"{protocol}://{entry.data[CONF_HOST]}"

        self._sensor = pysma_sensor
        self._serial = coordinator.data.sma_device_info.serial
        assert entry.unique_id

        self._attr_device_info = DeviceInfo(
            configuration_url=url,
            identifiers={(DOMAIN, entry.unique_id)},
            manufacturer=coordinator.data.sma_device_info.manufacturer,
            model=coordinator.data.sma_device_info.type,
            name=coordinator.data.sma_device_info.name,
            sw_version=coordinator.data.sma_device_info.sw_version,
            serial_number=coordinator.data.sma_device_info.serial,
        )
        self._attr_device_info = device_info
        self._attr_unique_id = (
            f"{entry.unique_id}-{pysma_sensor.key}_{pysma_sensor.key_idx}"
            f"{config_entry_unique_id}-{pysma_sensor.key}_{pysma_sensor.key_idx}"
        )

        # Set sensor enabled to False.
@@ -903,14 +901,6 @@ class SMAsensor(CoordinatorEntity[SMADataUpdateCoordinator], SensorEntity):

        return f"{name_prefix} {super().name}"

    @property
    def available(self) -> bool:
        """Return if the device is available."""
        return (
            super().available
            and self._serial == self.coordinator.data.sma_device_info.serial
        )

    @property
    def native_value(self) -> StateType:
        """Return the state of the sensor."""
@@ -3,18 +3,14 @@
from __future__ import annotations

import socket
from typing import TYPE_CHECKING, Any
from typing import Any

from aiohttp import ClientError, ClientResponseError
import aiosolaredge
from solaredge_web import SolarEdgeWeb
import voluptuous as vol

from homeassistant.config_entries import (
    SOURCE_RECONFIGURE,
    ConfigFlow,
    ConfigFlowResult,
)
from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
from homeassistant.const import CONF_API_KEY, CONF_NAME, CONF_PASSWORD, CONF_USERNAME
from homeassistant.core import callback
from homeassistant.data_entry_flow import section
@@ -95,28 +91,17 @@ class SolarEdgeConfigFlow(ConfigFlow, domain=DOMAIN):
    async def async_step_user(
        self, user_input: dict[str, Any] | None = None
    ) -> ConfigFlowResult:
        """Step when user initializes an integration or reconfigures it."""
        """Step when user initializes an integration."""
        self._errors = {}
        entry = None
        if self.source == SOURCE_RECONFIGURE:
            entry = self._get_reconfigure_entry()

        if user_input is not None:
            name = slugify(user_input.get(CONF_NAME, DEFAULT_NAME))
            if self.source == SOURCE_RECONFIGURE:
                if TYPE_CHECKING:
                    assert entry
                site_id = entry.data[CONF_SITE_ID]
            else:
                site_id = user_input[CONF_SITE_ID]
            site_id = user_input[CONF_SITE_ID]
            api_auth = user_input.get(CONF_SECTION_API_AUTH, {})
            web_auth = user_input.get(CONF_SECTION_WEB_AUTH, {})
            api_key = api_auth.get(CONF_API_KEY)
            username = web_auth.get(CONF_USERNAME)

            if self.source != SOURCE_RECONFIGURE and self._site_in_configuration_exists(
                site_id
            ):
            if self._site_in_configuration_exists(site_id):
                self._errors[CONF_SITE_ID] = "already_configured"
            elif not api_key and not username:
                self._errors["base"] = "auth_missing"
@@ -135,92 +120,54 @@ class SolarEdgeConfigFlow(ConfigFlow, domain=DOMAIN):
                data = {CONF_SITE_ID: site_id}
                data.update(api_auth)
                data.update(web_auth)

                if self.source == SOURCE_RECONFIGURE:
                    if TYPE_CHECKING:
                        assert entry
                    return self.async_update_reload_and_abort(entry, data=data)

                return self.async_create_entry(title=name, data=data)
        elif self.source == SOURCE_RECONFIGURE:
            if TYPE_CHECKING:
                assert entry
            user_input = {
                CONF_SECTION_API_AUTH: {CONF_API_KEY: entry.data.get(CONF_API_KEY, "")},
                CONF_SECTION_WEB_AUTH: {
                    CONF_USERNAME: entry.data.get(CONF_USERNAME, ""),
                    CONF_PASSWORD: entry.data.get(CONF_PASSWORD, ""),
                },
            }
        else:
            user_input = {}

        data_schema_dict: dict[vol.Marker, Any] = {}
        if self.source != SOURCE_RECONFIGURE:
            data_schema_dict[
                vol.Required(CONF_NAME, default=user_input.get(CONF_NAME, DEFAULT_NAME))
            ] = str
            data_schema_dict[
                vol.Required(CONF_SITE_ID, default=user_input.get(CONF_SITE_ID, ""))
            ] = str

        data_schema_dict.update(
            {
                vol.Optional(CONF_SECTION_API_AUTH): section(
                    vol.Schema(
                        {
                            vol.Optional(
                                CONF_API_KEY,
                                default=user_input.get(CONF_SECTION_API_AUTH, {}).get(
                                    CONF_API_KEY, ""
                                ),
                            ): str,
                        }
                    ),
                    options={"collapsed": False},
                ),
                vol.Optional(CONF_SECTION_WEB_AUTH): section(
                    vol.Schema(
                        {
                            vol.Inclusive(
                                CONF_USERNAME,
                                "web_account",
                                default=user_input.get(CONF_SECTION_WEB_AUTH, {}).get(
                                    CONF_USERNAME, ""
                                ),
                            ): str,
                            vol.Inclusive(
                                CONF_PASSWORD,
                                "web_account",
                                default=user_input.get(CONF_SECTION_WEB_AUTH, {}).get(
                                    CONF_PASSWORD, ""
                                ),
                            ): str,
                        }
                    ),
                    options={"collapsed": False},
                ),
            }
        )
        data_schema = vol.Schema(data_schema_dict)

        step_id = "user"
        description_placeholders = {}
        if self.source == SOURCE_RECONFIGURE:
            if TYPE_CHECKING:
                assert entry
            step_id = "reconfigure"
            description_placeholders["site_id"] = entry.data[CONF_SITE_ID]

        return self.async_show_form(
            step_id=step_id,
            data_schema=data_schema,
            step_id="user",
            data_schema=vol.Schema(
                {
                    vol.Required(
                        CONF_NAME, default=user_input.get(CONF_NAME, DEFAULT_NAME)
                    ): str,
                    vol.Required(
                        CONF_SITE_ID, default=user_input.get(CONF_SITE_ID, "")
                    ): str,
                    vol.Optional(CONF_SECTION_API_AUTH): section(
                        vol.Schema(
                            {
                                vol.Optional(
                                    CONF_API_KEY,
                                    default=user_input.get(
                                        CONF_SECTION_API_AUTH, {}
                                    ).get(CONF_API_KEY, ""),
                                ): str,
                            }
                        ),
                        options={"collapsed": False},
                    ),
                    vol.Optional(CONF_SECTION_WEB_AUTH): section(
                        vol.Schema(
                            {
                                vol.Inclusive(
                                    CONF_USERNAME,
                                    "web_account",
                                    default=user_input.get(
                                        CONF_SECTION_WEB_AUTH, {}
                                    ).get(CONF_USERNAME, ""),
                                ): str,
                                vol.Inclusive(
                                    CONF_PASSWORD,
                                    "web_account",
                                    default=user_input.get(
                                        CONF_SECTION_WEB_AUTH, {}
                                    ).get(CONF_PASSWORD, ""),
                                ): str,
                            }
                        ),
                        options={"collapsed": False},
                    ),
                }
            ),
            errors=self._errors,
            description_placeholders=description_placeholders,
        )

    async def async_step_reconfigure(
        self, user_input: dict[str, Any] | None = None
    ) -> ConfigFlowResult:
        """Handle a reconfiguration flow initiated by the user."""
        return await self.async_step_user(user_input)
@@ -33,37 +33,6 @@
          }
        }
      }
    },
    "reconfigure": {
      "title": "Reconfigure SolarEdge",
      "description": "Update your API key or web account credentials for site {site_id}.",
      "data": {
        "api_key": "[%key:common::config_flow::data::api_key%]",
        "username": "[%key:common::config_flow::data::username%]",
        "password": "[%key:common::config_flow::data::password%]"
      },
      "data_description": {
        "api_key": "[%key:component::solaredge::config::step::user::data_description::api_key%]",
        "username": "[%key:component::solaredge::config::step::user::data_description::username%]",
        "password": "[%key:component::solaredge::config::step::user::data_description::password%]"
      },
      "sections": {
        "api_auth": {
          "name": "[%key:component::solaredge::config::step::user::sections::api_auth::name%]",
          "description": "[%key:component::solaredge::config::step::user::sections::api_auth::description%]",
          "data": {
            "api_key": "[%key:common::config_flow::data::api_key%]"
          }
        },
        "web_auth": {
          "name": "[%key:component::solaredge::config::step::user::sections::web_auth::name%]",
          "description": "[%key:component::solaredge::config::step::user::sections::web_auth::description%]",
          "data": {
            "username": "[%key:common::config_flow::data::username%]",
            "password": "[%key:common::config_flow::data::password%]"
          }
        }
      }
    }
  },
  "error": {
@@ -76,8 +45,7 @@
      "auth_missing": "You must provide either an API key or a username and password."
    },
    "abort": {
      "already_configured": "[%key:common::config_flow::abort::already_configured_device%]",
      "reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]"
      "already_configured": "[%key:common::config_flow::abort::already_configured_device%]"
    }
  },
  "entity": {
@@ -39,7 +39,6 @@ from .const import (
    DOMAIN,
    PLATFORMS,
)
from .services import async_setup_services
from .util import redact_credentials, validate_sql_select

_LOGGER = logging.getLogger(__name__)
@@ -72,8 +71,6 @@ CONFIG_SCHEMA = vol.Schema(

async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
    """Set up SQL from yaml config."""
    async_setup_services(hass)

    if (conf := config.get(DOMAIN)) is None:
        return True


@@ -1,7 +0,0 @@
{
  "services": {
    "query": {
      "service": "mdi:database-search"
    }
  }
}
@@ -1,131 +0,0 @@
"""Services for the SQL integration."""

from __future__ import annotations

import datetime
import decimal
import logging

from sqlalchemy.engine import Result
from sqlalchemy.exc import SQLAlchemyError
from sqlalchemy.orm import Session
import voluptuous as vol

from homeassistant.components.recorder import CONF_DB_URL, get_instance
from homeassistant.core import (
    HomeAssistant,
    ServiceCall,
    ServiceResponse,
    SupportsResponse,
    callback,
)
from homeassistant.exceptions import ServiceValidationError
from homeassistant.helpers import config_validation as cv
from homeassistant.util.json import JsonValueType

from .const import CONF_QUERY, DOMAIN
from .util import (
    async_create_sessionmaker,
    generate_lambda_stmt,
    redact_credentials,
    resolve_db_url,
    validate_query,
    validate_sql_select,
)

_LOGGER = logging.getLogger(__name__)

SERVICE_QUERY = "query"
SERVICE_QUERY_SCHEMA = vol.Schema(
    {
        vol.Required(CONF_QUERY): vol.All(cv.string, validate_sql_select),
        vol.Optional(CONF_DB_URL): cv.string,
    }
)


async def _async_query_service(
    call: ServiceCall,
) -> ServiceResponse:
    """Execute a SQL query service and return the result."""
    db_url = resolve_db_url(call.hass, call.data.get(CONF_DB_URL))
    query_str = call.data[CONF_QUERY]
    (
        sessmaker,
        uses_recorder_db,
        use_database_executor,
    ) = await async_create_sessionmaker(call.hass, db_url)
    try:
        validate_query(call.hass, query_str, uses_recorder_db, None)
    except ValueError as err:
        raise ServiceValidationError(
            translation_domain=DOMAIN,
            translation_key="query_not_allowed",
            translation_placeholders={"error": str(err)},
        ) from err
    if sessmaker is None:
        raise ServiceValidationError(
            translation_domain=DOMAIN,
            translation_key="db_connection_failed",
            translation_placeholders={"db_url": redact_credentials(db_url)},
        )

    def _execute_and_convert_query() -> list[JsonValueType]:
        """Execute the query and return the results with converted types."""
        sess: Session = sessmaker()
        try:
            result: Result = sess.execute(generate_lambda_stmt(query_str))
        except SQLAlchemyError as err:
            _LOGGER.debug(
                "Error executing query %s: %s",
                query_str,
                redact_credentials(str(err)),
            )
            sess.rollback()
            raise
        else:
            rows: list[JsonValueType] = []
            for row in result.mappings():
                processed_row: dict[str, JsonValueType] = {}
                for key, value in row.items():
                    if isinstance(value, decimal.Decimal):
                        processed_row[key] = float(value)
                    elif isinstance(value, datetime.date):
                        processed_row[key] = value.isoformat()
                    elif isinstance(value, (bytes, bytearray)):
                        processed_row[key] = f"0x{value.hex()}"
                    else:
                        processed_row[key] = value
                rows.append(processed_row)
            return rows
        finally:
            sess.close()

    try:
        if use_database_executor:
            result = await get_instance(call.hass).async_add_executor_job(
                _execute_and_convert_query
            )
        else:
            result = await call.hass.async_add_executor_job(_execute_and_convert_query)
    except SQLAlchemyError as err:
        raise ServiceValidationError(
            translation_domain=DOMAIN,
            translation_key="query_execution_error",
            translation_placeholders={"error": redact_credentials(str(err))},
        ) from err

    return {"result": result}


@callback
def async_setup_services(hass: HomeAssistant) -> None:
    """Set up the services for the SQL integration."""

    hass.services.async_register(
        DOMAIN,
        SERVICE_QUERY,
        _async_query_service,
        schema=SERVICE_QUERY_SCHEMA,
        supports_response=SupportsResponse.ONLY,
    )
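Because the removed service registers with `SupportsResponse.ONLY`, it can only be invoked with a response requested. A usage sketch, with the service data keys taken from `SERVICE_QUERY_SCHEMA` above and the query string purely illustrative:

# From an async context that has a HomeAssistant instance:
response = await hass.services.async_call(
    "sql",
    "query",
    {"query": "SELECT 1 AS value;"},
    blocking=True,
    return_response=True,  # required for SupportsResponse.ONLY services
)
# Per _async_query_service above, the response has the shape
# {"result": [{"value": 1}, ...]}, with Decimal, date, and bytes values
# converted to JSON-safe types.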
@@ -1,28 +0,0 @@
# Describes the format for services provided by the SQL integration.

query:
  fields:
    query:
      required: true
      example: |
        SELECT
          states.state,
          last_updated_ts
        FROM
          states
          INNER JOIN states_meta ON
            states.metadata_id = states_meta.metadata_id
        WHERE
          states_meta.entity_id = 'sun.sun'
        ORDER BY
          last_updated_ts DESC
        LIMIT
          10;
      selector:
        text:
          multiline: true
    db_url:
      required: false
      example: "sqlite:////config/home-assistant_v2.db"
      selector:
        text:
@@ -51,33 +51,6 @@
        }
      }
    },
    "exceptions": {
      "db_connection_failed": {
        "message": "Failed to connect to the database: {db_url}"
      },
      "query_execution_error": {
        "message": "An error occurred when executing the query: {error}"
      },
      "query_not_allowed": {
        "message": "The provided query is not allowed: {error}"
      }
    },
    "services": {
      "query": {
        "name": "Query",
        "description": "Executes a SQL query and returns the result.",
        "fields": {
          "query": {
            "name": "Query",
            "description": "The SELECT query to execute."
          },
          "db_url": {
            "name": "Database URL",
            "description": "The URL of the database to connect to. If not provided, the default Home Assistant recorder database will be used."
          }
        }
      }
    },
    "options": {
      "step": {
        "init": {
@@ -15,7 +15,6 @@ from . import SqueezeboxConfigEntry
from .const import SIGNAL_PLAYER_DISCOVERED
from .coordinator import SqueezeBoxPlayerUpdateCoordinator
from .entity import SqueezeboxEntity
from .util import safe_library_call

_LOGGER = logging.getLogger(__name__)

@@ -158,10 +157,4 @@ class SqueezeboxButtonEntity(SqueezeboxEntity, ButtonEntity):

    async def async_press(self) -> None:
        """Execute the button action."""
        await safe_library_call(
            self._player.async_query,
            "button",
            self.entity_description.press_action,
            translation_key="press_failed",
            translation_placeholders={"action": self.entity_description.press_action},
        )
        await self._player.async_query("button", self.entity_description.press_action)
@@ -70,7 +70,6 @@ from .const import (
)
from .coordinator import SqueezeBoxPlayerUpdateCoordinator
from .entity import SqueezeboxEntity
from .util import safe_library_call

if TYPE_CHECKING:
    from . import SqueezeboxConfigEntry
@@ -434,98 +433,58 @@ class SqueezeBoxMediaPlayerEntity(SqueezeboxEntity, MediaPlayerEntity):

    async def async_turn_off(self) -> None:
        """Turn off media player."""
        await safe_library_call(
            self._player.async_set_power, False, translation_key="turn_off_failed"
        )
        await self._player.async_set_power(False)
        await self.coordinator.async_refresh()

    async def async_set_volume_level(self, volume: float) -> None:
        """Set volume level, range 0..1."""
        volume_percent = str(round(volume * 100))
        await safe_library_call(
            self._player.async_set_volume,
            volume_percent,
            translation_key="set_volume_failed",
            translation_placeholders={"volume": volume_percent},
        )
        await self._player.async_set_volume(volume_percent)
        await self.coordinator.async_refresh()

    async def async_mute_volume(self, mute: bool) -> None:
        """Mute (true) or unmute (false) media player."""
        await safe_library_call(
            self._player.async_set_muting,
            mute,
            translation_key="set_mute_failed",
        )
        await self._player.async_set_muting(mute)
        await self.coordinator.async_refresh()

    async def async_media_stop(self) -> None:
        """Send stop command to media player."""
        await safe_library_call(
            self._player.async_stop,
            translation_key="stop_failed",
        )
        await self._player.async_stop()
        await self.coordinator.async_refresh()

    async def async_media_play_pause(self) -> None:
        """Send pause/play toggle command to media player."""
        await safe_library_call(
            self._player.async_toggle_pause,
            translation_key="play_pause_failed",
        )
        """Send pause command to media player."""
        await self._player.async_toggle_pause()
        await self.coordinator.async_refresh()

    async def async_media_play(self) -> None:
        """Send play command to media player."""
        await safe_library_call(
            self._player.async_play,
            translation_key="play_failed",
        )
        await self._player.async_play()
        await self.coordinator.async_refresh()

    async def async_media_pause(self) -> None:
        """Send pause command to media player."""
        await safe_library_call(
            self._player.async_pause,
            translation_key="pause_failed",
        )
        await self._player.async_pause()
        await self.coordinator.async_refresh()

    async def async_media_next_track(self) -> None:
        """Send next track command."""
        await safe_library_call(
            self._player.async_index,
            "+1",
            translation_key="next_track_failed",
        )
        await self._player.async_index("+1")
        await self.coordinator.async_refresh()

    async def async_media_previous_track(self) -> None:
        """Send previous track command."""
        await safe_library_call(
            self._player.async_index,
            "-1",
            translation_key="previous_track_failed",
        )
        """Send next track command."""
        await self._player.async_index("-1")
        await self.coordinator.async_refresh()

    async def async_media_seek(self, position: float) -> None:
        """Send seek command."""
        await safe_library_call(
            self._player.async_time,
            position,
            translation_key="seek_failed",
            translation_placeholders={"position": position},
        )
        await self._player.async_time(position)
        await self.coordinator.async_refresh()

    async def async_turn_on(self) -> None:
        """Turn the media player on."""
        await safe_library_call(
            self._player.async_set_power,
            True,
            translation_key="turn_on_failed",
        )
        await self._player.async_set_power(True)
        await self.coordinator.async_refresh()

    async def async_play_media(
@@ -564,7 +523,9 @@ class SqueezeBoxMediaPlayerEntity(SqueezeboxEntity, MediaPlayerEntity):
            raise ServiceValidationError(
                translation_domain=DOMAIN,
                translation_key="invalid_announce_media_type",
                translation_placeholders={"media_type": str(media_type)},
                translation_placeholders={
                    "media_type": str(media_type),
                },
            )

        extra = kwargs.get(ATTR_MEDIA_EXTRA, {})
@@ -575,7 +536,9 @@ class SqueezeBoxMediaPlayerEntity(SqueezeboxEntity, MediaPlayerEntity):
                raise ServiceValidationError(
                    translation_domain=DOMAIN,
                    translation_key="invalid_announce_volume",
                    translation_placeholders={"announce_volume": ATTR_ANNOUNCE_VOLUME},
                    translation_placeholders={
                        "announce_volume": ATTR_ANNOUNCE_VOLUME,
                    },
                ) from None
            else:
                self._player.set_announce_volume(announce_volume)
@@ -587,7 +550,7 @@ class SqueezeBoxMediaPlayerEntity(SqueezeboxEntity, MediaPlayerEntity):
                    translation_domain=DOMAIN,
                    translation_key="invalid_announce_timeout",
                    translation_placeholders={
                        "announce_timeout": ATTR_ANNOUNCE_TIMEOUT
                        "announce_timeout": ATTR_ANNOUNCE_TIMEOUT,
                    },
                ) from None
            else:
@@ -595,19 +558,15 @@ class SqueezeBoxMediaPlayerEntity(SqueezeboxEntity, MediaPlayerEntity):

        if media_type in MediaType.MUSIC:
            if not media_id.startswith(SQUEEZEBOX_SOURCE_STRINGS):
                # do not process special squeezebox "source" media ids
                media_id = async_process_play_media_url(self.hass, media_id)

            await safe_library_call(
                self._player.async_load_url,
                media_id,
                cmd,
                translation_key="load_url_failed",
                translation_placeholders={"media_id": media_id, "cmd": cmd},
            )
            await self._player.async_load_url(media_id, cmd)
            return

        if media_type == MediaType.PLAYLIST:
            try:
                # a saved playlist by number
                payload = {
                    "search_id": media_id,
                    "search_type": MediaType.PLAYLIST,
@@ -616,6 +575,7 @@ class SqueezeBoxMediaPlayerEntity(SqueezeboxEntity, MediaPlayerEntity):
                    self._player, payload, self.browse_limit, self._browse_data
                )
            except BrowseError:
                # a list of urls
                content = json.loads(media_id)
                playlist = content["urls"]
                index = content["index"]
@@ -627,19 +587,12 @@ class SqueezeBoxMediaPlayerEntity(SqueezeboxEntity, MediaPlayerEntity):
                playlist = await generate_playlist(
                    self._player, payload, self.browse_limit, self._browse_data
                )

        _LOGGER.debug("Generated playlist: %s", playlist)

        await safe_library_call(
            self._player.async_load_playlist,
            playlist,
            cmd,
            translation_key="load_playlist_failed",
            translation_placeholders={"cmd": cmd},
        )

        await self._player.async_load_playlist(playlist, cmd)
        if index is not None:
            await self._player.async_index(index)

        await self.coordinator.async_refresh()

    async def async_search_media(
@@ -719,29 +672,18 @@ class SqueezeBoxMediaPlayerEntity(SqueezeboxEntity, MediaPlayerEntity):
        else:
            repeat_mode = "none"

        await safe_library_call(
            self._player.async_set_repeat,
            repeat_mode,
            translation_key="set_repeat_failed",
        )
        await self._player.async_set_repeat(repeat_mode)
        await self.coordinator.async_refresh()

    async def async_set_shuffle(self, shuffle: bool) -> None:
        """Enable or disable shuffle mode."""
        """Enable/disable shuffle mode."""
        shuffle_mode = "song" if shuffle else "none"
        await safe_library_call(
            self._player.async_set_shuffle,
            shuffle_mode,
            translation_key="set_shuffle_failed",
        )
        await self._player.async_set_shuffle(shuffle_mode)
        await self.coordinator.async_refresh()

    async def async_clear_playlist(self) -> None:
        """Send the media player the command to clear the playlist."""
        await safe_library_call(
            self._player.async_clear_playlist,
            translation_key="clear_playlist_failed",
        )
        """Send the media player the command for clear playlist."""
        await self._player.async_clear_playlist()
        await self.coordinator.async_refresh()

    async def async_call_method(
@@ -750,18 +692,12 @@ class SqueezeBoxMediaPlayerEntity(SqueezeboxEntity, MediaPlayerEntity):
        """Call Squeezebox JSON/RPC method.

        Additional parameters are added to the command to form the list of
        positional parameters (p0, p1..., pN) passed to JSON/RPC server.
        positional parameters (p0, p1..., pN) passed to JSON/RPC server.
        """
        all_params = [command]
        if parameters:
            all_params.extend(parameters)

        await safe_library_call(
            self._player.async_query,
            *all_params,
            translation_key="call_method_failed",
            translation_placeholders={"command": command},
        )
        await self._player.async_query(*all_params)

    async def async_call_query(
        self, command: str, parameters: list[str] | None = None
@@ -769,18 +705,12 @@ class SqueezeBoxMediaPlayerEntity(SqueezeboxEntity, MediaPlayerEntity):
        """Call Squeezebox JSON/RPC method where we care about the result.

        Additional parameters are added to the command to form the list of
        positional parameters (p0, p1..., pN) passed to JSON/RPC server.
        positional parameters (p0, p1..., pN) passed to JSON/RPC server.
        """
        all_params = [command]
        if parameters:
            all_params.extend(parameters)

        self._query_result = await safe_library_call(
            self._player.async_query,
            *all_params,
            translation_key="call_query_failed",
            translation_placeholders={"command": command},
        )
        self._query_result = await self._player.async_query(*all_params)
        _LOGGER.debug("call_query got result %s", self._query_result)
        self.async_write_ha_state()

@@ -814,10 +744,7 @@ class SqueezeBoxMediaPlayerEntity(SqueezeboxEntity, MediaPlayerEntity):

    async def async_unjoin_player(self) -> None:
        """Unsync this Squeezebox player."""
        await safe_library_call(
            self._player.async_unsync,
            translation_key="unjoin_failed",
        )
        await self._player.async_unsync()
        await self.coordinator.async_refresh()

    def get_synthetic_id_and_cache_url(self, url: str) -> str:
@@ -881,19 +808,14 @@ class SqueezeBoxMediaPlayerEntity(SqueezeboxEntity, MediaPlayerEntity):
            image_url = self._synthetic_media_browser_thumbnail_items.get(
                media_image_id
            )

            if image_url is None:
                _LOGGER.debug("Synthetic ID %s not found in cache", media_image_id)
                return (None, None)
        else:
            image_url = await safe_library_call(
                self._player.generate_image_url_from_track_id,
                media_image_id,
                translation_key="generate_image_url_failed",
                translation_placeholders={"track_id": media_image_id},
            )
            image_url = self._player.generate_image_url_from_track_id(media_image_id)

        result = await self._async_fetch_image(image_url)
        if result == (None, None):
            _LOGGER.debug("Error retrieving proxied album art from %s", image_url)

        return result
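The media player hunks above drop every call through `safe_library_call`, which is imported from `.util` but whose definition is not part of this diff. As an assumption inferred from the call sites and the `*_failed` translation keys (both the signature and the caught exception types are guesses), it is presumably a thin wrapper along these lines:

from collections.abc import Awaitable, Callable
from typing import Any

from homeassistant.exceptions import HomeAssistantError

DOMAIN = "squeezebox"  # illustrative


async def safe_library_call(
    func: Callable[..., Awaitable[Any]],
    *args: Any,
    translation_key: str,
    translation_placeholders: dict[str, str] | None = None,
) -> Any:
    """Run a pysqueezebox call, mapping library errors to a translated error."""
    try:
        return await func(*args)
    except Exception as err:  # assumed: the real helper catches narrower types
        raise HomeAssistantError(
            translation_domain=DOMAIN,
            translation_key=translation_key,
            translation_placeholders=translation_placeholders,
        ) from err

Note that one call site above passes a synchronous method (`generate_image_url_from_track_id`), so the real helper presumably also accepts non-awaitable callables.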
@@ -207,69 +207,6 @@
    },
    "invalid_search_media_content_type": {
      "message": "If specified, Media content type must be one of {media_content_type}"
    },
    "turn_on_failed": {
      "message": "Failed to turn on the player."
    },
    "turn_off_failed": {
      "message": "Failed to turn off the player."
    },
    "set_shuffle_failed": {
      "message": "Failed to set shuffle mode."
    },
    "set_volume_failed": {
      "message": "Failed to set volume to {volume}%."
    },
    "set_mute_failed": {
      "message": "Failed to mute/unmute the player."
    },
    "stop_failed": {
      "message": "Failed to stop playback."
    },
    "play_pause_failed": {
      "message": "Failed to toggle play/pause."
    },
    "play_failed": {
      "message": "Failed to start playback."
    },
    "pause_failed": {
      "message": "Failed to pause playback."
    },
    "next_track_failed": {
      "message": "Failed to skip to the next track."
    },
    "previous_track_failed": {
      "message": "Failed to return to the previous track."
    },
    "seek_failed": {
      "message": "Failed to seek to position {position} seconds."
    },
    "set_repeat_failed": {
      "message": "Failed to set repeat mode."
    },
    "clear_playlist_failed": {
      "message": "Failed to clear the playlist."
    },
    "call_method_failed": {
      "message": "Failed to call method {command}."
    },
    "call_query_failed": {
      "message": "Failed to query method {command}."
    },
    "unjoin_failed": {
      "message": "Failed to unsync the player."
    },
    "press_failed": {
      "message": "Failed to execute button action {action}."
    },
    "load_url_failed": {
      "message": "Failed to load media URL {media_id} with command {cmd}."
    },
    "load_playlist_failed": {
      "message": "Failed to load playlist with command {cmd}."
    },
    "generate_image_url_failed": {
      "message": "Failed to generate image URL for track ID {track_id}."
    }
  }
}