mirror of
https://github.com/home-assistant/core.git
synced 2026-05-13 12:01:46 +00:00
Compare commits
13 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
| d3b2be7e86 | |||
| a2131c0d45 | |||
| b179d71658 | |||
| 070ef8f0b0 | |||
| aaeb55b132 | |||
| 1f5cb05f50 | |||
| cee87ed1f5 | |||
| e2ae9c1b95 | |||
| 8b257cdd6c | |||
| f756392b6a | |||
| 894ee88033 | |||
| d5d56e6e23 | |||
| a19a1ec6e8 |
@@ -30,6 +30,7 @@ This repository contains the core of Home Assistant, a Python 3 based home autom
|
||||
|
||||
- When writing or modifying tests, ensure all test function parameters have type annotations.
|
||||
- Prefer concrete types (for example, `HomeAssistant`, `MockConfigEntry`, etc.) over `Any`.
|
||||
- Prefer `@pytest.mark.usefixtures` over arguments, if the argument is not going to be used.
|
||||
- Avoid using conditions/branching in tests. Instead, either split tests or adjust the test parametrization to cover all cases without branching.
|
||||
- If multiple tests share most of their code, use `pytest.mark.parametrize` to merge them into a single parameterized test instead of duplicating the body.
|
||||
|
||||
|
||||
@@ -338,7 +338,7 @@ jobs:
|
||||
registry: ["ghcr.io/home-assistant", "docker.io/homeassistant"]
|
||||
steps:
|
||||
- name: Install Cosign
|
||||
uses: sigstore/cosign-installer@cad07c2e89fa2edd6e2d7bab4c1aa38e53f76003 # v4.1.1
|
||||
uses: sigstore/cosign-installer@6f9f17788090df1f26f669e9d70d6ae9567deba6 # v4.1.2
|
||||
with:
|
||||
cosign-release: "v2.5.3"
|
||||
|
||||
|
||||
@@ -423,6 +423,7 @@ homeassistant.components.opower.*
|
||||
homeassistant.components.oralb.*
|
||||
homeassistant.components.otbr.*
|
||||
homeassistant.components.otp.*
|
||||
homeassistant.components.ouman_eh_800.*
|
||||
homeassistant.components.overkiz.*
|
||||
homeassistant.components.overseerr.*
|
||||
homeassistant.components.p1_monitor.*
|
||||
|
||||
@@ -20,6 +20,7 @@ This repository contains the core of Home Assistant, a Python 3 based home autom
|
||||
|
||||
- When writing or modifying tests, ensure all test function parameters have type annotations.
|
||||
- Prefer concrete types (for example, `HomeAssistant`, `MockConfigEntry`, etc.) over `Any`.
|
||||
- Prefer `@pytest.mark.usefixtures` over arguments, if the argument is not going to be used.
|
||||
- Avoid using conditions/branching in tests. Instead, either split tests or adjust the test parametrization to cover all cases without branching.
|
||||
- If multiple tests share most of their code, use `pytest.mark.parametrize` to merge them into a single parameterized test instead of duplicating the body.
|
||||
|
||||
|
||||
Generated
+2
@@ -1305,6 +1305,8 @@ CLAUDE.md @home-assistant/core
|
||||
/tests/components/osoenergy/ @osohotwateriot
|
||||
/homeassistant/components/otbr/ @home-assistant/core
|
||||
/tests/components/otbr/ @home-assistant/core
|
||||
/homeassistant/components/ouman_eh_800/ @Markus98
|
||||
/tests/components/ouman_eh_800/ @Markus98
|
||||
/homeassistant/components/ourgroceries/ @OnFreund
|
||||
/tests/components/ourgroceries/ @OnFreund
|
||||
/homeassistant/components/overkiz/ @imicknl
|
||||
|
||||
@@ -59,6 +59,8 @@ class AemetConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
schema = vol.Schema(
|
||||
{
|
||||
vol.Required(CONF_API_KEY): str,
|
||||
# Name field is no longer allowed in config flow schemas
|
||||
# pylint: disable-next=hass-config-flow-name-field
|
||||
vol.Optional(CONF_NAME, default=DEFAULT_NAME): str,
|
||||
vol.Optional(
|
||||
CONF_LATITUDE, default=self.hass.config.latitude
|
||||
|
||||
@@ -3,7 +3,6 @@
|
||||
import asyncio
|
||||
from asyncio import timeout
|
||||
from collections.abc import Awaitable, Callable, Iterable, Mapping
|
||||
import contextlib
|
||||
from dataclasses import asdict as dataclass_asdict, dataclass, field
|
||||
from datetime import datetime
|
||||
import random
|
||||
@@ -298,24 +297,20 @@ class Analytics:
|
||||
if stored:
|
||||
self._data = AnalyticsData.from_dict(stored)
|
||||
|
||||
if self.supervisor and not self.onboarded:
|
||||
# This may raise HassioNotReadyError if Supervisor was unreachable
|
||||
# during setup of the Supervisor integration. That will fail setup
|
||||
# of this integration. However there is no better option at this time
|
||||
# since we need to get the diagnostic setting from Supervisor to correctly
|
||||
# setup this integration and we can't raise ConfigEntryNotReady to
|
||||
# trigger a retry from async_setup.
|
||||
supervisor_info = hassio.get_supervisor_info(self._hass)
|
||||
|
||||
# User have not configured analytics, get this setting from the supervisor
|
||||
if supervisor_info[ATTR_DIAGNOSTICS] and not self.preferences.get(
|
||||
ATTR_DIAGNOSTICS, False
|
||||
):
|
||||
self._data.preferences[ATTR_DIAGNOSTICS] = True
|
||||
elif not supervisor_info[ATTR_DIAGNOSTICS] and self.preferences.get(
|
||||
ATTR_DIAGNOSTICS, False
|
||||
):
|
||||
self._data.preferences[ATTR_DIAGNOSTICS] = False
|
||||
if (
|
||||
self.supervisor
|
||||
and (supervisor_info := hassio.get_supervisor_info(self._hass)) is not None
|
||||
):
|
||||
if not self.onboarded:
|
||||
# User have not configured analytics, get this setting from the supervisor
|
||||
if supervisor_info[ATTR_DIAGNOSTICS] and not self.preferences.get(
|
||||
ATTR_DIAGNOSTICS, False
|
||||
):
|
||||
self._data.preferences[ATTR_DIAGNOSTICS] = True
|
||||
elif not supervisor_info[ATTR_DIAGNOSTICS] and self.preferences.get(
|
||||
ATTR_DIAGNOSTICS, False
|
||||
):
|
||||
self._data.preferences[ATTR_DIAGNOSTICS] = False
|
||||
|
||||
async def _save(self) -> None:
|
||||
"""Save data."""
|
||||
@@ -349,14 +344,9 @@ class Analytics:
|
||||
await self._save()
|
||||
|
||||
if self.supervisor:
|
||||
# get_supervisor_info was called during setup so we can't get here
|
||||
# if it raised. The others may raise HassioNotReadyError if only some
|
||||
# data was successfully fetched from Supervisor
|
||||
supervisor_info = hassio.get_supervisor_info(hass)
|
||||
with contextlib.suppress(hassio.HassioNotReadyError):
|
||||
operating_system_info = hassio.get_os_info(hass)
|
||||
with contextlib.suppress(hassio.HassioNotReadyError):
|
||||
addons_info = hassio.get_addons_info(hass)
|
||||
operating_system_info = hassio.get_os_info(hass) or {}
|
||||
addons_info = hassio.get_addons_info(hass) or {}
|
||||
|
||||
system_info = await async_get_system_info(hass)
|
||||
integrations = []
|
||||
@@ -429,7 +419,7 @@ class Analytics:
|
||||
|
||||
integrations.append(integration.domain)
|
||||
|
||||
if addons_info:
|
||||
if addons_info is not None:
|
||||
supervisor_client = hassio.get_supervisor_client(hass)
|
||||
installed_addons = await asyncio.gather(
|
||||
*(supervisor_client.addons.addon_info(slug) for slug in addons_info)
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
"""Config flow for the Bayesian integration."""
|
||||
# pylint: disable=hass-config-flow-name-field # Name field is no longer allowed in config flow schemas
|
||||
|
||||
from collections.abc import Mapping
|
||||
from enum import StrEnum
|
||||
|
||||
@@ -313,6 +313,8 @@ class BroadlinkFlowHandler(ConfigFlow, domain=DOMAIN):
|
||||
},
|
||||
)
|
||||
|
||||
# Name field is no longer allowed in config flow schemas
|
||||
# pylint: disable-next=hass-config-flow-name-field
|
||||
data_schema = {vol.Required(CONF_NAME, default=device.name): str}
|
||||
return self.async_show_form(
|
||||
step_id="finish", data_schema=vol.Schema(data_schema), errors=errors
|
||||
|
||||
@@ -53,6 +53,8 @@ def _schema_with_defaults(
|
||||
{
|
||||
vol.Required(CONF_HOST, default=host): str,
|
||||
**AUTH_VOL_DICT,
|
||||
# Name field is no longer allowed in config flow schemas
|
||||
# pylint: disable-next=hass-config-flow-name-field
|
||||
vol.Optional(CONF_NAME, default=name): str,
|
||||
}
|
||||
)
|
||||
|
||||
@@ -47,6 +47,8 @@ class EmulatedRokuFlowHandler(ConfigFlow, domain=DOMAIN):
|
||||
step_id="user",
|
||||
data_schema=vol.Schema(
|
||||
{
|
||||
# Name field is no longer allowed in config flow schemas
|
||||
# pylint: disable-next=hass-config-flow-name-field
|
||||
vol.Required(CONF_NAME, default=default_name): str,
|
||||
vol.Required(CONF_LISTEN_PORT, default=default_port): vol.Coerce(
|
||||
int
|
||||
|
||||
@@ -23,6 +23,8 @@ DATA_SCHEMA = vol.Schema(
|
||||
)
|
||||
),
|
||||
vol.Required(CONF_HOST): str,
|
||||
# Name field is no longer allowed in config flow schemas
|
||||
# pylint: disable-next=hass-config-flow-name-field
|
||||
vol.Required(CONF_NAME, default=DOMAIN): str,
|
||||
}
|
||||
)
|
||||
|
||||
@@ -63,14 +63,9 @@ class ESPHomeDashboardManager:
|
||||
if is_hassio(self._hass):
|
||||
from homeassistant.components.hassio import get_addons_info # noqa: PLC0415
|
||||
|
||||
# This may raise HassioNotReadyError if Supervisor was unreachable
|
||||
# during setup of the Supervisor integration. That will fail setup
|
||||
# of this integration. However there is no better option at this time
|
||||
# since we need to know if the addon is installed from Supervisor to
|
||||
# correctly setup this integration and we can't raise ConfigEntryNotReady
|
||||
# to trigger a retry from async_setup.
|
||||
addons = get_addons_info(self._hass)
|
||||
if info["addon_slug"] not in addons:
|
||||
if (addons := get_addons_info(self._hass)) is not None and info[
|
||||
"addon_slug"
|
||||
] not in addons:
|
||||
# The addon is not installed anymore, but it make come back
|
||||
# so we don't want to remove the dashboard, but for now
|
||||
# we don't want to use it.
|
||||
|
||||
@@ -130,6 +130,8 @@ def get_model_selection_schema(
|
||||
mode=SelectSelectorMode.DROPDOWN,
|
||||
)
|
||||
),
|
||||
# Name field is no longer allowed in config flow schemas
|
||||
# pylint: disable-next=hass-config-flow-name-field
|
||||
vol.Required(
|
||||
CONF_NAME,
|
||||
default=options.get(CONF_NAME) or vol.UNDEFINED,
|
||||
|
||||
@@ -34,7 +34,6 @@ from homeassistant.helpers.service_info.ssdp import (
|
||||
ATTR_UPNP_UDN,
|
||||
SsdpServiceInfo,
|
||||
)
|
||||
from homeassistant.helpers.typing import VolDictType
|
||||
|
||||
from .const import (
|
||||
CONF_FEATURE_DEVICE_TRACKING,
|
||||
@@ -225,19 +224,12 @@ class FritzBoxToolsFlowHandler(ConfigFlow, domain=DOMAIN):
|
||||
self, errors: dict[str, str] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
"""Show the setup form to the user."""
|
||||
|
||||
advanced_data_schema: VolDictType = {}
|
||||
if self.show_advanced_options:
|
||||
advanced_data_schema = {
|
||||
vol.Optional(CONF_PORT): vol.Coerce(int),
|
||||
}
|
||||
|
||||
return self.async_show_form(
|
||||
step_id="user",
|
||||
data_schema=vol.Schema(
|
||||
{
|
||||
vol.Optional(CONF_HOST, default=DEFAULT_HOST): str,
|
||||
**advanced_data_schema,
|
||||
vol.Optional(CONF_PORT): vol.Coerce(int),
|
||||
vol.Required(CONF_USERNAME): str,
|
||||
vol.Required(CONF_PASSWORD): str,
|
||||
vol.Optional(CONF_SSL, default=DEFAULT_SSL): bool,
|
||||
@@ -357,18 +349,14 @@ class FritzBoxToolsFlowHandler(ConfigFlow, domain=DOMAIN):
|
||||
self, user_input: dict[str, Any], errors: dict[str, str] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
"""Show the reconfigure form to the user."""
|
||||
advanced_data_schema: VolDictType = {}
|
||||
if self.show_advanced_options:
|
||||
advanced_data_schema = {
|
||||
vol.Optional(CONF_PORT, default=user_input[CONF_PORT]): vol.Coerce(int),
|
||||
}
|
||||
|
||||
return self.async_show_form(
|
||||
step_id="reconfigure",
|
||||
data_schema=vol.Schema(
|
||||
{
|
||||
vol.Required(CONF_HOST, default=user_input[CONF_HOST]): str,
|
||||
**advanced_data_schema,
|
||||
vol.Optional(CONF_PORT, default=user_input[CONF_PORT]): vol.Coerce(
|
||||
int
|
||||
),
|
||||
vol.Required(CONF_SSL, default=user_input[CONF_SSL]): bool,
|
||||
}
|
||||
),
|
||||
@@ -382,11 +370,21 @@ class FritzBoxToolsFlowHandler(ConfigFlow, domain=DOMAIN):
|
||||
"""Handle reconfigure flow."""
|
||||
if user_input is None:
|
||||
reconfigure_entry_data = self._get_reconfigure_entry().data
|
||||
port = reconfigure_entry_data[CONF_PORT]
|
||||
ssl = reconfigure_entry_data.get(CONF_SSL, DEFAULT_SSL)
|
||||
|
||||
if (port == DEFAULT_HTTP_PORT and not ssl) or (
|
||||
port == DEFAULT_HTTPS_PORT and ssl
|
||||
):
|
||||
# don't show default ports in reconfigure flow, as they are determined by ssl value
|
||||
# this allows the user to toggle ssl without having to change the port
|
||||
port = vol.UNDEFINED
|
||||
|
||||
return self._show_setup_form_reconfigure(
|
||||
{
|
||||
CONF_HOST: reconfigure_entry_data[CONF_HOST],
|
||||
CONF_PORT: reconfigure_entry_data[CONF_PORT],
|
||||
CONF_SSL: reconfigure_entry_data.get(CONF_SSL, DEFAULT_SSL),
|
||||
CONF_PORT: port,
|
||||
CONF_SSL: ssl,
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
@@ -90,6 +90,8 @@ class GoalZeroFlowHandler(ConfigFlow, domain=DOMAIN):
|
||||
vol.Required(
|
||||
CONF_HOST, default=user_input.get(CONF_HOST) or ""
|
||||
): str,
|
||||
# Name field is no longer allowed in config flow schemas
|
||||
# pylint: disable-next=hass-config-flow-name-field
|
||||
vol.Optional(
|
||||
CONF_NAME, default=user_input.get(CONF_NAME) or DEFAULT_NAME
|
||||
): str,
|
||||
|
||||
@@ -69,6 +69,8 @@ def _get_location_schema(hass: HomeAssistant) -> vol.Schema:
|
||||
"""Return the schema for a location with default values from the hass config."""
|
||||
return vol.Schema(
|
||||
{
|
||||
# Name field is no longer allowed in config flow schemas
|
||||
# pylint: disable-next=hass-config-flow-name-field
|
||||
vol.Required(CONF_NAME, default=hass.config.location_name): str,
|
||||
vol.Required(
|
||||
CONF_LOCATION,
|
||||
|
||||
@@ -321,6 +321,8 @@ async def google_generative_ai_config_option_schema(
|
||||
else:
|
||||
default_name = DEFAULT_CONVERSATION_NAME
|
||||
schema: dict[vol.Required | vol.Optional, Any] = {
|
||||
# Name field is no longer allowed in config flow schemas
|
||||
# pylint: disable-next=hass-config-flow-name-field
|
||||
vol.Required(CONF_NAME, default=default_name): str,
|
||||
}
|
||||
else:
|
||||
|
||||
@@ -67,6 +67,8 @@ RECONFIGURE_SCHEMA = vol.Schema(
|
||||
|
||||
CONFIG_SCHEMA = RECONFIGURE_SCHEMA.extend(
|
||||
{
|
||||
# Name field is no longer allowed in config flow schemas
|
||||
# pylint: disable-next=hass-config-flow-name-field
|
||||
vol.Required(CONF_NAME, default=DEFAULT_NAME): cv.string,
|
||||
}
|
||||
)
|
||||
|
||||
@@ -70,6 +70,8 @@ def _get_location_schema(hass: HomeAssistant) -> vol.Schema:
|
||||
"""Return the schema for a location with default values from the hass config."""
|
||||
return vol.Schema(
|
||||
{
|
||||
# Name field is no longer allowed in config flow schemas
|
||||
# pylint: disable-next=hass-config-flow-name-field
|
||||
vol.Required(CONF_NAME, default=hass.config.location_name): str,
|
||||
vol.Required(
|
||||
CONF_LOCATION,
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
"""The Hardkernel integration."""
|
||||
|
||||
from homeassistant.components.hassio import HassioNotReadyError, get_os_info
|
||||
from homeassistant.components.hassio import get_os_info
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import ConfigEntryNotReady
|
||||
@@ -14,10 +14,9 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
||||
hass.async_create_task(hass.config_entries.async_remove(entry.entry_id))
|
||||
return False
|
||||
|
||||
try:
|
||||
os_info = get_os_info(hass)
|
||||
except HassioNotReadyError as err:
|
||||
raise ConfigEntryNotReady from err
|
||||
if (os_info := get_os_info(hass)) is None:
|
||||
# The hassio integration has not yet fetched data from the supervisor
|
||||
raise ConfigEntryNotReady
|
||||
|
||||
board: str | None
|
||||
if (board := os_info.get("board")) is None or not board.startswith("odroid"):
|
||||
|
||||
@@ -20,7 +20,8 @@ BOARD_NAMES = {
|
||||
@callback
|
||||
def async_info(hass: HomeAssistant) -> list[HardwareInfo]:
|
||||
"""Return board info."""
|
||||
os_info = get_os_info(hass)
|
||||
if (os_info := get_os_info(hass)) is None:
|
||||
raise HomeAssistantError
|
||||
board: str | None
|
||||
if (board := os_info.get("board")) is None:
|
||||
raise HomeAssistantError
|
||||
|
||||
@@ -38,7 +38,10 @@ from .util import (
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
DATA_SCHEMA = vol.Schema(
|
||||
{vol.Required(CONF_HOST): str, vol.Required(CONF_NAME): str}, extra=vol.ALLOW_EXTRA
|
||||
# Name field is no longer allowed in config flow schemas
|
||||
# pylint: disable-next=hass-config-flow-name-field
|
||||
{vol.Required(CONF_HOST): str, vol.Required(CONF_NAME): str},
|
||||
extra=vol.ALLOW_EXTRA,
|
||||
)
|
||||
|
||||
|
||||
|
||||
@@ -8,7 +8,7 @@
|
||||
"integration_type": "device",
|
||||
"iot_class": "local_push",
|
||||
"loggers": ["aioharmony", "slixmpp"],
|
||||
"requirements": ["aioharmony==0.5.3"],
|
||||
"requirements": ["aioharmony==1.0.3"],
|
||||
"ssdp": [
|
||||
{
|
||||
"deviceType": "urn:myharmony-com:device:harmony:1",
|
||||
|
||||
@@ -2,24 +2,32 @@
|
||||
|
||||
import asyncio
|
||||
from dataclasses import replace
|
||||
from datetime import datetime
|
||||
import logging
|
||||
import os
|
||||
import struct
|
||||
from typing import Any
|
||||
from typing import Any, cast
|
||||
|
||||
from aiohasupervisor import SupervisorError
|
||||
from aiohasupervisor.models import (
|
||||
GreenOptions,
|
||||
HomeAssistantInfo,
|
||||
HomeAssistantOptions,
|
||||
HostInfo,
|
||||
InstalledAddon,
|
||||
NetworkInfo,
|
||||
OSInfo,
|
||||
RootInfo,
|
||||
StoreInfo,
|
||||
SupervisorInfo,
|
||||
SupervisorOptions,
|
||||
YellowOptions,
|
||||
)
|
||||
|
||||
from homeassistant.auth.const import GROUP_ID_ADMIN
|
||||
from homeassistant.auth.models import RefreshToken, User
|
||||
from homeassistant.auth.models import RefreshToken
|
||||
from homeassistant.components import frontend
|
||||
from homeassistant.components.homeassistant import async_set_stop_handler
|
||||
from homeassistant.components.homeassistant.const import DATA_STOP_HANDLER
|
||||
from homeassistant.components.http import (
|
||||
CONF_SERVER_HOST,
|
||||
CONF_SERVER_PORT,
|
||||
@@ -32,8 +40,7 @@ from homeassistant.const import (
|
||||
SERVER_PORT,
|
||||
Platform,
|
||||
)
|
||||
from homeassistant.core import Event, HomeAssistant, callback
|
||||
from homeassistant.exceptions import ConfigEntryNotReady
|
||||
from homeassistant.core import Event, HassJob, HomeAssistant, callback
|
||||
from homeassistant.helpers import (
|
||||
config_validation as cv,
|
||||
device_registry as dr,
|
||||
@@ -41,8 +48,10 @@ from homeassistant.helpers import (
|
||||
issue_registry as ir,
|
||||
)
|
||||
from homeassistant.helpers.aiohttp_client import async_get_clientsession
|
||||
from homeassistant.helpers.event import async_call_later
|
||||
from homeassistant.helpers.issue_registry import IssueSeverity
|
||||
from homeassistant.helpers.typing import ConfigType
|
||||
from homeassistant.util.async_ import create_eager_task
|
||||
|
||||
# config_flow, diagnostics, system_health, and entity platforms are imported to
|
||||
# ensure other dependencies that wait for hassio are not waiting
|
||||
@@ -65,13 +74,19 @@ from .auth import async_setup_auth_view
|
||||
from .config import HassioConfig
|
||||
from .const import (
|
||||
ADDONS_COORDINATOR,
|
||||
DATA_ADDONS_LIST,
|
||||
DATA_COMPONENT,
|
||||
DATA_CONFIG_STORE,
|
||||
DATA_HASSIO_HOST,
|
||||
DATA_HASSIO_HTTP_CONFIG,
|
||||
DATA_HASSIO_SUPERVISOR_USER,
|
||||
DATA_CORE_INFO,
|
||||
DATA_HOST_INFO,
|
||||
DATA_INFO,
|
||||
DATA_KEY_SUPERVISOR_ISSUES,
|
||||
DATA_NETWORK_INFO,
|
||||
DATA_OS_INFO,
|
||||
DATA_STORE,
|
||||
DATA_SUPERVISOR_INFO,
|
||||
DOMAIN,
|
||||
HASSIO_MAIN_UPDATE_INTERVAL,
|
||||
MAIN_COORDINATOR,
|
||||
STATS_COORDINATOR,
|
||||
)
|
||||
@@ -93,7 +108,6 @@ from .coordinator import (
|
||||
get_supervisor_stats,
|
||||
)
|
||||
from .discovery import async_setup_discovery_view
|
||||
from .exceptions import HassioNotReadyError
|
||||
from .handler import HassIO, async_update_diagnostics, get_supervisor_client
|
||||
from .http import HassIOView
|
||||
from .ingress import async_setup_ingress_view
|
||||
@@ -110,7 +124,6 @@ __all__ = [
|
||||
"AddonManager",
|
||||
"AddonState",
|
||||
"GreenOptions",
|
||||
"HassioNotReadyError",
|
||||
"SupervisorError",
|
||||
"YellowOptions",
|
||||
"async_update_diagnostics",
|
||||
@@ -175,59 +188,6 @@ def hostname_from_addon_slug(addon_slug: str) -> str:
|
||||
return addon_slug.replace("_", "-")
|
||||
|
||||
|
||||
@callback
|
||||
def _check_deprecated_setup(hass: HomeAssistant) -> None:
|
||||
"""Create issues for deprecated installation types and architectures."""
|
||||
os_info = get_os_info(hass)
|
||||
info = get_info(hass)
|
||||
is_haos = info.get("hassos") is not None
|
||||
board = os_info.get("board")
|
||||
arch = info.get("arch", "unknown")
|
||||
unsupported_board = board in {"tinker", "odroid-xu4", "rpi2"}
|
||||
unsupported_os_on_board = board in {"rpi3", "rpi4"}
|
||||
if is_haos and (unsupported_board or unsupported_os_on_board):
|
||||
issue_id = "deprecated_os_"
|
||||
if unsupported_os_on_board:
|
||||
issue_id += "aarch64"
|
||||
elif unsupported_board:
|
||||
issue_id += "armv7"
|
||||
ir.async_create_issue(
|
||||
hass,
|
||||
"homeassistant",
|
||||
issue_id,
|
||||
learn_more_url=DEPRECATION_URL,
|
||||
is_fixable=False,
|
||||
severity=IssueSeverity.WARNING,
|
||||
translation_key=issue_id,
|
||||
translation_placeholders={
|
||||
"installation_guide": "https://www.home-assistant.io/installation/",
|
||||
},
|
||||
)
|
||||
bit32 = _is_32_bit()
|
||||
deprecated_architecture = bit32 and not (
|
||||
unsupported_board or unsupported_os_on_board
|
||||
)
|
||||
if not is_haos or deprecated_architecture:
|
||||
issue_id = "deprecated"
|
||||
if not is_haos:
|
||||
issue_id += "_method"
|
||||
if deprecated_architecture:
|
||||
issue_id += "_architecture"
|
||||
ir.async_create_issue(
|
||||
hass,
|
||||
"homeassistant",
|
||||
issue_id,
|
||||
learn_more_url=DEPRECATION_URL,
|
||||
is_fixable=False,
|
||||
severity=IssueSeverity.WARNING,
|
||||
translation_key=issue_id,
|
||||
translation_placeholders={
|
||||
"installation_type": "OS" if is_haos else "Supervised",
|
||||
"arch": arch,
|
||||
},
|
||||
)
|
||||
|
||||
|
||||
async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
||||
"""Set up the Hass.io component."""
|
||||
# Check local setup
|
||||
@@ -241,22 +201,30 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
||||
)
|
||||
return False
|
||||
|
||||
async_load_websocket_api(hass)
|
||||
frontend.async_register_built_in_panel(hass, "app")
|
||||
|
||||
host = os.environ["SUPERVISOR"]
|
||||
websession = async_get_clientsession(hass)
|
||||
hass.data[DATA_COMPONENT] = HassIO(hass.loop, websession, host)
|
||||
hass.data[DATA_HASSIO_HOST] = host
|
||||
hass.data[DATA_HASSIO_HTTP_CONFIG] = config.get("http", {})
|
||||
supervisor_client = get_supervisor_client(hass)
|
||||
|
||||
try:
|
||||
await supervisor_client.supervisor.ping()
|
||||
except SupervisorError:
|
||||
_LOGGER.warning("Not connected with the supervisor / system too busy!")
|
||||
|
||||
# Load the store
|
||||
config_store = HassioConfig(hass)
|
||||
await config_store.load()
|
||||
hass.data[DATA_CONFIG_STORE] = config_store
|
||||
|
||||
# Cache the Supervisor user. Create one if necessary
|
||||
user: User | None = None
|
||||
refresh_token = None
|
||||
if (hassio_user := config_store.data.hassio_user) is not None:
|
||||
user = await hass.auth.async_get_user(hassio_user)
|
||||
if user:
|
||||
if user and user.refresh_tokens:
|
||||
refresh_token = list(user.refresh_tokens.values())[0]
|
||||
|
||||
# Migrate old Hass.io users to be admin.
|
||||
if not user.is_admin:
|
||||
await hass.auth.async_update_user(user, group_ids=[GROUP_ID_ADMIN])
|
||||
@@ -265,100 +233,41 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
||||
if user.name == "Hass.io":
|
||||
await hass.auth.async_update_user(user, name=HASSIO_USER_NAME)
|
||||
|
||||
if user is None:
|
||||
if refresh_token is None:
|
||||
user = await hass.auth.async_create_system_user(
|
||||
HASSIO_USER_NAME, group_ids=[GROUP_ID_ADMIN]
|
||||
)
|
||||
refresh_token = await hass.auth.async_create_refresh_token(user)
|
||||
config_store.update(hassio_user=user.id)
|
||||
|
||||
assert user is not None
|
||||
hass.data[DATA_HASSIO_SUPERVISOR_USER] = user
|
||||
|
||||
async_load_websocket_api(hass)
|
||||
hass.http.register_view(HassIOView(host, websession))
|
||||
async_setup_services(hass)
|
||||
async_setup_discovery_view(hass)
|
||||
async_setup_auth_view(hass)
|
||||
async_setup_ingress_view(hass)
|
||||
async_setup_addon_panel(hass)
|
||||
frontend.async_register_built_in_panel(hass, "app")
|
||||
|
||||
discovery_flow.async_create_flow(
|
||||
hass, DOMAIN, context={"source": SOURCE_SYSTEM}, data={}
|
||||
async def update_hass_api(http_config: dict[str, Any], refresh_token: RefreshToken):
|
||||
"""Update Home Assistant API data on Hass.io."""
|
||||
options = HomeAssistantOptions(
|
||||
ssl=CONF_SSL_CERTIFICATE in http_config,
|
||||
port=http_config.get(CONF_SERVER_PORT) or SERVER_PORT,
|
||||
refresh_token=refresh_token.token,
|
||||
)
|
||||
|
||||
if http_config.get(CONF_SERVER_HOST) is not None:
|
||||
options = replace(options, watchdog=False)
|
||||
_LOGGER.warning(
|
||||
"Found incompatible HTTP option 'server_host'. Watchdog feature"
|
||||
" disabled"
|
||||
)
|
||||
|
||||
try:
|
||||
await supervisor_client.homeassistant.set_options(options)
|
||||
except SupervisorError as err:
|
||||
_LOGGER.warning(
|
||||
"Failed to update Home Assistant options in Supervisor: %s", err
|
||||
)
|
||||
|
||||
update_hass_api_task = hass.async_create_task(
|
||||
update_hass_api(config.get("http", {}), refresh_token), eager_start=True
|
||||
)
|
||||
return True
|
||||
|
||||
|
||||
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
||||
"""Set up a config entry."""
|
||||
supervisor_client = get_supervisor_client(hass)
|
||||
|
||||
try:
|
||||
await supervisor_client.supervisor.ping()
|
||||
except SupervisorError as err:
|
||||
raise ConfigEntryNotReady(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="supervisor_not_connected",
|
||||
) from err
|
||||
|
||||
# Get or create a refresh token for the Supervisor user
|
||||
user = hass.data[DATA_HASSIO_SUPERVISOR_USER]
|
||||
if user.refresh_tokens:
|
||||
refresh_token = list(user.refresh_tokens.values())[0]
|
||||
else:
|
||||
refresh_token = await hass.auth.async_create_refresh_token(user)
|
||||
|
||||
# Set up coordinators — these can raise ConfigEntryNotReady.
|
||||
# Register listeners only after all refreshes succeed to avoid accumulation
|
||||
# across retries.
|
||||
dev_reg = dr.async_get(hass)
|
||||
|
||||
coordinator = HassioMainDataUpdateCoordinator(hass, entry, dev_reg)
|
||||
await coordinator.async_config_entry_first_refresh()
|
||||
hass.data[MAIN_COORDINATOR] = coordinator
|
||||
|
||||
addon_coordinator = HassioAddOnDataUpdateCoordinator(
|
||||
hass, entry, dev_reg, coordinator.jobs
|
||||
)
|
||||
await addon_coordinator.async_config_entry_first_refresh()
|
||||
hass.data[ADDONS_COORDINATOR] = addon_coordinator
|
||||
|
||||
stats_coordinator = HassioStatsDataUpdateCoordinator(hass, entry)
|
||||
await stats_coordinator.async_config_entry_first_refresh()
|
||||
hass.data[STATS_COORDINATOR] = stats_coordinator
|
||||
|
||||
# All coordinators refreshed successfully. Start the issues listener and
|
||||
# install the stop handler now so they are never left in a partial state
|
||||
# if a coordinator refresh raises ConfigEntryNotReady.
|
||||
hass.data[DATA_KEY_SUPERVISOR_ISSUES] = issues = SupervisorIssues(hass)
|
||||
|
||||
def _unload_supervisor_issues() -> None:
|
||||
if (
|
||||
supervisor_issues := hass.data.pop(DATA_KEY_SUPERVISOR_ISSUES, None)
|
||||
) is not None:
|
||||
supervisor_issues.unload()
|
||||
|
||||
entry.async_on_unload(_unload_supervisor_issues)
|
||||
|
||||
async def _async_stop(hass: HomeAssistant, restart: bool) -> None:
|
||||
"""Stop or restart home assistant."""
|
||||
if restart:
|
||||
await supervisor_client.homeassistant.restart()
|
||||
else:
|
||||
await supervisor_client.homeassistant.stop()
|
||||
|
||||
# Install a custom handler for the homeassistant.restart / stop services,
|
||||
# and restore the previous one when this entry unloads.
|
||||
prev_stop_handler = hass.data.get(DATA_STOP_HANDLER)
|
||||
async_set_stop_handler(hass, _async_stop)
|
||||
|
||||
def _restore_stop_handler() -> None:
|
||||
if prev_stop_handler is not None:
|
||||
async_set_stop_handler(hass, prev_stop_handler)
|
||||
else:
|
||||
hass.data.pop(DATA_STOP_HANDLER, None)
|
||||
|
||||
entry.async_on_unload(_restore_stop_handler)
|
||||
last_timezone = None
|
||||
last_country = None
|
||||
|
||||
@@ -381,50 +290,206 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
||||
except SupervisorError as err:
|
||||
_LOGGER.warning("Failed to update Supervisor options: %s", err)
|
||||
|
||||
entry.async_on_unload(hass.bus.async_listen(EVENT_CORE_CONFIG_UPDATE, push_config))
|
||||
hass.bus.async_listen(EVENT_CORE_CONFIG_UPDATE, push_config)
|
||||
|
||||
http_config: dict[str, Any] = hass.data.get(DATA_HASSIO_HTTP_CONFIG, {})
|
||||
push_config_task = hass.async_create_task(push_config(None), eager_start=True)
|
||||
# Start listening for problems with supervisor and making issues
|
||||
hass.data[DATA_KEY_SUPERVISOR_ISSUES] = issues = SupervisorIssues(hass)
|
||||
issues_task = hass.async_create_task(issues.setup(), eager_start=True)
|
||||
|
||||
async def update_hass_api(refresh_token: RefreshToken) -> None:
|
||||
"""Update Home Assistant API data on Hass.io."""
|
||||
options = HomeAssistantOptions(
|
||||
ssl=CONF_SSL_CERTIFICATE in http_config,
|
||||
port=http_config.get(CONF_SERVER_PORT) or SERVER_PORT,
|
||||
refresh_token=refresh_token.token,
|
||||
)
|
||||
# Register services
|
||||
async_setup_services(hass, supervisor_client)
|
||||
|
||||
if http_config.get(CONF_SERVER_HOST) is not None:
|
||||
options = replace(options, watchdog=False)
|
||||
_LOGGER.warning(
|
||||
"Found incompatible HTTP option 'server_host'. Watchdog feature"
|
||||
" disabled"
|
||||
)
|
||||
async def update_info_data(_: datetime | None = None) -> None:
|
||||
"""Update last available supervisor information."""
|
||||
supervisor_client = get_supervisor_client(hass)
|
||||
|
||||
try:
|
||||
await supervisor_client.homeassistant.set_options(options)
|
||||
except SupervisorError as err:
|
||||
_LOGGER.warning(
|
||||
"Failed to update Home Assistant options in Supervisor: %s", err
|
||||
(
|
||||
root_info,
|
||||
host_info,
|
||||
store_info,
|
||||
homeassistant_info,
|
||||
supervisor_info,
|
||||
os_info,
|
||||
network_info,
|
||||
addons_list,
|
||||
) = cast(
|
||||
tuple[
|
||||
RootInfo,
|
||||
HostInfo,
|
||||
StoreInfo,
|
||||
HomeAssistantInfo,
|
||||
SupervisorInfo,
|
||||
OSInfo,
|
||||
NetworkInfo,
|
||||
list[InstalledAddon],
|
||||
],
|
||||
await asyncio.gather(
|
||||
create_eager_task(supervisor_client.info()),
|
||||
create_eager_task(supervisor_client.host.info()),
|
||||
create_eager_task(supervisor_client.store.info()),
|
||||
create_eager_task(supervisor_client.homeassistant.info()),
|
||||
create_eager_task(supervisor_client.supervisor.info()),
|
||||
create_eager_task(supervisor_client.os.info()),
|
||||
create_eager_task(supervisor_client.network.info()),
|
||||
create_eager_task(supervisor_client.addons.list()),
|
||||
),
|
||||
)
|
||||
|
||||
# Push initial config to Supervisor and start issues listener
|
||||
await asyncio.gather(
|
||||
update_hass_api(refresh_token), push_config(None), issues.setup()
|
||||
except SupervisorError as err:
|
||||
_LOGGER.warning("Can't read Supervisor data: %s", err)
|
||||
else:
|
||||
hass.data[DATA_INFO] = root_info
|
||||
hass.data[DATA_HOST_INFO] = host_info
|
||||
hass.data[DATA_STORE] = store_info
|
||||
hass.data[DATA_CORE_INFO] = homeassistant_info
|
||||
hass.data[DATA_SUPERVISOR_INFO] = supervisor_info
|
||||
hass.data[DATA_OS_INFO] = os_info
|
||||
hass.data[DATA_NETWORK_INFO] = network_info
|
||||
hass.data[DATA_ADDONS_LIST] = addons_list
|
||||
|
||||
# Fetch data
|
||||
update_info_task = hass.async_create_task(update_info_data(), eager_start=True)
|
||||
|
||||
async def _async_stop(hass: HomeAssistant, restart: bool) -> None:
|
||||
"""Stop or restart home assistant."""
|
||||
if restart:
|
||||
await supervisor_client.homeassistant.restart()
|
||||
else:
|
||||
await supervisor_client.homeassistant.stop()
|
||||
|
||||
# Set a custom handler for the homeassistant.restart and homeassistant.stop services
|
||||
async_set_stop_handler(hass, _async_stop)
|
||||
|
||||
# Init discovery Hass.io feature
|
||||
async_setup_discovery_view(hass)
|
||||
|
||||
# Init auth Hass.io feature
|
||||
assert user is not None
|
||||
async_setup_auth_view(hass, user)
|
||||
|
||||
# Init ingress Hass.io feature
|
||||
async_setup_ingress_view(hass, host)
|
||||
|
||||
# Init add-on ingress panels
|
||||
panels_task = hass.async_create_task(
|
||||
async_setup_addon_panel(hass), eager_start=True
|
||||
)
|
||||
|
||||
# Make sure to await the update_info task before
|
||||
# _async_setup_hardware_integration is called
|
||||
# so the hardware integration can be set up
|
||||
# and does not fallback to calling later
|
||||
await update_hass_api_task
|
||||
await panels_task
|
||||
await update_info_task
|
||||
await push_config_task
|
||||
await issues_task
|
||||
|
||||
# Setup hardware integration for the detected board type
|
||||
# This is done after the initial data refresh to ensure that the board info is available.
|
||||
os_info = get_os_info(hass)
|
||||
if (board := os_info.get("board")) is not None and (
|
||||
hw_integration := HARDWARE_INTEGRATIONS.get(board)
|
||||
) is not None:
|
||||
@callback
|
||||
def _async_setup_hardware_integration(_: datetime | None = None) -> None:
|
||||
"""Set up hardware integration for the detected board type."""
|
||||
if (os_info := get_os_info(hass)) is None:
|
||||
# os info not yet fetched from supervisor, retry later
|
||||
async_call_later(
|
||||
hass,
|
||||
HASSIO_MAIN_UPDATE_INTERVAL,
|
||||
async_setup_hardware_integration_job,
|
||||
)
|
||||
return
|
||||
if (board := os_info.get("board")) is None:
|
||||
return
|
||||
if (hw_integration := HARDWARE_INTEGRATIONS.get(board)) is None:
|
||||
return
|
||||
discovery_flow.async_create_flow(
|
||||
hass, hw_integration, context={"source": SOURCE_SYSTEM}, data={}
|
||||
)
|
||||
|
||||
# Check for deprecated setup and create issues if needed.
|
||||
# This is done after the initial data refresh to ensure that the info needed is available.
|
||||
_check_deprecated_setup(hass)
|
||||
async_setup_hardware_integration_job = HassJob(
|
||||
_async_setup_hardware_integration, cancel_on_shutdown=True
|
||||
)
|
||||
|
||||
_async_setup_hardware_integration()
|
||||
discovery_flow.async_create_flow(
|
||||
hass, DOMAIN, context={"source": SOURCE_SYSTEM}, data={}
|
||||
)
|
||||
return True
|
||||
|
||||
|
||||
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
||||
"""Set up a config entry."""
|
||||
dev_reg = dr.async_get(hass)
|
||||
|
||||
coordinator = HassioMainDataUpdateCoordinator(hass, entry, dev_reg)
|
||||
await coordinator.async_config_entry_first_refresh()
|
||||
hass.data[MAIN_COORDINATOR] = coordinator
|
||||
|
||||
addon_coordinator = HassioAddOnDataUpdateCoordinator(
|
||||
hass, entry, dev_reg, coordinator.jobs
|
||||
)
|
||||
await addon_coordinator.async_config_entry_first_refresh()
|
||||
hass.data[ADDONS_COORDINATOR] = addon_coordinator
|
||||
|
||||
stats_coordinator = HassioStatsDataUpdateCoordinator(hass, entry)
|
||||
await stats_coordinator.async_config_entry_first_refresh()
|
||||
hass.data[STATS_COORDINATOR] = stats_coordinator
|
||||
|
||||
def deprecated_setup_issue() -> None:
|
||||
os_info = get_os_info(hass)
|
||||
info = get_info(hass)
|
||||
if os_info is None or info is None:
|
||||
return
|
||||
is_haos = info.get("hassos") is not None
|
||||
board = os_info.get("board")
|
||||
arch = info.get("arch", "unknown")
|
||||
unsupported_board = board in {"tinker", "odroid-xu4", "rpi2"}
|
||||
unsupported_os_on_board = board in {"rpi3", "rpi4"}
|
||||
if is_haos and (unsupported_board or unsupported_os_on_board):
|
||||
issue_id = "deprecated_os_"
|
||||
if unsupported_os_on_board:
|
||||
issue_id += "aarch64"
|
||||
elif unsupported_board:
|
||||
issue_id += "armv7"
|
||||
ir.async_create_issue(
|
||||
hass,
|
||||
"homeassistant",
|
||||
issue_id,
|
||||
learn_more_url=DEPRECATION_URL,
|
||||
is_fixable=False,
|
||||
severity=IssueSeverity.WARNING,
|
||||
translation_key=issue_id,
|
||||
translation_placeholders={
|
||||
"installation_guide": "https://www.home-assistant.io/installation/",
|
||||
},
|
||||
)
|
||||
bit32 = _is_32_bit()
|
||||
deprecated_architecture = bit32 and not (
|
||||
unsupported_board or unsupported_os_on_board
|
||||
)
|
||||
if not is_haos or deprecated_architecture:
|
||||
issue_id = "deprecated"
|
||||
if not is_haos:
|
||||
issue_id += "_method"
|
||||
if deprecated_architecture:
|
||||
issue_id += "_architecture"
|
||||
ir.async_create_issue(
|
||||
hass,
|
||||
"homeassistant",
|
||||
issue_id,
|
||||
learn_more_url=DEPRECATION_URL,
|
||||
is_fixable=False,
|
||||
severity=IssueSeverity.WARNING,
|
||||
translation_key=issue_id,
|
||||
translation_placeholders={
|
||||
"installation_type": "OS" if is_haos else "Supervised",
|
||||
"arch": arch,
|
||||
},
|
||||
)
|
||||
listener()
|
||||
|
||||
listener = coordinator.async_add_listener(deprecated_setup_issue)
|
||||
|
||||
await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
|
||||
|
||||
@@ -435,7 +500,11 @@ async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
||||
"""Unload a config entry."""
|
||||
unload_ok = await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
|
||||
|
||||
# Pop coordinators and entry-level data
|
||||
# Unload coordinator
|
||||
coordinator: HassioMainDataUpdateCoordinator = hass.data[MAIN_COORDINATOR]
|
||||
coordinator.unload()
|
||||
|
||||
# Pop coordinators
|
||||
hass.data.pop(MAIN_COORDINATOR, None)
|
||||
hass.data.pop(ADDONS_COORDINATOR, None)
|
||||
hass.data.pop(STATS_COORDINATOR, None)
|
||||
|
||||
@@ -9,33 +9,27 @@ from aiohttp import web
|
||||
|
||||
from homeassistant.components import frontend
|
||||
from homeassistant.components.http import HomeAssistantView, require_admin
|
||||
from homeassistant.const import EVENT_HOMEASSISTANT_START
|
||||
from homeassistant.core import Event, HomeAssistant
|
||||
from homeassistant.core import HomeAssistant
|
||||
|
||||
from .handler import get_supervisor_client
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def async_setup_addon_panel(hass: HomeAssistant) -> None:
|
||||
async def async_setup_addon_panel(hass: HomeAssistant) -> None:
|
||||
"""Add-on Ingress Panel setup."""
|
||||
hassio_addon_panel = HassIOAddonPanel(hass)
|
||||
hass.http.register_view(hassio_addon_panel)
|
||||
|
||||
# Handle existing panels on startup
|
||||
async def _async_panel_start_handler(event: Event) -> None:
|
||||
"""Process all existing panels on startup."""
|
||||
# Check if there are panels to register
|
||||
if not (panels := await hassio_addon_panel.get_panels()):
|
||||
return
|
||||
# If panels are exists
|
||||
if not (panels := await hassio_addon_panel.get_panels()):
|
||||
return
|
||||
|
||||
# Register available panels
|
||||
for addon, data in panels.items():
|
||||
if not data.enable:
|
||||
continue
|
||||
_register_panel(hass, addon, data)
|
||||
|
||||
hass.bus.async_listen_once(EVENT_HOMEASSISTANT_START, _async_panel_start_handler)
|
||||
# Register available panels
|
||||
for addon, data in panels.items():
|
||||
if not data.enable:
|
||||
continue
|
||||
_register_panel(hass, addon, data)
|
||||
|
||||
|
||||
class HassIOAddonPanel(HomeAssistantView):
|
||||
|
||||
@@ -6,13 +6,10 @@ import logging
|
||||
import os
|
||||
|
||||
from aiohttp import web
|
||||
from aiohttp.web_exceptions import (
|
||||
HTTPNotFound,
|
||||
HTTPServiceUnavailable,
|
||||
HTTPUnauthorized,
|
||||
)
|
||||
from aiohttp.web_exceptions import HTTPNotFound, HTTPUnauthorized
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.auth.models import User
|
||||
from homeassistant.auth.providers import homeassistant as auth_ha
|
||||
from homeassistant.components.http import KEY_HASS, KEY_HASS_USER, HomeAssistantView
|
||||
from homeassistant.components.http.const import is_supervisor_unix_socket_request
|
||||
@@ -20,31 +17,31 @@ from homeassistant.components.http.data_validator import RequestDataValidator
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.helpers import config_validation as cv
|
||||
|
||||
from .const import ATTR_ADDON, ATTR_PASSWORD, ATTR_USERNAME, DATA_HASSIO_SUPERVISOR_USER
|
||||
from .const import ATTR_ADDON, ATTR_PASSWORD, ATTR_USERNAME
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
||||
@callback
|
||||
def async_setup_auth_view(hass: HomeAssistant) -> None:
|
||||
def async_setup_auth_view(hass: HomeAssistant, user: User) -> None:
|
||||
"""Auth setup."""
|
||||
hass.http.register_view(HassIOAuth(hass))
|
||||
hass.http.register_view(HassIOPasswordReset(hass))
|
||||
hassio_auth = HassIOAuth(hass, user)
|
||||
hassio_password_reset = HassIOPasswordReset(hass, user)
|
||||
|
||||
hass.http.register_view(hassio_auth)
|
||||
hass.http.register_view(hassio_password_reset)
|
||||
|
||||
|
||||
class HassIOBaseAuth(HomeAssistantView):
|
||||
"""Hass.io view to handle auth requests."""
|
||||
|
||||
def __init__(self, hass: HomeAssistant) -> None:
|
||||
def __init__(self, hass: HomeAssistant, user: User) -> None:
|
||||
"""Initialize WebView."""
|
||||
self.hass = hass
|
||||
self.user = user
|
||||
|
||||
def _check_access(self, request: web.Request) -> None:
|
||||
"""Check if this call is from Supervisor."""
|
||||
user = self.hass.data.get(DATA_HASSIO_SUPERVISOR_USER)
|
||||
if user is None:
|
||||
raise HTTPServiceUnavailable
|
||||
|
||||
# Requests over the Supervisor Unix socket are authenticated by the
|
||||
# http auth middleware as the Supervisor user, so the caller-IP check
|
||||
# below does not apply (and would crash, since `peername` is empty for
|
||||
@@ -59,7 +56,7 @@ class HassIOBaseAuth(HomeAssistantView):
|
||||
raise HTTPUnauthorized
|
||||
|
||||
# Check caller token
|
||||
if request[KEY_HASS_USER].id != user.id:
|
||||
if request[KEY_HASS_USER].id != self.user.id:
|
||||
_LOGGER.error("Invalid auth request from %s", request[KEY_HASS_USER].name)
|
||||
raise HTTPUnauthorized
|
||||
|
||||
|
||||
@@ -2,7 +2,7 @@
|
||||
|
||||
from datetime import timedelta
|
||||
from enum import StrEnum
|
||||
from typing import TYPE_CHECKING, Any
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
from homeassistant.util.hass_dict import HassKey
|
||||
|
||||
@@ -20,8 +20,6 @@ if TYPE_CHECKING:
|
||||
SupervisorInfo,
|
||||
)
|
||||
|
||||
from homeassistant.auth.models import User
|
||||
|
||||
from .config import HassioConfig
|
||||
from .coordinator import (
|
||||
HassioAddOnDataUpdateCoordinator,
|
||||
@@ -147,9 +145,6 @@ DATA_KEY_CORE = "core"
|
||||
DATA_KEY_HOST = "host"
|
||||
DATA_KEY_SUPERVISOR_ISSUES: HassKey[SupervisorIssues] = HassKey("supervisor_issues")
|
||||
DATA_KEY_MOUNTS = "mounts"
|
||||
DATA_HASSIO_HTTP_CONFIG: HassKey[dict[str, Any]] = HassKey("hassio_http_config")
|
||||
DATA_HASSIO_HOST: HassKey[str] = HassKey("hassio_host")
|
||||
DATA_HASSIO_SUPERVISOR_USER: HassKey[User] = HassKey("hassio_supervisor_user")
|
||||
|
||||
PLACEHOLDER_KEY_ADDON = "addon"
|
||||
PLACEHOLDER_KEY_ADDON_INFO = "addon_info"
|
||||
@@ -167,6 +162,7 @@ ISSUE_KEY_ADDON_PWNED = "issue_addon_pwned"
|
||||
ISSUE_KEY_SYSTEM_FREE_SPACE = "issue_system_free_space"
|
||||
ISSUE_KEY_ADDON_DEPRECATED = "issue_addon_deprecated_addon"
|
||||
ISSUE_KEY_ADDON_DEPRECATED_ARCH = "issue_addon_deprecated_arch_addon"
|
||||
ISSUE_KEY_LEGACY_HOMEASSISTANT_FOLDER = "legacy_homeassistant_folder"
|
||||
|
||||
ISSUE_MOUNT_MOUNT_FAILED = "issue_mount_mount_failed"
|
||||
|
||||
|
||||
@@ -70,7 +70,6 @@ from .const import (
|
||||
UPDATE_KEY_SUPERVISOR,
|
||||
SupervisorEntityModel,
|
||||
)
|
||||
from .exceptions import HassioNotReadyError
|
||||
from .handler import get_supervisor_client
|
||||
from .jobs import SupervisorJobs
|
||||
|
||||
@@ -181,50 +180,44 @@ def _installed_addon_from_complete(info: InstalledAddonComplete) -> InstalledAdd
|
||||
|
||||
|
||||
@callback
|
||||
def get_info(hass: HomeAssistant) -> dict[str, Any]:
|
||||
def get_info(hass: HomeAssistant) -> dict[str, Any] | None:
|
||||
"""Return generic information from Supervisor.
|
||||
|
||||
Async friendly.
|
||||
"""
|
||||
info = hass.data.get(DATA_INFO)
|
||||
if info is None:
|
||||
raise HassioNotReadyError
|
||||
return info.to_dict()
|
||||
return info.to_dict() if info is not None else None
|
||||
|
||||
|
||||
@callback
|
||||
def get_host_info(hass: HomeAssistant) -> dict[str, Any]:
|
||||
def get_host_info(hass: HomeAssistant) -> dict[str, Any] | None:
|
||||
"""Return generic host information.
|
||||
|
||||
Async friendly.
|
||||
"""
|
||||
info = hass.data.get(DATA_HOST_INFO)
|
||||
if info is None:
|
||||
raise HassioNotReadyError
|
||||
return info.to_dict()
|
||||
return info.to_dict() if info is not None else None
|
||||
|
||||
|
||||
@callback
|
||||
def get_store(hass: HomeAssistant) -> dict[str, Any]:
|
||||
def get_store(hass: HomeAssistant) -> dict[str, Any] | None:
|
||||
"""Return store information.
|
||||
|
||||
Async friendly.
|
||||
"""
|
||||
info = hass.data.get(DATA_STORE)
|
||||
if info is None:
|
||||
raise HassioNotReadyError
|
||||
return info.to_dict()
|
||||
return info.to_dict() if info is not None else None
|
||||
|
||||
|
||||
@callback
|
||||
def get_supervisor_info(hass: HomeAssistant) -> dict[str, Any]:
|
||||
def get_supervisor_info(hass: HomeAssistant) -> dict[str, Any] | None:
|
||||
"""Return Supervisor information.
|
||||
|
||||
Async friendly.
|
||||
"""
|
||||
info = hass.data.get(DATA_SUPERVISOR_INFO)
|
||||
if info is None:
|
||||
raise HassioNotReadyError
|
||||
return None
|
||||
result = info.to_dict()
|
||||
# Deprecated 2026.4.0: Folding repositories and addons into supervisor_info
|
||||
# for backwards compatibility. Can be removed after deprecation period.
|
||||
@@ -236,19 +229,17 @@ def get_supervisor_info(hass: HomeAssistant) -> dict[str, Any]:
|
||||
|
||||
|
||||
@callback
|
||||
def get_network_info(hass: HomeAssistant) -> dict[str, Any]:
|
||||
def get_network_info(hass: HomeAssistant) -> dict[str, Any] | None:
|
||||
"""Return Host Network information.
|
||||
|
||||
Async friendly.
|
||||
"""
|
||||
info = hass.data.get(DATA_NETWORK_INFO)
|
||||
if info is None:
|
||||
raise HassioNotReadyError
|
||||
return info.to_dict()
|
||||
return info.to_dict() if info is not None else None
|
||||
|
||||
|
||||
@callback
|
||||
def get_addons_info(hass: HomeAssistant) -> dict[str, dict[str, Any] | None]:
|
||||
def get_addons_info(hass: HomeAssistant) -> dict[str, dict[str, Any] | None] | None:
|
||||
"""Return Addons info.
|
||||
|
||||
Async friendly.
|
||||
@@ -257,7 +248,7 @@ def get_addons_info(hass: HomeAssistant) -> dict[str, dict[str, Any] | None]:
|
||||
DATA_ADDONS_INFO
|
||||
)
|
||||
if addons_info is None:
|
||||
raise HassioNotReadyError
|
||||
return None
|
||||
# Converting these fields for compatibility as that is what was returned here.
|
||||
# We'll leave it this way as long as these component APIs continue to return
|
||||
# dictionaries. If/when we switch to using the aiohasupervisor models for everything
|
||||
@@ -275,15 +266,13 @@ def get_addons_info(hass: HomeAssistant) -> dict[str, dict[str, Any] | None]:
|
||||
|
||||
|
||||
@callback
|
||||
def get_addons_list(hass: HomeAssistant) -> list[dict[str, Any]]:
|
||||
def get_addons_list(hass: HomeAssistant) -> list[dict[str, Any]] | None:
|
||||
"""Return list of installed addons and subset of details for each.
|
||||
|
||||
Async friendly.
|
||||
"""
|
||||
addons = hass.data.get(DATA_ADDONS_LIST)
|
||||
if addons is None:
|
||||
raise HassioNotReadyError
|
||||
return [addon.to_dict() for addon in addons]
|
||||
return [addon.to_dict() for addon in addons] if addons is not None else None
|
||||
|
||||
|
||||
@callback
|
||||
@@ -320,27 +309,23 @@ def get_supervisor_stats(hass: HomeAssistant) -> dict[str, Any]:
|
||||
|
||||
|
||||
@callback
|
||||
def get_os_info(hass: HomeAssistant) -> dict[str, Any]:
|
||||
def get_os_info(hass: HomeAssistant) -> dict[str, Any] | None:
|
||||
"""Return OS information.
|
||||
|
||||
Async friendly.
|
||||
"""
|
||||
info = hass.data.get(DATA_OS_INFO)
|
||||
if info is None:
|
||||
raise HassioNotReadyError
|
||||
return info.to_dict()
|
||||
return info.to_dict() if info is not None else None
|
||||
|
||||
|
||||
@callback
|
||||
def get_core_info(hass: HomeAssistant) -> dict[str, Any]:
|
||||
def get_core_info(hass: HomeAssistant) -> dict[str, Any] | None:
|
||||
"""Return Home Assistant Core information from Supervisor.
|
||||
|
||||
Async friendly.
|
||||
"""
|
||||
info = hass.data.get(DATA_CORE_INFO)
|
||||
if info is None:
|
||||
raise HassioNotReadyError
|
||||
return info.to_dict()
|
||||
return info.to_dict() if info is not None else None
|
||||
|
||||
|
||||
@callback
|
||||
@@ -795,7 +780,10 @@ class HassioMainDataUpdateCoordinator(DataUpdateCoordinator[HassioMainData]):
|
||||
)
|
||||
self.entry_id = config_entry.entry_id
|
||||
self.dev_reg = dev_reg
|
||||
self.is_hass_os = False
|
||||
if info := self.hass.data.get(DATA_INFO):
|
||||
self.is_hass_os = info.hassos is not None
|
||||
else:
|
||||
self.is_hass_os = False
|
||||
self.supervisor_client = get_supervisor_client(hass)
|
||||
self.jobs = SupervisorJobs(hass)
|
||||
self._dispatcher_disconnect = async_dispatcher_connect(
|
||||
@@ -855,7 +843,6 @@ class HassioMainDataUpdateCoordinator(DataUpdateCoordinator[HassioMainData]):
|
||||
raise UpdateFailed(f"Error on Supervisor API: {err}") from err
|
||||
|
||||
# Build clean coordinator data
|
||||
self.is_hass_os = info.hassos is not None
|
||||
new_data = HassioMainData(
|
||||
core=core_info,
|
||||
supervisor=supervisor_info,
|
||||
@@ -941,8 +928,8 @@ class HassioMainDataUpdateCoordinator(DataUpdateCoordinator[HassioMainData]):
|
||||
log_failures, raise_on_auth_failed, scheduled, raise_on_entry_error
|
||||
)
|
||||
|
||||
async def async_shutdown(self) -> None:
|
||||
"""Shut down and clean up when config entry unloaded."""
|
||||
await super().async_shutdown()
|
||||
@callback
|
||||
def unload(self) -> None:
|
||||
"""Clean up when config entry unloaded."""
|
||||
self._dispatcher_disconnect()
|
||||
self.jobs.unload()
|
||||
|
||||
@@ -1,7 +0,0 @@
|
||||
"""Exceptions for the Hassio integration."""
|
||||
|
||||
from homeassistant.exceptions import HomeAssistantError
|
||||
|
||||
|
||||
class HassioNotReadyError(HomeAssistantError):
|
||||
"""Raised when Hassio data is not yet available."""
|
||||
@@ -20,7 +20,7 @@ from homeassistant.helpers.aiohttp_client import async_get_clientsession
|
||||
from homeassistant.helpers.typing import UNDEFINED
|
||||
from homeassistant.util.async_ import create_eager_task
|
||||
|
||||
from .const import DATA_HASSIO_HOST, X_HASS_SOURCE, X_INGRESS_PATH
|
||||
from .const import X_HASS_SOURCE, X_INGRESS_PATH
|
||||
from .http import should_compress
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
@@ -50,9 +50,8 @@ DISABLED_TIMEOUT = ClientTimeout(total=None)
|
||||
|
||||
|
||||
@callback
|
||||
def async_setup_ingress_view(hass: HomeAssistant) -> None:
|
||||
"""Set up the Hass.io ingress HTTP view."""
|
||||
host = hass.data[DATA_HASSIO_HOST]
|
||||
def async_setup_ingress_view(hass: HomeAssistant, host: str) -> None:
|
||||
"""Auth setup."""
|
||||
websession = async_get_clientsession(hass)
|
||||
|
||||
hassio_ingress = HassIOIngress(host, websession)
|
||||
|
||||
@@ -1,7 +1,6 @@
|
||||
"""Supervisor events monitor."""
|
||||
|
||||
import asyncio
|
||||
from collections.abc import Callable
|
||||
from dataclasses import dataclass, field
|
||||
from datetime import datetime
|
||||
import logging
|
||||
@@ -181,8 +180,6 @@ class SupervisorIssues:
|
||||
self._unhealthy_reasons: set[str] = set()
|
||||
self._issues: dict[UUID, Issue] = {}
|
||||
self._supervisor_client = get_supervisor_client(hass)
|
||||
self._disconnect: Callable[[], None] | None = None
|
||||
self._cancel_update_retry: Callable[[], None] | None = None
|
||||
|
||||
@property
|
||||
def unhealthy_reasons(self) -> set[str]:
|
||||
@@ -353,41 +350,24 @@ class SupervisorIssues:
|
||||
|
||||
async def setup(self) -> None:
|
||||
"""Create supervisor events listener."""
|
||||
await self.async_update()
|
||||
await self._update()
|
||||
|
||||
self._disconnect = async_dispatcher_connect(
|
||||
async_dispatcher_connect(
|
||||
self._hass, EVENT_SUPERVISOR_EVENT, self._supervisor_events_to_issues
|
||||
)
|
||||
|
||||
def unload(self) -> None:
|
||||
"""Remove supervisor events listener."""
|
||||
if self._disconnect is not None:
|
||||
self._disconnect()
|
||||
self._disconnect = None
|
||||
if self._cancel_update_retry is not None:
|
||||
self._cancel_update_retry()
|
||||
self._cancel_update_retry = None
|
||||
|
||||
async def async_update(self) -> None:
|
||||
"""Update issues from Supervisor resolution center."""
|
||||
if self._cancel_update_retry:
|
||||
self._cancel_update_retry()
|
||||
self._cancel_update_retry = None
|
||||
await self._update()
|
||||
|
||||
async def _update(self, _: datetime | None = None) -> None:
|
||||
"""Update issues from Supervisor resolution center with retry on failure."""
|
||||
"""Update issues from Supervisor resolution center."""
|
||||
try:
|
||||
data = await self._supervisor_client.resolution.info()
|
||||
except SupervisorError as err:
|
||||
_LOGGER.error("Failed to update supervisor issues: %r", err)
|
||||
self._cancel_update_retry = async_call_later(
|
||||
async_call_later(
|
||||
self._hass,
|
||||
REQUEST_REFRESH_DELAY,
|
||||
HassJob(self._update, cancel_on_shutdown=True),
|
||||
)
|
||||
return
|
||||
self._cancel_update_retry = None
|
||||
self.unhealthy_reasons = set(data.unhealthy)
|
||||
self.unsupported_reasons = set(data.unsupported)
|
||||
|
||||
@@ -411,7 +391,7 @@ class SupervisorIssues:
|
||||
and event.get(ATTR_UPDATE_KEY) == UPDATE_KEY_SUPERVISOR
|
||||
and event.get(ATTR_DATA, {}).get(ATTR_STARTUP) == STARTUP_COMPLETE
|
||||
):
|
||||
self._hass.async_create_task(self.async_update())
|
||||
self._hass.async_create_task(self._update())
|
||||
|
||||
elif event[ATTR_WS_EVENT] == EVENT_HEALTH_CHANGED:
|
||||
self.unhealthy_reasons = (
|
||||
|
||||
@@ -8,7 +8,11 @@ from aiohasupervisor import SupervisorError
|
||||
from aiohasupervisor.models import ContextType
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.components.repairs import RepairsFlow, RepairsFlowResult
|
||||
from homeassistant.components.repairs import (
|
||||
ConfirmRepairFlow,
|
||||
RepairsFlow,
|
||||
RepairsFlowResult,
|
||||
)
|
||||
from homeassistant.const import ATTR_NAME
|
||||
from homeassistant.core import HomeAssistant
|
||||
|
||||
@@ -21,6 +25,7 @@ from .const import (
|
||||
ISSUE_KEY_ADDON_DEPRECATED_ARCH,
|
||||
ISSUE_KEY_ADDON_DETACHED_ADDON_REMOVED,
|
||||
ISSUE_KEY_ADDON_PWNED,
|
||||
ISSUE_KEY_LEGACY_HOMEASSISTANT_FOLDER,
|
||||
ISSUE_KEY_SYSTEM_DOCKER_CONFIG,
|
||||
PLACEHOLDER_KEY_ADDON,
|
||||
PLACEHOLDER_KEY_ADDON_DOCUMENTATION,
|
||||
@@ -226,6 +231,8 @@ async def async_create_fix_flow(
|
||||
data: dict[str, str | int | float | None] | None,
|
||||
) -> RepairsFlow:
|
||||
"""Create flow."""
|
||||
if issue_id == ISSUE_KEY_LEGACY_HOMEASSISTANT_FOLDER:
|
||||
return ConfirmRepairFlow()
|
||||
supervisor_issues = get_issues_info(hass)
|
||||
issue = supervisor_issues and supervisor_issues.get_issue(issue_id)
|
||||
if issue and issue.key == ISSUE_KEY_SYSTEM_DOCKER_CONFIG:
|
||||
|
||||
@@ -28,6 +28,7 @@ from homeassistant.exceptions import HomeAssistantError, ServiceValidationError
|
||||
from homeassistant.helpers import (
|
||||
config_validation as cv,
|
||||
device_registry as dr,
|
||||
issue_registry as ir,
|
||||
selector,
|
||||
)
|
||||
from homeassistant.helpers.service import async_register_admin_service
|
||||
@@ -47,12 +48,11 @@ from .const import (
|
||||
ATTR_PASSWORD,
|
||||
ATTR_SLUG,
|
||||
DOMAIN,
|
||||
ISSUE_KEY_LEGACY_HOMEASSISTANT_FOLDER,
|
||||
MAIN_COORDINATOR,
|
||||
SupervisorEntityModel,
|
||||
)
|
||||
from .coordinator import HassioMainDataUpdateCoordinator, get_addons_info
|
||||
from .exceptions import HassioNotReadyError
|
||||
from .handler import get_supervisor_client
|
||||
|
||||
SERVICE_ADDON_START = "addon_start"
|
||||
SERVICE_ADDON_STOP = "addon_stop"
|
||||
@@ -78,7 +78,9 @@ VALID_ADDON_SLUG = vol.Match(re.compile(r"^[-_.A-Za-z0-9]+$"))
|
||||
LEGACY_FOLDER_HOMEASSISTANT = "homeassistant"
|
||||
|
||||
|
||||
def _normalize_partial_options_data(data: dict[str, Any]) -> dict[str, Any]:
|
||||
def _normalize_partial_options_data(
|
||||
hass: HomeAssistant, data: dict[str, Any]
|
||||
) -> dict[str, Any]:
|
||||
"""Map legacy aliases used by both partial backup and partial restore handlers."""
|
||||
if ATTR_APPS in data:
|
||||
data[ATTR_ADDONS] = data.pop(ATTR_APPS)
|
||||
@@ -92,6 +94,16 @@ def _normalize_partial_options_data(data: dict[str, Any]) -> dict[str, Any]:
|
||||
f"{LEGACY_FOLDER_HOMEASSISTANT!r} entry in {ATTR_FOLDERS}"
|
||||
)
|
||||
data[ATTR_HOMEASSISTANT] = True
|
||||
ir.async_create_issue(
|
||||
hass,
|
||||
DOMAIN,
|
||||
ISSUE_KEY_LEGACY_HOMEASSISTANT_FOLDER,
|
||||
breaks_in_ha_version="2026.12.0",
|
||||
is_fixable=True,
|
||||
is_persistent=True,
|
||||
severity=ir.IssueSeverity.WARNING,
|
||||
translation_key=ISSUE_KEY_LEGACY_HOMEASSISTANT_FOLDER,
|
||||
)
|
||||
if folders:
|
||||
data[ATTR_FOLDERS] = folders
|
||||
else:
|
||||
@@ -104,13 +116,7 @@ def valid_addon(value: Any) -> str:
|
||||
value = VALID_ADDON_SLUG(value)
|
||||
hass = async_get_hass_or_none()
|
||||
|
||||
if not hass:
|
||||
return value
|
||||
try:
|
||||
addons = get_addons_info(hass)
|
||||
except HassioNotReadyError:
|
||||
return value
|
||||
if value not in addons:
|
||||
if hass and (addons := get_addons_info(hass)) is not None and value not in addons:
|
||||
raise vol.Invalid("Not a valid app slug")
|
||||
return value
|
||||
|
||||
@@ -203,9 +209,10 @@ SCHEMA_MOUNT_RELOAD = vol.Schema(
|
||||
|
||||
|
||||
@callback
|
||||
def async_setup_services(hass: HomeAssistant) -> None:
|
||||
def async_setup_services(
|
||||
hass: HomeAssistant, supervisor_client: SupervisorClient
|
||||
) -> None:
|
||||
"""Register the Supervisor services."""
|
||||
supervisor_client = get_supervisor_client(hass)
|
||||
async_register_app_services(hass, supervisor_client)
|
||||
async_register_host_services(hass, supervisor_client)
|
||||
async_register_backup_restore_services(hass, supervisor_client)
|
||||
@@ -382,7 +389,7 @@ def async_register_backup_restore_services(
|
||||
service: ServiceCall,
|
||||
) -> ServiceResponse:
|
||||
"""Handler for create partial backup service. Returns the new backup's ID."""
|
||||
data = _normalize_partial_options_data(service.data.copy())
|
||||
data = _normalize_partial_options_data(hass, service.data.copy())
|
||||
options = PartialBackupOptions(**data)
|
||||
|
||||
try:
|
||||
@@ -429,7 +436,7 @@ def async_register_backup_restore_services(
|
||||
"""Handler for partial restore service."""
|
||||
data = service.data.copy()
|
||||
backup_slug = data.pop(ATTR_SLUG)
|
||||
data = _normalize_partial_options_data(data)
|
||||
data = _normalize_partial_options_data(hass, data)
|
||||
options = PartialRestoreOptions(**data)
|
||||
|
||||
try:
|
||||
|
||||
@@ -52,9 +52,6 @@
|
||||
},
|
||||
"mount_reload_unknown_device_id": {
|
||||
"message": "Device ID not found"
|
||||
},
|
||||
"supervisor_not_connected": {
|
||||
"message": "Not connected with the supervisor / system too busy"
|
||||
}
|
||||
},
|
||||
"issues": {
|
||||
@@ -206,6 +203,17 @@
|
||||
},
|
||||
"title": "Reboot required"
|
||||
},
|
||||
"legacy_homeassistant_folder": {
|
||||
"fix_flow": {
|
||||
"step": {
|
||||
"confirm": {
|
||||
"description": "An automation or script called the `hassio.backup_partial` or `hassio.restore_partial` action with `\"homeassistant\"` listed in `folders`. This is a legacy alias for the `homeassistant: true` option and will stop being accepted in a future release.\n\nUpdate the affected automations and scripts to set `homeassistant: true` and remove `\"homeassistant\"` from the `folders` list. When this is done, select **Submit** to mark this issue as resolved.",
|
||||
"title": "Legacy \"homeassistant\" folder used in partial backup/restore"
|
||||
}
|
||||
}
|
||||
},
|
||||
"title": "Legacy \"homeassistant\" folder used in partial backup/restore"
|
||||
},
|
||||
"unhealthy": {
|
||||
"description": "System is currently unhealthy due to {reason}. For troubleshooting information, select Learn more.",
|
||||
"title": "Unhealthy system - {reason}"
|
||||
|
||||
@@ -1,6 +1,5 @@
|
||||
"""Provide info to system health."""
|
||||
|
||||
from collections.abc import Callable
|
||||
import os
|
||||
from typing import Any
|
||||
|
||||
@@ -15,7 +14,6 @@ from .coordinator import (
|
||||
get_os_info,
|
||||
get_supervisor_info,
|
||||
)
|
||||
from .exceptions import HassioNotReadyError
|
||||
|
||||
SUPERVISOR_PING = "http://{ip_address}/supervisor/ping"
|
||||
OBSERVER_URL = "http://{ip_address}:4357"
|
||||
@@ -29,30 +27,17 @@ def async_register(
|
||||
register.async_register_info(system_health_info)
|
||||
|
||||
|
||||
def _get_supervisor_data_if_available(
|
||||
hass: HomeAssistant, get_info_dict: Callable[[HomeAssistant], dict[str, Any]]
|
||||
) -> dict[str, Any]:
|
||||
"""Get data from supervisor if available."""
|
||||
try:
|
||||
return get_info_dict(hass)
|
||||
except HassioNotReadyError:
|
||||
return {}
|
||||
|
||||
|
||||
async def system_health_info(hass: HomeAssistant) -> dict[str, Any]:
|
||||
"""Get info for the info page."""
|
||||
ip_address = os.environ["SUPERVISOR"]
|
||||
info = _get_supervisor_data_if_available(hass, get_info)
|
||||
host_info = _get_supervisor_data_if_available(hass, get_host_info)
|
||||
supervisor_info = _get_supervisor_data_if_available(hass, get_supervisor_info)
|
||||
network_info = _get_supervisor_data_if_available(hass, get_network_info)
|
||||
try:
|
||||
addons_list = get_addons_list(hass)
|
||||
except HassioNotReadyError:
|
||||
addons_list = []
|
||||
info = get_info(hass) or {}
|
||||
host_info = get_host_info(hass) or {}
|
||||
supervisor_info = get_supervisor_info(hass)
|
||||
network_info = get_network_info(hass) or {}
|
||||
addons_list = get_addons_list(hass) or []
|
||||
|
||||
healthy: bool | dict[str, str]
|
||||
if supervisor_info and supervisor_info.get("healthy"):
|
||||
if supervisor_info is not None and supervisor_info.get("healthy"):
|
||||
healthy = True
|
||||
else:
|
||||
healthy = {
|
||||
@@ -61,7 +46,7 @@ async def system_health_info(hass: HomeAssistant) -> dict[str, Any]:
|
||||
}
|
||||
|
||||
supported: bool | dict[str, str]
|
||||
if supervisor_info and supervisor_info.get("supported"):
|
||||
if supervisor_info is not None and supervisor_info.get("supported"):
|
||||
supported = True
|
||||
else:
|
||||
supported = {
|
||||
@@ -96,7 +81,7 @@ async def system_health_info(hass: HomeAssistant) -> dict[str, Any]:
|
||||
}
|
||||
|
||||
if info.get("hassos") is not None:
|
||||
os_info = get_os_info(hass)
|
||||
os_info = get_os_info(hass) or {}
|
||||
information["board"] = os_info.get("board")
|
||||
|
||||
# Not using aiohasupervisor for ping call below intentionally. Given system health
|
||||
|
||||
@@ -39,7 +39,6 @@ from .const import (
|
||||
WS_TYPE_SUBSCRIBE,
|
||||
)
|
||||
from .coordinator import get_addons_list
|
||||
from .exceptions import HassioNotReadyError
|
||||
from .handler import HassioAPIError
|
||||
from .update_helper import update_addon, update_core
|
||||
|
||||
@@ -175,20 +174,7 @@ async def websocket_update_addon(
|
||||
"""Websocket handler to update an addon."""
|
||||
addon_name: str | None = None
|
||||
addon_version: str | None = None
|
||||
try:
|
||||
addons_list: list[dict[str, Any]] = get_addons_list(hass)
|
||||
except HassioNotReadyError:
|
||||
_LOGGER.error(
|
||||
"Update command received for app %s but apps list is not available",
|
||||
msg["addon"],
|
||||
)
|
||||
connection.send_error(
|
||||
msg[WS_ID],
|
||||
code=websocket_api.ERR_UNKNOWN_ERROR,
|
||||
message="Apps list is not available",
|
||||
)
|
||||
return
|
||||
|
||||
addons_list: list[dict[str, Any]] = get_addons_list(hass) or []
|
||||
for addon in addons_list:
|
||||
if addon[ATTR_SLUG] == msg["addon"]:
|
||||
addon_name = addon[ATTR_NAME]
|
||||
|
||||
@@ -88,6 +88,8 @@ def get_user_step_schema(data: Mapping[str, Any]) -> vol.Schema:
|
||||
travel_mode = TRAVEL_MODE_PUBLIC
|
||||
return vol.Schema(
|
||||
{
|
||||
# Name field is no longer allowed in config flow schemas
|
||||
# pylint: disable-next=hass-config-flow-name-field
|
||||
vol.Optional(
|
||||
CONF_NAME, default=data.get(CONF_NAME, DEFAULT_NAME)
|
||||
): cv.string,
|
||||
|
||||
@@ -156,6 +156,8 @@ class HiveFlowHandler(ConfigFlow, domain=DOMAIN):
|
||||
errors["base"] = "unknown"
|
||||
|
||||
schema = vol.Schema(
|
||||
# Name field is no longer allowed in config flow schemas
|
||||
# pylint: disable-next=hass-config-flow-name-field
|
||||
{vol.Optional(CONF_DEVICE_NAME, default=self.device_name): str}
|
||||
)
|
||||
return self.async_show_form(
|
||||
|
||||
@@ -5,7 +5,7 @@ import logging
|
||||
|
||||
from awesomeversion import AwesomeVersion, AwesomeVersionStrategy
|
||||
|
||||
from homeassistant.components.hassio import HassioNotReadyError, get_supervisor_info
|
||||
from homeassistant.components.hassio import get_supervisor_info
|
||||
from homeassistant.const import __version__
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.aiohttp_client import async_get_clientsession
|
||||
@@ -78,9 +78,7 @@ class AlertUpdateCoordinator(DataUpdateCoordinator[dict[str, IntegrationAlert]])
|
||||
continue
|
||||
|
||||
if self.supervisor and "supervisor" in alert:
|
||||
try:
|
||||
supervisor_info = get_supervisor_info(self.hass)
|
||||
except HassioNotReadyError:
|
||||
if (supervisor_info := get_supervisor_info(self.hass)) is None:
|
||||
continue
|
||||
|
||||
if "affected_from_version" in alert["supervisor"]:
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
"""The Home Assistant Green integration."""
|
||||
|
||||
from homeassistant.components.hassio import HassioNotReadyError, get_os_info
|
||||
from homeassistant.components.hassio import get_os_info
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import ConfigEntryNotReady
|
||||
@@ -14,10 +14,9 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
||||
hass.async_create_task(hass.config_entries.async_remove(entry.entry_id))
|
||||
return False
|
||||
|
||||
try:
|
||||
os_info = get_os_info(hass)
|
||||
except HassioNotReadyError as err:
|
||||
raise ConfigEntryNotReady from err
|
||||
if (os_info := get_os_info(hass)) is None:
|
||||
# The hassio integration has not yet fetched data from the supervisor
|
||||
raise ConfigEntryNotReady
|
||||
|
||||
board: str | None
|
||||
if (board := os_info.get("board")) is None or board != "green":
|
||||
|
||||
@@ -16,7 +16,8 @@ MODEL = "green"
|
||||
@callback
|
||||
def async_info(hass: HomeAssistant) -> list[HardwareInfo]:
|
||||
"""Return board info."""
|
||||
os_info = get_os_info(hass)
|
||||
if (os_info := get_os_info(hass)) is None:
|
||||
raise HomeAssistantError
|
||||
board: str | None
|
||||
if (board := os_info.get("board")) is None:
|
||||
raise HomeAssistantError
|
||||
|
||||
@@ -16,7 +16,6 @@ from homeassistant.components.hassio import (
|
||||
AddonError,
|
||||
AddonManager,
|
||||
AddonState,
|
||||
HassioNotReadyError,
|
||||
get_apps_list,
|
||||
)
|
||||
from homeassistant.config_entries import ConfigEntryState
|
||||
@@ -301,11 +300,7 @@ async def guess_hardware_owners(
|
||||
)
|
||||
|
||||
# Z2M can be provided by one of many add-ons, we match them by name
|
||||
try:
|
||||
apps_list = get_apps_list(hass)
|
||||
except HassioNotReadyError:
|
||||
apps_list = []
|
||||
for app_info in apps_list:
|
||||
for app_info in get_apps_list(hass) or []:
|
||||
slug = app_info.get("slug")
|
||||
|
||||
if not isinstance(slug, str) or Z2M_ADDON_SLUG_REGEX.fullmatch(slug) is None:
|
||||
|
||||
@@ -3,7 +3,7 @@
|
||||
from dataclasses import dataclass
|
||||
import logging
|
||||
|
||||
from homeassistant.components.hassio import HassioNotReadyError, get_os_info
|
||||
from homeassistant.components.hassio import get_os_info
|
||||
from homeassistant.components.homeassistant_hardware.coordinator import (
|
||||
FirmwareUpdateCoordinator,
|
||||
)
|
||||
@@ -58,10 +58,9 @@ async def async_setup_entry(
|
||||
hass.async_create_task(hass.config_entries.async_remove(entry.entry_id))
|
||||
return False
|
||||
|
||||
try:
|
||||
os_info = get_os_info(hass)
|
||||
except HassioNotReadyError as err:
|
||||
raise ConfigEntryNotReady from err
|
||||
if (os_info := get_os_info(hass)) is None:
|
||||
# The hassio integration has not yet fetched data from the supervisor
|
||||
raise ConfigEntryNotReady
|
||||
|
||||
if os_info.get("board") != "yellow":
|
||||
# Not running on a Home Assistant Yellow, Home Assistant may have been migrated
|
||||
|
||||
@@ -16,7 +16,8 @@ MODEL = "yellow"
|
||||
@callback
|
||||
def async_info(hass: HomeAssistant) -> list[HardwareInfo]:
|
||||
"""Return board info."""
|
||||
os_info = get_os_info(hass)
|
||||
if (os_info := get_os_info(hass)) is None:
|
||||
raise HomeAssistantError
|
||||
board: str | None
|
||||
if (board := os_info.get("board")) is None:
|
||||
raise HomeAssistantError
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
"""Lutron Homeworks Series 4 and 8 config flow."""
|
||||
# pylint: disable=hass-config-flow-name-field # Name field is no longer allowed in config flow schemas
|
||||
|
||||
from functools import partial
|
||||
import logging
|
||||
|
||||
@@ -380,6 +380,8 @@ class HuaweiLteOptionsFlow(OptionsFlow):
|
||||
|
||||
data_schema = vol.Schema(
|
||||
{
|
||||
# Name field is no longer allowed in config flow schemas
|
||||
# pylint: disable-next=hass-config-flow-name-field
|
||||
vol.Optional(
|
||||
CONF_NAME,
|
||||
default=self.config_entry.options.get(
|
||||
|
||||
@@ -290,7 +290,9 @@ class InfluxDBConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
scheme="https" if entry.data.get(CONF_SSL) else "http",
|
||||
host=entry.data.get(CONF_HOST, ""),
|
||||
port=entry.data.get(CONF_PORT),
|
||||
path=entry.data.get(CONF_PATH, ""),
|
||||
path=""
|
||||
if entry.data.get(CONF_PATH) is None
|
||||
else entry.data[CONF_PATH],
|
||||
)
|
||||
)
|
||||
|
||||
|
||||
@@ -6,5 +6,5 @@
|
||||
"iot_class": "cloud_push",
|
||||
"loggers": ["pyintesishome"],
|
||||
"quality_scale": "legacy",
|
||||
"requirements": ["pyintesishome==1.8.0"]
|
||||
"requirements": ["pyintesishome==1.8.7"]
|
||||
}
|
||||
|
||||
@@ -79,6 +79,8 @@ class IslamicPrayerFlowHandler(ConfigFlow, domain=DOMAIN):
|
||||
step_id="user",
|
||||
data_schema=vol.Schema(
|
||||
{
|
||||
# Name field is no longer allowed in config flow schemas
|
||||
# pylint: disable-next=hass-config-flow-name-field
|
||||
vol.Optional(CONF_NAME, default=NAME): TextSelector(),
|
||||
vol.Required(
|
||||
CONF_LOCATION, default=home_location
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
"""Config flow for konnected.io integration."""
|
||||
# pylint: disable=hass-config-flow-name-field # Name field is no longer allowed in config flow schemas
|
||||
|
||||
import asyncio
|
||||
import copy
|
||||
|
||||
@@ -42,6 +42,8 @@ DATA_SCHEMA_OPTIONS = vol.Schema(
|
||||
)
|
||||
DATA_SCHEMA_SETUP = vol.Schema(
|
||||
{
|
||||
# Approved exemption: user names the local file camera
|
||||
# pylint: disable-next=hass-config-flow-name-field
|
||||
vol.Optional(CONF_NAME, default=DEFAULT_NAME): TextSelector(),
|
||||
}
|
||||
).extend(DATA_SCHEMA_OPTIONS.schema)
|
||||
|
||||
@@ -110,6 +110,8 @@ class LoqedConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
if self._host
|
||||
else vol.Schema(
|
||||
{
|
||||
# Name field is no longer allowed in config flow schemas
|
||||
# pylint: disable-next=hass-config-flow-name-field
|
||||
vol.Required(CONF_NAME): str,
|
||||
vol.Required(CONF_API_TOKEN): str,
|
||||
}
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
"""Config flow to configure Met component."""
|
||||
# pylint: disable=hass-config-flow-name-field # Name field is no longer allowed in config flow schemas
|
||||
|
||||
from typing import Any
|
||||
|
||||
|
||||
@@ -31,6 +31,8 @@ class MetEireannFlowHandler(ConfigFlow, domain=DOMAIN):
|
||||
step_id="user",
|
||||
data_schema=vol.Schema(
|
||||
{
|
||||
# Name field is no longer allowed in config flow schemas
|
||||
# pylint: disable-next=hass-config-flow-name-field
|
||||
vol.Required(CONF_NAME, default=HOME_LOCATION_NAME): str,
|
||||
vol.Required(
|
||||
CONF_LATITUDE, default=self.hass.config.latitude
|
||||
|
||||
@@ -57,6 +57,8 @@ def async_get_schema(
|
||||
|
||||
if show_name:
|
||||
schema = {
|
||||
# Name field is no longer allowed in config flow schemas
|
||||
# pylint: disable-next=hass-config-flow-name-field
|
||||
vol.Required(CONF_NAME, default=defaults.get(CONF_NAME)): str,
|
||||
**schema,
|
||||
}
|
||||
|
||||
@@ -19,6 +19,8 @@ DEFAULT_NAME = "myStrom Device"
|
||||
|
||||
STEP_USER_DATA_SCHEMA = vol.Schema(
|
||||
{
|
||||
# Name field is no longer allowed in config flow schemas
|
||||
# pylint: disable-next=hass-config-flow-name-field
|
||||
vol.Optional(CONF_NAME, default=DEFAULT_NAME): str,
|
||||
vol.Required(CONF_HOST): str,
|
||||
}
|
||||
|
||||
@@ -71,6 +71,7 @@ from .media_source import (
|
||||
async_get_media_source_devices,
|
||||
async_get_transcoder,
|
||||
)
|
||||
from .services import async_setup_services
|
||||
from .types import DevicesAddedListener, NestConfigEntry, NestData
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
@@ -115,6 +116,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
||||
"""Set up Nest components with dispatch between old/new flows."""
|
||||
hass.http.register_view(NestEventMediaView(hass))
|
||||
hass.http.register_view(NestEventMediaThumbnailView(hass))
|
||||
async_setup_services(hass)
|
||||
return True
|
||||
|
||||
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
"""Support for Google Nest SDM climate devices."""
|
||||
|
||||
from datetime import timedelta
|
||||
from typing import Any, cast
|
||||
|
||||
from google_nest_sdm.device import Device
|
||||
@@ -67,7 +68,7 @@ FAN_MODE_MAP = {
|
||||
FAN_INV_MODE_MAP = {v: k for k, v in FAN_MODE_MAP.items()}
|
||||
FAN_INV_MODES = list(FAN_INV_MODE_MAP)
|
||||
|
||||
MAX_FAN_DURATION = 43200 # 15 hours is the max in the SDM API
|
||||
MAX_FAN_DURATION = 43200 # 12 hours is the max in the SDM API
|
||||
MIN_TEMP = 10
|
||||
MAX_TEMP = 32
|
||||
MIN_TEMP_RANGE = 1.66667
|
||||
@@ -344,3 +345,27 @@ class ThermostatEntity(ClimateEntity):
|
||||
raise HomeAssistantError(
|
||||
f"Error setting {self.entity_id} fan mode to {fan_mode}: {err}"
|
||||
) from err
|
||||
|
||||
async def async_set_fan_timer(self, duration: timedelta) -> None:
|
||||
"""Set a short term fan timer."""
|
||||
if not self.supported_features & ClimateEntityFeature.FAN_MODE:
|
||||
raise HomeAssistantError(f"Entity {self.entity_id} does not support fan")
|
||||
|
||||
if self.hvac_mode == HVACMode.OFF:
|
||||
raise HomeAssistantError(
|
||||
f"Cannot turn on fan for {self.entity_id}, please set an HVAC mode (e.g. heat/cool) first"
|
||||
)
|
||||
|
||||
seconds = int(duration.total_seconds())
|
||||
if seconds <= 0 or seconds > MAX_FAN_DURATION:
|
||||
raise ValueError(
|
||||
f"Duration {seconds} for {self.entity_id} must be between 1 and {MAX_FAN_DURATION} seconds"
|
||||
)
|
||||
|
||||
trait = self._device.traits[FanTrait.NAME]
|
||||
try:
|
||||
await trait.set_timer(FAN_INV_MODE_MAP[FAN_ON], duration=seconds)
|
||||
except ApiException as err:
|
||||
raise HomeAssistantError(
|
||||
f"Error setting {self.entity_id} fan timer: {err}"
|
||||
) from err
|
||||
|
||||
@@ -0,0 +1,7 @@
|
||||
{
|
||||
"services": {
|
||||
"set_fan_timer": {
|
||||
"service": "mdi:fan-clock"
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,30 @@
|
||||
"""Define services for the Nest integration."""
|
||||
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.components.climate import (
|
||||
DOMAIN as CLIMATE_DOMAIN,
|
||||
ClimateEntityFeature,
|
||||
)
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.helpers import config_validation as cv, service
|
||||
|
||||
from .const import DOMAIN
|
||||
|
||||
SERVICE_SET_FAN_TIMER = "set_fan_timer"
|
||||
|
||||
|
||||
@callback
|
||||
def async_setup_services(hass: HomeAssistant) -> None:
|
||||
"""Register services for the Nest integration."""
|
||||
service.async_register_platform_entity_service(
|
||||
hass,
|
||||
DOMAIN,
|
||||
SERVICE_SET_FAN_TIMER,
|
||||
entity_domain=CLIMATE_DOMAIN,
|
||||
schema={
|
||||
vol.Required("duration"): cv.time_period,
|
||||
},
|
||||
func="async_set_fan_timer",
|
||||
required_features=[ClimateEntityFeature.FAN_MODE],
|
||||
)
|
||||
@@ -0,0 +1,12 @@
|
||||
set_fan_timer:
|
||||
target:
|
||||
entity:
|
||||
domain: climate
|
||||
integration: nest
|
||||
supported_features:
|
||||
- climate.ClimateEntityFeature.FAN_MODE
|
||||
fields:
|
||||
duration:
|
||||
required: true
|
||||
selector:
|
||||
duration:
|
||||
@@ -163,5 +163,17 @@
|
||||
"create_new_topic": "Create new topic"
|
||||
}
|
||||
}
|
||||
},
|
||||
"services": {
|
||||
"set_fan_timer": {
|
||||
"description": "Sets the fan to run for a specific duration.",
|
||||
"fields": {
|
||||
"duration": {
|
||||
"description": "The duration the fan should run for.",
|
||||
"name": "Duration"
|
||||
}
|
||||
},
|
||||
"name": "Set fan timer"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -144,6 +144,8 @@ TOPIC_FILTER_SCHEMA = vol.Schema(
|
||||
STEP_USER_TOPIC_SCHEMA = vol.Schema(
|
||||
{
|
||||
vol.Required(CONF_TOPIC): str,
|
||||
# Name field is no longer allowed in config flow schemas
|
||||
# pylint: disable-next=hass-config-flow-name-field
|
||||
vol.Optional(CONF_NAME): str,
|
||||
vol.Required(SECTION_FILTER): data_entry_flow.section(
|
||||
TOPIC_FILTER_SCHEMA,
|
||||
|
||||
@@ -69,6 +69,8 @@ class NZBGetConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
|
||||
data_schema = {
|
||||
vol.Required(CONF_HOST): str,
|
||||
# Name field is no longer allowed in config flow schemas
|
||||
# pylint: disable-next=hass-config-flow-name-field
|
||||
vol.Optional(CONF_NAME, default=DEFAULT_NAME): str,
|
||||
vol.Optional(CONF_USERNAME): str,
|
||||
vol.Optional(CONF_PASSWORD): str,
|
||||
|
||||
@@ -418,6 +418,8 @@ def ollama_config_option_schema(
|
||||
default_name = DEFAULT_CONVERSATION_NAME
|
||||
|
||||
schema: dict = {
|
||||
# Name field is no longer allowed in config flow schemas
|
||||
# pylint: disable-next=hass-config-flow-name-field
|
||||
vol.Required(CONF_NAME, default=default_name): str,
|
||||
}
|
||||
else:
|
||||
|
||||
@@ -274,6 +274,8 @@ class OnvifFlowHandler(ConfigFlow, domain=DOMAIN):
|
||||
step_id="configure",
|
||||
data_schema=vol.Schema(
|
||||
{
|
||||
# Name field is no longer allowed in config flow schemas
|
||||
# pylint: disable-next=hass-config-flow-name-field
|
||||
vol.Required(CONF_NAME, default=conf(CONF_NAME)): str,
|
||||
vol.Required(CONF_HOST, default=conf(CONF_HOST)): str,
|
||||
vol.Required(CONF_PORT, default=conf(CONF_PORT, DEFAULT_PORT)): int,
|
||||
|
||||
@@ -17,6 +17,8 @@ _LOGGER = logging.getLogger(__name__)
|
||||
|
||||
STEP_USER_DATA_SCHEMA = vol.Schema(
|
||||
{
|
||||
# Name field is no longer allowed in config flow schemas
|
||||
# pylint: disable-next=hass-config-flow-name-field
|
||||
vol.Required(CONF_NAME): str,
|
||||
vol.Required(CONF_HOST): str,
|
||||
vol.Required(CONF_PORT, default=DEFAULT_PORT): cv.port,
|
||||
|
||||
@@ -99,6 +99,8 @@ class OpenThermGwConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
step_id="init",
|
||||
data_schema=vol.Schema(
|
||||
{
|
||||
# Name field is no longer allowed in config flow schemas
|
||||
# pylint: disable-next=hass-config-flow-name-field
|
||||
vol.Required(CONF_NAME): str,
|
||||
vol.Required(CONF_DEVICE): str,
|
||||
vol.Optional(CONF_ID): str,
|
||||
|
||||
@@ -26,6 +26,8 @@ STEP_USER_DATA_SCHEMA = vol.Schema(
|
||||
{
|
||||
vol.Optional(CONF_TOKEN): str,
|
||||
vol.Optional(CONF_NEW_TOKEN): BooleanSelector(BooleanSelectorConfig()),
|
||||
# Name field is no longer allowed in config flow schemas
|
||||
# pylint: disable-next=hass-config-flow-name-field
|
||||
vol.Required(CONF_NAME, default=DEFAULT_NAME): str,
|
||||
}
|
||||
)
|
||||
|
||||
@@ -0,0 +1,29 @@
|
||||
"""The Ouman EH-800 integration."""
|
||||
|
||||
from homeassistant.const import Platform
|
||||
from homeassistant.core import HomeAssistant
|
||||
|
||||
from .coordinator import OumanEh800ConfigEntry, OumanEh800Coordinator
|
||||
|
||||
_PLATFORMS: list[Platform] = [
|
||||
Platform.SENSOR,
|
||||
]
|
||||
|
||||
|
||||
async def async_setup_entry(hass: HomeAssistant, entry: OumanEh800ConfigEntry) -> bool:
|
||||
"""Set up Ouman EH-800 from a config entry."""
|
||||
coordinator = OumanEh800Coordinator(hass, entry)
|
||||
|
||||
await coordinator.async_config_entry_first_refresh()
|
||||
coordinator.sync_circuit_device_names()
|
||||
|
||||
entry.runtime_data = coordinator
|
||||
|
||||
await hass.config_entries.async_forward_entry_setups(entry, _PLATFORMS)
|
||||
|
||||
return True
|
||||
|
||||
|
||||
async def async_unload_entry(hass: HomeAssistant, entry: OumanEh800ConfigEntry) -> bool:
|
||||
"""Unload a config entry."""
|
||||
return await hass.config_entries.async_unload_platforms(entry, _PLATFORMS)
|
||||
@@ -0,0 +1,79 @@
|
||||
"""Config flow for the Ouman EH-800 integration."""
|
||||
|
||||
import logging
|
||||
from typing import Any
|
||||
|
||||
from ouman_eh_800_api import (
|
||||
OumanClientAuthenticationError,
|
||||
OumanClientCommunicationError,
|
||||
OumanEh800Client,
|
||||
)
|
||||
import voluptuous as vol
|
||||
from yarl import URL
|
||||
|
||||
from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
|
||||
from homeassistant.const import CONF_PASSWORD, CONF_URL, CONF_USERNAME
|
||||
from homeassistant.helpers.aiohttp_client import async_get_clientsession
|
||||
|
||||
from .const import DOMAIN
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
STEP_USER_DATA_SCHEMA = vol.Schema(
|
||||
{
|
||||
vol.Required(CONF_URL): str,
|
||||
vol.Required(CONF_USERNAME): str,
|
||||
vol.Required(CONF_PASSWORD): str,
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
def _normalize_url(url: str) -> str:
|
||||
"""Reduce URL to scheme://host[:port], discarding any path, query, or fragment."""
|
||||
return str(URL(url.strip()).origin())
|
||||
|
||||
|
||||
class OumanEh800ConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
"""Handle a config flow for Ouman EH-800."""
|
||||
|
||||
VERSION = 1
|
||||
|
||||
async def async_step_user(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
"""Handle the initial step."""
|
||||
errors: dict[str, str] = {}
|
||||
if user_input is not None:
|
||||
try:
|
||||
user_input[CONF_URL] = _normalize_url(user_input[CONF_URL])
|
||||
except ValueError:
|
||||
errors[CONF_URL] = "invalid_url"
|
||||
else:
|
||||
self._async_abort_entries_match({CONF_URL: user_input[CONF_URL]})
|
||||
client = OumanEh800Client(
|
||||
session=async_get_clientsession(self.hass),
|
||||
username=user_input[CONF_USERNAME],
|
||||
password=user_input[CONF_PASSWORD],
|
||||
address=user_input[CONF_URL],
|
||||
)
|
||||
try:
|
||||
await client.login()
|
||||
except OumanClientCommunicationError:
|
||||
errors["base"] = "cannot_connect"
|
||||
except OumanClientAuthenticationError:
|
||||
errors["base"] = "invalid_auth"
|
||||
except Exception:
|
||||
_LOGGER.exception("Unexpected exception")
|
||||
errors["base"] = "unknown"
|
||||
else:
|
||||
return self.async_create_entry(
|
||||
title="Ouman EH-800", data=user_input
|
||||
)
|
||||
|
||||
return self.async_show_form(
|
||||
step_id="user",
|
||||
data_schema=self.add_suggested_values_to_schema(
|
||||
STEP_USER_DATA_SCHEMA, user_input
|
||||
),
|
||||
errors=errors,
|
||||
)
|
||||
@@ -0,0 +1,15 @@
|
||||
"""Constants for the Ouman EH-800 integration."""
|
||||
|
||||
from enum import StrEnum
|
||||
|
||||
DOMAIN = "ouman_eh_800"
|
||||
|
||||
DEFAULT_SCAN_INTERVAL_SECONDS = 60
|
||||
|
||||
|
||||
class OumanDevice(StrEnum):
|
||||
"""Logical device that an entity belongs to."""
|
||||
|
||||
MAIN = "main"
|
||||
L1 = "l1"
|
||||
L2 = "l2"
|
||||
@@ -0,0 +1,117 @@
|
||||
"""Data update coordinator for the Ouman EH-800 integration."""
|
||||
|
||||
from datetime import timedelta
|
||||
import logging
|
||||
|
||||
from ouman_eh_800_api import (
|
||||
L1BaseEndpoints,
|
||||
L2BaseEndpoints,
|
||||
OumanClientAuthenticationError,
|
||||
OumanClientCommunicationError,
|
||||
OumanEh800Client,
|
||||
OumanEndpoint,
|
||||
OumanRegistrySet,
|
||||
OumanValues,
|
||||
)
|
||||
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import CONF_PASSWORD, CONF_URL, CONF_USERNAME
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import ConfigEntryError, ConfigEntryNotReady
|
||||
from homeassistant.helpers.aiohttp_client import async_get_clientsession
|
||||
from homeassistant.helpers.device_registry import DeviceInfo
|
||||
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
|
||||
|
||||
from .const import DEFAULT_SCAN_INTERVAL_SECONDS, DOMAIN, OumanDevice
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
type OumanEh800ConfigEntry = ConfigEntry[OumanEh800Coordinator]
|
||||
|
||||
|
||||
class OumanEh800Coordinator(DataUpdateCoordinator[dict[OumanEndpoint, OumanValues]]):
|
||||
"""Ouman EH-800 data update coordinator."""
|
||||
|
||||
_registry_set: OumanRegistrySet
|
||||
config_entry: OumanEh800ConfigEntry
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
hass: HomeAssistant,
|
||||
config_entry: OumanEh800ConfigEntry,
|
||||
) -> None:
|
||||
"""Initialize the coordinator."""
|
||||
super().__init__(
|
||||
hass,
|
||||
_LOGGER,
|
||||
name="Ouman EH-800",
|
||||
config_entry=config_entry,
|
||||
update_interval=timedelta(seconds=DEFAULT_SCAN_INTERVAL_SECONDS),
|
||||
always_update=False,
|
||||
)
|
||||
self.client: OumanEh800Client = OumanEh800Client(
|
||||
session=async_get_clientsession(hass),
|
||||
username=config_entry.data[CONF_USERNAME],
|
||||
password=config_entry.data[CONF_PASSWORD],
|
||||
address=config_entry.data[CONF_URL],
|
||||
)
|
||||
|
||||
entry_id = config_entry.entry_id
|
||||
main_device_identifier = (DOMAIN, entry_id)
|
||||
self.device_info: dict[OumanDevice, DeviceInfo] = {
|
||||
OumanDevice.MAIN: DeviceInfo(
|
||||
identifiers={main_device_identifier},
|
||||
manufacturer="Ouman",
|
||||
model="EH-800",
|
||||
configuration_url=config_entry.data[CONF_URL],
|
||||
),
|
||||
OumanDevice.L1: DeviceInfo(
|
||||
identifiers={(DOMAIN, f"{entry_id}_{OumanDevice.L1}")},
|
||||
translation_key="heating_circuit",
|
||||
translation_placeholders={"circuit_number": "1"},
|
||||
via_device=main_device_identifier,
|
||||
),
|
||||
OumanDevice.L2: DeviceInfo(
|
||||
identifiers={(DOMAIN, f"{entry_id}_{OumanDevice.L2}")},
|
||||
translation_key="heating_circuit",
|
||||
translation_placeholders={"circuit_number": "2"},
|
||||
via_device=main_device_identifier,
|
||||
),
|
||||
}
|
||||
|
||||
async def _async_setup(self) -> None:
|
||||
try:
|
||||
# Even though not required to fetch values, perform login once
|
||||
# at the start to verify that the credentials are valid.
|
||||
await self.client.login()
|
||||
self._registry_set = await self.client.get_active_registries()
|
||||
except OumanClientAuthenticationError as err:
|
||||
raise ConfigEntryError("Invalid credentials") from err
|
||||
except OumanClientCommunicationError as err:
|
||||
raise ConfigEntryNotReady("Error communicating with API") from err
|
||||
|
||||
async def _async_update_data(self) -> dict[OumanEndpoint, OumanValues]:
|
||||
"""Fetch registry values from the device."""
|
||||
try:
|
||||
return await self.client.get_values(self._registry_set)
|
||||
except OumanClientCommunicationError as err:
|
||||
raise UpdateFailed("Error communicating with API") from err
|
||||
|
||||
def sync_circuit_device_names(self) -> None:
|
||||
"""Set the device-reported circuit names for the L1/L2 sub-device names.
|
||||
|
||||
Should be called after the data update so that platforms register
|
||||
L1/L2 devices with the resolved names.
|
||||
"""
|
||||
for device, endpoint, circuit_number in (
|
||||
(OumanDevice.L1, L1BaseEndpoints.CIRCUIT_NAME, "1"),
|
||||
(OumanDevice.L2, L2BaseEndpoints.CIRCUIT_NAME, "2"),
|
||||
):
|
||||
if circuit_name := self.data.get(endpoint):
|
||||
assert isinstance(circuit_name, str)
|
||||
device_info = self.device_info[device]
|
||||
device_info["translation_key"] = "heating_circuit_with_name"
|
||||
device_info["translation_placeholders"] = {
|
||||
"circuit_number": circuit_number,
|
||||
"circuit_name": circuit_name,
|
||||
}
|
||||
@@ -0,0 +1,42 @@
|
||||
"""Base entity for Ouman EH-800."""
|
||||
|
||||
from dataclasses import dataclass
|
||||
|
||||
from ouman_eh_800_api import OumanEndpoint
|
||||
|
||||
from homeassistant.helpers.entity import EntityDescription
|
||||
from homeassistant.helpers.update_coordinator import CoordinatorEntity
|
||||
|
||||
from .const import OumanDevice
|
||||
from .coordinator import OumanEh800Coordinator
|
||||
|
||||
|
||||
@dataclass(frozen=True, kw_only=True)
|
||||
class OumanEh800EntityDescription(EntityDescription):
|
||||
"""Common Ouman EH-800 entity description fields."""
|
||||
|
||||
device: OumanDevice
|
||||
|
||||
|
||||
class OumanEh800Entity(CoordinatorEntity[OumanEh800Coordinator]):
|
||||
"""Base entity for Ouman EH-800."""
|
||||
|
||||
_attr_has_entity_name = True
|
||||
entity_description: OumanEh800EntityDescription
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
coordinator: OumanEh800Coordinator,
|
||||
endpoint: OumanEndpoint,
|
||||
description: OumanEh800EntityDescription,
|
||||
) -> None:
|
||||
"""Initialize the entity."""
|
||||
super().__init__(coordinator)
|
||||
self._endpoint = endpoint
|
||||
self.entity_description = description
|
||||
|
||||
self._attr_unique_id = (
|
||||
f"{coordinator.config_entry.entry_id}"
|
||||
f"_{description.device}_{description.key}"
|
||||
)
|
||||
self._attr_device_info = coordinator.device_info[description.device]
|
||||
@@ -0,0 +1,9 @@
|
||||
{
|
||||
"entity": {
|
||||
"sensor": {
|
||||
"valve_position": {
|
||||
"default": "mdi:pipe-valve"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,11 @@
|
||||
{
|
||||
"domain": "ouman_eh_800",
|
||||
"name": "Ouman EH-800",
|
||||
"codeowners": ["@Markus98"],
|
||||
"config_flow": true,
|
||||
"documentation": "https://www.home-assistant.io/integrations/ouman_eh_800",
|
||||
"integration_type": "device",
|
||||
"iot_class": "local_polling",
|
||||
"quality_scale": "bronze",
|
||||
"requirements": ["ouman-eh-800-api==0.5.0"]
|
||||
}
|
||||
@@ -0,0 +1,76 @@
|
||||
rules:
|
||||
# Bronze
|
||||
action-setup:
|
||||
status: exempt
|
||||
comment: Integration does not provide actions.
|
||||
appropriate-polling: done
|
||||
brands: done
|
||||
common-modules: done
|
||||
config-flow-test-coverage: done
|
||||
config-flow: done
|
||||
dependency-transparency: done
|
||||
docs-actions:
|
||||
status: exempt
|
||||
comment: Integration does not provide actions.
|
||||
docs-high-level-description: done
|
||||
docs-installation-instructions: done
|
||||
docs-removal-instructions: done
|
||||
entity-event-setup:
|
||||
status: exempt
|
||||
comment: Integration does not use events.
|
||||
entity-unique-id: done
|
||||
has-entity-name: done
|
||||
runtime-data: done
|
||||
test-before-configure: done
|
||||
test-before-setup: done
|
||||
unique-config-entry: done
|
||||
|
||||
# Silver
|
||||
action-exceptions:
|
||||
status: exempt
|
||||
comment: Integration does not provide actions.
|
||||
config-entry-unloading: done
|
||||
docs-configuration-parameters: done
|
||||
docs-installation-parameters: done
|
||||
entity-unavailable: done
|
||||
integration-owner: done
|
||||
log-when-unavailable: done
|
||||
parallel-updates: done
|
||||
reauthentication-flow: todo
|
||||
test-coverage: todo
|
||||
|
||||
# Gold
|
||||
devices: done
|
||||
diagnostics: todo
|
||||
discovery-update-info:
|
||||
status: exempt
|
||||
comment: Integration is local polling only, no discovery.
|
||||
discovery:
|
||||
status: exempt
|
||||
comment: Integration is local polling only, no discovery.
|
||||
docs-data-update: done
|
||||
docs-examples: todo
|
||||
docs-known-limitations: done
|
||||
docs-supported-devices: done
|
||||
docs-supported-functions: done
|
||||
docs-troubleshooting: todo
|
||||
docs-use-cases: todo
|
||||
dynamic-devices:
|
||||
status: exempt
|
||||
comment: Integration supports a single device per config entry.
|
||||
entity-category: done
|
||||
entity-device-class: done
|
||||
entity-disabled-by-default: done
|
||||
entity-translations: done
|
||||
exception-translations: todo
|
||||
icon-translations: done
|
||||
reconfiguration-flow: todo
|
||||
repair-issues: todo
|
||||
stale-devices:
|
||||
status: exempt
|
||||
comment: Integration supports a single device per config entry.
|
||||
|
||||
# Platinum
|
||||
async-dependency: done
|
||||
inject-websession: done
|
||||
strict-typing: done
|
||||
@@ -0,0 +1,186 @@
|
||||
"""Sensor platform for the Ouman EH-800 integration."""
|
||||
|
||||
from dataclasses import dataclass
|
||||
|
||||
from ouman_eh_800_api import (
|
||||
L1BaseEndpoints,
|
||||
L1RoomSensor,
|
||||
L2BaseEndpoints,
|
||||
L2RoomSensor,
|
||||
OumanEndpoint,
|
||||
SystemEndpoints,
|
||||
)
|
||||
|
||||
from homeassistant.components.sensor import (
|
||||
SensorDeviceClass,
|
||||
SensorEntity,
|
||||
SensorEntityDescription,
|
||||
SensorStateClass,
|
||||
)
|
||||
from homeassistant.const import PERCENTAGE, EntityCategory, UnitOfTemperature
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
|
||||
from .const import OumanDevice
|
||||
from .coordinator import OumanEh800ConfigEntry
|
||||
from .entity import OumanEh800Entity, OumanEh800EntityDescription
|
||||
|
||||
PARALLEL_UPDATES = 0
|
||||
|
||||
|
||||
@dataclass(frozen=True, kw_only=True)
|
||||
class OumanEh800SensorDescription(OumanEh800EntityDescription, SensorEntityDescription):
|
||||
"""Sensor description with main/L1/L2 device assignment."""
|
||||
|
||||
|
||||
def _temperature_sensor(
|
||||
*,
|
||||
device: OumanDevice,
|
||||
key: str,
|
||||
device_class: SensorDeviceClass = SensorDeviceClass.TEMPERATURE,
|
||||
entity_category: EntityCategory | None = None,
|
||||
enabled_by_default: bool = True,
|
||||
) -> OumanEh800SensorDescription:
|
||||
return OumanEh800SensorDescription(
|
||||
device=device,
|
||||
key=key,
|
||||
translation_key=key,
|
||||
device_class=device_class,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
|
||||
suggested_display_precision=1,
|
||||
entity_category=entity_category,
|
||||
entity_registry_enabled_default=enabled_by_default,
|
||||
)
|
||||
|
||||
|
||||
def _percentage_sensor(
    *,
    device: OumanDevice,
    key: str,
) -> OumanEh800SensorDescription:
    """Build a percentage measurement sensor description for the given device.

    The translation key mirrors the endpoint key; values are displayed with
    one decimal of precision.
    """
    return OumanEh800SensorDescription(
        key=key,
        translation_key=key,
        device=device,
        state_class=SensorStateClass.MEASUREMENT,
        native_unit_of_measurement=PERCENTAGE,
        suggested_display_precision=1,
    )
|
||||
|
||||
|
||||
# Maps API endpoints to the sensor entities they should create.  Endpoints
# reported by the device but absent from this mapping are skipped during
# setup (see async_setup_entry).
SENSOR_DESCRIPTIONS: dict[OumanEndpoint, OumanEh800SensorDescription] = {
    # System-wide sensors (main device).
    SystemEndpoints.OUTSIDE_TEMPERATURE: _temperature_sensor(
        device=OumanDevice.MAIN, key="outside_temperature"
    ),
    # Heating circuit L1.
    L1BaseEndpoints.SUPPLY_WATER_TEMPERATURE: _temperature_sensor(
        device=OumanDevice.L1, key="supply_water_temperature"
    ),
    L1BaseEndpoints.VALVE_POSITION: _percentage_sensor(
        device=OumanDevice.L1, key="valve_position"
    ),
    L1BaseEndpoints.SUPPLY_WATER_TEMPERATURE_SETPOINT: _temperature_sensor(
        device=OumanDevice.L1,
        key="supply_water_temperature_setpoint",
        entity_category=EntityCategory.DIAGNOSTIC,
    ),
    L1BaseEndpoints.CURVE_SUPPLY_WATER_TEMPERATURE: _temperature_sensor(
        device=OumanDevice.L1,
        key="curve_supply_water_temperature",
        entity_category=EntityCategory.DIAGNOSTIC,
        enabled_by_default=False,
    ),
    L1BaseEndpoints.FINE_ADJUSTMENT_EFFECT: _temperature_sensor(
        device=OumanDevice.L1,
        key="fine_adjustment_effect",
        device_class=SensorDeviceClass.TEMPERATURE_DELTA,
        entity_category=EntityCategory.DIAGNOSTIC,
        enabled_by_default=False,
    ),
    # L1 room sensor (only present when a room sensor is connected).
    L1RoomSensor.ROOM_TEMPERATURE: _temperature_sensor(
        device=OumanDevice.L1, key="room_temperature"
    ),
    L1RoomSensor.ROOM_TEMPERATURE_SETPOINT: _temperature_sensor(
        device=OumanDevice.L1,
        key="room_temperature_setpoint",
        entity_category=EntityCategory.DIAGNOSTIC,
    ),
    L1RoomSensor.DELAYED_ROOM_TEMPERATURE: _temperature_sensor(
        device=OumanDevice.L1,
        key="delayed_room_temperature",
        entity_category=EntityCategory.DIAGNOSTIC,
        enabled_by_default=False,
    ),
    L1RoomSensor.ROOM_SENSOR_POTENTIOMETER: _temperature_sensor(
        device=OumanDevice.L1,
        key="room_sensor_potentiometer",
        device_class=SensorDeviceClass.TEMPERATURE_DELTA,
        entity_category=EntityCategory.DIAGNOSTIC,
        enabled_by_default=False,
    ),
    # Heating circuit L2.
    L2BaseEndpoints.SUPPLY_WATER_TEMPERATURE: _temperature_sensor(
        device=OumanDevice.L2, key="supply_water_temperature"
    ),
    L2BaseEndpoints.VALVE_POSITION: _percentage_sensor(
        device=OumanDevice.L2, key="valve_position"
    ),
    L2BaseEndpoints.SUPPLY_WATER_TEMPERATURE_SETPOINT: _temperature_sensor(
        device=OumanDevice.L2,
        key="supply_water_temperature_setpoint",
        entity_category=EntityCategory.DIAGNOSTIC,
    ),
    L2BaseEndpoints.CURVE_SUPPLY_WATER_TEMPERATURE: _temperature_sensor(
        device=OumanDevice.L2,
        key="curve_supply_water_temperature",
        entity_category=EntityCategory.DIAGNOSTIC,
        enabled_by_default=False,
    ),
    L2BaseEndpoints.DELAYED_OUTDOOR_TEMPERATURE_EFFECT: _temperature_sensor(
        device=OumanDevice.L2,
        key="delayed_outdoor_temperature_effect",
        device_class=SensorDeviceClass.TEMPERATURE_DELTA,
        entity_category=EntityCategory.DIAGNOSTIC,
        enabled_by_default=False,
    ),
    # L2 room sensor (only present when a room sensor is connected).
    L2RoomSensor.ROOM_TEMPERATURE: _temperature_sensor(
        device=OumanDevice.L2, key="room_temperature"
    ),
    L2RoomSensor.ROOM_TEMPERATURE_SETPOINT: _temperature_sensor(
        device=OumanDevice.L2,
        key="room_temperature_setpoint",
        entity_category=EntityCategory.DIAGNOSTIC,
    ),
    L2RoomSensor.DELAYED_ROOM_TEMPERATURE: _temperature_sensor(
        device=OumanDevice.L2,
        key="delayed_room_temperature",
        entity_category=EntityCategory.DIAGNOSTIC,
        enabled_by_default=False,
    ),
}
|
||||
|
||||
|
||||
async def async_setup_entry(
    hass: HomeAssistant,
    entry: OumanEh800ConfigEntry,
    async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
    """Set up Ouman EH-800 sensors based on a config entry."""
    coordinator = entry.runtime_data
    # One sensor entity per endpoint the device reports, but only for
    # endpoints that have a matching sensor description.
    entities: list[OumanEh800SensorEntity] = []
    for endpoint in coordinator.data:
        if (description := SENSOR_DESCRIPTIONS.get(endpoint)) is None:
            continue
        entities.append(OumanEh800SensorEntity(coordinator, endpoint, description))
    async_add_entities(entities)
|
||||
|
||||
|
||||
class OumanEh800SensorEntity(OumanEh800Entity, SensorEntity):
    """Ouman EH-800 sensor entity."""

    entity_description: OumanEh800SensorDescription

    @property
    def native_value(self) -> float | str:
        """Return the latest coordinator value for this entity's endpoint."""
        current = self.coordinator.data[self._endpoint]
        # Narrow the type for the checker: endpoints mapped to sensors only
        # ever carry numeric or string values.
        assert isinstance(current, float | str)
        return current
|
||||
@@ -0,0 +1,54 @@
|
||||
{
|
||||
"config": {
|
||||
"abort": {
|
||||
"already_configured": "[%key:common::config_flow::abort::already_configured_device%]"
|
||||
},
|
||||
"error": {
|
||||
"cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
|
||||
"invalid_auth": "[%key:common::config_flow::error::invalid_auth%]",
|
||||
"invalid_url": "Invalid URL",
|
||||
"unknown": "[%key:common::config_flow::error::unknown%]"
|
||||
},
|
||||
"step": {
|
||||
"user": {
|
||||
"data": {
|
||||
"password": "[%key:common::config_flow::data::password%]",
|
||||
"url": "[%key:common::config_flow::data::url%]",
|
||||
"username": "[%key:common::config_flow::data::username%]"
|
||||
},
|
||||
"data_description": {
|
||||
"password": "Password for the Ouman EH-800 web interface",
|
||||
"url": "The URL of the Ouman EH-800 web interface",
|
||||
"username": "Username for the Ouman EH-800 web interface"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"device": {
|
||||
"heating_circuit": { "name": "Heating circuit {circuit_number}" },
|
||||
"heating_circuit_with_name": {
|
||||
"name": "Heating circuit {circuit_number} {circuit_name}"
|
||||
}
|
||||
},
|
||||
"entity": {
|
||||
"sensor": {
|
||||
"curve_supply_water_temperature": {
|
||||
"name": "Curve supply water temperature"
|
||||
},
|
||||
"delayed_outdoor_temperature_effect": {
|
||||
"name": "Delayed outdoor temperature effect"
|
||||
},
|
||||
"delayed_room_temperature": { "name": "Delayed room temperature" },
|
||||
"fine_adjustment_effect": { "name": "Fine adjustment effect" },
|
||||
"outside_temperature": { "name": "Outside temperature" },
|
||||
"room_sensor_potentiometer": { "name": "Room sensor potentiometer" },
|
||||
"room_temperature": { "name": "Room temperature" },
|
||||
"room_temperature_setpoint": { "name": "Room temperature setpoint" },
|
||||
"supply_water_temperature": { "name": "Supply water temperature" },
|
||||
"supply_water_temperature_setpoint": {
|
||||
"name": "Supply water temperature setpoint"
|
||||
},
|
||||
"valve_position": { "name": "Valve position" }
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -93,6 +93,8 @@ class PanasonicVieraConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
if self._data[CONF_HOST] is not None
|
||||
else "",
|
||||
): str,
|
||||
# Name field is no longer allowed in config flow schemas
|
||||
# pylint: disable-next=hass-config-flow-name-field
|
||||
vol.Optional(
|
||||
CONF_NAME,
|
||||
default=self._data[CONF_NAME]
|
||||
|
||||
@@ -76,6 +76,8 @@ class PiHoleFlowHandler(ConfigFlow, domain=DOMAIN):
|
||||
vol.Required(
|
||||
CONF_PORT, default=user_input.get(CONF_PORT, 80)
|
||||
): vol.Coerce(int),
|
||||
# Name field is no longer allowed in config flow schemas
|
||||
# pylint: disable-next=hass-config-flow-name-field
|
||||
vol.Required(
|
||||
CONF_NAME, default=user_input.get(CONF_NAME, DEFAULT_NAME)
|
||||
): str,
|
||||
|
||||
@@ -59,6 +59,8 @@ class PlaatoConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
step_id="user",
|
||||
data_schema=vol.Schema(
|
||||
{
|
||||
# Name field is no longer allowed in config flow schemas
|
||||
# pylint: disable-next=hass-config-flow-name-field
|
||||
vol.Required(
|
||||
CONF_DEVICE_NAME,
|
||||
default=self._init_info.get(CONF_DEVICE_NAME, None),
|
||||
|
||||
@@ -45,6 +45,8 @@ class ProwlConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
vol.Schema(
|
||||
{
|
||||
vol.Required(CONF_API_KEY): str,
|
||||
# Name field is no longer allowed in config flow schemas
|
||||
# pylint: disable-next=hass-config-flow-name-field
|
||||
vol.Required(CONF_NAME): str,
|
||||
},
|
||||
),
|
||||
|
||||
@@ -215,6 +215,8 @@ class PlayStation4FlowHandler(ConfigFlow, domain=DOMAIN):
|
||||
link_schema[vol.Required(CONF_CODE)] = vol.All(
|
||||
vol.Strip, vol.Length(max=PIN_LENGTH), vol.Coerce(int)
|
||||
)
|
||||
# Name field is no longer allowed in config flow schemas
|
||||
# pylint: disable-next=hass-config-flow-name-field
|
||||
link_schema[vol.Required(CONF_NAME, default=DEFAULT_NAME)] = str
|
||||
|
||||
return self.async_show_form(
|
||||
|
||||
@@ -13,6 +13,8 @@ from .const import DEFAULT_NAME, DOMAIN
|
||||
|
||||
CONFIG_SCHEMA = vol.Schema(
|
||||
{
|
||||
# Name field is no longer allowed in config flow schemas
|
||||
# pylint: disable-next=hass-config-flow-name-field
|
||||
vol.Optional(CONF_NAME, default=DEFAULT_NAME): selector.TextSelector(),
|
||||
vol.Required(CONF_API_KEY): selector.TextSelector(),
|
||||
}
|
||||
|
||||
@@ -14,6 +14,8 @@ from .const import CONF_USER_KEY, DEFAULT_NAME, DOMAIN
|
||||
|
||||
USER_SCHEMA = vol.Schema(
|
||||
{
|
||||
# Name field is no longer allowed in config flow schemas
|
||||
# pylint: disable-next=hass-config-flow-name-field
|
||||
vol.Optional(CONF_NAME, default=DEFAULT_NAME): str,
|
||||
vol.Required(CONF_API_KEY): str,
|
||||
vol.Required(CONF_USER_KEY): str,
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
"""The Raspberry Pi integration."""
|
||||
|
||||
from homeassistant.components.hassio import HassioNotReadyError, get_os_info
|
||||
from homeassistant.components.hassio import get_os_info
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import ConfigEntryNotReady
|
||||
@@ -14,10 +14,9 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
||||
hass.async_create_task(hass.config_entries.async_remove(entry.entry_id))
|
||||
return False
|
||||
|
||||
try:
|
||||
os_info = get_os_info(hass)
|
||||
except HassioNotReadyError as err:
|
||||
raise ConfigEntryNotReady from err
|
||||
if (os_info := get_os_info(hass)) is None:
|
||||
# The hassio integration has not yet fetched data from the supervisor
|
||||
raise ConfigEntryNotReady
|
||||
|
||||
board: str | None
|
||||
if (board := os_info.get("board")) is None or not board.startswith("rpi"):
|
||||
|
||||
@@ -35,7 +35,8 @@ MODELS = {
|
||||
@callback
|
||||
def async_info(hass: HomeAssistant) -> list[HardwareInfo]:
|
||||
"""Return board info."""
|
||||
os_info = get_os_info(hass)
|
||||
if (os_info := get_os_info(hass)) is None:
|
||||
raise HomeAssistantError
|
||||
board: str | None
|
||||
if (board := os_info.get("board")) is None:
|
||||
raise HomeAssistantError
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
"""Config flow for Satel Integra."""
|
||||
# pylint: disable=hass-config-flow-name-field # Name field is no longer allowed in config flow schemas
|
||||
|
||||
import logging
|
||||
from typing import Any
|
||||
|
||||
@@ -189,6 +189,8 @@ SENSOR_SETTINGS = vol.Schema(
|
||||
}
|
||||
)
|
||||
SENSOR_SETUP = vol.Schema(
|
||||
# Name field is no longer allowed in config flow schemas
|
||||
# pylint: disable-next=hass-config-flow-name-field
|
||||
{vol.Optional(CONF_NAME, default=DEFAULT_NAME): TextSelector()}
|
||||
).extend(SENSOR_SETTINGS.schema)
|
||||
|
||||
|
||||
@@ -67,6 +67,8 @@ class SimplePushFlowHandler(ConfigFlow, domain=DOMAIN):
|
||||
data_schema=vol.Schema(
|
||||
{
|
||||
vol.Required(CONF_DEVICE_KEY): str,
|
||||
# Name field is no longer allowed in config flow schemas
|
||||
# pylint: disable-next=hass-config-flow-name-field
|
||||
vol.Required(CONF_NAME, default=DEFAULT_NAME): str,
|
||||
vol.Inclusive(CONF_PASSWORD, ATTR_ENCRYPTED): str,
|
||||
vol.Inclusive(CONF_SALT, ATTR_ENCRYPTED): str,
|
||||
|
||||
@@ -114,6 +114,8 @@ class SnoozConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
step_id="user",
|
||||
data_schema=vol.Schema(
|
||||
{
|
||||
# Name field is no longer allowed in config flow schemas
|
||||
# pylint: disable-next=hass-config-flow-name-field
|
||||
vol.Required(CONF_NAME): vol.In(
|
||||
[
|
||||
d.device.display_name
|
||||
|
||||
@@ -159,6 +159,8 @@ class SolarEdgeConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
data_schema_dict: dict[vol.Marker, Any] = {}
|
||||
if self.source != SOURCE_RECONFIGURE:
|
||||
data_schema_dict[
|
||||
# Name field is no longer allowed in config flow schemas
|
||||
# pylint: disable-next=hass-config-flow-name-field
|
||||
vol.Required(CONF_NAME, default=user_input.get(CONF_NAME, DEFAULT_NAME))
|
||||
] = str
|
||||
data_schema_dict[
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
"""Config flow for Splunk integration."""
|
||||
# pylint: disable=hass-config-flow-name-field # Name field is no longer allowed in config flow schemas
|
||||
|
||||
from collections.abc import Mapping
|
||||
import logging
|
||||
|
||||
@@ -84,6 +84,8 @@ OPTIONS_SCHEMA: vol.Schema = vol.Schema(
|
||||
|
||||
CONFIG_SCHEMA: vol.Schema = vol.Schema(
|
||||
{
|
||||
# Approved exemption: user names the SQL query sensor
|
||||
# pylint: disable-next=hass-config-flow-name-field
|
||||
vol.Required(CONF_NAME, default="Select SQL Query"): selector.TextSelector(),
|
||||
vol.Optional(CONF_DB_URL): selector.TextSelector(),
|
||||
}
|
||||
|
||||
@@ -86,6 +86,8 @@ class SRPEnergyConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
if self.source == SOURCE_USER
|
||||
else self._get_reconfigure_entry().data[CONF_ID]
|
||||
),
|
||||
# Name field is no longer allowed in config flow schemas
|
||||
# pylint: disable-next=hass-config-flow-name-field
|
||||
vol.Required(
|
||||
CONF_NAME, default=self.hass.config.location_name
|
||||
): str,
|
||||
|
||||
@@ -92,6 +92,8 @@ class SyncThruConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
data_schema=vol.Schema(
|
||||
{
|
||||
vol.Required(CONF_URL, default=user_input.get(CONF_URL, "")): str,
|
||||
# Name field is no longer allowed in config flow schemas
|
||||
# pylint: disable-next=hass-config-flow-name-field
|
||||
vol.Optional(CONF_NAME, default=user_input.get(CONF_NAME, "")): str,
|
||||
}
|
||||
),
|
||||
|
||||
@@ -167,6 +167,8 @@ class FlowHandler(ConfigFlow, domain=DOMAIN):
|
||||
step_id="user",
|
||||
data_schema=vol.Schema(
|
||||
{
|
||||
# Name field is no longer allowed in config flow schemas
|
||||
# pylint: disable-next=hass-config-flow-name-field
|
||||
vol.Required(
|
||||
CONF_NAME, default=user_input.get(CONF_NAME, "")
|
||||
): cv.string,
|
||||
|
||||
@@ -59,6 +59,8 @@ def _get_config_schema(input_dict: dict[str, Any] | None = None) -> vol.Schema:
|
||||
|
||||
return vol.Schema(
|
||||
{
|
||||
# Name field is no longer allowed in config flow schemas
|
||||
# pylint: disable-next=hass-config-flow-name-field
|
||||
vol.Required(
|
||||
CONF_NAME, default=input_dict.get(CONF_NAME, DEFAULT_NAME)
|
||||
): str,
|
||||
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user