Add Uptime Kuma integration (#146393)
parent 37ae476c67
commit 66641356cc
@@ -535,6 +535,7 @@ homeassistant.components.unifiprotect.*
homeassistant.components.upcloud.*
homeassistant.components.update.*
homeassistant.components.uptime.*
homeassistant.components.uptime_kuma.*
homeassistant.components.uptimerobot.*
homeassistant.components.usb.*
homeassistant.components.uvc.*
2 CODEOWNERS generated
@@ -1658,6 +1658,8 @@ build.json @home-assistant/supervisor
/tests/components/upnp/ @StevenLooman
/homeassistant/components/uptime/ @frenck
/tests/components/uptime/ @frenck
/homeassistant/components/uptime_kuma/ @tr4nt0r
/tests/components/uptime_kuma/ @tr4nt0r
/homeassistant/components/uptimerobot/ @ludeeus @chemelli74
/tests/components/uptimerobot/ @ludeeus @chemelli74
/homeassistant/components/usb/ @bdraco
27 homeassistant/components/uptime_kuma/__init__.py Normal file
@@ -0,0 +1,27 @@
"""The Uptime Kuma integration."""

from __future__ import annotations

from homeassistant.const import Platform
from homeassistant.core import HomeAssistant

from .coordinator import UptimeKumaConfigEntry, UptimeKumaDataUpdateCoordinator

_PLATFORMS: list[Platform] = [Platform.SENSOR]


async def async_setup_entry(hass: HomeAssistant, entry: UptimeKumaConfigEntry) -> bool:
    """Set up Uptime Kuma from a config entry."""

    coordinator = UptimeKumaDataUpdateCoordinator(hass, entry)
    await coordinator.async_config_entry_first_refresh()
    entry.runtime_data = coordinator

    await hass.config_entries.async_forward_entry_setups(entry, _PLATFORMS)

    return True


async def async_unload_entry(hass: HomeAssistant, entry: UptimeKumaConfigEntry) -> bool:
    """Unload a config entry."""
    return await hass.config_entries.async_unload_platforms(entry, _PLATFORMS)
79 homeassistant/components/uptime_kuma/config_flow.py Normal file
@@ -0,0 +1,79 @@
"""Config flow for the Uptime Kuma integration."""

from __future__ import annotations

import logging
from typing import Any

from pythonkuma import (
    UptimeKuma,
    UptimeKumaAuthenticationException,
    UptimeKumaException,
)
import voluptuous as vol
from yarl import URL

from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
from homeassistant.const import CONF_API_KEY, CONF_URL, CONF_VERIFY_SSL
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.selector import (
    TextSelector,
    TextSelectorConfig,
    TextSelectorType,
)

from .const import DOMAIN

_LOGGER = logging.getLogger(__name__)

STEP_USER_DATA_SCHEMA = vol.Schema(
    {
        vol.Required(CONF_URL): TextSelector(
            TextSelectorConfig(
                type=TextSelectorType.URL,
                autocomplete="url",
            ),
        ),
        vol.Required(CONF_VERIFY_SSL, default=True): bool,
        vol.Optional(CONF_API_KEY, default=""): str,
    }
)


class UptimeKumaConfigFlow(ConfigFlow, domain=DOMAIN):
    """Handle a config flow for Uptime Kuma."""

    async def async_step_user(
        self, user_input: dict[str, Any] | None = None
    ) -> ConfigFlowResult:
        """Handle the initial step."""
        errors: dict[str, str] = {}
        if user_input is not None:
            url = URL(user_input[CONF_URL])
            self._async_abort_entries_match({CONF_URL: url.human_repr()})

            session = async_get_clientsession(self.hass, user_input[CONF_VERIFY_SSL])
            uptime_kuma = UptimeKuma(session, url, user_input[CONF_API_KEY])

            try:
                await uptime_kuma.metrics()
            except UptimeKumaAuthenticationException:
                errors["base"] = "invalid_auth"
            except UptimeKumaException:
                errors["base"] = "cannot_connect"
            except Exception:
                _LOGGER.exception("Unexpected exception")
                errors["base"] = "unknown"
            else:
                return self.async_create_entry(
                    title=url.host or "",
                    data={**user_input, CONF_URL: url.human_repr()},
                )

        return self.async_show_form(
            step_id="user",
            data_schema=self.add_suggested_values_to_schema(
                data_schema=STEP_USER_DATA_SCHEMA, suggested_values=user_input
            ),
            errors=errors,
        )
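The flow above normalizes the entered URL with yarl before deduplicating and storing it. A minimal sketch of that behaviour (yarl is already a Home Assistant dependency; the sample URL is the one used in the tests further down):

from yarl import URL

# The same normalization async_step_user applies to user input:
url = URL("https://uptime.example.org/")
print(url.host)          # "uptime.example.org"            -> used as the entry title
print(url.human_repr())  # "https://uptime.example.org/"   -> stored as CONF_URL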
26 homeassistant/components/uptime_kuma/const.py Normal file
@@ -0,0 +1,26 @@
"""Constants for the Uptime Kuma integration."""

from pythonkuma import MonitorType

DOMAIN = "uptime_kuma"

HAS_CERT = {
    MonitorType.HTTP,
    MonitorType.KEYWORD,
    MonitorType.JSON_QUERY,
}
HAS_URL = HAS_CERT | {MonitorType.REAL_BROWSER}
HAS_PORT = {
    MonitorType.PORT,
    MonitorType.STEAM,
    MonitorType.GAMEDIG,
    MonitorType.MQTT,
    MonitorType.RADIUS,
    MonitorType.SNMP,
    MonitorType.SMTP,
}
HAS_HOST = HAS_PORT | {
    MonitorType.PING,
    MonitorType.TAILSCALE_PING,
    MonitorType.DNS,
}
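The capability sets above are consumed by the sensor platform's create_entity callbacks (see sensor.py below). A small illustrative helper, not part of this commit, showing which type-dependent sensors a monitor type ends up with:

from pythonkuma import MonitorType


def optional_sensors(monitor_type: MonitorType) -> list[str]:
    """Illustrative only: map a monitor type to its type-dependent sensors."""
    sensors: list[str] = []
    if monitor_type in HAS_CERT:
        sensors.append("cert_days_remaining")
    if monitor_type in HAS_URL:
        sensors.append("url")
    if monitor_type in HAS_HOST:
        sensors.append("hostname")
    if monitor_type in HAS_PORT:
        sensors.append("port")
    return sensors


# optional_sensors(MonitorType.HTTP) -> ["cert_days_remaining", "url"]
# optional_sensors(MonitorType.PORT) -> ["hostname", "port"]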
107 homeassistant/components/uptime_kuma/coordinator.py Normal file
@@ -0,0 +1,107 @@
"""Coordinator for the Uptime Kuma integration."""

from __future__ import annotations

from datetime import timedelta
import logging

from pythonkuma import (
    UptimeKuma,
    UptimeKumaAuthenticationException,
    UptimeKumaException,
    UptimeKumaMonitor,
    UptimeKumaVersion,
)

from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_API_KEY, CONF_URL, CONF_VERIFY_SSL
from homeassistant.core import HomeAssistant, callback
from homeassistant.exceptions import ConfigEntryError
from homeassistant.helpers import entity_registry as er
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed

from .const import DOMAIN

_LOGGER = logging.getLogger(__name__)

type UptimeKumaConfigEntry = ConfigEntry[UptimeKumaDataUpdateCoordinator]


class UptimeKumaDataUpdateCoordinator(
    DataUpdateCoordinator[dict[str | int, UptimeKumaMonitor]]
):
    """Update coordinator for Uptime Kuma."""

    config_entry: UptimeKumaConfigEntry

    def __init__(
        self, hass: HomeAssistant, config_entry: UptimeKumaConfigEntry
    ) -> None:
        """Initialize the coordinator."""

        super().__init__(
            hass,
            _LOGGER,
            config_entry=config_entry,
            name=DOMAIN,
            update_interval=timedelta(seconds=30),
        )
        session = async_get_clientsession(hass, config_entry.data[CONF_VERIFY_SSL])
        self.api = UptimeKuma(
            session, config_entry.data[CONF_URL], config_entry.data[CONF_API_KEY]
        )
        self.version: UptimeKumaVersion | None = None

    async def _async_update_data(self) -> dict[str | int, UptimeKumaMonitor]:
        """Fetch the latest data from Uptime Kuma."""

        try:
            metrics = await self.api.metrics()
        except UptimeKumaAuthenticationException as e:
            raise ConfigEntryError(
                translation_domain=DOMAIN,
                translation_key="auth_failed_exception",
            ) from e
        except UptimeKumaException as e:
            raise UpdateFailed(
                translation_domain=DOMAIN,
                translation_key="request_failed_exception",
            ) from e
        else:
            async_migrate_entities_unique_ids(self.hass, self, metrics)
            self.version = self.api.version

            return metrics


@callback
def async_migrate_entities_unique_ids(
    hass: HomeAssistant,
    coordinator: UptimeKumaDataUpdateCoordinator,
    metrics: dict[str | int, UptimeKumaMonitor],
) -> None:
    """Migrate unique_ids in the entity registry after updating Uptime Kuma."""

    if (
        coordinator.version is coordinator.api.version
        or int(coordinator.api.version.major) < 2
    ):
        return

    entity_registry = er.async_get(hass)
    registry_entries = er.async_entries_for_config_entry(
        entity_registry, coordinator.config_entry.entry_id
    )

    for registry_entry in registry_entries:
        name = registry_entry.unique_id.removeprefix(
            f"{registry_entry.config_entry_id}_"
        ).removesuffix(f"_{registry_entry.translation_key}")
        if monitor := next(
            (m for m in metrics.values() if m.monitor_name == name), None
        ):
            entity_registry.async_update_entity(
                registry_entry.entity_id,
                new_unique_id=f"{registry_entry.config_entry_id}_{monitor.monitor_id!s}_{registry_entry.translation_key}",
            )
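The migration helper above runs once the reported server version first changes to a 2.x release: unique IDs that were keyed by monitor name are rewritten to the monitor-ID-based format, presumably because the 1.x metrics endpoint does not expose monitor IDs. An illustrative before/after, using the entry ID and monitor values from the test fixtures later in this commit:

# unique_id before migration (name-based): "123456789_Monitor 1_response_time"
# unique_id after migration (ID-based):    "123456789_1_response_time"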
32 homeassistant/components/uptime_kuma/icons.json Normal file
@@ -0,0 +1,32 @@
{
  "entity": {
    "sensor": {
      "cert_days_remaining": {
        "default": "mdi:certificate"
      },
      "response_time": {
        "default": "mdi:timeline-clock-outline"
      },
      "status": {
        "default": "mdi:lan-connect",
        "state": {
          "down": "mdi:lan-disconnect",
          "pending": "mdi:lan-pending",
          "maintenance": "mdi:account-hard-hat-outline"
        }
      },
      "type": {
        "default": "mdi:protocol"
      },
      "url": {
        "default": "mdi:web"
      },
      "hostname": {
        "default": "mdi:ip-outline"
      },
      "port": {
        "default": "mdi:ip-outline"
      }
    }
  }
}
11 homeassistant/components/uptime_kuma/manifest.json Normal file
@@ -0,0 +1,11 @@
{
  "domain": "uptime_kuma",
  "name": "Uptime Kuma",
  "codeowners": ["@tr4nt0r"],
  "config_flow": true,
  "documentation": "https://www.home-assistant.io/integrations/uptime_kuma",
  "iot_class": "cloud_polling",
  "loggers": ["pythonkuma"],
  "quality_scale": "bronze",
  "requirements": ["pythonkuma==0.3.0"]
}
78 homeassistant/components/uptime_kuma/quality_scale.yaml Normal file
@@ -0,0 +1,78 @@
rules:
  # Bronze
  action-setup:
    status: exempt
    comment: integration has no actions
  appropriate-polling: done
  brands: done
  common-modules: done
  config-flow-test-coverage: done
  config-flow: done
  dependency-transparency: done
  docs-actions:
    status: exempt
    comment: integration has no actions
  docs-high-level-description: done
  docs-installation-instructions: done
  docs-removal-instructions: done
  entity-event-setup:
    status: exempt
    comment: integration has no events
  entity-unique-id: done
  has-entity-name: done
  runtime-data: done
  test-before-configure: done
  test-before-setup: done
  unique-config-entry: done

  # Silver
  action-exceptions:
    status: exempt
    comment: integration has no actions
  config-entry-unloading: done
  docs-configuration-parameters:
    status: exempt
    comment: integration has no options
  docs-installation-parameters: done
  entity-unavailable: done
  integration-owner: done
  log-when-unavailable: done
  parallel-updates: done
  reauthentication-flow: todo
  test-coverage: done

  # Gold
  devices: done
  diagnostics: todo
  discovery-update-info:
    status: exempt
    comment: is not locally discoverable
  discovery:
    status: exempt
    comment: is not locally discoverable
  docs-data-update: done
  docs-examples: todo
  docs-known-limitations: done
  docs-supported-devices:
    status: exempt
    comment: integration is a service
  docs-supported-functions: done
  docs-troubleshooting: todo
  docs-use-cases: todo
  dynamic-devices: done
  entity-category: done
  entity-device-class: done
  entity-disabled-by-default: done
  entity-translations: done
  exception-translations: done
  icon-translations: done
  reconfiguration-flow: todo
  repair-issues:
    status: exempt
    comment: has no repairs
  stale-devices: done

  # Platinum
  async-dependency: done
  inject-websession: done
  strict-typing: done
178 homeassistant/components/uptime_kuma/sensor.py Normal file
@@ -0,0 +1,178 @@
"""Sensor platform for the Uptime Kuma integration."""

from __future__ import annotations

from collections.abc import Callable
from dataclasses import dataclass
from enum import StrEnum

from pythonkuma import MonitorType, UptimeKumaMonitor
from pythonkuma.models import MonitorStatus

from homeassistant.components.sensor import (
    SensorDeviceClass,
    SensorEntity,
    SensorEntityDescription,
)
from homeassistant.const import CONF_URL, EntityCategory, UnitOfTime
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from homeassistant.helpers.typing import StateType
from homeassistant.helpers.update_coordinator import CoordinatorEntity

from .const import DOMAIN, HAS_CERT, HAS_HOST, HAS_PORT, HAS_URL
from .coordinator import UptimeKumaConfigEntry, UptimeKumaDataUpdateCoordinator

PARALLEL_UPDATES = 0


class UptimeKumaSensor(StrEnum):
    """Uptime Kuma sensors."""

    CERT_DAYS_REMAINING = "cert_days_remaining"
    RESPONSE_TIME = "response_time"
    STATUS = "status"
    TYPE = "type"
    URL = "url"
    HOSTNAME = "hostname"
    PORT = "port"


@dataclass(kw_only=True, frozen=True)
class UptimeKumaSensorEntityDescription(SensorEntityDescription):
    """Uptime Kuma sensor description."""

    value_fn: Callable[[UptimeKumaMonitor], StateType]
    create_entity: Callable[[MonitorType], bool]


SENSOR_DESCRIPTIONS: tuple[UptimeKumaSensorEntityDescription, ...] = (
    UptimeKumaSensorEntityDescription(
        key=UptimeKumaSensor.CERT_DAYS_REMAINING,
        translation_key=UptimeKumaSensor.CERT_DAYS_REMAINING,
        device_class=SensorDeviceClass.DURATION,
        native_unit_of_measurement=UnitOfTime.DAYS,
        value_fn=lambda m: m.monitor_cert_days_remaining,
        create_entity=lambda t: t in HAS_CERT,
    ),
    UptimeKumaSensorEntityDescription(
        key=UptimeKumaSensor.RESPONSE_TIME,
        translation_key=UptimeKumaSensor.RESPONSE_TIME,
        device_class=SensorDeviceClass.DURATION,
        native_unit_of_measurement=UnitOfTime.MILLISECONDS,
        value_fn=(
            lambda m: m.monitor_response_time if m.monitor_response_time > -1 else None
        ),
        create_entity=lambda _: True,
    ),
    UptimeKumaSensorEntityDescription(
        key=UptimeKumaSensor.STATUS,
        translation_key=UptimeKumaSensor.STATUS,
        device_class=SensorDeviceClass.ENUM,
        options=[m.name.lower() for m in MonitorStatus],
        value_fn=lambda m: m.monitor_status.name.lower(),
        create_entity=lambda _: True,
    ),
    UptimeKumaSensorEntityDescription(
        key=UptimeKumaSensor.TYPE,
        translation_key=UptimeKumaSensor.TYPE,
        device_class=SensorDeviceClass.ENUM,
        options=[m.name.lower() for m in MonitorType],
        value_fn=lambda m: m.monitor_type.name.lower(),
        entity_category=EntityCategory.DIAGNOSTIC,
        create_entity=lambda _: True,
    ),
    UptimeKumaSensorEntityDescription(
        key=UptimeKumaSensor.URL,
        translation_key=UptimeKumaSensor.URL,
        entity_category=EntityCategory.DIAGNOSTIC,
        value_fn=lambda m: m.monitor_url,
        create_entity=lambda t: t in HAS_URL,
    ),
    UptimeKumaSensorEntityDescription(
        key=UptimeKumaSensor.HOSTNAME,
        translation_key=UptimeKumaSensor.HOSTNAME,
        entity_category=EntityCategory.DIAGNOSTIC,
        value_fn=lambda m: m.monitor_hostname,
        create_entity=lambda t: t in HAS_HOST,
    ),
    UptimeKumaSensorEntityDescription(
        key=UptimeKumaSensor.PORT,
        translation_key=UptimeKumaSensor.PORT,
        entity_category=EntityCategory.DIAGNOSTIC,
        value_fn=lambda m: m.monitor_port,
        create_entity=lambda t: t in HAS_PORT,
    ),
)


async def async_setup_entry(
    hass: HomeAssistant,
    config_entry: UptimeKumaConfigEntry,
    async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
    """Set up the sensor platform."""
    coordinator = config_entry.runtime_data
    monitor_added: set[str | int] = set()

    @callback
    def add_entities() -> None:
        """Add sensor entities."""
        nonlocal monitor_added

        if new_monitor := set(coordinator.data.keys()) - monitor_added:
            async_add_entities(
                UptimeKumaSensorEntity(coordinator, monitor, description)
                for description in SENSOR_DESCRIPTIONS
                for monitor in new_monitor
                if description.create_entity(coordinator.data[monitor].monitor_type)
            )
            monitor_added |= new_monitor

    coordinator.async_add_listener(add_entities)
    add_entities()


class UptimeKumaSensorEntity(
    CoordinatorEntity[UptimeKumaDataUpdateCoordinator], SensorEntity
):
    """An Uptime Kuma sensor entity."""

    entity_description: UptimeKumaSensorEntityDescription

    _attr_has_entity_name = True

    def __init__(
        self,
        coordinator: UptimeKumaDataUpdateCoordinator,
        monitor: str | int,
        entity_description: UptimeKumaSensorEntityDescription,
    ) -> None:
        """Initialize the entity."""

        super().__init__(coordinator)
        self.monitor = monitor
        self.entity_description = entity_description
        self._attr_unique_id = (
            f"{coordinator.config_entry.entry_id}_{monitor!s}_{entity_description.key}"
        )
        self._attr_device_info = DeviceInfo(
            entry_type=DeviceEntryType.SERVICE,
            name=coordinator.data[monitor].monitor_name,
            identifiers={(DOMAIN, f"{coordinator.config_entry.entry_id}_{monitor!s}")},
            manufacturer="Uptime Kuma",
            configuration_url=coordinator.config_entry.data[CONF_URL],
            sw_version=coordinator.api.version.version,
        )

    @property
    def native_value(self) -> StateType:
        """Return the state of the sensor."""

        return self.entity_description.value_fn(self.coordinator.data[self.monitor])

    @property
    def available(self) -> bool:
        """Return True if entity is available."""
        return super().available and self.monitor in self.coordinator.data
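For orientation, with the fixtures used in the tests below (config entry ID 123456789, an HTTP monitor named "Monitor 1" with monitor ID 1), the descriptions above produce entities such as:

# sensor.monitor_1_status                unique_id: 123456789_1_status
# sensor.monitor_1_response_time         unique_id: 123456789_1_response_time
# sensor.monitor_1_certificate_expiry    unique_id: 123456789_1_cert_days_remaining
# sensor.monitor_1_monitor_type          unique_id: 123456789_1_type
# sensor.monitor_1_monitored_url         unique_id: 123456789_1_url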
94 homeassistant/components/uptime_kuma/strings.json Normal file
@@ -0,0 +1,94 @@
{
  "config": {
    "step": {
      "user": {
        "description": "Set up **Uptime Kuma** monitoring service",
        "data": {
          "url": "[%key:common::config_flow::data::url%]",
          "verify_ssl": "[%key:common::config_flow::data::verify_ssl%]",
          "api_key": "[%key:common::config_flow::data::api_key%]"
        },
        "data_description": {
          "url": "Enter the full URL of your Uptime Kuma instance. Be sure to include the protocol (`http` or `https`), the hostname or IP address, the port number (if it is a non-default port), and any path prefix if applicable. Example: `https://uptime.example.com`",
          "verify_ssl": "Enable SSL certificate verification for secure connections. Disable only if connecting to an Uptime Kuma instance using a self-signed certificate or via IP address",
          "api_key": "Enter an API key. To create a new API key navigate to **Settings → API Keys** and select **Add API Key**"
        }
      }
    },
    "error": {
      "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
      "invalid_auth": "[%key:common::config_flow::error::invalid_auth%]",
      "unknown": "[%key:common::config_flow::error::unknown%]"
    },
    "abort": {
      "already_configured": "[%key:common::config_flow::abort::already_configured_device%]"
    }
  },
  "entity": {
    "sensor": {
      "cert_days_remaining": {
        "name": "Certificate expiry"
      },
      "response_time": {
        "name": "Response time"
      },
      "status": {
        "name": "Status",
        "state": {
          "up": "Up",
          "down": "Down",
          "pending": "Pending",
          "maintenance": "Maintenance"
        }
      },
      "type": {
        "name": "Monitor type",
        "state": {
          "http": "HTTP(s)",
          "port": "TCP port",
          "ping": "Ping",
          "keyword": "HTTP(s) - Keyword",
          "dns": "DNS",
          "push": "Push",
          "steam": "Steam Game Server",
          "mqtt": "MQTT",
          "sqlserver": "Microsoft SQL Server",
          "json_query": "HTTP(s) - JSON query",
          "group": "Group",
          "docker": "Docker",
          "grpc_keyword": "gRPC(s) - Keyword",
          "real_browser": "HTTP(s) - Browser engine",
          "gamedig": "GameDig",
          "kafka_producer": "Kafka Producer",
          "postgres": "PostgreSQL",
          "mysql": "MySQL/MariaDB",
          "mongodb": "MongoDB",
          "radius": "Radius",
          "redis": "Redis",
          "tailscale_ping": "Tailscale Ping",
          "snmp": "SNMP",
          "smtp": "SMTP",
          "rabbit_mq": "RabbitMQ",
          "manual": "Manual"
        }
      },
      "url": {
        "name": "Monitored URL"
      },
      "hostname": {
        "name": "Monitored hostname"
      },
      "port": {
        "name": "Monitored port"
      }
    }
  },
  "exceptions": {
    "auth_failed_exception": {
      "message": "Authentication with Uptime Kuma failed. Please check that your API key is correct and still valid"
    },
    "request_failed_exception": {
      "message": "Connection to Uptime Kuma failed"
    }
  }
}
1 homeassistant/generated/config_flows.py generated
@@ -680,6 +680,7 @@ FLOWS = {
        "upcloud",
        "upnp",
        "uptime",
        "uptime_kuma",
        "uptimerobot",
        "v2c",
        "vallox",
@@ -7080,6 +7080,12 @@
      "iot_class": "local_push",
      "single_config_entry": true
    },
    "uptime_kuma": {
      "name": "Uptime Kuma",
      "integration_type": "hub",
      "config_flow": true,
      "iot_class": "cloud_polling"
    },
    "uptimerobot": {
      "name": "UptimeRobot",
      "integration_type": "hub",
10 mypy.ini generated
@@ -5109,6 +5109,16 @@ disallow_untyped_defs = true
warn_return_any = true
warn_unreachable = true

[mypy-homeassistant.components.uptime_kuma.*]
check_untyped_defs = true
disallow_incomplete_defs = true
disallow_subclassing_any = true
disallow_untyped_calls = true
disallow_untyped_decorators = true
disallow_untyped_defs = true
warn_return_any = true
warn_unreachable = true

[mypy-homeassistant.components.uptimerobot.*]
check_untyped_defs = true
disallow_incomplete_defs = true
3 requirements_all.txt generated
@@ -2525,6 +2525,9 @@ python-vlc==3.0.18122
# homeassistant.components.egardia
pythonegardia==1.0.52

# homeassistant.components.uptime_kuma
pythonkuma==0.3.0

# homeassistant.components.tile
pytile==2024.12.0
3 requirements_test_all.txt generated
@@ -2089,6 +2089,9 @@ python-technove==2.0.0
# homeassistant.components.telegram_bot
python-telegram-bot[socks]==21.5

# homeassistant.components.uptime_kuma
pythonkuma==0.3.0

# homeassistant.components.tile
pytile==2024.12.0
1 tests/components/uptime_kuma/__init__.py Normal file
@@ -0,0 +1 @@
"""Tests for the Uptime Kuma integration."""
101 tests/components/uptime_kuma/conftest.py Normal file
@@ -0,0 +1,101 @@
"""Common fixtures for the Uptime Kuma tests."""

from collections.abc import Generator
from unittest.mock import AsyncMock, patch

import pytest
from pythonkuma import MonitorType, UptimeKumaMonitor, UptimeKumaVersion
from pythonkuma.models import MonitorStatus

from homeassistant.components.uptime_kuma.const import DOMAIN
from homeassistant.const import CONF_API_KEY, CONF_URL, CONF_VERIFY_SSL

from tests.common import MockConfigEntry


@pytest.fixture
def mock_setup_entry() -> Generator[AsyncMock]:
    """Override async_setup_entry."""
    with patch(
        "homeassistant.components.uptime_kuma.async_setup_entry", return_value=True
    ) as mock_setup_entry:
        yield mock_setup_entry


@pytest.fixture(name="config_entry")
def mock_config_entry() -> MockConfigEntry:
    """Mock Uptime Kuma configuration entry."""
    return MockConfigEntry(
        domain=DOMAIN,
        title="uptime.example.org",
        data={
            CONF_URL: "https://uptime.example.org/",
            CONF_VERIFY_SSL: True,
            CONF_API_KEY: "apikey",
        },
        entry_id="123456789",
    )


@pytest.fixture
def mock_pythonkuma() -> Generator[AsyncMock]:
    """Mock pythonkuma client."""

    monitor_1 = UptimeKumaMonitor(
        monitor_id=1,
        monitor_cert_days_remaining=90,
        monitor_cert_is_valid=1,
        monitor_hostname=None,
        monitor_name="Monitor 1",
        monitor_port=None,
        monitor_response_time=120,
        monitor_status=MonitorStatus.UP,
        monitor_type=MonitorType.HTTP,
        monitor_url="https://example.org",
    )
    monitor_2 = UptimeKumaMonitor(
        monitor_id=2,
        monitor_cert_days_remaining=0,
        monitor_cert_is_valid=0,
        monitor_hostname=None,
        monitor_name="Monitor 2",
        monitor_port=None,
        monitor_response_time=28,
        monitor_status=MonitorStatus.UP,
        monitor_type=MonitorType.PORT,
        monitor_url=None,
    )
    monitor_3 = UptimeKumaMonitor(
        monitor_id=3,
        monitor_cert_days_remaining=90,
        monitor_cert_is_valid=1,
        monitor_hostname=None,
        monitor_name="Monitor 3",
        monitor_port=None,
        monitor_response_time=120,
        monitor_status=MonitorStatus.DOWN,
        monitor_type=MonitorType.JSON_QUERY,
        monitor_url="https://down.example.org",
    )

    with (
        patch(
            "homeassistant.components.uptime_kuma.config_flow.UptimeKuma", autospec=True
        ) as mock_client,
        patch(
            "homeassistant.components.uptime_kuma.coordinator.UptimeKuma",
            new=mock_client,
        ),
    ):
        client = mock_client.return_value

        client.metrics.return_value = {
            1: monitor_1,
            2: monitor_2,
            3: monitor_3,
        }
        client.version = UptimeKumaVersion(
            version="2.0.0", major="2", minor="0", patch="0"
        )

        yield client
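A minimal sketch of how these fixtures could be combined in an integration setup test; the test itself is illustrative and not part of this commit:

from unittest.mock import AsyncMock

from homeassistant.config_entries import ConfigEntryState
from homeassistant.core import HomeAssistant

from tests.common import MockConfigEntry


async def test_entry_loads(
    hass: HomeAssistant,
    config_entry: MockConfigEntry,
    mock_pythonkuma: AsyncMock,
) -> None:
    """Set up the mocked config entry and verify it reaches the LOADED state."""
    config_entry.add_to_hass(hass)
    await hass.config_entries.async_setup(config_entry.entry_id)
    await hass.async_block_till_done()

    assert config_entry.state is ConfigEntryState.LOADED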
968 tests/components/uptime_kuma/snapshots/test_sensor.ambr Normal file
@@ -0,0 +1,968 @@
|
||||
# serializer version: 1
|
||||
# name: test_setup[sensor.monitor_1_certificate_expiry-entry]
|
||||
EntityRegistryEntrySnapshot({
|
||||
'aliases': set({
|
||||
}),
|
||||
'area_id': None,
|
||||
'capabilities': None,
|
||||
'config_entry_id': <ANY>,
|
||||
'config_subentry_id': <ANY>,
|
||||
'device_class': None,
|
||||
'device_id': <ANY>,
|
||||
'disabled_by': None,
|
||||
'domain': 'sensor',
|
||||
'entity_category': None,
|
||||
'entity_id': 'sensor.monitor_1_certificate_expiry',
|
||||
'has_entity_name': True,
|
||||
'hidden_by': None,
|
||||
'icon': None,
|
||||
'id': <ANY>,
|
||||
'labels': set({
|
||||
}),
|
||||
'name': None,
|
||||
'options': dict({
|
||||
'sensor': dict({
|
||||
'suggested_display_precision': 2,
|
||||
}),
|
||||
}),
|
||||
'original_device_class': <SensorDeviceClass.DURATION: 'duration'>,
|
||||
'original_icon': None,
|
||||
'original_name': 'Certificate expiry',
|
||||
'platform': 'uptime_kuma',
|
||||
'previous_unique_id': None,
|
||||
'suggested_object_id': None,
|
||||
'supported_features': 0,
|
||||
'translation_key': <UptimeKumaSensor.CERT_DAYS_REMAINING: 'cert_days_remaining'>,
|
||||
'unique_id': '123456789_1_cert_days_remaining',
|
||||
'unit_of_measurement': <UnitOfTime.DAYS: 'd'>,
|
||||
})
|
||||
# ---
|
||||
# name: test_setup[sensor.monitor_1_certificate_expiry-state]
|
||||
StateSnapshot({
|
||||
'attributes': ReadOnlyDict({
|
||||
'device_class': 'duration',
|
||||
'friendly_name': 'Monitor 1 Certificate expiry',
|
||||
'unit_of_measurement': <UnitOfTime.DAYS: 'd'>,
|
||||
}),
|
||||
'context': <ANY>,
|
||||
'entity_id': 'sensor.monitor_1_certificate_expiry',
|
||||
'last_changed': <ANY>,
|
||||
'last_reported': <ANY>,
|
||||
'last_updated': <ANY>,
|
||||
'state': '90',
|
||||
})
|
||||
# ---
|
||||
# name: test_setup[sensor.monitor_1_monitor_type-entry]
|
||||
EntityRegistryEntrySnapshot({
|
||||
'aliases': set({
|
||||
}),
|
||||
'area_id': None,
|
||||
'capabilities': dict({
|
||||
'options': list([
|
||||
'http',
|
||||
'port',
|
||||
'ping',
|
||||
'keyword',
|
||||
'dns',
|
||||
'push',
|
||||
'steam',
|
||||
'mqtt',
|
||||
'sqlserver',
|
||||
'json_query',
|
||||
'group',
|
||||
'docker',
|
||||
'grpc_keyword',
|
||||
'real_browser',
|
||||
'gamedig',
|
||||
'kafka_producer',
|
||||
'postgres',
|
||||
'mysql',
|
||||
'mongodb',
|
||||
'radius',
|
||||
'redis',
|
||||
'tailscale_ping',
|
||||
'smtp',
|
||||
'snmp',
|
||||
'rabbit_mq',
|
||||
'manual',
|
||||
'unknown',
|
||||
]),
|
||||
}),
|
||||
'config_entry_id': <ANY>,
|
||||
'config_subentry_id': <ANY>,
|
||||
'device_class': None,
|
||||
'device_id': <ANY>,
|
||||
'disabled_by': None,
|
||||
'domain': 'sensor',
|
||||
'entity_category': <EntityCategory.DIAGNOSTIC: 'diagnostic'>,
|
||||
'entity_id': 'sensor.monitor_1_monitor_type',
|
||||
'has_entity_name': True,
|
||||
'hidden_by': None,
|
||||
'icon': None,
|
||||
'id': <ANY>,
|
||||
'labels': set({
|
||||
}),
|
||||
'name': None,
|
||||
'options': dict({
|
||||
}),
|
||||
'original_device_class': <SensorDeviceClass.ENUM: 'enum'>,
|
||||
'original_icon': None,
|
||||
'original_name': 'Monitor type',
|
||||
'platform': 'uptime_kuma',
|
||||
'previous_unique_id': None,
|
||||
'suggested_object_id': None,
|
||||
'supported_features': 0,
|
||||
'translation_key': <UptimeKumaSensor.TYPE: 'type'>,
|
||||
'unique_id': '123456789_1_type',
|
||||
'unit_of_measurement': None,
|
||||
})
|
||||
# ---
|
||||
# name: test_setup[sensor.monitor_1_monitor_type-state]
|
||||
StateSnapshot({
|
||||
'attributes': ReadOnlyDict({
|
||||
'device_class': 'enum',
|
||||
'friendly_name': 'Monitor 1 Monitor type',
|
||||
'options': list([
|
||||
'http',
|
||||
'port',
|
||||
'ping',
|
||||
'keyword',
|
||||
'dns',
|
||||
'push',
|
||||
'steam',
|
||||
'mqtt',
|
||||
'sqlserver',
|
||||
'json_query',
|
||||
'group',
|
||||
'docker',
|
||||
'grpc_keyword',
|
||||
'real_browser',
|
||||
'gamedig',
|
||||
'kafka_producer',
|
||||
'postgres',
|
||||
'mysql',
|
||||
'mongodb',
|
||||
'radius',
|
||||
'redis',
|
||||
'tailscale_ping',
|
||||
'smtp',
|
||||
'snmp',
|
||||
'rabbit_mq',
|
||||
'manual',
|
||||
'unknown',
|
||||
]),
|
||||
}),
|
||||
'context': <ANY>,
|
||||
'entity_id': 'sensor.monitor_1_monitor_type',
|
||||
'last_changed': <ANY>,
|
||||
'last_reported': <ANY>,
|
||||
'last_updated': <ANY>,
|
||||
'state': 'http',
|
||||
})
|
||||
# ---
|
||||
# name: test_setup[sensor.monitor_1_monitored_url-entry]
|
||||
EntityRegistryEntrySnapshot({
|
||||
'aliases': set({
|
||||
}),
|
||||
'area_id': None,
|
||||
'capabilities': None,
|
||||
'config_entry_id': <ANY>,
|
||||
'config_subentry_id': <ANY>,
|
||||
'device_class': None,
|
||||
'device_id': <ANY>,
|
||||
'disabled_by': None,
|
||||
'domain': 'sensor',
|
||||
'entity_category': <EntityCategory.DIAGNOSTIC: 'diagnostic'>,
|
||||
'entity_id': 'sensor.monitor_1_monitored_url',
|
||||
'has_entity_name': True,
|
||||
'hidden_by': None,
|
||||
'icon': None,
|
||||
'id': <ANY>,
|
||||
'labels': set({
|
||||
}),
|
||||
'name': None,
|
||||
'options': dict({
|
||||
}),
|
||||
'original_device_class': None,
|
||||
'original_icon': None,
|
||||
'original_name': 'Monitored URL',
|
||||
'platform': 'uptime_kuma',
|
||||
'previous_unique_id': None,
|
||||
'suggested_object_id': None,
|
||||
'supported_features': 0,
|
||||
'translation_key': <UptimeKumaSensor.URL: 'url'>,
|
||||
'unique_id': '123456789_1_url',
|
||||
'unit_of_measurement': None,
|
||||
})
|
||||
# ---
|
||||
# name: test_setup[sensor.monitor_1_monitored_url-state]
|
||||
StateSnapshot({
|
||||
'attributes': ReadOnlyDict({
|
||||
'friendly_name': 'Monitor 1 Monitored URL',
|
||||
}),
|
||||
'context': <ANY>,
|
||||
'entity_id': 'sensor.monitor_1_monitored_url',
|
||||
'last_changed': <ANY>,
|
||||
'last_reported': <ANY>,
|
||||
'last_updated': <ANY>,
|
||||
'state': 'https://example.org',
|
||||
})
|
||||
# ---
|
||||
# name: test_setup[sensor.monitor_1_response_time-entry]
|
||||
EntityRegistryEntrySnapshot({
|
||||
'aliases': set({
|
||||
}),
|
||||
'area_id': None,
|
||||
'capabilities': None,
|
||||
'config_entry_id': <ANY>,
|
||||
'config_subentry_id': <ANY>,
|
||||
'device_class': None,
|
||||
'device_id': <ANY>,
|
||||
'disabled_by': None,
|
||||
'domain': 'sensor',
|
||||
'entity_category': None,
|
||||
'entity_id': 'sensor.monitor_1_response_time',
|
||||
'has_entity_name': True,
|
||||
'hidden_by': None,
|
||||
'icon': None,
|
||||
'id': <ANY>,
|
||||
'labels': set({
|
||||
}),
|
||||
'name': None,
|
||||
'options': dict({
|
||||
'sensor': dict({
|
||||
'suggested_display_precision': 0,
|
||||
}),
|
||||
}),
|
||||
'original_device_class': <SensorDeviceClass.DURATION: 'duration'>,
|
||||
'original_icon': None,
|
||||
'original_name': 'Response time',
|
||||
'platform': 'uptime_kuma',
|
||||
'previous_unique_id': None,
|
||||
'suggested_object_id': None,
|
||||
'supported_features': 0,
|
||||
'translation_key': <UptimeKumaSensor.RESPONSE_TIME: 'response_time'>,
|
||||
'unique_id': '123456789_1_response_time',
|
||||
'unit_of_measurement': <UnitOfTime.MILLISECONDS: 'ms'>,
|
||||
})
|
||||
# ---
|
||||
# name: test_setup[sensor.monitor_1_response_time-state]
|
||||
StateSnapshot({
|
||||
'attributes': ReadOnlyDict({
|
||||
'device_class': 'duration',
|
||||
'friendly_name': 'Monitor 1 Response time',
|
||||
'unit_of_measurement': <UnitOfTime.MILLISECONDS: 'ms'>,
|
||||
}),
|
||||
'context': <ANY>,
|
||||
'entity_id': 'sensor.monitor_1_response_time',
|
||||
'last_changed': <ANY>,
|
||||
'last_reported': <ANY>,
|
||||
'last_updated': <ANY>,
|
||||
'state': '120',
|
||||
})
|
||||
# ---
|
||||
# name: test_setup[sensor.monitor_1_status-entry]
|
||||
EntityRegistryEntrySnapshot({
|
||||
'aliases': set({
|
||||
}),
|
||||
'area_id': None,
|
||||
'capabilities': dict({
|
||||
'options': list([
|
||||
'down',
|
||||
'up',
|
||||
'pending',
|
||||
'maintenance',
|
||||
]),
|
||||
}),
|
||||
'config_entry_id': <ANY>,
|
||||
'config_subentry_id': <ANY>,
|
||||
'device_class': None,
|
||||
'device_id': <ANY>,
|
||||
'disabled_by': None,
|
||||
'domain': 'sensor',
|
||||
'entity_category': None,
|
||||
'entity_id': 'sensor.monitor_1_status',
|
||||
'has_entity_name': True,
|
||||
'hidden_by': None,
|
||||
'icon': None,
|
||||
'id': <ANY>,
|
||||
'labels': set({
|
||||
}),
|
||||
'name': None,
|
||||
'options': dict({
|
||||
}),
|
||||
'original_device_class': <SensorDeviceClass.ENUM: 'enum'>,
|
||||
'original_icon': None,
|
||||
'original_name': 'Status',
|
||||
'platform': 'uptime_kuma',
|
||||
'previous_unique_id': None,
|
||||
'suggested_object_id': None,
|
||||
'supported_features': 0,
|
||||
'translation_key': <UptimeKumaSensor.STATUS: 'status'>,
|
||||
'unique_id': '123456789_1_status',
|
||||
'unit_of_measurement': None,
|
||||
})
|
||||
# ---
|
||||
# name: test_setup[sensor.monitor_1_status-state]
|
||||
StateSnapshot({
|
||||
'attributes': ReadOnlyDict({
|
||||
'device_class': 'enum',
|
||||
'friendly_name': 'Monitor 1 Status',
|
||||
'options': list([
|
||||
'down',
|
||||
'up',
|
||||
'pending',
|
||||
'maintenance',
|
||||
]),
|
||||
}),
|
||||
'context': <ANY>,
|
||||
'entity_id': 'sensor.monitor_1_status',
|
||||
'last_changed': <ANY>,
|
||||
'last_reported': <ANY>,
|
||||
'last_updated': <ANY>,
|
||||
'state': 'up',
|
||||
})
|
||||
# ---
|
||||
# name: test_setup[sensor.monitor_2_monitor_type-entry]
|
||||
EntityRegistryEntrySnapshot({
|
||||
'aliases': set({
|
||||
}),
|
||||
'area_id': None,
|
||||
'capabilities': dict({
|
||||
'options': list([
|
||||
'http',
|
||||
'port',
|
||||
'ping',
|
||||
'keyword',
|
||||
'dns',
|
||||
'push',
|
||||
'steam',
|
||||
'mqtt',
|
||||
'sqlserver',
|
||||
'json_query',
|
||||
'group',
|
||||
'docker',
|
||||
'grpc_keyword',
|
||||
'real_browser',
|
||||
'gamedig',
|
||||
'kafka_producer',
|
||||
'postgres',
|
||||
'mysql',
|
||||
'mongodb',
|
||||
'radius',
|
||||
'redis',
|
||||
'tailscale_ping',
|
||||
'smtp',
|
||||
'snmp',
|
||||
'rabbit_mq',
|
||||
'manual',
|
||||
'unknown',
|
||||
]),
|
||||
}),
|
||||
'config_entry_id': <ANY>,
|
||||
'config_subentry_id': <ANY>,
|
||||
'device_class': None,
|
||||
'device_id': <ANY>,
|
||||
'disabled_by': None,
|
||||
'domain': 'sensor',
|
||||
'entity_category': <EntityCategory.DIAGNOSTIC: 'diagnostic'>,
|
||||
'entity_id': 'sensor.monitor_2_monitor_type',
|
||||
'has_entity_name': True,
|
||||
'hidden_by': None,
|
||||
'icon': None,
|
||||
'id': <ANY>,
|
||||
'labels': set({
|
||||
}),
|
||||
'name': None,
|
||||
'options': dict({
|
||||
}),
|
||||
'original_device_class': <SensorDeviceClass.ENUM: 'enum'>,
|
||||
'original_icon': None,
|
||||
'original_name': 'Monitor type',
|
||||
'platform': 'uptime_kuma',
|
||||
'previous_unique_id': None,
|
||||
'suggested_object_id': None,
|
||||
'supported_features': 0,
|
||||
'translation_key': <UptimeKumaSensor.TYPE: 'type'>,
|
||||
'unique_id': '123456789_2_type',
|
||||
'unit_of_measurement': None,
|
||||
})
|
||||
# ---
|
||||
# name: test_setup[sensor.monitor_2_monitor_type-state]
|
||||
StateSnapshot({
|
||||
'attributes': ReadOnlyDict({
|
||||
'device_class': 'enum',
|
||||
'friendly_name': 'Monitor 2 Monitor type',
|
||||
'options': list([
|
||||
'http',
|
||||
'port',
|
||||
'ping',
|
||||
'keyword',
|
||||
'dns',
|
||||
'push',
|
||||
'steam',
|
||||
'mqtt',
|
||||
'sqlserver',
|
||||
'json_query',
|
||||
'group',
|
||||
'docker',
|
||||
'grpc_keyword',
|
||||
'real_browser',
|
||||
'gamedig',
|
||||
'kafka_producer',
|
||||
'postgres',
|
||||
'mysql',
|
||||
'mongodb',
|
||||
'radius',
|
||||
'redis',
|
||||
'tailscale_ping',
|
||||
'smtp',
|
||||
'snmp',
|
||||
'rabbit_mq',
|
||||
'manual',
|
||||
'unknown',
|
||||
]),
|
||||
}),
|
||||
'context': <ANY>,
|
||||
'entity_id': 'sensor.monitor_2_monitor_type',
|
||||
'last_changed': <ANY>,
|
||||
'last_reported': <ANY>,
|
||||
'last_updated': <ANY>,
|
||||
'state': 'port',
|
||||
})
|
||||
# ---
|
||||
# name: test_setup[sensor.monitor_2_monitored_hostname-entry]
|
||||
EntityRegistryEntrySnapshot({
|
||||
'aliases': set({
|
||||
}),
|
||||
'area_id': None,
|
||||
'capabilities': None,
|
||||
'config_entry_id': <ANY>,
|
||||
'config_subentry_id': <ANY>,
|
||||
'device_class': None,
|
||||
'device_id': <ANY>,
|
||||
'disabled_by': None,
|
||||
'domain': 'sensor',
|
||||
'entity_category': <EntityCategory.DIAGNOSTIC: 'diagnostic'>,
|
||||
'entity_id': 'sensor.monitor_2_monitored_hostname',
|
||||
'has_entity_name': True,
|
||||
'hidden_by': None,
|
||||
'icon': None,
|
||||
'id': <ANY>,
|
||||
'labels': set({
|
||||
}),
|
||||
'name': None,
|
||||
'options': dict({
|
||||
}),
|
||||
'original_device_class': None,
|
||||
'original_icon': None,
|
||||
'original_name': 'Monitored hostname',
|
||||
'platform': 'uptime_kuma',
|
||||
'previous_unique_id': None,
|
||||
'suggested_object_id': None,
|
||||
'supported_features': 0,
|
||||
'translation_key': <UptimeKumaSensor.HOSTNAME: 'hostname'>,
|
||||
'unique_id': '123456789_2_hostname',
|
||||
'unit_of_measurement': None,
|
||||
})
|
||||
# ---
|
||||
# name: test_setup[sensor.monitor_2_monitored_hostname-state]
|
||||
StateSnapshot({
|
||||
'attributes': ReadOnlyDict({
|
||||
'friendly_name': 'Monitor 2 Monitored hostname',
|
||||
}),
|
||||
'context': <ANY>,
|
||||
'entity_id': 'sensor.monitor_2_monitored_hostname',
|
||||
'last_changed': <ANY>,
|
||||
'last_reported': <ANY>,
|
||||
'last_updated': <ANY>,
|
||||
'state': 'unknown',
|
||||
})
|
||||
# ---
|
||||
# name: test_setup[sensor.monitor_2_monitored_port-entry]
|
||||
EntityRegistryEntrySnapshot({
|
||||
'aliases': set({
|
||||
}),
|
||||
'area_id': None,
|
||||
'capabilities': None,
|
||||
'config_entry_id': <ANY>,
|
||||
'config_subentry_id': <ANY>,
|
||||
'device_class': None,
|
||||
'device_id': <ANY>,
|
||||
'disabled_by': None,
|
||||
'domain': 'sensor',
|
||||
'entity_category': <EntityCategory.DIAGNOSTIC: 'diagnostic'>,
|
||||
'entity_id': 'sensor.monitor_2_monitored_port',
|
||||
'has_entity_name': True,
|
||||
'hidden_by': None,
|
||||
'icon': None,
|
||||
'id': <ANY>,
|
||||
'labels': set({
|
||||
}),
|
||||
'name': None,
|
||||
'options': dict({
|
||||
}),
|
||||
'original_device_class': None,
|
||||
'original_icon': None,
|
||||
'original_name': 'Monitored port',
|
||||
'platform': 'uptime_kuma',
|
||||
'previous_unique_id': None,
|
||||
'suggested_object_id': None,
|
||||
'supported_features': 0,
|
||||
'translation_key': <UptimeKumaSensor.PORT: 'port'>,
|
||||
'unique_id': '123456789_2_port',
|
||||
'unit_of_measurement': None,
|
||||
})
|
||||
# ---
|
||||
# name: test_setup[sensor.monitor_2_monitored_port-state]
|
||||
StateSnapshot({
|
||||
'attributes': ReadOnlyDict({
|
||||
'friendly_name': 'Monitor 2 Monitored port',
|
||||
}),
|
||||
'context': <ANY>,
|
||||
'entity_id': 'sensor.monitor_2_monitored_port',
|
||||
'last_changed': <ANY>,
|
||||
'last_reported': <ANY>,
|
||||
'last_updated': <ANY>,
|
||||
'state': 'unknown',
|
||||
})
|
||||
# ---
|
||||
# name: test_setup[sensor.monitor_2_response_time-entry]
|
||||
EntityRegistryEntrySnapshot({
|
||||
'aliases': set({
|
||||
}),
|
||||
'area_id': None,
|
||||
'capabilities': None,
|
||||
'config_entry_id': <ANY>,
|
||||
'config_subentry_id': <ANY>,
|
||||
'device_class': None,
|
||||
'device_id': <ANY>,
|
||||
'disabled_by': None,
|
||||
'domain': 'sensor',
|
||||
'entity_category': None,
|
||||
'entity_id': 'sensor.monitor_2_response_time',
|
||||
'has_entity_name': True,
|
||||
'hidden_by': None,
|
||||
'icon': None,
|
||||
'id': <ANY>,
|
||||
'labels': set({
|
||||
}),
|
||||
'name': None,
|
||||
'options': dict({
|
||||
'sensor': dict({
|
||||
'suggested_display_precision': 0,
|
||||
}),
|
||||
}),
|
||||
'original_device_class': <SensorDeviceClass.DURATION: 'duration'>,
|
||||
'original_icon': None,
|
||||
'original_name': 'Response time',
|
||||
'platform': 'uptime_kuma',
|
||||
'previous_unique_id': None,
|
||||
'suggested_object_id': None,
|
||||
'supported_features': 0,
|
||||
'translation_key': <UptimeKumaSensor.RESPONSE_TIME: 'response_time'>,
|
||||
'unique_id': '123456789_2_response_time',
|
||||
'unit_of_measurement': <UnitOfTime.MILLISECONDS: 'ms'>,
|
||||
})
|
||||
# ---
|
||||
# name: test_setup[sensor.monitor_2_response_time-state]
|
||||
StateSnapshot({
|
||||
'attributes': ReadOnlyDict({
|
||||
'device_class': 'duration',
|
||||
'friendly_name': 'Monitor 2 Response time',
|
||||
'unit_of_measurement': <UnitOfTime.MILLISECONDS: 'ms'>,
|
||||
}),
|
||||
'context': <ANY>,
|
||||
'entity_id': 'sensor.monitor_2_response_time',
|
||||
'last_changed': <ANY>,
|
||||
'last_reported': <ANY>,
|
||||
'last_updated': <ANY>,
|
||||
'state': '28',
|
||||
})
|
||||
# ---
|
||||
# name: test_setup[sensor.monitor_2_status-entry]
|
||||
EntityRegistryEntrySnapshot({
|
||||
'aliases': set({
|
||||
}),
|
||||
'area_id': None,
|
||||
'capabilities': dict({
|
||||
'options': list([
|
||||
'down',
|
||||
'up',
|
||||
'pending',
|
||||
'maintenance',
|
||||
]),
|
||||
}),
|
||||
'config_entry_id': <ANY>,
|
||||
'config_subentry_id': <ANY>,
|
||||
'device_class': None,
|
||||
'device_id': <ANY>,
|
||||
'disabled_by': None,
|
||||
'domain': 'sensor',
|
||||
'entity_category': None,
|
||||
'entity_id': 'sensor.monitor_2_status',
|
||||
'has_entity_name': True,
|
||||
'hidden_by': None,
|
||||
'icon': None,
|
||||
'id': <ANY>,
|
||||
'labels': set({
|
||||
}),
|
||||
'name': None,
|
||||
'options': dict({
|
||||
}),
|
||||
'original_device_class': <SensorDeviceClass.ENUM: 'enum'>,
|
||||
'original_icon': None,
|
||||
'original_name': 'Status',
|
||||
'platform': 'uptime_kuma',
|
||||
'previous_unique_id': None,
|
||||
'suggested_object_id': None,
|
||||
'supported_features': 0,
|
||||
'translation_key': <UptimeKumaSensor.STATUS: 'status'>,
|
||||
'unique_id': '123456789_2_status',
|
||||
'unit_of_measurement': None,
|
||||
})
|
||||
# ---
|
||||
# name: test_setup[sensor.monitor_2_status-state]
|
||||
StateSnapshot({
|
||||
'attributes': ReadOnlyDict({
|
||||
'device_class': 'enum',
|
||||
'friendly_name': 'Monitor 2 Status',
|
||||
'options': list([
|
||||
'down',
|
||||
'up',
|
||||
'pending',
|
||||
'maintenance',
|
||||
]),
|
||||
}),
|
||||
'context': <ANY>,
|
||||
'entity_id': 'sensor.monitor_2_status',
|
||||
'last_changed': <ANY>,
|
||||
'last_reported': <ANY>,
|
||||
'last_updated': <ANY>,
|
||||
'state': 'up',
|
||||
})
|
||||
# ---
|
||||
# name: test_setup[sensor.monitor_3_certificate_expiry-entry]
|
||||
EntityRegistryEntrySnapshot({
|
||||
'aliases': set({
|
||||
}),
|
||||
'area_id': None,
|
||||
'capabilities': None,
|
||||
'config_entry_id': <ANY>,
|
||||
'config_subentry_id': <ANY>,
|
||||
'device_class': None,
|
||||
'device_id': <ANY>,
|
||||
'disabled_by': None,
|
||||
'domain': 'sensor',
|
||||
'entity_category': None,
|
||||
'entity_id': 'sensor.monitor_3_certificate_expiry',
|
||||
'has_entity_name': True,
|
||||
'hidden_by': None,
|
||||
'icon': None,
|
||||
'id': <ANY>,
|
||||
'labels': set({
|
||||
}),
|
||||
'name': None,
|
||||
'options': dict({
|
||||
'sensor': dict({
|
||||
'suggested_display_precision': 2,
|
||||
}),
|
||||
}),
|
||||
'original_device_class': <SensorDeviceClass.DURATION: 'duration'>,
|
||||
'original_icon': None,
|
||||
'original_name': 'Certificate expiry',
|
||||
'platform': 'uptime_kuma',
|
||||
'previous_unique_id': None,
|
||||
'suggested_object_id': None,
|
||||
'supported_features': 0,
|
||||
'translation_key': <UptimeKumaSensor.CERT_DAYS_REMAINING: 'cert_days_remaining'>,
|
||||
'unique_id': '123456789_3_cert_days_remaining',
|
||||
'unit_of_measurement': <UnitOfTime.DAYS: 'd'>,
|
||||
})
|
||||
# ---
|
||||
# name: test_setup[sensor.monitor_3_certificate_expiry-state]
|
||||
StateSnapshot({
|
||||
'attributes': ReadOnlyDict({
|
||||
'device_class': 'duration',
|
||||
'friendly_name': 'Monitor 3 Certificate expiry',
|
||||
'unit_of_measurement': <UnitOfTime.DAYS: 'd'>,
|
||||
}),
|
||||
'context': <ANY>,
|
||||
'entity_id': 'sensor.monitor_3_certificate_expiry',
|
||||
'last_changed': <ANY>,
|
||||
'last_reported': <ANY>,
|
||||
'last_updated': <ANY>,
|
||||
'state': '90',
|
||||
})
|
||||
# ---
|
||||
# name: test_setup[sensor.monitor_3_monitor_type-entry]
|
||||
EntityRegistryEntrySnapshot({
|
||||
'aliases': set({
|
||||
}),
|
||||
'area_id': None,
|
||||
'capabilities': dict({
|
||||
'options': list([
|
||||
'http',
|
||||
'port',
|
||||
'ping',
|
||||
'keyword',
|
||||
'dns',
|
||||
'push',
|
||||
'steam',
|
||||
'mqtt',
|
||||
'sqlserver',
|
||||
'json_query',
|
||||
'group',
|
||||
'docker',
|
||||
'grpc_keyword',
|
||||
'real_browser',
|
||||
'gamedig',
|
||||
'kafka_producer',
|
||||
'postgres',
|
||||
'mysql',
|
||||
'mongodb',
|
||||
'radius',
|
||||
'redis',
|
||||
'tailscale_ping',
|
||||
'smtp',
|
||||
'snmp',
|
||||
'rabbit_mq',
|
||||
'manual',
|
||||
'unknown',
|
||||
]),
|
||||
}),
|
||||
'config_entry_id': <ANY>,
|
||||
'config_subentry_id': <ANY>,
|
||||
'device_class': None,
|
||||
'device_id': <ANY>,
|
||||
'disabled_by': None,
|
||||
'domain': 'sensor',
|
||||
'entity_category': <EntityCategory.DIAGNOSTIC: 'diagnostic'>,
|
||||
'entity_id': 'sensor.monitor_3_monitor_type',
|
||||
'has_entity_name': True,
|
||||
'hidden_by': None,
|
||||
'icon': None,
|
||||
'id': <ANY>,
|
||||
'labels': set({
|
||||
}),
|
||||
'name': None,
|
||||
'options': dict({
|
||||
}),
|
||||
'original_device_class': <SensorDeviceClass.ENUM: 'enum'>,
|
||||
'original_icon': None,
|
||||
'original_name': 'Monitor type',
|
||||
'platform': 'uptime_kuma',
|
||||
'previous_unique_id': None,
|
||||
'suggested_object_id': None,
|
||||
'supported_features': 0,
|
||||
'translation_key': <UptimeKumaSensor.TYPE: 'type'>,
|
||||
'unique_id': '123456789_3_type',
|
||||
'unit_of_measurement': None,
|
||||
})
|
||||
# ---
|
||||
# name: test_setup[sensor.monitor_3_monitor_type-state]
|
||||
StateSnapshot({
|
||||
'attributes': ReadOnlyDict({
|
||||
'device_class': 'enum',
|
||||
'friendly_name': 'Monitor 3 Monitor type',
|
||||
'options': list([
|
||||
'http',
|
||||
'port',
|
||||
'ping',
|
||||
'keyword',
|
||||
'dns',
|
||||
'push',
|
||||
'steam',
|
||||
'mqtt',
|
||||
'sqlserver',
|
||||
'json_query',
|
||||
'group',
|
||||
'docker',
|
||||
'grpc_keyword',
|
||||
'real_browser',
|
||||
'gamedig',
|
||||
'kafka_producer',
|
||||
'postgres',
|
||||
'mysql',
|
||||
'mongodb',
|
||||
'radius',
|
||||
'redis',
|
||||
'tailscale_ping',
|
||||
'smtp',
|
||||
'snmp',
|
||||
'rabbit_mq',
|
||||
'manual',
|
||||
'unknown',
|
||||
]),
|
||||
}),
|
||||
'context': <ANY>,
|
||||
'entity_id': 'sensor.monitor_3_monitor_type',
|
||||
'last_changed': <ANY>,
|
||||
'last_reported': <ANY>,
|
||||
'last_updated': <ANY>,
|
||||
'state': 'json_query',
|
||||
})
|
||||
# ---
|
||||
# name: test_setup[sensor.monitor_3_monitored_url-entry]
|
||||
EntityRegistryEntrySnapshot({
|
||||
'aliases': set({
|
||||
}),
|
||||
'area_id': None,
|
||||
'capabilities': None,
|
||||
'config_entry_id': <ANY>,
|
||||
'config_subentry_id': <ANY>,
|
||||
'device_class': None,
|
||||
'device_id': <ANY>,
|
||||
'disabled_by': None,
|
||||
'domain': 'sensor',
|
||||
'entity_category': <EntityCategory.DIAGNOSTIC: 'diagnostic'>,
|
||||
'entity_id': 'sensor.monitor_3_monitored_url',
|
||||
'has_entity_name': True,
|
||||
'hidden_by': None,
|
||||
'icon': None,
|
||||
'id': <ANY>,
|
||||
'labels': set({
|
||||
}),
|
||||
'name': None,
|
||||
'options': dict({
|
||||
}),
|
||||
'original_device_class': None,
|
||||
'original_icon': None,
|
||||
'original_name': 'Monitored URL',
|
||||
'platform': 'uptime_kuma',
|
||||
'previous_unique_id': None,
|
||||
'suggested_object_id': None,
|
||||
'supported_features': 0,
|
||||
'translation_key': <UptimeKumaSensor.URL: 'url'>,
|
||||
'unique_id': '123456789_3_url',
|
||||
'unit_of_measurement': None,
|
||||
})
|
||||
# ---
|
||||
# name: test_setup[sensor.monitor_3_monitored_url-state]
|
||||
StateSnapshot({
|
||||
'attributes': ReadOnlyDict({
|
||||
'friendly_name': 'Monitor 3 Monitored URL',
|
||||
}),
|
||||
'context': <ANY>,
|
||||
'entity_id': 'sensor.monitor_3_monitored_url',
|
||||
'last_changed': <ANY>,
|
||||
'last_reported': <ANY>,
|
||||
'last_updated': <ANY>,
|
||||
'state': 'https://down.example.org',
|
||||
})
|
||||
# ---
|
||||
# name: test_setup[sensor.monitor_3_response_time-entry]
|
||||
EntityRegistryEntrySnapshot({
|
||||
'aliases': set({
|
||||
}),
|
||||
'area_id': None,
|
||||
'capabilities': None,
|
||||
'config_entry_id': <ANY>,
|
||||
'config_subentry_id': <ANY>,
|
||||
'device_class': None,
|
||||
'device_id': <ANY>,
|
||||
'disabled_by': None,
|
||||
'domain': 'sensor',
|
||||
'entity_category': None,
|
||||
'entity_id': 'sensor.monitor_3_response_time',
|
||||
'has_entity_name': True,
|
||||
'hidden_by': None,
|
||||
'icon': None,
|
||||
'id': <ANY>,
|
||||
'labels': set({
|
||||
}),
|
||||
'name': None,
|
||||
'options': dict({
|
||||
'sensor': dict({
|
||||
'suggested_display_precision': 0,
|
||||
}),
|
||||
}),
|
||||
'original_device_class': <SensorDeviceClass.DURATION: 'duration'>,
|
||||
'original_icon': None,
|
||||
'original_name': 'Response time',
|
||||
'platform': 'uptime_kuma',
|
||||
'previous_unique_id': None,
|
||||
'suggested_object_id': None,
|
||||
'supported_features': 0,
|
||||
'translation_key': <UptimeKumaSensor.RESPONSE_TIME: 'response_time'>,
|
||||
'unique_id': '123456789_3_response_time',
|
||||
'unit_of_measurement': <UnitOfTime.MILLISECONDS: 'ms'>,
|
||||
})
|
||||
# ---
|
||||
# name: test_setup[sensor.monitor_3_response_time-state]
|
||||
StateSnapshot({
|
||||
'attributes': ReadOnlyDict({
|
||||
'device_class': 'duration',
|
||||
'friendly_name': 'Monitor 3 Response time',
|
||||
'unit_of_measurement': <UnitOfTime.MILLISECONDS: 'ms'>,
|
||||
}),
|
||||
'context': <ANY>,
|
||||
'entity_id': 'sensor.monitor_3_response_time',
|
||||
'last_changed': <ANY>,
|
||||
'last_reported': <ANY>,
|
||||
'last_updated': <ANY>,
|
||||
'state': '120',
|
||||
})
|
||||
# ---
|
||||
# name: test_setup[sensor.monitor_3_status-entry]
|
||||
EntityRegistryEntrySnapshot({
|
||||
'aliases': set({
|
||||
}),
|
||||
'area_id': None,
|
||||
'capabilities': dict({
|
||||
'options': list([
|
||||
'down',
|
||||
'up',
|
||||
'pending',
|
||||
'maintenance',
|
||||
]),
|
||||
}),
|
||||
'config_entry_id': <ANY>,
|
||||
'config_subentry_id': <ANY>,
|
||||
'device_class': None,
|
||||
'device_id': <ANY>,
|
||||
'disabled_by': None,
|
||||
'domain': 'sensor',
|
||||
'entity_category': None,
|
||||
'entity_id': 'sensor.monitor_3_status',
|
||||
'has_entity_name': True,
|
||||
'hidden_by': None,
|
||||
'icon': None,
|
||||
'id': <ANY>,
|
||||
'labels': set({
|
||||
}),
|
||||
'name': None,
|
||||
'options': dict({
|
||||
}),
|
||||
'original_device_class': <SensorDeviceClass.ENUM: 'enum'>,
|
||||
'original_icon': None,
|
||||
'original_name': 'Status',
|
||||
'platform': 'uptime_kuma',
|
||||
'previous_unique_id': None,
|
||||
'suggested_object_id': None,
|
||||
'supported_features': 0,
|
||||
'translation_key': <UptimeKumaSensor.STATUS: 'status'>,
|
||||
'unique_id': '123456789_3_status',
|
||||
'unit_of_measurement': None,
|
||||
})
|
||||
# ---
|
||||
# name: test_setup[sensor.monitor_3_status-state]
|
||||
StateSnapshot({
|
||||
'attributes': ReadOnlyDict({
|
||||
'device_class': 'enum',
|
||||
'friendly_name': 'Monitor 3 Status',
|
||||
'options': list([
|
||||
'down',
|
||||
'up',
|
||||
'pending',
|
||||
'maintenance',
|
||||
]),
|
||||
}),
|
||||
'context': <ANY>,
|
||||
'entity_id': 'sensor.monitor_3_status',
|
||||
'last_changed': <ANY>,
|
||||
'last_reported': <ANY>,
|
||||
'last_updated': <ANY>,
|
||||
'state': 'down',
|
||||
})
|
||||
# ---
|
122
tests/components/uptime_kuma/test_config_flow.py
Normal file
122
tests/components/uptime_kuma/test_config_flow.py
Normal file
@@ -0,0 +1,122 @@
"""Test the Uptime Kuma config flow."""

from unittest.mock import AsyncMock

import pytest
from pythonkuma import UptimeKumaAuthenticationException, UptimeKumaConnectionException

from homeassistant.components.uptime_kuma.const import DOMAIN
from homeassistant.config_entries import SOURCE_USER
from homeassistant.const import CONF_API_KEY, CONF_URL, CONF_VERIFY_SSL
from homeassistant.core import HomeAssistant
from homeassistant.data_entry_flow import FlowResultType

from tests.common import MockConfigEntry


@pytest.mark.usefixtures("mock_pythonkuma")
async def test_form(hass: HomeAssistant, mock_setup_entry: AsyncMock) -> None:
    """Test we get the form."""
    result = await hass.config_entries.flow.async_init(
        DOMAIN, context={"source": SOURCE_USER}
    )
    assert result["type"] is FlowResultType.FORM
    assert result["errors"] == {}

    result = await hass.config_entries.flow.async_configure(
        result["flow_id"],
        {
            CONF_URL: "https://uptime.example.org/",
            CONF_VERIFY_SSL: True,
            CONF_API_KEY: "apikey",
        },
    )

    assert result["type"] is FlowResultType.CREATE_ENTRY
    assert result["title"] == "uptime.example.org"
    assert result["data"] == {
        CONF_URL: "https://uptime.example.org/",
        CONF_VERIFY_SSL: True,
        CONF_API_KEY: "apikey",
    }
    assert len(mock_setup_entry.mock_calls) == 1


@pytest.mark.parametrize(
    ("raise_error", "text_error"),
    [
        (UptimeKumaConnectionException, "cannot_connect"),
        (UptimeKumaAuthenticationException, "invalid_auth"),
        (ValueError, "unknown"),
    ],
)
async def test_form_errors(
    hass: HomeAssistant,
    mock_setup_entry: AsyncMock,
    mock_pythonkuma: AsyncMock,
    raise_error: Exception,
    text_error: str,
) -> None:
    """Test we handle errors and recover."""
    result = await hass.config_entries.flow.async_init(
        DOMAIN, context={"source": SOURCE_USER}
    )

    mock_pythonkuma.metrics.side_effect = raise_error
    result = await hass.config_entries.flow.async_configure(
        result["flow_id"],
        {
            CONF_URL: "https://uptime.example.org/",
            CONF_VERIFY_SSL: True,
            CONF_API_KEY: "apikey",
        },
    )

    assert result["type"] is FlowResultType.FORM
    assert result["errors"] == {"base": text_error}

    mock_pythonkuma.metrics.side_effect = None
    result = await hass.config_entries.flow.async_configure(
        result["flow_id"],
        {
            CONF_URL: "https://uptime.example.org/",
            CONF_VERIFY_SSL: True,
            CONF_API_KEY: "apikey",
        },
    )
    await hass.async_block_till_done()

    assert result["type"] is FlowResultType.CREATE_ENTRY
    assert result["title"] == "uptime.example.org"
    assert result["data"] == {
        CONF_URL: "https://uptime.example.org/",
        CONF_VERIFY_SSL: True,
        CONF_API_KEY: "apikey",
    }
    assert len(mock_setup_entry.mock_calls) == 1


@pytest.mark.usefixtures("mock_pythonkuma")
async def test_form_already_configured(
    hass: HomeAssistant,
    config_entry: MockConfigEntry,
) -> None:
    """Test we abort when entry is already configured."""

    config_entry.add_to_hass(hass)

    result = await hass.config_entries.flow.async_init(
        DOMAIN, context={"source": SOURCE_USER}
    )

    result = await hass.config_entries.flow.async_configure(
        result["flow_id"],
        {
            CONF_URL: "https://uptime.example.org/",
            CONF_VERIFY_SSL: True,
            CONF_API_KEY: "apikey",
        },
    )

    assert result["type"] is FlowResultType.ABORT
    assert result["reason"] == "already_configured"
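These tests rely on three fixtures defined in the integration's conftest.py, which is not part of this excerpt: config_entry, mock_setup_entry and mock_pythonkuma. The sketch below shows one way such a conftest could be shaped, inferred purely from how the fixtures are used above and in the other test files; the patch targets, the entry_id value, and the exact monitor fields are assumptions, not the committed code.

"""Illustrative fixtures for the Uptime Kuma tests (sketch, not the committed conftest.py)."""

from collections.abc import Generator
from unittest.mock import AsyncMock, patch

import pytest
from pythonkuma import MonitorStatus, UptimeKumaMonitor, UptimeKumaVersion

from homeassistant.components.uptime_kuma.const import DOMAIN
from homeassistant.const import CONF_API_KEY, CONF_URL, CONF_VERIFY_SSL

from tests.common import MockConfigEntry


@pytest.fixture
def mock_setup_entry() -> Generator[AsyncMock]:
    """Bypass real setup so the config flow tests only exercise the flow."""
    with patch(
        "homeassistant.components.uptime_kuma.async_setup_entry", return_value=True
    ) as mock_setup:
        yield mock_setup


@pytest.fixture
def mock_pythonkuma() -> Generator[AsyncMock]:
    """Mock the pythonkuma client (patch targets are assumptions)."""
    monitor = UptimeKumaMonitor(
        monitor_id=3,
        monitor_name="Monitor 3",
        monitor_hostname="null",
        monitor_port="null",
        monitor_status=MonitorStatus.DOWN,
        monitor_url="test",
    )
    with (
        patch(
            "homeassistant.components.uptime_kuma.config_flow.UptimeKuma"
        ) as mock_client,
        patch(
            "homeassistant.components.uptime_kuma.coordinator.UptimeKuma",
            new=mock_client,
        ),
    ):
        client = mock_client.return_value
        client.metrics = AsyncMock(return_value={3: monitor})
        client.version = UptimeKumaVersion(
            version="2.0.0", major="2", minor="0", patch="0"
        )
        yield client


@pytest.fixture
def config_entry() -> MockConfigEntry:
    """Config entry whose entry_id matches the '123456789_…' unique_id prefix in the snapshots."""
    return MockConfigEntry(
        domain=DOMAIN,
        title="uptime.example.org",
        entry_id="123456789",
        data={
            CONF_URL: "https://uptime.example.org/",
            CONF_VERIFY_SSL: True,
            CONF_API_KEY: "apikey",
        },
    )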
52
tests/components/uptime_kuma/test_init.py
Normal file
@@ -0,0 +1,52 @@
"""Tests for the Uptime Kuma integration."""

from unittest.mock import AsyncMock

import pytest
from pythonkuma import UptimeKumaAuthenticationException, UptimeKumaException

from homeassistant.config_entries import ConfigEntryState
from homeassistant.core import HomeAssistant

from tests.common import MockConfigEntry


@pytest.mark.usefixtures("mock_pythonkuma")
async def test_entry_setup_unload(
    hass: HomeAssistant, config_entry: MockConfigEntry
) -> None:
    """Test integration setup and unload."""

    config_entry.add_to_hass(hass)
    assert await hass.config_entries.async_setup(config_entry.entry_id)
    await hass.async_block_till_done()

    assert config_entry.state is ConfigEntryState.LOADED

    assert await hass.config_entries.async_unload(config_entry.entry_id)

    assert config_entry.state is ConfigEntryState.NOT_LOADED


@pytest.mark.parametrize(
    ("exception", "state"),
    [
        (UptimeKumaAuthenticationException, ConfigEntryState.SETUP_ERROR),
        (UptimeKumaException, ConfigEntryState.SETUP_RETRY),
    ],
)
async def test_config_entry_not_ready(
    hass: HomeAssistant,
    config_entry: MockConfigEntry,
    mock_pythonkuma: AsyncMock,
    exception: Exception,
    state: ConfigEntryState,
) -> None:
    """Test config entry not ready."""

    mock_pythonkuma.metrics.side_effect = exception
    config_entry.add_to_hass(hass)
    await hass.config_entries.async_setup(config_entry.entry_id)
    await hass.async_block_till_done()

    assert config_entry.state is state
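test_config_entry_not_ready encodes the expected error mapping for the first refresh: an authentication failure should leave the entry in SETUP_ERROR (the coordinator raises ConfigEntryAuthFailed), while any other pythonkuma error should lead to SETUP_RETRY (UpdateFailed during the first refresh). A minimal sketch of an _async_update_data with that behaviour follows; the class name and the self.api attribute are assumptions, and this is not the integration's actual coordinator code.

from pythonkuma import (
    UptimeKumaAuthenticationException,
    UptimeKumaException,
    UptimeKumaMonitor,
)

from homeassistant.exceptions import ConfigEntryAuthFailed
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed


class SketchCoordinator(DataUpdateCoordinator[dict[int | str, UptimeKumaMonitor]]):
    """Illustrative coordinator showing the error mapping the tests expect."""

    async def _async_update_data(self) -> dict[int | str, UptimeKumaMonitor]:
        try:
            # self.api is assumed to be a pythonkuma client set up in __init__ (not shown).
            return await self.api.metrics()
        except UptimeKumaAuthenticationException as err:
            # During the first refresh this puts the entry into ConfigEntryState.SETUP_ERROR.
            raise ConfigEntryAuthFailed from err
        except UptimeKumaException as err:
            # During the first refresh this puts the entry into ConfigEntryState.SETUP_RETRY.
            raise UpdateFailed(f"Unable to fetch metrics: {err}") from err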
97
tests/components/uptime_kuma/test_sensor.py
Normal file
@@ -0,0 +1,97 @@
"""Test for Uptime Kuma sensor platform."""

from collections.abc import Generator
from datetime import timedelta
from unittest.mock import AsyncMock, patch

from freezegun.api import FrozenDateTimeFactory
import pytest
from pythonkuma import MonitorStatus, UptimeKumaMonitor, UptimeKumaVersion
from syrupy.assertion import SnapshotAssertion

from homeassistant.config_entries import ConfigEntryState
from homeassistant.const import Platform
from homeassistant.core import HomeAssistant
from homeassistant.helpers import entity_registry as er

from tests.common import MockConfigEntry, async_fire_time_changed, snapshot_platform


@pytest.fixture(autouse=True)
def sensor_only() -> Generator[None]:
    """Enable only the sensor platform."""
    with patch(
        "homeassistant.components.uptime_kuma._PLATFORMS",
        [Platform.SENSOR],
    ):
        yield


@pytest.mark.usefixtures("mock_pythonkuma", "entity_registry_enabled_by_default")
async def test_setup(
    hass: HomeAssistant,
    config_entry: MockConfigEntry,
    snapshot: SnapshotAssertion,
    entity_registry: er.EntityRegistry,
) -> None:
    """Snapshot test states of sensor platform."""

    config_entry.add_to_hass(hass)
    await hass.config_entries.async_setup(config_entry.entry_id)
    await hass.async_block_till_done()

    assert config_entry.state is ConfigEntryState.LOADED

    await snapshot_platform(hass, entity_registry, snapshot, config_entry.entry_id)


@pytest.mark.usefixtures("entity_registry_enabled_by_default")
async def test_migrate_unique_id(
    hass: HomeAssistant,
    config_entry: MockConfigEntry,
    mock_pythonkuma: AsyncMock,
    snapshot: SnapshotAssertion,
    entity_registry: er.EntityRegistry,
    freezer: FrozenDateTimeFactory,
) -> None:
    """Test migration of name-based unique IDs to id-based unique IDs."""
    mock_pythonkuma.metrics.return_value = {
        "Monitor": UptimeKumaMonitor(
            monitor_name="Monitor",
            monitor_hostname="null",
            monitor_port="null",
            monitor_status=MonitorStatus.UP,
            monitor_url="test",
        )
    }
    mock_pythonkuma.version = UptimeKumaVersion(
        version="1.23.16", major="1", minor="23", patch="16"
    )
    config_entry.add_to_hass(hass)
    await hass.config_entries.async_setup(config_entry.entry_id)
    await hass.async_block_till_done()

    assert config_entry.state is ConfigEntryState.LOADED

    assert (entity := entity_registry.async_get("sensor.monitor_status"))
    assert entity.unique_id == "123456789_Monitor_status"

    mock_pythonkuma.metrics.return_value = {
        1: UptimeKumaMonitor(
            monitor_id=1,
            monitor_name="Monitor",
            monitor_hostname="null",
            monitor_port="null",
            monitor_status=MonitorStatus.UP,
            monitor_url="test",
        )
    }
    mock_pythonkuma.version = UptimeKumaVersion(
        version="2.0.0-beta.3", major="2", minor="0", patch="0-beta.3"
    )
    freezer.tick(timedelta(seconds=30))
    async_fire_time_changed(hass)
    await hass.async_block_till_done()

    assert (entity := entity_registry.async_get("sensor.monitor_status"))
    assert entity.unique_id == "123456789_1_status"
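test_migrate_unique_id documents the 1.x to 2.x behaviour: against an Uptime Kuma 1.x server pythonkuma keys monitors by name and the entities get name-based unique IDs ("123456789_Monitor_status"); once the server reports a 2.x version, monitors are keyed by their numeric id and the existing registry entries are rewritten in place ("123456789_1_status") rather than recreated. One possible shape for such a migration is sketched below with the entity-registry helper async_migrate_entries; the function name, the assumption that unique IDs are prefixed with the config entry id, and the name-matching logic are all illustrative, not the integration's actual implementation.

from pythonkuma import UptimeKumaMonitor

from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers import entity_registry as er


async def async_migrate_name_keyed_unique_ids(
    hass: HomeAssistant,
    config_entry_id: str,
    monitors: dict[int | str, UptimeKumaMonitor],
) -> None:
    """Rewrite name-based unique IDs to id-based ones after a 2.x upgrade (sketch)."""
    # With a 2.x server the metrics dict is keyed by numeric monitor id,
    # so each monitor name can be mapped back to its id.
    name_to_id = {
        monitor.monitor_name: monitor_id for monitor_id, monitor in monitors.items()
    }

    @callback
    def _update(entity_entry: er.RegistryEntry) -> dict[str, str] | None:
        # Assumed unique_id layout: "<config_entry_id>_<monitor key>_<sensor key>".
        rest = entity_entry.unique_id.removeprefix(f"{config_entry_id}_")
        for name, monitor_id in name_to_id.items():
            if rest.startswith(f"{name}_"):
                sensor_key = rest.removeprefix(f"{name}_")
                return {
                    "new_unique_id": f"{config_entry_id}_{monitor_id}_{sensor_key}"
                }
        # Already id-based (or unknown monitor): leave the entry untouched.
        return None

    await er.async_migrate_entries(hass, config_entry_id, _update)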