mirror of https://github.com/home-assistant/core.git
synced 2025-09-27 22:09:24 +00:00

Compare commits: dev...improve-ha (3 commits)
Commits (SHA1):
47dc51511c
127fcd4e16
bfea5eec26
.github/workflows/codeql.yml (vendored, 4 changes)
@@ -24,11 +24,11 @@ jobs:
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0

- name: Initialize CodeQL
uses: github/codeql-action/init@303c0aef88fc2fe5ff6d63d3b1596bfd83dfa1f9 # v3.30.4
uses: github/codeql-action/init@192325c86100d080feab897ff886c34abd4c83a3 # v3.30.3
with:
languages: python

- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@303c0aef88fc2fe5ff6d63d3b1596bfd83dfa1f9 # v3.30.4
uses: github/codeql-action/analyze@192325c86100d080feab897ff886c34abd4c83a3 # v3.30.3
with:
category: "/language:python"
.github/workflows/wheels.yml (vendored, 4 changes)
@@ -160,7 +160,7 @@ jobs:

# home-assistant/wheels doesn't support sha pinning
- name: Build wheels
uses: home-assistant/wheels@2025.09.1
uses: home-assistant/wheels@2025.09.0
with:
abi: ${{ matrix.abi }}
tag: musllinux_1_2

@@ -221,7 +221,7 @@ jobs:

# home-assistant/wheels doesn't support sha pinning
- name: Build wheels
uses: home-assistant/wheels@2025.09.1
uses: home-assistant/wheels@2025.09.0
with:
abi: ${{ matrix.abi }}
tag: musllinux_1_2
build.yaml (10 changes)
@@ -1,10 +1,10 @@
image: ghcr.io/home-assistant/{arch}-homeassistant
build_from:
aarch64: ghcr.io/home-assistant/aarch64-homeassistant-base:2025.09.3
armhf: ghcr.io/home-assistant/armhf-homeassistant-base:2025.09.3
armv7: ghcr.io/home-assistant/armv7-homeassistant-base:2025.09.3
amd64: ghcr.io/home-assistant/amd64-homeassistant-base:2025.09.3
i386: ghcr.io/home-assistant/i386-homeassistant-base:2025.09.3
aarch64: ghcr.io/home-assistant/aarch64-homeassistant-base:2025.09.1
armhf: ghcr.io/home-assistant/armhf-homeassistant-base:2025.09.1
armv7: ghcr.io/home-assistant/armv7-homeassistant-base:2025.09.1
amd64: ghcr.io/home-assistant/amd64-homeassistant-base:2025.09.1
i386: ghcr.io/home-assistant/i386-homeassistant-base:2025.09.1
codenotary:
signer: notary@home-assistant.io
base_image: notary@home-assistant.io
@@ -4,18 +4,10 @@ from __future__ import annotations

from airos.airos8 import AirOS8

from homeassistant.const import (
CONF_HOST,
CONF_PASSWORD,
CONF_SSL,
CONF_USERNAME,
CONF_VERIFY_SSL,
Platform,
)
from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_USERNAME, Platform
from homeassistant.core import HomeAssistant
from homeassistant.helpers.aiohttp_client import async_get_clientsession

from .const import DEFAULT_SSL, DEFAULT_VERIFY_SSL, SECTION_ADVANCED_SETTINGS
from .coordinator import AirOSConfigEntry, AirOSDataUpdateCoordinator

_PLATFORMS: list[Platform] = [

@@ -29,16 +21,13 @@ async def async_setup_entry(hass: HomeAssistant, entry: AirOSConfigEntry) -> boo

# By default airOS 8 comes with self-signed SSL certificates,
# with no option in the web UI to change or upload a custom certificate.
session = async_get_clientsession(
hass, verify_ssl=entry.data[SECTION_ADVANCED_SETTINGS][CONF_VERIFY_SSL]
)
session = async_get_clientsession(hass, verify_ssl=False)

airos_device = AirOS8(
host=entry.data[CONF_HOST],
username=entry.data[CONF_USERNAME],
password=entry.data[CONF_PASSWORD],
session=session,
use_ssl=entry.data[SECTION_ADVANCED_SETTINGS][CONF_SSL],
)

coordinator = AirOSDataUpdateCoordinator(hass, entry, airos_device)

@@ -51,30 +40,6 @@ async def async_setup_entry(hass: HomeAssistant, entry: AirOSConfigEntry) -> boo

return True


async def async_migrate_entry(hass: HomeAssistant, entry: AirOSConfigEntry) -> bool:
"""Migrate old config entry."""

if entry.version > 1:
# This means the user has downgraded from a future version
return False

if entry.version == 1 and entry.minor_version == 1:
new_data = {**entry.data}
advanced_data = {
CONF_SSL: DEFAULT_SSL,
CONF_VERIFY_SSL: DEFAULT_VERIFY_SSL,
}
new_data[SECTION_ADVANCED_SETTINGS] = advanced_data

hass.config_entries.async_update_entry(
entry,
data=new_data,
minor_version=2,
)

return True


async def async_unload_entry(hass: HomeAssistant, entry: AirOSConfigEntry) -> bool:
"""Unload a config entry."""
return await hass.config_entries.async_unload_platforms(entry, _PLATFORMS)
@@ -2,7 +2,6 @@

from __future__ import annotations

from collections.abc import Mapping
import logging
from typing import Any

@@ -15,23 +14,11 @@ from airos.exceptions import (
)
import voluptuous as vol

from homeassistant.config_entries import SOURCE_REAUTH, ConfigFlow, ConfigFlowResult
from homeassistant.const import (
CONF_HOST,
CONF_PASSWORD,
CONF_SSL,
CONF_USERNAME,
CONF_VERIFY_SSL,
)
from homeassistant.data_entry_flow import section
from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_USERNAME
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.selector import (
TextSelector,
TextSelectorConfig,
TextSelectorType,
)

from .const import DEFAULT_SSL, DEFAULT_VERIFY_SSL, DOMAIN, SECTION_ADVANCED_SETTINGS
from .const import DOMAIN
from .coordinator import AirOS8

_LOGGER = logging.getLogger(__name__)

@@ -41,15 +28,6 @@ STEP_USER_DATA_SCHEMA = vol.Schema(
vol.Required(CONF_HOST): str,
vol.Required(CONF_USERNAME, default="ubnt"): str,
vol.Required(CONF_PASSWORD): str,
vol.Required(SECTION_ADVANCED_SETTINGS): section(
vol.Schema(
{
vol.Required(CONF_SSL, default=DEFAULT_SSL): bool,
vol.Required(CONF_VERIFY_SSL, default=DEFAULT_VERIFY_SSL): bool,
}
),
{"collapsed": True},
),
}
)

@@ -58,109 +36,47 @@ class AirOSConfigFlow(ConfigFlow, domain=DOMAIN):
"""Handle a config flow for Ubiquiti airOS."""

VERSION = 1
MINOR_VERSION = 2

def __init__(self) -> None:
"""Initialize the config flow."""
super().__init__()
self.airos_device: AirOS8
self.errors: dict[str, str] = {}

async def async_step_user(
self, user_input: dict[str, Any] | None = None
self,
user_input: dict[str, Any] | None = None,
) -> ConfigFlowResult:
"""Handle the manual input of host and credentials."""
self.errors = {}
"""Handle the initial step."""
errors: dict[str, str] = {}
if user_input is not None:
validated_info = await self._validate_and_get_device_info(user_input)
if validated_info:
return self.async_create_entry(
title=validated_info["title"],
data=validated_info["data"],
)
return self.async_show_form(
step_id="user", data_schema=STEP_USER_DATA_SCHEMA, errors=self.errors
)
# By default airOS 8 comes with self-signed SSL certificates,
# with no option in the web UI to change or upload a custom certificate.
session = async_get_clientsession(self.hass, verify_ssl=False)

async def _validate_and_get_device_info(
self, config_data: dict[str, Any]
) -> dict[str, Any] | None:
"""Validate user input with the device API."""
# By default airOS 8 comes with self-signed SSL certificates,
# with no option in the web UI to change or upload a custom certificate.
session = async_get_clientsession(
self.hass,
verify_ssl=config_data[SECTION_ADVANCED_SETTINGS][CONF_VERIFY_SSL],
)
airos_device = AirOS8(
host=user_input[CONF_HOST],
username=user_input[CONF_USERNAME],
password=user_input[CONF_PASSWORD],
session=session,
)
try:
await airos_device.login()
airos_data = await airos_device.status()

airos_device = AirOS8(
host=config_data[CONF_HOST],
username=config_data[CONF_USERNAME],
password=config_data[CONF_PASSWORD],
session=session,
use_ssl=config_data[SECTION_ADVANCED_SETTINGS][CONF_SSL],
)
try:
await airos_device.login()
airos_data = await airos_device.status()

except (
AirOSConnectionSetupError,
AirOSDeviceConnectionError,
):
self.errors["base"] = "cannot_connect"
except (AirOSConnectionAuthenticationError, AirOSDataMissingError):
self.errors["base"] = "invalid_auth"
except AirOSKeyDataMissingError:
self.errors["base"] = "key_data_missing"
except Exception:
_LOGGER.exception("Unexpected exception during credential validation")
self.errors["base"] = "unknown"
else:
await self.async_set_unique_id(airos_data.derived.mac)

if self.source == SOURCE_REAUTH:
self._abort_if_unique_id_mismatch()
except (
AirOSConnectionSetupError,
AirOSDeviceConnectionError,
):
errors["base"] = "cannot_connect"
except (AirOSConnectionAuthenticationError, AirOSDataMissingError):
errors["base"] = "invalid_auth"
except AirOSKeyDataMissingError:
errors["base"] = "key_data_missing"
except Exception:
_LOGGER.exception("Unexpected exception")
errors["base"] = "unknown"
else:
await self.async_set_unique_id(airos_data.derived.mac)
self._abort_if_unique_id_configured()

return {"title": airos_data.host.hostname, "data": config_data}

return None

async def async_step_reauth(
self,
user_input: Mapping[str, Any],
) -> ConfigFlowResult:
"""Perform reauthentication upon an API authentication error."""
return await self.async_step_reauth_confirm(user_input)

async def async_step_reauth_confirm(
self,
user_input: Mapping[str, Any],
) -> ConfigFlowResult:
"""Perform reauthentication upon an API authentication error."""
self.errors = {}

if user_input:
validate_data = {**self._get_reauth_entry().data, **user_input}
if await self._validate_and_get_device_info(config_data=validate_data):
return self.async_update_reload_and_abort(
self._get_reauth_entry(),
data_updates=validate_data,
return self.async_create_entry(
title=airos_data.host.hostname, data=user_input
)

return self.async_show_form(
step_id="reauth_confirm",
data_schema=vol.Schema(
{
vol.Required(CONF_PASSWORD): TextSelector(
TextSelectorConfig(
type=TextSelectorType.PASSWORD,
autocomplete="current-password",
)
),
}
),
errors=self.errors,
step_id="user", data_schema=STEP_USER_DATA_SCHEMA, errors=errors
)
@@ -7,8 +7,3 @@ DOMAIN = "airos"
SCAN_INTERVAL = timedelta(minutes=1)

MANUFACTURER = "Ubiquiti"

DEFAULT_VERIFY_SSL = False
DEFAULT_SSL = True

SECTION_ADVANCED_SETTINGS = "advanced_settings"
@@ -14,7 +14,7 @@ from airos.exceptions import (

from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryAuthFailed
from homeassistant.exceptions import ConfigEntryError
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed

from .const import DOMAIN, SCAN_INTERVAL

@@ -47,9 +47,9 @@ class AirOSDataUpdateCoordinator(DataUpdateCoordinator[AirOS8Data]):
try:
await self.airos_device.login()
return await self.airos_device.status()
except AirOSConnectionAuthenticationError as err:
except (AirOSConnectionAuthenticationError,) as err:
_LOGGER.exception("Error authenticating with airOS device")
raise ConfigEntryAuthFailed(
raise ConfigEntryError(
translation_domain=DOMAIN, translation_key="invalid_auth"
) from err
except (
@@ -2,11 +2,11 @@

from __future__ import annotations

from homeassistant.const import CONF_HOST, CONF_SSL
from homeassistant.const import CONF_HOST
from homeassistant.helpers.device_registry import CONNECTION_NETWORK_MAC, DeviceInfo
from homeassistant.helpers.update_coordinator import CoordinatorEntity

from .const import DOMAIN, MANUFACTURER, SECTION_ADVANCED_SETTINGS
from .const import DOMAIN, MANUFACTURER
from .coordinator import AirOSDataUpdateCoordinator

@@ -20,14 +20,9 @@ class AirOSEntity(CoordinatorEntity[AirOSDataUpdateCoordinator]):
super().__init__(coordinator)

airos_data = self.coordinator.data
url_schema = (
"https"
if coordinator.config_entry.data[SECTION_ADVANCED_SETTINGS][CONF_SSL]
else "http"
)

configuration_url: str | None = (
f"{url_schema}://{coordinator.config_entry.data[CONF_HOST]}"
f"https://{coordinator.config_entry.data[CONF_HOST]}"
)

self._attr_device_info = DeviceInfo(
@@ -6,5 +6,5 @@
"documentation": "https://www.home-assistant.io/integrations/airos",
"iot_class": "local_polling",
"quality_scale": "bronze",
"requirements": ["airos==0.5.3"]
"requirements": ["airos==0.5.1"]
}
@@ -2,14 +2,6 @@
"config": {
"flow_title": "Ubiquiti airOS device",
"step": {
"reauth_confirm": {
"data": {
"password": "[%key:common::config_flow::data::password%]"
},
"data_description": {
"password": "[%key:component::airos::config::step::user::data_description::password%]"
}
},
"user": {
"data": {
"host": "[%key:common::config_flow::data::host%]",

@@ -20,18 +12,6 @@
"host": "IP address or hostname of the airOS device",
"username": "Administrator username for the airOS device, normally 'ubnt'",
"password": "Password configured through the UISP app or web interface"
},
"sections": {
"advanced_settings": {
"data": {
"ssl": "Use HTTPS",
"verify_ssl": "[%key:common::config_flow::data::verify_ssl%]"
},
"data_description": {
"ssl": "Whether the connection should be encrypted (required for most devices)",
"verify_ssl": "Whether the certificate should be verified when using HTTPS. This should be off for self-signed certificates"
}
}
}
}
}
},

@@ -42,9 +22,7 @@
"unknown": "[%key:common::config_flow::error::unknown%]"
},
"abort": {
"already_configured": "[%key:common::config_flow::abort::already_configured_device%]",
"reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]",
"unique_id_mismatch": "Re-authentication should be used for the same device not a new one"
"already_configured": "[%key:common::config_flow::abort::already_configured_device%]"
}
},
"entity": {
@@ -75,6 +75,13 @@ async def async_setup_entry(
"detectionState",
)

async_add_entities(
AmazonBinarySensorEntity(coordinator, serial_num, sensor_desc)
for sensor_desc in BINARY_SENSORS
for serial_num in coordinator.data
if sensor_desc.is_supported(coordinator.data[serial_num], sensor_desc.key)
)

known_devices: set[str] = set()

def _check_device() -> None:
@@ -1308,9 +1308,7 @@ class PipelineRun:
# instead of a full response.
all_targets_in_satellite_area = (
self._get_all_targets_in_satellite_area(
conversation_result.response,
self._satellite_id,
self._device_id,
conversation_result.response, self._device_id
)
)

@@ -1339,62 +1337,39 @@ class PipelineRun:
return (speech, all_targets_in_satellite_area)

def _get_all_targets_in_satellite_area(
self,
intent_response: intent.IntentResponse,
satellite_id: str | None,
device_id: str | None,
self, intent_response: intent.IntentResponse, device_id: str | None
) -> bool:
"""Return true if all targeted entities were in the same area as the device."""
if (
intent_response.response_type != intent.IntentResponseType.ACTION_DONE
or not intent_response.matched_states
(intent_response.response_type != intent.IntentResponseType.ACTION_DONE)
or (not intent_response.matched_states)
or (not device_id)
):
return False

device_registry = dr.async_get(self.hass)

if (not (device := device_registry.async_get(device_id))) or (
not device.area_id
):
return False

entity_registry = er.async_get(self.hass)
device_registry = dr.async_get(self.hass)

area_id: str | None = None

if (
satellite_id is not None
and (target_entity_entry := entity_registry.async_get(satellite_id))
is not None
):
area_id = target_entity_entry.area_id
device_id = target_entity_entry.device_id

if area_id is None:
if device_id is None:
return False

device_entry = device_registry.async_get(device_id)
if device_entry is None:
return False

area_id = device_entry.area_id
if area_id is None:
return False

for state in intent_response.matched_states:
target_entity_entry = entity_registry.async_get(state.entity_id)
if target_entity_entry is None:
entity = entity_registry.async_get(state.entity_id)
if not entity:
return False

target_area_id = target_entity_entry.area_id
if target_area_id is None:
if target_entity_entry.device_id is None:
if (entity_area_id := entity.area_id) is None:
if (entity.device_id is None) or (
(entity_device := device_registry.async_get(entity.device_id))
is None
):
return False

target_device_entry = device_registry.async_get(
target_entity_entry.device_id
)
if target_device_entry is None:
return False
entity_area_id = entity_device.area_id

target_area_id = target_device_entry.area_id

if target_area_id != area_id:
if entity_area_id != device.area_id:
return False

return True
@@ -69,9 +69,7 @@ class EcovacsMap(
await super().async_added_to_hass()

async def on_info(event: CachedMapInfoEvent) -> None:
for map_obj in event.maps:
if map_obj.using:
self._attr_extra_state_attributes["map_name"] = map_obj.name
self._attr_extra_state_attributes["map_name"] = event.name

async def on_changed(event: MapChangedEvent) -> None:
self._attr_image_last_updated = event.when
@@ -6,5 +6,5 @@
"documentation": "https://www.home-assistant.io/integrations/ecovacs",
"iot_class": "cloud_push",
"loggers": ["sleekxmppfs", "sucks", "deebot_client"],
"requirements": ["py-sucks==0.9.11", "deebot-client==15.0.0"]
"requirements": ["py-sucks==0.9.11", "deebot-client==14.0.0"]
}
@@ -3,15 +3,14 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from datetime import timedelta
|
||||
from enum import IntEnum
|
||||
import logging
|
||||
from typing import Any
|
||||
|
||||
from pyephember2.pyephember2 import (
|
||||
EphEmber,
|
||||
ZoneMode,
|
||||
boiler_state,
|
||||
zone_current_temperature,
|
||||
zone_is_active,
|
||||
zone_is_hotwater,
|
||||
zone_mode,
|
||||
zone_name,
|
||||
@@ -54,15 +53,6 @@ EPH_TO_HA_STATE = {
|
||||
"OFF": HVACMode.OFF,
|
||||
}
|
||||
|
||||
|
||||
class EPHBoilerStates(IntEnum):
|
||||
"""Boiler states for a zone given by the api."""
|
||||
|
||||
FIXME = 0
|
||||
OFF = 1
|
||||
ON = 2
|
||||
|
||||
|
||||
HA_STATE_TO_EPH = {value: key for key, value in EPH_TO_HA_STATE.items()}
|
||||
|
||||
|
||||
@@ -133,7 +123,7 @@ class EphEmberThermostat(ClimateEntity):
|
||||
@property
|
||||
def hvac_action(self) -> HVACAction:
|
||||
"""Return current HVAC action."""
|
||||
if boiler_state(self._zone) == EPHBoilerStates.ON:
|
||||
if zone_is_active(self._zone):
|
||||
return HVACAction.HEATING
|
||||
|
||||
return HVACAction.IDLE
|
||||
|
@@ -17,7 +17,7 @@
"mqtt": ["esphome/discover/#"],
"quality_scale": "platinum",
"requirements": [
"aioesphomeapi==41.11.0",
"aioesphomeapi==41.10.0",
"esphome-dashboard-api==1.3.0",
"bleak-esphome==3.3.0"
],
@@ -26,14 +26,11 @@ class EzvizEntity(CoordinatorEntity[EzvizDataUpdateCoordinator], Entity):
|
||||
super().__init__(coordinator)
|
||||
self._serial = serial
|
||||
self._camera_name = self.data["name"]
|
||||
|
||||
connections = set()
|
||||
if mac_address := self.data["mac_address"]:
|
||||
connections.add((CONNECTION_NETWORK_MAC, mac_address))
|
||||
|
||||
self._attr_device_info = DeviceInfo(
|
||||
identifiers={(DOMAIN, serial)},
|
||||
connections=connections,
|
||||
connections={
|
||||
(CONNECTION_NETWORK_MAC, self.data["mac_address"]),
|
||||
},
|
||||
manufacturer=MANUFACTURER,
|
||||
model=self.data["device_sub_category"],
|
||||
name=self.data["name"],
|
||||
@@ -65,14 +62,11 @@ class EzvizBaseEntity(Entity):
|
||||
self._serial = serial
|
||||
self.coordinator = coordinator
|
||||
self._camera_name = self.data["name"]
|
||||
|
||||
connections = set()
|
||||
if mac_address := self.data["mac_address"]:
|
||||
connections.add((CONNECTION_NETWORK_MAC, mac_address))
|
||||
|
||||
self._attr_device_info = DeviceInfo(
|
||||
identifiers={(DOMAIN, serial)},
|
||||
connections=connections,
|
||||
connections={
|
||||
(CONNECTION_NETWORK_MAC, self.data["mac_address"]),
|
||||
},
|
||||
manufacturer=MANUFACTURER,
|
||||
model=self.data["device_sub_category"],
|
||||
name=self.data["name"],
|
||||
|
@@ -20,5 +20,5 @@
"documentation": "https://www.home-assistant.io/integrations/frontend",
"integration_type": "system",
"quality_scale": "internal",
"requirements": ["home-assistant-frontend==20250926.0"]
"requirements": ["home-assistant-frontend==20250925.1"]
}
@@ -108,7 +108,6 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
||||
entry,
|
||||
options={**entry.options, CONF_HUMIDIFIER: source_entity_id},
|
||||
)
|
||||
hass.config_entries.async_schedule_reload(entry.entry_id)
|
||||
|
||||
entry.async_on_unload(
|
||||
# We use async_handle_source_entity_changes to track changes to the humidifer,
|
||||
@@ -141,7 +140,6 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
||||
entry,
|
||||
options={**entry.options, CONF_SENSOR: data["entity_id"]},
|
||||
)
|
||||
hass.config_entries.async_schedule_reload(entry.entry_id)
|
||||
|
||||
entry.async_on_unload(
|
||||
async_track_entity_registry_updated_event(
|
||||
@@ -150,6 +148,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
||||
)
|
||||
|
||||
await hass.config_entries.async_forward_entry_setups(entry, (Platform.HUMIDIFIER,))
|
||||
entry.async_on_unload(entry.add_update_listener(config_entry_update_listener))
|
||||
return True
|
||||
|
||||
|
||||
@@ -187,6 +186,11 @@ async def async_migrate_entry(hass: HomeAssistant, config_entry: ConfigEntry) ->
|
||||
return True
|
||||
|
||||
|
||||
async def config_entry_update_listener(hass: HomeAssistant, entry: ConfigEntry) -> None:
|
||||
"""Update listener, called when the config entry options are changed."""
|
||||
await hass.config_entries.async_reload(entry.entry_id)
|
||||
|
||||
|
||||
async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
||||
"""Unload a config entry."""
|
||||
return await hass.config_entries.async_unload_platforms(
|
||||
|
@@ -96,7 +96,6 @@ class ConfigFlowHandler(SchemaConfigFlowHandler, domain=DOMAIN):
|
||||
|
||||
config_flow = CONFIG_FLOW
|
||||
options_flow = OPTIONS_FLOW
|
||||
options_flow_reloads = True
|
||||
|
||||
def async_config_entry_title(self, options: Mapping[str, Any]) -> str:
|
||||
"""Return config entry title."""
|
||||
|
@@ -77,10 +77,10 @@ class GeniusDevice(GeniusEntity):

async def async_update(self) -> None:
"""Update an entity's state data."""
if (state := self._device.data.get("_state")) and (
last_comms := state.get("lastComms")
) is not None: # only via v3 API
self._last_comms = dt_util.utc_from_timestamp(last_comms)
if "_state" in self._device.data: # only via v3 API
self._last_comms = dt_util.utc_from_timestamp(
self._device.data["_state"]["lastComms"]
)


class GeniusZone(GeniusEntity):
@@ -27,6 +27,12 @@
|
||||
"install_addon": {
|
||||
"title": "[%key:component::homeassistant_hardware::silabs_multiprotocol_hardware::options::step::install_addon::title%]"
|
||||
},
|
||||
"install_thread_firmware": {
|
||||
"title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::install_thread_firmware::title%]"
|
||||
},
|
||||
"install_zigbee_firmware": {
|
||||
"title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::install_zigbee_firmware::title%]"
|
||||
},
|
||||
"notify_channel_change": {
|
||||
"title": "[%key:component::homeassistant_hardware::silabs_multiprotocol_hardware::options::step::notify_channel_change::title%]",
|
||||
"description": "[%key:component::homeassistant_hardware::silabs_multiprotocol_hardware::options::step::notify_channel_change::description%]"
|
||||
@@ -69,12 +75,10 @@
|
||||
"description": "[%key:component::homeassistant_hardware::firmware_picker::options::step::confirm_zigbee::description%]"
|
||||
},
|
||||
"install_otbr_addon": {
|
||||
"title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::install_otbr_addon::title%]",
|
||||
"description": "[%key:component::homeassistant_hardware::firmware_picker::options::step::install_otbr_addon::description%]"
|
||||
"title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::install_otbr_addon::title%]"
|
||||
},
|
||||
"start_otbr_addon": {
|
||||
"title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::start_otbr_addon::title%]",
|
||||
"description": "[%key:component::homeassistant_hardware::firmware_picker::options::step::start_otbr_addon::description%]"
|
||||
"title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::start_otbr_addon::title%]"
|
||||
},
|
||||
"otbr_failed": {
|
||||
"title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::otbr_failed::title%]",
|
||||
@@ -129,14 +133,21 @@
|
||||
},
|
||||
"progress": {
|
||||
"install_addon": "[%key:component::homeassistant_hardware::silabs_multiprotocol_hardware::options::progress::install_addon%]",
|
||||
"install_firmware": "[%key:component::homeassistant_hardware::firmware_picker::options::progress::install_firmware%]",
|
||||
"install_otbr_addon": "[%key:component::homeassistant_hardware::firmware_picker::options::progress::install_otbr_addon%]",
|
||||
"start_addon": "[%key:component::homeassistant_hardware::silabs_multiprotocol_hardware::options::progress::start_addon%]",
|
||||
"start_otbr_addon": "[%key:component::homeassistant_hardware::silabs_multiprotocol_hardware::options::progress::start_addon%]",
|
||||
"install_firmware": "[%key:component::homeassistant_hardware::firmware_picker::options::progress::install_firmware%]"
|
||||
"start_otbr_addon": "[%key:component::homeassistant_hardware::firmware_picker::options::progress::start_otbr_addon%]"
|
||||
}
|
||||
},
|
||||
"config": {
|
||||
"flow_title": "{model}",
|
||||
"step": {
|
||||
"install_thread_firmware": {
|
||||
"title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::install_thread_firmware::title%]"
|
||||
},
|
||||
"install_zigbee_firmware": {
|
||||
"title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::install_zigbee_firmware::title%]"
|
||||
},
|
||||
"pick_firmware": {
|
||||
"title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::pick_firmware::title%]",
|
||||
"description": "[%key:component::homeassistant_hardware::firmware_picker::options::step::pick_firmware::description%]",
|
||||
@@ -158,12 +169,10 @@
|
||||
"description": "[%key:component::homeassistant_hardware::firmware_picker::options::step::confirm_zigbee::description%]"
|
||||
},
|
||||
"install_otbr_addon": {
|
||||
"title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::install_otbr_addon::title%]",
|
||||
"description": "[%key:component::homeassistant_hardware::firmware_picker::options::step::install_otbr_addon::description%]"
|
||||
"title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::install_otbr_addon::title%]"
|
||||
},
|
||||
"start_otbr_addon": {
|
||||
"title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::start_otbr_addon::title%]",
|
||||
"description": "[%key:component::homeassistant_hardware::firmware_picker::options::step::start_otbr_addon::description%]"
|
||||
"title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::start_otbr_addon::title%]"
|
||||
},
|
||||
"otbr_failed": {
|
||||
"title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::otbr_failed::title%]",
|
||||
@@ -215,9 +224,10 @@
|
||||
},
|
||||
"progress": {
|
||||
"install_addon": "[%key:component::homeassistant_hardware::silabs_multiprotocol_hardware::options::progress::install_addon%]",
|
||||
"install_firmware": "[%key:component::homeassistant_hardware::firmware_picker::options::progress::install_firmware%]",
|
||||
"install_otbr_addon": "[%key:component::homeassistant_hardware::firmware_picker::options::progress::install_otbr_addon%]",
|
||||
"start_addon": "[%key:component::homeassistant_hardware::silabs_multiprotocol_hardware::options::progress::start_addon%]",
|
||||
"start_otbr_addon": "[%key:component::homeassistant_hardware::silabs_multiprotocol_hardware::options::progress::start_addon%]",
|
||||
"install_firmware": "[%key:component::homeassistant_hardware::firmware_picker::options::progress::install_firmware%]"
|
||||
"start_otbr_addon": "[%key:component::homeassistant_hardware::firmware_picker::options::progress::start_otbr_addon%]"
|
||||
}
|
||||
},
|
||||
"exceptions": {
|
||||
|
@@ -23,12 +23,16 @@
|
||||
"description": "Your {model} is now a Zigbee coordinator and will be shown as discovered by the Zigbee Home Automation integration."
|
||||
},
|
||||
"install_otbr_addon": {
|
||||
"title": "Installing OpenThread Border Router add-on",
|
||||
"description": "The OpenThread Border Router (OTBR) add-on is being installed."
|
||||
"title": "Configuring Thread"
|
||||
},
|
||||
"install_thread_firmware": {
|
||||
"title": "Updating adapter"
|
||||
},
|
||||
"install_zigbee_firmware": {
|
||||
"title": "Updating adapter"
|
||||
},
|
||||
"start_otbr_addon": {
|
||||
"title": "Starting OpenThread Border Router add-on",
|
||||
"description": "The OpenThread Border Router (OTBR) add-on is now starting."
|
||||
"title": "Configuring Thread"
|
||||
},
|
||||
"otbr_failed": {
|
||||
"title": "Failed to set up OpenThread Border Router",
|
||||
@@ -72,7 +76,9 @@
|
||||
"fw_install_failed": "{firmware_name} firmware failed to install, check Home Assistant logs for more information."
|
||||
},
|
||||
"progress": {
|
||||
"install_firmware": "Please wait while {firmware_name} firmware is installed to your {model}, this will take a few minutes. Do not make any changes to your hardware or software until this finishes."
|
||||
"install_firmware": "Installing {firmware_name} firmware. Do not make any changes to your hardware or software until this finishes.",
|
||||
"install_otbr_addon": "Installing add-on",
|
||||
"start_otbr_addon": "Starting add-on"
|
||||
}
|
||||
}
|
||||
},
|
||||
|
@@ -27,6 +27,12 @@
|
||||
"install_addon": {
|
||||
"title": "[%key:component::homeassistant_hardware::silabs_multiprotocol_hardware::options::step::install_addon::title%]"
|
||||
},
|
||||
"install_thread_firmware": {
|
||||
"title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::install_thread_firmware::title%]"
|
||||
},
|
||||
"install_zigbee_firmware": {
|
||||
"title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::install_zigbee_firmware::title%]"
|
||||
},
|
||||
"notify_channel_change": {
|
||||
"title": "[%key:component::homeassistant_hardware::silabs_multiprotocol_hardware::options::step::notify_channel_change::title%]",
|
||||
"description": "[%key:component::homeassistant_hardware::silabs_multiprotocol_hardware::options::step::notify_channel_change::description%]"
|
||||
@@ -69,12 +75,10 @@
|
||||
"description": "[%key:component::homeassistant_hardware::firmware_picker::options::step::confirm_zigbee::description%]"
|
||||
},
|
||||
"install_otbr_addon": {
|
||||
"title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::install_otbr_addon::title%]",
|
||||
"description": "[%key:component::homeassistant_hardware::firmware_picker::options::step::install_otbr_addon::description%]"
|
||||
"title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::install_otbr_addon::title%]"
|
||||
},
|
||||
"start_otbr_addon": {
|
||||
"title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::start_otbr_addon::title%]",
|
||||
"description": "[%key:component::homeassistant_hardware::firmware_picker::options::step::start_otbr_addon::description%]"
|
||||
"title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::start_otbr_addon::title%]"
|
||||
},
|
||||
"otbr_failed": {
|
||||
"title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::otbr_failed::title%]",
|
||||
@@ -129,9 +133,10 @@
|
||||
},
|
||||
"progress": {
|
||||
"install_addon": "[%key:component::homeassistant_hardware::silabs_multiprotocol_hardware::options::progress::install_addon%]",
|
||||
"install_firmware": "[%key:component::homeassistant_hardware::firmware_picker::options::progress::install_firmware%]",
|
||||
"install_otbr_addon": "[%key:component::homeassistant_hardware::firmware_picker::options::progress::install_otbr_addon%]",
|
||||
"start_addon": "[%key:component::homeassistant_hardware::silabs_multiprotocol_hardware::options::progress::start_addon%]",
|
||||
"start_otbr_addon": "[%key:component::homeassistant_hardware::silabs_multiprotocol_hardware::options::progress::start_addon%]",
|
||||
"install_firmware": "[%key:component::homeassistant_hardware::firmware_picker::options::progress::install_firmware%]"
|
||||
"start_otbr_addon": "[%key:component::homeassistant_hardware::firmware_picker::options::progress::start_otbr_addon%]"
|
||||
}
|
||||
},
|
||||
"config": {
|
||||
@@ -158,12 +163,16 @@
|
||||
"description": "[%key:component::homeassistant_hardware::firmware_picker::options::step::confirm_zigbee::description%]"
|
||||
},
|
||||
"install_otbr_addon": {
|
||||
"title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::install_otbr_addon::title%]",
|
||||
"description": "[%key:component::homeassistant_hardware::firmware_picker::options::step::install_otbr_addon::description%]"
|
||||
"title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::install_otbr_addon::title%]"
|
||||
},
|
||||
"install_thread_firmware": {
|
||||
"title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::install_thread_firmware::title%]"
|
||||
},
|
||||
"install_zigbee_firmware": {
|
||||
"title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::install_zigbee_firmware::title%]"
|
||||
},
|
||||
"start_otbr_addon": {
|
||||
"title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::start_otbr_addon::title%]",
|
||||
"description": "[%key:component::homeassistant_hardware::firmware_picker::options::step::start_otbr_addon::description%]"
|
||||
"title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::start_otbr_addon::title%]"
|
||||
},
|
||||
"otbr_failed": {
|
||||
"title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::otbr_failed::title%]",
|
||||
@@ -215,9 +224,10 @@
|
||||
},
|
||||
"progress": {
|
||||
"install_addon": "[%key:component::homeassistant_hardware::silabs_multiprotocol_hardware::options::progress::install_addon%]",
|
||||
"install_firmware": "[%key:component::homeassistant_hardware::firmware_picker::options::progress::install_firmware%]",
|
||||
"install_otbr_addon": "[%key:component::homeassistant_hardware::firmware_picker::options::progress::install_otbr_addon%]",
|
||||
"start_addon": "[%key:component::homeassistant_hardware::silabs_multiprotocol_hardware::options::progress::start_addon%]",
|
||||
"start_otbr_addon": "[%key:component::homeassistant_hardware::silabs_multiprotocol_hardware::options::progress::start_addon%]",
|
||||
"install_firmware": "[%key:component::homeassistant_hardware::firmware_picker::options::progress::install_firmware%]"
|
||||
"start_otbr_addon": "[%key:component::homeassistant_hardware::firmware_picker::options::progress::start_otbr_addon%]"
|
||||
}
|
||||
},
|
||||
"exceptions": {
|
||||
|
@@ -35,6 +35,12 @@
|
||||
"install_addon": {
|
||||
"title": "[%key:component::homeassistant_hardware::silabs_multiprotocol_hardware::options::step::install_addon::title%]"
|
||||
},
|
||||
"install_thread_firmware": {
|
||||
"title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::install_thread_firmware::title%]"
|
||||
},
|
||||
"install_zigbee_firmware": {
|
||||
"title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::install_zigbee_firmware::title%]"
|
||||
},
|
||||
"notify_channel_change": {
|
||||
"title": "[%key:component::homeassistant_hardware::silabs_multiprotocol_hardware::options::step::notify_channel_change::title%]",
|
||||
"description": "[%key:component::homeassistant_hardware::silabs_multiprotocol_hardware::options::step::notify_channel_change::description%]"
|
||||
@@ -92,12 +98,10 @@
|
||||
"description": "[%key:component::homeassistant_hardware::firmware_picker::options::step::confirm_zigbee::description%]"
|
||||
},
|
||||
"install_otbr_addon": {
|
||||
"title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::install_otbr_addon::title%]",
|
||||
"description": "[%key:component::homeassistant_hardware::firmware_picker::options::step::install_otbr_addon::description%]"
|
||||
"title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::install_otbr_addon::title%]"
|
||||
},
|
||||
"start_otbr_addon": {
|
||||
"title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::start_otbr_addon::title%]",
|
||||
"description": "[%key:component::homeassistant_hardware::firmware_picker::options::step::start_otbr_addon::description%]"
|
||||
"title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::start_otbr_addon::title%]"
|
||||
},
|
||||
"otbr_failed": {
|
||||
"title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::otbr_failed::title%]",
|
||||
@@ -154,9 +158,10 @@
|
||||
},
|
||||
"progress": {
|
||||
"install_addon": "[%key:component::homeassistant_hardware::silabs_multiprotocol_hardware::options::progress::install_addon%]",
|
||||
"install_firmware": "[%key:component::homeassistant_hardware::firmware_picker::options::progress::install_firmware%]",
|
||||
"install_otbr_addon": "[%key:component::homeassistant_hardware::firmware_picker::options::progress::install_otbr_addon%]",
|
||||
"start_addon": "[%key:component::homeassistant_hardware::silabs_multiprotocol_hardware::options::progress::start_addon%]",
|
||||
"start_otbr_addon": "[%key:component::homeassistant_hardware::silabs_multiprotocol_hardware::options::progress::start_addon%]",
|
||||
"install_firmware": "[%key:component::homeassistant_hardware::firmware_picker::options::progress::install_firmware%]"
|
||||
"start_otbr_addon": "[%key:component::homeassistant_hardware::firmware_picker::options::progress::start_otbr_addon%]"
|
||||
}
|
||||
},
|
||||
"entity": {
|
||||
|
@@ -3,9 +3,7 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
from email.message import Message
|
||||
import logging
|
||||
from typing import Any
|
||||
|
||||
from aioimaplib import IMAP4_SSL, AioImapException, Response
|
||||
import voluptuous as vol
|
||||
@@ -35,7 +33,6 @@ from .coordinator import (
|
||||
ImapPollingDataUpdateCoordinator,
|
||||
ImapPushDataUpdateCoordinator,
|
||||
connect_to_server,
|
||||
get_parts,
|
||||
)
|
||||
from .errors import InvalidAuth, InvalidFolder
|
||||
|
||||
@@ -43,7 +40,6 @@ PLATFORMS: list[Platform] = [Platform.SENSOR]
|
||||
|
||||
CONF_ENTRY = "entry"
|
||||
CONF_SEEN = "seen"
|
||||
CONF_PART = "part"
|
||||
CONF_UID = "uid"
|
||||
CONF_TARGET_FOLDER = "target_folder"
|
||||
|
||||
@@ -68,11 +64,6 @@ SERVICE_MOVE_SCHEMA = _SERVICE_UID_SCHEMA.extend(
|
||||
)
|
||||
SERVICE_DELETE_SCHEMA = _SERVICE_UID_SCHEMA
|
||||
SERVICE_FETCH_TEXT_SCHEMA = _SERVICE_UID_SCHEMA
|
||||
SERVICE_FETCH_PART_SCHEMA = _SERVICE_UID_SCHEMA.extend(
|
||||
{
|
||||
vol.Required(CONF_PART): cv.string,
|
||||
}
|
||||
)
|
||||
|
||||
type ImapConfigEntry = ConfigEntry[ImapDataUpdateCoordinator]
|
||||
|
||||
@@ -225,14 +216,12 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
||||
translation_placeholders={"error": str(exc)},
|
||||
) from exc
|
||||
raise_on_error(response, "fetch_failed")
|
||||
# Index 1 of of the response lines contains the bytearray with the message data
|
||||
message = ImapMessage(response.lines[1])
|
||||
await client.close()
|
||||
return {
|
||||
"text": message.text,
|
||||
"sender": message.sender,
|
||||
"subject": message.subject,
|
||||
"parts": get_parts(message.email_message),
|
||||
"uid": uid,
|
||||
}
|
||||
|
||||
@@ -244,73 +233,6 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
||||
supports_response=SupportsResponse.ONLY,
|
||||
)
|
||||
|
||||
async def async_fetch_part(call: ServiceCall) -> ServiceResponse:
|
||||
"""Process fetch email part service and return content."""
|
||||
|
||||
@callback
|
||||
def get_message_part(message: Message, part_key: str) -> Message:
|
||||
part: Message | Any = message
|
||||
for index in part_key.split(","):
|
||||
sub_parts = part.get_payload()
|
||||
try:
|
||||
assert isinstance(sub_parts, list)
|
||||
part = sub_parts[int(index)]
|
||||
except (AssertionError, ValueError, IndexError) as exc:
|
||||
raise ServiceValidationError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="invalid_part_index",
|
||||
) from exc
|
||||
|
||||
return part
|
||||
|
||||
entry_id: str = call.data[CONF_ENTRY]
|
||||
uid: str = call.data[CONF_UID]
|
||||
part_key: str = call.data[CONF_PART]
|
||||
_LOGGER.debug(
|
||||
"Fetch part %s for message %s. Entry: %s",
|
||||
part_key,
|
||||
uid,
|
||||
entry_id,
|
||||
)
|
||||
client = await async_get_imap_client(hass, entry_id)
|
||||
try:
|
||||
response = await client.fetch(uid, "BODY.PEEK[]")
|
||||
except (TimeoutError, AioImapException) as exc:
|
||||
raise ServiceValidationError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="imap_server_fail",
|
||||
translation_placeholders={"error": str(exc)},
|
||||
) from exc
|
||||
raise_on_error(response, "fetch_failed")
|
||||
# Index 1 of of the response lines contains the bytearray with the message data
|
||||
message = ImapMessage(response.lines[1])
|
||||
await client.close()
|
||||
part_data = get_message_part(message.email_message, part_key)
|
||||
part_data_content = part_data.get_payload(decode=False)
|
||||
try:
|
||||
assert isinstance(part_data_content, str)
|
||||
except AssertionError as exc:
|
||||
raise ServiceValidationError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="invalid_part_index",
|
||||
) from exc
|
||||
return {
|
||||
"part_data": part_data_content,
|
||||
"content_type": part_data.get_content_type(),
|
||||
"content_transfer_encoding": part_data.get("Content-Transfer-Encoding"),
|
||||
"filename": part_data.get_filename(),
|
||||
"part": part_key,
|
||||
"uid": uid,
|
||||
}
|
||||
|
||||
hass.services.async_register(
|
||||
DOMAIN,
|
||||
"fetch_part",
|
||||
async_fetch_part,
|
||||
SERVICE_FETCH_PART_SCHEMA,
|
||||
supports_response=SupportsResponse.ONLY,
|
||||
)
|
||||
|
||||
return True
|
||||
|
||||
|
||||
|
@@ -21,7 +21,7 @@ from homeassistant.const import (
|
||||
CONF_VERIFY_SSL,
|
||||
CONTENT_TYPE_TEXT_PLAIN,
|
||||
)
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import (
|
||||
ConfigEntryAuthFailed,
|
||||
ConfigEntryError,
|
||||
@@ -209,28 +209,6 @@ class ImapMessage:
|
||||
return str(self.email_message.get_payload())
|
||||
|
||||
|
||||
@callback
|
||||
def get_parts(message: Message, prefix: str | None = None) -> dict[str, Any]:
|
||||
"""Return information about the parts of a multipart message."""
|
||||
parts: dict[str, Any] = {}
|
||||
if not message.is_multipart():
|
||||
return {}
|
||||
for index, part in enumerate(message.get_payload(), 0):
|
||||
if TYPE_CHECKING:
|
||||
assert isinstance(part, Message)
|
||||
key = f"{prefix},{index}" if prefix else f"{index}"
|
||||
if part.is_multipart():
|
||||
parts |= get_parts(part, key)
|
||||
continue
|
||||
parts[key] = {"content_type": part.get_content_type()}
|
||||
if filename := part.get_filename():
|
||||
parts[key]["filename"] = filename
|
||||
if content_transfer_encoding := part.get("Content-Transfer-Encoding"):
|
||||
parts[key]["content_transfer_encoding"] = content_transfer_encoding
|
||||
|
||||
return parts
|
||||
|
||||
|
||||
class ImapDataUpdateCoordinator(DataUpdateCoordinator[int | None]):
|
||||
"""Base class for imap client."""
|
||||
|
||||
@@ -297,7 +275,6 @@ class ImapDataUpdateCoordinator(DataUpdateCoordinator[int | None]):
|
||||
"sender": message.sender,
|
||||
"subject": message.subject,
|
||||
"uid": last_message_uid,
|
||||
"parts": get_parts(message.email_message),
|
||||
}
|
||||
data.update({key: getattr(message, key) for key in self._event_data_keys})
|
||||
if self.custom_event_template is not None:
|
||||
|
@@ -21,9 +21,6 @@
|
||||
},
|
||||
"fetch": {
|
||||
"service": "mdi:email-sync-outline"
|
||||
},
|
||||
"fetch_part": {
|
||||
"service": "mdi:email-sync-outline"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@@ -56,22 +56,3 @@ fetch:
|
||||
example: "12"
|
||||
selector:
|
||||
text:
|
||||
|
||||
fetch_part:
|
||||
fields:
|
||||
entry:
|
||||
required: true
|
||||
selector:
|
||||
config_entry:
|
||||
integration: "imap"
|
||||
uid:
|
||||
required: true
|
||||
example: "12"
|
||||
selector:
|
||||
text:
|
||||
|
||||
part:
|
||||
required: true
|
||||
example: "0,1"
|
||||
selector:
|
||||
text:
|
||||
|
@@ -84,9 +84,6 @@
|
||||
"imap_server_fail": {
|
||||
"message": "The IMAP server failed to connect: {error}."
|
||||
},
|
||||
"invalid_part_index": {
|
||||
"message": "Invalid part index."
|
||||
},
|
||||
"seen_failed": {
|
||||
"message": "Marking message as seen failed with \"{error}\"."
|
||||
}
|
||||
@@ -151,24 +148,6 @@
|
||||
}
|
||||
}
|
||||
},
|
||||
"fetch_part": {
|
||||
"name": "Fetch message part",
|
||||
"description": "Fetches a message part or attachment from an email message.",
|
||||
"fields": {
|
||||
"entry": {
|
||||
"name": "[%key:component::imap::services::fetch::fields::entry::name%]",
|
||||
"description": "[%key:component::imap::services::fetch::fields::entry::description%]"
|
||||
},
|
||||
"uid": {
|
||||
"name": "[%key:component::imap::services::fetch::fields::uid::name%]",
|
||||
"description": "[%key:component::imap::services::fetch::fields::uid::description%]"
|
||||
},
|
||||
"part": {
|
||||
"name": "Part",
|
||||
"description": "The message part index."
|
||||
}
|
||||
}
|
||||
},
|
||||
"seen": {
|
||||
"name": "Mark message as seen",
|
||||
"description": "Marks an email as seen.",
|
||||
|
@@ -142,7 +142,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: LaMarzoccoConfigEntry) -
|
||||
)
|
||||
|
||||
coordinators = LaMarzoccoRuntimeData(
|
||||
LaMarzoccoConfigUpdateCoordinator(hass, entry, device, cloud_client),
|
||||
LaMarzoccoConfigUpdateCoordinator(hass, entry, device),
|
||||
LaMarzoccoSettingsUpdateCoordinator(hass, entry, device),
|
||||
LaMarzoccoScheduleUpdateCoordinator(hass, entry, device),
|
||||
LaMarzoccoStatisticsUpdateCoordinator(hass, entry, device),
|
||||
|
@@ -8,7 +8,7 @@ from datetime import timedelta
|
||||
import logging
|
||||
from typing import Any
|
||||
|
||||
from pylamarzocco import LaMarzoccoCloudClient, LaMarzoccoMachine
|
||||
from pylamarzocco import LaMarzoccoMachine
|
||||
from pylamarzocco.exceptions import AuthFail, RequestNotSuccessful
|
||||
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
@@ -19,7 +19,7 @@ from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, Upda
|
||||
|
||||
from .const import DOMAIN
|
||||
|
||||
SCAN_INTERVAL = timedelta(seconds=60)
|
||||
SCAN_INTERVAL = timedelta(seconds=15)
|
||||
SETTINGS_UPDATE_INTERVAL = timedelta(hours=8)
|
||||
SCHEDULE_UPDATE_INTERVAL = timedelta(minutes=30)
|
||||
STATISTICS_UPDATE_INTERVAL = timedelta(minutes=15)
|
||||
@@ -51,7 +51,6 @@ class LaMarzoccoUpdateCoordinator(DataUpdateCoordinator[None]):
|
||||
hass: HomeAssistant,
|
||||
entry: LaMarzoccoConfigEntry,
|
||||
device: LaMarzoccoMachine,
|
||||
cloud_client: LaMarzoccoCloudClient | None = None,
|
||||
) -> None:
|
||||
"""Initialize coordinator."""
|
||||
super().__init__(
|
||||
@@ -62,7 +61,6 @@ class LaMarzoccoUpdateCoordinator(DataUpdateCoordinator[None]):
|
||||
update_interval=self._default_update_interval,
|
||||
)
|
||||
self.device = device
|
||||
self.cloud_client = cloud_client
|
||||
|
||||
async def _async_update_data(self) -> None:
|
||||
"""Do the data update."""
|
||||
@@ -87,17 +85,11 @@ class LaMarzoccoUpdateCoordinator(DataUpdateCoordinator[None]):
|
||||
class LaMarzoccoConfigUpdateCoordinator(LaMarzoccoUpdateCoordinator):
|
||||
"""Class to handle fetching data from the La Marzocco API centrally."""
|
||||
|
||||
cloud_client: LaMarzoccoCloudClient
|
||||
|
||||
async def _internal_async_update_data(self) -> None:
|
||||
"""Fetch data from API endpoint."""
|
||||
|
||||
# ensure token stays valid; does nothing if token is still valid
|
||||
await self.cloud_client.async_get_access_token()
|
||||
|
||||
if self.device.websocket.connected:
|
||||
return
|
||||
|
||||
await self.device.get_dashboard()
|
||||
_LOGGER.debug("Current status: %s", self.device.dashboard.to_dict())
|
||||
|
||||
|
@@ -37,5 +37,5 @@
"iot_class": "cloud_push",
"loggers": ["pylamarzocco"],
"quality_scale": "platinum",
"requirements": ["pylamarzocco==2.1.1"]
"requirements": ["pylamarzocco==2.1.0"]
}
@@ -12,7 +12,7 @@ from homeassistant.components.number import (
|
||||
NumberEntityDescription,
|
||||
NumberMode,
|
||||
)
|
||||
from homeassistant.const import PRECISION_WHOLE, EntityCategory, UnitOfTime
|
||||
from homeassistant.const import PRECISION_WHOLE, EntityCategory
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
|
||||
@@ -72,7 +72,6 @@ NUMBERS: tuple[LetPotNumberEntityDescription, ...] = (
|
||||
LetPotNumberEntityDescription(
|
||||
key="plant_days",
|
||||
translation_key="plant_days",
|
||||
native_unit_of_measurement=UnitOfTime.DAYS,
|
||||
value_fn=lambda coordinator: coordinator.data.plant_days,
|
||||
set_value_fn=(
|
||||
lambda device_client, serial, value: device_client.set_plant_days(
|
||||
|
@@ -54,7 +54,8 @@
|
||||
"name": "Light brightness"
|
||||
},
|
||||
"plant_days": {
|
||||
"name": "Plants age"
|
||||
"name": "Plants age",
|
||||
"unit_of_measurement": "days"
|
||||
}
|
||||
},
|
||||
"select": {
|
||||
|
@@ -22,6 +22,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
||||
)
|
||||
|
||||
await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
|
||||
entry.async_on_unload(entry.add_update_listener(update_listener))
|
||||
|
||||
return True
|
||||
|
||||
@@ -29,3 +30,8 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
||||
async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
||||
"""Unload Local file config entry."""
|
||||
return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
|
||||
|
||||
|
||||
async def update_listener(hass: HomeAssistant, entry: ConfigEntry) -> None:
|
||||
"""Handle options update."""
|
||||
await hass.config_entries.async_reload(entry.entry_id)
|
||||
|
@@ -65,7 +65,6 @@ class LocalFileConfigFlowHandler(SchemaConfigFlowHandler, domain=DOMAIN):
|
||||
|
||||
config_flow = CONFIG_FLOW
|
||||
options_flow = OPTIONS_FLOW
|
||||
options_flow_reloads = True
|
||||
|
||||
def async_config_entry_title(self, options: Mapping[str, Any]) -> str:
|
||||
"""Return config entry title."""
|
||||
|
@@ -7,9 +7,8 @@ from aiomealie import MealieAuthenticationError, MealieClient, MealieConnectionE
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
|
||||
from homeassistant.const import CONF_API_TOKEN, CONF_HOST, CONF_PORT, CONF_VERIFY_SSL
|
||||
from homeassistant.const import CONF_API_TOKEN, CONF_HOST, CONF_VERIFY_SSL
|
||||
from homeassistant.helpers.aiohttp_client import async_get_clientsession
|
||||
from homeassistant.helpers.service_info.hassio import HassioServiceInfo
|
||||
|
||||
from .const import DOMAIN, LOGGER, MIN_REQUIRED_MEALIE_VERSION
|
||||
from .utils import create_version
|
||||
@@ -26,21 +25,13 @@ REAUTH_SCHEMA = vol.Schema(
|
||||
vol.Required(CONF_API_TOKEN): str,
|
||||
}
|
||||
)
|
||||
DISCOVERY_SCHEMA = vol.Schema(
|
||||
{
|
||||
vol.Required(CONF_API_TOKEN): str,
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
class MealieConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
"""Mealie config flow."""
|
||||
|
||||
VERSION = 1
|
||||
|
||||
host: str | None = None
|
||||
verify_ssl: bool = True
|
||||
_hassio_discovery: dict[str, Any] | None = None
|
||||
|
||||
async def check_connection(
|
||||
self, api_token: str
|
||||
@@ -152,59 +143,3 @@ class MealieConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
data_schema=USER_SCHEMA,
|
||||
errors=errors,
|
||||
)
|
||||
|
||||
async def async_step_hassio(
|
||||
self, discovery_info: HassioServiceInfo
|
||||
) -> ConfigFlowResult:
|
||||
"""Prepare configuration for a Mealie add-on.
|
||||
|
||||
This flow is triggered by the discovery component.
|
||||
"""
|
||||
await self._async_handle_discovery_without_unique_id()
|
||||
|
||||
self._hassio_discovery = discovery_info.config
|
||||
|
||||
return await self.async_step_hassio_confirm()
|
||||
|
||||
async def async_step_hassio_confirm(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
"""Confirm Supervisor discovery and prompt for API token."""
|
||||
if user_input is None:
|
||||
return await self._show_hassio_form()
|
||||
|
||||
assert self._hassio_discovery
|
||||
|
||||
self.host = (
|
||||
f"{self._hassio_discovery[CONF_HOST]}:{self._hassio_discovery[CONF_PORT]}"
|
||||
)
|
||||
self.verify_ssl = True
|
||||
|
||||
errors, user_id = await self.check_connection(
|
||||
user_input[CONF_API_TOKEN],
|
||||
)
|
||||
|
||||
if not errors:
|
||||
await self.async_set_unique_id(user_id)
|
||||
self._abort_if_unique_id_configured()
|
||||
return self.async_create_entry(
|
||||
title="Mealie",
|
||||
data={
|
||||
CONF_HOST: self.host,
|
||||
CONF_API_TOKEN: user_input[CONF_API_TOKEN],
|
||||
CONF_VERIFY_SSL: self.verify_ssl,
|
||||
},
|
||||
)
|
||||
return await self._show_hassio_form(errors)
|
||||
|
||||
async def _show_hassio_form(
|
||||
self, errors: dict[str, str] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
"""Show the Hass.io confirmation form to the user."""
|
||||
assert self._hassio_discovery
|
||||
return self.async_show_form(
|
||||
step_id="hassio_confirm",
|
||||
data_schema=DISCOVERY_SCHEMA,
|
||||
description_placeholders={"addon": self._hassio_discovery["addon"]},
|
||||
errors=errors or {},
|
||||
)
|
||||
|
@@ -39,14 +39,8 @@ rules:
# Gold
devices: done
diagnostics: done
discovery-update-info:
status: exempt
comment: |
This integration will only discover a Mealie addon that is local, not on the network.
discovery:
status: done
comment: |
The integration will discover a Mealie addon posting a discovery message.
discovery-update-info: todo
discovery: todo
docs-data-update: done
docs-examples: done
docs-known-limitations: todo
@@ -39,16 +39,6 @@
"api_token": "[%key:component::mealie::common::data_description_api_token%]",
"verify_ssl": "[%key:component::mealie::common::data_description_verify_ssl%]"
}
},
"hassio_confirm": {
"title": "Mealie via Home Assistant add-on",
"description": "Do you want to configure Home Assistant to connect to the Mealie instance provided by the add-on: {addon}?",
"data": {
"api_token": "[%key:common::config_flow::data::api_token%]"
},
"data_description": {
"api_token": "[%key:component::mealie::common::data_description_api_token%]"
}
}
},
"error": {
@@ -60,7 +50,6 @@
},
"abort": {
"already_configured": "[%key:common::config_flow::abort::already_configured_service%]",
"already_in_progress": "[%key:common::config_flow::abort::already_in_progress%]",
"reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]",
"reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]",
"wrong_account": "You have to use the same account that was used to configure the integration."
@@ -1,7 +1,8 @@
set_text_overlay:
target:
device:
integration: motioneye
entity:
domain: camera
integration: motioneye
fields:
left_text:
@@ -47,8 +48,9 @@ set_text_overlay:

action:
target:
device:
integration: motioneye
entity:
domain: camera
integration: motioneye
fields:
action:
@@ -86,6 +88,7 @@ action:

snapshot:
target:
entity:
domain: camera
device:
integration: motioneye
entity:
integration: motioneye
@@ -5,5 +5,5 @@
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/nibe_heatpump",
"iot_class": "local_polling",
"requirements": ["nibe==2.19.0"]
"requirements": ["nibe==2.18.0"]
}
@@ -11,7 +11,7 @@
"_r_to_u": "City/county (R-U)",
"_v_to_z": "City/county (V-Z)",
"slots": "Maximum warnings per city/county",
"headline_filter": "Headline blocklist"
"headline_filter": "Blacklist regex to filter warning headlines"
}
}
},
@@ -34,7 +34,7 @@
"_v_to_z": "[%key:component::nina::config::step::user::data::_v_to_z%]",
"slots": "[%key:component::nina::config::step::user::data::slots%]",
"headline_filter": "[%key:component::nina::config::step::user::data::headline_filter%]",
"area_filter": "Affected area filter"
"area_filter": "Whitelist regex to filter warnings based on affected areas"
}
}
},
@@ -13,6 +13,6 @@ NMAP_TRACKED_DEVICES: Final = "nmap_tracked_devices"
# Interval in minutes to exclude devices from a scan while they are home
CONF_HOME_INTERVAL: Final = "home_interval"
CONF_OPTIONS: Final = "scan_options"
DEFAULT_OPTIONS: Final = "-n -sn -PR -T4 --min-rate 10 --host-timeout 5s"
DEFAULT_OPTIONS: Final = "-F -T4 --min-rate 10 --host-timeout 5s"

TRACKER_SCAN_INTERVAL: Final = 120
@@ -5,14 +5,7 @@ from __future__ import annotations
|
||||
from pyportainer import Portainer
|
||||
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import (
|
||||
CONF_API_KEY,
|
||||
CONF_API_TOKEN,
|
||||
CONF_HOST,
|
||||
CONF_URL,
|
||||
CONF_VERIFY_SSL,
|
||||
Platform,
|
||||
)
|
||||
from homeassistant.const import CONF_API_KEY, CONF_HOST, Platform
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.aiohttp_client import async_create_clientsession
|
||||
|
||||
@@ -26,12 +19,11 @@ type PortainerConfigEntry = ConfigEntry[PortainerCoordinator]
|
||||
async def async_setup_entry(hass: HomeAssistant, entry: PortainerConfigEntry) -> bool:
|
||||
"""Set up Portainer from a config entry."""
|
||||
|
||||
session = async_create_clientsession(hass)
|
||||
client = Portainer(
|
||||
api_url=entry.data[CONF_URL],
|
||||
api_key=entry.data[CONF_API_TOKEN],
|
||||
session=async_create_clientsession(
|
||||
hass=hass, verify_ssl=entry.data[CONF_VERIFY_SSL]
|
||||
),
|
||||
api_url=entry.data[CONF_HOST],
|
||||
api_key=entry.data[CONF_API_KEY],
|
||||
session=session,
|
||||
)
|
||||
|
||||
coordinator = PortainerCoordinator(hass, entry, client)
|
||||
@@ -46,15 +38,3 @@ async def async_setup_entry(hass: HomeAssistant, entry: PortainerConfigEntry) ->
|
||||
async def async_unload_entry(hass: HomeAssistant, entry: PortainerConfigEntry) -> bool:
|
||||
"""Unload a config entry."""
|
||||
return await hass.config_entries.async_unload_platforms(entry, _PLATFORMS)
|
||||
|
||||
|
||||
async def async_migrate_entry(hass: HomeAssistant, entry: PortainerConfigEntry) -> bool:
|
||||
"""Migrate old entry."""
|
||||
|
||||
if entry.version < 2:
|
||||
data = dict(entry.data)
|
||||
data[CONF_URL] = data.pop(CONF_HOST)
|
||||
data[CONF_API_TOKEN] = data.pop(CONF_API_KEY)
|
||||
hass.config_entries.async_update_entry(entry=entry, data=data, version=2)
|
||||
|
||||
return True
|
||||
|
@@ -14,7 +14,7 @@ from pyportainer import (
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
|
||||
from homeassistant.const import CONF_API_TOKEN, CONF_URL, CONF_VERIFY_SSL
|
||||
from homeassistant.const import CONF_API_KEY, CONF_HOST
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import HomeAssistantError
|
||||
from homeassistant.helpers.aiohttp_client import async_get_clientsession
|
||||
@@ -24,9 +24,8 @@ from .const import DOMAIN
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
STEP_USER_DATA_SCHEMA = vol.Schema(
|
||||
{
|
||||
vol.Required(CONF_URL): str,
|
||||
vol.Required(CONF_API_TOKEN): str,
|
||||
vol.Optional(CONF_VERIFY_SSL, default=True): bool,
|
||||
vol.Required(CONF_HOST): str,
|
||||
vol.Required(CONF_API_KEY): str,
|
||||
}
|
||||
)
|
||||
|
||||
@@ -35,11 +34,9 @@ async def _validate_input(hass: HomeAssistant, data: dict[str, Any]) -> None:
|
||||
"""Validate the user input allows us to connect."""
|
||||
|
||||
client = Portainer(
|
||||
api_url=data[CONF_URL],
|
||||
api_key=data[CONF_API_TOKEN],
|
||||
session=async_get_clientsession(
|
||||
hass=hass, verify_ssl=data.get(CONF_VERIFY_SSL, True)
|
||||
),
|
||||
api_url=data[CONF_HOST],
|
||||
api_key=data[CONF_API_KEY],
|
||||
session=async_get_clientsession(hass),
|
||||
)
|
||||
try:
|
||||
await client.get_endpoints()
|
||||
@@ -50,21 +47,19 @@ async def _validate_input(hass: HomeAssistant, data: dict[str, Any]) -> None:
|
||||
except PortainerTimeoutError as err:
|
||||
raise PortainerTimeout from err
|
||||
|
||||
_LOGGER.debug("Connected to Portainer API: %s", data[CONF_URL])
|
||||
_LOGGER.debug("Connected to Portainer API: %s", data[CONF_HOST])
|
||||
|
||||
|
||||
class PortainerConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
"""Handle a config flow for Portainer."""
|
||||
|
||||
VERSION = 2
|
||||
|
||||
async def async_step_user(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
"""Handle the initial step."""
|
||||
errors: dict[str, str] = {}
|
||||
if user_input is not None:
|
||||
self._async_abort_entries_match({CONF_URL: user_input[CONF_URL]})
|
||||
self._async_abort_entries_match({CONF_HOST: user_input[CONF_HOST]})
|
||||
try:
|
||||
await _validate_input(self.hass, user_input)
|
||||
except CannotConnect:
|
||||
@@ -77,10 +72,10 @@ class PortainerConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
_LOGGER.exception("Unexpected exception")
|
||||
errors["base"] = "unknown"
|
||||
else:
|
||||
await self.async_set_unique_id(user_input[CONF_API_TOKEN])
|
||||
await self.async_set_unique_id(user_input[CONF_API_KEY])
|
||||
self._abort_if_unique_id_configured()
|
||||
return self.async_create_entry(
|
||||
title=user_input[CONF_URL], data=user_input
|
||||
title=user_input[CONF_HOST], data=user_input
|
||||
)
|
||||
|
||||
return self.async_show_form(
|
||||
|
@@ -16,7 +16,7 @@ from pyportainer.models.docker import DockerContainer
|
||||
from pyportainer.models.portainer import Endpoint
|
||||
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import CONF_URL
|
||||
from homeassistant.const import CONF_HOST
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import ConfigEntryError, ConfigEntryNotReady
|
||||
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
|
||||
@@ -87,7 +87,7 @@ class PortainerCoordinator(DataUpdateCoordinator[dict[int, PortainerCoordinatorD
|
||||
async def _async_update_data(self) -> dict[int, PortainerCoordinatorData]:
|
||||
"""Fetch data from Portainer API."""
|
||||
_LOGGER.debug(
|
||||
"Fetching data from Portainer API: %s", self.config_entry.data[CONF_URL]
|
||||
"Fetching data from Portainer API: %s", self.config_entry.data[CONF_HOST]
|
||||
)
|
||||
|
||||
try:
|
||||
|
@@ -3,16 +3,14 @@
|
||||
"step": {
|
||||
"user": {
|
||||
"data": {
|
||||
"url": "[%key:common::config_flow::data::url%]",
|
||||
"api_token": "[%key:common::config_flow::data::api_token%]",
|
||||
"verify_ssl": "[%key:common::config_flow::data::verify_ssl%]"
|
||||
"host": "[%key:common::config_flow::data::host%]",
|
||||
"api_key": "[%key:common::config_flow::data::api_key%]"
|
||||
},
|
||||
"data_description": {
|
||||
"url": "The URL, including the port, of your Portainer instance",
|
||||
"api_token": "The API access token for authenticating with Portainer",
|
||||
"verify_ssl": "Whether to verify SSL certificates. Disable only if you have a self-signed certificate"
|
||||
"host": "The host/URL, including the port, of your Portainer instance",
|
||||
"api_key": "The API key for authenticating with Portainer"
|
||||
},
|
||||
"description": "You can create an access token in the Portainer UI. Go to **My account > Access tokens** and select **Add access token**"
|
||||
"description": "You can create an API key in the Portainer UI. Go to **My account > API keys** and select **Add API key**"
|
||||
}
|
||||
},
|
||||
"error": {
|
||||
|
@@ -5,7 +5,7 @@ from __future__ import annotations
|
||||
import contextlib
|
||||
from dataclasses import dataclass, field
|
||||
import logging
|
||||
from typing import TYPE_CHECKING, Any, cast
|
||||
from typing import Any
|
||||
|
||||
from pysqueezebox import Player
|
||||
|
||||
@@ -14,6 +14,7 @@ from homeassistant.components.media_player import (
|
||||
BrowseError,
|
||||
BrowseMedia,
|
||||
MediaClass,
|
||||
MediaPlayerEntity,
|
||||
MediaType,
|
||||
)
|
||||
from homeassistant.core import HomeAssistant
|
||||
@@ -21,9 +22,6 @@ from homeassistant.helpers.network import is_internal_request
|
||||
|
||||
from .const import DOMAIN, UNPLAYABLE_TYPES
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from .media_player import SqueezeBoxMediaPlayerEntity
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
LIBRARY = [
|
||||
@@ -246,13 +244,14 @@ def _build_response_favorites(item: dict[str, Any]) -> BrowseMedia:
|
||||
def _get_item_thumbnail(
|
||||
item: dict[str, Any],
|
||||
player: Player,
|
||||
entity: SqueezeBoxMediaPlayerEntity,
|
||||
entity: MediaPlayerEntity,
|
||||
item_type: str | MediaType | None,
|
||||
search_type: str,
|
||||
internal_request: bool,
|
||||
known_apps_radios: set[str],
|
||||
) -> str | None:
|
||||
"""Construct path to thumbnail image."""
|
||||
item_thumbnail: str | None = None
|
||||
|
||||
track_id = item.get("artwork_track_id") or (
|
||||
item.get("id")
|
||||
@@ -263,27 +262,21 @@ def _get_item_thumbnail(
|
||||
|
||||
if track_id:
|
||||
if internal_request:
|
||||
return cast(str, player.generate_image_url_from_track_id(track_id))
|
||||
if item_type is not None:
|
||||
return entity.get_browse_image_url(item_type, item["id"], track_id)
|
||||
item_thumbnail = player.generate_image_url_from_track_id(track_id)
|
||||
elif item_type is not None:
|
||||
item_thumbnail = entity.get_browse_image_url(
|
||||
item_type, item["id"], track_id
|
||||
)
|
||||
|
||||
url = None
|
||||
content_type = item_type or "unknown"
|
||||
|
||||
if search_type in ["apps", "radios"]:
|
||||
url = cast(str, player.generate_image_url(item["icon"]))
|
||||
elif image_url := item.get("image_url"):
|
||||
url = image_url
|
||||
|
||||
if internal_request or not url:
|
||||
return url
|
||||
|
||||
synthetic_id = entity.get_synthetic_id_and_cache_url(url)
|
||||
return entity.get_browse_image_url(content_type, "synthetic", synthetic_id)
|
||||
elif search_type in ["apps", "radios"]:
|
||||
item_thumbnail = player.generate_image_url(item["icon"])
|
||||
if item_thumbnail is None:
|
||||
item_thumbnail = item.get("image_url") # will not be proxied by HA
|
||||
return item_thumbnail
|
||||
|
||||
|
||||
async def build_item_response(
|
||||
entity: SqueezeBoxMediaPlayerEntity,
|
||||
entity: MediaPlayerEntity,
|
||||
player: Player,
|
||||
payload: dict[str, str | None],
|
||||
browse_limit: int,
|
||||
|
@@ -8,7 +8,6 @@ import json
|
||||
import logging
|
||||
from typing import TYPE_CHECKING, Any, cast
|
||||
|
||||
from lru import LRU
|
||||
from pysqueezebox import Server, async_discover
|
||||
import voluptuous as vol
|
||||
|
||||
@@ -44,7 +43,6 @@ from homeassistant.helpers.dispatcher import async_dispatcher_connect
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
from homeassistant.helpers.start import async_at_start
|
||||
from homeassistant.util.dt import utcnow
|
||||
from homeassistant.util.ulid import ulid_now
|
||||
|
||||
from . import SQUEEZEBOX_HASS_DATA
|
||||
from .browse_media import (
|
||||
@@ -262,7 +260,6 @@ class SqueezeBoxMediaPlayerEntity(SqueezeboxEntity, MediaPlayerEntity):
|
||||
self._previous_media_position = 0
|
||||
self._attr_unique_id = format_mac(self._player.player_id)
|
||||
self._browse_data = BrowseData()
|
||||
self._synthetic_media_browser_thumbnail_items: LRU[str, str] = LRU(5000)
|
||||
|
||||
@callback
|
||||
def _handle_coordinator_update(self) -> None:
|
||||
@@ -745,17 +742,6 @@ class SqueezeBoxMediaPlayerEntity(SqueezeboxEntity, MediaPlayerEntity):
|
||||
await self._player.async_unsync()
|
||||
await self.coordinator.async_refresh()
|
||||
|
||||
def get_synthetic_id_and_cache_url(self, url: str) -> str:
|
||||
"""Cache a thumbnail URL and return a synthetic ID.
|
||||
|
||||
This enables us to proxy thumbnails for apps and favorites, as those do not have IDs.
|
||||
"""
|
||||
synthetic_id = f"s_{ulid_now()}"
|
||||
|
||||
self._synthetic_media_browser_thumbnail_items[synthetic_id] = url
|
||||
|
||||
return synthetic_id
|
||||
|
||||
async def async_browse_media(
|
||||
self,
|
||||
media_content_type: MediaType | str | None = None,
|
||||
@@ -799,21 +785,11 @@ class SqueezeBoxMediaPlayerEntity(SqueezeboxEntity, MediaPlayerEntity):
|
||||
media_image_id: str | None = None,
|
||||
) -> tuple[bytes | None, str | None]:
|
||||
"""Get album art from Squeezebox server."""
|
||||
if not media_image_id:
|
||||
return (None, None)
|
||||
|
||||
if media_content_id == "synthetic":
|
||||
image_url = self._synthetic_media_browser_thumbnail_items.get(
|
||||
media_image_id
|
||||
)
|
||||
|
||||
if image_url is None:
|
||||
_LOGGER.debug("Synthetic ID %s not found in cache", media_image_id)
|
||||
return (None, None)
|
||||
else:
|
||||
if media_image_id:
|
||||
image_url = self._player.generate_image_url_from_track_id(media_image_id)
|
||||
result = await self._async_fetch_image(image_url)
|
||||
if result == (None, None):
|
||||
_LOGGER.debug("Error retrieving proxied album art from %s", image_url)
|
||||
return result
|
||||
|
||||
result = await self._async_fetch_image(image_url)
|
||||
if result == (None, None):
|
||||
_LOGGER.debug("Error retrieving proxied album art from %s", image_url)
|
||||
return result
|
||||
return (None, None)
|
||||
|
@@ -100,7 +100,6 @@ PLATFORMS_BY_TYPE = {
|
||||
SupportedModels.RGBICWW_STRIP_LIGHT.value: [Platform.LIGHT, Platform.SENSOR],
|
||||
SupportedModels.PLUG_MINI_EU.value: [Platform.SWITCH, Platform.SENSOR],
|
||||
SupportedModels.RELAY_SWITCH_2PM.value: [Platform.SWITCH, Platform.SENSOR],
|
||||
SupportedModels.GARAGE_DOOR_OPENER.value: [Platform.COVER, Platform.SENSOR],
|
||||
}
|
||||
CLASS_BY_DEVICE = {
|
||||
SupportedModels.CEILING_LIGHT.value: switchbot.SwitchbotCeilingLight,
|
||||
@@ -134,7 +133,6 @@ CLASS_BY_DEVICE = {
|
||||
SupportedModels.RGBICWW_STRIP_LIGHT.value: switchbot.SwitchbotRgbicLight,
|
||||
SupportedModels.PLUG_MINI_EU.value: switchbot.SwitchbotRelaySwitch,
|
||||
SupportedModels.RELAY_SWITCH_2PM.value: switchbot.SwitchbotRelaySwitch2PM,
|
||||
SupportedModels.GARAGE_DOOR_OPENER.value: switchbot.SwitchbotGarageDoorOpener,
|
||||
}
|
||||
|
||||
|
||||
|
@@ -56,7 +56,6 @@ class SupportedModels(StrEnum):
|
||||
PLUG_MINI_EU = "plug_mini_eu"
|
||||
RELAY_SWITCH_2PM = "relay_switch_2pm"
|
||||
K11_PLUS_VACUUM = "k11+_vacuum"
|
||||
GARAGE_DOOR_OPENER = "garage_door_opener"
|
||||
|
||||
|
||||
CONNECTABLE_SUPPORTED_MODEL_TYPES = {
|
||||
@@ -92,7 +91,6 @@ CONNECTABLE_SUPPORTED_MODEL_TYPES = {
|
||||
SwitchbotModel.PLUG_MINI_EU: SupportedModels.PLUG_MINI_EU,
|
||||
SwitchbotModel.RELAY_SWITCH_2PM: SupportedModels.RELAY_SWITCH_2PM,
|
||||
SwitchbotModel.K11_VACUUM: SupportedModels.K11_PLUS_VACUUM,
|
||||
SwitchbotModel.GARAGE_DOOR_OPENER: SupportedModels.GARAGE_DOOR_OPENER,
|
||||
}
|
||||
|
||||
NON_CONNECTABLE_SUPPORTED_MODEL_TYPES = {
|
||||
@@ -128,7 +126,6 @@ ENCRYPTED_MODELS = {
|
||||
SwitchbotModel.RGBICWW_FLOOR_LAMP,
|
||||
SwitchbotModel.PLUG_MINI_EU,
|
||||
SwitchbotModel.RELAY_SWITCH_2PM,
|
||||
SwitchbotModel.GARAGE_DOOR_OPENER,
|
||||
}
|
||||
|
||||
ENCRYPTED_SWITCHBOT_MODEL_TO_CLASS: dict[
|
||||
@@ -149,7 +146,6 @@ ENCRYPTED_SWITCHBOT_MODEL_TO_CLASS: dict[
|
||||
SwitchbotModel.RGBICWW_FLOOR_LAMP: switchbot.SwitchbotRgbicLight,
|
||||
SwitchbotModel.PLUG_MINI_EU: switchbot.SwitchbotRelaySwitch,
|
||||
SwitchbotModel.RELAY_SWITCH_2PM: switchbot.SwitchbotRelaySwitch2PM,
|
||||
SwitchbotModel.GARAGE_DOOR_OPENER: switchbot.SwitchbotRelaySwitch,
|
||||
}
|
||||
|
||||
HASS_SENSOR_TYPE_TO_SWITCHBOT_MODEL = {
|
||||
|
@@ -35,9 +35,7 @@ async def async_setup_entry(
|
||||
) -> None:
|
||||
"""Set up Switchbot curtain based on a config entry."""
|
||||
coordinator = entry.runtime_data
|
||||
if isinstance(coordinator.device, switchbot.SwitchbotGarageDoorOpener):
|
||||
async_add_entities([SwitchbotGarageDoorOpenerEntity(coordinator)])
|
||||
elif isinstance(coordinator.device, switchbot.SwitchbotBlindTilt):
|
||||
if isinstance(coordinator.device, switchbot.SwitchbotBlindTilt):
|
||||
async_add_entities([SwitchBotBlindTiltEntity(coordinator)])
|
||||
elif isinstance(coordinator.device, switchbot.SwitchbotRollerShade):
|
||||
async_add_entities([SwitchBotRollerShadeEntity(coordinator)])
|
||||
@@ -297,30 +295,3 @@ class SwitchBotRollerShadeEntity(SwitchbotEntity, CoverEntity, RestoreEntity):
|
||||
self._attr_is_closed = self.parsed_data["position"] <= 20
|
||||
|
||||
self.async_write_ha_state()
|
||||
|
||||
|
||||
class SwitchbotGarageDoorOpenerEntity(SwitchbotEntity, CoverEntity):
|
||||
"""Representation of a Switchbot garage door."""
|
||||
|
||||
_device: switchbot.SwitchbotGarageDoorOpener
|
||||
_attr_device_class = CoverDeviceClass.GARAGE
|
||||
_attr_supported_features = CoverEntityFeature.OPEN | CoverEntityFeature.CLOSE
|
||||
_attr_translation_key = "garage_door"
|
||||
_attr_name = None
|
||||
|
||||
@property
|
||||
def is_closed(self) -> bool | None:
|
||||
"""Return true if cover is closed, else False."""
|
||||
return not self._device.door_open()
|
||||
|
||||
@exception_handler
|
||||
async def async_open_cover(self, **kwargs: Any) -> None:
|
||||
"""Open the garage door."""
|
||||
await self._device.open()
|
||||
self.async_write_ha_state()
|
||||
|
||||
@exception_handler
|
||||
async def async_close_cover(self, **kwargs: Any) -> None:
|
||||
"""Close the garage door."""
|
||||
await self._device.close()
|
||||
self.async_write_ha_state()
|
||||
|
@@ -56,6 +56,7 @@ async def async_setup_entry(
|
||||
entry.runtime_data = SystemMonitorData(coordinator, psutil_wrapper)
|
||||
|
||||
await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
|
||||
entry.async_on_unload(entry.add_update_listener(update_listener))
|
||||
return True
|
||||
|
||||
|
||||
@@ -66,6 +67,11 @@ async def async_unload_entry(
|
||||
return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
|
||||
|
||||
|
||||
async def update_listener(hass: HomeAssistant, entry: SystemMonitorConfigEntry) -> None:
|
||||
"""Handle options update."""
|
||||
await hass.config_entries.async_reload(entry.entry_id)
|
||||
|
||||
|
||||
async def async_migrate_entry(
|
||||
hass: HomeAssistant, entry: SystemMonitorConfigEntry
|
||||
) -> bool:
|
||||
|
@@ -92,8 +92,6 @@ class SystemMonitorConfigFlowHandler(SchemaConfigFlowHandler, domain=DOMAIN):
|
||||
|
||||
config_flow = CONFIG_FLOW
|
||||
options_flow = OPTIONS_FLOW
|
||||
options_flow_reloads = True
|
||||
|
||||
VERSION = 1
|
||||
MINOR_VERSION = 3
|
||||
|
||||
|
@@ -5,11 +5,7 @@ from __future__ import annotations
|
||||
from typing import Any
|
||||
|
||||
from homeassistant.components import onboarding
|
||||
from homeassistant.config_entries import (
|
||||
DEFAULT_DISCOVERY_UNIQUE_ID,
|
||||
ConfigFlow,
|
||||
ConfigFlowResult,
|
||||
)
|
||||
from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
|
||||
from homeassistant.helpers.service_info.zeroconf import ZeroconfServiceInfo
|
||||
|
||||
from .const import DOMAIN
|
||||
@@ -22,18 +18,14 @@ class ThreadConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
|
||||
async def async_step_import(self, import_data: None) -> ConfigFlowResult:
|
||||
"""Set up by import from async_setup."""
|
||||
await self.async_set_unique_id(
|
||||
DEFAULT_DISCOVERY_UNIQUE_ID, raise_on_progress=False
|
||||
)
|
||||
await self._async_handle_discovery_without_unique_id()
|
||||
return self.async_create_entry(title="Thread", data={})
|
||||
|
||||
async def async_step_user(
|
||||
self, user_input: dict[str, str] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
"""Set up by import from async_setup."""
|
||||
await self.async_set_unique_id(
|
||||
DEFAULT_DISCOVERY_UNIQUE_ID, raise_on_progress=False
|
||||
)
|
||||
await self._async_handle_discovery_without_unique_id()
|
||||
return self.async_create_entry(title="Thread", data={})
|
||||
|
||||
async def async_step_zeroconf(
|
||||
|
@@ -8,6 +8,5 @@
"integration_type": "service",
"iot_class": "local_polling",
"requirements": ["python-otbr-api==2.7.0", "pyroute2==0.7.5"],
"single_config_entry": true,
"zeroconf": ["_meshcop._udp.local."]
}
@@ -13,9 +13,16 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
||||
entry, (Platform.BINARY_SENSOR,)
|
||||
)
|
||||
|
||||
entry.async_on_unload(entry.add_update_listener(config_entry_update_listener))
|
||||
|
||||
return True
|
||||
|
||||
|
||||
async def config_entry_update_listener(hass: HomeAssistant, entry: ConfigEntry) -> None:
|
||||
"""Update listener, called when the config entry options are changed."""
|
||||
await hass.config_entries.async_reload(entry.entry_id)
|
||||
|
||||
|
||||
async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
||||
"""Unload a config entry."""
|
||||
return await hass.config_entries.async_unload_platforms(
|
||||
|
@@ -43,7 +43,6 @@ class ConfigFlowHandler(SchemaConfigFlowHandler, domain=DOMAIN):
|
||||
|
||||
config_flow = CONFIG_FLOW
|
||||
options_flow = OPTIONS_FLOW
|
||||
options_flow_reloads = True
|
||||
|
||||
def async_config_entry_title(self, options: Mapping[str, Any]) -> str:
|
||||
"""Return config entry title."""
|
||||
|
@@ -36,7 +36,6 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
||||
entry,
|
||||
options={**entry.options, CONF_ENTITY_ID: source_entity_id},
|
||||
)
|
||||
hass.config_entries.async_schedule_reload(entry.entry_id)
|
||||
|
||||
async def source_entity_removed() -> None:
|
||||
# The source entity has been removed, we remove the config entry because
|
||||
@@ -58,6 +57,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
||||
)
|
||||
|
||||
await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
|
||||
entry.async_on_unload(entry.add_update_listener(config_entry_update_listener))
|
||||
|
||||
return True
|
||||
|
||||
@@ -96,6 +96,11 @@ async def async_migrate_entry(hass: HomeAssistant, config_entry: ConfigEntry) ->
|
||||
return True
|
||||
|
||||
|
||||
async def config_entry_update_listener(hass: HomeAssistant, entry: ConfigEntry) -> None:
|
||||
"""Handle an Trend options update."""
|
||||
await hass.config_entries.async_reload(entry.entry_id)
|
||||
|
||||
|
||||
async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
||||
"""Unload a config entry."""
|
||||
return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
|
||||
|
@@ -110,7 +110,6 @@ class ConfigFlowHandler(SchemaConfigFlowHandler, domain=DOMAIN):
|
||||
options_flow = {
|
||||
"init": SchemaFlowFormStep(get_extended_options_schema),
|
||||
}
|
||||
options_flow_reloads = True
|
||||
|
||||
def async_config_entry_title(self, options: Mapping[str, Any]) -> str:
|
||||
"""Return config entry title."""
|
||||
|
@@ -37,12 +37,11 @@ async def async_setup_entry(
|
||||
known_devices: set[int] = set()
|
||||
|
||||
def _check_device() -> None:
|
||||
entities: list[UptimeRobotSensor] = []
|
||||
for monitor in coordinator.data:
|
||||
if monitor.id in known_devices:
|
||||
continue
|
||||
known_devices.add(monitor.id)
|
||||
entities.append(
|
||||
current_devices = {monitor.id for monitor in coordinator.data}
|
||||
new_devices = current_devices - known_devices
|
||||
if new_devices:
|
||||
known_devices.update(new_devices)
|
||||
async_add_entities(
|
||||
UptimeRobotSensor(
|
||||
coordinator,
|
||||
SensorEntityDescription(
|
||||
@@ -60,9 +59,9 @@ async def async_setup_entry(
|
||||
),
|
||||
monitor=monitor,
|
||||
)
|
||||
for monitor in coordinator.data
|
||||
if monitor.id in new_devices
|
||||
)
|
||||
if entities:
|
||||
async_add_entities(entities)
|
||||
|
||||
_check_device()
|
||||
entry.async_on_unload(coordinator.async_add_listener(_check_device))
|
||||
|
@@ -34,12 +34,11 @@ async def async_setup_entry(
|
||||
known_devices: set[int] = set()
|
||||
|
||||
def _check_device() -> None:
|
||||
entities: list[UptimeRobotSwitch] = []
|
||||
for monitor in coordinator.data:
|
||||
if monitor.id in known_devices:
|
||||
continue
|
||||
known_devices.add(monitor.id)
|
||||
entities.append(
|
||||
current_devices = {monitor.id for monitor in coordinator.data}
|
||||
new_devices = current_devices - known_devices
|
||||
if new_devices:
|
||||
known_devices.update(new_devices)
|
||||
async_add_entities(
|
||||
UptimeRobotSwitch(
|
||||
coordinator,
|
||||
SwitchEntityDescription(
|
||||
@@ -48,9 +47,9 @@ async def async_setup_entry(
|
||||
),
|
||||
monitor=monitor,
|
||||
)
|
||||
for monitor in coordinator.data
|
||||
if monitor.id in new_devices
|
||||
)
|
||||
if entities:
|
||||
async_add_entities(entities)
|
||||
|
||||
_check_device()
|
||||
entry.async_on_unload(coordinator.async_add_listener(_check_device))
|
||||
|
@@ -141,9 +141,7 @@ class VeSyncFanHA(VeSyncBaseEntity, FanEntity):
|
||||
attr["active_time"] = self.device.state.active_time
|
||||
|
||||
if hasattr(self.device.state, "display_status"):
|
||||
attr["display_status"] = getattr(
|
||||
self.device.state.display_status, "value", None
|
||||
)
|
||||
attr["display_status"] = self.device.state.display_status.value
|
||||
|
||||
if hasattr(self.device.state, "child_lock"):
|
||||
attr["child_lock"] = self.device.state.child_lock
|
||||
|
@@ -354,19 +354,13 @@ async def async_setup_entry(
|
||||
async_add_entities: AddConfigEntryEntitiesCallback,
|
||||
) -> None:
|
||||
"""Set up sensors."""
|
||||
|
||||
entities: dict[str, VolvoSensor] = {}
|
||||
coordinators = entry.runtime_data.interval_coordinators
|
||||
|
||||
for coordinator in coordinators:
|
||||
for description in _DESCRIPTIONS:
|
||||
if description.key in entities:
|
||||
continue
|
||||
|
||||
if description.api_field in coordinator.data:
|
||||
entities[description.key] = VolvoSensor(coordinator, description)
|
||||
|
||||
async_add_entities(entities.values())
|
||||
async_add_entities(
|
||||
VolvoSensor(coordinator, description)
|
||||
for coordinator in coordinators
|
||||
for description in _DESCRIPTIONS
|
||||
if description.api_field in coordinator.data
|
||||
)
|
||||
|
||||
|
||||
class VolvoSensor(VolvoEntity, SensorEntity):
|
||||
|
@@ -2,7 +2,7 @@
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from datetime import date, datetime, timedelta
|
||||
from datetime import datetime, timedelta
|
||||
|
||||
from holidays import HolidayBase
|
||||
|
||||
@@ -15,6 +15,8 @@ from . import WorkdayConfigEntry
|
||||
from .const import CONF_EXCLUDES, CONF_OFFSET, CONF_WORKDAYS
|
||||
from .entity import BaseWorkdayEntity
|
||||
|
||||
CALENDAR_DAYS_AHEAD = 365
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
hass: HomeAssistant,
|
||||
@@ -71,10 +73,8 @@ class WorkdayCalendarEntity(BaseWorkdayEntity, CalendarEntity):
|
||||
def update_data(self, now: datetime) -> None:
|
||||
"""Update data."""
|
||||
event_list = []
|
||||
start_date = date(now.year, 1, 1)
|
||||
end_number_of_days = date(now.year + 1, 12, 31) - start_date
|
||||
for i in range(end_number_of_days.days + 1):
|
||||
future_date = start_date + timedelta(days=i)
|
||||
for i in range(CALENDAR_DAYS_AHEAD):
|
||||
future_date = now.date() + timedelta(days=i)
|
||||
if self.date_is_workday(future_date):
|
||||
event = CalendarEvent(
|
||||
summary=self._name,
|
||||
|
@@ -155,7 +155,6 @@ def validate_custom_dates(user_input: dict[str, Any]) -> None:
|
||||
subdiv=province,
|
||||
years=year,
|
||||
language=language,
|
||||
categories=[PUBLIC, *user_input.get(CONF_CATEGORY, [])],
|
||||
)
|
||||
|
||||
else:
|
||||
|
@@ -376,10 +376,10 @@ class ZWaveJSConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
|
||||
new_addon_config = addon_config | config_updates
|
||||
|
||||
if new_addon_config.get(CONF_ADDON_DEVICE) is None:
|
||||
new_addon_config.pop(CONF_ADDON_DEVICE, None)
|
||||
if new_addon_config.get(CONF_ADDON_SOCKET) is None:
|
||||
new_addon_config.pop(CONF_ADDON_SOCKET, None)
|
||||
if not new_addon_config[CONF_ADDON_DEVICE]:
|
||||
new_addon_config.pop(CONF_ADDON_DEVICE)
|
||||
if not new_addon_config[CONF_ADDON_SOCKET]:
|
||||
new_addon_config.pop(CONF_ADDON_SOCKET)
|
||||
|
||||
if new_addon_config == addon_config:
|
||||
return
|
||||
@@ -1470,33 +1470,14 @@ class ZWaveJSConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
if not is_hassio(self.hass):
|
||||
return self.async_abort(reason="not_hassio")
|
||||
|
||||
if (
|
||||
discovery_info.zwave_home_id
|
||||
and (
|
||||
current_config_entries := self._async_current_entries(
|
||||
include_ignore=False
|
||||
)
|
||||
if discovery_info.zwave_home_id:
|
||||
await self.async_set_unique_id(str(discovery_info.zwave_home_id))
|
||||
self._abort_if_unique_id_configured(
|
||||
{
|
||||
CONF_USB_PATH: None,
|
||||
CONF_SOCKET_PATH: discovery_info.socket_path,
|
||||
}
|
||||
)
|
||||
and (home_id := str(discovery_info.zwave_home_id))
|
||||
and (
|
||||
existing_entry := next(
|
||||
(
|
||||
entry
|
||||
for entry in current_config_entries
|
||||
if entry.unique_id == home_id
|
||||
),
|
||||
None,
|
||||
)
|
||||
)
|
||||
# Only update existing entries that are configured via sockets
|
||||
and existing_entry.data.get(CONF_SOCKET_PATH)
|
||||
):
|
||||
await self._async_set_addon_config(
|
||||
{CONF_ADDON_SOCKET: discovery_info.socket_path}
|
||||
)
|
||||
# Reloading will sync add-on options to config entry data
|
||||
self.hass.config_entries.async_schedule_reload(existing_entry.entry_id)
|
||||
return self.async_abort(reason="already_configured")
|
||||
|
||||
self.socket_path = discovery_info.socket_path
|
||||
self.context["title_placeholders"] = {
|
||||
|
@@ -6807,8 +6807,7 @@
"name": "Thread",
"integration_type": "service",
"config_flow": true,
"iot_class": "local_polling",
"single_config_entry": true
"iot_class": "local_polling"
},
"tibber": {
"name": "Tibber",
@@ -1162,7 +1162,7 @@ class ObjectSelectorConfig(BaseSelectorConfig):
fields: dict[str, ObjectSelectorField]
multiple: bool
label_field: str
description_field: str
description_field: bool
translation_key: str
@@ -39,7 +39,7 @@ habluetooth==5.6.4
hass-nabucasa==1.1.2
hassil==3.2.0
home-assistant-bluetooth==1.13.1
home-assistant-frontend==20250926.0
home-assistant-frontend==20250925.1
home-assistant-intents==2025.9.24
httpx==0.28.1
ifaddr==0.2.0
12
requirements_all.txt
generated
@@ -247,7 +247,7 @@ aioelectricitymaps==1.1.1
aioemonitor==1.0.5

# homeassistant.components.esphome
aioesphomeapi==41.11.0
aioesphomeapi==41.10.0

# homeassistant.components.flo
aioflo==2021.11.0
@@ -453,7 +453,7 @@ airgradient==0.9.2
airly==1.1.0

# homeassistant.components.airos
airos==0.5.3
airos==0.5.1

# homeassistant.components.airthings_ble
airthings-ble==0.9.2
@@ -782,7 +782,7 @@ decora-wifi==1.4
# decora==0.6

# homeassistant.components.ecovacs
deebot-client==15.0.0
deebot-client==14.0.0

# homeassistant.components.ihc
# homeassistant.components.namecheapdns
@@ -1186,7 +1186,7 @@ hole==0.9.0
holidays==0.81

# homeassistant.components.frontend
home-assistant-frontend==20250926.0
home-assistant-frontend==20250925.1

# homeassistant.components.conversation
home-assistant-intents==2025.9.24
@@ -1545,7 +1545,7 @@ nextdns==4.1.0
nhc==0.4.12

# homeassistant.components.nibe_heatpump
nibe==2.19.0
nibe==2.18.0

# homeassistant.components.nice_go
nice-go==1.0.1
@@ -2132,7 +2132,7 @@ pykwb==0.0.8
pylacrosse==0.4

# homeassistant.components.lamarzocco
pylamarzocco==2.1.1
pylamarzocco==2.1.0

# homeassistant.components.lastfm
pylast==5.1.0
12
requirements_test_all.txt
generated
@@ -235,7 +235,7 @@ aioelectricitymaps==1.1.1
aioemonitor==1.0.5

# homeassistant.components.esphome
aioesphomeapi==41.11.0
aioesphomeapi==41.10.0

# homeassistant.components.flo
aioflo==2021.11.0
@@ -435,7 +435,7 @@ airgradient==0.9.2
airly==1.1.0

# homeassistant.components.airos
airos==0.5.3
airos==0.5.1

# homeassistant.components.airthings_ble
airthings-ble==0.9.2
@@ -682,7 +682,7 @@ debugpy==1.8.16
# decora==0.6

# homeassistant.components.ecovacs
deebot-client==15.0.0
deebot-client==14.0.0

# homeassistant.components.ihc
# homeassistant.components.namecheapdns
@@ -1035,7 +1035,7 @@ hole==0.9.0
holidays==0.81

# homeassistant.components.frontend
home-assistant-frontend==20250926.0
home-assistant-frontend==20250925.1

# homeassistant.components.conversation
home-assistant-intents==2025.9.24
@@ -1325,7 +1325,7 @@ nextdns==4.1.0
nhc==0.4.12

# homeassistant.components.nibe_heatpump
nibe==2.19.0
nibe==2.18.0

# homeassistant.components.nice_go
nice-go==1.0.1
@@ -1777,7 +1777,7 @@ pykrakenapi==0.1.8
pykulersky==0.5.8

# homeassistant.components.lamarzocco
pylamarzocco==2.1.1
pylamarzocco==2.1.0

# homeassistant.components.lastfm
pylast==5.1.0
@@ -1,7 +1,7 @@
|
||||
"""Common fixtures for the Ubiquiti airOS tests."""
|
||||
|
||||
from collections.abc import Generator
|
||||
from unittest.mock import AsyncMock, MagicMock, patch
|
||||
from unittest.mock import AsyncMock, patch
|
||||
|
||||
from airos.airos8 import AirOS8Data
|
||||
import pytest
|
||||
@@ -28,26 +28,22 @@ def mock_setup_entry() -> Generator[AsyncMock]:
|
||||
yield mock_setup_entry
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def mock_airos_class() -> Generator[MagicMock]:
|
||||
"""Fixture to mock the AirOS class itself."""
|
||||
with (
|
||||
patch("homeassistant.components.airos.AirOS8", autospec=True) as mock_class,
|
||||
patch("homeassistant.components.airos.config_flow.AirOS8", new=mock_class),
|
||||
patch("homeassistant.components.airos.coordinator.AirOS8", new=mock_class),
|
||||
):
|
||||
yield mock_class
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def mock_airos_client(
|
||||
mock_airos_class: MagicMock, ap_fixture: AirOS8Data
|
||||
request: pytest.FixtureRequest, ap_fixture: AirOS8Data
|
||||
) -> Generator[AsyncMock]:
|
||||
"""Fixture to mock the AirOS API client."""
|
||||
client = mock_airos_class.return_value
|
||||
client.status.return_value = ap_fixture
|
||||
client.login.return_value = True
|
||||
return client
|
||||
with (
|
||||
patch(
|
||||
"homeassistant.components.airos.config_flow.AirOS8", autospec=True
|
||||
) as mock_airos,
|
||||
patch("homeassistant.components.airos.coordinator.AirOS8", new=mock_airos),
|
||||
patch("homeassistant.components.airos.AirOS8", new=mock_airos),
|
||||
):
|
||||
client = mock_airos.return_value
|
||||
client.status.return_value = ap_fixture
|
||||
client.login.return_value = True
|
||||
yield client
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
|
@@ -632,10 +632,6 @@
|
||||
}),
|
||||
}),
|
||||
'entry_data': dict({
|
||||
'advanced_settings': dict({
|
||||
'ssl': True,
|
||||
'verify_ssl': False,
|
||||
}),
|
||||
'host': '**REDACTED**',
|
||||
'password': '**REDACTED**',
|
||||
'username': 'ubnt',
|
||||
|
@@ -10,36 +10,18 @@ from airos.exceptions import (
|
||||
)
|
||||
import pytest
|
||||
|
||||
from homeassistant.components.airos.const import DOMAIN, SECTION_ADVANCED_SETTINGS
|
||||
from homeassistant.components.airos.const import DOMAIN
|
||||
from homeassistant.config_entries import SOURCE_USER
|
||||
from homeassistant.const import (
|
||||
CONF_HOST,
|
||||
CONF_PASSWORD,
|
||||
CONF_SSL,
|
||||
CONF_USERNAME,
|
||||
CONF_VERIFY_SSL,
|
||||
)
|
||||
from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_USERNAME
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.data_entry_flow import FlowResultType
|
||||
|
||||
from tests.common import MockConfigEntry
|
||||
|
||||
NEW_PASSWORD = "new_password"
|
||||
REAUTH_STEP = "reauth_confirm"
|
||||
|
||||
MOCK_CONFIG = {
|
||||
CONF_HOST: "1.1.1.1",
|
||||
CONF_USERNAME: "ubnt",
|
||||
CONF_PASSWORD: "test-password",
|
||||
SECTION_ADVANCED_SETTINGS: {
|
||||
CONF_SSL: True,
|
||||
CONF_VERIFY_SSL: False,
|
||||
},
|
||||
}
|
||||
MOCK_CONFIG_REAUTH = {
|
||||
CONF_HOST: "1.1.1.1",
|
||||
CONF_USERNAME: "ubnt",
|
||||
CONF_PASSWORD: "wrong-password",
|
||||
}
|
||||
|
||||
|
||||
@@ -51,8 +33,7 @@ async def test_form_creates_entry(
|
||||
) -> None:
|
||||
"""Test we get the form and create the appropriate entry."""
|
||||
result = await hass.config_entries.flow.async_init(
|
||||
DOMAIN,
|
||||
context={"source": SOURCE_USER},
|
||||
DOMAIN, context={"source": SOURCE_USER}
|
||||
)
|
||||
assert result["type"] is FlowResultType.FORM
|
||||
assert result["errors"] == {}
|
||||
@@ -97,6 +78,7 @@ async def test_form_duplicate_entry(
|
||||
@pytest.mark.parametrize(
|
||||
("exception", "error"),
|
||||
[
|
||||
(AirOSConnectionAuthenticationError, "invalid_auth"),
|
||||
(AirOSDeviceConnectionError, "cannot_connect"),
|
||||
(AirOSKeyDataMissingError, "key_data_missing"),
|
||||
(Exception, "unknown"),
|
||||
@@ -135,95 +117,3 @@ async def test_form_exception_handling(
|
||||
assert result["title"] == "NanoStation 5AC ap name"
|
||||
assert result["data"] == MOCK_CONFIG
|
||||
assert len(mock_setup_entry.mock_calls) == 1
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
("reauth_exception", "expected_error"),
|
||||
[
|
||||
(None, None),
|
||||
(AirOSConnectionAuthenticationError, "invalid_auth"),
|
||||
(AirOSDeviceConnectionError, "cannot_connect"),
|
||||
(AirOSKeyDataMissingError, "key_data_missing"),
|
||||
(Exception, "unknown"),
|
||||
],
|
||||
ids=[
|
||||
"reauth_succes",
|
||||
"invalid_auth",
|
||||
"cannot_connect",
|
||||
"key_data_missing",
|
||||
"unknown",
|
||||
],
|
||||
)
|
||||
async def test_reauth_flow_scenarios(
|
||||
hass: HomeAssistant,
|
||||
mock_airos_client: AsyncMock,
|
||||
mock_config_entry: MockConfigEntry,
|
||||
reauth_exception: Exception,
|
||||
expected_error: str,
|
||||
) -> None:
|
||||
"""Test reauthentication from start (failure) to finish (success)."""
|
||||
mock_config_entry.add_to_hass(hass)
|
||||
|
||||
mock_airos_client.login.side_effect = AirOSConnectionAuthenticationError
|
||||
await hass.config_entries.async_setup(mock_config_entry.entry_id)
|
||||
|
||||
flows = hass.config_entries.flow.async_progress()
|
||||
assert len(flows) == 1
|
||||
|
||||
flow = flows[0]
|
||||
assert flow["step_id"] == REAUTH_STEP
|
||||
|
||||
mock_airos_client.login.side_effect = reauth_exception
|
||||
|
||||
result = await hass.config_entries.flow.async_configure(
|
||||
flow["flow_id"],
|
||||
user_input={CONF_PASSWORD: NEW_PASSWORD},
|
||||
)
|
||||
|
||||
if expected_error:
|
||||
assert result["type"] is FlowResultType.FORM
|
||||
assert result["step_id"] == REAUTH_STEP
|
||||
assert result["errors"] == {"base": expected_error}
|
||||
|
||||
# Retry
|
||||
mock_airos_client.login.side_effect = None
|
||||
result = await hass.config_entries.flow.async_configure(
|
||||
flow["flow_id"],
|
||||
user_input={CONF_PASSWORD: NEW_PASSWORD},
|
||||
)
|
||||
|
||||
# Always test resolution
|
||||
assert result["type"] is FlowResultType.ABORT
|
||||
assert result["reason"] == "reauth_successful"
|
||||
|
||||
updated_entry = hass.config_entries.async_get_entry(mock_config_entry.entry_id)
|
||||
assert updated_entry.data[CONF_PASSWORD] == NEW_PASSWORD
|
||||
|
||||
|
||||
async def test_reauth_unique_id_mismatch(
|
||||
hass: HomeAssistant,
|
||||
mock_airos_client: AsyncMock,
|
||||
mock_config_entry: MockConfigEntry,
|
||||
) -> None:
|
||||
"""Test reauthentication failure when the unique ID changes."""
|
||||
mock_config_entry.add_to_hass(hass)
|
||||
|
||||
mock_airos_client.login.side_effect = AirOSConnectionAuthenticationError
|
||||
await hass.config_entries.async_setup(mock_config_entry.entry_id)
|
||||
|
||||
flows = hass.config_entries.flow.async_progress()
|
||||
flow = flows[0]
|
||||
|
||||
mock_airos_client.login.side_effect = None
|
||||
mock_airos_client.status.return_value.derived.mac = "FF:23:45:67:89:AB"
|
||||
|
||||
result = await hass.config_entries.flow.async_configure(
|
||||
flow["flow_id"],
|
||||
user_input={CONF_PASSWORD: NEW_PASSWORD},
|
||||
)
|
||||
|
||||
assert result["type"] is FlowResultType.ABORT
|
||||
assert result["reason"] == "unique_id_mismatch"
|
||||
|
||||
updated_entry = hass.config_entries.async_get_entry(mock_config_entry.entry_id)
|
||||
assert updated_entry.data[CONF_PASSWORD] != NEW_PASSWORD
|
||||
|
@@ -1,169 +0,0 @@
|
||||
"""Test for airOS integration setup."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from unittest.mock import ANY, MagicMock
|
||||
|
||||
from homeassistant.components.airos.const import (
|
||||
DEFAULT_SSL,
|
||||
DEFAULT_VERIFY_SSL,
|
||||
DOMAIN,
|
||||
SECTION_ADVANCED_SETTINGS,
|
||||
)
|
||||
from homeassistant.config_entries import SOURCE_USER, ConfigEntryState
|
||||
from homeassistant.const import (
|
||||
CONF_HOST,
|
||||
CONF_PASSWORD,
|
||||
CONF_SSL,
|
||||
CONF_USERNAME,
|
||||
CONF_VERIFY_SSL,
|
||||
)
|
||||
from homeassistant.core import HomeAssistant
|
||||
|
||||
from tests.common import MockConfigEntry
|
||||
|
||||
MOCK_CONFIG_V1 = {
|
||||
CONF_HOST: "1.1.1.1",
|
||||
CONF_USERNAME: "ubnt",
|
||||
CONF_PASSWORD: "test-password",
|
||||
}
|
||||
|
||||
MOCK_CONFIG_PLAIN = {
|
||||
CONF_HOST: "1.1.1.1",
|
||||
CONF_USERNAME: "ubnt",
|
||||
CONF_PASSWORD: "test-password",
|
||||
SECTION_ADVANCED_SETTINGS: {
|
||||
CONF_SSL: False,
|
||||
CONF_VERIFY_SSL: False,
|
||||
},
|
||||
}
|
||||
|
||||
MOCK_CONFIG_V1_2 = {
|
||||
CONF_HOST: "1.1.1.1",
|
||||
CONF_USERNAME: "ubnt",
|
||||
CONF_PASSWORD: "test-password",
|
||||
SECTION_ADVANCED_SETTINGS: {
|
||||
CONF_SSL: DEFAULT_SSL,
|
||||
CONF_VERIFY_SSL: DEFAULT_VERIFY_SSL,
|
||||
},
|
||||
}
|
||||
|
||||
|
||||
async def test_setup_entry_with_default_ssl(
|
||||
hass: HomeAssistant,
|
||||
mock_config_entry: MockConfigEntry,
|
||||
mock_airos_client: MagicMock,
|
||||
mock_airos_class: MagicMock,
|
||||
) -> None:
|
||||
"""Test setting up a config entry with default SSL options."""
|
||||
mock_config_entry.add_to_hass(hass)
|
||||
|
||||
await hass.config_entries.async_setup(mock_config_entry.entry_id)
|
||||
await hass.async_block_till_done()
|
||||
|
||||
assert mock_config_entry.state is ConfigEntryState.LOADED
|
||||
|
||||
mock_airos_class.assert_called_once_with(
|
||||
host=mock_config_entry.data[CONF_HOST],
|
||||
username=mock_config_entry.data[CONF_USERNAME],
|
||||
password=mock_config_entry.data[CONF_PASSWORD],
|
||||
session=ANY,
|
||||
use_ssl=DEFAULT_SSL,
|
||||
)
|
||||
|
||||
assert mock_config_entry.data[SECTION_ADVANCED_SETTINGS][CONF_SSL] is True
|
||||
assert mock_config_entry.data[SECTION_ADVANCED_SETTINGS][CONF_VERIFY_SSL] is False
|
||||
|
||||
|
||||
async def test_setup_entry_without_ssl(
|
||||
hass: HomeAssistant,
|
||||
mock_airos_client: MagicMock,
|
||||
mock_airos_class: MagicMock,
|
||||
) -> None:
|
||||
"""Test setting up a config entry adjusted to plain HTTP."""
|
||||
entry = MockConfigEntry(
|
||||
domain=DOMAIN,
|
||||
data=MOCK_CONFIG_PLAIN,
|
||||
entry_id="1",
|
||||
unique_id="airos_device",
|
||||
version=1,
|
||||
minor_version=2,
|
||||
)
|
||||
entry.add_to_hass(hass)
|
||||
|
||||
await hass.config_entries.async_setup(entry.entry_id)
|
||||
await hass.async_block_till_done()
|
||||
|
||||
assert entry.state is ConfigEntryState.LOADED
|
||||
|
||||
mock_airos_class.assert_called_once_with(
|
||||
host=entry.data[CONF_HOST],
|
||||
username=entry.data[CONF_USERNAME],
|
||||
password=entry.data[CONF_PASSWORD],
|
||||
session=ANY,
|
||||
use_ssl=False,
|
||||
)
|
||||
|
||||
assert entry.data[SECTION_ADVANCED_SETTINGS][CONF_SSL] is False
|
||||
assert entry.data[SECTION_ADVANCED_SETTINGS][CONF_VERIFY_SSL] is False
|
||||
|
||||
|
||||
async def test_migrate_entry(hass: HomeAssistant, mock_airos_client: MagicMock) -> None:
|
||||
"""Test migrate entry unique id."""
|
||||
entry = MockConfigEntry(
|
||||
domain=DOMAIN,
|
||||
source=SOURCE_USER,
|
||||
data=MOCK_CONFIG_V1,
|
||||
entry_id="1",
|
||||
unique_id="airos_device",
|
||||
version=1,
|
||||
)
|
||||
entry.add_to_hass(hass)
|
||||
|
||||
await hass.config_entries.async_setup(entry.entry_id)
|
||||
await hass.async_block_till_done()
|
||||
|
||||
assert entry.state is ConfigEntryState.LOADED
|
||||
assert entry.version == 1
|
||||
assert entry.minor_version == 2
|
||||
assert entry.data == MOCK_CONFIG_V1_2
|
||||
|
||||
|
||||
async def test_migrate_future_return(
|
||||
hass: HomeAssistant,
|
||||
mock_airos_client: MagicMock,
|
||||
) -> None:
|
||||
"""Test migrate entry unique id."""
|
||||
entry = MockConfigEntry(
|
||||
domain=DOMAIN,
|
||||
source=SOURCE_USER,
|
||||
data=MOCK_CONFIG_V1_2,
|
||||
entry_id="1",
|
||||
unique_id="airos_device",
|
||||
version=2,
|
||||
)
|
||||
entry.add_to_hass(hass)
|
||||
|
||||
await hass.config_entries.async_setup(entry.entry_id)
|
||||
await hass.async_block_till_done()
|
||||
|
||||
assert entry.state is ConfigEntryState.MIGRATION_ERROR
|
||||
|
||||
|
||||
async def test_load_unload_entry(
|
||||
hass: HomeAssistant,
|
||||
mock_airos_client: MagicMock,
|
||||
mock_config_entry: MockConfigEntry,
|
||||
) -> None:
|
||||
"""Test setup and unload config entry."""
|
||||
mock_config_entry.add_to_hass(hass)
|
||||
|
||||
await hass.config_entries.async_setup(mock_config_entry.entry_id)
|
||||
await hass.async_block_till_done()
|
||||
|
||||
assert mock_config_entry.state is ConfigEntryState.LOADED
|
||||
|
||||
assert await hass.config_entries.async_unload(mock_config_entry.entry_id)
|
||||
await hass.async_block_till_done()
|
||||
|
||||
assert mock_config_entry.state is ConfigEntryState.NOT_LOADED
|
@@ -3,7 +3,11 @@
|
||||
from datetime import timedelta
|
||||
from unittest.mock import AsyncMock
|
||||
|
||||
from airos.exceptions import AirOSDataMissingError, AirOSDeviceConnectionError
|
||||
from airos.exceptions import (
|
||||
AirOSConnectionAuthenticationError,
|
||||
AirOSDataMissingError,
|
||||
AirOSDeviceConnectionError,
|
||||
)
|
||||
from freezegun.api import FrozenDateTimeFactory
|
||||
import pytest
|
||||
from syrupy.assertion import SnapshotAssertion
|
||||
@@ -35,6 +39,7 @@ async def test_all_entities(
|
||||
@pytest.mark.parametrize(
|
||||
("exception"),
|
||||
[
|
||||
AirOSConnectionAuthenticationError,
|
||||
TimeoutError,
|
||||
AirOSDeviceConnectionError,
|
||||
AirOSDataMissingError,
|
||||
|
@@ -1797,7 +1797,6 @@ async def test_chat_log_tts_streaming(
|
||||
assert process_events(events) == snapshot
|
||||
|
||||
|
||||
@pytest.mark.parametrize(("use_satellite_entity"), [True, False])
|
||||
async def test_acknowledge(
|
||||
hass: HomeAssistant,
|
||||
init_components,
|
||||
@@ -1806,7 +1805,6 @@ async def test_acknowledge(
|
||||
entity_registry: er.EntityRegistry,
|
||||
area_registry: ar.AreaRegistry,
|
||||
device_registry: dr.DeviceRegistry,
|
||||
use_satellite_entity: bool,
|
||||
) -> None:
|
||||
"""Test that acknowledge sound is played when targets are in the same area."""
|
||||
area_1 = area_registry.async_get_or_create("area_1")
|
||||
@@ -1821,16 +1819,12 @@ async def test_acknowledge(
|
||||
|
||||
entry = MockConfigEntry()
|
||||
entry.add_to_hass(hass)
|
||||
|
||||
satellite = entity_registry.async_get_or_create("assist_satellite", "test", "1234")
|
||||
entity_registry.async_update_entity(satellite.entity_id, area_id=area_1.id)
|
||||
|
||||
satellite_device = device_registry.async_get_or_create(
|
||||
satellite = device_registry.async_get_or_create(
|
||||
config_entry_id=entry.entry_id,
|
||||
connections=set(),
|
||||
identifiers={("demo", "id-1234")},
|
||||
)
|
||||
device_registry.async_update_device(satellite_device.id, area_id=area_1.id)
|
||||
device_registry.async_update_device(satellite.id, area_id=area_1.id)
|
||||
|
||||
events: list[assist_pipeline.PipelineEvent] = []
|
||||
turn_on = async_mock_service(hass, "light", "turn_on")
|
||||
@@ -1843,8 +1837,7 @@ async def test_acknowledge(
|
||||
pipeline_input = assist_pipeline.pipeline.PipelineInput(
|
||||
intent_input=text,
|
||||
session=mock_chat_session,
|
||||
satellite_id=satellite.entity_id if use_satellite_entity else None,
|
||||
device_id=satellite_device.id if not use_satellite_entity else None,
|
||||
device_id=satellite.id,
|
||||
run=assist_pipeline.pipeline.PipelineRun(
|
||||
hass,
|
||||
context=Context(),
|
||||
@@ -1896,8 +1889,7 @@ async def test_acknowledge(
|
||||
)
|
||||
|
||||
# 3. Remove satellite device area
|
||||
entity_registry.async_update_entity(satellite.entity_id, area_id=None)
|
||||
device_registry.async_update_device(satellite_device.id, area_id=None)
|
||||
device_registry.async_update_device(satellite.id, area_id=None)
|
||||
|
||||
_reset()
|
||||
await _run("turn on light 1")
|
||||
@@ -1908,8 +1900,7 @@ async def test_acknowledge(
|
||||
assert len(turn_on) == 1
|
||||
|
||||
# Restore
|
||||
entity_registry.async_update_entity(satellite.entity_id, area_id=area_1.id)
|
||||
device_registry.async_update_device(satellite_device.id, area_id=area_1.id)
|
||||
device_registry.async_update_device(satellite.id, area_id=area_1.id)
|
||||
|
||||
# 4. Check device area instead of entity area
|
||||
light_device = device_registry.async_get_or_create(
|
||||
|
@@ -27,9 +27,6 @@ TEST_MESSAGE_HEADERS2 = (
TEST_MULTIPART_HEADER = (
b'Content-Type: multipart/related;\r\n\tboundary="Mark=_100584970350292485166"'
)
TEST_MULTIPART_ATTACHMENT_HEADER = (
b'Content-Type: multipart/mixed; boundary="------------qIuh0xG6dsImymfJo6f2M4Zv"'
)

TEST_MESSAGE_HEADERS3 = b""

@@ -39,13 +36,6 @@ TEST_MESSAGE_MULTIPART = (
TEST_MESSAGE_HEADERS1 + DATE_HEADER1 + TEST_MESSAGE_HEADERS2 + TEST_MULTIPART_HEADER
)

TEST_MESSAGE_MULTIPART_ATTACHMENT = (
TEST_MESSAGE_HEADERS1
+ DATE_HEADER1
+ TEST_MESSAGE_HEADERS2
+ TEST_MULTIPART_ATTACHMENT_HEADER
)

TEST_MESSAGE_NO_SUBJECT_TO_FROM = (
TEST_MESSAGE_HEADERS1 + DATE_HEADER1 + TEST_MESSAGE_HEADERS3
)
@@ -150,45 +140,6 @@ TEST_CONTENT_MULTIPART_BASE64_INVALID = (
+ b"\r\n--Mark=_100584970350292485166--\r\n"
)

TEST_CONTENT_MULTIPART_WITH_ATTACHMENT = b"""
\nThis is a multi-part message in MIME format.
--------------qIuh0xG6dsImymfJo6f2M4Zv
Content-Type: multipart/alternative;
boundary="------------N4zNjp2QWnOfrYQhtLL02Bk1"

--------------N4zNjp2QWnOfrYQhtLL02Bk1
Content-Type: text/plain; charset=UTF-8; format=flowed
Content-Transfer-Encoding: 7bit

*Multi* part Test body

--------------N4zNjp2QWnOfrYQhtLL02Bk1
Content-Type: text/html; charset=UTF-8
Content-Transfer-Encoding: 7bit

<!DOCTYPE html>
<html>
<head>

<meta http-equiv="content-type" content="text/html; charset=UTF-8">
</head>
<body>
<p><b>Multi</b> part Test body</p>
</body>
</html>

--------------N4zNjp2QWnOfrYQhtLL02Bk1--
--------------qIuh0xG6dsImymfJo6f2M4Zv
Content-Type: text/plain; charset=UTF-8; name="Text attachment content.txt"
Content-Disposition: attachment; filename="Text attachment content.txt"
Content-Transfer-Encoding: base64

VGV4dCBhdHRhY2htZW50IGNvbnRlbnQ=

--------------qIuh0xG6dsImymfJo6f2M4Zv--
"""


EMPTY_SEARCH_RESPONSE = ("OK", [b"", b"Search completed (0.0001 + 0.000 secs)."])
EMPTY_SEARCH_RESPONSE_ALT = ("OK", [b"Search completed (0.0001 + 0.000 secs)."])

@@ -352,24 +303,6 @@ TEST_FETCH_RESPONSE_MULTIPART_BASE64 = (
b"Fetch completed (0.0001 + 0.000 secs).",
],
)
TEST_FETCH_RESPONSE_MULTIPART_WITH_ATTACHMENT = (
"OK",
[
b"1 FETCH (BODY[] {"
+ str(
len(
TEST_MESSAGE_MULTIPART_ATTACHMENT
+ TEST_CONTENT_MULTIPART_WITH_ATTACHMENT
)
).encode("utf-8")
+ b"}",
bytearray(
TEST_MESSAGE_MULTIPART_ATTACHMENT + TEST_CONTENT_MULTIPART_WITH_ATTACHMENT
),
b")",
b"Fetch completed (0.0001 + 0.000 secs).",
],
)

TEST_FETCH_RESPONSE_MULTIPART_BASE64_INVALID = (
"OK",
@@ -1,7 +1,6 @@
"""Test the imap entry initialization."""

import asyncio
from base64 import b64decode
from datetime import datetime, timedelta, timezone
from typing import Any
from unittest.mock import AsyncMock, MagicMock, call, patch
@@ -32,7 +31,6 @@ from .const import (
TEST_FETCH_RESPONSE_MULTIPART_BASE64,
TEST_FETCH_RESPONSE_MULTIPART_BASE64_INVALID,
TEST_FETCH_RESPONSE_MULTIPART_EMPTY_PLAIN,
TEST_FETCH_RESPONSE_MULTIPART_WITH_ATTACHMENT,
TEST_FETCH_RESPONSE_NO_SUBJECT_TO_FROM,
TEST_FETCH_RESPONSE_TEXT_BARE,
TEST_FETCH_RESPONSE_TEXT_OTHER,
@@ -109,72 +107,20 @@ async def test_entry_startup_fails(

@pytest.mark.parametrize("imap_search", [TEST_SEARCH_RESPONSE])
@pytest.mark.parametrize(
("imap_fetch", "valid_date", "parts"),
("imap_fetch", "valid_date"),
[
(TEST_FETCH_RESPONSE_TEXT_BARE, True, {}),
(TEST_FETCH_RESPONSE_TEXT_PLAIN, True, {}),
(TEST_FETCH_RESPONSE_TEXT_PLAIN_ALT, True, {}),
(TEST_FETCH_RESPONSE_INVALID_DATE1, False, {}),
(TEST_FETCH_RESPONSE_INVALID_DATE2, False, {}),
(TEST_FETCH_RESPONSE_INVALID_DATE3, False, {}),
(TEST_FETCH_RESPONSE_TEXT_OTHER, True, {}),
(TEST_FETCH_RESPONSE_HTML, True, {}),
(
TEST_FETCH_RESPONSE_MULTIPART,
True,
{
"0": {
"content_type": "text/plain",
"content_transfer_encoding": "7bit",
},
"1": {"content_type": "text/html", "content_transfer_encoding": "7bit"},
},
),
(
TEST_FETCH_RESPONSE_MULTIPART_EMPTY_PLAIN,
True,
{
"0": {
"content_type": "text/plain",
"content_transfer_encoding": "7bit",
},
"1": {"content_type": "text/html", "content_transfer_encoding": "7bit"},
},
),
(
TEST_FETCH_RESPONSE_MULTIPART_BASE64,
True,
{
"0": {
"content_type": "text/plain",
"content_transfer_encoding": "base64",
},
"1": {
"content_type": "text/html",
"content_transfer_encoding": "base64",
},
},
),
(
TEST_FETCH_RESPONSE_MULTIPART_WITH_ATTACHMENT,
True,
{
"0,0": {
"content_type": "text/plain",
"content_transfer_encoding": "7bit",
},
"0,1": {
"content_type": "text/html",
"content_transfer_encoding": "7bit",
},
"1": {
"content_type": "text/plain",
"filename": "Text attachment content.txt",
"content_transfer_encoding": "base64",
},
},
),
(TEST_FETCH_RESPONSE_BINARY, True, {}),
(TEST_FETCH_RESPONSE_TEXT_BARE, True),
(TEST_FETCH_RESPONSE_TEXT_PLAIN, True),
(TEST_FETCH_RESPONSE_TEXT_PLAIN_ALT, True),
(TEST_FETCH_RESPONSE_INVALID_DATE1, False),
(TEST_FETCH_RESPONSE_INVALID_DATE2, False),
(TEST_FETCH_RESPONSE_INVALID_DATE3, False),
(TEST_FETCH_RESPONSE_TEXT_OTHER, True),
(TEST_FETCH_RESPONSE_HTML, True),
(TEST_FETCH_RESPONSE_MULTIPART, True),
(TEST_FETCH_RESPONSE_MULTIPART_EMPTY_PLAIN, True),
(TEST_FETCH_RESPONSE_MULTIPART_BASE64, True),
(TEST_FETCH_RESPONSE_BINARY, True),
],
ids=[
"bare",
@@ -188,18 +134,13 @@ async def test_entry_startup_fails(
"multipart",
"multipart_empty_plain",
"multipart_base64",
"multipart_attachment",
"binary",
],
)
@pytest.mark.parametrize("imap_has_capability", [True, False], ids=["push", "poll"])
@pytest.mark.parametrize("charset", ["utf-8", "us-ascii"], ids=["utf-8", "us-ascii"])
async def test_receiving_message_successfully(
hass: HomeAssistant,
mock_imap_protocol: MagicMock,
valid_date: bool,
charset: str,
parts: dict[str, Any],
hass: HomeAssistant, mock_imap_protocol: MagicMock, valid_date: bool, charset: str
) -> None:
"""Test receiving a message successfully."""
event_called = async_capture_events(hass, "imap_content")
@@ -229,7 +170,6 @@ async def test_receiving_message_successfully(
assert data["sender"] == "john.doe@example.com"
assert data["subject"] == "Test subject"
assert data["uid"] == "1"
assert data["parts"] == parts
assert "Test body" in data["text"]
assert (valid_date and isinstance(data["date"], datetime)) or (
not valid_date and data["date"] is None
@@ -886,33 +826,11 @@ async def test_enforce_polling(


@pytest.mark.parametrize(
("imap_search", "imap_fetch", "message_parts"),
[
(
TEST_SEARCH_RESPONSE,
TEST_FETCH_RESPONSE_MULTIPART_WITH_ATTACHMENT,
{
"0,0": {
"content_type": "text/plain",
"content_transfer_encoding": "7bit",
},
"0,1": {
"content_type": "text/html",
"content_transfer_encoding": "7bit",
},
"1": {
"content_type": "text/plain",
"filename": "Text attachment content.txt",
"content_transfer_encoding": "base64",
},
},
)
],
("imap_search", "imap_fetch"),
[(TEST_SEARCH_RESPONSE, TEST_FETCH_RESPONSE_TEXT_PLAIN)],
)
@pytest.mark.parametrize("imap_has_capability", [True, False], ids=["push", "poll"])
async def test_services(
hass: HomeAssistant, mock_imap_protocol: MagicMock, message_parts: dict[str, Any]
) -> None:
async def test_services(hass: HomeAssistant, mock_imap_protocol: MagicMock) -> None:
"""Test receiving a message successfully."""
event_called = async_capture_events(hass, "imap_content")

@@ -941,7 +859,6 @@ async def test_services(
assert data["subject"] == "Test subject"
assert data["uid"] == "1"
assert data["entry_id"] == config_entry.entry_id
assert data["parts"] == message_parts

# Test seen service
data = {"entry": config_entry.entry_id, "uid": "1"}
@@ -972,42 +889,16 @@ async def test_services(
mock_imap_protocol.store.assert_called_with("1", "+FLAGS (\\Deleted)")
mock_imap_protocol.protocol.expunge.assert_called_once()

# Test fetch service with text response
mock_imap_protocol.reset_mock()
# Test fetch service
data = {"entry": config_entry.entry_id, "uid": "1"}
response = await hass.services.async_call(
DOMAIN, "fetch", data, blocking=True, return_response=True
)
mock_imap_protocol.fetch.assert_called_with("1", "BODY.PEEK[]")
assert response["text"] == "*Multi* part Test body\n"
assert response["text"] == "Test body\r\n"
assert response["sender"] == "john.doe@example.com"
assert response["subject"] == "Test subject"
assert response["uid"] == "1"
assert response["parts"] == message_parts

# Test fetch part service with attachment response
mock_imap_protocol.reset_mock()
data = {"entry": config_entry.entry_id, "uid": "1", "part": "1"}
response = await hass.services.async_call(
DOMAIN, "fetch_part", data, blocking=True, return_response=True
)
mock_imap_protocol.fetch.assert_called_with("1", "BODY.PEEK[]")
assert response["part_data"] == "VGV4dCBhdHRhY2htZW50IGNvbnRlbnQ=\n"
assert response["content_type"] == "text/plain"
assert response["content_transfer_encoding"] == "base64"
assert response["filename"] == "Text attachment content.txt"
assert response["part"] == "1"
assert response["uid"] == "1"
assert b64decode(response["part_data"]) == b"Text attachment content"

# Test fetch part service with invalid part index
for part in ("A", "2", "0"):
data = {"entry": config_entry.entry_id, "uid": "1", "part": part}
with pytest.raises(ServiceValidationError) as exc:
await hass.services.async_call(
DOMAIN, "fetch_part", data, blocking=True, return_response=True
)
assert exc.value.translation_key == "invalid_part_index"

# Test with invalid entry_id
data = {"entry": "invalid", "uid": "1"}
@@ -1052,14 +943,12 @@ async def test_services(
),
"delete": ({"entry": config_entry.entry_id, "uid": "1"}, False),
"fetch": ({"entry": config_entry.entry_id, "uid": "1"}, True),
"fetch_part": ({"entry": config_entry.entry_id, "uid": "1", "part": "1"}, True),
}
patch_error_translation_key = {
"seen": ("store", "seen_failed"),
"move": ("copy", "copy_failed"),
"delete": ("store", "delete_failed"),
"fetch": ("fetch", "fetch_failed"),
"fetch_part": ("fetch", "fetch_failed"),
}
for service, (data, response) in service_calls_response.items():
with (
@@ -93,7 +93,7 @@
'supported_features': 0,
'translation_key': 'plant_days',
'unique_id': 'a1b2c3d4e5f6a1b2c3d4e5f6_LPH63ABCD_plant_days',
'unit_of_measurement': <UnitOfTime.DAYS: 'd'>,
'unit_of_measurement': 'days',
})
# ---
# name: test_all_entities[number.garden_plants_age-state]
@@ -104,7 +104,7 @@
'min': 0.0,
'mode': <NumberMode.BOX: 'box'>,
'step': 1,
'unit_of_measurement': <UnitOfTime.DAYS: 'd'>,
'unit_of_measurement': 'days',
}),
'context': <ANY>,
'entity_id': 'number.garden_plants_age',
@@ -6,11 +6,10 @@ from aiomealie import About, MealieAuthenticationError, MealieConnectionError
import pytest

from homeassistant.components.mealie.const import DOMAIN
from homeassistant.config_entries import SOURCE_HASSIO, SOURCE_IGNORE, SOURCE_USER
from homeassistant.config_entries import SOURCE_USER
from homeassistant.const import CONF_API_TOKEN, CONF_HOST, CONF_VERIFY_SSL
from homeassistant.core import HomeAssistant
from homeassistant.data_entry_flow import FlowResultType
from homeassistant.helpers.service_info.hassio import HassioServiceInfo

from . import setup_integration

@@ -362,137 +361,3 @@ async def test_reconfigure_flow_exceptions(
)
assert result["type"] is FlowResultType.ABORT
assert result["reason"] == "reconfigure_successful"


async def test_hassio_success(
hass: HomeAssistant,
mock_mealie_client: AsyncMock,
mock_setup_entry: AsyncMock,
mock_config_entry: MockConfigEntry,
) -> None:
"""Test successful Supervisor flow."""

result = await hass.config_entries.flow.async_init(
DOMAIN,
data=HassioServiceInfo(
config={"addon": "Mealie", "host": "http://test", "port": 9090},
name="mealie",
slug="mealie",
uuid="1234",
),
context={"source": SOURCE_HASSIO},
)

assert result.get("type") is FlowResultType.FORM
assert result.get("step_id") == "hassio_confirm"
assert result.get("description_placeholders") == {"addon": "Mealie"}

result = await hass.config_entries.flow.async_configure(
result["flow_id"], {CONF_API_TOKEN: "token"}
)

assert result["type"] is FlowResultType.CREATE_ENTRY
assert result["title"] == "Mealie"
assert result["data"] == {
CONF_HOST: "http://test:9090",
CONF_API_TOKEN: "token",
CONF_VERIFY_SSL: True,
}
assert result["result"].unique_id == "bf1c62fe-4941-4332-9886-e54e88dbdba0"


async def test_hassio_already_configured(
hass: HomeAssistant, mock_config_entry: MockConfigEntry
) -> None:
"""Test we only allow a single config flow."""
mock_config_entry.add_to_hass(hass)

result = await hass.config_entries.flow.async_init(
DOMAIN,
data=HassioServiceInfo(
config={
"addon": "Mealie",
"host": "mock-mealie",
"port": "9090",
},
name="Mealie",
slug="mealie",
uuid="1234",
),
context={"source": SOURCE_HASSIO},
)
assert result
assert result["type"] is FlowResultType.ABORT
assert result["reason"] == "already_configured"


async def test_hassio_ignored(hass: HomeAssistant) -> None:
"""Test the supervisor discovered instance can be ignored."""
MockConfigEntry(domain=DOMAIN, source=SOURCE_IGNORE).add_to_hass(hass)

result = await hass.config_entries.flow.async_init(
DOMAIN,
data=HassioServiceInfo(
config={
"addon": "Mealie",
"host": "mock-mealie",
"port": "9090",
},
name="Mealie",
slug="mealie",
uuid="1234",
),
context={"source": SOURCE_HASSIO},
)
assert result
assert result["type"] is FlowResultType.ABORT
assert result["reason"] == "already_configured"


@pytest.mark.parametrize(
("exception", "error"),
[
(MealieConnectionError, "cannot_connect"),
(MealieAuthenticationError, "invalid_auth"),
(Exception, "unknown"),
],
)
async def test_hassio_connection_error(
hass: HomeAssistant,
mock_mealie_client: AsyncMock,
mock_setup_entry: AsyncMock,
exception: Exception,
error: str,
) -> None:
"""Test flow errors."""
mock_mealie_client.get_user_info.side_effect = exception

result = await hass.config_entries.flow.async_init(
DOMAIN,
data=HassioServiceInfo(
config={"addon": "Mealie", "host": "http://test", "port": 9090},
name="mealie",
slug="mealie",
uuid="1234",
),
context={"source": SOURCE_HASSIO},
)

assert result["type"] is FlowResultType.FORM
assert result["step_id"] == "hassio_confirm"
assert result["description_placeholders"] == {"addon": "Mealie"}

result = await hass.config_entries.flow.async_configure(
result["flow_id"], {CONF_API_TOKEN: "token"}
)

assert result["type"] is FlowResultType.FORM
assert result["errors"] == {"base": error}

mock_mealie_client.get_user_info.side_effect = None

result = await hass.config_entries.flow.async_configure(
result["flow_id"], {CONF_API_TOKEN: "token"}
)

assert result["type"] is FlowResultType.CREATE_ENTRY
@@ -213,7 +213,7 @@ async def test_options_flow(hass: HomeAssistant) -> None:
CONF_HOSTS: "192.168.1.0/24",
CONF_CONSIDER_HOME: 180,
CONF_SCAN_INTERVAL: 120,
CONF_OPTIONS: "-n -sn -PR -T4 --min-rate 10 --host-timeout 5s",
CONF_OPTIONS: "-F -T4 --min-rate 10 --host-timeout 5s",
}

with patch(
File diff suppressed because it is too large
File diff suppressed because it is too large
@@ -3,35 +3,49 @@
|
||||
from unittest.mock import MagicMock
|
||||
|
||||
import pytest
|
||||
from syrupy.assertion import SnapshotAssertion
|
||||
|
||||
from homeassistant.components.plugwise.const import DOMAIN
|
||||
from homeassistant.components.sensor import DOMAIN as SENSOR_DOMAIN
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers import entity_registry as er
|
||||
from homeassistant.helpers.entity_component import async_update_entity
|
||||
|
||||
from tests.common import MockConfigEntry, snapshot_platform
|
||||
from tests.common import MockConfigEntry
|
||||
|
||||
|
||||
@pytest.mark.parametrize("platforms", [(SENSOR_DOMAIN,)])
|
||||
@pytest.mark.usefixtures("entity_registry_enabled_by_default")
|
||||
async def test_adam_sensor_snapshot(
|
||||
hass: HomeAssistant,
|
||||
mock_smile_adam: MagicMock,
|
||||
snapshot: SnapshotAssertion,
|
||||
entity_registry: er.EntityRegistry,
|
||||
setup_platform: MockConfigEntry,
|
||||
async def test_adam_climate_sensor_entities(
|
||||
hass: HomeAssistant, mock_smile_adam: MagicMock, init_integration: MockConfigEntry
|
||||
) -> None:
|
||||
"""Test Adam sensor snapshot."""
|
||||
await snapshot_platform(hass, entity_registry, snapshot, setup_platform.entry_id)
|
||||
"""Test creation of climate related sensor entities."""
|
||||
state = hass.states.get("sensor.adam_outdoor_temperature")
|
||||
assert state
|
||||
assert float(state.state) == 7.81
|
||||
|
||||
state = hass.states.get("sensor.cv_pomp_electricity_consumed")
|
||||
assert state
|
||||
assert float(state.state) == 35.6
|
||||
|
||||
state = hass.states.get("sensor.onoff_water_temperature")
|
||||
assert state
|
||||
assert float(state.state) == 70.0
|
||||
|
||||
state = hass.states.get("sensor.cv_pomp_electricity_consumed_interval")
|
||||
assert state
|
||||
assert float(state.state) == 7.37
|
||||
|
||||
await async_update_entity(hass, "sensor.zone_lisa_wk_battery")
|
||||
|
||||
state = hass.states.get("sensor.zone_lisa_wk_battery")
|
||||
assert state
|
||||
assert int(state.state) == 34
|
||||
|
||||
|
||||
async def test_adam_climate_sensor_humidity(
|
||||
async def test_adam_climate_sensor_entity_2(
|
||||
hass: HomeAssistant,
|
||||
mock_smile_adam_jip: MagicMock,
|
||||
init_integration: MockConfigEntry,
|
||||
) -> None:
|
||||
"""Test creation of climate related humidity sensor entity."""
|
||||
"""Test creation of climate related sensor entities."""
|
||||
state = hass.states.get("sensor.woonkamer_humidity")
|
||||
assert state
|
||||
assert float(state.state) == 56.2
|
||||
@@ -82,51 +96,83 @@ async def test_unique_id_migration_humidity(
|
||||
|
||||
@pytest.mark.parametrize("chosen_env", ["anna_heatpump_heating"], indirect=True)
|
||||
@pytest.mark.parametrize("cooling_present", [True], indirect=True)
|
||||
@pytest.mark.parametrize("platforms", [(SENSOR_DOMAIN,)])
|
||||
@pytest.mark.usefixtures("entity_registry_enabled_by_default")
|
||||
async def test_anna_sensor_snapshot(
|
||||
hass: HomeAssistant,
|
||||
mock_smile_anna: MagicMock,
|
||||
snapshot: SnapshotAssertion,
|
||||
entity_registry: er.EntityRegistry,
|
||||
setup_platform: MockConfigEntry,
|
||||
async def test_anna_as_smt_climate_sensor_entities(
|
||||
hass: HomeAssistant, mock_smile_anna: MagicMock, init_integration: MockConfigEntry
|
||||
) -> None:
|
||||
"""Test Anna sensor snapshot."""
|
||||
await snapshot_platform(hass, entity_registry, snapshot, setup_platform.entry_id)
|
||||
"""Test creation of climate related sensor entities."""
|
||||
state = hass.states.get("sensor.opentherm_outdoor_air_temperature")
|
||||
assert state
|
||||
assert float(state.state) == 3.0
|
||||
|
||||
state = hass.states.get("sensor.opentherm_water_temperature")
|
||||
assert state
|
||||
assert float(state.state) == 29.1
|
||||
|
||||
state = hass.states.get("sensor.opentherm_dhw_temperature")
|
||||
assert state
|
||||
assert float(state.state) == 46.3
|
||||
|
||||
state = hass.states.get("sensor.anna_illuminance")
|
||||
assert state
|
||||
assert float(state.state) == 86.0
|
||||
|
||||
|
||||
@pytest.mark.parametrize("chosen_env", ["p1v4_442_single"], indirect=True)
|
||||
@pytest.mark.parametrize(
|
||||
"gateway_id", ["a455b61e52394b2db5081ce025a430f3"], indirect=True
|
||||
)
|
||||
@pytest.mark.parametrize("platforms", [(SENSOR_DOMAIN,)])
|
||||
@pytest.mark.usefixtures("entity_registry_enabled_by_default")
|
||||
async def test_p1_dsmr_sensor_snapshot(
|
||||
hass: HomeAssistant,
|
||||
mock_smile_p1: MagicMock,
|
||||
snapshot: SnapshotAssertion,
|
||||
entity_registry: er.EntityRegistry,
|
||||
setup_platform: MockConfigEntry,
|
||||
async def test_p1_dsmr_sensor_entities(
|
||||
hass: HomeAssistant, mock_smile_p1: MagicMock, init_integration: MockConfigEntry
|
||||
) -> None:
|
||||
"""Test P1 1-phase sensor snapshot."""
|
||||
await snapshot_platform(hass, entity_registry, snapshot, setup_platform.entry_id)
|
||||
"""Test creation of power related sensor entities."""
|
||||
state = hass.states.get("sensor.p1_net_electricity_point")
|
||||
assert state
|
||||
assert int(state.state) == 486
|
||||
|
||||
state = hass.states.get("sensor.p1_electricity_consumed_off_peak_cumulative")
|
||||
assert state
|
||||
assert float(state.state) == 17643.423
|
||||
|
||||
state = hass.states.get("sensor.p1_electricity_produced_peak_point")
|
||||
assert state
|
||||
assert int(state.state) == 0
|
||||
|
||||
state = hass.states.get("sensor.p1_electricity_consumed_peak_cumulative")
|
||||
assert state
|
||||
assert float(state.state) == 13966.608
|
||||
|
||||
state = hass.states.get("sensor.p1_gas_consumed_cumulative")
|
||||
assert not state
|
||||
|
||||
|
||||
@pytest.mark.parametrize("chosen_env", ["p1v4_442_triple"], indirect=True)
|
||||
@pytest.mark.parametrize(
|
||||
"gateway_id", ["03e65b16e4b247a29ae0d75a78cb492e"], indirect=True
|
||||
)
|
||||
@pytest.mark.parametrize("platforms", [(SENSOR_DOMAIN,)])
|
||||
@pytest.mark.usefixtures("entity_registry_enabled_by_default")
|
||||
async def test_p1_3ph_dsmr_sensor_snapshot(
|
||||
async def test_p1_3ph_dsmr_sensor_entities(
|
||||
hass: HomeAssistant,
|
||||
mock_smile_p1: MagicMock,
|
||||
snapshot: SnapshotAssertion,
|
||||
entity_registry: er.EntityRegistry,
|
||||
setup_platform: MockConfigEntry,
|
||||
mock_smile_p1: MagicMock,
|
||||
init_integration: MockConfigEntry,
|
||||
) -> None:
|
||||
"""Test P1 3-phase sensor snapshot."""
|
||||
await snapshot_platform(hass, entity_registry, snapshot, setup_platform.entry_id)
|
||||
"""Test creation of power related sensor entities."""
|
||||
state = hass.states.get("sensor.p1_electricity_phase_one_consumed")
|
||||
assert state
|
||||
assert int(state.state) == 1763
|
||||
|
||||
state = hass.states.get("sensor.p1_electricity_phase_two_consumed")
|
||||
assert state
|
||||
assert int(state.state) == 1703
|
||||
|
||||
state = hass.states.get("sensor.p1_electricity_phase_three_consumed")
|
||||
assert state
|
||||
assert int(state.state) == 2080
|
||||
|
||||
# Default disabled sensor test
|
||||
state = hass.states.get("sensor.p1_voltage_phase_one")
|
||||
assert state
|
||||
assert float(state.state) == 233.2
|
||||
|
||||
|
||||
@pytest.mark.parametrize("chosen_env", ["p1v4_442_triple"], indirect=True)
|
||||
@@ -140,29 +186,18 @@ async def test_p1_3ph_dsmr_sensor_disabled_entities(
|
||||
init_integration: MockConfigEntry,
|
||||
) -> None:
|
||||
"""Test disabled power related sensor entities intent."""
|
||||
entity_id = "sensor.p1_voltage_phase_one"
|
||||
state = hass.states.get(entity_id)
|
||||
state = hass.states.get("sensor.p1_voltage_phase_one")
|
||||
assert not state
|
||||
|
||||
entity_registry.async_update_entity(entity_id=entity_id, disabled_by=None)
|
||||
await hass.async_block_till_done()
|
||||
|
||||
await hass.config_entries.async_reload(init_integration.entry_id)
|
||||
await hass.async_block_till_done()
|
||||
|
||||
state = hass.states.get("sensor.p1_voltage_phase_one")
|
||||
assert state
|
||||
assert float(state.state) == 233.2
|
||||
|
||||
|
||||
@pytest.mark.parametrize("platforms", [(SENSOR_DOMAIN,)])
|
||||
@pytest.mark.usefixtures("entity_registry_enabled_by_default")
|
||||
async def test_stretch_sensor_snapshot(
|
||||
hass: HomeAssistant,
|
||||
mock_stretch: MagicMock,
|
||||
snapshot: SnapshotAssertion,
|
||||
entity_registry: er.EntityRegistry,
|
||||
setup_platform: MockConfigEntry,
|
||||
async def test_stretch_sensor_entities(
|
||||
hass: HomeAssistant, mock_stretch: MagicMock, init_integration: MockConfigEntry
|
||||
) -> None:
|
||||
"""Test Stretch sensor snapshot."""
|
||||
await snapshot_platform(hass, entity_registry, snapshot, setup_platform.entry_id)
|
||||
"""Test creation of power related sensor entities."""
|
||||
state = hass.states.get("sensor.koelkast_92c4a_electricity_consumed")
|
||||
assert state
|
||||
assert float(state.state) == 50.5
|
||||
|
||||
state = hass.states.get("sensor.droger_52559_electricity_consumed_interval")
|
||||
assert state
|
||||
assert float(state.state) == 0.0
|
||||
|
@@ -4,7 +4,6 @@ from unittest.mock import MagicMock
|
||||
|
||||
from plugwise.exceptions import PlugwiseException
|
||||
import pytest
|
||||
from syrupy.assertion import SnapshotAssertion
|
||||
|
||||
from homeassistant.components.plugwise.const import DOMAIN
|
||||
from homeassistant.components.switch import DOMAIN as SWITCH_DOMAIN
|
||||
@@ -20,20 +19,53 @@ from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import HomeAssistantError
|
||||
from homeassistant.helpers import entity_registry as er
|
||||
|
||||
from tests.common import MockConfigEntry, snapshot_platform
|
||||
from tests.common import MockConfigEntry
|
||||
|
||||
|
||||
@pytest.mark.parametrize("platforms", [(SWITCH_DOMAIN,)])
|
||||
@pytest.mark.usefixtures("entity_registry_enabled_by_default")
|
||||
async def test_adam_switch_snapshot(
|
||||
hass: HomeAssistant,
|
||||
mock_smile_adam: MagicMock,
|
||||
snapshot: SnapshotAssertion,
|
||||
entity_registry: er.EntityRegistry,
|
||||
setup_platform: MockConfigEntry,
|
||||
async def test_adam_climate_switch_entities(
|
||||
hass: HomeAssistant, mock_smile_adam: MagicMock, init_integration: MockConfigEntry
|
||||
) -> None:
|
||||
"""Test Adam switch snapshot."""
|
||||
await snapshot_platform(hass, entity_registry, snapshot, setup_platform.entry_id)
|
||||
"""Test creation of climate related switch entities."""
|
||||
state = hass.states.get("switch.cv_pomp_relay")
|
||||
assert state
|
||||
assert state.state == STATE_ON
|
||||
|
||||
state = hass.states.get("switch.fibaro_hc2_relay")
|
||||
assert state
|
||||
assert state.state == STATE_ON
|
||||
|
||||
|
||||
async def test_adam_climate_switch_negative_testing(
|
||||
hass: HomeAssistant, mock_smile_adam: MagicMock, init_integration: MockConfigEntry
|
||||
) -> None:
|
||||
"""Test exceptions of climate related switch entities."""
|
||||
mock_smile_adam.set_switch_state.side_effect = PlugwiseException
|
||||
|
||||
with pytest.raises(HomeAssistantError):
|
||||
await hass.services.async_call(
|
||||
SWITCH_DOMAIN,
|
||||
SERVICE_TURN_OFF,
|
||||
{ATTR_ENTITY_ID: "switch.cv_pomp_relay"},
|
||||
blocking=True,
|
||||
)
|
||||
|
||||
assert mock_smile_adam.set_switch_state.call_count == 1
|
||||
mock_smile_adam.set_switch_state.assert_called_with(
|
||||
"78d1126fc4c743db81b61c20e88342a7", None, "relay", STATE_OFF
|
||||
)
|
||||
|
||||
with pytest.raises(HomeAssistantError):
|
||||
await hass.services.async_call(
|
||||
SWITCH_DOMAIN,
|
||||
SERVICE_TURN_ON,
|
||||
{ATTR_ENTITY_ID: "switch.fibaro_hc2_relay"},
|
||||
blocking=True,
|
||||
)
|
||||
|
||||
assert mock_smile_adam.set_switch_state.call_count == 2
|
||||
mock_smile_adam.set_switch_state.assert_called_with(
|
||||
"a28f588dc4a049a483fd03a30361ad3a", None, "relay", STATE_ON
|
||||
)
|
||||
|
||||
|
||||
async def test_adam_climate_switch_changes(
|
||||
@@ -77,50 +109,17 @@ async def test_adam_climate_switch_changes(
|
||||
)
|
||||
|
||||
|
||||
async def test_adam_climate_switch_negative_testing(
|
||||
hass: HomeAssistant, mock_smile_adam: MagicMock, init_integration: MockConfigEntry
|
||||
async def test_stretch_switch_entities(
|
||||
hass: HomeAssistant, mock_stretch: MagicMock, init_integration: MockConfigEntry
|
||||
) -> None:
|
||||
"""Test exceptions of climate related switch entities."""
|
||||
mock_smile_adam.set_switch_state.side_effect = PlugwiseException
|
||||
"""Test creation of climate related switch entities."""
|
||||
state = hass.states.get("switch.koelkast_92c4a_relay")
|
||||
assert state
|
||||
assert state.state == STATE_ON
|
||||
|
||||
with pytest.raises(HomeAssistantError):
|
||||
await hass.services.async_call(
|
||||
SWITCH_DOMAIN,
|
||||
SERVICE_TURN_OFF,
|
||||
{ATTR_ENTITY_ID: "switch.cv_pomp_relay"},
|
||||
blocking=True,
|
||||
)
|
||||
|
||||
assert mock_smile_adam.set_switch_state.call_count == 1
|
||||
mock_smile_adam.set_switch_state.assert_called_with(
|
||||
"78d1126fc4c743db81b61c20e88342a7", None, "relay", STATE_OFF
|
||||
)
|
||||
|
||||
with pytest.raises(HomeAssistantError):
|
||||
await hass.services.async_call(
|
||||
SWITCH_DOMAIN,
|
||||
SERVICE_TURN_ON,
|
||||
{ATTR_ENTITY_ID: "switch.fibaro_hc2_relay"},
|
||||
blocking=True,
|
||||
)
|
||||
|
||||
assert mock_smile_adam.set_switch_state.call_count == 2
|
||||
mock_smile_adam.set_switch_state.assert_called_with(
|
||||
"a28f588dc4a049a483fd03a30361ad3a", None, "relay", STATE_ON
|
||||
)
|
||||
|
||||
|
||||
@pytest.mark.parametrize("platforms", [(SWITCH_DOMAIN,)])
|
||||
@pytest.mark.usefixtures("entity_registry_enabled_by_default")
|
||||
async def test_stretch_switch_snapshot(
|
||||
hass: HomeAssistant,
|
||||
mock_stretch: MagicMock,
|
||||
snapshot: SnapshotAssertion,
|
||||
entity_registry: er.EntityRegistry,
|
||||
setup_platform: MockConfigEntry,
|
||||
) -> None:
|
||||
"""Test Stretch switch snapshot."""
|
||||
await snapshot_platform(hass, entity_registry, snapshot, setup_platform.entry_id)
|
||||
state = hass.states.get("switch.droger_52559_relay")
|
||||
assert state
|
||||
assert state.state == STATE_ON
|
||||
|
||||
|
||||
async def test_stretch_switch_changes(
|
||||
|
@@ -8,14 +8,13 @@ from pyportainer.models.portainer import Endpoint
|
||||
import pytest
|
||||
|
||||
from homeassistant.components.portainer.const import DOMAIN
|
||||
from homeassistant.const import CONF_API_TOKEN, CONF_URL, CONF_VERIFY_SSL
|
||||
from homeassistant.const import CONF_API_KEY, CONF_HOST
|
||||
|
||||
from tests.common import MockConfigEntry, load_json_array_fixture
|
||||
|
||||
MOCK_TEST_CONFIG = {
|
||||
CONF_URL: "https://127.0.0.1:9000/",
|
||||
CONF_API_TOKEN: "test_api_token",
|
||||
CONF_VERIFY_SSL: True,
|
||||
CONF_HOST: "https://127.0.0.1:9000/",
|
||||
CONF_API_KEY: "test_api_key",
|
||||
}
|
||||
|
||||
|
||||
@@ -61,5 +60,4 @@ def mock_config_entry() -> MockConfigEntry:
|
||||
title="Portainer test",
|
||||
data=MOCK_TEST_CONFIG,
|
||||
entry_id="portainer_test_entry_123",
|
||||
version=2,
|
||||
)
|
||||
|
@@ -11,7 +11,7 @@ import pytest
|
||||
|
||||
from homeassistant.components.portainer.const import DOMAIN
|
||||
from homeassistant.config_entries import SOURCE_USER
|
||||
from homeassistant.const import CONF_API_TOKEN, CONF_URL, CONF_VERIFY_SSL
|
||||
from homeassistant.const import CONF_API_KEY, CONF_HOST
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.data_entry_flow import FlowResultType
|
||||
|
||||
@@ -20,9 +20,8 @@ from .conftest import MOCK_TEST_CONFIG
|
||||
from tests.common import MockConfigEntry
|
||||
|
||||
MOCK_USER_SETUP = {
|
||||
CONF_URL: "https://127.0.0.1:9000/",
|
||||
CONF_API_TOKEN: "test_api_token",
|
||||
CONF_VERIFY_SSL: True,
|
||||
CONF_HOST: "https://127.0.0.1:9000/",
|
||||
CONF_API_KEY: "test_api_key",
|
||||
}
|
||||
|
||||
|
||||
|
@@ -9,9 +9,7 @@ from pyportainer.exceptions import (
|
||||
)
|
||||
import pytest
|
||||
|
||||
from homeassistant.components.portainer.const import DOMAIN
|
||||
from homeassistant.config_entries import ConfigEntryState
|
||||
from homeassistant.const import CONF_API_KEY, CONF_API_TOKEN, CONF_HOST, CONF_URL
|
||||
from homeassistant.core import HomeAssistant
|
||||
|
||||
from . import setup_integration
|
||||
@@ -38,25 +36,3 @@ async def test_setup_exceptions(
|
||||
mock_portainer_client.get_endpoints.side_effect = exception
|
||||
await setup_integration(hass, mock_config_entry)
|
||||
assert mock_config_entry.state == expected_state
|
||||
|
||||
|
||||
async def test_v1_migration(hass: HomeAssistant) -> None:
|
||||
"""Test migration from v1 to v2 config entry."""
|
||||
entry = MockConfigEntry(
|
||||
domain=DOMAIN,
|
||||
data={
|
||||
CONF_HOST: "http://test_host",
|
||||
CONF_API_KEY: "test_key",
|
||||
},
|
||||
unique_id="1",
|
||||
version=1,
|
||||
)
|
||||
entry.add_to_hass(hass)
|
||||
await hass.config_entries.async_setup(entry.entry_id)
|
||||
await hass.async_block_till_done()
|
||||
|
||||
assert entry.version == 2
|
||||
assert CONF_HOST not in entry.data
|
||||
assert CONF_API_KEY not in entry.data
|
||||
assert entry.data[CONF_URL] == "http://test_host"
|
||||
assert entry.data[CONF_API_TOKEN] == "test_key"
|
||||
|
@@ -368,39 +368,70 @@ async def configure_squeezebox_media_player_button_platform(
|
||||
await hass.async_block_till_done(wait_background_tasks=True)
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
async def setup_squeezebox(
|
||||
hass: HomeAssistant, config_entry: MockConfigEntry, lms: MagicMock
|
||||
) -> MockConfigEntry:
|
||||
"""Fixture setting up a squeezebox config entry with one player."""
|
||||
with patch("homeassistant.components.squeezebox.Server", return_value=lms):
|
||||
async def configure_squeezebox_switch_platform(
|
||||
hass: HomeAssistant,
|
||||
config_entry: MockConfigEntry,
|
||||
lms: MagicMock,
|
||||
) -> None:
|
||||
"""Configure a squeezebox config entry with appropriate mocks for switch."""
|
||||
with (
|
||||
patch(
|
||||
"homeassistant.components.squeezebox.PLATFORMS",
|
||||
[Platform.SWITCH],
|
||||
),
|
||||
patch("homeassistant.components.squeezebox.Server", return_value=lms),
|
||||
):
|
||||
# Set up the switch platform.
|
||||
await hass.config_entries.async_setup(config_entry.entry_id)
|
||||
await hass.async_block_till_done()
|
||||
return config_entry
|
||||
await hass.async_block_till_done(wait_background_tasks=True)
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
async def mock_alarms_player(
|
||||
hass: HomeAssistant,
|
||||
config_entry: MockConfigEntry,
|
||||
lms: MagicMock,
|
||||
) -> MagicMock:
|
||||
"""Mock the alarms of a configured player."""
|
||||
players = await lms.async_get_players()
|
||||
players[0].alarms = [
|
||||
{
|
||||
"id": TEST_ALARM_ID,
|
||||
"enabled": True,
|
||||
"time": "07:00",
|
||||
"dow": [0, 1, 2, 3, 4, 5, 6],
|
||||
"repeat": False,
|
||||
"url": "CURRENT_PLAYLIST",
|
||||
"volume": 50,
|
||||
},
|
||||
]
|
||||
await configure_squeezebox_switch_platform(hass, config_entry, lms)
|
||||
return players[0]
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
async def configured_player(
|
||||
hass: HomeAssistant,
|
||||
setup_squeezebox: MockConfigEntry, # depend on your setup fixture
|
||||
lms: MagicMock,
|
||||
hass: HomeAssistant, config_entry: MockConfigEntry, lms: MagicMock
|
||||
) -> MagicMock:
|
||||
"""Fixture mocking calls to pysqueezebox Player from a configured squeezebox."""
|
||||
# At this point, setup_squeezebox has already patched Server and set up the entry
|
||||
await configure_squeezebox_media_player_platform(hass, config_entry, lms)
|
||||
return (await lms.async_get_players())[0]
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
async def configured_player_with_button(
|
||||
hass: HomeAssistant, config_entry: MockConfigEntry, lms: MagicMock
|
||||
) -> MagicMock:
|
||||
"""Fixture mocking calls to pysqueezebox Player from a configured squeezebox."""
|
||||
await configure_squeezebox_media_player_button_platform(hass, config_entry, lms)
|
||||
return (await lms.async_get_players())[0]
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
async def configured_players(
|
||||
hass: HomeAssistant,
|
||||
config_entry: MockConfigEntry,
|
||||
lms_factory: MagicMock,
|
||||
hass: HomeAssistant, config_entry: MockConfigEntry, lms_factory: MagicMock
|
||||
) -> list[MagicMock]:
|
||||
"""Fixture mocking calls to multiple pysqueezebox Players from a configured squeezebox."""
|
||||
"""Fixture mocking calls to two pysqueezebox Players from a configured squeezebox."""
|
||||
lms = lms_factory(3, uuid=SERVER_UUIDS[0])
|
||||
|
||||
with patch("homeassistant.components.squeezebox.Server", return_value=lms):
|
||||
await hass.config_entries.async_setup(config_entry.entry_id)
|
||||
await hass.async_block_till_done()
|
||||
|
||||
await configure_squeezebox_media_player_platform(hass, config_entry, lms)
|
||||
return await lms.async_get_players()
|
||||
|
@@ -1,23 +1,14 @@
|
||||
"""Tests for the squeezebox button component."""
|
||||
|
||||
from unittest.mock import MagicMock, patch
|
||||
|
||||
import pytest
|
||||
from unittest.mock import MagicMock
|
||||
|
||||
from homeassistant.components.button import DOMAIN as BUTTON_DOMAIN, SERVICE_PRESS
|
||||
from homeassistant.const import ATTR_ENTITY_ID, Platform
|
||||
from homeassistant.const import ATTR_ENTITY_ID
|
||||
from homeassistant.core import HomeAssistant
|
||||
|
||||
|
||||
@pytest.fixture(autouse=True)
|
||||
def squeezebox_button_platform():
|
||||
"""Only set up the media_player platform for squeezebox tests."""
|
||||
with patch("homeassistant.components.squeezebox.PLATFORMS", [Platform.BUTTON]):
|
||||
yield
|
||||
|
||||
|
||||
async def test_squeezebox_press(
|
||||
hass: HomeAssistant, configured_player: MagicMock
|
||||
hass: HomeAssistant, configured_player_with_button: MagicMock
|
||||
) -> None:
|
||||
"""Test press service call."""
|
||||
await hass.services.async_call(
|
||||
@@ -27,4 +18,6 @@ async def test_squeezebox_press(
|
||||
blocking=True,
|
||||
)
|
||||
|
||||
configured_player.async_query.assert_called_with("button", "preset_1.single")
|
||||
configured_player_with_button.async_query.assert_called_with(
|
||||
"button", "preset_1.single"
|
||||
)
|
||||
|
@@ -15,8 +15,6 @@ from homeassistant.components.squeezebox.const import (
|
||||
from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_PORT, CONF_USERNAME
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.data_entry_flow import FlowResultType
|
||||
from homeassistant.helpers import entity_registry as er
|
||||
from homeassistant.helpers.device_registry import format_mac
|
||||
from homeassistant.helpers.service_info.dhcp import DhcpServiceInfo
|
||||
|
||||
from tests.common import MockConfigEntry
|
||||
@@ -316,15 +314,11 @@ async def test_form_validate_exception(hass: HomeAssistant) -> None:
|
||||
|
||||
|
||||
async def test_form_cannot_connect(hass: HomeAssistant) -> None:
|
||||
"""Test we handle cannot connect error, then succeed after retry."""
|
||||
|
||||
# Start the flow
|
||||
"""Test we handle cannot connect error."""
|
||||
result = await hass.config_entries.flow.async_init(
|
||||
DOMAIN, context={"source": "edit"}
|
||||
)
|
||||
assert result["type"] is FlowResultType.FORM
|
||||
|
||||
# First attempt: simulate cannot connect
|
||||
with patch(
|
||||
"pysqueezebox.Server.async_query",
|
||||
return_value=False,
|
||||
@@ -334,47 +328,17 @@ async def test_form_cannot_connect(hass: HomeAssistant) -> None:
|
||||
{
|
||||
CONF_HOST: HOST,
|
||||
CONF_PORT: PORT,
|
||||
CONF_USERNAME: "",
|
||||
CONF_PASSWORD: "",
|
||||
CONF_USERNAME: "test-username",
|
||||
CONF_PASSWORD: "test-password",
|
||||
},
|
||||
)
|
||||
|
||||
# We should still be in a form, with an error
|
||||
assert result["type"] is FlowResultType.FORM
|
||||
assert result["errors"] == {"base": "cannot_connect"}
|
||||
|
||||
# Second attempt: simulate a successful connection
|
||||
with patch(
|
||||
"pysqueezebox.Server.async_query",
|
||||
return_value={"uuid": UUID},
|
||||
):
|
||||
result = await hass.config_entries.flow.async_configure(
|
||||
result["flow_id"],
|
||||
{
|
||||
CONF_HOST: HOST,
|
||||
CONF_PORT: PORT,
|
||||
CONF_USERNAME: "",
|
||||
CONF_PASSWORD: "",
|
||||
CONF_HTTPS: False,
|
||||
},
|
||||
)
|
||||
|
||||
assert result["type"] is FlowResultType.CREATE_ENTRY
|
||||
assert result["title"] == HOST # the flow uses host as title
|
||||
assert result["data"] == {
|
||||
CONF_HOST: HOST,
|
||||
CONF_PORT: PORT,
|
||||
CONF_USERNAME: "",
|
||||
CONF_PASSWORD: "",
|
||||
CONF_HTTPS: False,
|
||||
}
|
||||
assert result["context"]["unique_id"] == UUID
|
||||
|
||||
|
||||
async def test_discovery(hass: HomeAssistant) -> None:
|
||||
"""Test handling of discovered server, then completing the flow."""
|
||||
|
||||
# Initial discovery: server responds with a uuid
|
||||
"""Test handling of discovered server."""
|
||||
with patch(
|
||||
"pysqueezebox.Server.async_query",
|
||||
return_value={"uuid": UUID},
|
||||
@@ -384,109 +348,24 @@ async def test_discovery(hass: HomeAssistant) -> None:
|
||||
context={"source": config_entries.SOURCE_INTEGRATION_DISCOVERY},
|
||||
data={CONF_HOST: HOST, CONF_PORT: PORT, "uuid": UUID},
|
||||
)
|
||||
|
||||
# Discovery puts us into the edit step
|
||||
assert result["type"] is FlowResultType.FORM
|
||||
assert result["step_id"] == "edit"
|
||||
|
||||
# Complete the edit step with user input
|
||||
with patch(
|
||||
"pysqueezebox.Server.async_query",
|
||||
return_value={"uuid": UUID},
|
||||
):
|
||||
result = await hass.config_entries.flow.async_configure(
|
||||
result["flow_id"],
|
||||
{
|
||||
CONF_HOST: HOST,
|
||||
CONF_PORT: PORT,
|
||||
CONF_USERNAME: "",
|
||||
CONF_PASSWORD: "",
|
||||
CONF_HTTPS: False,
|
||||
},
|
||||
)
|
||||
|
||||
# Flow should now complete with a config entry
|
||||
assert result["type"] is FlowResultType.CREATE_ENTRY
|
||||
assert result["title"] == HOST
|
||||
assert result["data"] == {
|
||||
CONF_HOST: HOST,
|
||||
CONF_PORT: PORT,
|
||||
CONF_USERNAME: "",
|
||||
CONF_PASSWORD: "",
|
||||
CONF_HTTPS: False,
|
||||
}
|
||||
assert result["context"]["unique_id"] == UUID
|
||||
assert result["type"] is FlowResultType.FORM
|
||||
assert result["step_id"] == "edit"
|
||||
|
||||
|
||||
async def test_discovery_no_uuid(hass: HomeAssistant) -> None:
|
||||
"""Test discovery without uuid first fails, then succeeds when uuid is available."""
|
||||
|
||||
# Initial discovery: no uuid returned
|
||||
with patch(
|
||||
"pysqueezebox.Server.async_query",
|
||||
new=patch_async_query_unauthorized,
|
||||
):
|
||||
"""Test handling of discovered server with unavailable uuid."""
|
||||
with patch("pysqueezebox.Server.async_query", new=patch_async_query_unauthorized):
|
||||
result = await hass.config_entries.flow.async_init(
|
||||
DOMAIN,
|
||||
context={"source": config_entries.SOURCE_INTEGRATION_DISCOVERY},
|
||||
data={CONF_HOST: HOST, CONF_PORT: PORT, CONF_HTTPS: False},
|
||||
)
|
||||
|
||||
# Flow shows the edit form
|
||||
assert result["type"] is FlowResultType.FORM
|
||||
assert result["step_id"] == "edit"
|
||||
|
||||
# First attempt to complete: still no uuid → error on the form
|
||||
with patch(
|
||||
"pysqueezebox.Server.async_query",
|
||||
new=patch_async_query_unauthorized,
|
||||
):
|
||||
result = await hass.config_entries.flow.async_configure(
|
||||
result["flow_id"],
|
||||
{
|
||||
CONF_HOST: HOST,
|
||||
CONF_PORT: PORT,
|
||||
CONF_USERNAME: "",
|
||||
CONF_PASSWORD: "",
|
||||
CONF_HTTPS: False,
|
||||
},
|
||||
)
|
||||
|
||||
assert result["type"] is FlowResultType.FORM
|
||||
assert result["errors"] == {"base": "invalid_auth"}
|
||||
|
||||
# Second attempt: now the server responds with a uuid
|
||||
with patch(
|
||||
"pysqueezebox.Server.async_query",
|
||||
return_value={"uuid": UUID},
|
||||
):
|
||||
result = await hass.config_entries.flow.async_configure(
|
||||
result["flow_id"],
|
||||
{
|
||||
CONF_HOST: HOST,
|
||||
CONF_PORT: PORT,
|
||||
CONF_USERNAME: "",
|
||||
CONF_PASSWORD: "",
|
||||
CONF_HTTPS: False,
|
||||
},
|
||||
)
|
||||
|
||||
# Flow should now complete successfully
|
||||
assert result["type"] is FlowResultType.CREATE_ENTRY
|
||||
assert result["title"] == HOST
|
||||
assert result["data"] == {
|
||||
CONF_HOST: HOST,
|
||||
CONF_PORT: PORT,
|
||||
CONF_USERNAME: "",
|
||||
CONF_PASSWORD: "",
|
||||
CONF_HTTPS: False,
|
||||
}
|
||||
assert result["context"]["unique_id"] == UUID
|
||||
assert result["type"] is FlowResultType.FORM
|
||||
assert result["step_id"] == "edit"
|
||||
|
||||
|
||||
async def test_dhcp_discovery(hass: HomeAssistant) -> None:
|
||||
"""Test we can process discovery from dhcp and complete the flow."""
|
||||
|
||||
"""Test we can process discovery from dhcp."""
|
||||
with (
|
||||
patch(
|
||||
"pysqueezebox.Server.async_query",
|
||||
@@ -501,48 +380,17 @@ async def test_dhcp_discovery(hass: HomeAssistant) -> None:
|
||||
DOMAIN,
|
||||
context={"source": config_entries.SOURCE_DHCP},
|
||||
data=DhcpServiceInfo(
|
||||
ip=HOST,
|
||||
ip="1.1.1.1",
|
||||
macaddress="aabbccddeeff",
|
||||
hostname="any",
|
||||
),
|
||||
)
|
||||
|
||||
# DHCP discovery puts us into the edit step
|
||||
assert result["type"] is FlowResultType.FORM
|
||||
assert result["step_id"] == "edit"
|
||||
|
||||
# Complete the edit step with user input
|
||||
with patch(
|
||||
"pysqueezebox.Server.async_query",
|
||||
return_value={"uuid": UUID},
|
||||
):
|
||||
result = await hass.config_entries.flow.async_configure(
|
||||
result["flow_id"],
|
||||
{
|
||||
CONF_HOST: HOST,
|
||||
CONF_PORT: PORT,
|
||||
CONF_USERNAME: "",
|
||||
CONF_PASSWORD: "",
|
||||
CONF_HTTPS: False,
|
||||
},
|
||||
)
|
||||
|
||||
# Flow should now complete with a config entry
|
||||
assert result["type"] is FlowResultType.CREATE_ENTRY
|
||||
assert result["title"] == HOST
|
||||
assert result["data"] == {
|
||||
CONF_HOST: HOST,
|
||||
CONF_PORT: PORT,
|
||||
CONF_USERNAME: "",
|
||||
CONF_PASSWORD: "",
|
||||
CONF_HTTPS: False,
|
||||
}
|
||||
assert result["context"]["unique_id"] == UUID
|
||||
assert result["type"] is FlowResultType.FORM
|
||||
assert result["step_id"] == "edit"
|
||||
|
||||
|
||||
async def test_dhcp_discovery_no_server_found(hass: HomeAssistant) -> None:
|
||||
"""Test we can handle dhcp discovery when no server is found."""
|
||||
|
||||
with (
|
||||
patch(
|
||||
"homeassistant.components.squeezebox.config_flow.async_discover",
|
||||
@@ -554,68 +402,28 @@ async def test_dhcp_discovery_no_server_found(hass: HomeAssistant) -> None:
|
||||
DOMAIN,
|
||||
context={"source": config_entries.SOURCE_DHCP},
|
||||
data=DhcpServiceInfo(
|
||||
ip=HOST,
|
||||
ip="1.1.1.1",
|
||||
macaddress="aabbccddeeff",
|
||||
hostname="any",
|
||||
),
|
||||
)
|
||||
|
||||
# First step: user form with only host
|
||||
assert result["type"] is FlowResultType.FORM
|
||||
assert result["step_id"] == "user"
|
||||
|
||||
# Provide just the host to move into edit step
|
||||
result = await hass.config_entries.flow.async_configure(
|
||||
result["flow_id"],
|
||||
{CONF_HOST: HOST},
|
||||
)
|
||||
assert result["type"] is FlowResultType.FORM
|
||||
assert result["step_id"] == "edit"
|
||||
|
||||
# Now try to complete the edit step with full schema
|
||||
with patch(
|
||||
"homeassistant.components.squeezebox.config_flow.async_discover",
|
||||
mock_failed_discover,
|
||||
):
|
||||
result = await hass.config_entries.flow.async_configure(
|
||||
result["flow_id"],
|
||||
{
|
||||
CONF_HOST: HOST,
|
||||
CONF_PORT: PORT,
|
||||
CONF_USERNAME: "",
|
||||
CONF_PASSWORD: "",
|
||||
CONF_HTTPS: False,
|
||||
},
|
||||
)
|
||||
|
||||
assert result["type"] is FlowResultType.FORM
|
||||
assert result["step_id"] == "edit"
|
||||
assert result["errors"] == {"base": "unknown"}
|
||||
assert result["type"] is FlowResultType.FORM
|
||||
assert result["step_id"] == "user"
|
||||
|
||||
|
||||
async def test_dhcp_discovery_existing_player(
|
||||
hass: HomeAssistant, entity_registry: er.EntityRegistry
|
||||
) -> None:
|
||||
async def test_dhcp_discovery_existing_player(hass: HomeAssistant) -> None:
|
||||
"""Test that we properly ignore known players during dhcp discover."""
|
||||
|
||||
# Register a squeezebox media_player entity with the same MAC unique_id
|
||||
entity_registry.async_get_or_create(
|
||||
domain="media_player",
|
||||
platform=DOMAIN,
|
||||
unique_id=format_mac("aabbccddeeff"),
|
||||
)
|
||||
|
||||
# Now fire a DHCP discovery for the same MAC
|
||||
result = await hass.config_entries.flow.async_init(
|
||||
DOMAIN,
|
||||
context={"source": config_entries.SOURCE_DHCP},
|
||||
data=DhcpServiceInfo(
|
||||
ip="1.1.1.1",
|
||||
macaddress="aabbccddeeff",
|
||||
hostname="any",
|
||||
),
|
||||
)
|
||||
|
||||
# Because the player is already known, the flow should abort
|
||||
assert result["type"] is FlowResultType.ABORT
|
||||
assert result["reason"] == "already_configured"
|
||||
with patch(
|
||||
"homeassistant.helpers.entity_registry.EntityRegistry.async_get_entity_id",
|
||||
return_value="test_entity",
|
||||
):
|
||||
result = await hass.config_entries.flow.async_init(
|
||||
DOMAIN,
|
||||
context={"source": config_entries.SOURCE_DHCP},
|
||||
data=DhcpServiceInfo(
|
||||
ip="1.1.1.1",
|
||||
macaddress="aabbccddeeff",
|
||||
hostname="any",
|
||||
),
|
||||
)
|
||||
assert result["type"] is FlowResultType.ABORT
|
||||
|
@@ -3,12 +3,10 @@
|
||||
from http import HTTPStatus
|
||||
from unittest.mock import MagicMock, patch
|
||||
|
||||
import pytest
|
||||
from syrupy.assertion import SnapshotAssertion
|
||||
|
||||
from homeassistant.components.squeezebox.const import DOMAIN
|
||||
from homeassistant.config_entries import ConfigEntryState
|
||||
from homeassistant.const import Platform
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.device_registry import DeviceRegistry
|
||||
|
||||
@@ -17,15 +15,6 @@ from .conftest import TEST_MAC
|
||||
from tests.common import MockConfigEntry
|
||||
|
||||
|
||||
@pytest.fixture(autouse=True)
|
||||
def squeezebox_media_player_platform():
|
||||
"""Only set up the media_player platform for squeezebox tests."""
|
||||
with patch(
|
||||
"homeassistant.components.squeezebox.PLATFORMS", [Platform.MEDIA_PLAYER]
|
||||
):
|
||||
yield
|
||||
|
||||
|
||||
async def test_init_api_fail(
|
||||
hass: HomeAssistant,
|
||||
config_entry: MockConfigEntry,
|
||||
|
@@ -428,35 +428,3 @@ async def test_play_browse_item_bad_category(
|
||||
},
|
||||
blocking=True,
|
||||
)
|
||||
|
||||
|
||||
async def test_synthetic_thumbnail_item_ids(
|
||||
hass: HomeAssistant,
|
||||
config_entry: MockConfigEntry,
|
||||
hass_ws_client: WebSocketGenerator,
|
||||
) -> None:
|
||||
"""Test synthetic ID generation and url caching for items without stable IDs."""
|
||||
with patch(
|
||||
"homeassistant.components.squeezebox.browse_media.is_internal_request",
|
||||
return_value=False,
|
||||
):
|
||||
client = await hass_ws_client()
|
||||
|
||||
await client.send_json(
|
||||
{
|
||||
"id": 1,
|
||||
"type": "media_player/browse_media",
|
||||
"entity_id": "media_player.test_player",
|
||||
"media_content_id": "",
|
||||
"media_content_type": "apps",
|
||||
}
|
||||
)
|
||||
response = await client.receive_json()
|
||||
assert response["success"]
|
||||
|
||||
children = response["result"]["children"]
|
||||
assert len(children) > 0
|
||||
for child in children:
|
||||
if thumbnail := child.get("thumbnail"):
|
||||
assert not thumbnail.startswith("http://lms.internal")
|
||||
assert thumbnail.startswith("/api/media_player_proxy/")
|
||||
|
@@ -65,27 +65,22 @@ from homeassistant.const import (
|
||||
SERVICE_VOLUME_UP,
|
||||
STATE_UNAVAILABLE,
|
||||
STATE_UNKNOWN,
|
||||
Platform,
|
||||
)
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import ServiceValidationError
|
||||
from homeassistant.helpers.entity_registry import EntityRegistry
|
||||
from homeassistant.util.dt import utcnow
|
||||
|
||||
from .conftest import FAKE_VALID_ITEM_ID, TEST_MAC, TEST_VOLUME_STEP
|
||||
from .conftest import (
|
||||
FAKE_VALID_ITEM_ID,
|
||||
TEST_MAC,
|
||||
TEST_VOLUME_STEP,
|
||||
configure_squeezebox_media_player_platform,
|
||||
)
|
||||
|
||||
from tests.common import MockConfigEntry, async_fire_time_changed, snapshot_platform
|
||||
|
||||
|
||||
@pytest.fixture(autouse=True)
|
||||
def squeezebox_media_player_platform():
|
||||
"""Only set up the media_player platform for squeezebox tests."""
|
||||
with patch(
|
||||
"homeassistant.components.squeezebox.PLATFORMS", [Platform.MEDIA_PLAYER]
|
||||
):
|
||||
yield
|
||||
|
||||
|
||||
async def test_entity_registry(
|
||||
hass: HomeAssistant,
|
||||
entity_registry: EntityRegistry,
|
||||
@@ -103,11 +98,10 @@ async def test_squeezebox_new_player_discovery(
|
||||
lms: MagicMock,
|
||||
player_factory: MagicMock,
|
||||
freezer: FrozenDateTimeFactory,
|
||||
setup_squeezebox: MockConfigEntry,
|
||||
) -> None:
|
||||
"""Test discovery of a new squeezebox player."""
|
||||
# Initial setup with one player (from the 'lms' fixture)
|
||||
# await setup_squeezebox
|
||||
await configure_squeezebox_media_player_platform(hass, config_entry, lms)
|
||||
await hass.async_block_till_done(wait_background_tasks=True)
|
||||
assert hass.states.get("media_player.test_player") is not None
|
||||
assert hass.states.get("media_player.test_player_2") is None
|
||||
|
@@ -1,20 +1,14 @@
|
||||
"""Tests for the Squeezebox alarm switch platform."""
|
||||
|
||||
from datetime import timedelta
|
||||
from unittest.mock import MagicMock, patch
|
||||
from unittest.mock import MagicMock
|
||||
|
||||
from freezegun.api import FrozenDateTimeFactory
|
||||
import pytest
|
||||
from syrupy.assertion import SnapshotAssertion
|
||||
|
||||
from homeassistant.components.squeezebox.const import PLAYER_UPDATE_INTERVAL
|
||||
from homeassistant.components.switch import DOMAIN as SWITCH_DOMAIN
|
||||
from homeassistant.const import (
|
||||
CONF_ENTITY_ID,
|
||||
SERVICE_TURN_OFF,
|
||||
SERVICE_TURN_ON,
|
||||
Platform,
|
||||
)
|
||||
from homeassistant.const import CONF_ENTITY_ID, SERVICE_TURN_OFF, SERVICE_TURN_ON
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.entity_registry import EntityRegistry
|
||||
|
||||
@@ -23,40 +17,6 @@ from .conftest import TEST_ALARM_ID
|
||||
from tests.common import MockConfigEntry, async_fire_time_changed, snapshot_platform
|
||||
|
||||
|
||||
@pytest.fixture(autouse=True)
|
||||
def squeezebox_alarm_platform():
|
||||
"""Only set up the media_player platform for squeezebox tests."""
|
||||
with patch("homeassistant.components.squeezebox.PLATFORMS", [Platform.SWITCH]):
|
||||
yield
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
async def mock_alarms_player(
|
||||
hass: HomeAssistant,
|
||||
config_entry: MockConfigEntry,
|
||||
lms: MagicMock,
|
||||
) -> MagicMock:
|
||||
"""Mock the alarms of a configured player."""
|
||||
players = await lms.async_get_players()
|
||||
players[0].alarms = [
|
||||
{
|
||||
"id": TEST_ALARM_ID,
|
||||
"enabled": True,
|
||||
"time": "07:00",
|
||||
"dow": [0, 1, 2, 3, 4, 5, 6],
|
||||
"repeat": False,
|
||||
"url": "CURRENT_PLAYLIST",
|
||||
"volume": 50,
|
||||
},
|
||||
]
|
||||
|
||||
with patch("homeassistant.components.squeezebox.Server", return_value=lms):
|
||||
await hass.config_entries.async_setup(config_entry.entry_id)
|
||||
await hass.async_block_till_done()
|
||||
|
||||
return players[0]
|
||||
|
||||
|
||||
async def test_entity_registry(
|
||||
hass: HomeAssistant,
|
||||
entity_registry: EntityRegistry,
|
||||
|
@@ -1127,47 +1127,3 @@ K11_PLUS_VACUUM_SERVICE_INFO = BluetoothServiceInfoBleak(
|
||||
connectable=True,
|
||||
tx_power=-127,
|
||||
)
|
||||
|
||||
|
||||
RELAY_SWITCH_1_SERVICE_INFO = BluetoothServiceInfoBleak(
|
||||
name="Relay Switch 1",
|
||||
manufacturer_data={2409: b"$X|\x0866G\x81\x00\x00\x001\x00\x00\x00\x00"},
|
||||
service_data={"0000fd3d-0000-1000-8000-00805f9b34fb": b";\x00\x00\x00"},
|
||||
service_uuids=["cba20d00-224d-11e6-9fb8-0002a5d5c51b"],
|
||||
address="AA:BB:CC:DD:EE:FF",
|
||||
rssi=-60,
|
||||
source="local",
|
||||
advertisement=generate_advertisement_data(
|
||||
local_name="Relay Switch 1",
|
||||
manufacturer_data={2409: b"$X|\x0866G\x81\x00\x00\x001\x00\x00\x00\x00"},
|
||||
service_data={"0000fd3d-0000-1000-8000-00805f9b34fb": b"=\x00\x00\x00"},
|
||||
service_uuids=["cba20d00-224d-11e6-9fb8-0002a5d5c51b"],
|
||||
),
|
||||
device=generate_ble_device("AA:BB:CC:DD:EE:FF", "Relay Switch 1"),
|
||||
time=0,
|
||||
connectable=True,
|
||||
tx_power=-127,
|
||||
)
|
||||
|
||||
|
||||
GARAGE_DOOR_OPENER_SERVICE_INFO = BluetoothServiceInfoBleak(
|
||||
name="Garage Door Opener",
|
||||
manufacturer_data={2409: b"$X|\x05BN\x0f\x00\x00\x03\x00\x00\x00\x00\x00\x00"},
|
||||
service_data={
|
||||
"0000fd3d-0000-1000-8000-00805f9b34fb": b">\x00\x00\x00",
|
||||
},
|
||||
service_uuids=["cba20d00-224d-11e6-9fb8-0002a5d5c51b"],
|
||||
address="AA:BB:CC:DD:EE:FF",
|
||||
rssi=-60,
|
||||
source="local",
|
||||
advertisement=generate_advertisement_data(
|
||||
local_name="Garage Door Opener",
|
||||
manufacturer_data={2409: b"$X|\x05BN\x0f\x00\x00\x03\x00\x00\x00\x00\x00\x00"},
|
||||
service_data={"0000fd3d-0000-1000-8000-00805f9b34fb": b">\x00\x00\x00"},
|
||||
service_uuids=["cba20d00-224d-11e6-9fb8-0002a5d5c51b"],
|
||||
),
|
||||
device=generate_ble_device("AA:BB:CC:DD:EE:FF", "Garage Door Opener"),
|
||||
time=0,
|
||||
connectable=True,
|
||||
tx_power=-127,
|
||||
)
|
||||
|
@@ -30,7 +30,6 @@ from homeassistant.core import HomeAssistant, State
|
||||
from homeassistant.exceptions import HomeAssistantError
|
||||
|
||||
from . import (
|
||||
GARAGE_DOOR_OPENER_SERVICE_INFO,
|
||||
ROLLER_SHADE_SERVICE_INFO,
|
||||
WOBLINDTILT_SERVICE_INFO,
|
||||
WOCURTAIN3_SERVICE_INFO,
|
||||
@@ -649,41 +648,3 @@ async def test_exception_handling_cover_service(
|
||||
{**service_data, ATTR_ENTITY_ID: entity_id},
|
||||
blocking=True,
|
||||
)
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
("service", "mock_method"),
|
||||
[
|
||||
(SERVICE_OPEN_COVER, "open"),
|
||||
(SERVICE_CLOSE_COVER, "close"),
|
||||
],
|
||||
)
|
||||
async def test_garage_door_opener_controlling(
|
||||
hass: HomeAssistant,
|
||||
mock_entry_encrypted_factory: Callable[[str], MockConfigEntry],
|
||||
service: str,
|
||||
mock_method: str,
|
||||
) -> None:
|
||||
"""Test Garage Door Opener controlling."""
|
||||
inject_bluetooth_service_info(hass, GARAGE_DOOR_OPENER_SERVICE_INFO)
|
||||
|
||||
entry = mock_entry_encrypted_factory(sensor_type="garage_door_opener")
|
||||
entry.add_to_hass(hass)
|
||||
entity_id = "cover.test_name"
|
||||
|
||||
mocked_instance = AsyncMock(return_value=True)
|
||||
with patch.multiple(
|
||||
"homeassistant.components.switchbot.cover.switchbot.SwitchbotGarageDoorOpener",
|
||||
update=AsyncMock(),
|
||||
**{mock_method: mocked_instance},
|
||||
):
|
||||
assert await hass.config_entries.async_setup(entry.entry_id)
|
||||
await hass.async_block_till_done()
|
||||
|
||||
await hass.services.async_call(
|
||||
COVER_DOMAIN,
|
||||
service,
|
||||
{ATTR_ENTITY_ID: entity_id},
|
||||
blocking=True,
|
||||
)
|
||||
mocked_instance.assert_awaited_once()
|
||||
|
Some files were not shown because too many files have changed in this diff