mirror of
https://github.com/home-assistant/core.git
synced 2025-07-16 09:47:13 +00:00
Merge branch 'dev' into prepare_protobuf6
This commit is contained in:
commit
4388a32b61
@ -16,8 +16,8 @@
|
||||
"data": {
|
||||
"api_key": "[%key:common::config_flow::data::api_key%]",
|
||||
"city": "City",
|
||||
"country": "Country",
|
||||
"state": "State"
|
||||
"state": "State",
|
||||
"country": "[%key:common::config_flow::data::country%]"
|
||||
}
|
||||
},
|
||||
"reauth_confirm": {
|
||||
@ -56,12 +56,12 @@
|
||||
"sensor": {
|
||||
"pollutant_label": {
|
||||
"state": {
|
||||
"co": "Carbon monoxide",
|
||||
"n2": "Nitrogen dioxide",
|
||||
"o3": "Ozone",
|
||||
"p1": "PM10",
|
||||
"p2": "PM2.5",
|
||||
"s2": "Sulfur dioxide"
|
||||
"co": "[%key:component::sensor::entity_component::carbon_monoxide::name%]",
|
||||
"n2": "[%key:component::sensor::entity_component::nitrogen_dioxide::name%]",
|
||||
"o3": "[%key:component::sensor::entity_component::ozone::name%]",
|
||||
"p1": "[%key:component::sensor::entity_component::pm10::name%]",
|
||||
"p2": "[%key:component::sensor::entity_component::pm25::name%]",
|
||||
"s2": "[%key:component::sensor::entity_component::sulphur_dioxide::name%]"
|
||||
}
|
||||
},
|
||||
"pollutant_level": {
|
||||
|
@ -175,7 +175,8 @@ class AzureStorageBackupAgent(BackupAgent):
|
||||
"""Find a blob by backup id."""
|
||||
async for blob in self._client.list_blobs(include="metadata"):
|
||||
if (
|
||||
backup_id == blob.metadata.get("backup_id", "")
|
||||
blob.metadata is not None
|
||||
and backup_id == blob.metadata.get("backup_id", "")
|
||||
and blob.metadata.get("metadata_version") == METADATA_VERSION
|
||||
):
|
||||
return blob
|
||||
|
@ -98,13 +98,13 @@
|
||||
"name": "Preset",
|
||||
"state": {
|
||||
"none": "None",
|
||||
"eco": "Eco",
|
||||
"away": "Away",
|
||||
"home": "[%key:common::state::home%]",
|
||||
"away": "[%key:common::state::not_home%]",
|
||||
"activity": "Activity",
|
||||
"boost": "Boost",
|
||||
"comfort": "Comfort",
|
||||
"home": "[%key:common::state::home%]",
|
||||
"sleep": "Sleep",
|
||||
"activity": "Activity"
|
||||
"eco": "Eco",
|
||||
"sleep": "Sleep"
|
||||
}
|
||||
},
|
||||
"preset_modes": {
|
||||
|
@ -354,6 +354,35 @@ class ChatLog:
|
||||
if self.delta_listener:
|
||||
self.delta_listener(self, asdict(tool_result))
|
||||
|
||||
async def _async_expand_prompt_template(
|
||||
self,
|
||||
llm_context: llm.LLMContext,
|
||||
prompt: str,
|
||||
language: str,
|
||||
user_name: str | None = None,
|
||||
) -> str:
|
||||
try:
|
||||
return template.Template(prompt, self.hass).async_render(
|
||||
{
|
||||
"ha_name": self.hass.config.location_name,
|
||||
"user_name": user_name,
|
||||
"llm_context": llm_context,
|
||||
},
|
||||
parse_result=False,
|
||||
)
|
||||
except TemplateError as err:
|
||||
LOGGER.error("Error rendering prompt: %s", err)
|
||||
intent_response = intent.IntentResponse(language=language)
|
||||
intent_response.async_set_error(
|
||||
intent.IntentResponseErrorCode.UNKNOWN,
|
||||
"Sorry, I had a problem with my template",
|
||||
)
|
||||
raise ConverseError(
|
||||
"Error rendering prompt",
|
||||
conversation_id=self.conversation_id,
|
||||
response=intent_response,
|
||||
) from err
|
||||
|
||||
async def async_update_llm_data(
|
||||
self,
|
||||
conversing_domain: str,
|
||||
@ -409,38 +438,28 @@ class ChatLog:
|
||||
):
|
||||
user_name = user.name
|
||||
|
||||
try:
|
||||
prompt_parts = [
|
||||
template.Template(
|
||||
llm.BASE_PROMPT
|
||||
+ (user_llm_prompt or llm.DEFAULT_INSTRUCTIONS_PROMPT),
|
||||
self.hass,
|
||||
).async_render(
|
||||
{
|
||||
"ha_name": self.hass.config.location_name,
|
||||
"user_name": user_name,
|
||||
"llm_context": llm_context,
|
||||
},
|
||||
parse_result=False,
|
||||
)
|
||||
]
|
||||
|
||||
except TemplateError as err:
|
||||
LOGGER.error("Error rendering prompt: %s", err)
|
||||
intent_response = intent.IntentResponse(language=user_input.language)
|
||||
intent_response.async_set_error(
|
||||
intent.IntentResponseErrorCode.UNKNOWN,
|
||||
"Sorry, I had a problem with my template",
|
||||
prompt_parts = []
|
||||
prompt_parts.append(
|
||||
await self._async_expand_prompt_template(
|
||||
llm_context,
|
||||
(user_llm_prompt or llm.DEFAULT_INSTRUCTIONS_PROMPT),
|
||||
user_input.language,
|
||||
user_name,
|
||||
)
|
||||
raise ConverseError(
|
||||
"Error rendering prompt",
|
||||
conversation_id=self.conversation_id,
|
||||
response=intent_response,
|
||||
) from err
|
||||
)
|
||||
|
||||
if llm_api:
|
||||
prompt_parts.append(llm_api.api_prompt)
|
||||
|
||||
prompt_parts.append(
|
||||
await self._async_expand_prompt_template(
|
||||
llm_context,
|
||||
llm.BASE_PROMPT,
|
||||
user_input.language,
|
||||
user_name,
|
||||
)
|
||||
)
|
||||
|
||||
if extra_system_prompt := (
|
||||
# Take new system prompt if one was given
|
||||
user_input.extra_system_prompt or self.extra_system_prompt
|
||||
|
@ -38,8 +38,8 @@
|
||||
"protect_mode": {
|
||||
"name": "Protect mode",
|
||||
"state": {
|
||||
"away": "Away",
|
||||
"home": "Home",
|
||||
"away": "[%key:common::state::not_home%]",
|
||||
"home": "[%key:common::state::home%]",
|
||||
"schedule": "Schedule"
|
||||
}
|
||||
}
|
||||
|
@ -100,7 +100,11 @@ class ElkEntity(Entity):
|
||||
return {"index": self._element.index + 1}
|
||||
|
||||
def _element_changed(self, element: Element, changeset: dict[str, Any]) -> None:
|
||||
pass
|
||||
"""Handle changes to the element.
|
||||
|
||||
This method is called when the element changes. It should be
|
||||
overridden by subclasses to handle the changes.
|
||||
"""
|
||||
|
||||
@callback
|
||||
def _element_callback(self, element: Element, changeset: dict[str, Any]) -> None:
|
||||
@ -111,7 +115,7 @@ class ElkEntity(Entity):
|
||||
async def async_added_to_hass(self) -> None:
|
||||
"""Register callback for ElkM1 changes and update entity state."""
|
||||
self._element.add_callback(self._element_callback)
|
||||
self._element_callback(self._element, {})
|
||||
self._element_changed(self._element, {})
|
||||
|
||||
@property
|
||||
def device_info(self) -> DeviceInfo:
|
||||
|
@ -25,6 +25,7 @@ from homeassistant.core import (
|
||||
split_entity_id,
|
||||
valid_entity_id,
|
||||
)
|
||||
from homeassistant.exceptions import HomeAssistantError
|
||||
from homeassistant.helpers import entity_registry as er
|
||||
from homeassistant.helpers.entity_platform import AddEntitiesCallback
|
||||
from homeassistant.helpers.event import async_track_state_change_event
|
||||
@ -122,6 +123,10 @@ SOURCE_ADAPTERS: Final = (
|
||||
)
|
||||
|
||||
|
||||
class EntityNotFoundError(HomeAssistantError):
|
||||
"""When a referenced entity was not found."""
|
||||
|
||||
|
||||
class SensorManager:
|
||||
"""Class to handle creation/removal of sensor data."""
|
||||
|
||||
@ -311,43 +316,25 @@ class EnergyCostSensor(SensorEntity):
|
||||
except ValueError:
|
||||
return
|
||||
|
||||
# Determine energy price
|
||||
if self._config["entity_energy_price"] is not None:
|
||||
energy_price_state = self.hass.states.get(
|
||||
self._config["entity_energy_price"]
|
||||
try:
|
||||
energy_price, energy_price_unit = self._get_energy_price(
|
||||
valid_units, default_price_unit
|
||||
)
|
||||
|
||||
if energy_price_state is None:
|
||||
return
|
||||
|
||||
try:
|
||||
energy_price = float(energy_price_state.state)
|
||||
except ValueError:
|
||||
if self._last_energy_sensor_state is None:
|
||||
# Initialize as it's the first time all required entities except
|
||||
# price are in place. This means that the cost will update the first
|
||||
# time the energy is updated after the price entity is in place.
|
||||
self._reset(energy_state)
|
||||
return
|
||||
|
||||
energy_price_unit: str | None = energy_price_state.attributes.get(
|
||||
ATTR_UNIT_OF_MEASUREMENT, ""
|
||||
).partition("/")[2]
|
||||
|
||||
# For backwards compatibility we don't validate the unit of the price
|
||||
# If it is not valid, we assume it's our default price unit.
|
||||
if energy_price_unit not in valid_units:
|
||||
energy_price_unit = default_price_unit
|
||||
|
||||
else:
|
||||
energy_price = cast(float, self._config["number_energy_price"])
|
||||
energy_price_unit = default_price_unit
|
||||
except EntityNotFoundError:
|
||||
return
|
||||
except ValueError:
|
||||
energy_price = None
|
||||
|
||||
if self._last_energy_sensor_state is None:
|
||||
# Initialize as it's the first time all required entities are in place.
|
||||
# Initialize as it's the first time all required entities are in place or
|
||||
# only the price is missing. In the later case, cost will update the first
|
||||
# time the energy is updated after the price entity is in place.
|
||||
self._reset(energy_state)
|
||||
return
|
||||
|
||||
if energy_price is None:
|
||||
return
|
||||
|
||||
energy_unit: str | None = energy_state.attributes.get(ATTR_UNIT_OF_MEASUREMENT)
|
||||
|
||||
if energy_unit is None or energy_unit not in valid_units:
|
||||
@ -383,20 +370,9 @@ class EnergyCostSensor(SensorEntity):
|
||||
old_energy_value = float(self._last_energy_sensor_state.state)
|
||||
cur_value = cast(float, self._attr_native_value)
|
||||
|
||||
if energy_price_unit is None:
|
||||
converted_energy_price = energy_price
|
||||
else:
|
||||
converter: Callable[[float, str, str], float]
|
||||
if energy_unit in VALID_ENERGY_UNITS:
|
||||
converter = unit_conversion.EnergyConverter.convert
|
||||
else:
|
||||
converter = unit_conversion.VolumeConverter.convert
|
||||
|
||||
converted_energy_price = converter(
|
||||
energy_price,
|
||||
energy_unit,
|
||||
energy_price_unit,
|
||||
)
|
||||
converted_energy_price = self._convert_energy_price(
|
||||
energy_price, energy_price_unit, energy_unit
|
||||
)
|
||||
|
||||
self._attr_native_value = (
|
||||
cur_value + (energy - old_energy_value) * converted_energy_price
|
||||
@ -404,6 +380,52 @@ class EnergyCostSensor(SensorEntity):
|
||||
|
||||
self._last_energy_sensor_state = energy_state
|
||||
|
||||
def _get_energy_price(
|
||||
self, valid_units: set[str], default_unit: str | None
|
||||
) -> tuple[float, str | None]:
|
||||
"""Get the energy price.
|
||||
|
||||
Raises:
|
||||
EntityNotFoundError: When the energy price entity is not found.
|
||||
ValueError: When the entity state is not a valid float.
|
||||
|
||||
"""
|
||||
|
||||
if self._config["entity_energy_price"] is None:
|
||||
return cast(float, self._config["number_energy_price"]), default_unit
|
||||
|
||||
energy_price_state = self.hass.states.get(self._config["entity_energy_price"])
|
||||
if energy_price_state is None:
|
||||
raise EntityNotFoundError
|
||||
|
||||
energy_price = float(energy_price_state.state)
|
||||
|
||||
energy_price_unit: str | None = energy_price_state.attributes.get(
|
||||
ATTR_UNIT_OF_MEASUREMENT, ""
|
||||
).partition("/")[2]
|
||||
|
||||
# For backwards compatibility we don't validate the unit of the price
|
||||
# If it is not valid, we assume it's our default price unit.
|
||||
if energy_price_unit not in valid_units:
|
||||
energy_price_unit = default_unit
|
||||
|
||||
return energy_price, energy_price_unit
|
||||
|
||||
def _convert_energy_price(
|
||||
self, energy_price: float, energy_price_unit: str | None, energy_unit: str
|
||||
) -> float:
|
||||
"""Convert the energy price to the correct unit."""
|
||||
if energy_price_unit is None:
|
||||
return energy_price
|
||||
|
||||
converter: Callable[[float, str, str], float]
|
||||
if energy_unit in VALID_ENERGY_UNITS:
|
||||
converter = unit_conversion.EnergyConverter.convert
|
||||
else:
|
||||
converter = unit_conversion.VolumeConverter.convert
|
||||
|
||||
return converter(energy_price, energy_unit, energy_price_unit)
|
||||
|
||||
async def async_added_to_hass(self) -> None:
|
||||
"""Register callbacks."""
|
||||
energy_state = self.hass.states.get(self._config[self._adapter.stat_energy_key])
|
||||
|
@ -128,8 +128,23 @@ class EsphomeFlowHandler(ConfigFlow, domain=DOMAIN):
|
||||
self._password = ""
|
||||
return await self._async_authenticate_or_add()
|
||||
|
||||
if error is None and entry_data.get(CONF_NOISE_PSK):
|
||||
return await self.async_step_reauth_encryption_removed_confirm()
|
||||
return await self.async_step_reauth_confirm()
|
||||
|
||||
async def async_step_reauth_encryption_removed_confirm(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
"""Handle reauthorization flow when encryption was removed."""
|
||||
if user_input is not None:
|
||||
self._noise_psk = None
|
||||
return self._async_get_entry()
|
||||
|
||||
return self.async_show_form(
|
||||
step_id="reauth_encryption_removed_confirm",
|
||||
description_placeholders={"name": self._name},
|
||||
)
|
||||
|
||||
async def async_step_reauth_confirm(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
|
@ -13,6 +13,7 @@ from aioesphomeapi import (
|
||||
APIConnectionError,
|
||||
APIVersion,
|
||||
DeviceInfo as EsphomeDeviceInfo,
|
||||
EncryptionHelloAPIError,
|
||||
EntityInfo,
|
||||
HomeassistantServiceCall,
|
||||
InvalidAuthAPIError,
|
||||
@ -570,6 +571,7 @@ class ESPHomeManager:
|
||||
if isinstance(
|
||||
err,
|
||||
(
|
||||
EncryptionHelloAPIError,
|
||||
RequiresEncryptionAPIError,
|
||||
InvalidEncryptionKeyAPIError,
|
||||
InvalidAuthAPIError,
|
||||
|
@ -16,7 +16,7 @@
|
||||
"loggers": ["aioesphomeapi", "noiseprotocol", "bleak_esphome"],
|
||||
"mqtt": ["esphome/discover/#"],
|
||||
"requirements": [
|
||||
"aioesphomeapi==29.7.0",
|
||||
"aioesphomeapi==29.8.0",
|
||||
"esphome-dashboard-api==1.2.3",
|
||||
"bleak-esphome==2.12.0"
|
||||
],
|
||||
|
@ -43,6 +43,9 @@
|
||||
},
|
||||
"description": "The ESPHome device {name} enabled transport encryption or changed the encryption key. Please enter the updated key. You can find it in the ESPHome Dashboard or in your device configuration."
|
||||
},
|
||||
"reauth_encryption_removed_confirm": {
|
||||
"description": "The ESPHome device {name} disabled transport encryption. Please confirm that you want to remove the encryption key and allow unencrypted connections."
|
||||
},
|
||||
"discovery_confirm": {
|
||||
"description": "Do you want to add the ESPHome node `{name}` to Home Assistant?",
|
||||
"title": "Discovered ESPHome node"
|
||||
|
@ -238,6 +238,8 @@ SENSOR_TYPES: tuple[FritzSensorEntityDescription, ...] = (
|
||||
key="link_noise_margin_sent",
|
||||
translation_key="link_noise_margin_sent",
|
||||
native_unit_of_measurement=SIGNAL_STRENGTH_DECIBELS,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
entity_registry_enabled_default=False,
|
||||
value_fn=_retrieve_link_noise_margin_sent_state,
|
||||
is_suitable=lambda info: info.wan_enabled and info.connection == DSL_CONNECTION,
|
||||
),
|
||||
@ -245,6 +247,8 @@ SENSOR_TYPES: tuple[FritzSensorEntityDescription, ...] = (
|
||||
key="link_noise_margin_received",
|
||||
translation_key="link_noise_margin_received",
|
||||
native_unit_of_measurement=SIGNAL_STRENGTH_DECIBELS,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
entity_registry_enabled_default=False,
|
||||
value_fn=_retrieve_link_noise_margin_received_state,
|
||||
is_suitable=lambda info: info.wan_enabled and info.connection == DSL_CONNECTION,
|
||||
),
|
||||
@ -252,6 +256,8 @@ SENSOR_TYPES: tuple[FritzSensorEntityDescription, ...] = (
|
||||
key="link_attenuation_sent",
|
||||
translation_key="link_attenuation_sent",
|
||||
native_unit_of_measurement=SIGNAL_STRENGTH_DECIBELS,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
entity_registry_enabled_default=False,
|
||||
value_fn=_retrieve_link_attenuation_sent_state,
|
||||
is_suitable=lambda info: info.wan_enabled and info.connection == DSL_CONNECTION,
|
||||
),
|
||||
@ -259,6 +265,8 @@ SENSOR_TYPES: tuple[FritzSensorEntityDescription, ...] = (
|
||||
key="link_attenuation_received",
|
||||
translation_key="link_attenuation_received",
|
||||
native_unit_of_measurement=SIGNAL_STRENGTH_DECIBELS,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
entity_registry_enabled_default=False,
|
||||
value_fn=_retrieve_link_attenuation_received_state,
|
||||
is_suitable=lambda info: info.wan_enabled and info.connection == DSL_CONNECTION,
|
||||
),
|
||||
|
@ -28,10 +28,10 @@
|
||||
"presets": {
|
||||
"title": "Temperature presets",
|
||||
"data": {
|
||||
"away_temp": "[%key:component::climate::entity_component::_::state_attributes::preset_mode::state::away%]",
|
||||
"home_temp": "[%key:common::state::home%]",
|
||||
"away_temp": "[%key:common::state::not_home%]",
|
||||
"comfort_temp": "[%key:component::climate::entity_component::_::state_attributes::preset_mode::state::comfort%]",
|
||||
"eco_temp": "[%key:component::climate::entity_component::_::state_attributes::preset_mode::state::eco%]",
|
||||
"home_temp": "[%key:common::state::home%]",
|
||||
"sleep_temp": "[%key:component::climate::entity_component::_::state_attributes::preset_mode::state::sleep%]",
|
||||
"activity_temp": "[%key:component::climate::entity_component::_::state_attributes::preset_mode::state::activity%]"
|
||||
}
|
||||
@ -63,10 +63,10 @@
|
||||
"presets": {
|
||||
"title": "[%key:component::generic_thermostat::config::step::presets::title%]",
|
||||
"data": {
|
||||
"away_temp": "[%key:component::climate::entity_component::_::state_attributes::preset_mode::state::away%]",
|
||||
"home_temp": "[%key:common::state::home%]",
|
||||
"away_temp": "[%key:common::state::not_home%]",
|
||||
"comfort_temp": "[%key:component::climate::entity_component::_::state_attributes::preset_mode::state::comfort%]",
|
||||
"eco_temp": "[%key:component::climate::entity_component::_::state_attributes::preset_mode::state::eco%]",
|
||||
"home_temp": "[%key:common::state::home%]",
|
||||
"sleep_temp": "[%key:component::climate::entity_component::_::state_attributes::preset_mode::state::sleep%]",
|
||||
"activity_temp": "[%key:component::climate::entity_component::_::state_attributes::preset_mode::state::activity%]"
|
||||
}
|
||||
|
@ -356,6 +356,15 @@ class GoogleGenerativeAIConversationEntity(
|
||||
|
||||
messages.append(_convert_content(chat_content))
|
||||
|
||||
# The SDK requires the first message to be a user message
|
||||
# This is not the case if user used `start_conversation`
|
||||
# Workaround from https://github.com/googleapis/python-genai/issues/529#issuecomment-2740964537
|
||||
if messages and messages[0].role != "user":
|
||||
messages.insert(
|
||||
0,
|
||||
Content(role="user", parts=[Part.from_text(text=" ")]),
|
||||
)
|
||||
|
||||
if tool_results:
|
||||
messages.append(_create_google_tool_response_content(tool_results))
|
||||
generateContentConfig = GenerateContentConfig(
|
||||
|
@ -55,7 +55,7 @@
|
||||
"preset_mode": {
|
||||
"state": {
|
||||
"hold": "Hold",
|
||||
"away": "[%key:component::climate::entity_component::_::state_attributes::preset_mode::state::away%]",
|
||||
"away": "[%key:common::state::not_home%]",
|
||||
"none": "[%key:component::climate::entity_component::_::state_attributes::preset_mode::state::none%]"
|
||||
}
|
||||
}
|
||||
|
@ -63,14 +63,14 @@
|
||||
"name": "Mode",
|
||||
"state": {
|
||||
"normal": "Normal",
|
||||
"eco": "Eco",
|
||||
"away": "Away",
|
||||
"home": "[%key:common::state::home%]",
|
||||
"away": "[%key:common::state::not_home%]",
|
||||
"auto": "Auto",
|
||||
"baby": "Baby",
|
||||
"boost": "Boost",
|
||||
"comfort": "Comfort",
|
||||
"home": "[%key:common::state::home%]",
|
||||
"sleep": "Sleep",
|
||||
"auto": "Auto",
|
||||
"baby": "Baby"
|
||||
"eco": "Eco",
|
||||
"sleep": "Sleep"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -87,11 +87,11 @@
|
||||
"state": {
|
||||
"available": "Available",
|
||||
"charging": "[%key:common::state::charging%]",
|
||||
"connected": "Connected",
|
||||
"connected": "[%key:common::state::connected%]",
|
||||
"error": "Error",
|
||||
"locked": "Locked",
|
||||
"locked": "[%key:common::state::locked%]",
|
||||
"need_auth": "Waiting for authentication",
|
||||
"paused": "Paused",
|
||||
"paused": "[%key:common::state::paused%]",
|
||||
"paused_by_scheduler": "Paused by scheduler",
|
||||
"updating_firmware": "Updating firmware"
|
||||
}
|
||||
|
@ -3,12 +3,15 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from dataclasses import dataclass
|
||||
from typing import cast
|
||||
|
||||
from homeassistant.components.application_credentials import AuthorizationServer
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers import llm
|
||||
from homeassistant.helpers import config_entry_oauth2_flow, llm
|
||||
|
||||
from .const import DOMAIN
|
||||
from .coordinator import ModelContextProtocolCoordinator
|
||||
from .application_credentials import authorization_server_context
|
||||
from .const import CONF_ACCESS_TOKEN, CONF_AUTHORIZATION_URL, CONF_TOKEN_URL, DOMAIN
|
||||
from .coordinator import ModelContextProtocolCoordinator, TokenManager
|
||||
from .types import ModelContextProtocolConfigEntry
|
||||
|
||||
__all__ = [
|
||||
@ -20,11 +23,45 @@ __all__ = [
|
||||
API_PROMPT = "The following tools are available from a remote server named {name}."
|
||||
|
||||
|
||||
async def async_get_config_entry_implementation(
|
||||
hass: HomeAssistant, entry: ModelContextProtocolConfigEntry
|
||||
) -> config_entry_oauth2_flow.AbstractOAuth2Implementation | None:
|
||||
"""OAuth implementation for the config entry."""
|
||||
if "auth_implementation" not in entry.data:
|
||||
return None
|
||||
with authorization_server_context(
|
||||
AuthorizationServer(
|
||||
authorize_url=entry.data[CONF_AUTHORIZATION_URL],
|
||||
token_url=entry.data[CONF_TOKEN_URL],
|
||||
)
|
||||
):
|
||||
return await config_entry_oauth2_flow.async_get_config_entry_implementation(
|
||||
hass, entry
|
||||
)
|
||||
|
||||
|
||||
async def _create_token_manager(
|
||||
hass: HomeAssistant, entry: ModelContextProtocolConfigEntry
|
||||
) -> TokenManager | None:
|
||||
"""Create a OAuth token manager for the config entry if the server requires authentication."""
|
||||
if not (implementation := await async_get_config_entry_implementation(hass, entry)):
|
||||
return None
|
||||
|
||||
session = config_entry_oauth2_flow.OAuth2Session(hass, entry, implementation)
|
||||
|
||||
async def token_manager() -> str:
|
||||
await session.async_ensure_token_valid()
|
||||
return cast(str, session.token[CONF_ACCESS_TOKEN])
|
||||
|
||||
return token_manager
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
hass: HomeAssistant, entry: ModelContextProtocolConfigEntry
|
||||
) -> bool:
|
||||
"""Set up Model Context Protocol from a config entry."""
|
||||
coordinator = ModelContextProtocolCoordinator(hass, entry)
|
||||
token_manager = await _create_token_manager(hass, entry)
|
||||
coordinator = ModelContextProtocolCoordinator(hass, entry, token_manager)
|
||||
await coordinator.async_config_entry_first_refresh()
|
||||
|
||||
unsub = llm.async_register_api(
|
||||
|
35
homeassistant/components/mcp/application_credentials.py
Normal file
35
homeassistant/components/mcp/application_credentials.py
Normal file
@ -0,0 +1,35 @@
|
||||
"""Application credentials platform for Model Context Protocol."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from collections.abc import Generator
|
||||
from contextlib import contextmanager
|
||||
import contextvars
|
||||
|
||||
from homeassistant.components.application_credentials import AuthorizationServer
|
||||
from homeassistant.core import HomeAssistant
|
||||
|
||||
CONF_ACTIVE_AUTHORIZATION_SERVER = "active_authorization_server"
|
||||
|
||||
_mcp_context: contextvars.ContextVar[AuthorizationServer] = contextvars.ContextVar(
|
||||
"mcp_authorization_server_context"
|
||||
)
|
||||
|
||||
|
||||
@contextmanager
|
||||
def authorization_server_context(
|
||||
authorization_server: AuthorizationServer,
|
||||
) -> Generator[None]:
|
||||
"""Context manager for setting the active authorization server."""
|
||||
token = _mcp_context.set(authorization_server)
|
||||
try:
|
||||
yield
|
||||
finally:
|
||||
_mcp_context.reset(token)
|
||||
|
||||
|
||||
async def async_get_authorization_server(hass: HomeAssistant) -> AuthorizationServer:
|
||||
"""Return authorization server, for the default auth implementation."""
|
||||
if _mcp_context.get() is None:
|
||||
raise RuntimeError("No MCP authorization server set in context")
|
||||
return _mcp_context.get()
|
@ -2,20 +2,29 @@
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from collections.abc import Mapping
|
||||
import logging
|
||||
from typing import Any
|
||||
from typing import Any, cast
|
||||
|
||||
import httpx
|
||||
import voluptuous as vol
|
||||
from yarl import URL
|
||||
|
||||
from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
|
||||
from homeassistant.const import CONF_URL
|
||||
from homeassistant.components.application_credentials import AuthorizationServer
|
||||
from homeassistant.config_entries import SOURCE_REAUTH, ConfigFlowResult
|
||||
from homeassistant.const import CONF_TOKEN, CONF_URL
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import HomeAssistantError
|
||||
from homeassistant.helpers import config_validation as cv
|
||||
from homeassistant.helpers.config_entry_oauth2_flow import (
|
||||
AbstractOAuth2FlowHandler,
|
||||
async_get_implementations,
|
||||
)
|
||||
|
||||
from .const import DOMAIN
|
||||
from .coordinator import mcp_client
|
||||
from . import async_get_config_entry_implementation
|
||||
from .application_credentials import authorization_server_context
|
||||
from .const import CONF_ACCESS_TOKEN, CONF_AUTHORIZATION_URL, CONF_TOKEN_URL, DOMAIN
|
||||
from .coordinator import TokenManager, mcp_client
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
@ -25,8 +34,62 @@ STEP_USER_DATA_SCHEMA = vol.Schema(
|
||||
}
|
||||
)
|
||||
|
||||
# OAuth server discovery endpoint for rfc8414
|
||||
OAUTH_DISCOVERY_ENDPOINT = ".well-known/oauth-authorization-server"
|
||||
MCP_DISCOVERY_HEADERS = {
|
||||
"MCP-Protocol-Version": "2025-03-26",
|
||||
}
|
||||
|
||||
async def validate_input(hass: HomeAssistant, data: dict[str, Any]) -> dict[str, Any]:
|
||||
|
||||
async def async_discover_oauth_config(
|
||||
hass: HomeAssistant, mcp_server_url: str
|
||||
) -> AuthorizationServer:
|
||||
"""Discover the OAuth configuration for the MCP server.
|
||||
|
||||
This implements the functionality in the MCP spec for discovery. If the MCP server URL
|
||||
is https://api.example.com/v1/mcp, then:
|
||||
- The authorization base URL is https://api.example.com
|
||||
- The metadata endpoint MUST be at https://api.example.com/.well-known/oauth-authorization-server
|
||||
- For servers that do not implement OAuth 2.0 Authorization Server Metadata, the client uses
|
||||
default paths relative to the authorization base URL.
|
||||
"""
|
||||
parsed_url = URL(mcp_server_url)
|
||||
discovery_endpoint = str(parsed_url.with_path(OAUTH_DISCOVERY_ENDPOINT))
|
||||
try:
|
||||
async with httpx.AsyncClient(headers=MCP_DISCOVERY_HEADERS) as client:
|
||||
response = await client.get(discovery_endpoint)
|
||||
response.raise_for_status()
|
||||
except httpx.TimeoutException as error:
|
||||
_LOGGER.info("Timeout connecting to MCP server: %s", error)
|
||||
raise TimeoutConnectError from error
|
||||
except httpx.HTTPStatusError as error:
|
||||
if error.response.status_code == 404:
|
||||
_LOGGER.info("Authorization Server Metadata not found, using default paths")
|
||||
return AuthorizationServer(
|
||||
authorize_url=str(parsed_url.with_path("/authorize")),
|
||||
token_url=str(parsed_url.with_path("/token")),
|
||||
)
|
||||
raise CannotConnect from error
|
||||
except httpx.HTTPError as error:
|
||||
_LOGGER.info("Cannot discover OAuth configuration: %s", error)
|
||||
raise CannotConnect from error
|
||||
|
||||
data = response.json()
|
||||
authorize_url = data["authorization_endpoint"]
|
||||
token_url = data["token_endpoint"]
|
||||
if authorize_url.startswith("/"):
|
||||
authorize_url = str(parsed_url.with_path(authorize_url))
|
||||
if token_url.startswith("/"):
|
||||
token_url = str(parsed_url.with_path(token_url))
|
||||
return AuthorizationServer(
|
||||
authorize_url=authorize_url,
|
||||
token_url=token_url,
|
||||
)
|
||||
|
||||
|
||||
async def validate_input(
|
||||
hass: HomeAssistant, data: dict[str, Any], token_manager: TokenManager | None = None
|
||||
) -> dict[str, Any]:
|
||||
"""Validate the user input and connect to the MCP server."""
|
||||
url = data[CONF_URL]
|
||||
try:
|
||||
@ -34,7 +97,7 @@ async def validate_input(hass: HomeAssistant, data: dict[str, Any]) -> dict[str,
|
||||
except vol.Invalid as error:
|
||||
raise InvalidUrl from error
|
||||
try:
|
||||
async with mcp_client(url) as session:
|
||||
async with mcp_client(url, token_manager=token_manager) as session:
|
||||
response = await session.initialize()
|
||||
except httpx.TimeoutException as error:
|
||||
_LOGGER.info("Timeout connecting to MCP server: %s", error)
|
||||
@ -56,10 +119,17 @@ async def validate_input(hass: HomeAssistant, data: dict[str, Any]) -> dict[str,
|
||||
return {"title": response.serverInfo.name}
|
||||
|
||||
|
||||
class ModelContextProtocolConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
class ModelContextProtocolConfigFlow(AbstractOAuth2FlowHandler, domain=DOMAIN):
|
||||
"""Handle a config flow for Model Context Protocol."""
|
||||
|
||||
VERSION = 1
|
||||
DOMAIN = DOMAIN
|
||||
logger = _LOGGER
|
||||
|
||||
def __init__(self) -> None:
|
||||
"""Initialize the config flow."""
|
||||
super().__init__()
|
||||
self.data: dict[str, Any] = {}
|
||||
|
||||
async def async_step_user(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
@ -76,7 +146,8 @@ class ModelContextProtocolConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
except CannotConnect:
|
||||
errors["base"] = "cannot_connect"
|
||||
except InvalidAuth:
|
||||
return self.async_abort(reason="invalid_auth")
|
||||
self.data[CONF_URL] = user_input[CONF_URL]
|
||||
return await self.async_step_auth_discovery()
|
||||
except MissingCapabilities:
|
||||
return self.async_abort(reason="missing_capabilities")
|
||||
except Exception:
|
||||
@ -90,6 +161,130 @@ class ModelContextProtocolConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
step_id="user", data_schema=STEP_USER_DATA_SCHEMA, errors=errors
|
||||
)
|
||||
|
||||
async def async_step_auth_discovery(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
"""Handle the OAuth server discovery step.
|
||||
|
||||
Since this OAuth server requires authentication, this step will attempt
|
||||
to find the OAuth medata then run the OAuth authentication flow.
|
||||
"""
|
||||
try:
|
||||
authorization_server = await async_discover_oauth_config(
|
||||
self.hass, self.data[CONF_URL]
|
||||
)
|
||||
except TimeoutConnectError:
|
||||
return self.async_abort(reason="timeout_connect")
|
||||
except CannotConnect:
|
||||
return self.async_abort(reason="cannot_connect")
|
||||
except Exception:
|
||||
_LOGGER.exception("Unexpected exception")
|
||||
return self.async_abort(reason="unknown")
|
||||
else:
|
||||
_LOGGER.info("OAuth configuration: %s", authorization_server)
|
||||
self.data.update(
|
||||
{
|
||||
CONF_AUTHORIZATION_URL: authorization_server.authorize_url,
|
||||
CONF_TOKEN_URL: authorization_server.token_url,
|
||||
}
|
||||
)
|
||||
return await self.async_step_credentials_choice()
|
||||
|
||||
def authorization_server(self) -> AuthorizationServer:
|
||||
"""Return the authorization server provided by the MCP server."""
|
||||
return AuthorizationServer(
|
||||
self.data[CONF_AUTHORIZATION_URL],
|
||||
self.data[CONF_TOKEN_URL],
|
||||
)
|
||||
|
||||
async def async_step_credentials_choice(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
"""Step to ask they user if they would like to add credentials.
|
||||
|
||||
This is needed since we can't automatically assume existing credentials
|
||||
should be used given they may be for another existing server.
|
||||
"""
|
||||
with authorization_server_context(self.authorization_server()):
|
||||
if not await async_get_implementations(self.hass, self.DOMAIN):
|
||||
return await self.async_step_new_credentials()
|
||||
return self.async_show_menu(
|
||||
step_id="credentials_choice",
|
||||
menu_options=["pick_implementation", "new_credentials"],
|
||||
)
|
||||
|
||||
async def async_step_new_credentials(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
"""Step to take the frontend flow to enter new credentials."""
|
||||
return self.async_abort(reason="missing_credentials")
|
||||
|
||||
async def async_step_pick_implementation(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
"""Handle the pick implementation step.
|
||||
|
||||
This exists to dynamically set application credentials Authorization Server
|
||||
based on the values form the OAuth discovery step.
|
||||
"""
|
||||
with authorization_server_context(self.authorization_server()):
|
||||
return await super().async_step_pick_implementation(user_input)
|
||||
|
||||
async def async_oauth_create_entry(self, data: dict) -> ConfigFlowResult:
|
||||
"""Create an entry for the flow.
|
||||
|
||||
Ok to override if you want to fetch extra info or even add another step.
|
||||
"""
|
||||
config_entry_data = {
|
||||
**self.data,
|
||||
**data,
|
||||
}
|
||||
|
||||
async def token_manager() -> str:
|
||||
return cast(str, data[CONF_TOKEN][CONF_ACCESS_TOKEN])
|
||||
|
||||
try:
|
||||
info = await validate_input(self.hass, config_entry_data, token_manager)
|
||||
except TimeoutConnectError:
|
||||
return self.async_abort(reason="timeout_connect")
|
||||
except CannotConnect:
|
||||
return self.async_abort(reason="cannot_connect")
|
||||
except MissingCapabilities:
|
||||
return self.async_abort(reason="missing_capabilities")
|
||||
except Exception:
|
||||
_LOGGER.exception("Unexpected exception")
|
||||
return self.async_abort(reason="unknown")
|
||||
|
||||
# Unique id based on the application credentials OAuth Client ID
|
||||
if self.source == SOURCE_REAUTH:
|
||||
return self.async_update_reload_and_abort(
|
||||
self._get_reauth_entry(), data=config_entry_data
|
||||
)
|
||||
await self.async_set_unique_id(config_entry_data["auth_implementation"])
|
||||
return self.async_create_entry(
|
||||
title=info["title"],
|
||||
data=config_entry_data,
|
||||
)
|
||||
|
||||
async def async_step_reauth(
|
||||
self, entry_data: Mapping[str, Any]
|
||||
) -> ConfigFlowResult:
|
||||
"""Perform reauth upon an API authentication error."""
|
||||
return await self.async_step_reauth_confirm()
|
||||
|
||||
async def async_step_reauth_confirm(
|
||||
self, user_input: Mapping[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
"""Confirm reauth dialog."""
|
||||
if user_input is None:
|
||||
return self.async_show_form(step_id="reauth_confirm")
|
||||
config_entry = self._get_reauth_entry()
|
||||
self.data = {**config_entry.data}
|
||||
self.flow_impl = await async_get_config_entry_implementation( # type: ignore[assignment]
|
||||
self.hass, config_entry
|
||||
)
|
||||
return await self.async_step_auth()
|
||||
|
||||
|
||||
class InvalidUrl(HomeAssistantError):
|
||||
"""Error to indicate the URL format is invalid."""
|
||||
|
@ -1,3 +1,7 @@
|
||||
"""Constants for the Model Context Protocol integration."""
|
||||
|
||||
DOMAIN = "mcp"
|
||||
|
||||
CONF_ACCESS_TOKEN = "access_token"
|
||||
CONF_AUTHORIZATION_URL = "authorization_url"
|
||||
CONF_TOKEN_URL = "token_url"
|
||||
|
@ -1,7 +1,7 @@
|
||||
"""Types for the Model Context Protocol integration."""
|
||||
|
||||
import asyncio
|
||||
from collections.abc import AsyncGenerator
|
||||
from collections.abc import AsyncGenerator, Awaitable, Callable
|
||||
from contextlib import asynccontextmanager
|
||||
import datetime
|
||||
import logging
|
||||
@ -15,7 +15,7 @@ from voluptuous_openapi import convert_to_voluptuous
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import CONF_URL
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import HomeAssistantError
|
||||
from homeassistant.exceptions import ConfigEntryAuthFailed, HomeAssistantError
|
||||
from homeassistant.helpers import llm
|
||||
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
|
||||
from homeassistant.util.json import JsonObjectType
|
||||
@ -27,16 +27,28 @@ _LOGGER = logging.getLogger(__name__)
|
||||
UPDATE_INTERVAL = datetime.timedelta(minutes=30)
|
||||
TIMEOUT = 10
|
||||
|
||||
TokenManager = Callable[[], Awaitable[str]]
|
||||
|
||||
|
||||
@asynccontextmanager
|
||||
async def mcp_client(url: str) -> AsyncGenerator[ClientSession]:
|
||||
async def mcp_client(
|
||||
url: str,
|
||||
token_manager: TokenManager | None = None,
|
||||
) -> AsyncGenerator[ClientSession]:
|
||||
"""Create a server-sent event MCP client.
|
||||
|
||||
This is an asynccontext manager that exists to wrap other async context managers
|
||||
so that the coordinator has a single object to manage.
|
||||
"""
|
||||
headers: dict[str, str] = {}
|
||||
if token_manager is not None:
|
||||
token = await token_manager()
|
||||
headers["Authorization"] = f"Bearer {token}"
|
||||
try:
|
||||
async with sse_client(url=url) as streams, ClientSession(*streams) as session:
|
||||
async with (
|
||||
sse_client(url=url, headers=headers) as streams,
|
||||
ClientSession(*streams) as session,
|
||||
):
|
||||
await session.initialize()
|
||||
yield session
|
||||
except ExceptionGroup as err:
|
||||
@ -53,12 +65,14 @@ class ModelContextProtocolTool(llm.Tool):
|
||||
description: str | None,
|
||||
parameters: vol.Schema,
|
||||
server_url: str,
|
||||
token_manager: TokenManager | None = None,
|
||||
) -> None:
|
||||
"""Initialize the tool."""
|
||||
self.name = name
|
||||
self.description = description
|
||||
self.parameters = parameters
|
||||
self.server_url = server_url
|
||||
self.token_manager = token_manager
|
||||
|
||||
async def async_call(
|
||||
self,
|
||||
@ -69,7 +83,7 @@ class ModelContextProtocolTool(llm.Tool):
|
||||
"""Call the tool."""
|
||||
try:
|
||||
async with asyncio.timeout(TIMEOUT):
|
||||
async with mcp_client(self.server_url) as session:
|
||||
async with mcp_client(self.server_url, self.token_manager) as session:
|
||||
result = await session.call_tool(
|
||||
tool_input.tool_name, tool_input.tool_args
|
||||
)
|
||||
@ -87,7 +101,12 @@ class ModelContextProtocolCoordinator(DataUpdateCoordinator[list[llm.Tool]]):
|
||||
|
||||
config_entry: ConfigEntry
|
||||
|
||||
def __init__(self, hass: HomeAssistant, config_entry: ConfigEntry) -> None:
|
||||
def __init__(
|
||||
self,
|
||||
hass: HomeAssistant,
|
||||
config_entry: ConfigEntry,
|
||||
token_manager: TokenManager | None = None,
|
||||
) -> None:
|
||||
"""Initialize ModelContextProtocolCoordinator."""
|
||||
super().__init__(
|
||||
hass,
|
||||
@ -96,6 +115,7 @@ class ModelContextProtocolCoordinator(DataUpdateCoordinator[list[llm.Tool]]):
|
||||
config_entry=config_entry,
|
||||
update_interval=UPDATE_INTERVAL,
|
||||
)
|
||||
self.token_manager = token_manager
|
||||
|
||||
async def _async_update_data(self) -> list[llm.Tool]:
|
||||
"""Fetch data from API endpoint.
|
||||
@ -105,11 +125,20 @@ class ModelContextProtocolCoordinator(DataUpdateCoordinator[list[llm.Tool]]):
|
||||
"""
|
||||
try:
|
||||
async with asyncio.timeout(TIMEOUT):
|
||||
async with mcp_client(self.config_entry.data[CONF_URL]) as session:
|
||||
async with mcp_client(
|
||||
self.config_entry.data[CONF_URL], self.token_manager
|
||||
) as session:
|
||||
result = await session.list_tools()
|
||||
except TimeoutError as error:
|
||||
_LOGGER.debug("Timeout when listing tools: %s", error)
|
||||
raise UpdateFailed(f"Timeout when listing tools: {error}") from error
|
||||
except httpx.HTTPStatusError as error:
|
||||
_LOGGER.debug("Error communicating with API: %s", error)
|
||||
if error.response.status_code == 401 and self.token_manager is not None:
|
||||
raise ConfigEntryAuthFailed(
|
||||
"The MCP server requires authentication"
|
||||
) from error
|
||||
raise UpdateFailed(f"Error communicating with API: {error}") from error
|
||||
except httpx.HTTPError as err:
|
||||
_LOGGER.debug("Error communicating with API: %s", err)
|
||||
raise UpdateFailed(f"Error communicating with API: {err}") from err
|
||||
@ -129,6 +158,7 @@ class ModelContextProtocolCoordinator(DataUpdateCoordinator[list[llm.Tool]]):
|
||||
tool.description,
|
||||
parameters,
|
||||
self.config_entry.data[CONF_URL],
|
||||
self.token_manager,
|
||||
)
|
||||
)
|
||||
return tools
|
||||
|
@ -3,6 +3,7 @@
|
||||
"name": "Model Context Protocol",
|
||||
"codeowners": ["@allenporter"],
|
||||
"config_flow": true,
|
||||
"dependencies": ["application_credentials"],
|
||||
"documentation": "https://www.home-assistant.io/integrations/mcp",
|
||||
"iot_class": "local_polling",
|
||||
"quality_scale": "silver",
|
||||
|
@ -44,9 +44,7 @@ rules:
|
||||
parallel-updates:
|
||||
status: exempt
|
||||
comment: Integration does not have platforms.
|
||||
reauthentication-flow:
|
||||
status: exempt
|
||||
comment: Integration does not support authentication.
|
||||
reauthentication-flow: done
|
||||
test-coverage: done
|
||||
|
||||
# Gold
|
||||
|
@ -8,6 +8,15 @@
|
||||
"data_description": {
|
||||
"url": "The remote MCP server URL for the SSE endpoint, for example http://example/sse"
|
||||
}
|
||||
},
|
||||
"pick_implementation": {
|
||||
"title": "[%key:common::config_flow::title::oauth2_pick_implementation%]",
|
||||
"data": {
|
||||
"implementation": "Credentials"
|
||||
},
|
||||
"data_description": {
|
||||
"implementation": "The credentials to use for the OAuth2 flow"
|
||||
}
|
||||
}
|
||||
},
|
||||
"error": {
|
||||
@ -17,9 +26,15 @@
|
||||
"invalid_url": "Must be a valid MCP server URL e.g. https://example.com/sse"
|
||||
},
|
||||
"abort": {
|
||||
"already_configured": "[%key:common::config_flow::abort::already_configured_device%]",
|
||||
"cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
|
||||
"invalid_auth": "[%key:common::config_flow::error::invalid_auth%]",
|
||||
"missing_capabilities": "The MCP server does not support a required capability (Tools)",
|
||||
"already_configured": "[%key:common::config_flow::abort::already_configured_device%]"
|
||||
"missing_credentials": "[%key:common::config_flow::abort::oauth2_missing_credentials%]",
|
||||
"reauth_account_mismatch": "The authenticated user does not match the MCP Server user that needed re-authentication.",
|
||||
"reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]",
|
||||
"timeout_connect": "[%key:common::config_flow::error::timeout_connect%]",
|
||||
"unknown": "[%key:common::config_flow::error::unknown%]"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -72,8 +72,8 @@
|
||||
"connection": {
|
||||
"name": "Connection status",
|
||||
"state": {
|
||||
"connected": "Connected",
|
||||
"disconnected": "Disconnected",
|
||||
"connected": "[%key:common::state::connected%]",
|
||||
"disconnected": "[%key:common::state::disconnected%]",
|
||||
"connecting": "Connecting",
|
||||
"disconnecting": "Disconnecting"
|
||||
}
|
||||
|
@ -46,7 +46,7 @@
|
||||
"global_override": {
|
||||
"name": "Global override",
|
||||
"state": {
|
||||
"away": "[%key:component::climate::entity_component::_::state_attributes::preset_mode::state::away%]",
|
||||
"away": "[%key:common::state::not_home%]",
|
||||
"comfort": "[%key:component::climate::entity_component::_::state_attributes::preset_mode::state::comfort%]",
|
||||
"eco": "[%key:component::climate::entity_component::_::state_attributes::preset_mode::state::eco%]",
|
||||
"none": "[%key:component::climate::entity_component::_::state_attributes::preset_mode::state::none%]"
|
||||
|
@ -23,14 +23,10 @@ from homeassistant.const import (
|
||||
from homeassistant.core import Event, HomeAssistant, callback
|
||||
from homeassistant.exceptions import ConfigEntryAuthFailed, HomeAssistantError
|
||||
from homeassistant.helpers import device_registry as dr
|
||||
from homeassistant.helpers.device_registry import CONNECTION_NETWORK_MAC, format_mac
|
||||
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
|
||||
|
||||
from .const import (
|
||||
DEFAULT_SCAN_INTERVAL,
|
||||
DOMAIN,
|
||||
INTEGRATION_SUPPORTED_COMMANDS,
|
||||
PLATFORMS,
|
||||
)
|
||||
from .const import DOMAIN, INTEGRATION_SUPPORTED_COMMANDS, PLATFORMS
|
||||
|
||||
NUT_FAKE_SERIAL = ["unknown", "blank"]
|
||||
|
||||
@ -68,7 +64,10 @@ async def async_setup_entry(hass: HomeAssistant, entry: NutConfigEntry) -> bool:
|
||||
alias = config.get(CONF_ALIAS)
|
||||
username = config.get(CONF_USERNAME)
|
||||
password = config.get(CONF_PASSWORD)
|
||||
scan_interval = entry.options.get(CONF_SCAN_INTERVAL, DEFAULT_SCAN_INTERVAL)
|
||||
if CONF_SCAN_INTERVAL in entry.options:
|
||||
current_options = {**entry.options}
|
||||
current_options.pop(CONF_SCAN_INTERVAL)
|
||||
hass.config_entries.async_update_entry(entry, options=current_options)
|
||||
|
||||
data = PyNUTData(host, port, alias, username, password)
|
||||
|
||||
@ -101,7 +100,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: NutConfigEntry) -> bool:
|
||||
config_entry=entry,
|
||||
name="NUT resource status",
|
||||
update_method=async_update_data,
|
||||
update_interval=timedelta(seconds=scan_interval),
|
||||
update_interval=timedelta(seconds=60),
|
||||
always_update=False,
|
||||
)
|
||||
|
||||
@ -122,6 +121,9 @@ async def async_setup_entry(hass: HomeAssistant, entry: NutConfigEntry) -> bool:
|
||||
if unique_id is None:
|
||||
unique_id = entry.entry_id
|
||||
|
||||
elif entry.unique_id is None:
|
||||
hass.config_entries.async_update_entry(entry, unique_id=unique_id)
|
||||
|
||||
if username is not None and password is not None:
|
||||
# Dynamically add outlet integration commands
|
||||
additional_integration_commands = set()
|
||||
@ -155,10 +157,15 @@ async def async_setup_entry(hass: HomeAssistant, entry: NutConfigEntry) -> bool:
|
||||
coordinator, data, unique_id, user_available_commands
|
||||
)
|
||||
|
||||
connections: set[tuple[str, str]] | None = None
|
||||
if data.device_info.mac_address is not None:
|
||||
connections = {(CONNECTION_NETWORK_MAC, data.device_info.mac_address)}
|
||||
|
||||
device_registry = dr.async_get(hass)
|
||||
device_registry.async_get_or_create(
|
||||
config_entry_id=entry.entry_id,
|
||||
identifiers={(DOMAIN, unique_id)},
|
||||
connections=connections,
|
||||
name=data.name.title(),
|
||||
manufacturer=data.device_info.manufacturer,
|
||||
model=data.device_info.model,
|
||||
@ -246,6 +253,7 @@ class NUTDeviceInfo:
|
||||
model_id: str | None = None
|
||||
firmware: str | None = None
|
||||
serial: str | None = None
|
||||
mac_address: str | None = None
|
||||
device_location: str | None = None
|
||||
|
||||
|
||||
@ -309,9 +317,18 @@ class PyNUTData:
|
||||
model_id: str | None = self._status.get("device.part")
|
||||
firmware = _firmware_from_status(self._status)
|
||||
serial = _serial_from_status(self._status)
|
||||
mac_address: str | None = self._status.get("device.macaddr")
|
||||
if mac_address is not None:
|
||||
mac_address = format_mac(mac_address.rstrip().replace(" ", ":"))
|
||||
device_location: str | None = self._status.get("device.location")
|
||||
return NUTDeviceInfo(
|
||||
manufacturer, model, model_id, firmware, serial, device_location
|
||||
manufacturer,
|
||||
model,
|
||||
model_id,
|
||||
firmware,
|
||||
serial,
|
||||
mac_address,
|
||||
device_location,
|
||||
)
|
||||
|
||||
async def _async_get_status(self) -> dict[str, str]:
|
||||
|
@ -9,27 +9,21 @@ from typing import Any
|
||||
from aionut import NUTError, NUTLoginError
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.config_entries import (
|
||||
ConfigEntry,
|
||||
ConfigFlow,
|
||||
ConfigFlowResult,
|
||||
OptionsFlow,
|
||||
)
|
||||
from homeassistant.config_entries import ConfigEntry, ConfigFlow, ConfigFlowResult
|
||||
from homeassistant.const import (
|
||||
CONF_ALIAS,
|
||||
CONF_BASE,
|
||||
CONF_HOST,
|
||||
CONF_PASSWORD,
|
||||
CONF_PORT,
|
||||
CONF_SCAN_INTERVAL,
|
||||
CONF_USERNAME,
|
||||
)
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.data_entry_flow import AbortFlow
|
||||
from homeassistant.helpers.service_info.zeroconf import ZeroconfServiceInfo
|
||||
|
||||
from . import PyNUTData
|
||||
from .const import DEFAULT_HOST, DEFAULT_PORT, DEFAULT_SCAN_INTERVAL, DOMAIN
|
||||
from . import PyNUTData, _unique_id_from_status
|
||||
from .const import DEFAULT_HOST, DEFAULT_PORT, DOMAIN
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
@ -125,6 +119,11 @@ class NutConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
|
||||
if self._host_port_alias_already_configured(nut_config):
|
||||
return self.async_abort(reason="already_configured")
|
||||
|
||||
if unique_id := _unique_id_from_status(info["available_resources"]):
|
||||
await self.async_set_unique_id(unique_id)
|
||||
self._abort_if_unique_id_configured()
|
||||
|
||||
title = _format_host_port_alias(nut_config)
|
||||
return self.async_create_entry(title=title, data=nut_config)
|
||||
|
||||
@ -147,8 +146,13 @@ class NutConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
self.nut_config.update(user_input)
|
||||
if self._host_port_alias_already_configured(nut_config):
|
||||
return self.async_abort(reason="already_configured")
|
||||
_, errors, placeholders = await self._async_validate_or_error(nut_config)
|
||||
|
||||
info, errors, placeholders = await self._async_validate_or_error(nut_config)
|
||||
if not errors:
|
||||
if unique_id := _unique_id_from_status(info["available_resources"]):
|
||||
await self.async_set_unique_id(unique_id)
|
||||
self._abort_if_unique_id_configured()
|
||||
|
||||
title = _format_host_port_alias(nut_config)
|
||||
return self.async_create_entry(title=title, data=nut_config)
|
||||
|
||||
@ -230,32 +234,3 @@ class NutConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
data_schema=vol.Schema(AUTH_SCHEMA),
|
||||
errors=errors,
|
||||
)
|
||||
|
||||
@staticmethod
|
||||
@callback
|
||||
def async_get_options_flow(config_entry: ConfigEntry) -> OptionsFlow:
|
||||
"""Get the options flow for this handler."""
|
||||
return OptionsFlowHandler()
|
||||
|
||||
|
||||
class OptionsFlowHandler(OptionsFlow):
|
||||
"""Handle a option flow for nut."""
|
||||
|
||||
async def async_step_init(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
"""Handle options flow."""
|
||||
if user_input is not None:
|
||||
return self.async_create_entry(title="", data=user_input)
|
||||
|
||||
scan_interval = self.config_entry.options.get(
|
||||
CONF_SCAN_INTERVAL, DEFAULT_SCAN_INTERVAL
|
||||
)
|
||||
|
||||
base_schema = {
|
||||
vol.Optional(CONF_SCAN_INTERVAL, default=scan_interval): vol.All(
|
||||
vol.Coerce(int), vol.Clamp(min=10, max=300)
|
||||
)
|
||||
}
|
||||
|
||||
return self.async_show_form(step_id="init", data_schema=vol.Schema(base_schema))
|
||||
|
@ -19,8 +19,6 @@ DEFAULT_PORT = 3493
|
||||
KEY_STATUS = "ups.status"
|
||||
KEY_STATUS_DISPLAY = "ups.status.display"
|
||||
|
||||
DEFAULT_SCAN_INTERVAL = 60
|
||||
|
||||
STATE_TYPES = {
|
||||
"OL": "Online",
|
||||
"OB": "On Battery",
|
||||
|
@ -38,15 +38,6 @@
|
||||
"reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]"
|
||||
}
|
||||
},
|
||||
"options": {
|
||||
"step": {
|
||||
"init": {
|
||||
"data": {
|
||||
"scan_interval": "Scan Interval (seconds)"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"device_automation": {
|
||||
"action_type": {
|
||||
"beeper_disable": "Disable UPS beeper/buzzer",
|
||||
|
@ -2,8 +2,9 @@
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from collections.abc import Awaitable, Callable
|
||||
from collections.abc import Callable, Coroutine
|
||||
from dataclasses import dataclass
|
||||
from typing import Any
|
||||
|
||||
from ohme import ApiException, ChargerStatus, OhmeApiClient
|
||||
|
||||
@ -23,7 +24,7 @@ PARALLEL_UPDATES = 1
|
||||
class OhmeButtonDescription(OhmeEntityDescription, ButtonEntityDescription):
|
||||
"""Class describing Ohme button entities."""
|
||||
|
||||
press_fn: Callable[[OhmeApiClient], Awaitable[None]]
|
||||
press_fn: Callable[[OhmeApiClient], Coroutine[Any, Any, bool]]
|
||||
|
||||
|
||||
BUTTON_DESCRIPTIONS = [
|
||||
|
@ -7,5 +7,5 @@
|
||||
"integration_type": "device",
|
||||
"iot_class": "cloud_polling",
|
||||
"quality_scale": "silver",
|
||||
"requirements": ["ohme==1.4.1"]
|
||||
"requirements": ["ohme==1.5.1"]
|
||||
}
|
||||
|
@ -1,7 +1,8 @@
|
||||
"""Platform for number."""
|
||||
|
||||
from collections.abc import Awaitable, Callable
|
||||
from collections.abc import Callable, Coroutine
|
||||
from dataclasses import dataclass
|
||||
from typing import Any
|
||||
|
||||
from ohme import ApiException, OhmeApiClient
|
||||
|
||||
@ -22,7 +23,7 @@ PARALLEL_UPDATES = 1
|
||||
class OhmeNumberDescription(OhmeEntityDescription, NumberEntityDescription):
|
||||
"""Class describing Ohme number entities."""
|
||||
|
||||
set_fn: Callable[[OhmeApiClient, float], Awaitable[None]]
|
||||
set_fn: Callable[[OhmeApiClient, float], Coroutine[Any, Any, bool]]
|
||||
value_fn: Callable[[OhmeApiClient], float]
|
||||
|
||||
|
||||
@ -31,7 +32,7 @@ NUMBER_DESCRIPTION = [
|
||||
key="target_percentage",
|
||||
translation_key="target_percentage",
|
||||
value_fn=lambda client: client.target_soc,
|
||||
set_fn=lambda client, value: client.async_set_target(target_percent=value),
|
||||
set_fn=lambda client, value: client.async_set_target(target_percent=int(value)),
|
||||
native_min_value=0,
|
||||
native_max_value=100,
|
||||
native_step=1,
|
||||
@ -42,7 +43,7 @@ NUMBER_DESCRIPTION = [
|
||||
translation_key="preconditioning_duration",
|
||||
value_fn=lambda client: client.preconditioning,
|
||||
set_fn=lambda client, value: client.async_set_target(
|
||||
pre_condition_length=value
|
||||
pre_condition_length=int(value)
|
||||
),
|
||||
native_min_value=0,
|
||||
native_max_value=60,
|
||||
|
@ -2,7 +2,7 @@
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from collections.abc import Awaitable, Callable
|
||||
from collections.abc import Callable, Coroutine
|
||||
from dataclasses import dataclass
|
||||
from typing import Any, Final
|
||||
|
||||
@ -24,7 +24,7 @@ PARALLEL_UPDATES = 1
|
||||
class OhmeSelectDescription(OhmeEntityDescription, SelectEntityDescription):
|
||||
"""Class to describe an Ohme select entity."""
|
||||
|
||||
select_fn: Callable[[OhmeApiClient, Any], Awaitable[None]]
|
||||
select_fn: Callable[[OhmeApiClient, Any], Coroutine[Any, Any, bool | None]]
|
||||
options: list[str] | None = None
|
||||
options_fn: Callable[[OhmeApiClient], list[str]] | None = None
|
||||
current_option_fn: Callable[[OhmeApiClient], str | None]
|
||||
|
@ -34,7 +34,7 @@ PARALLEL_UPDATES = 0
|
||||
class OhmeSensorDescription(OhmeEntityDescription, SensorEntityDescription):
|
||||
"""Class describing Ohme sensor entities."""
|
||||
|
||||
value_fn: Callable[[OhmeApiClient], str | int | float]
|
||||
value_fn: Callable[[OhmeApiClient], str | int | float | None]
|
||||
|
||||
|
||||
SENSOR_CHARGE_SESSION = [
|
||||
@ -130,6 +130,6 @@ class OhmeSensor(OhmeEntity, SensorEntity):
|
||||
entity_description: OhmeSensorDescription
|
||||
|
||||
@property
|
||||
def native_value(self) -> str | int | float:
|
||||
def native_value(self) -> str | int | float | None:
|
||||
"""Return the state of the sensor."""
|
||||
return self.entity_description.value_fn(self.coordinator.client)
|
||||
|
@ -78,7 +78,7 @@ def async_setup_services(hass: HomeAssistant) -> None:
|
||||
"""List of charge slots."""
|
||||
client = __get_client(service_call)
|
||||
|
||||
return {"slots": client.slots}
|
||||
return {"slots": [slot.to_dict() for slot in client.slots]}
|
||||
|
||||
async def set_price_cap(
|
||||
service_call: ServiceCall,
|
||||
|
@ -89,7 +89,7 @@
|
||||
"state": {
|
||||
"smart_charge": "Smart charge",
|
||||
"max_charge": "Max charge",
|
||||
"paused": "Paused"
|
||||
"paused": "[%key:common::state::paused%]"
|
||||
}
|
||||
},
|
||||
"vehicle": {
|
||||
@ -100,8 +100,8 @@
|
||||
"status": {
|
||||
"name": "Status",
|
||||
"state": {
|
||||
"unplugged": "Unplugged",
|
||||
"plugged_in": "Plugged in",
|
||||
"unplugged": "[%key:component::binary_sensor::entity_component::plug::state::off%]",
|
||||
"plugged_in": "[%key:component::binary_sensor::entity_component::plug::state::on%]",
|
||||
"charging": "[%key:common::state::charging%]",
|
||||
"paused": "[%key:common::state::paused%]",
|
||||
"pending_approval": "Pending approval",
|
||||
|
@ -1,8 +1,9 @@
|
||||
"""Platform for time."""
|
||||
|
||||
from collections.abc import Awaitable, Callable
|
||||
from collections.abc import Callable, Coroutine
|
||||
from dataclasses import dataclass
|
||||
from datetime import time
|
||||
from typing import Any
|
||||
|
||||
from ohme import ApiException, OhmeApiClient
|
||||
|
||||
@ -22,7 +23,7 @@ PARALLEL_UPDATES = 1
|
||||
class OhmeTimeDescription(OhmeEntityDescription, TimeEntityDescription):
|
||||
"""Class describing Ohme time entities."""
|
||||
|
||||
set_fn: Callable[[OhmeApiClient, time], Awaitable[None]]
|
||||
set_fn: Callable[[OhmeApiClient, time], Coroutine[Any, Any, bool]]
|
||||
value_fn: Callable[[OhmeApiClient], time]
|
||||
|
||||
|
||||
|
@ -172,8 +172,8 @@
|
||||
"vcc": "Vcc (5V)",
|
||||
"led_e": "LED E",
|
||||
"led_f": "LED F",
|
||||
"home": "Home",
|
||||
"away": "Away",
|
||||
"home": "[%key:common::state::home%]",
|
||||
"away": "[%key:common::state::not_home%]",
|
||||
"ds1820": "DS1820",
|
||||
"dhw_block": "Block hot water"
|
||||
}
|
||||
|
@ -5,7 +5,7 @@ from __future__ import annotations
|
||||
from pypglab.mqtt import (
|
||||
Client as PyPGLabMqttClient,
|
||||
Sub_State as PyPGLabSubState,
|
||||
Subcribe_CallBack as PyPGLabSubscribeCallBack,
|
||||
Subscribe_CallBack as PyPGLabSubscribeCallBack,
|
||||
)
|
||||
|
||||
from homeassistant.components import mqtt
|
||||
|
@ -7,7 +7,7 @@ from typing import TYPE_CHECKING, Any
|
||||
|
||||
from pypglab.const import SENSOR_REBOOT_TIME, SENSOR_TEMPERATURE, SENSOR_VOLTAGE
|
||||
from pypglab.device import Device as PyPGLabDevice
|
||||
from pypglab.sensor import Sensor as PyPGLabSensors
|
||||
from pypglab.sensor import StatusSensor as PyPGLabSensors
|
||||
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator
|
||||
@ -31,7 +31,7 @@ class PGLabSensorsCoordinator(DataUpdateCoordinator[dict[str, Any]]):
|
||||
"""Initialize."""
|
||||
|
||||
# get a reference of PG Lab device internal sensors state
|
||||
self._sensors: PyPGLabSensors = pglab_device.sensors
|
||||
self._sensors: PyPGLabSensors = pglab_device.status_sensor
|
||||
|
||||
super().__init__(
|
||||
hass,
|
||||
|
@ -220,7 +220,7 @@ class PGLabDiscovery:
|
||||
configuration_url=f"http://{pglab_device.ip}/",
|
||||
connections={(CONNECTION_NETWORK_MAC, pglab_device.mac)},
|
||||
identifiers={(DOMAIN, pglab_device.id)},
|
||||
manufacturer=pglab_device.manufactor,
|
||||
manufacturer=pglab_device.manufacturer,
|
||||
model=pglab_device.type,
|
||||
name=pglab_device.name,
|
||||
sw_version=pglab_device.firmware_version,
|
||||
|
@ -37,7 +37,7 @@ class PGLabBaseEntity(Entity):
|
||||
sw_version=pglab_device.firmware_version,
|
||||
hw_version=pglab_device.hardware_version,
|
||||
model=pglab_device.type,
|
||||
manufacturer=pglab_device.manufactor,
|
||||
manufacturer=pglab_device.manufacturer,
|
||||
configuration_url=f"http://{pglab_device.ip}/",
|
||||
connections={(CONNECTION_NETWORK_MAC, pglab_device.mac)},
|
||||
)
|
||||
|
@ -9,6 +9,6 @@
|
||||
"loggers": ["pglab"],
|
||||
"mqtt": ["pglab/discovery/#"],
|
||||
"quality_scale": "bronze",
|
||||
"requirements": ["pypglab==0.0.3"],
|
||||
"requirements": ["pypglab==0.0.5"],
|
||||
"single_config_entry": true
|
||||
}
|
||||
|
@ -14,7 +14,6 @@ from plexauth import PlexAuth
|
||||
import requests.exceptions
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.components import http
|
||||
from homeassistant.components.http import KEY_HASS, HomeAssistantView
|
||||
from homeassistant.components.media_player import DOMAIN as MP_DOMAIN
|
||||
from homeassistant.config_entries import (
|
||||
@ -36,7 +35,7 @@ from homeassistant.const import (
|
||||
CONF_VERIFY_SSL,
|
||||
)
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.helpers import config_validation as cv, discovery_flow
|
||||
from homeassistant.helpers import config_validation as cv, discovery_flow, http
|
||||
from homeassistant.helpers.aiohttp_client import async_get_clientsession
|
||||
|
||||
from .const import (
|
||||
|
@ -85,7 +85,7 @@
|
||||
"preset_mode": {
|
||||
"state": {
|
||||
"asleep": "Night",
|
||||
"away": "[%key:component::climate::entity_component::_::state_attributes::preset_mode::state::away%]",
|
||||
"away": "[%key:common::state::not_home%]",
|
||||
"home": "[%key:common::state::home%]",
|
||||
"no_frost": "Anti-frost",
|
||||
"vacation": "Vacation"
|
||||
|
@ -7,7 +7,7 @@ from homeassistant.core import HomeAssistant
|
||||
|
||||
from .coordinator import PterodactylConfigEntry, PterodactylCoordinator
|
||||
|
||||
_PLATFORMS: list[Platform] = [Platform.BINARY_SENSOR]
|
||||
_PLATFORMS: list[Platform] = [Platform.BINARY_SENSOR, Platform.BUTTON, Platform.SENSOR]
|
||||
|
||||
|
||||
async def async_setup_entry(hass: HomeAssistant, entry: PterodactylConfigEntry) -> bool:
|
||||
|
@ -1,6 +1,7 @@
|
||||
"""API module of the Pterodactyl integration."""
|
||||
|
||||
from dataclasses import dataclass
|
||||
from enum import StrEnum
|
||||
import logging
|
||||
|
||||
from pydactyl import PterodactylClient
|
||||
@ -32,14 +33,26 @@ class PterodactylData:
|
||||
uuid: str
|
||||
identifier: str
|
||||
state: str
|
||||
memory_utilization: int
|
||||
cpu_utilization: float
|
||||
disk_utilization: int
|
||||
network_rx_utilization: int
|
||||
network_tx_utilization: int
|
||||
cpu_limit: int
|
||||
disk_usage: int
|
||||
disk_limit: int
|
||||
memory_usage: int
|
||||
memory_limit: int
|
||||
network_inbound: int
|
||||
network_outbound: int
|
||||
uptime: int
|
||||
|
||||
|
||||
class PterodactylCommand(StrEnum):
|
||||
"""Command enum for the Pterodactyl server."""
|
||||
|
||||
START_SERVER = "start"
|
||||
STOP_SERVER = "stop"
|
||||
RESTART_SERVER = "restart"
|
||||
FORCE_STOP_SERVER = "kill"
|
||||
|
||||
|
||||
class PterodactylAPI:
|
||||
"""Wrapper for Pterodactyl's API."""
|
||||
|
||||
@ -108,13 +121,33 @@ class PterodactylAPI:
|
||||
identifier=identifier,
|
||||
state=utilization["current_state"],
|
||||
cpu_utilization=utilization["resources"]["cpu_absolute"],
|
||||
memory_utilization=utilization["resources"]["memory_bytes"],
|
||||
disk_utilization=utilization["resources"]["disk_bytes"],
|
||||
network_rx_utilization=utilization["resources"]["network_rx_bytes"],
|
||||
network_tx_utilization=utilization["resources"]["network_tx_bytes"],
|
||||
cpu_limit=server["limits"]["cpu"],
|
||||
memory_usage=utilization["resources"]["memory_bytes"],
|
||||
memory_limit=server["limits"]["memory"],
|
||||
disk_usage=utilization["resources"]["disk_bytes"],
|
||||
disk_limit=server["limits"]["disk"],
|
||||
network_inbound=utilization["resources"]["network_rx_bytes"],
|
||||
network_outbound=utilization["resources"]["network_tx_bytes"],
|
||||
uptime=utilization["resources"]["uptime"],
|
||||
)
|
||||
|
||||
_LOGGER.debug("%s", data[identifier])
|
||||
|
||||
return data
|
||||
|
||||
async def async_send_command(
|
||||
self, identifier: str, command: PterodactylCommand
|
||||
) -> None:
|
||||
"""Send a command to the Pterodactyl server."""
|
||||
try:
|
||||
await self.hass.async_add_executor_job(
|
||||
self.pterodactyl.client.servers.send_power_action, # type: ignore[union-attr]
|
||||
identifier,
|
||||
command,
|
||||
)
|
||||
except (
|
||||
PydactylError,
|
||||
BadRequestError,
|
||||
PterodactylApiError,
|
||||
) as error:
|
||||
raise PterodactylConnectionError(error) from error
|
||||
|
98
homeassistant/components/pterodactyl/button.py
Normal file
98
homeassistant/components/pterodactyl/button.py
Normal file
@ -0,0 +1,98 @@
|
||||
"""Button platform for the Pterodactyl integration."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from dataclasses import dataclass
|
||||
|
||||
from homeassistant.components.button import ButtonEntity, ButtonEntityDescription
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import HomeAssistantError
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
|
||||
from .api import PterodactylCommand, PterodactylConnectionError
|
||||
from .coordinator import PterodactylConfigEntry, PterodactylCoordinator
|
||||
from .entity import PterodactylEntity
|
||||
|
||||
KEY_START_SERVER = "start_server"
|
||||
KEY_STOP_SERVER = "stop_server"
|
||||
KEY_RESTART_SERVER = "restart_server"
|
||||
KEY_FORCE_STOP_SERVER = "force_stop_server"
|
||||
|
||||
# Coordinator is used to centralize the data updates.
|
||||
PARALLEL_UPDATES = 0
|
||||
|
||||
|
||||
@dataclass(frozen=True, kw_only=True)
|
||||
class PterodactylButtonEntityDescription(ButtonEntityDescription):
|
||||
"""Class describing Pterodactyl button entities."""
|
||||
|
||||
command: PterodactylCommand
|
||||
|
||||
|
||||
BUTTON_DESCRIPTIONS = [
|
||||
PterodactylButtonEntityDescription(
|
||||
key=KEY_START_SERVER,
|
||||
translation_key=KEY_START_SERVER,
|
||||
command=PterodactylCommand.START_SERVER,
|
||||
),
|
||||
PterodactylButtonEntityDescription(
|
||||
key=KEY_STOP_SERVER,
|
||||
translation_key=KEY_STOP_SERVER,
|
||||
command=PterodactylCommand.STOP_SERVER,
|
||||
),
|
||||
PterodactylButtonEntityDescription(
|
||||
key=KEY_RESTART_SERVER,
|
||||
translation_key=KEY_RESTART_SERVER,
|
||||
command=PterodactylCommand.RESTART_SERVER,
|
||||
),
|
||||
PterodactylButtonEntityDescription(
|
||||
key=KEY_FORCE_STOP_SERVER,
|
||||
translation_key=KEY_FORCE_STOP_SERVER,
|
||||
command=PterodactylCommand.FORCE_STOP_SERVER,
|
||||
entity_registry_enabled_default=False,
|
||||
),
|
||||
]
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
hass: HomeAssistant,
|
||||
config_entry: PterodactylConfigEntry,
|
||||
async_add_entities: AddConfigEntryEntitiesCallback,
|
||||
) -> None:
|
||||
"""Set up the Pterodactyl button platform."""
|
||||
coordinator = config_entry.runtime_data
|
||||
|
||||
async_add_entities(
|
||||
PterodactylButtonEntity(coordinator, identifier, description, config_entry)
|
||||
for identifier in coordinator.api.identifiers
|
||||
for description in BUTTON_DESCRIPTIONS
|
||||
)
|
||||
|
||||
|
||||
class PterodactylButtonEntity(PterodactylEntity, ButtonEntity):
|
||||
"""Representation of a Pterodactyl button entity."""
|
||||
|
||||
entity_description: PterodactylButtonEntityDescription
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
coordinator: PterodactylCoordinator,
|
||||
identifier: str,
|
||||
description: PterodactylButtonEntityDescription,
|
||||
config_entry: PterodactylConfigEntry,
|
||||
) -> None:
|
||||
"""Initialize the button entity."""
|
||||
super().__init__(coordinator, identifier, config_entry)
|
||||
self.entity_description = description
|
||||
self._attr_unique_id = f"{self.game_server_data.uuid}_{description.key}"
|
||||
|
||||
async def async_press(self) -> None:
|
||||
"""Handle the button press."""
|
||||
try:
|
||||
await self.coordinator.api.async_send_command(
|
||||
self.identifier, self.entity_description.command
|
||||
)
|
||||
except PterodactylConnectionError as err:
|
||||
raise HomeAssistantError(
|
||||
f"Failed to send action '{self.entity_description.key}'"
|
||||
) from err
|
47
homeassistant/components/pterodactyl/icons.json
Normal file
47
homeassistant/components/pterodactyl/icons.json
Normal file
@ -0,0 +1,47 @@
|
||||
{
|
||||
"entity": {
|
||||
"button": {
|
||||
"start_server": {
|
||||
"default": "mdi:play"
|
||||
},
|
||||
"stop_server": {
|
||||
"default": "mdi:stop"
|
||||
},
|
||||
"restart_server": {
|
||||
"default": "mdi:refresh"
|
||||
},
|
||||
"force_stop_server": {
|
||||
"default": "mdi:flash-alert"
|
||||
}
|
||||
},
|
||||
"sensor": {
|
||||
"cpu_utilization": {
|
||||
"default": "mdi:cpu-64-bit"
|
||||
},
|
||||
"cpu_limit": {
|
||||
"default": "mdi:cpu-64-bit"
|
||||
},
|
||||
"memory_usage": {
|
||||
"default": "mdi:memory"
|
||||
},
|
||||
"memory_limit": {
|
||||
"default": "mdi:memory"
|
||||
},
|
||||
"disk_usage": {
|
||||
"default": "mdi:harddisk"
|
||||
},
|
||||
"disk_limit": {
|
||||
"default": "mdi:harddisk"
|
||||
},
|
||||
"network_inbound": {
|
||||
"default": "mdi:download"
|
||||
},
|
||||
"network_outbound": {
|
||||
"default": "mdi:upload"
|
||||
},
|
||||
"uptime": {
|
||||
"default": "mdi:timer"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
183
homeassistant/components/pterodactyl/sensor.py
Normal file
183
homeassistant/components/pterodactyl/sensor.py
Normal file
@ -0,0 +1,183 @@
|
||||
"""Sensor platform of the Pterodactyl integration."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from collections.abc import Callable
|
||||
from dataclasses import dataclass
|
||||
from datetime import datetime, timedelta
|
||||
|
||||
from homeassistant.components.sensor import (
|
||||
SensorDeviceClass,
|
||||
SensorEntity,
|
||||
SensorEntityDescription,
|
||||
SensorStateClass,
|
||||
)
|
||||
from homeassistant.const import PERCENTAGE, EntityCategory, UnitOfInformation
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
from homeassistant.helpers.typing import StateType
|
||||
from homeassistant.util import dt as dt_util
|
||||
|
||||
from .coordinator import PterodactylConfigEntry, PterodactylCoordinator, PterodactylData
|
||||
from .entity import PterodactylEntity
|
||||
|
||||
KEY_CPU_UTILIZATION = "cpu_utilization"
|
||||
KEY_CPU_LIMIT = "cpu_limit"
|
||||
KEY_MEMORY_USAGE = "memory_usage"
|
||||
KEY_MEMORY_LIMIT = "memory_limit"
|
||||
KEY_DISK_USAGE = "disk_usage"
|
||||
KEY_DISK_LIMIT = "disk_limit"
|
||||
KEY_NETWORK_INBOUND = "network_inbound"
|
||||
KEY_NETWORK_OUTBOUND = "network_outbound"
|
||||
KEY_UPTIME = "uptime"
|
||||
|
||||
# Coordinator is used to centralize the data updates.
|
||||
PARALLEL_UPDATES = 0
|
||||
|
||||
|
||||
@dataclass(frozen=True, kw_only=True)
|
||||
class PterodactylSensorEntityDescription(SensorEntityDescription):
|
||||
"""Class describing Pterodactyl sensor entities."""
|
||||
|
||||
value_fn: Callable[[PterodactylData], StateType | datetime]
|
||||
|
||||
|
||||
SENSOR_DESCRIPTIONS = [
|
||||
PterodactylSensorEntityDescription(
|
||||
key=KEY_CPU_UTILIZATION,
|
||||
translation_key=KEY_CPU_UTILIZATION,
|
||||
value_fn=lambda data: data.cpu_utilization,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
native_unit_of_measurement=PERCENTAGE,
|
||||
suggested_display_precision=0,
|
||||
),
|
||||
PterodactylSensorEntityDescription(
|
||||
key=KEY_CPU_LIMIT,
|
||||
translation_key=KEY_CPU_LIMIT,
|
||||
value_fn=lambda data: data.cpu_limit,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
native_unit_of_measurement=PERCENTAGE,
|
||||
suggested_display_precision=0,
|
||||
entity_registry_enabled_default=False,
|
||||
),
|
||||
PterodactylSensorEntityDescription(
|
||||
key=KEY_MEMORY_USAGE,
|
||||
translation_key=KEY_MEMORY_USAGE,
|
||||
value_fn=lambda data: data.memory_usage,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
device_class=SensorDeviceClass.DATA_SIZE,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
native_unit_of_measurement=UnitOfInformation.BYTES,
|
||||
suggested_unit_of_measurement=UnitOfInformation.GIGABYTES,
|
||||
suggested_display_precision=1,
|
||||
),
|
||||
PterodactylSensorEntityDescription(
|
||||
key=KEY_MEMORY_LIMIT,
|
||||
translation_key=KEY_MEMORY_LIMIT,
|
||||
value_fn=lambda data: data.memory_limit,
|
||||
device_class=SensorDeviceClass.DATA_SIZE,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
native_unit_of_measurement=UnitOfInformation.MEGABYTES,
|
||||
suggested_unit_of_measurement=UnitOfInformation.GIGABYTES,
|
||||
suggested_display_precision=1,
|
||||
entity_registry_enabled_default=False,
|
||||
),
|
||||
PterodactylSensorEntityDescription(
|
||||
key=KEY_DISK_USAGE,
|
||||
translation_key=KEY_DISK_USAGE,
|
||||
value_fn=lambda data: data.disk_usage,
|
||||
device_class=SensorDeviceClass.DATA_SIZE,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
native_unit_of_measurement=UnitOfInformation.BYTES,
|
||||
suggested_unit_of_measurement=UnitOfInformation.GIGABYTES,
|
||||
suggested_display_precision=1,
|
||||
),
|
||||
PterodactylSensorEntityDescription(
|
||||
key=KEY_DISK_LIMIT,
|
||||
translation_key=KEY_DISK_LIMIT,
|
||||
value_fn=lambda data: data.disk_limit,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
device_class=SensorDeviceClass.DATA_SIZE,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
native_unit_of_measurement=UnitOfInformation.MEGABYTES,
|
||||
suggested_unit_of_measurement=UnitOfInformation.GIGABYTES,
|
||||
suggested_display_precision=1,
|
||||
entity_registry_enabled_default=False,
|
||||
),
|
||||
PterodactylSensorEntityDescription(
|
||||
key=KEY_NETWORK_INBOUND,
|
||||
translation_key=KEY_NETWORK_INBOUND,
|
||||
value_fn=lambda data: data.network_inbound,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
device_class=SensorDeviceClass.DATA_SIZE,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
native_unit_of_measurement=UnitOfInformation.BYTES,
|
||||
suggested_unit_of_measurement=UnitOfInformation.GIGABYTES,
|
||||
suggested_display_precision=1,
|
||||
entity_registry_enabled_default=False,
|
||||
),
|
||||
PterodactylSensorEntityDescription(
|
||||
key=KEY_NETWORK_OUTBOUND,
|
||||
translation_key=KEY_NETWORK_OUTBOUND,
|
||||
value_fn=lambda data: data.network_outbound,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
device_class=SensorDeviceClass.DATA_SIZE,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
native_unit_of_measurement=UnitOfInformation.BYTES,
|
||||
suggested_unit_of_measurement=UnitOfInformation.GIGABYTES,
|
||||
suggested_display_precision=1,
|
||||
entity_registry_enabled_default=False,
|
||||
),
|
||||
PterodactylSensorEntityDescription(
|
||||
key=KEY_UPTIME,
|
||||
translation_key=KEY_UPTIME,
|
||||
value_fn=(
|
||||
lambda data: dt_util.utcnow() - timedelta(milliseconds=data.uptime)
|
||||
if data.uptime > 0
|
||||
else None
|
||||
),
|
||||
device_class=SensorDeviceClass.TIMESTAMP,
|
||||
),
|
||||
]
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
hass: HomeAssistant,
|
||||
config_entry: PterodactylConfigEntry,
|
||||
async_add_entities: AddConfigEntryEntitiesCallback,
|
||||
) -> None:
|
||||
"""Set up the Pterodactyl sensor platform."""
|
||||
coordinator = config_entry.runtime_data
|
||||
|
||||
async_add_entities(
|
||||
PterodactylSensorEntity(coordinator, identifier, description, config_entry)
|
||||
for identifier in coordinator.api.identifiers
|
||||
for description in SENSOR_DESCRIPTIONS
|
||||
)
|
||||
|
||||
|
||||
class PterodactylSensorEntity(PterodactylEntity, SensorEntity):
|
||||
"""Representation of a Pterodactyl sensor base entity."""
|
||||
|
||||
entity_description: PterodactylSensorEntityDescription
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
coordinator: PterodactylCoordinator,
|
||||
identifier: str,
|
||||
description: PterodactylSensorEntityDescription,
|
||||
config_entry: PterodactylConfigEntry,
|
||||
) -> None:
|
||||
"""Initialize sensor base entity."""
|
||||
super().__init__(coordinator, identifier, config_entry)
|
||||
self.entity_description = description
|
||||
self._attr_unique_id = f"{self.game_server_data.uuid}_{description.key}"
|
||||
|
||||
@property
|
||||
def native_value(self) -> StateType | datetime:
|
||||
"""Return native value of sensor."""
|
||||
return self.entity_description.value_fn(self.game_server_data)
|
@ -25,6 +25,49 @@
|
||||
"status": {
|
||||
"name": "Status"
|
||||
}
|
||||
},
|
||||
"button": {
|
||||
"start_server": {
|
||||
"name": "Start server"
|
||||
},
|
||||
"stop_server": {
|
||||
"name": "Stop server"
|
||||
},
|
||||
"restart_server": {
|
||||
"name": "Restart server"
|
||||
},
|
||||
"force_stop_server": {
|
||||
"name": "Force stop server"
|
||||
}
|
||||
},
|
||||
"sensor": {
|
||||
"cpu_utilization": {
|
||||
"name": "CPU utilization"
|
||||
},
|
||||
"cpu_limit": {
|
||||
"name": "CPU limit"
|
||||
},
|
||||
"memory_usage": {
|
||||
"name": "Memory usage"
|
||||
},
|
||||
"memory_limit": {
|
||||
"name": "Memory limit"
|
||||
},
|
||||
"disk_usage": {
|
||||
"name": "Disk usage"
|
||||
},
|
||||
"disk_limit": {
|
||||
"name": "Disk limit"
|
||||
},
|
||||
"network_inbound": {
|
||||
"name": "Network inbound"
|
||||
},
|
||||
"network_outbound": {
|
||||
"name": "Network outbound"
|
||||
},
|
||||
"uptime": {
|
||||
"name": "Uptime"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -53,9 +53,9 @@
|
||||
"connection_status": {
|
||||
"name": "Connection status",
|
||||
"state": {
|
||||
"connected": "Connected",
|
||||
"connected": "[%key:common::state::connected%]",
|
||||
"firewalled": "Firewalled",
|
||||
"disconnected": "Disconnected"
|
||||
"disconnected": "[%key:common::state::disconnected%]"
|
||||
}
|
||||
},
|
||||
"active_torrents": {
|
||||
|
@ -7,7 +7,7 @@
|
||||
"iot_class": "local_push",
|
||||
"quality_scale": "internal",
|
||||
"requirements": [
|
||||
"SQLAlchemy==2.0.39",
|
||||
"SQLAlchemy==2.0.40",
|
||||
"fnv-hash-fast==1.4.0",
|
||||
"psutil-home-assistant==0.0.1"
|
||||
]
|
||||
|
@ -1,5 +1,6 @@
|
||||
"""Config flow for Remote Calendar integration."""
|
||||
|
||||
from http import HTTPStatus
|
||||
import logging
|
||||
from typing import Any
|
||||
|
||||
@ -50,6 +51,13 @@ class RemoteCalendarConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
client = get_async_client(self.hass)
|
||||
try:
|
||||
res = await client.get(user_input[CONF_URL], follow_redirects=True)
|
||||
if res.status_code == HTTPStatus.FORBIDDEN:
|
||||
errors["base"] = "forbidden"
|
||||
return self.async_show_form(
|
||||
step_id="user",
|
||||
data_schema=STEP_USER_DATA_SCHEMA,
|
||||
errors=errors,
|
||||
)
|
||||
res.raise_for_status()
|
||||
except (HTTPError, InvalidURL) as err:
|
||||
errors["base"] = "cannot_connect"
|
||||
|
@ -19,6 +19,7 @@
|
||||
},
|
||||
"error": {
|
||||
"cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
|
||||
"forbidden": "The server understood the request but refuses to authorize it.",
|
||||
"invalid_ics_file": "[%key:component::local_calendar::config::error::invalid_ics_file%]"
|
||||
}
|
||||
},
|
||||
|
@ -842,8 +842,8 @@
|
||||
"state": {
|
||||
"off": "[%key:common::state::off%]",
|
||||
"disarm": "Disarmed",
|
||||
"home": "Home",
|
||||
"away": "Away"
|
||||
"home": "[%key:common::state::home%]",
|
||||
"away": "[%key:common::state::not_home%]"
|
||||
}
|
||||
}
|
||||
},
|
||||
|
@ -7,6 +7,6 @@
|
||||
"iot_class": "local_push",
|
||||
"loggers": ["aiorussound"],
|
||||
"quality_scale": "silver",
|
||||
"requirements": ["aiorussound==4.4.0"],
|
||||
"requirements": ["aiorussound==4.5.0"],
|
||||
"zeroconf": ["_rio._tcp.local."]
|
||||
}
|
||||
|
@ -478,7 +478,27 @@ def process_status(status: dict[str, ComponentStatus]) -> dict[str, ComponentSta
|
||||
if (main_component := status.get(MAIN)) is None:
|
||||
return status
|
||||
if (
|
||||
disabled_capabilities_capability := main_component.get(
|
||||
disabled_components_capability := main_component.get(
|
||||
Capability.CUSTOM_DISABLED_COMPONENTS
|
||||
)
|
||||
) is not None:
|
||||
disabled_components = cast(
|
||||
list[str],
|
||||
disabled_components_capability[Attribute.DISABLED_COMPONENTS].value,
|
||||
)
|
||||
if disabled_components is not None:
|
||||
for component in disabled_components:
|
||||
if component in status:
|
||||
del status[component]
|
||||
for component_status in status.values():
|
||||
process_component_status(component_status)
|
||||
return status
|
||||
|
||||
|
||||
def process_component_status(status: ComponentStatus) -> None:
|
||||
"""Remove disabled capabilities from component status."""
|
||||
if (
|
||||
disabled_capabilities_capability := status.get(
|
||||
Capability.CUSTOM_DISABLED_CAPABILITIES
|
||||
)
|
||||
) is not None:
|
||||
@ -488,9 +508,8 @@ def process_status(status: dict[str, ComponentStatus]) -> dict[str, ComponentSta
|
||||
)
|
||||
if disabled_capabilities is not None:
|
||||
for capability in disabled_capabilities:
|
||||
if capability in main_component and (
|
||||
if capability in status and (
|
||||
capability not in KEEP_CAPABILITY_QUIRK
|
||||
or not KEEP_CAPABILITY_QUIRK[capability](main_component[capability])
|
||||
or not KEEP_CAPABILITY_QUIRK[capability](status[capability])
|
||||
):
|
||||
del main_component[capability]
|
||||
return status
|
||||
del status[capability]
|
||||
|
@ -281,7 +281,7 @@ class SmartThingsThermostat(SmartThingsEntity, ClimateEntity):
|
||||
return [
|
||||
state
|
||||
for mode in supported_thermostat_modes
|
||||
if (state := AC_MODE_TO_STATE.get(mode)) is not None
|
||||
if (state := MODE_TO_STATE.get(mode)) is not None
|
||||
]
|
||||
|
||||
@property
|
||||
@ -466,12 +466,14 @@ class SmartThingsAirConditioner(SmartThingsEntity, ClimateEntity):
|
||||
Capability.DEMAND_RESPONSE_LOAD_CONTROL,
|
||||
Attribute.DEMAND_RESPONSE_LOAD_CONTROL_STATUS,
|
||||
)
|
||||
return {
|
||||
"drlc_status_duration": drlc_status["duration"],
|
||||
"drlc_status_level": drlc_status["drlcLevel"],
|
||||
"drlc_status_start": drlc_status["start"],
|
||||
"drlc_status_override": drlc_status["override"],
|
||||
}
|
||||
res = {}
|
||||
for key in ("duration", "start", "override", "drlcLevel"):
|
||||
if key in drlc_status:
|
||||
dict_key = {"drlcLevel": "drlc_status_level"}.get(
|
||||
key, f"drlc_status_{key}"
|
||||
)
|
||||
res[dict_key] = drlc_status[key]
|
||||
return res
|
||||
|
||||
@property
|
||||
def fan_mode(self) -> str:
|
||||
|
@ -6,5 +6,5 @@
|
||||
"config_flow": true,
|
||||
"documentation": "https://www.home-assistant.io/integrations/sql",
|
||||
"iot_class": "local_polling",
|
||||
"requirements": ["SQLAlchemy==2.0.39", "sqlparse==0.5.0"]
|
||||
"requirements": ["SQLAlchemy==2.0.40", "sqlparse==0.5.0"]
|
||||
}
|
||||
|
@ -154,7 +154,7 @@ class SwitchBotBlindTiltEntity(SwitchbotEntity, CoverEntity, RestoreEntity):
|
||||
ATTR_CURRENT_TILT_POSITION
|
||||
)
|
||||
self._last_run_success = last_state.attributes.get("last_run_success")
|
||||
if (_tilt := self._attr_current_cover_position) is not None:
|
||||
if (_tilt := self._attr_current_cover_tilt_position) is not None:
|
||||
self._attr_is_closed = (_tilt < self.CLOSED_DOWN_THRESHOLD) or (
|
||||
_tilt > self.CLOSED_UP_THRESHOLD
|
||||
)
|
||||
|
@ -534,7 +534,7 @@
|
||||
"vin": {
|
||||
"name": "Vehicle",
|
||||
"state": {
|
||||
"disconnected": "Disconnected"
|
||||
"disconnected": "[%key:common::state::disconnected%]"
|
||||
}
|
||||
},
|
||||
"vpp_backup_reserve_percent": {
|
||||
|
@ -14,7 +14,7 @@
|
||||
}
|
||||
},
|
||||
"scan": {
|
||||
"description": "Use Smart Life app or Tuya Smart app to scan the following QR-code to complete the login.\n\nContinue to the next step once you have completed this step in the app."
|
||||
"description": "Use the Smart Life app or Tuya Smart app to scan the following QR code to complete the login.\n\nContinue to the next step once you have completed this step in the app."
|
||||
}
|
||||
},
|
||||
"error": {
|
||||
|
@ -152,8 +152,8 @@
|
||||
"selector": {
|
||||
"profile": {
|
||||
"options": {
|
||||
"home": "Home",
|
||||
"away": "Away",
|
||||
"home": "[%key:common::state::home%]",
|
||||
"away": "[%key:common::state::not_home%]",
|
||||
"boost": "Boost",
|
||||
"fireplace": "Fireplace",
|
||||
"extra": "Extra"
|
||||
|
@ -30,7 +30,6 @@ from propcache.api import cached_property
|
||||
import voluptuous as vol
|
||||
|
||||
from . import data_entry_flow, loader
|
||||
from .components import persistent_notification
|
||||
from .const import (
|
||||
CONF_NAME,
|
||||
EVENT_HOMEASSISTANT_STARTED,
|
||||
@ -178,7 +177,6 @@ class ConfigEntryState(Enum):
|
||||
|
||||
|
||||
DEFAULT_DISCOVERY_UNIQUE_ID = "default_discovery_unique_id"
|
||||
DISCOVERY_NOTIFICATION_ID = "config_entry_discovery"
|
||||
DISCOVERY_SOURCES = {
|
||||
SOURCE_BLUETOOTH,
|
||||
SOURCE_DHCP,
|
||||
@ -1385,14 +1383,6 @@ class ConfigEntriesFlowManager(
|
||||
|
||||
await asyncio.wait(current.values())
|
||||
|
||||
@callback
|
||||
def _async_has_other_discovery_flows(self, flow_id: str) -> bool:
|
||||
"""Check if there are any other discovery flows in progress."""
|
||||
for flow in self._progress.values():
|
||||
if flow.flow_id != flow_id and flow.context["source"] in DISCOVERY_SOURCES:
|
||||
return True
|
||||
return False
|
||||
|
||||
async def async_init(
|
||||
self,
|
||||
handler: str,
|
||||
@ -1527,10 +1517,6 @@ class ConfigEntriesFlowManager(
|
||||
# init to be done.
|
||||
self._set_pending_import_done(flow)
|
||||
|
||||
# Remove notification if no other discovery config entries in progress
|
||||
if not self._async_has_other_discovery_flows(flow.flow_id):
|
||||
persistent_notification.async_dismiss(self.hass, DISCOVERY_NOTIFICATION_ID)
|
||||
|
||||
# Clean up issue if this is a reauth flow
|
||||
if flow.context["source"] == SOURCE_REAUTH:
|
||||
if (entry_id := flow.context.get("entry_id")) is not None and (
|
||||
@ -1719,15 +1705,6 @@ class ConfigEntriesFlowManager(
|
||||
# async_fire_internal is used here because this is only
|
||||
# called from the Debouncer so we know the usage is safe
|
||||
self.hass.bus.async_fire_internal(EVENT_FLOW_DISCOVERED)
|
||||
persistent_notification.async_create(
|
||||
self.hass,
|
||||
title="New devices discovered",
|
||||
message=(
|
||||
"We have discovered new devices on your network. "
|
||||
"[Check it out](/config/integrations)."
|
||||
),
|
||||
notification_id=DISCOVERY_NOTIFICATION_ID,
|
||||
)
|
||||
|
||||
@callback
|
||||
def async_has_matching_discovery_flow(
|
||||
|
@ -1935,13 +1935,14 @@ class State:
|
||||
# to avoid callers outside of this module
|
||||
# from misusing it by mistake.
|
||||
context = state_context._as_dict # noqa: SLF001
|
||||
last_changed_timestamp = self.last_changed_timestamp
|
||||
compressed_state: CompressedState = {
|
||||
COMPRESSED_STATE_STATE: self.state,
|
||||
COMPRESSED_STATE_ATTRIBUTES: self.attributes,
|
||||
COMPRESSED_STATE_CONTEXT: context,
|
||||
COMPRESSED_STATE_LAST_CHANGED: self.last_changed_timestamp,
|
||||
COMPRESSED_STATE_LAST_CHANGED: last_changed_timestamp,
|
||||
}
|
||||
if self.last_changed != self.last_updated:
|
||||
if last_changed_timestamp != self.last_updated_timestamp:
|
||||
compressed_state[COMPRESSED_STATE_LAST_UPDATED] = (
|
||||
self.last_updated_timestamp
|
||||
)
|
||||
|
@ -19,6 +19,7 @@ APPLICATION_CREDENTIALS = [
|
||||
"iotty",
|
||||
"lametric",
|
||||
"lyric",
|
||||
"mcp",
|
||||
"microbees",
|
||||
"monzo",
|
||||
"myuplink",
|
||||
|
@ -27,11 +27,11 @@ import voluptuous as vol
|
||||
from yarl import URL
|
||||
|
||||
from homeassistant import config_entries
|
||||
from homeassistant.components import http
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.loader import async_get_application_credentials
|
||||
from homeassistant.util.hass_dict import HassKey
|
||||
|
||||
from . import http
|
||||
from .aiohttp_client import async_get_clientsession
|
||||
from .network import NoURLAvailableError
|
||||
|
||||
|
@ -573,9 +573,9 @@ class EntityPlatform:
|
||||
|
||||
async def _async_add_and_update_entities(
|
||||
self,
|
||||
coros: list[Coroutine[Any, Any, None]],
|
||||
entities: list[Entity],
|
||||
timeout: float,
|
||||
config_subentry_id: str | None,
|
||||
) -> None:
|
||||
"""Add entities for a single platform and update them.
|
||||
|
||||
@ -585,10 +585,21 @@ class EntityPlatform:
|
||||
event loop and will finish faster if we run them concurrently.
|
||||
"""
|
||||
results: list[BaseException | None] | None = None
|
||||
tasks = [create_eager_task(coro, loop=self.hass.loop) for coro in coros]
|
||||
entity_registry = ent_reg.async_get(self.hass)
|
||||
try:
|
||||
async with self.hass.timeout.async_timeout(timeout, self.domain):
|
||||
results = await asyncio.gather(*tasks, return_exceptions=True)
|
||||
results = await asyncio.gather(
|
||||
*(
|
||||
create_eager_task(
|
||||
self._async_add_entity(
|
||||
entity, True, entity_registry, config_subentry_id
|
||||
),
|
||||
loop=self.hass.loop,
|
||||
)
|
||||
for entity in entities
|
||||
),
|
||||
return_exceptions=True,
|
||||
)
|
||||
except TimeoutError:
|
||||
self.logger.warning(
|
||||
"Timed out adding entities for domain %s with platform %s after %ds",
|
||||
@ -615,9 +626,9 @@ class EntityPlatform:
|
||||
|
||||
async def _async_add_entities(
|
||||
self,
|
||||
coros: list[Coroutine[Any, Any, None]],
|
||||
entities: list[Entity],
|
||||
timeout: float,
|
||||
config_subentry_id: str | None,
|
||||
) -> None:
|
||||
"""Add entities for a single platform without updating.
|
||||
|
||||
@ -626,13 +637,15 @@ class EntityPlatform:
|
||||
to the event loop so we can await the coros directly without
|
||||
scheduling them as tasks.
|
||||
"""
|
||||
entity_registry = ent_reg.async_get(self.hass)
|
||||
try:
|
||||
async with self.hass.timeout.async_timeout(timeout, self.domain):
|
||||
for idx, coro in enumerate(coros):
|
||||
for entity in entities:
|
||||
try:
|
||||
await coro
|
||||
await self._async_add_entity(
|
||||
entity, False, entity_registry, config_subentry_id
|
||||
)
|
||||
except Exception as ex:
|
||||
entity = entities[idx]
|
||||
self.logger.exception(
|
||||
"Error adding entity %s for domain %s with platform %s",
|
||||
entity.entity_id,
|
||||
@ -670,33 +683,20 @@ class EntityPlatform:
|
||||
f"entry {self.config_entry.entry_id if self.config_entry else None}"
|
||||
)
|
||||
|
||||
entities: list[Entity] = (
|
||||
new_entities if type(new_entities) is list else list(new_entities)
|
||||
)
|
||||
# handle empty list from component/platform
|
||||
if not new_entities: # type: ignore[truthy-iterable]
|
||||
if not entities:
|
||||
return
|
||||
|
||||
hass = self.hass
|
||||
entity_registry = ent_reg.async_get(hass)
|
||||
coros: list[Coroutine[Any, Any, None]] = []
|
||||
entities: list[Entity] = []
|
||||
for entity in new_entities:
|
||||
coros.append(
|
||||
self._async_add_entity(
|
||||
entity, update_before_add, entity_registry, config_subentry_id
|
||||
)
|
||||
)
|
||||
entities.append(entity)
|
||||
|
||||
# No entities for processing
|
||||
if not coros:
|
||||
return
|
||||
|
||||
timeout = max(SLOW_ADD_ENTITY_MAX_WAIT * len(coros), SLOW_ADD_MIN_TIMEOUT)
|
||||
timeout = max(SLOW_ADD_ENTITY_MAX_WAIT * len(entities), SLOW_ADD_MIN_TIMEOUT)
|
||||
if update_before_add:
|
||||
add_func = self._async_add_and_update_entities
|
||||
await self._async_add_and_update_entities(
|
||||
entities, timeout, config_subentry_id
|
||||
)
|
||||
else:
|
||||
add_func = self._async_add_entities
|
||||
|
||||
await add_func(coros, entities, timeout)
|
||||
await self._async_add_entities(entities, timeout, config_subentry_id)
|
||||
|
||||
if (
|
||||
(self.config_entry and self.config_entry.pref_disable_polling)
|
||||
|
@ -9,6 +9,7 @@ from datetime import timedelta
|
||||
from decimal import Decimal
|
||||
from enum import Enum
|
||||
from functools import cache, partial
|
||||
from operator import attrgetter
|
||||
from typing import Any, cast
|
||||
|
||||
import slugify as unicode_slug
|
||||
@ -496,7 +497,7 @@ def _get_exposed_entities(
|
||||
CALENDAR_DOMAIN: {},
|
||||
}
|
||||
|
||||
for state in hass.states.async_all():
|
||||
for state in sorted(hass.states.async_all(), key=attrgetter("name")):
|
||||
if not async_should_expose(hass, assistant, state.entity_id):
|
||||
continue
|
||||
|
||||
|
@ -10,12 +10,12 @@ from aiohttp import hdrs
|
||||
from hass_nabucasa import remote
|
||||
import yarl
|
||||
|
||||
from homeassistant.components import http
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import HomeAssistantError
|
||||
from homeassistant.loader import bind_hass
|
||||
from homeassistant.util.network import is_ip_address, is_loopback, normalize_url
|
||||
|
||||
from . import http
|
||||
from .hassio import is_hassio
|
||||
|
||||
TYPE_URL_INTERNAL = "internal_url"
|
||||
|
@ -62,7 +62,7 @@ PyTurboJPEG==1.7.5
|
||||
PyYAML==6.0.2
|
||||
requests==2.32.3
|
||||
securetar==2025.2.1
|
||||
SQLAlchemy==2.0.39
|
||||
SQLAlchemy==2.0.40
|
||||
standard-aifc==3.13.0
|
||||
standard-telnetlib==3.13.0
|
||||
typing-extensions>=4.13.0,<5.0
|
||||
@ -87,9 +87,9 @@ httplib2>=0.19.0
|
||||
# gRPC is an implicit dependency that we want to make explicit so we manage
|
||||
# upgrades intentionally. It is a large package to build from source and we
|
||||
# want to ensure we have wheels built.
|
||||
grpcio==1.67.1
|
||||
grpcio-status==1.67.1
|
||||
grpcio-reflection==1.67.1
|
||||
grpcio==1.71.0
|
||||
grpcio-status==1.71.0
|
||||
grpcio-reflection==1.71.0
|
||||
|
||||
# This is a old unmaintained library and is replaced with pycryptodome
|
||||
pycrypto==1000000000.0.0
|
||||
|
@ -68,7 +68,7 @@ dependencies = [
|
||||
"PyYAML==6.0.2",
|
||||
"requests==2.32.3",
|
||||
"securetar==2025.2.1",
|
||||
"SQLAlchemy==2.0.39",
|
||||
"SQLAlchemy==2.0.40",
|
||||
"standard-aifc==3.13.0",
|
||||
"standard-telnetlib==3.13.0",
|
||||
"typing-extensions>=4.13.0,<5.0",
|
||||
|
2
requirements.txt
generated
2
requirements.txt
generated
@ -40,7 +40,7 @@ python-slugify==8.0.4
|
||||
PyYAML==6.0.2
|
||||
requests==2.32.3
|
||||
securetar==2025.2.1
|
||||
SQLAlchemy==2.0.39
|
||||
SQLAlchemy==2.0.40
|
||||
standard-aifc==3.13.0
|
||||
standard-telnetlib==3.13.0
|
||||
typing-extensions>=4.13.0,<5.0
|
||||
|
10
requirements_all.txt
generated
10
requirements_all.txt
generated
@ -116,7 +116,7 @@ RtmAPI==0.7.2
|
||||
|
||||
# homeassistant.components.recorder
|
||||
# homeassistant.components.sql
|
||||
SQLAlchemy==2.0.39
|
||||
SQLAlchemy==2.0.40
|
||||
|
||||
# homeassistant.components.tami4
|
||||
Tami4EdgeAPI==3.0
|
||||
@ -243,7 +243,7 @@ aioelectricitymaps==0.4.0
|
||||
aioemonitor==1.0.5
|
||||
|
||||
# homeassistant.components.esphome
|
||||
aioesphomeapi==29.7.0
|
||||
aioesphomeapi==29.8.0
|
||||
|
||||
# homeassistant.components.flo
|
||||
aioflo==2021.11.0
|
||||
@ -362,7 +362,7 @@ aioridwell==2024.01.0
|
||||
aioruckus==0.42
|
||||
|
||||
# homeassistant.components.russound_rio
|
||||
aiorussound==4.4.0
|
||||
aiorussound==4.5.0
|
||||
|
||||
# homeassistant.components.ruuvi_gateway
|
||||
aioruuvigateway==0.1.0
|
||||
@ -1559,7 +1559,7 @@ odp-amsterdam==6.0.2
|
||||
oemthermostat==1.1.1
|
||||
|
||||
# homeassistant.components.ohme
|
||||
ohme==1.4.1
|
||||
ohme==1.5.1
|
||||
|
||||
# homeassistant.components.ollama
|
||||
ollama==0.4.7
|
||||
@ -2223,7 +2223,7 @@ pypca==0.0.7
|
||||
pypck==0.8.5
|
||||
|
||||
# homeassistant.components.pglab
|
||||
pypglab==0.0.3
|
||||
pypglab==0.0.5
|
||||
|
||||
# homeassistant.components.pjlink
|
||||
pypjlink2==1.2.1
|
||||
|
10
requirements_test_all.txt
generated
10
requirements_test_all.txt
generated
@ -110,7 +110,7 @@ RtmAPI==0.7.2
|
||||
|
||||
# homeassistant.components.recorder
|
||||
# homeassistant.components.sql
|
||||
SQLAlchemy==2.0.39
|
||||
SQLAlchemy==2.0.40
|
||||
|
||||
# homeassistant.components.tami4
|
||||
Tami4EdgeAPI==3.0
|
||||
@ -231,7 +231,7 @@ aioelectricitymaps==0.4.0
|
||||
aioemonitor==1.0.5
|
||||
|
||||
# homeassistant.components.esphome
|
||||
aioesphomeapi==29.7.0
|
||||
aioesphomeapi==29.8.0
|
||||
|
||||
# homeassistant.components.flo
|
||||
aioflo==2021.11.0
|
||||
@ -344,7 +344,7 @@ aioridwell==2024.01.0
|
||||
aioruckus==0.42
|
||||
|
||||
# homeassistant.components.russound_rio
|
||||
aiorussound==4.4.0
|
||||
aiorussound==4.5.0
|
||||
|
||||
# homeassistant.components.ruuvi_gateway
|
||||
aioruuvigateway==0.1.0
|
||||
@ -1305,7 +1305,7 @@ objgraph==3.5.0
|
||||
odp-amsterdam==6.0.2
|
||||
|
||||
# homeassistant.components.ohme
|
||||
ohme==1.4.1
|
||||
ohme==1.5.1
|
||||
|
||||
# homeassistant.components.ollama
|
||||
ollama==0.4.7
|
||||
@ -1814,7 +1814,7 @@ pypalazzetti==0.1.19
|
||||
pypck==0.8.5
|
||||
|
||||
# homeassistant.components.pglab
|
||||
pypglab==0.0.3
|
||||
pypglab==0.0.5
|
||||
|
||||
# homeassistant.components.pjlink
|
||||
pypjlink2==1.2.1
|
||||
|
@ -117,9 +117,9 @@ httplib2>=0.19.0
|
||||
# gRPC is an implicit dependency that we want to make explicit so we manage
|
||||
# upgrades intentionally. It is a large package to build from source and we
|
||||
# want to ensure we have wheels built.
|
||||
grpcio==1.67.1
|
||||
grpcio-status==1.67.1
|
||||
grpcio-reflection==1.67.1
|
||||
grpcio==1.71.0
|
||||
grpcio-status==1.71.0
|
||||
grpcio-reflection==1.71.0
|
||||
|
||||
# This is a old unmaintained library and is replaced with pycryptodome
|
||||
pycrypto==1000000000.0.0
|
||||
|
89
script/quality_scale_summary.py
Normal file
89
script/quality_scale_summary.py
Normal file
@ -0,0 +1,89 @@
|
||||
"""Generate a summary of integration quality scales.
|
||||
|
||||
Run with python3 -m script.quality_scale_summary
|
||||
Data collected at https://docs.google.com/spreadsheets/d/1xEiwovRJyPohAv8S4ad2LAB-0A38s1HWmzHng8v-4NI
|
||||
"""
|
||||
|
||||
import csv
|
||||
from pathlib import Path
|
||||
import sys
|
||||
|
||||
from homeassistant.const import __version__ as current_version
|
||||
from homeassistant.util.json import load_json
|
||||
|
||||
COMPONENTS_DIR = Path("homeassistant/components")
|
||||
|
||||
|
||||
def generate_quality_scale_summary() -> dict[str, int]:
    """Count integrations per quality scale tier.

    Scans every ``manifest.json`` under the components directory and tallies
    how many integrations fall into each tier.

    Returns:
        Mapping from tier name ("virtual", "unknown", "legacy", "internal",
        "bronze", "silver", "gold", "platinum") to the number of integrations
        in that tier.
    """
    quality_scales = {
        "virtual": 0,
        "unknown": 0,
        "legacy": 0,
        "internal": 0,
        "bronze": 0,
        "silver": 0,
        "gold": 0,
        "platinum": 0,
    }

    for manifest_path in COMPONENTS_DIR.glob("*/manifest.json"):
        manifest = load_json(manifest_path)

        if manifest.get("integration_type") == "virtual":
            # Virtual integrations carry no quality scale of their own.
            quality_scales["virtual"] += 1
        elif quality_scale := manifest.get("quality_scale"):
            # NOTE(review): assumes manifest values are limited to the keys
            # above (enforced by hassfest validation) — an unexpected value
            # would raise KeyError here.
            quality_scales[quality_scale] += 1
        else:
            quality_scales["unknown"] += 1

    return quality_scales
|
||||
|
||||
|
||||
def output_csv(quality_scales: dict[str, int], print_header: bool) -> None:
    """Write the quality scale summary to stdout as a CSV row.

    Args:
        quality_scales: Mapping from tier name to integration count.
        print_header: When True, emit the column-header row first.
    """
    # Column order for both the header and the data row.
    tiers = [
        "virtual",
        "unknown",
        "legacy",
        "internal",
        "bronze",
        "silver",
        "gold",
        "platinum",
    ]

    writer = csv.writer(sys.stdout)

    if print_header:
        writer.writerow(
            ["Version", "Total", *(tier.capitalize() for tier in tiers)]
        )

    # One summary row: current HA version, grand total, then per-tier counts.
    writer.writerow(
        [current_version, sum(quality_scales.values())]
        + [quality_scales[tier] for tier in tiers]
    )
|
||||
|
||||
|
||||
def main() -> None:
    """Run the script: collect the summary and print it as CSV."""
    summary = generate_quality_scale_summary()
    # Pass --header on the command line to include the CSV header row.
    output_csv(summary, "--header" in sys.argv)


if __name__ == "__main__":
    main()
|
@ -3,11 +3,11 @@
|
||||
list([
|
||||
dict({
|
||||
'content': '''
|
||||
Current time is 16:00:00. Today's date is 2024-06-03.
|
||||
You are a voice assistant for Home Assistant.
|
||||
Answer questions about the world truthfully.
|
||||
Answer in plain text. Keep it simple and to the point.
|
||||
Only if the user wants to control a device, tell them to expose entities to their voice assistant in Home Assistant.
|
||||
Current time is 16:00:00. Today's date is 2024-06-03.
|
||||
''',
|
||||
'role': 'system',
|
||||
}),
|
||||
|
@ -1047,6 +1047,36 @@ async def test_reauth_confirm_invalid_with_unique_id(
|
||||
assert entry.data[CONF_NOISE_PSK] == VALID_NOISE_PSK
|
||||
|
||||
|
||||
@pytest.mark.usefixtures("mock_zeroconf")
|
||||
async def test_reauth_encryption_key_removed(
|
||||
hass: HomeAssistant, mock_client, mock_setup_entry: None
|
||||
) -> None:
|
||||
"""Test reauth when the encryption key was removed."""
|
||||
entry = MockConfigEntry(
|
||||
domain=DOMAIN,
|
||||
data={
|
||||
CONF_HOST: "127.0.0.1",
|
||||
CONF_PORT: 6053,
|
||||
CONF_PASSWORD: "",
|
||||
CONF_NOISE_PSK: VALID_NOISE_PSK,
|
||||
},
|
||||
unique_id="test",
|
||||
)
|
||||
entry.add_to_hass(hass)
|
||||
|
||||
result = await entry.start_reauth_flow(hass)
|
||||
assert result["type"] is FlowResultType.FORM
|
||||
assert result["step_id"] == "reauth_encryption_removed_confirm"
|
||||
|
||||
result = await hass.config_entries.flow.async_configure(
|
||||
result["flow_id"], user_input={}
|
||||
)
|
||||
|
||||
assert result["type"] is FlowResultType.ABORT
|
||||
assert result["reason"] == "reauth_successful"
|
||||
assert entry.data[CONF_NOISE_PSK] == ""
|
||||
|
||||
|
||||
async def test_discovery_dhcp_updates_host(
|
||||
hass: HomeAssistant, mock_client: APIClient, mock_setup_entry: None
|
||||
) -> None:
|
||||
|
@ -357,7 +357,7 @@
|
||||
'device_id': <ANY>,
|
||||
'disabled_by': None,
|
||||
'domain': 'sensor',
|
||||
'entity_category': None,
|
||||
'entity_category': <EntityCategory.DIAGNOSTIC: 'diagnostic'>,
|
||||
'entity_id': 'sensor.mock_title_link_download_noise_margin',
|
||||
'has_entity_name': True,
|
||||
'hidden_by': None,
|
||||
@ -405,7 +405,7 @@
|
||||
'device_id': <ANY>,
|
||||
'disabled_by': None,
|
||||
'domain': 'sensor',
|
||||
'entity_category': None,
|
||||
'entity_category': <EntityCategory.DIAGNOSTIC: 'diagnostic'>,
|
||||
'entity_id': 'sensor.mock_title_link_download_power_attenuation',
|
||||
'has_entity_name': True,
|
||||
'hidden_by': None,
|
||||
@ -502,7 +502,7 @@
|
||||
'device_id': <ANY>,
|
||||
'disabled_by': None,
|
||||
'domain': 'sensor',
|
||||
'entity_category': None,
|
||||
'entity_category': <EntityCategory.DIAGNOSTIC: 'diagnostic'>,
|
||||
'entity_id': 'sensor.mock_title_link_upload_noise_margin',
|
||||
'has_entity_name': True,
|
||||
'hidden_by': None,
|
||||
@ -550,7 +550,7 @@
|
||||
'device_id': <ANY>,
|
||||
'disabled_by': None,
|
||||
'domain': 'sensor',
|
||||
'entity_category': None,
|
||||
'entity_category': <EntityCategory.DIAGNOSTIC: 'diagnostic'>,
|
||||
'entity_id': 'sensor.mock_title_link_upload_power_attenuation',
|
||||
'has_entity_name': True,
|
||||
'hidden_by': None,
|
||||
|
@ -6,7 +6,7 @@
|
||||
tuple(
|
||||
),
|
||||
dict({
|
||||
'config': GenerateContentConfig(http_options=None, system_instruction="Current time is 05:00:00. Today's date is 2024-05-24.\nYou are a voice assistant for Home Assistant.\nAnswer questions about the world truthfully.\nAnswer in plain text. Keep it simple and to the point.\nOnly if the user wants to control a device, tell them to expose entities to their voice assistant in Home Assistant.", temperature=1.0, top_p=0.95, top_k=64.0, candidate_count=None, max_output_tokens=150, stop_sequences=None, response_logprobs=None, logprobs=None, presence_penalty=None, frequency_penalty=None, seed=None, response_mime_type=None, response_schema=None, routing_config=None, safety_settings=[SafetySetting(method=None, category=<HarmCategory.HARM_CATEGORY_HATE_SPEECH: 'HARM_CATEGORY_HATE_SPEECH'>, threshold=<HarmBlockThreshold.BLOCK_MEDIUM_AND_ABOVE: 'BLOCK_MEDIUM_AND_ABOVE'>), SafetySetting(method=None, category=<HarmCategory.HARM_CATEGORY_HARASSMENT: 'HARM_CATEGORY_HARASSMENT'>, threshold=<HarmBlockThreshold.BLOCK_MEDIUM_AND_ABOVE: 'BLOCK_MEDIUM_AND_ABOVE'>), SafetySetting(method=None, category=<HarmCategory.HARM_CATEGORY_DANGEROUS_CONTENT: 'HARM_CATEGORY_DANGEROUS_CONTENT'>, threshold=<HarmBlockThreshold.BLOCK_MEDIUM_AND_ABOVE: 'BLOCK_MEDIUM_AND_ABOVE'>), SafetySetting(method=None, category=<HarmCategory.HARM_CATEGORY_SEXUALLY_EXPLICIT: 'HARM_CATEGORY_SEXUALLY_EXPLICIT'>, threshold=<HarmBlockThreshold.BLOCK_MEDIUM_AND_ABOVE: 'BLOCK_MEDIUM_AND_ABOVE'>)], tools=[Tool(function_declarations=[FunctionDeclaration(response=None, description='Test function', name='test_tool', parameters=Schema(example=None, pattern=None, default=None, max_length=None, title=None, min_length=None, min_properties=None, max_properties=None, any_of=None, description=None, enum=None, format=None, items=None, max_items=None, maximum=None, min_items=None, minimum=None, nullable=None, properties={'param1': Schema(example=None, pattern=None, default=None, max_length=None, title=None, min_length=None, 
min_properties=None, max_properties=None, any_of=None, description='Test parameters', enum=None, format=None, items=Schema(example=None, pattern=None, default=None, max_length=None, title=None, min_length=None, min_properties=None, max_properties=None, any_of=None, description=None, enum=None, format=None, items=None, max_items=None, maximum=None, min_items=None, minimum=None, nullable=None, properties=None, property_ordering=None, required=None, type=<Type.STRING: 'STRING'>), max_items=None, maximum=None, min_items=None, minimum=None, nullable=None, properties=None, property_ordering=None, required=None, type=<Type.ARRAY: 'ARRAY'>), 'param2': Schema(example=None, pattern=None, default=None, max_length=None, title=None, min_length=None, min_properties=None, max_properties=None, any_of=None, description=None, enum=None, format=None, items=None, max_items=None, maximum=None, min_items=None, minimum=None, nullable=None, properties=None, property_ordering=None, required=None, type=None), 'param3': Schema(example=None, pattern=None, default=None, max_length=None, title=None, min_length=None, min_properties=None, max_properties=None, any_of=None, description=None, enum=None, format=None, items=None, max_items=None, maximum=None, min_items=None, minimum=None, nullable=None, properties={'json': Schema(example=None, pattern=None, default=None, max_length=None, title=None, min_length=None, min_properties=None, max_properties=None, any_of=None, description=None, enum=None, format=None, items=None, max_items=None, maximum=None, min_items=None, minimum=None, nullable=None, properties=None, property_ordering=None, required=None, type=<Type.STRING: 'STRING'>)}, property_ordering=None, required=[], type=<Type.OBJECT: 'OBJECT'>)}, property_ordering=None, required=[], type=<Type.OBJECT: 'OBJECT'>))], retrieval=None, google_search=None, google_search_retrieval=None, code_execution=None)], tool_config=None, labels=None, cached_content=None, response_modalities=None, 
media_resolution=None, speech_config=None, audio_timestamp=None, automatic_function_calling=AutomaticFunctionCallingConfig(disable=True, maximum_remote_calls=None, ignore_call_history=None), thinking_config=None),
|
||||
'config': GenerateContentConfig(http_options=None, system_instruction="You are a voice assistant for Home Assistant.\nAnswer questions about the world truthfully.\nAnswer in plain text. Keep it simple and to the point.\nOnly if the user wants to control a device, tell them to expose entities to their voice assistant in Home Assistant.\nCurrent time is 05:00:00. Today's date is 2024-05-24.", temperature=1.0, top_p=0.95, top_k=64.0, candidate_count=None, max_output_tokens=150, stop_sequences=None, response_logprobs=None, logprobs=None, presence_penalty=None, frequency_penalty=None, seed=None, response_mime_type=None, response_schema=None, routing_config=None, safety_settings=[SafetySetting(method=None, category=<HarmCategory.HARM_CATEGORY_HATE_SPEECH: 'HARM_CATEGORY_HATE_SPEECH'>, threshold=<HarmBlockThreshold.BLOCK_MEDIUM_AND_ABOVE: 'BLOCK_MEDIUM_AND_ABOVE'>), SafetySetting(method=None, category=<HarmCategory.HARM_CATEGORY_HARASSMENT: 'HARM_CATEGORY_HARASSMENT'>, threshold=<HarmBlockThreshold.BLOCK_MEDIUM_AND_ABOVE: 'BLOCK_MEDIUM_AND_ABOVE'>), SafetySetting(method=None, category=<HarmCategory.HARM_CATEGORY_DANGEROUS_CONTENT: 'HARM_CATEGORY_DANGEROUS_CONTENT'>, threshold=<HarmBlockThreshold.BLOCK_MEDIUM_AND_ABOVE: 'BLOCK_MEDIUM_AND_ABOVE'>), SafetySetting(method=None, category=<HarmCategory.HARM_CATEGORY_SEXUALLY_EXPLICIT: 'HARM_CATEGORY_SEXUALLY_EXPLICIT'>, threshold=<HarmBlockThreshold.BLOCK_MEDIUM_AND_ABOVE: 'BLOCK_MEDIUM_AND_ABOVE'>)], tools=[Tool(function_declarations=[FunctionDeclaration(response=None, description='Test function', name='test_tool', parameters=Schema(example=None, pattern=None, default=None, max_length=None, title=None, min_length=None, min_properties=None, max_properties=None, any_of=None, description=None, enum=None, format=None, items=None, max_items=None, maximum=None, min_items=None, minimum=None, nullable=None, properties={'param1': Schema(example=None, pattern=None, default=None, max_length=None, title=None, min_length=None, 
min_properties=None, max_properties=None, any_of=None, description='Test parameters', enum=None, format=None, items=Schema(example=None, pattern=None, default=None, max_length=None, title=None, min_length=None, min_properties=None, max_properties=None, any_of=None, description=None, enum=None, format=None, items=None, max_items=None, maximum=None, min_items=None, minimum=None, nullable=None, properties=None, property_ordering=None, required=None, type=<Type.STRING: 'STRING'>), max_items=None, maximum=None, min_items=None, minimum=None, nullable=None, properties=None, property_ordering=None, required=None, type=<Type.ARRAY: 'ARRAY'>), 'param2': Schema(example=None, pattern=None, default=None, max_length=None, title=None, min_length=None, min_properties=None, max_properties=None, any_of=None, description=None, enum=None, format=None, items=None, max_items=None, maximum=None, min_items=None, minimum=None, nullable=None, properties=None, property_ordering=None, required=None, type=None), 'param3': Schema(example=None, pattern=None, default=None, max_length=None, title=None, min_length=None, min_properties=None, max_properties=None, any_of=None, description=None, enum=None, format=None, items=None, max_items=None, maximum=None, min_items=None, minimum=None, nullable=None, properties={'json': Schema(example=None, pattern=None, default=None, max_length=None, title=None, min_length=None, min_properties=None, max_properties=None, any_of=None, description=None, enum=None, format=None, items=None, max_items=None, maximum=None, min_items=None, minimum=None, nullable=None, properties=None, property_ordering=None, required=None, type=<Type.STRING: 'STRING'>)}, property_ordering=None, required=[], type=<Type.OBJECT: 'OBJECT'>)}, property_ordering=None, required=[], type=<Type.OBJECT: 'OBJECT'>))], retrieval=None, google_search=None, google_search_retrieval=None, code_execution=None)], tool_config=None, labels=None, cached_content=None, response_modalities=None, 
media_resolution=None, speech_config=None, audio_timestamp=None, automatic_function_calling=AutomaticFunctionCallingConfig(disable=True, maximum_remote_calls=None, ignore_call_history=None), thinking_config=None),
|
||||
'history': list([
|
||||
]),
|
||||
'model': 'models/gemini-2.0-flash',
|
||||
@ -39,7 +39,7 @@
|
||||
tuple(
|
||||
),
|
||||
dict({
|
||||
'config': GenerateContentConfig(http_options=None, system_instruction="Current time is 05:00:00. Today's date is 2024-05-24.\nYou are a voice assistant for Home Assistant.\nAnswer questions about the world truthfully.\nAnswer in plain text. Keep it simple and to the point.\nOnly if the user wants to control a device, tell them to expose entities to their voice assistant in Home Assistant.", temperature=1.0, top_p=0.95, top_k=64.0, candidate_count=None, max_output_tokens=150, stop_sequences=None, response_logprobs=None, logprobs=None, presence_penalty=None, frequency_penalty=None, seed=None, response_mime_type=None, response_schema=None, routing_config=None, safety_settings=[SafetySetting(method=None, category=<HarmCategory.HARM_CATEGORY_HATE_SPEECH: 'HARM_CATEGORY_HATE_SPEECH'>, threshold=<HarmBlockThreshold.BLOCK_MEDIUM_AND_ABOVE: 'BLOCK_MEDIUM_AND_ABOVE'>), SafetySetting(method=None, category=<HarmCategory.HARM_CATEGORY_HARASSMENT: 'HARM_CATEGORY_HARASSMENT'>, threshold=<HarmBlockThreshold.BLOCK_MEDIUM_AND_ABOVE: 'BLOCK_MEDIUM_AND_ABOVE'>), SafetySetting(method=None, category=<HarmCategory.HARM_CATEGORY_DANGEROUS_CONTENT: 'HARM_CATEGORY_DANGEROUS_CONTENT'>, threshold=<HarmBlockThreshold.BLOCK_MEDIUM_AND_ABOVE: 'BLOCK_MEDIUM_AND_ABOVE'>), SafetySetting(method=None, category=<HarmCategory.HARM_CATEGORY_SEXUALLY_EXPLICIT: 'HARM_CATEGORY_SEXUALLY_EXPLICIT'>, threshold=<HarmBlockThreshold.BLOCK_MEDIUM_AND_ABOVE: 'BLOCK_MEDIUM_AND_ABOVE'>)], tools=[Tool(function_declarations=[FunctionDeclaration(response=None, description='Test function', name='test_tool', parameters=None)], retrieval=None, google_search=None, google_search_retrieval=None, code_execution=None)], tool_config=None, labels=None, cached_content=None, response_modalities=None, media_resolution=None, speech_config=None, audio_timestamp=None, automatic_function_calling=AutomaticFunctionCallingConfig(disable=True, maximum_remote_calls=None, ignore_call_history=None), thinking_config=None),
|
||||
'config': GenerateContentConfig(http_options=None, system_instruction="You are a voice assistant for Home Assistant.\nAnswer questions about the world truthfully.\nAnswer in plain text. Keep it simple and to the point.\nOnly if the user wants to control a device, tell them to expose entities to their voice assistant in Home Assistant.\nCurrent time is 05:00:00. Today's date is 2024-05-24.", temperature=1.0, top_p=0.95, top_k=64.0, candidate_count=None, max_output_tokens=150, stop_sequences=None, response_logprobs=None, logprobs=None, presence_penalty=None, frequency_penalty=None, seed=None, response_mime_type=None, response_schema=None, routing_config=None, safety_settings=[SafetySetting(method=None, category=<HarmCategory.HARM_CATEGORY_HATE_SPEECH: 'HARM_CATEGORY_HATE_SPEECH'>, threshold=<HarmBlockThreshold.BLOCK_MEDIUM_AND_ABOVE: 'BLOCK_MEDIUM_AND_ABOVE'>), SafetySetting(method=None, category=<HarmCategory.HARM_CATEGORY_HARASSMENT: 'HARM_CATEGORY_HARASSMENT'>, threshold=<HarmBlockThreshold.BLOCK_MEDIUM_AND_ABOVE: 'BLOCK_MEDIUM_AND_ABOVE'>), SafetySetting(method=None, category=<HarmCategory.HARM_CATEGORY_DANGEROUS_CONTENT: 'HARM_CATEGORY_DANGEROUS_CONTENT'>, threshold=<HarmBlockThreshold.BLOCK_MEDIUM_AND_ABOVE: 'BLOCK_MEDIUM_AND_ABOVE'>), SafetySetting(method=None, category=<HarmCategory.HARM_CATEGORY_SEXUALLY_EXPLICIT: 'HARM_CATEGORY_SEXUALLY_EXPLICIT'>, threshold=<HarmBlockThreshold.BLOCK_MEDIUM_AND_ABOVE: 'BLOCK_MEDIUM_AND_ABOVE'>)], tools=[Tool(function_declarations=[FunctionDeclaration(response=None, description='Test function', name='test_tool', parameters=None)], retrieval=None, google_search=None, google_search_retrieval=None, code_execution=None)], tool_config=None, labels=None, cached_content=None, response_modalities=None, media_resolution=None, speech_config=None, audio_timestamp=None, automatic_function_calling=AutomaticFunctionCallingConfig(disable=True, maximum_remote_calls=None, ignore_call_history=None), thinking_config=None),
|
||||
'history': list([
|
||||
]),
|
||||
'model': 'models/gemini-2.0-flash',
|
||||
@ -72,7 +72,7 @@
|
||||
tuple(
|
||||
),
|
||||
dict({
|
||||
'config': GenerateContentConfig(http_options=None, system_instruction="Current time is 05:00:00. Today's date is 2024-05-24.\nYou are a voice assistant for Home Assistant.\nAnswer questions about the world truthfully.\nAnswer in plain text. Keep it simple and to the point.\nOnly if the user wants to control a device, tell them to expose entities to their voice assistant in Home Assistant.", temperature=1.0, top_p=0.95, top_k=64.0, candidate_count=None, max_output_tokens=150, stop_sequences=None, response_logprobs=None, logprobs=None, presence_penalty=None, frequency_penalty=None, seed=None, response_mime_type=None, response_schema=None, routing_config=None, safety_settings=[SafetySetting(method=None, category=<HarmCategory.HARM_CATEGORY_HATE_SPEECH: 'HARM_CATEGORY_HATE_SPEECH'>, threshold=<HarmBlockThreshold.BLOCK_MEDIUM_AND_ABOVE: 'BLOCK_MEDIUM_AND_ABOVE'>), SafetySetting(method=None, category=<HarmCategory.HARM_CATEGORY_HARASSMENT: 'HARM_CATEGORY_HARASSMENT'>, threshold=<HarmBlockThreshold.BLOCK_MEDIUM_AND_ABOVE: 'BLOCK_MEDIUM_AND_ABOVE'>), SafetySetting(method=None, category=<HarmCategory.HARM_CATEGORY_DANGEROUS_CONTENT: 'HARM_CATEGORY_DANGEROUS_CONTENT'>, threshold=<HarmBlockThreshold.BLOCK_MEDIUM_AND_ABOVE: 'BLOCK_MEDIUM_AND_ABOVE'>), SafetySetting(method=None, category=<HarmCategory.HARM_CATEGORY_SEXUALLY_EXPLICIT: 'HARM_CATEGORY_SEXUALLY_EXPLICIT'>, threshold=<HarmBlockThreshold.BLOCK_MEDIUM_AND_ABOVE: 'BLOCK_MEDIUM_AND_ABOVE'>)], tools=[Tool(function_declarations=[FunctionDeclaration(response=None, description='Test function', name='test_tool', parameters=Schema(example=None, pattern=None, default=None, max_length=None, title=None, min_length=None, min_properties=None, max_properties=None, any_of=None, description=None, enum=None, format=None, items=None, max_items=None, maximum=None, min_items=None, minimum=None, nullable=None, properties={'param1': Schema(example=None, pattern=None, default=None, max_length=None, title=None, min_length=None, 
min_properties=None, max_properties=None, any_of=None, description='Test parameters', enum=None, format=None, items=Schema(example=None, pattern=None, default=None, max_length=None, title=None, min_length=None, min_properties=None, max_properties=None, any_of=None, description=None, enum=None, format=None, items=None, max_items=None, maximum=None, min_items=None, minimum=None, nullable=None, properties=None, property_ordering=None, required=None, type=<Type.STRING: 'STRING'>), max_items=None, maximum=None, min_items=None, minimum=None, nullable=None, properties=None, property_ordering=None, required=None, type=<Type.ARRAY: 'ARRAY'>), 'param2': Schema(example=None, pattern=None, default=None, max_length=None, title=None, min_length=None, min_properties=None, max_properties=None, any_of=None, description=None, enum=None, format=None, items=None, max_items=None, maximum=None, min_items=None, minimum=None, nullable=None, properties=None, property_ordering=None, required=None, type=None), 'param3': Schema(example=None, pattern=None, default=None, max_length=None, title=None, min_length=None, min_properties=None, max_properties=None, any_of=None, description=None, enum=None, format=None, items=None, max_items=None, maximum=None, min_items=None, minimum=None, nullable=None, properties={'json': Schema(example=None, pattern=None, default=None, max_length=None, title=None, min_length=None, min_properties=None, max_properties=None, any_of=None, description=None, enum=None, format=None, items=None, max_items=None, maximum=None, min_items=None, minimum=None, nullable=None, properties=None, property_ordering=None, required=None, type=<Type.STRING: 'STRING'>)}, property_ordering=None, required=[], type=<Type.OBJECT: 'OBJECT'>)}, property_ordering=None, required=[], type=<Type.OBJECT: 'OBJECT'>))], retrieval=None, google_search=None, google_search_retrieval=None, code_execution=None), Tool(function_declarations=None, retrieval=None, google_search=GoogleSearch(), 
google_search_retrieval=None, code_execution=None)], tool_config=None, labels=None, cached_content=None, response_modalities=None, media_resolution=None, speech_config=None, audio_timestamp=None, automatic_function_calling=AutomaticFunctionCallingConfig(disable=True, maximum_remote_calls=None, ignore_call_history=None), thinking_config=None),
|
||||
'config': GenerateContentConfig(http_options=None, system_instruction="You are a voice assistant for Home Assistant.\nAnswer questions about the world truthfully.\nAnswer in plain text. Keep it simple and to the point.\nOnly if the user wants to control a device, tell them to expose entities to their voice assistant in Home Assistant.\nCurrent time is 05:00:00. Today's date is 2024-05-24.", temperature=1.0, top_p=0.95, top_k=64.0, candidate_count=None, max_output_tokens=150, stop_sequences=None, response_logprobs=None, logprobs=None, presence_penalty=None, frequency_penalty=None, seed=None, response_mime_type=None, response_schema=None, routing_config=None, safety_settings=[SafetySetting(method=None, category=<HarmCategory.HARM_CATEGORY_HATE_SPEECH: 'HARM_CATEGORY_HATE_SPEECH'>, threshold=<HarmBlockThreshold.BLOCK_MEDIUM_AND_ABOVE: 'BLOCK_MEDIUM_AND_ABOVE'>), SafetySetting(method=None, category=<HarmCategory.HARM_CATEGORY_HARASSMENT: 'HARM_CATEGORY_HARASSMENT'>, threshold=<HarmBlockThreshold.BLOCK_MEDIUM_AND_ABOVE: 'BLOCK_MEDIUM_AND_ABOVE'>), SafetySetting(method=None, category=<HarmCategory.HARM_CATEGORY_DANGEROUS_CONTENT: 'HARM_CATEGORY_DANGEROUS_CONTENT'>, threshold=<HarmBlockThreshold.BLOCK_MEDIUM_AND_ABOVE: 'BLOCK_MEDIUM_AND_ABOVE'>), SafetySetting(method=None, category=<HarmCategory.HARM_CATEGORY_SEXUALLY_EXPLICIT: 'HARM_CATEGORY_SEXUALLY_EXPLICIT'>, threshold=<HarmBlockThreshold.BLOCK_MEDIUM_AND_ABOVE: 'BLOCK_MEDIUM_AND_ABOVE'>)], tools=[Tool(function_declarations=[FunctionDeclaration(response=None, description='Test function', name='test_tool', parameters=Schema(example=None, pattern=None, default=None, max_length=None, title=None, min_length=None, min_properties=None, max_properties=None, any_of=None, description=None, enum=None, format=None, items=None, max_items=None, maximum=None, min_items=None, minimum=None, nullable=None, properties={'param1': Schema(example=None, pattern=None, default=None, max_length=None, title=None, min_length=None, 
min_properties=None, max_properties=None, any_of=None, description='Test parameters', enum=None, format=None, items=Schema(example=None, pattern=None, default=None, max_length=None, title=None, min_length=None, min_properties=None, max_properties=None, any_of=None, description=None, enum=None, format=None, items=None, max_items=None, maximum=None, min_items=None, minimum=None, nullable=None, properties=None, property_ordering=None, required=None, type=<Type.STRING: 'STRING'>), max_items=None, maximum=None, min_items=None, minimum=None, nullable=None, properties=None, property_ordering=None, required=None, type=<Type.ARRAY: 'ARRAY'>), 'param2': Schema(example=None, pattern=None, default=None, max_length=None, title=None, min_length=None, min_properties=None, max_properties=None, any_of=None, description=None, enum=None, format=None, items=None, max_items=None, maximum=None, min_items=None, minimum=None, nullable=None, properties=None, property_ordering=None, required=None, type=None), 'param3': Schema(example=None, pattern=None, default=None, max_length=None, title=None, min_length=None, min_properties=None, max_properties=None, any_of=None, description=None, enum=None, format=None, items=None, max_items=None, maximum=None, min_items=None, minimum=None, nullable=None, properties={'json': Schema(example=None, pattern=None, default=None, max_length=None, title=None, min_length=None, min_properties=None, max_properties=None, any_of=None, description=None, enum=None, format=None, items=None, max_items=None, maximum=None, min_items=None, minimum=None, nullable=None, properties=None, property_ordering=None, required=None, type=<Type.STRING: 'STRING'>)}, property_ordering=None, required=[], type=<Type.OBJECT: 'OBJECT'>)}, property_ordering=None, required=[], type=<Type.OBJECT: 'OBJECT'>))], retrieval=None, google_search=None, google_search_retrieval=None, code_execution=None), Tool(function_declarations=None, retrieval=None, google_search=GoogleSearch(), 
google_search_retrieval=None, code_execution=None)], tool_config=None, labels=None, cached_content=None, response_modalities=None, media_resolution=None, speech_config=None, audio_timestamp=None, automatic_function_calling=AutomaticFunctionCallingConfig(disable=True, maximum_remote_calls=None, ignore_call_history=None), thinking_config=None),
|
||||
'history': list([
|
||||
]),
|
||||
'model': 'models/gemini-2.0-flash',
|
||||
|
@ -715,3 +715,48 @@ async def test_empty_content_in_chat_history(
|
||||
|
||||
assert actual_history[0].parts[0].text == first_input
|
||||
assert actual_history[1].parts[0].text == " "
|
||||
|
||||
|
||||
@pytest.mark.usefixtures("mock_init_component")
|
||||
async def test_history_always_user_first_turn(
|
||||
hass: HomeAssistant,
|
||||
mock_config_entry: MockConfigEntry,
|
||||
snapshot: SnapshotAssertion,
|
||||
) -> None:
|
||||
"""Test that the user is always first in the chat history."""
|
||||
with (
|
||||
chat_session.async_get_chat_session(hass) as session,
|
||||
async_get_chat_log(hass, session) as chat_log,
|
||||
):
|
||||
chat_log.async_add_assistant_content_without_tools(
|
||||
conversation.AssistantContent(
|
||||
agent_id="conversation.google_generative_ai_conversation",
|
||||
content="Garage door left open, do you want to close it?",
|
||||
)
|
||||
)
|
||||
|
||||
with patch("google.genai.chats.AsyncChats.create") as mock_create:
|
||||
mock_chat = AsyncMock()
|
||||
mock_create.return_value.send_message = mock_chat
|
||||
chat_response = Mock(prompt_feedback=None)
|
||||
mock_chat.return_value = chat_response
|
||||
chat_response.candidates = [Mock(content=Mock(parts=[]))]
|
||||
|
||||
await conversation.async_converse(
|
||||
hass,
|
||||
"hello",
|
||||
chat_log.conversation_id,
|
||||
Context(),
|
||||
agent_id="conversation.google_generative_ai_conversation",
|
||||
)
|
||||
|
||||
_, kwargs = mock_create.call_args
|
||||
actual_history = kwargs.get("history")
|
||||
|
||||
assert actual_history[0].parts[0].text == " "
|
||||
assert actual_history[0].role == "user"
|
||||
assert (
|
||||
actual_history[1].parts[0].text
|
||||
== "Garage door left open, do you want to close it?"
|
||||
)
|
||||
assert actual_history[1].role == "model"
|
||||
|
@ -42,6 +42,7 @@ def mock_all(
|
||||
aioclient_mock: AiohttpClientMocker,
|
||||
supervisor_is_connected: AsyncMock,
|
||||
resolution_info: AsyncMock,
|
||||
addon_info: AsyncMock,
|
||||
) -> None:
|
||||
"""Mock all setup requests."""
|
||||
aioclient_mock.post("http://127.0.0.1/homeassistant/options", json={"result": "ok"})
|
||||
|
@ -558,6 +558,7 @@ async def test_config_flow_zigbee_not_hassio(hass: HomeAssistant) -> None:
|
||||
assert zha_flow["step_id"] == "confirm"
|
||||
|
||||
|
||||
@pytest.mark.usefixtures("addon_store_info")
|
||||
async def test_options_flow_zigbee_to_thread(hass: HomeAssistant) -> None:
|
||||
"""Test the options flow, migrating Zigbee to Thread."""
|
||||
config_entry = MockConfigEntry(
|
||||
@ -649,6 +650,7 @@ async def test_options_flow_zigbee_to_thread(hass: HomeAssistant) -> None:
|
||||
assert config_entry.data["firmware"] == "spinel"
|
||||
|
||||
|
||||
@pytest.mark.usefixtures("addon_store_info")
|
||||
async def test_options_flow_thread_to_zigbee(hass: HomeAssistant) -> None:
|
||||
"""Test the options flow, migrating Thread to Zigbee."""
|
||||
config_entry = MockConfigEntry(
|
||||
|
@ -660,6 +660,7 @@ async def test_options_flow_zigbee_to_thread_zha_configured(
|
||||
"ignore_translations_for_mock_domains",
|
||||
["test_firmware_domain"],
|
||||
)
|
||||
@pytest.mark.usefixtures("addon_store_info")
|
||||
async def test_options_flow_thread_to_zigbee_otbr_configured(
|
||||
hass: HomeAssistant,
|
||||
) -> None:
|
||||
|
@ -18,7 +18,6 @@ from homeassistant.auth.models import User
|
||||
from homeassistant.auth.providers import trusted_networks
|
||||
from homeassistant.auth.providers.homeassistant import HassAuthProvider
|
||||
from homeassistant.components import websocket_api
|
||||
from homeassistant.components.http import KEY_HASS
|
||||
from homeassistant.components.http.auth import (
|
||||
CONTENT_USER_NAME,
|
||||
DATA_SIGN_SECRET,
|
||||
@ -28,13 +27,13 @@ from homeassistant.components.http.auth import (
|
||||
async_sign_path,
|
||||
async_user_not_allowed_do_auth,
|
||||
)
|
||||
from homeassistant.components.http.const import KEY_AUTHENTICATED
|
||||
from homeassistant.components.http.forwarded import async_setup_forwarded
|
||||
from homeassistant.components.http.request_context import (
|
||||
current_request,
|
||||
setup_request_context,
|
||||
)
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.helpers.http import KEY_AUTHENTICATED, KEY_HASS
|
||||
from homeassistant.setup import async_setup_component
|
||||
|
||||
from . import HTTP_HEADER_HA_AUTH
|
||||
|
@ -11,7 +11,6 @@ from aiohttp.web_middlewares import middleware
|
||||
import pytest
|
||||
|
||||
from homeassistant.components import http
|
||||
from homeassistant.components.http import KEY_AUTHENTICATED, KEY_HASS
|
||||
from homeassistant.components.http.ban import (
|
||||
IP_BANS_FILE,
|
||||
KEY_BAN_MANAGER,
|
||||
@ -22,6 +21,7 @@ from homeassistant.components.http.ban import (
|
||||
from homeassistant.components.http.view import request_handler_factory
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import HomeAssistantError
|
||||
from homeassistant.helpers.http import KEY_AUTHENTICATED, KEY_HASS
|
||||
from homeassistant.setup import async_setup_component
|
||||
|
||||
from tests.common import async_get_persistent_notifications
|
||||
|
@ -18,9 +18,8 @@ from aiohttp.test_utils import TestClient
|
||||
import pytest
|
||||
|
||||
from homeassistant.components.http.cors import setup_cors
|
||||
from homeassistant.components.http.view import HomeAssistantView
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.http import KEY_ALLOW_CONFIGURED_CORS
|
||||
from homeassistant.helpers.http import KEY_ALLOW_CONFIGURED_CORS, HomeAssistantView
|
||||
from homeassistant.setup import async_setup_component
|
||||
|
||||
from . import HTTP_HEADER_HA_AUTH
|
||||
|
@ -1,17 +1,34 @@
|
||||
"""Common fixtures for the Model Context Protocol tests."""
|
||||
|
||||
from collections.abc import Generator
|
||||
import datetime
|
||||
from unittest.mock import AsyncMock, patch
|
||||
|
||||
import pytest
|
||||
|
||||
from homeassistant.components.mcp.const import DOMAIN
|
||||
from homeassistant.const import CONF_URL
|
||||
from homeassistant.components.application_credentials import (
|
||||
ClientCredential,
|
||||
async_import_client_credential,
|
||||
)
|
||||
from homeassistant.components.mcp.const import (
|
||||
CONF_ACCESS_TOKEN,
|
||||
CONF_AUTHORIZATION_URL,
|
||||
CONF_TOKEN_URL,
|
||||
DOMAIN,
|
||||
)
|
||||
from homeassistant.const import CONF_TOKEN, CONF_URL
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.setup import async_setup_component
|
||||
|
||||
from tests.common import MockConfigEntry
|
||||
|
||||
TEST_API_NAME = "Memory Server"
|
||||
MCP_SERVER_URL = "http://1.1.1.1:8080/sse"
|
||||
CLIENT_ID = "test-client-id"
|
||||
CLIENT_SECRET = "test-client-secret"
|
||||
AUTH_DOMAIN = "some-auth-domain"
|
||||
OAUTH_AUTHORIZE_URL = "https://example-auth-server.com/authorize-path"
|
||||
OAUTH_TOKEN_URL = "https://example-auth-server.com/token-path"
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
@ -29,6 +46,7 @@ def mock_mcp_client() -> Generator[AsyncMock]:
|
||||
with (
|
||||
patch("homeassistant.components.mcp.coordinator.sse_client"),
|
||||
patch("homeassistant.components.mcp.coordinator.ClientSession") as mock_session,
|
||||
patch("homeassistant.components.mcp.coordinator.TIMEOUT", 1),
|
||||
):
|
||||
yield mock_session.return_value.__aenter__
|
||||
|
||||
@ -43,3 +61,47 @@ def mock_config_entry(hass: HomeAssistant) -> MockConfigEntry:
|
||||
)
|
||||
config_entry.add_to_hass(hass)
|
||||
return config_entry
|
||||
|
||||
|
||||
@pytest.fixture(name="credential")
|
||||
async def mock_credential(hass: HomeAssistant) -> None:
|
||||
"""Fixture that provides the ClientCredential for the test."""
|
||||
assert await async_setup_component(hass, "application_credentials", {})
|
||||
await async_import_client_credential(
|
||||
hass,
|
||||
DOMAIN,
|
||||
ClientCredential(CLIENT_ID, CLIENT_SECRET),
|
||||
AUTH_DOMAIN,
|
||||
)
|
||||
|
||||
|
||||
@pytest.fixture(name="config_entry_token_expiration")
|
||||
def mock_config_entry_token_expiration() -> datetime.datetime:
|
||||
"""Fixture to mock the token expiration."""
|
||||
return datetime.datetime.now(datetime.UTC) + datetime.timedelta(days=1)
|
||||
|
||||
|
||||
@pytest.fixture(name="config_entry_with_auth")
|
||||
def mock_config_entry_with_auth(
|
||||
hass: HomeAssistant,
|
||||
config_entry_token_expiration: datetime.datetime,
|
||||
) -> MockConfigEntry:
|
||||
"""Fixture to load the integration with authentication."""
|
||||
config_entry = MockConfigEntry(
|
||||
domain=DOMAIN,
|
||||
unique_id=AUTH_DOMAIN,
|
||||
data={
|
||||
"auth_implementation": AUTH_DOMAIN,
|
||||
CONF_URL: MCP_SERVER_URL,
|
||||
CONF_AUTHORIZATION_URL: OAUTH_AUTHORIZE_URL,
|
||||
CONF_TOKEN_URL: OAUTH_TOKEN_URL,
|
||||
CONF_TOKEN: {
|
||||
CONF_ACCESS_TOKEN: "test-access-token",
|
||||
"refresh_token": "test-refresh-token",
|
||||
"expires_at": config_entry_token_expiration.timestamp(),
|
||||
},
|
||||
},
|
||||
title=TEST_API_NAME,
|
||||
)
|
||||
config_entry.add_to_hass(hass)
|
||||
return config_entry
|
||||
|
@ -1,20 +1,70 @@
|
||||
"""Test the Model Context Protocol config flow."""
|
||||
|
||||
import json
|
||||
from typing import Any
|
||||
from unittest.mock import AsyncMock, Mock
|
||||
|
||||
import httpx
|
||||
import pytest
|
||||
import respx
|
||||
|
||||
from homeassistant import config_entries
|
||||
from homeassistant.components.mcp.const import DOMAIN
|
||||
from homeassistant.const import CONF_URL
|
||||
from homeassistant.components.mcp.const import (
|
||||
CONF_AUTHORIZATION_URL,
|
||||
CONF_TOKEN_URL,
|
||||
DOMAIN,
|
||||
)
|
||||
from homeassistant.const import CONF_TOKEN, CONF_URL
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.data_entry_flow import FlowResultType
|
||||
from homeassistant.helpers import config_entry_oauth2_flow
|
||||
|
||||
from .conftest import TEST_API_NAME
|
||||
from .conftest import (
|
||||
AUTH_DOMAIN,
|
||||
CLIENT_ID,
|
||||
MCP_SERVER_URL,
|
||||
OAUTH_AUTHORIZE_URL,
|
||||
OAUTH_TOKEN_URL,
|
||||
TEST_API_NAME,
|
||||
)
|
||||
|
||||
from tests.common import MockConfigEntry
|
||||
from tests.test_util.aiohttp import AiohttpClientMocker
|
||||
from tests.typing import ClientSessionGenerator
|
||||
|
||||
MCP_SERVER_BASE_URL = "http://1.1.1.1:8080"
|
||||
OAUTH_DISCOVERY_ENDPOINT = (
|
||||
f"{MCP_SERVER_BASE_URL}/.well-known/oauth-authorization-server"
|
||||
)
|
||||
OAUTH_SERVER_METADATA_RESPONSE = httpx.Response(
|
||||
status_code=200,
|
||||
text=json.dumps(
|
||||
{
|
||||
"authorization_endpoint": OAUTH_AUTHORIZE_URL,
|
||||
"token_endpoint": OAUTH_TOKEN_URL,
|
||||
}
|
||||
),
|
||||
)
|
||||
CALLBACK_PATH = "/auth/external/callback"
|
||||
OAUTH_CALLBACK_URL = f"https://example.com{CALLBACK_PATH}"
|
||||
OAUTH_CODE = "abcd"
|
||||
OAUTH_TOKEN_PAYLOAD = {
|
||||
"refresh_token": "mock-refresh-token",
|
||||
"access_token": "mock-access-token",
|
||||
"type": "Bearer",
|
||||
"expires_in": 60,
|
||||
}
|
||||
|
||||
|
||||
def encode_state(hass: HomeAssistant, flow_id: str) -> str:
|
||||
"""Encode the OAuth JWT."""
|
||||
return config_entry_oauth2_flow._encode_jwt(
|
||||
hass,
|
||||
{
|
||||
"flow_id": flow_id,
|
||||
"redirect_uri": OAUTH_CALLBACK_URL,
|
||||
},
|
||||
)
|
||||
|
||||
|
||||
async def test_form(
|
||||
@ -34,15 +84,19 @@ async def test_form(
|
||||
result = await hass.config_entries.flow.async_configure(
|
||||
result["flow_id"],
|
||||
{
|
||||
CONF_URL: "http://1.1.1.1/sse",
|
||||
CONF_URL: MCP_SERVER_URL,
|
||||
},
|
||||
)
|
||||
|
||||
assert result["type"] is FlowResultType.CREATE_ENTRY
|
||||
assert result["title"] == TEST_API_NAME
|
||||
assert result["data"] == {
|
||||
CONF_URL: "http://1.1.1.1/sse",
|
||||
CONF_URL: MCP_SERVER_URL,
|
||||
}
|
||||
# Config entry does not have a unique id
|
||||
assert result["result"]
|
||||
assert result["result"].unique_id is None
|
||||
|
||||
assert len(mock_setup_entry.mock_calls) == 1
|
||||
|
||||
|
||||
@ -73,7 +127,7 @@ async def test_form_mcp_client_error(
|
||||
result = await hass.config_entries.flow.async_configure(
|
||||
result["flow_id"],
|
||||
{
|
||||
CONF_URL: "http://1.1.1.1/sse",
|
||||
CONF_URL: MCP_SERVER_URL,
|
||||
},
|
||||
)
|
||||
|
||||
@ -89,50 +143,18 @@ async def test_form_mcp_client_error(
|
||||
result = await hass.config_entries.flow.async_configure(
|
||||
result["flow_id"],
|
||||
{
|
||||
CONF_URL: "http://1.1.1.1/sse",
|
||||
CONF_URL: MCP_SERVER_URL,
|
||||
},
|
||||
)
|
||||
|
||||
assert result["type"] is FlowResultType.CREATE_ENTRY
|
||||
assert result["title"] == TEST_API_NAME
|
||||
assert result["data"] == {
|
||||
CONF_URL: "http://1.1.1.1/sse",
|
||||
CONF_URL: MCP_SERVER_URL,
|
||||
}
|
||||
assert len(mock_setup_entry.mock_calls) == 1
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
("side_effect", "expected_error"),
|
||||
[
|
||||
(
|
||||
httpx.HTTPStatusError("", request=None, response=httpx.Response(401)),
|
||||
"invalid_auth",
|
||||
),
|
||||
],
|
||||
)
|
||||
async def test_form_mcp_client_error_abort(
|
||||
hass: HomeAssistant,
|
||||
mock_setup_entry: AsyncMock,
|
||||
mock_mcp_client: Mock,
|
||||
side_effect: Exception,
|
||||
expected_error: str,
|
||||
) -> None:
|
||||
"""Test we handle different client library errors that end with an abort."""
|
||||
result = await hass.config_entries.flow.async_init(
|
||||
DOMAIN, context={"source": config_entries.SOURCE_USER}
|
||||
)
|
||||
mock_mcp_client.side_effect = side_effect
|
||||
result = await hass.config_entries.flow.async_configure(
|
||||
result["flow_id"],
|
||||
{
|
||||
CONF_URL: "http://1.1.1.1/sse",
|
||||
},
|
||||
)
|
||||
|
||||
assert result["type"] is FlowResultType.ABORT
|
||||
assert result["reason"] == expected_error
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
"user_input",
|
||||
[
|
||||
@ -165,14 +187,14 @@ async def test_input_form_validation_error(
|
||||
result = await hass.config_entries.flow.async_configure(
|
||||
result["flow_id"],
|
||||
{
|
||||
CONF_URL: "http://1.1.1.1/sse",
|
||||
CONF_URL: MCP_SERVER_URL,
|
||||
},
|
||||
)
|
||||
|
||||
assert result["type"] is FlowResultType.CREATE_ENTRY
|
||||
assert result["title"] == TEST_API_NAME
|
||||
assert result["data"] == {
|
||||
CONF_URL: "http://1.1.1.1/sse",
|
||||
CONF_URL: MCP_SERVER_URL,
|
||||
}
|
||||
assert len(mock_setup_entry.mock_calls) == 1
|
||||
|
||||
@ -183,7 +205,7 @@ async def test_unique_url(
|
||||
"""Test that the same url cannot be configured twice."""
|
||||
config_entry = MockConfigEntry(
|
||||
domain=DOMAIN,
|
||||
data={CONF_URL: "http://1.1.1.1/sse"},
|
||||
data={CONF_URL: MCP_SERVER_URL},
|
||||
title=TEST_API_NAME,
|
||||
)
|
||||
config_entry.add_to_hass(hass)
|
||||
@ -201,7 +223,7 @@ async def test_unique_url(
|
||||
result = await hass.config_entries.flow.async_configure(
|
||||
result["flow_id"],
|
||||
{
|
||||
CONF_URL: "http://1.1.1.1/sse",
|
||||
CONF_URL: MCP_SERVER_URL,
|
||||
},
|
||||
)
|
||||
|
||||
@ -226,9 +248,409 @@ async def test_server_missing_capbilities(
|
||||
result = await hass.config_entries.flow.async_configure(
|
||||
result["flow_id"],
|
||||
{
|
||||
CONF_URL: "http://1.1.1.1/sse",
|
||||
CONF_URL: MCP_SERVER_URL,
|
||||
},
|
||||
)
|
||||
|
||||
assert result["type"] is FlowResultType.ABORT
|
||||
assert result["reason"] == "missing_capabilities"
|
||||
|
||||
|
||||
@respx.mock
|
||||
async def test_oauth_discovery_flow_without_credentials(
|
||||
hass: HomeAssistant,
|
||||
mock_setup_entry: AsyncMock,
|
||||
mock_mcp_client: Mock,
|
||||
) -> None:
|
||||
"""Test for an OAuth discoveryflow for an MCP server where the user has not yet entered credentials."""
|
||||
result = await hass.config_entries.flow.async_init(
|
||||
DOMAIN, context={"source": config_entries.SOURCE_USER}
|
||||
)
|
||||
# MCP Server returns 401 indicating the client needs to authenticate
|
||||
mock_mcp_client.side_effect = httpx.HTTPStatusError(
|
||||
"Authentication required", request=None, response=httpx.Response(401)
|
||||
)
|
||||
# Prepare the OAuth Server metadata
|
||||
respx.get(OAUTH_DISCOVERY_ENDPOINT).mock(
|
||||
return_value=OAUTH_SERVER_METADATA_RESPONSE
|
||||
)
|
||||
|
||||
result = await hass.config_entries.flow.async_configure(
|
||||
result["flow_id"],
|
||||
{
|
||||
CONF_URL: MCP_SERVER_URL,
|
||||
},
|
||||
)
|
||||
|
||||
# The config flow will abort and the user will be taken to the application credentials UI
|
||||
# to enter their credentials.
|
||||
assert result["type"] is FlowResultType.ABORT
|
||||
assert result["reason"] == "missing_credentials"
|
||||
|
||||
|
||||
async def perform_oauth_flow(
|
||||
hass: HomeAssistant,
|
||||
aioclient_mock: AiohttpClientMocker,
|
||||
hass_client_no_auth: ClientSessionGenerator,
|
||||
result: config_entries.ConfigFlowResult,
|
||||
authorize_url: str = OAUTH_AUTHORIZE_URL,
|
||||
token_url: str = OAUTH_TOKEN_URL,
|
||||
) -> config_entries.ConfigFlowResult:
|
||||
"""Perform the common steps of the OAuth flow.
|
||||
|
||||
Expects to be called from the step where the user selects credentials.
|
||||
"""
|
||||
state = config_entry_oauth2_flow._encode_jwt(
|
||||
hass,
|
||||
{
|
||||
"flow_id": result["flow_id"],
|
||||
"redirect_uri": OAUTH_CALLBACK_URL,
|
||||
},
|
||||
)
|
||||
assert result["url"] == (
|
||||
f"{authorize_url}?response_type=code&client_id={CLIENT_ID}"
|
||||
f"&redirect_uri={OAUTH_CALLBACK_URL}"
|
||||
f"&state={state}"
|
||||
)
|
||||
|
||||
client = await hass_client_no_auth()
|
||||
resp = await client.get(f"{CALLBACK_PATH}?code={OAUTH_CODE}&state={state}")
|
||||
assert resp.status == 200
|
||||
assert resp.headers["content-type"] == "text/html; charset=utf-8"
|
||||
|
||||
aioclient_mock.post(
|
||||
token_url,
|
||||
json=OAUTH_TOKEN_PAYLOAD,
|
||||
)
|
||||
|
||||
return result
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
("oauth_server_metadata_response", "expected_authorize_url", "expected_token_url"),
|
||||
[
|
||||
(OAUTH_SERVER_METADATA_RESPONSE, OAUTH_AUTHORIZE_URL, OAUTH_TOKEN_URL),
|
||||
(
|
||||
httpx.Response(
|
||||
status_code=200,
|
||||
text=json.dumps(
|
||||
{
|
||||
"authorization_endpoint": "/authorize-path",
|
||||
"token_endpoint": "/token-path",
|
||||
}
|
||||
),
|
||||
),
|
||||
f"{MCP_SERVER_BASE_URL}/authorize-path",
|
||||
f"{MCP_SERVER_BASE_URL}/token-path",
|
||||
),
|
||||
(
|
||||
httpx.Response(status_code=404),
|
||||
f"{MCP_SERVER_BASE_URL}/authorize",
|
||||
f"{MCP_SERVER_BASE_URL}/token",
|
||||
),
|
||||
],
|
||||
ids=(
|
||||
"discovery",
|
||||
"relative_paths",
|
||||
"no_discovery_metadata",
|
||||
),
|
||||
)
|
||||
@pytest.mark.usefixtures("current_request_with_host")
|
||||
@respx.mock
|
||||
async def test_authentication_flow(
|
||||
hass: HomeAssistant,
|
||||
mock_setup_entry: AsyncMock,
|
||||
mock_mcp_client: Mock,
|
||||
credential: None,
|
||||
aioclient_mock: AiohttpClientMocker,
|
||||
hass_client_no_auth: ClientSessionGenerator,
|
||||
oauth_server_metadata_response: httpx.Response,
|
||||
expected_authorize_url: str,
|
||||
expected_token_url: str,
|
||||
) -> None:
|
||||
"""Test for an OAuth authentication flow for an MCP server."""
|
||||
|
||||
result = await hass.config_entries.flow.async_init(
|
||||
DOMAIN, context={"source": config_entries.SOURCE_USER}
|
||||
)
|
||||
# MCP Server returns 401 indicating the client needs to authenticate
|
||||
mock_mcp_client.side_effect = httpx.HTTPStatusError(
|
||||
"Authentication required", request=None, response=httpx.Response(401)
|
||||
)
|
||||
# Prepare the OAuth Server metadata
|
||||
respx.get(OAUTH_DISCOVERY_ENDPOINT).mock(
|
||||
return_value=oauth_server_metadata_response
|
||||
)
|
||||
|
||||
result = await hass.config_entries.flow.async_configure(
|
||||
result["flow_id"],
|
||||
{
|
||||
CONF_URL: MCP_SERVER_URL,
|
||||
},
|
||||
)
|
||||
assert result["type"] is FlowResultType.MENU
|
||||
assert result["step_id"] == "credentials_choice"
|
||||
|
||||
result = await hass.config_entries.flow.async_configure(
|
||||
result["flow_id"],
|
||||
{
|
||||
"next_step_id": "pick_implementation",
|
||||
},
|
||||
)
|
||||
assert result["type"] is FlowResultType.EXTERNAL_STEP
|
||||
result = await perform_oauth_flow(
|
||||
hass,
|
||||
aioclient_mock,
|
||||
hass_client_no_auth,
|
||||
result,
|
||||
authorize_url=expected_authorize_url,
|
||||
token_url=expected_token_url,
|
||||
)
|
||||
|
||||
# Client now accepts credentials
|
||||
mock_mcp_client.side_effect = None
|
||||
response = Mock()
|
||||
response.serverInfo.name = TEST_API_NAME
|
||||
mock_mcp_client.return_value.initialize.return_value = response
|
||||
|
||||
result = await hass.config_entries.flow.async_configure(result["flow_id"])
|
||||
assert result["type"] is FlowResultType.CREATE_ENTRY
|
||||
assert result["title"] == TEST_API_NAME
|
||||
data = result["data"]
|
||||
token = data.pop(CONF_TOKEN)
|
||||
assert data == {
|
||||
"auth_implementation": AUTH_DOMAIN,
|
||||
CONF_URL: MCP_SERVER_URL,
|
||||
CONF_AUTHORIZATION_URL: expected_authorize_url,
|
||||
CONF_TOKEN_URL: expected_token_url,
|
||||
}
|
||||
assert token
|
||||
token.pop("expires_at")
|
||||
assert token == OAUTH_TOKEN_PAYLOAD
|
||||
|
||||
assert len(mock_setup_entry.mock_calls) == 1
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
("side_effect", "expected_error"),
|
||||
[
|
||||
(httpx.TimeoutException("Some timeout"), "timeout_connect"),
|
||||
(
|
||||
httpx.HTTPStatusError("", request=None, response=httpx.Response(500)),
|
||||
"cannot_connect",
|
||||
),
|
||||
(httpx.HTTPError("Some HTTP error"), "cannot_connect"),
|
||||
(Exception, "unknown"),
|
||||
],
|
||||
)
|
||||
@pytest.mark.usefixtures("current_request_with_host")
|
||||
@respx.mock
|
||||
async def test_oauth_discovery_failure(
|
||||
hass: HomeAssistant,
|
||||
mock_setup_entry: AsyncMock,
|
||||
mock_mcp_client: Mock,
|
||||
credential: None,
|
||||
aioclient_mock: AiohttpClientMocker,
|
||||
hass_client_no_auth: ClientSessionGenerator,
|
||||
side_effect: Exception,
|
||||
expected_error: str,
|
||||
) -> None:
|
||||
"""Test for an OAuth authentication flow for an MCP server."""
|
||||
|
||||
result = await hass.config_entries.flow.async_init(
|
||||
DOMAIN, context={"source": config_entries.SOURCE_USER}
|
||||
)
|
||||
# MCP Server returns 401 indicating the client needs to authenticate
|
||||
mock_mcp_client.side_effect = httpx.HTTPStatusError(
|
||||
"Authentication required", request=None, response=httpx.Response(401)
|
||||
)
|
||||
# Prepare the OAuth Server metadata
|
||||
respx.get(OAUTH_DISCOVERY_ENDPOINT).mock(side_effect=side_effect)
|
||||
|
||||
result = await hass.config_entries.flow.async_configure(
|
||||
result["flow_id"],
|
||||
{
|
||||
CONF_URL: MCP_SERVER_URL,
|
||||
},
|
||||
)
|
||||
assert result["type"] is FlowResultType.ABORT
|
||||
assert result["reason"] == expected_error
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
("side_effect", "expected_error"),
|
||||
[
|
||||
(httpx.TimeoutException("Some timeout"), "timeout_connect"),
|
||||
(
|
||||
httpx.HTTPStatusError("", request=None, response=httpx.Response(500)),
|
||||
"cannot_connect",
|
||||
),
|
||||
(httpx.HTTPError("Some HTTP error"), "cannot_connect"),
|
||||
(Exception, "unknown"),
|
||||
],
|
||||
)
|
||||
@pytest.mark.usefixtures("current_request_with_host")
|
||||
@respx.mock
|
||||
async def test_authentication_flow_server_failure_abort(
|
||||
hass: HomeAssistant,
|
||||
mock_setup_entry: AsyncMock,
|
||||
mock_mcp_client: Mock,
|
||||
credential: None,
|
||||
aioclient_mock: AiohttpClientMocker,
|
||||
hass_client_no_auth: ClientSessionGenerator,
|
||||
side_effect: Exception,
|
||||
expected_error: str,
|
||||
) -> None:
|
||||
"""Test for an OAuth authentication flow for an MCP server."""
|
||||
|
||||
result = await hass.config_entries.flow.async_init(
|
||||
DOMAIN, context={"source": config_entries.SOURCE_USER}
|
||||
)
|
||||
# MCP Server returns 401 indicating the client needs to authenticate
|
||||
mock_mcp_client.side_effect = httpx.HTTPStatusError(
|
||||
"Authentication required", request=None, response=httpx.Response(401)
|
||||
)
|
||||
# Prepare the OAuth Server metadata
|
||||
respx.get(OAUTH_DISCOVERY_ENDPOINT).mock(
|
||||
return_value=OAUTH_SERVER_METADATA_RESPONSE
|
||||
)
|
||||
|
||||
result = await hass.config_entries.flow.async_configure(
|
||||
result["flow_id"],
|
||||
{
|
||||
CONF_URL: MCP_SERVER_URL,
|
||||
},
|
||||
)
|
||||
assert result["type"] is FlowResultType.MENU
|
||||
assert result["step_id"] == "credentials_choice"
|
||||
|
||||
result = await hass.config_entries.flow.async_configure(
|
||||
result["flow_id"],
|
||||
{
|
||||
"next_step_id": "pick_implementation",
|
||||
},
|
||||
)
|
||||
assert result["type"] is FlowResultType.EXTERNAL_STEP
|
||||
result = await perform_oauth_flow(
|
||||
hass,
|
||||
aioclient_mock,
|
||||
hass_client_no_auth,
|
||||
result,
|
||||
)
|
||||
|
||||
# Client fails with an error
|
||||
mock_mcp_client.side_effect = side_effect
|
||||
|
||||
result = await hass.config_entries.flow.async_configure(result["flow_id"])
|
||||
assert result["type"] is FlowResultType.ABORT
|
||||
assert result["reason"] == expected_error
|
||||
|
||||
|
||||
@pytest.mark.usefixtures("current_request_with_host")
|
||||
@respx.mock
|
||||
async def test_authentication_flow_server_missing_tool_capabilities(
|
||||
hass: HomeAssistant,
|
||||
mock_setup_entry: AsyncMock,
|
||||
mock_mcp_client: Mock,
|
||||
credential: None,
|
||||
aioclient_mock: AiohttpClientMocker,
|
||||
hass_client_no_auth: ClientSessionGenerator,
|
||||
) -> None:
|
||||
"""Test for an OAuth authentication flow for an MCP server."""
|
||||
|
||||
result = await hass.config_entries.flow.async_init(
|
||||
DOMAIN, context={"source": config_entries.SOURCE_USER}
|
||||
)
|
||||
# MCP Server returns 401 indicating the client needs to authenticate
|
||||
mock_mcp_client.side_effect = httpx.HTTPStatusError(
|
||||
"Authentication required", request=None, response=httpx.Response(401)
|
||||
)
|
||||
# Prepare the OAuth Server metadata
|
||||
respx.get(OAUTH_DISCOVERY_ENDPOINT).mock(
|
||||
return_value=OAUTH_SERVER_METADATA_RESPONSE
|
||||
)
|
||||
|
||||
result = await hass.config_entries.flow.async_configure(
|
||||
result["flow_id"],
|
||||
{
|
||||
CONF_URL: MCP_SERVER_URL,
|
||||
},
|
||||
)
|
||||
assert result["type"] is FlowResultType.MENU
|
||||
assert result["step_id"] == "credentials_choice"
|
||||
|
||||
result = await hass.config_entries.flow.async_configure(
|
||||
result["flow_id"],
|
||||
{
|
||||
"next_step_id": "pick_implementation",
|
||||
},
|
||||
)
|
||||
assert result["type"] is FlowResultType.EXTERNAL_STEP
|
||||
result = await perform_oauth_flow(
|
||||
hass,
|
||||
aioclient_mock,
|
||||
hass_client_no_auth,
|
||||
result,
|
||||
)
|
||||
|
||||
# Client can now authenticate
|
||||
mock_mcp_client.side_effect = None
|
||||
|
||||
response = Mock()
|
||||
response.serverInfo.name = TEST_API_NAME
|
||||
response.capabilities.tools = None
|
||||
mock_mcp_client.return_value.initialize.return_value = response
|
||||
|
||||
result = await hass.config_entries.flow.async_configure(result["flow_id"])
|
||||
assert result["type"] is FlowResultType.ABORT
|
||||
assert result["reason"] == "missing_capabilities"
|
||||
|
||||
|
||||
@pytest.mark.usefixtures("current_request_with_host")
|
||||
@respx.mock
|
||||
async def test_reauth_flow(
|
||||
hass: HomeAssistant,
|
||||
mock_setup_entry: AsyncMock,
|
||||
mock_mcp_client: Mock,
|
||||
credential: None,
|
||||
config_entry_with_auth: MockConfigEntry,
|
||||
aioclient_mock: AiohttpClientMocker,
|
||||
hass_client_no_auth: ClientSessionGenerator,
|
||||
) -> None:
|
||||
"""Test for an OAuth authentication flow for an MCP server."""
|
||||
config_entry_with_auth.async_start_reauth(hass)
|
||||
await hass.async_block_till_done()
|
||||
|
||||
flows = hass.config_entries.flow.async_progress()
|
||||
assert len(flows) == 1
|
||||
result = flows[0]
|
||||
assert result["step_id"] == "reauth_confirm"
|
||||
|
||||
result = await hass.config_entries.flow.async_configure(result["flow_id"], {})
|
||||
|
||||
result = await perform_oauth_flow(hass, aioclient_mock, hass_client_no_auth, result)
|
||||
|
||||
# Verify we can connect to the server
|
||||
response = Mock()
|
||||
response.serverInfo.name = TEST_API_NAME
|
||||
mock_mcp_client.return_value.initialize.return_value = response
|
||||
|
||||
result = await hass.config_entries.flow.async_configure(result["flow_id"])
|
||||
assert result["type"] is FlowResultType.ABORT
|
||||
assert result["reason"] == "reauth_successful"
|
||||
|
||||
assert config_entry_with_auth.unique_id == AUTH_DOMAIN
|
||||
assert config_entry_with_auth.title == TEST_API_NAME
|
||||
data = {**config_entry_with_auth.data}
|
||||
token = data.pop(CONF_TOKEN)
|
||||
assert data == {
|
||||
"auth_implementation": AUTH_DOMAIN,
|
||||
CONF_URL: MCP_SERVER_URL,
|
||||
CONF_AUTHORIZATION_URL: OAUTH_AUTHORIZE_URL,
|
||||
CONF_TOKEN_URL: OAUTH_TOKEN_URL,
|
||||
}
|
||||
assert token
|
||||
token.pop("expires_at")
|
||||
assert token == OAUTH_TOKEN_PAYLOAD
|
||||
|
||||
assert len(mock_setup_entry.mock_calls) == 1
|
||||
|
@ -76,17 +76,45 @@ async def test_init(
|
||||
assert config_entry.state is ConfigEntryState.NOT_LOADED
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
("side_effect"),
|
||||
[
|
||||
(httpx.TimeoutException("Some timeout")),
|
||||
(httpx.HTTPStatusError("", request=None, response=httpx.Response(500))),
|
||||
(httpx.HTTPStatusError("", request=None, response=httpx.Response(401))),
|
||||
(httpx.HTTPError("Some HTTP error")),
|
||||
],
|
||||
)
|
||||
async def test_mcp_server_failure(
|
||||
hass: HomeAssistant, config_entry: MockConfigEntry, mock_mcp_client: Mock
|
||||
hass: HomeAssistant,
|
||||
config_entry: MockConfigEntry,
|
||||
mock_mcp_client: Mock,
|
||||
side_effect: Exception,
|
||||
) -> None:
|
||||
"""Test the integration fails to setup if the server fails initialization."""
|
||||
mock_mcp_client.side_effect = side_effect
|
||||
|
||||
await hass.config_entries.async_setup(config_entry.entry_id)
|
||||
assert config_entry.state is ConfigEntryState.SETUP_RETRY
|
||||
|
||||
|
||||
async def test_mcp_server_authentication_failure(
|
||||
hass: HomeAssistant,
|
||||
credential: None,
|
||||
config_entry_with_auth: MockConfigEntry,
|
||||
mock_mcp_client: Mock,
|
||||
) -> None:
|
||||
"""Test the integration fails to setup if the server fails authentication."""
|
||||
mock_mcp_client.side_effect = httpx.HTTPStatusError(
|
||||
"", request=None, response=httpx.Response(500)
|
||||
"Authentication required", request=None, response=httpx.Response(401)
|
||||
)
|
||||
|
||||
with patch("homeassistant.components.mcp.coordinator.TIMEOUT", 1):
|
||||
await hass.config_entries.async_setup(config_entry.entry_id)
|
||||
assert config_entry.state is ConfigEntryState.SETUP_RETRY
|
||||
await hass.config_entries.async_setup(config_entry_with_auth.entry_id)
|
||||
assert config_entry_with_auth.state is ConfigEntryState.SETUP_ERROR
|
||||
|
||||
flows = hass.config_entries.flow.async_progress()
|
||||
assert len(flows) == 1
|
||||
assert flows[0]["step_id"] == "reauth_confirm"
|
||||
|
||||
|
||||
async def test_list_tools_failure(
|
||||
|
@ -14,14 +14,13 @@ from homeassistant.const import (
|
||||
CONF_PASSWORD,
|
||||
CONF_PORT,
|
||||
CONF_RESOURCES,
|
||||
CONF_SCAN_INTERVAL,
|
||||
CONF_USERNAME,
|
||||
)
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.data_entry_flow import FlowResultType
|
||||
from homeassistant.helpers.service_info.zeroconf import ZeroconfServiceInfo
|
||||
|
||||
from .util import _get_mock_nutclient
|
||||
from .util import _get_mock_nutclient, async_init_integration
|
||||
|
||||
from tests.common import MockConfigEntry
|
||||
|
||||
@ -525,6 +524,104 @@ async def test_abort_if_already_setup(hass: HomeAssistant) -> None:
|
||||
assert result2["reason"] == "already_configured"
|
||||
|
||||
|
||||
async def test_abort_duplicate_unique_ids(hass: HomeAssistant) -> None:
|
||||
"""Test we abort if unique_id is already setup."""
|
||||
|
||||
list_vars = {
|
||||
"device.mfr": "Some manufacturer",
|
||||
"device.model": "Some model",
|
||||
"device.serial": "0000-1",
|
||||
}
|
||||
await async_init_integration(
|
||||
hass,
|
||||
list_ups={"ups1": "UPS 1"},
|
||||
list_vars=list_vars,
|
||||
)
|
||||
|
||||
mock_pynut = _get_mock_nutclient(list_ups={"ups2": "UPS 2"}, list_vars=list_vars)
|
||||
|
||||
result = await hass.config_entries.flow.async_init(
|
||||
DOMAIN, context={"source": config_entries.SOURCE_USER}
|
||||
)
|
||||
|
||||
with patch(
|
||||
"homeassistant.components.nut.AIONUTClient",
|
||||
return_value=mock_pynut,
|
||||
):
|
||||
result2 = await hass.config_entries.flow.async_configure(
|
||||
result["flow_id"],
|
||||
{
|
||||
CONF_HOST: "1.1.1.1",
|
||||
CONF_PORT: 2222,
|
||||
},
|
||||
)
|
||||
await hass.async_block_till_done()
|
||||
|
||||
assert result2["type"] is FlowResultType.ABORT
|
||||
assert result2["reason"] == "already_configured"
|
||||
|
||||
|
||||
async def test_abort_multiple_ups_duplicate_unique_ids(hass: HomeAssistant) -> None:
|
||||
"""Test we abort on multiple devices if unique_id is already setup."""
|
||||
|
||||
list_vars = {
|
||||
"device.mfr": "Some manufacturer",
|
||||
"device.model": "Some model",
|
||||
"device.serial": "0000-1",
|
||||
}
|
||||
|
||||
mock_pynut = _get_mock_nutclient(
|
||||
list_ups={"ups2": "UPS 2", "ups3": "UPS 3"}, list_vars=list_vars
|
||||
)
|
||||
|
||||
result = await hass.config_entries.flow.async_init(
|
||||
DOMAIN, context={"source": config_entries.SOURCE_USER}
|
||||
)
|
||||
assert result["type"] is FlowResultType.FORM
|
||||
assert result["errors"] == {}
|
||||
|
||||
with patch(
|
||||
"homeassistant.components.nut.AIONUTClient",
|
||||
return_value=mock_pynut,
|
||||
):
|
||||
result2 = await hass.config_entries.flow.async_configure(
|
||||
result["flow_id"],
|
||||
{
|
||||
CONF_HOST: "1.1.1.1",
|
||||
CONF_PORT: 2222,
|
||||
},
|
||||
)
|
||||
await hass.async_block_till_done()
|
||||
|
||||
assert result2["step_id"] == "ups"
|
||||
assert result2["type"] is FlowResultType.FORM
|
||||
|
||||
await async_init_integration(
|
||||
hass,
|
||||
list_ups={"ups1": "UPS 1"},
|
||||
list_vars=list_vars,
|
||||
)
|
||||
|
||||
with (
|
||||
patch(
|
||||
"homeassistant.components.nut.AIONUTClient",
|
||||
return_value=mock_pynut,
|
||||
),
|
||||
patch(
|
||||
"homeassistant.components.nut.async_setup_entry",
|
||||
return_value=True,
|
||||
),
|
||||
):
|
||||
result3 = await hass.config_entries.flow.async_configure(
|
||||
result["flow_id"],
|
||||
{CONF_ALIAS: "ups2"},
|
||||
)
|
||||
await hass.async_block_till_done()
|
||||
|
||||
assert result3["type"] is FlowResultType.ABORT
|
||||
assert result3["reason"] == "already_configured"
|
||||
|
||||
|
||||
async def test_abort_if_already_setup_alias(hass: HomeAssistant) -> None:
|
||||
"""Test we abort if component is already setup with same alias."""
|
||||
config_entry = MockConfigEntry(
|
||||
@ -573,45 +670,3 @@ async def test_abort_if_already_setup_alias(hass: HomeAssistant) -> None:
|
||||
|
||||
assert result3["type"] is FlowResultType.ABORT
|
||||
assert result3["reason"] == "already_configured"
|
||||
|
||||
|
||||
async def test_options_flow(hass: HomeAssistant) -> None:
|
||||
"""Test config flow options."""
|
||||
|
||||
config_entry = MockConfigEntry(
|
||||
domain=DOMAIN,
|
||||
unique_id="abcde12345",
|
||||
data=VALID_CONFIG,
|
||||
)
|
||||
config_entry.add_to_hass(hass)
|
||||
|
||||
with patch("homeassistant.components.nut.async_setup_entry", return_value=True):
|
||||
result = await hass.config_entries.options.async_init(config_entry.entry_id)
|
||||
|
||||
assert result["type"] is FlowResultType.FORM
|
||||
assert result["step_id"] == "init"
|
||||
|
||||
result = await hass.config_entries.options.async_configure(
|
||||
result["flow_id"], user_input={}
|
||||
)
|
||||
|
||||
assert result["type"] is FlowResultType.CREATE_ENTRY
|
||||
assert config_entry.options == {
|
||||
CONF_SCAN_INTERVAL: 60,
|
||||
}
|
||||
|
||||
with patch("homeassistant.components.nut.async_setup_entry", return_value=True):
|
||||
result2 = await hass.config_entries.options.async_init(config_entry.entry_id)
|
||||
|
||||
assert result2["type"] is FlowResultType.FORM
|
||||
assert result2["step_id"] == "init"
|
||||
|
||||
result2 = await hass.config_entries.options.async_configure(
|
||||
result2["flow_id"],
|
||||
user_input={CONF_SCAN_INTERVAL: 12},
|
||||
)
|
||||
|
||||
assert result2["type"] is FlowResultType.CREATE_ENTRY
|
||||
assert config_entry.options == {
|
||||
CONF_SCAN_INTERVAL: 12,
|
||||
}
|
||||
|
@ -12,6 +12,7 @@ from homeassistant.const import (
|
||||
CONF_HOST,
|
||||
CONF_PASSWORD,
|
||||
CONF_PORT,
|
||||
CONF_SCAN_INTERVAL,
|
||||
CONF_USERNAME,
|
||||
STATE_UNAVAILABLE,
|
||||
)
|
||||
@ -23,6 +24,32 @@ from .util import _get_mock_nutclient, async_init_integration
|
||||
from tests.common import MockConfigEntry
|
||||
|
||||
|
||||
async def test_config_entry_migrations(hass: HomeAssistant) -> None:
|
||||
"""Test that config entries were migrated."""
|
||||
mock_pynut = _get_mock_nutclient(
|
||||
list_vars={"battery.voltage": "voltage"},
|
||||
list_ups={"ups1": "UPS 1"},
|
||||
)
|
||||
|
||||
with patch(
|
||||
"homeassistant.components.nut.AIONUTClient",
|
||||
return_value=mock_pynut,
|
||||
):
|
||||
entry = MockConfigEntry(
|
||||
domain=DOMAIN,
|
||||
data={
|
||||
CONF_HOST: "1.1.1.1",
|
||||
CONF_PORT: 123,
|
||||
},
|
||||
options={CONF_SCAN_INTERVAL: 30},
|
||||
)
|
||||
entry.add_to_hass(hass)
|
||||
|
||||
assert await hass.config_entries.async_setup(entry.entry_id)
|
||||
|
||||
assert CONF_SCAN_INTERVAL not in entry.options
|
||||
|
||||
|
||||
async def test_async_setup_entry(hass: HomeAssistant) -> None:
|
||||
"""Test a successful setup entry."""
|
||||
entry = MockConfigEntry(
|
||||
|
@ -1,7 +1,9 @@
|
||||
"""Tests for services."""
|
||||
|
||||
from datetime import datetime
|
||||
from unittest.mock import AsyncMock, MagicMock
|
||||
|
||||
from ohme import ChargeSlot
|
||||
import pytest
|
||||
from syrupy.assertion import SnapshotAssertion
|
||||
|
||||
@ -30,11 +32,11 @@ async def test_list_charge_slots(
|
||||
await setup_integration(hass, mock_config_entry)
|
||||
|
||||
mock_client.slots = [
|
||||
{
|
||||
"start": "2024-12-30T04:00:00+00:00",
|
||||
"end": "2024-12-30T04:30:39+00:00",
|
||||
"energy": 2.042,
|
||||
}
|
||||
ChargeSlot(
|
||||
datetime.fromisoformat("2024-12-30T04:00:00+00:00"),
|
||||
datetime.fromisoformat("2024-12-30T04:30:39+00:00"),
|
||||
2.042,
|
||||
)
|
||||
]
|
||||
|
||||
assert snapshot == await hass.services.async_call(
|
||||
|
50
tests/components/pglab/test_common.py
Normal file
50
tests/components/pglab/test_common.py
Normal file
@ -0,0 +1,50 @@
|
||||
"""Common code for PG LAB Electronics tests."""
|
||||
|
||||
import json
|
||||
|
||||
from homeassistant.core import HomeAssistant
|
||||
|
||||
from tests.common import async_fire_mqtt_message
|
||||
|
||||
|
||||
def get_device_discovery_payload(
|
||||
number_of_shutters: int,
|
||||
number_of_boards: int,
|
||||
device_name: str = "test",
|
||||
) -> dict[str, any]:
|
||||
"""Return the device discovery payload."""
|
||||
|
||||
# be sure the number of shutters and boards are in the correct range
|
||||
assert 0 <= number_of_boards <= 8
|
||||
assert 0 <= number_of_shutters <= (number_of_boards * 4)
|
||||
|
||||
# define the number of E-RELAY boards connected to E-BOARD
|
||||
boards = "1" * number_of_boards + "0" * (8 - number_of_boards)
|
||||
|
||||
return {
|
||||
"ip": "192.168.1.16",
|
||||
"mac": "80:34:28:1B:18:5A",
|
||||
"name": device_name,
|
||||
"hw": "1.0.7",
|
||||
"fw": "1.0.0",
|
||||
"type": "E-BOARD",
|
||||
"id": "E-BOARD-DD53AC85",
|
||||
"manufacturer": "PG LAB Electronics",
|
||||
"params": {"shutters": number_of_shutters, "boards": boards},
|
||||
}
|
||||
|
||||
|
||||
async def send_discovery_message(
|
||||
hass: HomeAssistant,
|
||||
payload: dict[str, any] | None,
|
||||
) -> None:
|
||||
"""Send the discovery message to make E-BOARD device discoverable."""
|
||||
|
||||
topic = "pglab/discovery/E-BOARD-DD53AC85/config"
|
||||
|
||||
async_fire_mqtt_message(
|
||||
hass,
|
||||
topic,
|
||||
json.dumps(payload if payload is not None else ""),
|
||||
)
|
||||
await hass.async_block_till_done()
|
@ -1,7 +1,5 @@
|
||||
"""The tests for the PG LAB Electronics cover."""
|
||||
|
||||
import json
|
||||
|
||||
from homeassistant.components import cover
|
||||
from homeassistant.components.cover import (
|
||||
DOMAIN as COVER_DOMAIN,
|
||||
@ -19,6 +17,8 @@ from homeassistant.const import (
|
||||
)
|
||||
from homeassistant.core import HomeAssistant
|
||||
|
||||
from .test_common import get_device_discovery_payload, send_discovery_message
|
||||
|
||||
from tests.common import async_fire_mqtt_message
|
||||
from tests.typing import MqttMockHAClient
|
||||
|
||||
@ -43,25 +43,13 @@ async def test_cover_features(
|
||||
hass: HomeAssistant, mqtt_mock: MqttMockHAClient, setup_pglab
|
||||
) -> None:
|
||||
"""Test cover features."""
|
||||
topic = "pglab/discovery/E-Board-DD53AC85/config"
|
||||
payload = {
|
||||
"ip": "192.168.1.16",
|
||||
"mac": "80:34:28:1B:18:5A",
|
||||
"name": "test",
|
||||
"hw": "1.0.7",
|
||||
"fw": "1.0.0",
|
||||
"type": "E-Board",
|
||||
"id": "E-Board-DD53AC85",
|
||||
"manufacturer": "PG LAB Electronics",
|
||||
"params": {"shutters": 4, "boards": "10000000"},
|
||||
}
|
||||
|
||||
async_fire_mqtt_message(
|
||||
hass,
|
||||
topic,
|
||||
json.dumps(payload),
|
||||
payload = get_device_discovery_payload(
|
||||
number_of_shutters=4,
|
||||
number_of_boards=1,
|
||||
)
|
||||
await hass.async_block_till_done()
|
||||
|
||||
await send_discovery_message(hass, payload)
|
||||
|
||||
assert len(hass.states.async_all("cover")) == 4
|
||||
|
||||
@ -75,25 +63,13 @@ async def test_cover_availability(
|
||||
hass: HomeAssistant, mqtt_mock: MqttMockHAClient, setup_pglab
|
||||
) -> None:
|
||||
"""Check if covers are properly created."""
|
||||
topic = "pglab/discovery/E-Board-DD53AC85/config"
|
||||
payload = {
|
||||
"ip": "192.168.1.16",
|
||||
"mac": "80:34:28:1B:18:5A",
|
||||
"name": "test",
|
||||
"hw": "1.0.7",
|
||||
"fw": "1.0.0",
|
||||
"type": "E-Board",
|
||||
"id": "E-Board-DD53AC85",
|
||||
"manufacturer": "PG LAB Electronics",
|
||||
"params": {"shutters": 6, "boards": "11000000"},
|
||||
}
|
||||
|
||||
async_fire_mqtt_message(
|
||||
hass,
|
||||
topic,
|
||||
json.dumps(payload),
|
||||
payload = get_device_discovery_payload(
|
||||
number_of_shutters=6,
|
||||
number_of_boards=2,
|
||||
)
|
||||
await hass.async_block_till_done()
|
||||
|
||||
await send_discovery_message(hass, payload)
|
||||
|
||||
# We are creating 6 covers using two E-RELAY devices connected to E-BOARD.
|
||||
# Now we are going to check if all covers are created and their state is unknown.
|
||||
@ -111,25 +87,12 @@ async def test_cover_change_state_via_mqtt(
|
||||
hass: HomeAssistant, mqtt_mock: MqttMockHAClient, setup_pglab
|
||||
) -> None:
|
||||
"""Test state update via MQTT."""
|
||||
topic = "pglab/discovery/E-Board-DD53AC85/config"
|
||||
payload = {
|
||||
"ip": "192.168.1.16",
|
||||
"mac": "80:34:28:1B:18:5A",
|
||||
"name": "test",
|
||||
"hw": "1.0.7",
|
||||
"fw": "1.0.0",
|
||||
"type": "E-Board",
|
||||
"id": "E-Board-DD53AC85",
|
||||
"manufacturer": "PG LAB Electronics",
|
||||
"params": {"shutters": 2, "boards": "10000000"},
|
||||
}
|
||||
|
||||
async_fire_mqtt_message(
|
||||
hass,
|
||||
topic,
|
||||
json.dumps(payload),
|
||||
payload = get_device_discovery_payload(
|
||||
number_of_shutters=2,
|
||||
number_of_boards=1,
|
||||
)
|
||||
await hass.async_block_till_done()
|
||||
|
||||
await send_discovery_message(hass, payload)
|
||||
|
||||
# Check initial state is unknown
|
||||
cover = hass.states.get("cover.test_shutter_0")
|
||||
@ -165,25 +128,13 @@ async def test_cover_mqtt_state_by_calling_service(
|
||||
hass: HomeAssistant, mqtt_mock: MqttMockHAClient, setup_pglab
|
||||
) -> None:
|
||||
"""Calling service to OPEN/CLOSE cover and check mqtt state."""
|
||||
topic = "pglab/discovery/E-Board-DD53AC85/config"
|
||||
payload = {
|
||||
"ip": "192.168.1.16",
|
||||
"mac": "80:34:28:1B:18:5A",
|
||||
"name": "test",
|
||||
"hw": "1.0.7",
|
||||
"fw": "1.0.0",
|
||||
"type": "E-Board",
|
||||
"id": "E-Board-DD53AC85",
|
||||
"manufacturer": "PG LAB Electronics",
|
||||
"params": {"shutters": 2, "boards": "10000000"},
|
||||
}
|
||||
|
||||
async_fire_mqtt_message(
|
||||
hass,
|
||||
topic,
|
||||
json.dumps(payload),
|
||||
payload = get_device_discovery_payload(
|
||||
number_of_shutters=2,
|
||||
number_of_boards=1,
|
||||
)
|
||||
await hass.async_block_till_done()
|
||||
|
||||
await send_discovery_message(hass, payload)
|
||||
|
||||
cover = hass.states.get("cover.test_shutter_0")
|
||||
assert cover.state == STATE_UNKNOWN
|
||||
|
@ -1,13 +1,12 @@
|
||||
"""The tests for the PG LAB Electronics discovery device."""
|
||||
|
||||
import json
|
||||
|
||||
from syrupy.assertion import SnapshotAssertion
|
||||
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers import device_registry as dr
|
||||
|
||||
from tests.common import async_fire_mqtt_message
|
||||
from .test_common import get_device_discovery_payload, send_discovery_message
|
||||
|
||||
from tests.typing import MqttMockHAClient
|
||||
|
||||
|
||||
@ -19,25 +18,13 @@ async def test_device_discover(
|
||||
setup_pglab,
|
||||
) -> None:
|
||||
"""Test setting up a device."""
|
||||
topic = "pglab/discovery/E-Board-DD53AC85/config"
|
||||
payload = {
|
||||
"ip": "192.168.1.16",
|
||||
"mac": "80:34:28:1B:18:5A",
|
||||
"name": "test",
|
||||
"hw": "1.0.7",
|
||||
"fw": "1.0.0",
|
||||
"type": "E-Board",
|
||||
"id": "E-Board-DD53AC85",
|
||||
"manufacturer": "PG LAB Electronics",
|
||||
"params": {"shutters": 0, "boards": "11000000"},
|
||||
}
|
||||
|
||||
async_fire_mqtt_message(
|
||||
hass,
|
||||
topic,
|
||||
json.dumps(payload),
|
||||
payload = get_device_discovery_payload(
|
||||
number_of_shutters=0,
|
||||
number_of_boards=2,
|
||||
)
|
||||
await hass.async_block_till_done()
|
||||
|
||||
await send_discovery_message(hass, payload)
|
||||
|
||||
# Verify device and registry entries are created
|
||||
device_entry = device_reg.async_get_device(
|
||||
@ -60,25 +47,12 @@ async def test_device_update(
|
||||
snapshot: SnapshotAssertion,
|
||||
) -> None:
|
||||
"""Test update a device."""
|
||||
topic = "pglab/discovery/E-Board-DD53AC85/config"
|
||||
payload = {
|
||||
"ip": "192.168.1.16",
|
||||
"mac": "80:34:28:1B:18:5A",
|
||||
"name": "test",
|
||||
"hw": "1.0.7",
|
||||
"fw": "1.0.0",
|
||||
"type": "E-Board",
|
||||
"id": "E-Board-DD53AC85",
|
||||
"manufacturer": "PG LAB Electronics",
|
||||
"params": {"shutters": 0, "boards": "11000000"},
|
||||
}
|
||||
|
||||
async_fire_mqtt_message(
|
||||
hass,
|
||||
topic,
|
||||
json.dumps(payload),
|
||||
payload = get_device_discovery_payload(
|
||||
number_of_shutters=0,
|
||||
number_of_boards=2,
|
||||
)
|
||||
await hass.async_block_till_done()
|
||||
|
||||
await send_discovery_message(hass, payload)
|
||||
|
||||
# Verify device is created
|
||||
device_entry = device_reg.async_get_device(
|
||||
@ -90,12 +64,7 @@ async def test_device_update(
|
||||
payload["fw"] = "1.0.1"
|
||||
payload["hw"] = "1.0.8"
|
||||
|
||||
async_fire_mqtt_message(
|
||||
hass,
|
||||
topic,
|
||||
json.dumps(payload),
|
||||
)
|
||||
await hass.async_block_till_done()
|
||||
await send_discovery_message(hass, payload)
|
||||
|
||||
# Verify device is created
|
||||
device_entry = device_reg.async_get_device(
|
||||
@ -114,25 +83,12 @@ async def test_device_remove(
|
||||
setup_pglab,
|
||||
) -> None:
|
||||
"""Test remove a device."""
|
||||
topic = "pglab/discovery/E-Board-DD53AC85/config"
|
||||
payload = {
|
||||
"ip": "192.168.1.16",
|
||||
"mac": "80:34:28:1B:18:5A",
|
||||
"name": "test",
|
||||
"hw": "1.0.7",
|
||||
"fw": "1.0.0",
|
||||
"type": "E-Board",
|
||||
"id": "E-Board-DD53AC85",
|
||||
"manufacturer": "PG LAB Electronics",
|
||||
"params": {"shutters": 0, "boards": "11000000"},
|
||||
}
|
||||
|
||||
async_fire_mqtt_message(
|
||||
hass,
|
||||
topic,
|
||||
json.dumps(payload),
|
||||
payload = get_device_discovery_payload(
|
||||
number_of_shutters=0,
|
||||
number_of_boards=2,
|
||||
)
|
||||
await hass.async_block_till_done()
|
||||
|
||||
await send_discovery_message(hass, payload)
|
||||
|
||||
# Verify device is created
|
||||
device_entry = device_reg.async_get_device(
|
||||
@ -140,12 +96,7 @@ async def test_device_remove(
|
||||
)
|
||||
assert device_entry is not None
|
||||
|
||||
async_fire_mqtt_message(
|
||||
hass,
|
||||
topic,
|
||||
"",
|
||||
)
|
||||
await hass.async_block_till_done()
|
||||
await send_discovery_message(hass, None)
|
||||
|
||||
# Verify device entry is removed
|
||||
device_entry = device_reg.async_get_device(
|
||||
|
Some files were not shown because too many files have changed in this diff Show More
Loading…
x
Reference in New Issue
Block a user